Zavteq/fixofx | lib/ofx/document.py | apache-2.0

#coding: utf-8
# Copyright 2005-2010 Wesabe, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ofx.document - abstract OFX document.
#
import ofx
import xml.sax.saxutils as sax
class Document:
def as_xml(self, original_format=None, date_format=None):
"""Formats this document as an OFX 2.0 XML document."""
xml = ""
# NOTE: Encoding in OFX, particularly in OFX 1.02,
# is kind of a mess. The OFX 1.02 spec talks about "UNICODE"
# as a supported encoding, which the OFX 2.0 spec has
# back-rationalized to "UTF-8". The "US-ASCII" encoding is
# given as "USASCII". Yet the 1.02 spec acknowledges that
# not everyone speaks English nor uses UNICODE, so they let
# you throw any old encoding in there you'd like. I'm going
# with the idea that if the most common encodings are named
# in an OFX file, they should be translated to "real" XML
# encodings, and if no encoding is given, UTF-8 (which is a
# superset of US-ASCII) should be assumed; but if a named
# encoding other than USASCII or 'UNICODE' is given, that
# should be preserved. I'm also adding a get_encoding()
# method so that we can start to survey what encodings
# we're actually seeing, and use that to maybe be smarter
# about this in the future.
# Forcing encoding to UTF-8
encoding = "UTF-8"
header_dict = self.parse_dict["header"]
if 'OLDFILEUID' not in header_dict:
OLDFILEUID = 'NONE'
else:
OLDFILEUID = header_dict['OLDFILEUID']
if 'NEWFILEUID' not in header_dict:
NEWFILEUID = 'NONE'
else:
NEWFILEUID = header_dict['NEWFILEUID']
xml += """<?xml version="1.0" encoding="%s"?>\n""" % encoding
xml += """<?OFX OFXHEADER="200" VERSION="200" """ + \
"""SECURITY="%s" OLDFILEUID="%s" NEWFILEUID="%s"?>\n""" % \
(self.parse_dict["header"]["SECURITY"],
OLDFILEUID,
NEWFILEUID)
if original_format is not None:
xml += """<!-- Converted from: %s -->\n""" % original_format
if date_format is not None:
xml += """<!-- Date format was: %s -->\n""" % date_format
taglist = self.parse_dict["body"]["OFX"].asList()
if len(taglist) == 1 and isinstance(taglist[0], list):
xml += self._format_xml(taglist[0])
else:
xml += self._format_xml(taglist)
return xml
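# _format_xml renders the parse tree recursively: each node is a list whose
# first element is the tag name; child lists become nested elements, while a
# single string element becomes the tag's text content.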
def _format_xml(self, mylist, indent=0):
xml = ""
indentstring = " " * indent
tag = mylist.pop(0)
if len(mylist) > 0 and isinstance(mylist[0], list):
xml += "%s<%s>\n" % (indentstring, tag)
for value in mylist:
xml += self._format_xml(value, indent=indent + 2)
xml += "%s</%s>\n" % (indentstring, tag)
elif len(mylist) > 0:
# Unescape then reescape so we don't wind up with '&lt;', oy.
value = sax.escape(sax.unescape(mylist[0]))
xml += "%s<%s>%s</%s>\n" % (indentstring, tag, value, tag)
return xml

rhyolight/nupic.son | app/melange/request/access.py | apache-2.0

# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for checking access to pages."""
from django.utils import translation
from melange.logic import user as user_logic
from melange.models import profile as profile_model
from melange.request import exception
from melange.request import links
from soc.models import program as program_model
_MESSAGE_NOT_PROGRAM_ADMINISTRATOR = translation.ugettext(
'You need to be a program administrator to access this page.')
_MESSAGE_NOT_DEVELOPER = translation.ugettext(
'This page is only accessible to developers.')
_MESSAGE_HAS_PROFILE = translation.ugettext(
'This page is accessible only to users without a profile.')
_MESSAGE_NO_PROFILE = translation.ugettext(
'Active profile is required to access this page.')
_MESSAGE_NO_URL_PROFILE = translation.ugettext(
'Active profile for %s is required to access this page.')
_MESSAGE_PROGRAM_NOT_EXISTING = translation.ugettext(
'Requested program does not exist.')
_MESSAGE_PROGRAM_NOT_ACTIVE = translation.ugettext(
'Requested program is not active at this moment.')
_MESSAGE_STUDENTS_DENIED = translation.ugettext(
'This page is not accessible to users with student profiles.')
_MESSAGE_NOT_USER_IN_URL = translation.ugettext(
'You are not logged in as the user in the URL.')
_MESSAGE_NOT_ORG_ADMIN_FOR_ORG = translation.ugettext(
'You are not organization administrator for %s')
_MESSAGE_INACTIVE_BEFORE = translation.ugettext(
'This page is inactive before %s.')
_MESSAGE_INACTIVE_OUTSIDE = translation.ugettext(
'This page is inactive before %s and after %s.')
_MESSAGE_INVALID_URL_ORG_STATUS = translation.ugettext(
'This page is not accessible to organizations with status %s.')
def ensureLoggedIn(data):
"""Ensures that the user is logged in.
Args:
data: request_data.RequestData for the current request.
Raises:
exception.LoginRequired: If the user is not logged in.
"""
if not data.gae_user:
raise exception.LoginRequired()
def ensureLoggedOut(data):
"""Ensures that the user is logged out.
Args:
data: request_data.RequestData for the current request.
Raises:
exception.Redirect: If the user is logged in this
exception will redirect them to the logout page.
"""
if data.gae_user:
raise exception.Redirect(links.LINKER.logout(data.request))
class AccessChecker(object):
"""Interface for page access checkers."""
def checkAccess(self, data, check):
"""Ensure that the user's request should be satisfied.
Implementations of this method must not effect mutations of the
passed parameters (or anything else).
Args:
data: A request_data.RequestData describing the current request.
check: An access_checker.AccessChecker object.
Raises:
exception.LoginRequired: Indicating that the user is not logged
in, but must log in to access the resource specified in their
request.
exception.Redirect: Indicating that the user is to be redirected
to another URL.
exception.UserError: Describing what was erroneous about the
user's request and describing an appropriate response.
exception.ServerError: Describing some problem that arose during
request processing and describing an appropriate response.
"""
raise NotImplementedError()
class AllAllowedAccessChecker(AccessChecker):
"""AccessChecker that allows all requests for access."""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
pass
ALL_ALLOWED_ACCESS_CHECKER = AllAllowedAccessChecker()
# TODO(nathaniel): There's some ninja polymorphism to be addressed here -
# RequestData doesn't actually have an "is_host" attribute, but its two
# major subclasses (the GCI-specific and GSoC-specific RequestData classes)
# both do, so this "works" but isn't safe or sanely testable.
class ProgramAdministratorAccessChecker(AccessChecker):
"""AccessChecker that ensures that the user is a program administrator."""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if data.is_developer:
# NOTE(nathaniel): Developers are given all the powers of
# program administrators.
return
elif not data.gae_user:
raise exception.LoginRequired()
elif not user_logic.isHostForProgram(data.ndb_user, data.program.key()):
raise exception.Forbidden(message=_MESSAGE_NOT_PROGRAM_ADMINISTRATOR)
PROGRAM_ADMINISTRATOR_ACCESS_CHECKER = ProgramAdministratorAccessChecker()
# TODO(nathaniel): Eliminate this or make it a
# "SiteAdministratorAccessChecker" - there should be no aspects of Melange
# that require developer action or are limited only to developers.
class DeveloperAccessChecker(AccessChecker):
"""AccessChecker that ensures that the user is a developer."""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if not data.is_developer:
raise exception.Forbidden(message=_MESSAGE_NOT_DEVELOPER)
DEVELOPER_ACCESS_CHECKER = DeveloperAccessChecker()
class ConjuctionAccessChecker(AccessChecker):
"""Aggregated access checker that holds a collection of other access
checkers and ensures that access is granted only if each of those checkers
grants access individually."""
def __init__(self, checkers):
"""Initializes a new instance of the access checker.
Args:
checkers: list of AccessChecker objects to be examined by this checker.
"""
self._checkers = checkers
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
for checker in self._checkers:
checker.checkAccess(data, check)
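# Illustrative composition example (not part of the original module): a single
# checker that requires both an active program and a non-student profile,
# built from checkers defined elsewhere in this file:
#
#   checker = ConjuctionAccessChecker([
#       PROGRAM_ACTIVE_ACCESS_CHECKER,
#       NON_STUDENT_PROFILE_ACCESS_CHECKER,
#   ])
#   checker.checkAccess(data, check)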
class NonStudentUrlProfileAccessChecker(AccessChecker):
"""AccessChecker that ensures that the URL user has a non-student profile."""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if data.url_ndb_profile.status != profile_model.Status.ACTIVE:
raise exception.Forbidden(
message=_MESSAGE_NO_URL_PROFILE % data.kwargs['user'])
if data.url_ndb_profile.is_student:
raise exception.Forbidden(message=_MESSAGE_STUDENTS_DENIED)
NON_STUDENT_URL_PROFILE_ACCESS_CHECKER = NonStudentUrlProfileAccessChecker()
class NonStudentProfileAccessChecker(AccessChecker):
"""AccessChecker that ensures that the currently logged-in user
has a non-student profile."""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if (not data.ndb_profile
or data.ndb_profile.status != profile_model.Status.ACTIVE):
raise exception.Forbidden(message=_MESSAGE_NO_PROFILE)
if data.ndb_profile.is_student:
raise exception.Forbidden(message=_MESSAGE_STUDENTS_DENIED)
NON_STUDENT_PROFILE_ACCESS_CHECKER = NonStudentProfileAccessChecker()
class ProgramActiveAccessChecker(AccessChecker):
"""AccessChecker that ensures that the program is currently active.
A program is considered active when the current point of time comes after
its start date and before its end date. Additionally, its status has to
be set to visible.
"""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if not data.program:
raise exception.NotFound(message=_MESSAGE_PROGRAM_NOT_EXISTING)
if (data.program.status != program_model.STATUS_VISIBLE
or not data.timeline.programActive()):
raise exception.Forbidden(message=_MESSAGE_PROGRAM_NOT_ACTIVE)
PROGRAM_ACTIVE_ACCESS_CHECKER = ProgramActiveAccessChecker()
class IsUrlUserAccessChecker(AccessChecker):
"""AccessChecker that ensures that the logged in user is the user whose
identifier is set in URL data.
"""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
key_id = data.kwargs.get('user')
if not key_id:
raise exception.BadRequest('The request does not contain user data.')
ensureLoggedIn(data)
if not data.ndb_user or data.ndb_user.key.id() != key_id:
raise exception.Forbidden(message=_MESSAGE_NOT_USER_IN_URL)
IS_URL_USER_ACCESS_CHECKER = IsUrlUserAccessChecker()
class IsUserOrgAdminForUrlOrg(AccessChecker):
"""AccessChecker that ensures that the logged in user is organization
administrator for the organization whose identifier is set in URL data.
"""
# TODO(daniel): remove this when all organizations moved to NDB
def __init__(self, is_ndb=False):
"""Initializes a new instance of this access checker.
Args:
is_ndb: a bool used to specify if the access checker will be used
for old db organizations or newer ndb organizations.
"""
self._is_ndb = is_ndb
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if not self._is_ndb:
if not data.profile:
raise exception.Forbidden(message=_MESSAGE_NO_PROFILE)
# good ol' db
if data.url_org.key() not in data.profile.org_admin_for:
raise exception.Forbidden(
message=_MESSAGE_NOT_ORG_ADMIN_FOR_ORG % data.url_org.key().name())
else:
if not data.ndb_profile:
raise exception.Forbidden(message=_MESSAGE_NO_PROFILE)
if data.url_ndb_org.key not in data.ndb_profile.admin_for:
raise exception.Forbidden(
message=_MESSAGE_NOT_ORG_ADMIN_FOR_ORG %
data.url_ndb_org.key.id())
IS_USER_ORG_ADMIN_FOR_ORG = IsUserOrgAdminForUrlOrg()
IS_USER_ORG_ADMIN_FOR_NDB_ORG = IsUserOrgAdminForUrlOrg(is_ndb=True)
class HasProfileAccessChecker(AccessChecker):
"""AccessChecker that ensures that the logged in user has an active profile
for the program specified in the URL.
"""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if (not data.ndb_profile
or data.ndb_profile.status != profile_model.Status.ACTIVE):
raise exception.Forbidden(message=_MESSAGE_NO_PROFILE)
HAS_PROFILE_ACCESS_CHECKER = HasProfileAccessChecker()
class UrlOrgStatusAccessChecker(AccessChecker):
"""AccessChecker that ensures that the organization specified in the URL
has the required status.
"""
def __init__(self, statuses):
"""Initializes a new instance of this access checker.
Args:
statuses: List of org_model.Status options with the allowed statuses.
"""
self.statuses = statuses
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if data.url_ndb_org.status not in self.statuses:
raise exception.Forbidden(
message=_MESSAGE_INVALID_URL_ORG_STATUS % data.url_ndb_org.status)
class HasNoProfileAccessChecker(AccessChecker):
"""AccessChecker that ensures that the logged in user does not have a profile
for the program specified in the URL.
"""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
ensureLoggedIn(data)
if data.ndb_profile:
raise exception.Forbidden(message=_MESSAGE_HAS_PROFILE)
HAS_NO_PROFILE_ACCESS_CHECKER = HasNoProfileAccessChecker()
class OrgSignupStartedAccessChecker(AccessChecker):
"""AccessChecker that ensures that organization sign-up period has started
for the program specified in the URL.
"""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if not data.timeline.afterOrgSignupStart():
active_from = data.timeline.orgSignupStart()
raise exception.Forbidden(message=_MESSAGE_INACTIVE_BEFORE % active_from)
ORG_SIGNUP_STARTED_ACCESS_CHECKER = OrgSignupStartedAccessChecker()
class OrgSignupActiveAccessChecker(AccessChecker):
"""AccessChecker that ensures that organization sign-up period is active
for the program specified in the URL.
"""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if not data.timeline.orgSignup():
raise exception.Forbidden(message=_MESSAGE_INACTIVE_OUTSIDE % (
data.timeline.orgSignupBetween()))
ORG_SIGNUP_ACTIVE_ACCESS_CHECKER = OrgSignupActiveAccessChecker()
class OrgsAnnouncedAccessChecker(AccessChecker):
"""AccessChecker that ensures that organizations have been announced for
the program specified in the URL.
"""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if not data.timeline.orgsAnnounced():
active_from = data.timeline.orgsAnnouncedOn()
raise exception.Forbidden(message=_MESSAGE_INACTIVE_BEFORE % active_from)
class StudentSignupActiveAccessChecker(AccessChecker):
"""AccessChecker that ensures that student sign-up period is active
for the program specified in the URL.
"""
def checkAccess(self, data, check):
"""See AccessChecker.checkAccess for specification."""
if not data.timeline.studentSignup():
raise exception.Forbidden(message=_MESSAGE_INACTIVE_OUTSIDE % (
data.timeline.studentsSignupBetween()))
STUDENT_SIGNUP_ACTIVE_ACCESS_CHECKER = StudentSignupActiveAccessChecker()

papedaniel/oioioi | oioioi/questions/admin.py | gpl-3.0

from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
from django.shortcuts import redirect, get_object_or_404
from django.template.response import TemplateResponse
from django.utils.text import get_text_list
from django.utils.translation import ugettext_lazy as _
from oioioi.base import admin
from oioioi.base.permissions import is_superuser
from oioioi.contests.admin import ContestAdmin, contest_site
from oioioi.contests.models import Contest, ContestPermission
from oioioi.contests.utils import is_contest_admin
from oioioi.questions.forms import ChangeContestMessageForm
from oioioi.questions.models import Message, MessageNotifierConfig, \
ReplyTemplate
class MessageAdmin(admin.ModelAdmin):
list_display = ['id', 'date', 'topic', 'author']
fields = ['date', 'author', 'contest', 'round', 'problem_instance',
'kind', 'topic', 'content']
readonly_fields = ['date', 'author', 'contest', 'round',
'problem_instance']
def has_add_permission(self, request):
return is_contest_admin(request)
def has_change_permission(self, request, obj=None):
if obj and not obj.contest:
return False
return self.has_add_permission(request)
def has_delete_permission(self, request, obj=None):
return self.has_change_permission(request, obj)
def get_queryset(self, request):
queryset = super(MessageAdmin, self).get_queryset(request)
queryset = queryset.filter(contest=request.contest)
return queryset
def add_view(self, request, form_url='', extra_context=None):
return redirect('add_contest_message', contest_id=request.contest.id)
def get_list_select_related(self):
return super(MessageAdmin, self).get_list_select_related() \
+ ['author', 'problem_instance', 'contest']
def change_view(self, request, object_id, form_url='', extra_context=None):
message = get_object_or_404(Message, id=object_id)
if not self.has_change_permission(request, message):
raise PermissionDenied
if request.method == 'POST':
form = ChangeContestMessageForm(message.kind, request,
request.POST, instance=message)
if form.is_valid():
if form.changed_data:
change_message = _("Changed %s.") % \
get_text_list(form.changed_data, _("and"))
else:
change_message = _("No fields changed.")
form.save()
super(MessageAdmin, self).log_change(request, message,
change_message)
return redirect('contest_messages',
contest_id=request.contest.id)
else:
form = ChangeContestMessageForm(message.kind, request,
instance=message)
return TemplateResponse(request, 'admin/questions/change_message.html',
{'form': form, 'message': message})
def response_delete(self, request):
super(MessageAdmin, self).response_delete(request)
return redirect('contest_messages', contest_id=request.contest.id)
contest_site.contest_register(Message, MessageAdmin)
class MessageNotifierConfigInline(admin.TabularInline):
model = MessageNotifierConfig
can_delete = True
extra = 0
def has_add_permission(self, request):
return True
def has_change_permission(self, request, obj=None):
return True
def has_delete_permission(self, request, obj=None):
return True
def formfield_for_foreignkey(self, db_field, request, **kwargs):
contest_admin_perm = ContestPermission.objects \
.filter(contest=request.contest) \
.filter(permission='contests.contest_admin') \
.select_related('user')
admin_ids = [p.user.id for p in contest_admin_perm]
if request.user.is_superuser:
admin_ids += [u.id for u in User.objects.filter(is_superuser=True)]
elif is_contest_admin(request):
added = MessageNotifierConfig.objects \
.filter(contest=request.contest)
admin_ids += [request.user.id] + [conf.user.id for conf in added]
else:
admin_ids = []
if db_field.name == 'user':
kwargs['queryset'] = User.objects.filter(id__in=admin_ids) \
.order_by('username')
return super(MessageNotifierConfigInline, self) \
.formfield_for_foreignkey(db_field, request, **kwargs)
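# The inline's 'user' dropdown is restricted by formfield_for_foreignkey above:
# contest admins always appear; superusers see all superusers as well; a plain
# contest admin additionally sees themselves and already-subscribed notifiers.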
class MessageNotifierContestAdminMixin(object):
def __init__(self, *args, **kwargs):
super(MessageNotifierContestAdminMixin, self).__init__(*args, **kwargs)
self.inlines = self.inlines + [MessageNotifierConfigInline]
ContestAdmin.mix_in(MessageNotifierContestAdminMixin)
class ReplyTemplateAdmin(admin.ModelAdmin):
def get_list_display(self, request):
if is_superuser(request):
return ['visible_name', 'content', 'contest', 'usage_count']
return ['visible_name', 'content', 'usage_count']
def get_list_filter(self, request):
if is_superuser(request):
return ['contest']
return []
def get_readonly_fields(self, request, obj=None):
fields = []
if obj is None:
fields.append('usage_count')
return fields
def get_form(self, request, obj=None, **kwargs):
form = super(ReplyTemplateAdmin, self).get_form(request, obj, **kwargs)
if 'contest' in form.base_fields:
if not is_superuser(request):
qs = Contest.objects.filter(pk=request.contest.pk)
form.base_fields['contest']._set_queryset(qs)
form.base_fields['contest'].required = True
form.base_fields['contest'].empty_label = None
form.base_fields['contest'].initial = request.contest
return form
def has_add_permission(self, request):
# Correct object contest ensured by form.
return is_contest_admin(request)
def has_change_permission(self, request, obj=None):
if obj:
return is_superuser(request) or \
(is_contest_admin(request) and
obj.contest == request.contest)
return self.has_add_permission(request)
def has_delete_permission(self, request, obj=None):
return self.has_change_permission(request, obj)
def get_queryset(self, request):
queryset = super(ReplyTemplateAdmin, self).get_queryset(request)
if not is_superuser(request):
queryset = queryset.filter(contest=request.contest)
return queryset
contest_site.contest_register(ReplyTemplate, ReplyTemplateAdmin)

cgrima/rsr | rsr/fit.py | mit

"""
Various tools for extracting signal components from a fit of the amplitude
distribution
"""
from . import pdf
from .Classdef import Statfit
import numpy as np
import time
import random
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from lmfit import minimize, Parameters, report_fit
def param0(sample, method='basic'):
"""Estimate initial parameters for HK fitting
Arguments
---------
sample : sequence
amplitudes
Keywords
--------
method : string
method to compute the initial parameters
"""
if method == 'basic':
a = np.nanmean(sample)
s = np.nanstd(sample)
mu = 1.
return {'a':a, 's':s, 'mu':mu}
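# In the Homodyned-K sense used by this module, 'a' is the coherent amplitude,
# 's' the scattered-field scale and 'mu' the shape parameter (interpretation
# assumed from the Dutt & Greenleaf reference cited further below).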
def lmfit(sample, fit_model='hk', bins='auto', p0 = None,
xtol=1e-4, ftol=1e-4):
"""Lmfit
Arguments
---------
sample : sequence
amplitudes between 0 and 1.
Keywords
--------
fit_model : string
name of the function (in pdf module) to use for the fit
bins : string
method to compute the bin width (inherited from numpy.histogram)
p0 : dict
Initial parameters. If None, estimated automatically.
xtol : float
relative error in the approximate solution accepted by the
underlying 'leastsq' optimizer
ftol : float
relative error in the sum of squares accepted by the
underlying 'leastsq' optimizer
Return
------
A Statfit Class
"""
start = time.time()
winsize = len(sample)
bad = False
#--------------------------------------------------------------------------
# Clean sample
#--------------------------------------------------------------------------
sample = np.array(sample)
sample = sample[np.isfinite(sample)]
if len(sample) == 0:
bad = True
sample = np.zeros(10)+1
#--------------------------------------------------------------------------
# Make the histogram
#--------------------------------------------------------------------------
# n, edges, patches = hist(sample, bins=bins, normed=True)
n, edges = np.histogram(sample, bins=bins, density=True)
# plt.clf()
x = ((np.roll(edges, -1) + edges)/2.)[0:-1]
#--------------------------------------------------------------------------
# Initial Parameters for the fit
#--------------------------------------------------------------------------
if p0 is None:
p0 = param0(sample)
prm0 = Parameters()
# (Name, Value, Vary, Min, Max, Expr)
prm0.add('a', p0['a'], True, 0, 1, None)
prm0.add('s', p0['s'], True, 0, 1, None)
prm0.add('mu', p0['mu'], True, .5, 10, None)
prm0.add('pt', np.average(sample)**2,False, 0, 1, 'a**2+2*s**2*mu')
#if fit_model == 'hk':
# # From [Dutt and Greenleaf. 1994, eq.14]
# prm0.add('a4', np.average(sample)**4,False, 0, 1,
# '8*(1+1/mu)*s**4 + 8*s**2*s**2 + a**4')
#--------------------------------------------------------------------------
# Fit
#--------------------------------------------------------------------------
pdf2use = getattr(pdf, fit_model)
# use 'lbfgs' fit if error with 'leastsq' fit
try:
p = minimize(pdf2use, prm0, args=(x, n), method='leastsq',
xtol=xtol, ftol=ftol)
except KeyboardInterrupt:
raise
except:
print('!! Error with LEASTSQ fit, use L-BFGS-B instead')
p = minimize(pdf2use, prm0, args=(x, n), method='lbfgs')
#--------------------------------------------------------------------------
# Output
#--------------------------------------------------------------------------
elapsed = time.time() - start
values = {}
# Create values dict For lmfit >0.9.0 compatibility since it is no longer
# in the minimize output
for i in p.params.keys():
values[i] = p.params[i].value
# Results
result = Statfit(sample, pdf2use, values, p.params,
p.chisqr, p.redchi, elapsed, p.nfev, p.message, p.success,
p.residual, x, n, edges, bins=bins)
# Identify bad results
if bad is True:
result.success = False
result.values['a'] = 0
result.values['s'] = 0
result.values['mu'] = 0
result.values['pt'] = 0
result.chisqr = 0
result.redchi = 0
result.message = 'No valid data in the sample'
result.residual = 0
return result
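# Minimal usage sketch (assumes `amp` is a 1-D sequence of amplitudes in
# [0, 1]; the import path mirrors this package layout):
#
#   from rsr import fit
#   result = fit.lmfit(amp, fit_model='hk')
#   print(result.values['a'], result.values['s'], result.values['mu'])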

emitrom/integra-openstack-ui | schedules/tables.py | apache-2.0

from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.integra.schedules import utils
from django.utils.http import urlencode
from django.core.urlresolvers import reverse
class AddTableData(tables.LinkAction):
name = "addSchedule"
verbose_name = _("Add Schedule")
url = "horizon:integra:schedules:create"
classes = ("btn-launch", "ajax-modal")
class ScheduleTasksData(tables.LinkAction):
name = "addTask"
verbose_name = _("Schedule Tasks")
url = "horizon:integra:schedules:addTask"
classes = ("btn-launch", "ajax-modal")
def get_link_url(self, datum):
base_url = reverse(self.url)
params = urlencode({"source_id": self.table.get_object_id(datum)})
return "?".join([base_url, params])
class DeleteTableData(tables.DeleteAction):
data_type_singular = _("Schedule")
data_type_plural = _("Schedule")
def delete(self, request, obj_id):
utils.deleteSchedule(self, obj_id)
class FilterAction(tables.FilterAction):
def filter(self, table, posts, filter_string):
filterString = filter_string.lower()
return [post for post in posts
if filterString in post.title.lower()]
class UpdateRow(tables.Row):
ajax = True
def get_data(self, request, post_id):
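# AJAX row refresh is enabled (ajax = True) but not implemented here;
# returning None leaves the row unchanged.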
pass
class ScheduleTable(tables.DataTable):
id = tables.Column("id",
verbose_name=_("Id"))
name = tables.Column("name",
verbose_name=_("Name"))
description = tables.Column("description",
verbose_name=_("Description"))
priority = tables.Column("priority",
verbose_name=_("Priority"))
enabled = tables.Column("enabled",
verbose_name=_("Enabled"))
class Meta:
name = "integra"
verbose_name = _("Schedules")
#status_columns = ["status"]
row_class = UpdateRow
table_actions = (AddTableData,
FilterAction)
row_actions = (DeleteTableData, ScheduleTasksData)

jiceher/oneapi-python | test.py | apache-2.0

# -*- coding: utf-8 -*-
import pdb
import logging as mod_logging
import unittest as mod_unittest
import oneapi.object as mod_object
import oneapi.models as mod_models
class Tests(mod_unittest.TestCase):
def test_json_deserialization(self):
class Alias(mod_object.AbstractModel):
name = mod_object.FieldConverter()
def __init__(self):
mod_object.AbstractModel.__init__(self)
class Person(mod_object.AbstractModel):
name = mod_object.FieldConverter('name')
family_name = mod_object.FieldConverter('familyName')
age = mod_object.FieldConverter()
aliases = mod_object.ObjectsListFieldConverter(Alias)
main_alias = mod_object.ObjectFieldConverter(Alias, json_field_name='mainAlias')
def __init__(self):
mod_object.AbstractModel.__init__(self)
json = '{"familyName": "bbb", "name": "aaa", "aliases": [{"name": "qqqq"}, {"name": "wwww"}, {"name": "yyyy"}], "age": 17, "mainAlias": {"name": "gazda"}}';
person = mod_object.Conversions.from_json(Person, json, False)
mod_logging.debug('person={0}'.format(person))
self.assertTrue(person)
self.assertEquals(int, person.age.__class__)
self.assertEquals(17, person.age)
self.assertEquals(list, person.aliases.__class__)
self.assertEquals(3, len(person.aliases))
self.assertEquals('wwww', person.aliases[1].name)
self.assertEquals('gazda', person.main_alias.name)
def test_nonstandard_json_deserialization(self):
class Person(mod_object.AbstractModel):
name = mod_object.FieldConverter('name')
family_name = mod_object.FieldConverter('familyName')
age = mod_object.FieldConverter()
# Nonstandard deserialization the first not-null JSON element must be used:
main_alias = [mod_object.FieldConverter('mainAlias.name'), mod_object.FieldConverter('aliases.0.name')]
def __init__(self):
mod_object.AbstractModel.__init__(self)
json = '{"familyName": "bbb", "name": "aaa", "aliases": [{"name": "qqqq"}, {"name": "wwww"}, {"name": "yyyy"}], "age": 17, "mainAlias": {"name": "gazda"}}';
person = mod_object.Conversions.from_json(Person, json, False)
mod_logging.debug('person={0}'.format(person))
self.assertTrue(person)
self.assertRaises(TypeError, person.main_alias, 'gaszda')
json = '{"familyName": "bbb", "name": "aaa", "aliases": [{"name": "qqqq"}, {"name": "wwww"}, {"name": "yyyy"}], "age": 17, "mainAlias": {"name": null}}';
person = mod_object.Conversions.from_json(Person, json, False)
mod_logging.debug('person={0}'.format(person))
self.assertTrue(person)
self.assertRaises(TypeError, 'qqqq', person.main_alias)
def test_exception_serialization(self):
json = '{"requestError":{"serviceException":{"text":"Request URI missing required component(s): ","messageId":"SVC0002","variables":["aaa"]}}}';
error = mod_object.Conversions.from_json(mod_models.OneApiError, json, False)
self.assertTrue(error != None)
self.assertFalse(error.is_success())
self.assertEquals(error.message_id, 'SVC0002')
self.assertEquals(error.text, 'Request URI missing required component(s): ')
self.assertEquals(len(error.variables), 1)
self.assertEquals(error.variables[0], 'aaa')
def test_error_response_deserialization(self):
"""
Trying to deserialize an object but instead we got a error response =>
object with filled exception and is_success == False
"""
json = '{"requestError":{"policyException":{"text":"Request URI missing required component(s): ","messageId":"SVC0002","variables":["aaa"]}}}';
# Deserialize when is_error == True:
result = mod_object.Conversions.from_json(mod_models.SMSRequest, json, is_error = True)
self.assertFalse(result.is_success())
self.assertTrue(result.exception != None)
self.assertTrue(result.exception != None)
self.assertTrue(result.exception.is_success())
self.assertEquals(result.exception.message_id, 'SVC0002')
self.assertEquals(result.exception.text, 'Request URI missing required component(s): ')
self.assertEquals(len(result.exception.variables), 1)
self.assertEquals(result.exception.variables[0], 'aaa')
def test_client_correlator(self):
json = '{"resourceReference":{"resourceURL":"http://test.com/1/smsmessaging/outbound/38598123456/requests/hzmrjiywg5"}, "clientCorrelator":"hzmrjiywg5"}'
result = mod_object.Conversions.from_json(mod_models.ResourceReference, json, is_error=False)
self.assertEquals(result.client_correlator, 'hzmrjiywg5')
if __name__ == '__main__':
mod_logging.basicConfig(level=mod_logging.DEBUG, format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
mod_unittest.main()

fred49/linshare-cli | linsharecli/user/contactslistscontacts.py | gpl-3.0

#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""TODO"""
# This file is part of Linshare cli.
#
# LinShare cli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LinShare cli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LinShare cli. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2017 Frédéric MARTIN
#
# Contributors list :
#
# Frédéric MARTIN [email protected]
#
from argparse import RawTextHelpFormatter
from argtoolbox import DefaultCompleter as Completer
from linshareapi.cache import Time
from linsharecli.user.core import DefaultCommand as Command
from linsharecli.common.filters import PartialOr
from linsharecli.common.core import add_list_parser_options
from linsharecli.common.core import add_delete_parser_options
from linsharecli.common.tables import TableBuilder
from linsharecli.common.tables import DeleteAction
class ContactListsCompleter(object):
"""TODO"""
def __init__(self, config):
self.config = config
def __call__(self, prefix, **kwargs):
from argcomplete import debug
try:
debug("\n------------ ContactListsCompleter -----------------")
debug("Kwargs content :")
for i, j in list(kwargs.items()):
debug("key : " + str(i))
debug("\t - " + str(j))
debug("\n------------ ContactListsCompleter -----------------\n")
args = kwargs.get('parsed_args')
thread_cmd = DefaultCommand(self.config)
return thread_cmd.complete_lists(args, prefix)
# pylint: disable=broad-except
except Exception as ex:
debug("\nERROR:An exception was caught :" + str(ex) + "\n")
class DefaultCommand(Command):
"""TODO"""
IDENTIFIER = "mail"
DEFAULT_SORT = "mail"
DEFAULT_TOTAL = "ContactsList found : %(count)s"
MSG_RS_NOT_FOUND = "No contactslist could be found."
MSG_RS_UPDATED = "The contactslist '%(mail)s' (%(uuid)s) was successfully updated."
MSG_RS_CREATED = "The contactslist '%(mail)s' (%(uuid)s) was successfully created."
CFG_DELETE_MODE = 1
CFG_DELETE_ARG_ATTR = "mailing_list_uuid"
def complete(self, args, prefix):
super(DefaultCommand, self).__call__(args)
json_obj = self.ls.contactslistscontacts.list(args.mailing_list_uuid)
return (v.get(self.RESOURCE_IDENTIFIER)
for v in json_obj if v.get(self.RESOURCE_IDENTIFIER).startswith(prefix))
def complete_lists(self, args, prefix):
"""TODO"""
# pylint: disable=unused-argument
super(DefaultCommand, self).__call__(args)
json_obj = self.ls.contactslists.list()
return (v.get('uuid')
for v in json_obj if v.get('uuid').startswith(prefix))
class ListCommand(DefaultCommand):
""" List all contactslists store into LinShare."""
@Time('linsharecli.contactslistscontacts', label='Global time : %(time)s')
def __call__(self, args):
super(ListCommand, self).__call__(args)
endpoint = self.ls.contactslistscontacts
tbu = TableBuilder(self.ls, endpoint, self.DEFAULT_SORT)
tbu.load_args(args)
tbu.add_action('delete', DeleteAction(
mode=self.CFG_DELETE_MODE,
parent_identifier=self.CFG_DELETE_ARG_ATTR
))
tbu.add_filters(
PartialOr(self.IDENTIFIER, args.pattern, True)
)
json_obj = endpoint.list(args.mailing_list_uuid)
return tbu.build().load_v2(json_obj).render()
def complete_fields(self, args, prefix):
"""TODO"""
# pylint: disable=unused-argument
super(ListCommand, self).__call__(args)
cli = self.ls.contactslistscontacts
return cli.get_rbu().get_keys(True)
class CreateCommand(DefaultCommand):
"""TODO"""
@Time('linsharecli.contactslistscontacts', label='Global time : %(time)s')
def __call__(self, args):
super(CreateCommand, self).__call__(args)
rbu = self.ls.contactslistscontacts.get_rbu()
rbu.load_from_args(args)
identifier = getattr(args, self.IDENTIFIER)
# FIXME : CREATE
return self._run(
self.ls.contactslistscontacts.create,
self.MSG_RS_CREATED,
identifier,
rbu.to_resource())
class DeleteCommand(DefaultCommand):
"""Delete contactslist."""
@Time('linsharecli.contactslistscontacts', label='Global time : %(time)s')
def __call__(self, args):
super(DeleteCommand, self).__call__(args)
act = DeleteAction(
mode=self.CFG_DELETE_MODE,
parent_identifier=self.CFG_DELETE_ARG_ATTR
)
act.init(args, self.ls, self.ls.contactslistscontacts)
return act.delete(args.uuids)
class UpdateCommand(DefaultCommand):
"""TODO"""
@Time('linsharecli.contactslistscontacts', label='Global time : %(time)s')
def __call__(self, args):
super(UpdateCommand, self).__call__(args)
cli = self.ls.contactslistscontacts
identifier = getattr(args, self.RESOURCE_IDENTIFIER)
resource = cli.get(args.mailing_list_uuid, identifier)
rbu = cli.get_rbu()
rbu.copy(resource)
rbu.load_from_args(args)
# FIXME: CREATE
return self._run(
cli.update,
self.MSG_RS_UPDATED,
identifier,
rbu.to_resource())
def add_parser(subparsers, name, desc, config):
"""TODO"""
parser_tmp = subparsers.add_parser(name, help=desc)
parser_tmp.add_argument(
'-u',
'--uuid',
action="store",
dest="mailing_list_uuid",
help="list uuid",
required=True).completer = ContactListsCompleter(config)
subparsers2 = parser_tmp.add_subparsers()
# command : list
parser = subparsers2.add_parser(
'list',
formatter_class=RawTextHelpFormatter,
help="list contactslist from linshare")
parser.add_argument(
'pattern', nargs="*",
help="Filter documents by their names")
parser.add_argument('identifiers', nargs="*", help="")
add_list_parser_options(parser, delete=True, cdate=True)
parser.set_defaults(__func__=ListCommand(config))
# command : create
parser = subparsers2.add_parser(
'create', help="create contactslist.")
parser.add_argument(DefaultCommand.IDENTIFIER, action="store", help="")
parser.add_argument('--public', dest="public", action="store_true", help="")
parser.add_argument('--first-name', action="store", help="")
parser.add_argument('--last-name', action="store", help="")
parser.set_defaults(__func__=CreateCommand(config))
# command : delete
parser = subparsers2.add_parser(
'delete',
help="delete contactslist")
add_delete_parser_options(parser)
parser.set_defaults(__func__=DeleteCommand(config))
# command : update
parser = subparsers2.add_parser(
'update', help="update contactslist.")
parser.add_argument(
'uuid', action="store", help="").completer = Completer()
#parser.add_argument('--identifier', action="store", help="")
parser.add_argument("--" + DefaultCommand.IDENTIFIER, action="store", help="")
parser.add_argument('--public', dest="public", action="store_true", help="")
parser.add_argument('--first-name', action="store", help="")
parser.add_argument('--last-name', action="store", help="")
parser.set_defaults(__func__=UpdateCommand(config))

gordielachance/plugin.audio.subsonic | main.py | gpl-3.0

#!/usr/bin/python
# -*- coding: utf-8 -*-
# Module: main
# Author: G.Breant
# Created on: 14 January 2017
# License: GPL v.3 https://www.gnu.org/copyleft/gpl.html
import sys
import xbmc
import xbmcvfs
import os
import xbmcaddon
import xbmcplugin
import xbmcgui
import json
import shutil
import dateutil.parser
from datetime import datetime
from collections import namedtuple
from collections.abc import MutableMapping
# Add the /lib folder to sys
sys.path.append(xbmcvfs.translatePath(os.path.join(xbmcaddon.Addon("plugin.audio.subsonic").getAddonInfo("path"), "lib")))
import libsonic  # Removed libsonic_extra
from simpleplugin import Plugin
from simpleplugin import Addon
# Create plugin instance
plugin = Plugin()
# initialize_gettext
#_ = plugin.initialize_gettext()
connection = None
cachetime = int(Addon().get_setting('cachetime'))
local_starred = set({})
ListContext = namedtuple('ListContext', ['listing', 'succeeded','update_listing', 'cache_to_disk','sort_methods', 'view_mode','content', 'category'])
PlayContext = namedtuple('PlayContext', ['path', 'play_item', 'succeeded'])
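# Named tuples mirroring the listing/playback context structures that
# simpleplugin expects plugin actions to return.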
def popup(text, time=5000, image=None):
title = plugin.addon.getAddonInfo('name')
icon = plugin.addon.getAddonInfo('icon')
xbmc.executebuiltin('Notification(%s, %s, %d, %s)' % (title, text,
time, icon))
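# Lazily create and cache a single libsonic connection; returns False (after
# showing a notification) when the server cannot be reached.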
def get_connection():
global connection
if connection==None:
connected = False
# Create connection
try:
connection = libsonic.Connection(
baseUrl=Addon().get_setting('subsonic_url'),
username=Addon().get_setting('username', convert=False),
password=Addon().get_setting('password', convert=False),
port=Addon().get_setting('port'),
apiVersion=Addon().get_setting('apiversion'),
insecure=Addon().get_setting('insecure'),
legacyAuth=Addon().get_setting('legacyauth'),
useGET=Addon().get_setting('useget'),
)
connected = connection.ping()
except:
pass
if connected==False:
popup('Connection error')
return False
return connection
@plugin.action()
def root(params):
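"""Top-level menu listing the main sections of the add-on."""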
# get connection
connection = get_connection()
if connection==False:
return
listing = []
menus = {
'folders': {
'name': Addon().get_localized_string(30038),
'callback': 'browse_folders',
'thumb': None
},
'library': {
'name': Addon().get_localized_string(30019),
'callback': 'browse_library',
'thumb': None
},
'albums': {
'name': Addon().get_localized_string(30020),
'callback': 'menu_albums',
'thumb': None
},
'tracks': {
'name': Addon().get_localized_string(30021),
'callback': 'menu_tracks',
'thumb': None
},
'playlists': {
'name': Addon().get_localized_string(30022),
'callback': 'list_playlists',
'thumb': None
},
'search': {
'name': Addon().get_localized_string(30039),
'callback': 'search',
'thumb': None
},
}
# Iterate through categories
for mid in menus:
# image
if 'thumb' in menus[mid]:
thumb = menus[mid]['thumb']
listing.append({
'label': menus[mid]['name'],
'thumb': thumb, # Item thumbnail
'fanart': thumb, # Item thumbnail
'url': plugin.get_url(
action=menus[mid]['callback'],
menu_id=mid
)
}) # Item label
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
sort_methods = None, #he list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
#content = None #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
def menu_albums(params):
# get connection
connection = get_connection()
if connection==False:
return
listing = []
menus = {
'albums_newest': {
'name': Addon().get_localized_string(30023),
'thumb': None,
'args': {"ltype": "newest"}
},
'albums_frequent': {
'name': Addon().get_localized_string(30024),
'thumb': None,
'args': {"ltype": "frequent"}
},
'albums_recent': {
'name': Addon().get_localized_string(30025),
'thumb': None,
'args': {"ltype": "recent"}
},
'albums_random': {
'name': Addon().get_localized_string(30026),
'thumb': None,
'args': {"ltype": "random"}
}
}
# Iterate through categories
for menu_id in menus:
menu = menus.get(menu_id)
# image
if 'thumb' in menu:
thumb = menu.get('thumb')
listing.append({
'label': menu.get('name'),
'thumb': menu.get('thumb'), # Item thumbnail
'fanart': menu.get('thumb'), # Item thumbnail
'url': plugin.get_url(
action= 'list_albums',
page= 1,
query_args= json.dumps(menu.get('args')),
menu_id= menu_id
)
}) # Item label
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
#sort_methods = None, #he list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
#content = None #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
def menu_tracks(params):
# get connection
connection = get_connection()
if connection==False:
return
listing = []
menus = {
'tracks_starred': {
'name': Addon().get_localized_string(30036),
'thumb': None
},
'tracks_random': {
'name': Addon().get_localized_string(30037),
'thumb': None
}
}
# Iterate through categories
for menu_id in menus:
menu = menus.get(menu_id)
# image
if 'thumb' in menu:
thumb = menu.get('thumb')
listing.append({
'label': menu.get('name'),
'thumb': menu.get('thumb'), # Item thumbnail
'fanart': menu.get('thumb'), # Item thumbnail
'url': plugin.get_url(
action= 'list_tracks',
menu_id= menu_id
)
}) # Item label
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
#sort_methods = None, #he list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
#content = None #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
#@plugin.cached(cachetime) # cache (in minutes)
def browse_folders(params):
# get connection
connection = get_connection()
if connection==False:
return
listing = []
# Get items
items = walk_folders()
# Iterate through items
for item in items:
entry = {
'label': item.get('name'),
'url': plugin.get_url(
action= 'browse_indexes',
folder_id= item.get('id'),
menu_id= params.get('menu_id')
)
}
listing.append(entry)
if len(listing) == 1:
plugin.log('One single Media Folder found; do return listing from browse_indexes()...')
return browse_indexes(params)
else:
add_directory_items(create_listing(listing))
@plugin.action()
#@plugin.cached(cachetime) # cache (in minutes)
def browse_indexes(params):
# get connection
connection = get_connection()
if connection==False:
return
listing = []
# Get items
# optional folder ID
folder_id = params.get('folder_id')
items = walk_index(folder_id)
# Iterate through items
for item in items:
entry = {
'label': item.get('name'),
'url': plugin.get_url(
action= 'list_directory',
id= item.get('id'),
menu_id= params.get('menu_id')
)
}
listing.append(entry)
add_directory_items(create_listing(
listing
))
@plugin.action()
#@plugin.cached(cachetime) # cache (in minutes)
def list_directory(params):
# get connection
connection = get_connection()
if connection==False:
return
listing = []
# Get items
id = params.get('id')
items = walk_directory(id)
# Iterate through items
for item in items:
#is a directory
if (item.get('isDir')==True):
entry = {
'label': item.get('title'),
'url': plugin.get_url(
action= 'list_directory',
id= item.get('id'),
menu_id= params.get('menu_id')
)
}
else:
entry = get_entry_track(item,params)
listing.append(entry)
add_directory_items(create_listing(
listing
))
@plugin.action()
#@plugin.cached(cachetime) # cache (in minutes)
def browse_library(params):
"""
List artists from the library (ID3 tags)
"""
# get connection
connection = get_connection()
if connection==False:
return
listing = []
# Get items
items = walk_artists()
# Iterate through items
for item in items:
entry = get_entry_artist(item,params)
#context menu actions
context_actions = []
if can_star('artist',item.get('id')):
action_star = context_action_star('artist',item.get('id'))
context_actions.append(action_star)
if len(context_actions) > 0:
entry['context_menu'] = context_actions
listing.append(entry)
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
cache_to_disk = True, #cache this view to disk.
sort_methods = get_sort_methods('artists',params), #he list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
content = 'artists' #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
#@plugin.cached(cachetime) #cache (in minutes)
def list_albums(params):
"""
List albums from the library (ID3 tags)
"""
listing = []
# get connection
connection = get_connection()
if connection==False:
return
#query
query_args = {}
try:
query_args_json = params['query_args']
query_args = json.loads(query_args_json)
except:
pass
#size
albums_per_page = int(Addon().get_setting('albums_per_page'))
query_args["size"] = albums_per_page
#offset
offset = int(params.get('page',1)) - 1;
if offset > 0:
query_args["offset"] = offset * albums_per_page
#debug
query_args_json = json.dumps(query_args)
plugin.log('list_albums with args:' + query_args_json);
#Get items
if 'artist_id' in params:
generator = walk_artist(params.get('artist_id'))
else:
generator = walk_albums(**query_args)
#make a list out of the generator so we can iterate it several times
items = list(generator)
#check if there is only one distinct artist on these albums (and then hide it)
artists = set(item.get('artist', None) for item in items)
if len(artists) <= 1:
params['hide_artist'] = True
# Iterate through items
for item in items:
album = get_entry_album(item, params)
listing.append(album)
# Root menu
link_root = navigate_root()
listing.append(link_root)
if not 'artist_id' in params:
# Pagination if we've not reached the end of the list
# if type(items) != type(True): TO FIX
link_next = navigate_next(params)
listing.append(link_next)
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
cache_to_disk = True, #cache this view to disk.
sort_methods = get_sort_methods('albums',params),
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
content = 'albums' #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
#@plugin.cached(cachetime) #cache (in minutes)
def list_tracks(params):
menu_id = params.get('menu_id')
listing = []
#query
query_args = {}
try:
query_args_json = params['query_args']
query_args = json.loads(query_args_json)
except:
pass
#size
tracks_per_page = int(Addon().get_setting('tracks_per_page'))
query_args["size"] = tracks_per_page
#offset
offset = int(params.get('page',1)) - 1;
if offset > 0:
query_args["offset"] = offset * tracks_per_page
#debug
query_args_json = json.dumps(query_args)
plugin.log('list_tracks with args:' + query_args_json);
# get connection
connection = get_connection()
if connection==False:
return
# Album
if 'album_id' in params:
generator = walk_album(params['album_id'])
# Playlist
elif 'playlist_id' in params:
generator = walk_playlist(params['playlist_id'])
#TO FIX
#tracknumber = 0
#for item in items:
# tracknumber += 1
# items[item]['tracknumber'] = tracknumber
# Starred
elif menu_id == 'tracks_starred':
generator = walk_tracks_starred()
# Random
elif menu_id == 'tracks_random':
generator = walk_tracks_random(**query_args)
# Filters
#else:
#TO WORK
#make a list out of the generator so we can iterate it several times
items = list(generator)
#check if there is only one distinct artist on these tracks (and then hide it)
artists = set(item.get('artist', None) for item in items)
if len(artists) <= 1:
params['hide_artist'] = True
#update stars
if menu_id == 'tracks_starred':
ids_list = [item.get('id') for item in items]
stars_cache_update(ids_list)
# Iterate through items
key = 0;
for item in items:
track = get_entry_track(item,params)
listing.append(track)
key +=1
# Root menu
#link_root = navigate_root()
#listing.append(link_root)
# Pagination if we've not reached the end of the list
# if type(items) != type(True): TO FIX
#link_next = navigate_next(params)
#listing.append(link_next)
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
sort_methods= get_sort_methods('tracks',params),
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
content = 'songs' #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
#stars (persistent) cache is used to know what context action (star/unstar) we should display.
#run this function every time we get starred items.
#ids can be a single ID or a list
#using a set makes sure that IDs will be unique.
@plugin.action()
#@plugin.cached(cachetime) #cache (in minutes)
def list_playlists(params):
# get connection
connection = get_connection()
if connection==False:
return
listing = []
# Get items
items = walk_playlists()
# Iterate through items
for item in items:
entry = get_entry_playlist(item,params)
listing.append(entry)
add_directory_items(create_listing(
listing,
#succeeded = True, #if False Kodi won’t open a new listing and stays on the current level.
#update_listing = False, #if True, Kodi won’t open a sub-listing but refresh the current one.
#cache_to_disk = True, #cache this view to disk.
sort_methods = get_sort_methods('playlists',params), #he list of integer constants representing virtual folder sort methods.
#view_mode = None, #a numeric code for a skin view mode. View mode codes are different in different skins except for 50 (basic listing).
#content = None #string - current plugin content, e.g. ‘movies’ or ‘episodes’.
))
@plugin.action()
#@plugin.cached(cachetime) #cache (in minutes)
def search(params):
dialog = xbmcgui.Dialog()
d = dialog.input(Addon().get_localized_string(30039), type=xbmcgui.INPUT_ALPHANUM)
if not d:
d = " "
# get connection
connection = get_connection()
if connection==False:
return
listing = []
# Get items
items = connection.search2(query=d)
# Iterate through items
for item in items.get('searchResult2').get('song'):
entry = get_entry_track( item, params)
listing.append(entry)
if len(listing) == 1:
plugin.log('One single Media Folder found; do return listing from browse_indexes()...')
return browse_indexes(params)
else:
add_directory_items(create_listing(listing))
@plugin.action()
def play_track(params):
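"""Resolve a track id to a Subsonic stream URL and hand it to Kodi."""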
id = params['id']
plugin.log('play_track #' + id);
# get connection
connection = get_connection()
if connection==False:
return
url = connection.streamUrl(sid=id,
maxBitRate=Addon().get_setting('bitrate_streaming'),
tformat=Addon().get_setting('transcode_format_streaming')
)
#return url
_set_resolved_url(resolve_url(url))
@plugin.action()
def star_item(params):
ids= params.get('ids'); #can be single or lists of IDs
unstar= params.get('unstar',False);
unstar = (unstar) and (unstar != 'None') and (unstar != 'False') #TO FIX better statement ?
type= params.get('type');
sids = albumIds = artistIds = None
#validate type
if type == 'track':
sids = ids
elif type == 'artist':
artistIds = ids
elif type == 'album':
albumIds = ids
#validate capability
if not can_star(type,ids):
return;
#validate IDs
if (not sids and not artistIds and not albumIds):
return;
# get connection
connection = get_connection()
if connection==False:
return
###
did_action = False
try:
if unstar:
request = connection.unstar(sids, albumIds, artistIds)
else:
request = connection.star(sids, albumIds, artistIds)
if request['status'] == 'ok':
did_action = True
except:
pass
if did_action:
if unstar:
message = Addon().get_localized_string(30031)
plugin.log('Unstarred %s #%s' % (type,json.dumps(ids)))
else: #star
message = Addon().get_localized_string(30032)
plugin.log('Starred %s #%s' % (type,json.dumps(ids)))
stars_cache_update(ids,unstar)
popup(message)
#TO FIX clear starred lists caches ?
#TO FIX refresh current list after star set ?
else:
if unstar:
plugin.log_error('Unable to unstar %s #%s' % (type,json.dumps(ids)))
else:
plugin.log_error('Unable to star %s #%s' % (type,json.dumps(ids)))
#return did_action
return
@plugin.action()
def download_item(params):
id= params.get('id'); #can be single or lists of IDs
type= params.get('type');
#validate path
download_folder = Addon().get_setting('download_folder')
    if not download_folder:
        popup("Please set a directory for your downloads")
        plugin.log_error("No directory set for downloads")
        return False
    #validate capability
    if not can_download(type,id):
        return False
if type == 'track':
did_action = download_tracks(id)
elif type == 'album':
did_action = download_album(id)
if did_action:
plugin.log('Downloaded %s #%s' % (type,id))
popup('Item has been downloaded!')
else:
        plugin.log_error('Unable to download %s #%s' % (type,id))
return did_action
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_playlist(item,params):
image = connection.getCoverArtUrl(item.get('coverArt'))
return {
'label': item.get('name'),
'thumb': image,
'fanart': image,
'url': plugin.get_url(
action= 'list_tracks',
playlist_id= item.get('id'),
menu_id= params.get('menu_id')
),
'info': {'music': { ##http://romanvm.github.io/Kodistubs/_autosummary/xbmcgui.html#xbmcgui.ListItem.setInfo
'title': item.get('name'),
'count': item.get('songCount'),
'duration': item.get('duration'),
'date': convert_date_from_iso8601(item.get('created'))
}}
}
#star (or unstar) an item
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_artist(item,params):
image = connection.getCoverArtUrl(item.get('coverArt'))
return {
'label': get_starred_label(item.get('id'),item.get('name')),
'thumb': image,
'fanart': image,
'url': plugin.get_url(
action= 'list_albums',
artist_id= item.get('id'),
menu_id= params.get('menu_id')
),
'info': {
'music': { ##http://romanvm.github.io/Kodistubs/_autosummary/xbmcgui.html#xbmcgui.ListItem.setInfo
'count': item.get('albumCount'),
'artist': item.get('name')
}
}
}
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_album(item, params):
image = connection.getCoverArtUrl(item.get('coverArt'))
entry = {
'label': get_entry_album_label(item,params.get('hide_artist',False)),
'thumb': image,
'fanart': image,
'url': plugin.get_url(
action= 'list_tracks',
album_id= item.get('id'),
hide_artist= item.get('hide_artist'),
menu_id= params.get('menu_id')
),
'info': {
'music': { ##http://romanvm.github.io/Kodistubs/_autosummary/xbmcgui.html#xbmcgui.ListItem.setInfo
'count': item.get('songCount'),
'date': convert_date_from_iso8601(item.get('created')), #date added
'duration': item.get('duration'),
'artist': item.get('artist'),
'album': item.get('name'),
'year': item.get('year')
}
}
}
#context menu actions
context_actions = []
if can_star('album',item.get('id')):
action_star = context_action_star('album',item.get('id'))
context_actions.append(action_star)
if can_download('album',item.get('id')):
action_download = context_action_download('album',item.get('id'))
context_actions.append(action_download)
if len(context_actions) > 0:
entry['context_menu'] = context_actions
return entry
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_track(item,params):
menu_id = params.get('menu_id')
image = connection.getCoverArtUrl(item.get('coverArt'))
entry = {
'label': get_entry_track_label(item,params.get('hide_artist')),
'thumb': image,
'fanart': image,
'url': plugin.get_url(
action= 'play_track',
id= item.get('id'),
menu_id= menu_id
),
'is_playable': True,
'mime': item.get("contentType"),
'info': {'music': { #http://romanvm.github.io/Kodistubs/_autosummary/xbmcgui.html#xbmcgui.ListItem.setInfo
'title': item.get('title'),
'album': item.get('album'),
'artist': item.get('artist'),
'tracknumber': item.get('tracknumber'),
'year': item.get('year'),
'genre': item.get('genre'),
'size': item.get('size'),
'duration': item.get('duration'),
'date': item.get('created')
}
}
}
#context menu actions
context_actions = []
if can_star('track',item.get('id')):
action_star = context_action_star('track',item.get('id'))
context_actions.append(action_star)
if can_download('track',item.get('id')):
action_download = context_action_download('track',item.get('id'))
context_actions.append(action_download)
if len(context_actions) > 0:
entry['context_menu'] = context_actions
return entry
#@plugin.cached(cachetime) #cache (in minutes)
def get_starred_label(id,label):
if is_starred(id):
label = '[COLOR=FF00FF00]%s[/COLOR]' % label
return label
def is_starred(id):
    starred = stars_cache_get()
    return int(id) in starred
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_track_label(item,hide_artist = False):
if hide_artist:
label = item.get('title', '<Unknown>')
else:
label = '%s - %s' % (
item.get('artist', '<Unknown>'),
item.get('title', '<Unknown>')
)
return get_starred_label(item.get('id'),label)
#@plugin.cached(cachetime) #cache (in minutes)
def get_entry_album_label(item,hide_artist = False):
if hide_artist:
label = item.get('name', '<Unknown>')
else:
label = '%s - %s' % (item.get('artist', '<Unknown>'),
item.get('name', '<Unknown>'))
return get_starred_label(item.get('id'),label)
#@plugin.cached(cachetime) #cache (in minutes)
def get_sort_methods(type,params):
#sort method for list types
#https://github.com/xbmc/xbmc/blob/master/xbmc/SortFileItem.h
#TO FIX _DATE or _DATEADDED ?
#TO FIX
    #actually it seems possible to 'restore' the default sorting (by label),
    #and then our starred items would not stay colorized,
    #so do not sort stuff for now
#see http://forum.kodi.tv/showthread.php?tid=293037
return []
sortable = [
xbmcplugin.SORT_METHOD_NONE,
xbmcplugin.SORT_METHOD_LABEL,
xbmcplugin.SORT_METHOD_UNSORTED
]
if type=='artists':
artists = [
xbmcplugin.SORT_METHOD_ARTIST
]
sortable = sortable + artists
elif type=='albums':
albums = [
xbmcplugin.SORT_METHOD_ALBUM,
xbmcplugin.SORT_METHOD_DURATION,
xbmcplugin.SORT_METHOD_DATE,
#xbmcplugin.SORT_METHOD_YEAR
]
if not params.get('hide_artist',False):
albums.append(xbmcplugin.SORT_METHOD_ARTIST)
sortable = sortable + albums
elif type=='tracks':
tracks = [
xbmcplugin.SORT_METHOD_TITLE,
xbmcplugin.SORT_METHOD_ALBUM,
xbmcplugin.SORT_METHOD_TRACKNUM,
#xbmcplugin.SORT_METHOD_YEAR,
xbmcplugin.SORT_METHOD_GENRE,
xbmcplugin.SORT_METHOD_SIZE,
xbmcplugin.SORT_METHOD_DURATION,
xbmcplugin.SORT_METHOD_DATE,
xbmcplugin.SORT_METHOD_BITRATE
]
if not params.get('hide_artist',False):
tracks.append(xbmcplugin.SORT_METHOD_ARTIST)
if params.get('playlist_id',False):
            tracks.append(xbmcplugin.SORT_METHOD_PLAYLIST_ORDER)
sortable = sortable + tracks
elif type=='playlists':
playlists = [
xbmcplugin.SORT_METHOD_TITLE,
xbmcplugin.SORT_METHOD_DURATION,
xbmcplugin.SORT_METHOD_DATE
]
sortable = sortable + playlists
return sortable
def stars_cache_update(ids,remove=False):
#get existing cache set
starred = stars_cache_get()
    #make sure this is a list
    if not isinstance(ids, list):
        ids = [ids]
    #abort if empty
if len(ids) == 0:
return
#parse items
for item_id in ids:
item_id = int(item_id)
        if not remove:
            starred.add(item_id)
        else:
            starred.discard(item_id) #discard: no KeyError if the id is absent
#store them
with plugin.get_storage() as storage:
storage['starred_ids'] = starred
plugin.log('stars_cache_update:')
plugin.log(starred)
def stars_cache_get(): #Retrieving stars from cache is too slow, so load to local variable
global local_starred
    plugin.log(len(local_starred))
    if len(local_starred) > 0:
plugin.log('stars already loaded:')
plugin.log(local_starred)
return(local_starred)
else:
with plugin.get_storage() as storage:
local_starred = storage.get('starred_ids',set())
plugin.log('stars_cache_get:')
plugin.log(local_starred)
return local_starred
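#Usage sketch for the star cache helpers above (the ids are made up;
#behaviour follows the implementations as written):
#  stars_cache_update(['123', '456'])      #cache ids 123 and 456 as starred
#  is_starred('123')                       #-> True
#  stars_cache_update('123', remove=True)  #unstar id 123 again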
def navigate_next(params):
page = int(params.get('page',1))
page += 1
    title = Addon().get_localized_string(30029) + "(%d)" % page
return {
'label': title,
'url': plugin.get_url(
action= params.get('action',None),
page= page,
query_args= params.get('query_args',None)
)
}
def navigate_root():
return {
'label': Addon().get_localized_string(30030),
'url': plugin.get_url(action='root')
}
#converts a date string from eg. '2012-04-17T19:53:44' to eg. '17.04.2012'
def convert_date_from_iso8601(iso8601):
date_obj = dateutil.parser.parse(iso8601)
return date_obj.strftime('%d.%m.%Y')
def context_action_star(type,id):
starred = is_starred(id)
if not starred:
label = Addon().get_localized_string(30033)
else:
#Should be available only in the stars lists;
#so we don't have to fetch the starred status for each item
#(since it is not available into the XML response from the server)
label = Addon().get_localized_string(30034)
xbmc.log('Context action star returning RunPlugin(%s)' % plugin.get_url(action='star_item',type=type,ids=id,unstar=starred),xbmc.LOGDEBUG)
return (
label,
'RunPlugin(%s)' % plugin.get_url(action='star_item',type=type,ids=id,unstar=starred)
)
#Subsonic API says this is supported for artists, tracks and albums,
#but I can see it available only for tracks on Subsonic 5.3, so disable the rest.
def can_star(type,ids = None):
if not ids:
return False
    if not isinstance(ids, (list, tuple)):
ids = [ids]
if len(ids) == 0:
return False
if type == 'track':
return True
elif type == 'artist':
return False
elif type == 'album':
return False
def context_action_download(type,id):
label = Addon().get_localized_string(30035)
return (
label,
'RunPlugin(%s)' % plugin.get_url(action='download_item',type=type,id=id)
)
def can_download(type,id = None):
    if id is None:
return False
if type == 'track':
return True
elif type == 'album':
return True
def download_tracks(ids):
    #popup is fired before, in download_item
download_folder = Addon().get_setting('download_folder')
if not download_folder:
return
if not ids:
return False
#make list
    if not isinstance(ids, (list, tuple)):
ids = [ids]
ids_count = len(ids)
#check if empty
if ids_count == 0:
return False
plugin.log('download_tracks IDs:')
plugin.log(json.dumps(ids))
# get connection
connection = get_connection()
    if connection is False:
return
#progress...
    pc_step = 100 // ids_count #integer division so the progress dialog gets an int percent
pc_progress = 0
ids_parsed = 0
progressdialog = xbmcgui.DialogProgress()
progressdialog.create("Downloading tracks...") #Title
for id in ids:
        if progressdialog.iscanceled():
return False
# debug
plugin.log('Trying to download track #'+str(id))
# get track infos
        response = connection.getSong(id)
track = response.get('song')
plugin.log('Track info :')
plugin.log(track)
# progress bar
pc_progress = ids_parsed * pc_step
        progressdialog.update(pc_progress, 'Getting track information...', get_entry_track_label(track))
track_path_relative = track.get("path", None).encode('utf8', 'replace') # 'Radiohead/Kid A/Idioteque.mp3'
track_path = os.path.join(download_folder, track_path_relative) # 'C:/users/.../Radiohead/Kid A/Idioteque.mp3'
track_directory = os.path.dirname(os.path.abspath(track_path)) # 'C:/users/.../Radiohead/Kid A'
#check if file exists
if os.path.isfile(track_path):
progressdialog.update(pc_progress, 'Track has already been downloaded!')
plugin.log("File '%s' already exists" % (id))
else:
progressdialog.update(pc_progress, "Downloading track...",track_path)
try:
#get remote file (file-object like)
file_obj = connection.download(id)
                #create directory if it does not exist
                if not os.path.exists(track_directory):
                    os.makedirs(track_directory)
                #write the file in binary mode ('a' text mode would corrupt audio);
                #existence was already checked above, so nothing is overwritten
                track_file = open(track_path, 'wb')
                shutil.copyfileobj(file_obj, track_file)
                track_file.close()
except:
popup("Error while downloading track #%s" % (id))
plugin.log("Error while downloading track #%s" % (id))
pass
ids_parsed += 1
    progressdialog.update(100, "Done!", "Enjoy!")
xbmc.sleep(1000)
progressdialog.close()
def download_album(id):
# get connection
connection = get_connection()
if connection==False:
return
# get album infos
    response = connection.getAlbum(id)
album = response.get('album')
tracks = album.get('song')
plugin.log('getAlbum:')
plugin.log(json.dumps(album))
    ids = [] #list of track IDs
    for track in tracks:
        ids.append(track.get('id'))
download_tracks(ids)
#@plugin.cached(cachetime) #cache (in minutes)
def create_listing(listing, succeeded=True, update_listing=False, cache_to_disk=False, sort_methods=None, view_mode=None, content=None, category=None):
    return ListContext(listing, succeeded, update_listing, cache_to_disk, sort_methods, view_mode, content, category)
def resolve_url(path='', play_item=None, succeeded=True):
"""
Create and return a context dict to resolve a playable URL
:param path: the path to a playable media.
:type path: str or unicode
    :param play_item: a dict of item properties as described in the class docstring.
        It allows setting additional properties for the item being played, like graphics, metadata etc.
        If the ``play_item`` parameter is present, then the ``path`` value is ignored, and the path must be set via
        the ``'path'`` property of the ``play_item`` dict.
:type play_item: dict
:param succeeded: if ``False``, Kodi won't play anything
:type succeeded: bool
:return: context object containing necessary parameters
for Kodi to play the selected media.
:rtype: PlayContext
"""
return PlayContext(path, play_item, succeeded)
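#Usage sketch (mirrors play_track above; the stream URL is hypothetical):
#  _set_resolved_url(resolve_url('http://server/rest/stream?id=42'))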
#@plugin.cached(cachetime) #cache (in minutes)
def create_list_item(item):
"""
Create an :class:`xbmcgui.ListItem` instance from an item dict
:param item: a dict of ListItem properties
:type item: dict
:return: ListItem instance
:rtype: xbmcgui.ListItem
"""
    major_version = xbmc.getInfoLabel('System.BuildVersion')[:2]
    if major_version >= '18':
        list_item = xbmcgui.ListItem(label=item.get('label', ''),
                                     label2=item.get('label2', ''),
                                     path=item.get('path', ''),
                                     offscreen=item.get('offscreen', False))
    else:
        #Kodi < 18 does not accept the 'offscreen' keyword
        list_item = xbmcgui.ListItem(label=item.get('label', ''),
                                     label2=item.get('label2', ''),
                                     path=item.get('path', ''))
art = item.get('art', {})
art['thumb'] = item.get('thumb', '')
art['icon'] = item.get('icon', '')
art['fanart'] = item.get('fanart', '')
item['art'] = art
cont_look = item.get('content_lookup')
if cont_look is not None:
list_item.setContentLookup(cont_look)
if item.get('art'):
list_item.setArt(item['art'])
if item.get('stream_info'):
for stream, stream_info in item['stream_info'].items():
list_item.addStreamInfo(stream, stream_info)
if item.get('info'):
for media, info in item['info'].items():
list_item.setInfo(media, info)
if item.get('context_menu') is not None:
list_item.addContextMenuItems(item['context_menu'])
if item.get('subtitles'):
list_item.setSubtitles(item['subtitles'])
if item.get('mime'):
list_item.setMimeType(item['mime'])
if item.get('properties'):
for key, value in item['properties'].items():
list_item.setProperty(key, value)
if major_version >= '17':
cast = item.get('cast')
if cast is not None:
list_item.setCast(cast)
db_ids = item.get('online_db_ids')
if db_ids is not None:
list_item.setUniqueIDs(db_ids)
ratings = item.get('ratings')
if ratings is not None:
for rating in ratings:
list_item.setRating(**rating)
return list_item
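#Sketch of the item dict shape this helper consumes (keys optional,
#values invented for illustration):
#  create_list_item({
#      'label': 'Idioteque',
#      'thumb': 'http://server/rest/getCoverArt?id=1',
#      'mime': 'audio/mpeg',
#      'info': {'music': {'title': 'Idioteque', 'artist': 'Radiohead'}},
#  })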
def _set_resolved_url(context):
plugin.log_debug('Resolving URL from {0}'.format(str(context)))
    if context.play_item is None:
list_item = xbmcgui.ListItem(path=context.path)
else:
        list_item = create_list_item(context.play_item)
xbmcplugin.setResolvedUrl(plugin.handle, context.succeeded, list_item)
#@plugin.cached(cachetime) #cache (in minutes)
def add_directory_items(context):
plugin.log_debug('Creating listing from {0}'.format(str(context)))
if context.category is not None:
xbmcplugin.setPluginCategory(plugin.handle, context.category)
if context.content is not None:
xbmcplugin.setContent(plugin.handle, context.content) # This must be at the beginning
for item in context.listing:
is_folder = item.get('is_folder', True)
if item.get('list_item') is not None:
list_item = item['list_item']
else:
list_item = create_list_item(item)
if item.get('is_playable'):
list_item.setProperty('IsPlayable', 'true')
is_folder = False
xbmcplugin.addDirectoryItem(plugin.handle, item['url'], list_item, is_folder)
if context.sort_methods is not None:
if isinstance(context.sort_methods, (int, dict)):
sort_methods = [context.sort_methods]
elif isinstance(context.sort_methods, (tuple, list)):
sort_methods = context.sort_methods
else:
raise TypeError(
'sort_methods parameter must be of int, dict, tuple or list type!')
for method in sort_methods:
if isinstance(method, int):
xbmcplugin.addSortMethod(plugin.handle, method)
elif isinstance(method, dict):
xbmcplugin.addSortMethod(plugin.handle, **method)
else:
raise TypeError(
'method parameter must be of int or dict type!')
xbmcplugin.endOfDirectory(plugin.handle,
context.succeeded,
context.update_listing,
context.cache_to_disk)
if context.view_mode is not None:
xbmc.executebuiltin('Container.SetViewMode({0})'.format(context.view_mode))
def walk_index(folder_id=None):
"""
Request Subsonic's index and iterate each item.
"""
response = connection.getIndexes(folder_id)
for index in response["indexes"]["index"]:
for artist in index["artist"]:
yield artist
def walk_playlists():
"""
Request Subsonic's playlists and iterate over each item.
"""
response = connection.getPlaylists()
for child in response["playlists"]["playlist"]:
yield child
def walk_playlist(playlist_id):
"""
Request Subsonic's playlist items and iterate over each item.
"""
response = connection.getPlaylist(playlist_id)
for child in response["playlist"]["entry"]:
yield child
def walk_folders():
response = connection.getMusicFolders()
for child in response["musicFolders"]["musicFolder"]:
yield child
def walk_directory(directory_id):
"""
Request a Subsonic music directory and iterate over each item.
"""
response = connection.getMusicDirectory(directory_id)
    try:
        for child in response["directory"]["child"]:
            if child.get("isDir"):
                for child in walk_directory(child["id"]):
                    yield child
            else:
                yield child
    except KeyError:
        #empty directory: the response carries no "child" key
        yield from ()
def walk_artist(artist_id):
"""
Request a Subsonic artist and iterate over each album.
"""
response = connection.getArtist(artist_id)
for child in response["artist"]["album"]:
yield child
def walk_artists():
"""
(ID3 tags)
Request all artists and iterate over each item.
"""
response = connection.getArtists()
for index in response["artists"]["index"]:
for artist in index["artist"]:
yield artist
def walk_genres():
"""
(ID3 tags)
Request all genres and iterate over each item.
"""
response = connection.getGenres()
for genre in response["genres"]["genre"]:
yield genre
def walk_albums(ltype, size=None, fromYear=None,toYear=None, genre=None, offset=None):
"""
(ID3 tags)
Request all albums for a given genre and iterate over each album.
"""
if ltype == 'byGenre' and genre is None:
return
if ltype == 'byYear' and (fromYear is None or toYear is None):
return
response = connection.getAlbumList2(
ltype=ltype, size=size, fromYear=fromYear, toYear=toYear,genre=genre, offset=offset)
if not response["albumList2"]["album"]:
return
for album in response["albumList2"]["album"]:
yield album
def walk_album(album_id):
"""
(ID3 tags)
Request an album and iterate over each item.
"""
response = connection.getAlbum(album_id)
for song in response["album"]["song"]:
yield song
def walk_tracks_random(size=None, genre=None, fromYear=None,toYear=None):
"""
Request random songs by genre and/or year and iterate over each song.
"""
response = connection.getRandomSongs(
size=size, genre=genre, fromYear=fromYear, toYear=toYear)
for song in response["randomSongs"]["song"]:
yield song
def walk_tracks_starred():
"""
Request Subsonic's starred songs and iterate over each item.
"""
response = connection.getStarred()
for song in response["starred"]["song"]:
yield song
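#Consumption sketch for the walk_* generators above (assumes the
#module-level `connection` is authenticated, as they all do):
#  for artist in walk_artists():
#      for album in walk_artist(artist['id']):
#          for song in walk_album(album['id']):
#              plugin.log(song.get('title'))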
# Start plugin from within Kodi.
if __name__ == "__main__":
# Map actions
# Note that we map callable objects without brackets ()
plugin.run()
| mit | 1,061,290,670,147,951,900 | 29.268734 | 151 | 0.568871 | false |
WilJoey/tn_ckan | ckan/lib/field_types.py | 1 | 12439 | import re
import time
import datetime
import warnings
with warnings.catch_warnings():
warnings.filterwarnings('ignore', '.*compile_mappers.*')
import formalchemy
from ckan.common import OrderedDict
months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
class DateConvertError(Exception):
pass
class DateType(object):
'''Utils for handling dates in forms.
* Full or partial dates
* User inputs in form DD/MM/YYYY and it is stored in db as YYYY-MM-DD.
'''
format_types = ('form', 'db')
datetime_fields = OrderedDict([('year', (1000, 2100, 4, 'YYYY')),
('month', (1, 12, 2, 'MM')),
('day', (1, 31, 2, 'DD')),
('hour', (0, 23, 2, 'HH')),
('minute', (0, 59, 2, 'MM')),
])
datetime_fields_indexes = {'min':0, 'max':1, 'digits':2, 'format_code':3}
date_fields_order = {'db':('year', 'month', 'day'),
'form':('day', 'month', 'year')}
parsing_separators = {'date':'-/',
'time':':\.'}
default_separators = {'db':{'date':'-',
'time':':'},
'form':{'date':'/',
'time':':'},}
field_code_map = {'year':'YYYY', 'month':'MM', 'day':'DD',
'hour':'HH', 'minute':'MM'}
word_match = re.compile('[A-Za-z]+')
timezone_match = re.compile('(\s[A-Z]{3})|(\s[+-]\d\d:?\d\d)')
months_abbreviated = [month[:3] for month in months]
@classmethod
def parse_timedate(cls, timedate_str, format_type):
'''Takes a timedate and returns a dictionary of the fields.
* Little validation is done.
* If it can\'t understand the layout it raises DateConvertError
'''
assert format_type in cls.format_types
if not hasattr(cls, 'matchers'):
# build up a list of re matches for the different
# acceptable ways of expressing the time and date
cls.matchers = {}
cls.readable_formats = {}
for format_type_ in cls.format_types:
finished_regexps = []
readable_formats = [] # analogous to the regexps,
# but human readable
year_re = '(?P<%s>\d{2,4})'
month_re = '(?P<%s>\w+)'
two_digit_decimal_re = '(?P<%s>\d{1,2})'
time_re = '%s[%s]%s' % (
two_digit_decimal_re % 'hour',
cls.parsing_separators['time'],
two_digit_decimal_re % 'minute')
time_readable = '%s%s%s' % (
cls.datetime_fields['hour'][cls.datetime_fields_indexes['format_code']],
cls.default_separators[format_type_]['time'],
cls.datetime_fields['minute'][cls.datetime_fields_indexes['format_code']])
date_field_re = {'year':year_re % 'year',
'month':month_re % 'month',
'day':two_digit_decimal_re % 'day'}
date_fields = list(cls.date_fields_order[format_type_])
for how_specific in ('day', 'month', 'year'):
date_sep_re = '[%s]' % cls.parsing_separators['date']
date_sep_readable = cls.default_separators[format_type_]['date']
date_field_regexps = [date_field_re[field] for field in date_fields]
date_field_readable = [cls.datetime_fields[field][cls.datetime_fields_indexes['format_code']] for field in date_fields]
date_re = date_sep_re.join(date_field_regexps)
date_readable = date_sep_readable.join(date_field_readable)
finished_regexps.append(date_re)
readable_formats.append(date_readable)
date_fields.remove(how_specific)
full_date_re = finished_regexps[0]
full_date_readable = readable_formats[0]
# Allow time to be before or after the date
for format_ in ('%(time_re)s%(sep)s%(full_date_re)s',
'%(full_date_re)s%(sep)s%(time_re)s'):
finished_regexps.insert(0, format_ % {
'time_re':time_re,
'sep':'\s',
'full_date_re':full_date_re})
readable_formats.insert(0, format_ % {
'time_re':time_readable,
'sep':' ',
'full_date_re':full_date_readable})
cls.matchers[format_type_] = [re.compile('^%s$' % regexp) for regexp in finished_regexps]
cls.readable_formats[format_type_] = readable_formats
#print format_type_, finished_regexps, readable_formats
for index, matcher in enumerate(cls.matchers[format_type]):
match = matcher.match(timedate_str)
if match:
timedate_dict = match.groupdict()
timedate_dict = cls.int_timedate(timedate_dict)
timedate_dict['readable_format'] = cls.readable_formats[format_type][index]
return timedate_dict
else:
acceptable_formats = ', '.join(["'%s'" % format_ for format_ in cls.readable_formats[format_type]])
raise DateConvertError("Cannot parse %s date '%s'. Acceptable formats: %s" % (format_type, timedate_str, acceptable_formats))
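    # Example sketch of the parsed result (readable_format as built above):
    #   DateType.parse_timedate('27/2/2005', 'form')
    #   # -> {'day': 27, 'month': 2, 'year': 2005, 'readable_format': 'DD/MM/YYYY'}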
@classmethod
def int_timedate(cls, timedate_dict):
# Convert timedate string values to integers
int_timedate_dict = timedate_dict.copy()
for field in cls.datetime_fields.keys():
if timedate_dict.has_key(field):
val = timedate_dict[field]
if field == 'year':
if len(val) == 2:
# Deal with 2 digit dates
try:
int_val = int(val)
except ValueError:
raise DateConvertError('Expecting integer for %s value: %s' % (field, val))
val = cls.add_centurys_to_two_digit_year(int_val)
elif len(val) == 3:
raise DateConvertError('Expecting 2 or 4 digit year: "%s"' % (val))
if field == 'month':
# Deal with months expressed as words
if val in months:
val = months.index(val) + 1
if val in cls.months_abbreviated:
val = cls.months_abbreviated.index(val) + 1
try:
int_timedate_dict[field] = int(val)
except ValueError:
raise DateConvertError('Expecting integer for %s value: %s' % (field, val))
return int_timedate_dict
@classmethod
def iso_to_db(cls, iso_date, format):
# e.g. 'Wed, 06 Jan 2010 09:30:00'
# '%a, %d %b %Y %H:%M:%S'
assert isinstance(iso_date, (unicode, str))
try:
date_tuple = time.strptime(iso_date, format)
except ValueError, e:
raise DateConvertError('Could not read date as ISO format "%s". Date provided: "%s"' % (format, iso_date))
date_obj = datetime.datetime(*date_tuple[:4])
date_str = cls.date_to_db(date_obj)
return date_str
@classmethod
def strip_iso_timezone(cls, iso_date):
return cls.timezone_match.sub('', iso_date)
@classmethod
def form_to_db(cls, form_str, may_except=True):
'''
27/2/2005 -> 2005-02-27
27/Feb/2005 -> 2005-02-27
2/2005 -> 2005-02
Feb/2005 -> 2005-02
2005 -> 2005
'''
try:
# Allow blank input or None
if not form_str:
return u''
form_str = form_str.strip()
if not form_str:
return u''
# Parse form value
timedate_dict = cls.parse_timedate(form_str, 'form')
# Check range of dates and format as standard string
try:
db_datetime = cls.format(timedate_dict, 'db')
except DateConvertError, e:
msg = 'Date error reading in format \'%s\': %s' % (timedate_dict['readable_format'], ' '.join(e.args))
raise DateConvertError(msg)
return db_datetime
except DateConvertError, e:
if may_except:
raise e
else:
return form_str
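    # Round-trip sketch using the class defaults above:
    #   DateType.form_to_db(u'27/Feb/2005')  # -> u'2005-02-27'
    #   DateType.db_to_form(u'2005-02-27')   # -> u'27/2/2005'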
@classmethod
def date_to_db(cls, date):
'''
datetime.date(2005, 2, 27) -> 2005-02-27
'''
assert isinstance(date, datetime.date)
date_str = date.strftime('%Y-%m-%d')
return date_str
@classmethod
def format(cls, datetime_dict, format_type):
'''Takes datetime_dict and formats them either for
the form or the database. If it encounters an out
of range value, it raises an exception.
'''
assert isinstance(datetime_dict, dict)
assert format_type in ('form', 'db')
# convert each field to a string
str_datetime_dict = {} # strings by field
for field in cls.datetime_fields:
if not datetime_dict.has_key(field):
break
val = datetime_dict[field]
min_, max_ = cls.datetime_fields[field][cls.datetime_fields_indexes['min']:cls.datetime_fields_indexes['max'] + 1]
if val < min_ or val > max_:
raise DateConvertError('%s value of "%s" is out of range.' % (field.capitalize(), val))
if format_type == 'form':
int_format_string = '%d'
elif format_type == 'db':
num_digits = cls.datetime_fields['hour'][cls.datetime_fields_indexes['digits']]
int_format_string = '%%0%sd' % num_digits
str_datetime_dict[field] = int_format_string % val
# assemble the date
date_fields = []
for field in cls.date_fields_order[format_type]:
if str_datetime_dict.has_key(field):
date_fields.append(str_datetime_dict[field])
formatted_datetime = unicode(cls.default_separators[format_type]['date'].join(date_fields))
# add in the time if specified
if str_datetime_dict.has_key('hour'):
if format_type == 'form':
datetime_format_string = '%(hour)s%(time_separator)s%(minute)s %(date)s'
elif format_type == 'db':
datetime_format_string = '%(date)s %(hour)s%(time_separator)s%(minute)s'
format_dict = str_datetime_dict.copy()
format_dict['date'] = formatted_datetime
format_dict['time_separator'] = cls.default_separators[format_type]['time']
formatted_datetime = datetime_format_string % format_dict
return formatted_datetime
@staticmethod
def form_validator(form_date_str, field=None):
try:
DateType.form_to_db(form_date_str)
except DateConvertError, e:
raise formalchemy.ValidationError(e)
@classmethod
def db_to_form(cls, db_str):
        '2005-02-27 -> 27/2/2005 if in the correct format; otherwise display as-is.'
db_str = db_str.strip()
if not db_str:
return db_str
try:
timedate_dict = cls.parse_timedate(db_str, 'db')
except DateConvertError, e:
# cannot parse - simply display as-is
return db_str
try:
datetime_form = cls.format(timedate_dict, 'form')
except DateConvertError, e:
# values out of range - simply display as-is
return db_str
return datetime_form
@classmethod
def add_centurys_to_two_digit_year(cls, year, near_year=2010):
assert isinstance(year, int)
assert isinstance(near_year, int)
assert year < 1000, repr(year)
assert near_year > 1000 and near_year < 2200, repr(near_year)
year += 1000
while abs(year - near_year) > 50:
year += 100
return year
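    # Windowing sketch: a two-digit year resolves to within 50 years of
    # near_year (2010 by default):
    #   DateType.add_centurys_to_two_digit_year(99)  # -> 1999
    #   DateType.add_centurys_to_two_digit_year(5)   # -> 2005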
| mit | -3,390,214,483,191,561,700 | 43.584229 | 139 | 0.51845 | false |
fergalmoran/dss | spa/migrations/0056_auto__add_field_label_object_created__add_field_label_object_updated__.py | 1 | 38640 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Label.object_created'
db.add_column(u'spa_label', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Label.object_updated'
db.add_column(u'spa_label', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Playlist.object_created'
db.add_column(u'spa_playlist', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Playlist.object_updated'
db.add_column(u'spa_playlist', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Mix.object_created'
db.add_column(u'spa_mix', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Mix.object_updated'
db.add_column(u'spa_mix', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Tracklist.object_created'
db.add_column(u'spa_tracklist', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Tracklist.object_updated'
db.add_column(u'spa_tracklist', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'ReleaseAudio.object_created'
db.add_column(u'spa_releaseaudio', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'ReleaseAudio.object_updated'
db.add_column(u'spa_releaseaudio', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Genre.object_created'
db.add_column(u'spa_genre', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Genre.object_updated'
db.add_column(u'spa_genre', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'UserProfile.object_created'
db.add_column(u'spa_userprofile', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'UserProfile.object_updated'
db.add_column(u'spa_userprofile', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Venue.object_created'
db.add_column(u'spa_venue', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Venue.object_updated'
db.add_column(u'spa_venue', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field '_Lookup.object_created'
db.add_column(u'spa__lookup', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field '_Lookup.object_updated'
db.add_column(u'spa__lookup', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Activity.object_created'
db.add_column(u'spa_activity', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Activity.object_updated'
db.add_column(u'spa_activity', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Release.object_created'
db.add_column(u'spa_release', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Release.object_updated'
db.add_column(u'spa_release', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'PurchaseLink.object_created'
db.add_column(u'spa_purchaselink', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'PurchaseLink.object_updated'
db.add_column(u'spa_purchaselink', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'ChatMessage.object_created'
db.add_column(u'spa_chatmessage', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'ChatMessage.object_updated'
db.add_column(u'spa_chatmessage', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Comment.object_created'
db.add_column(u'spa_comment', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Comment.object_updated'
db.add_column(u'spa_comment', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
# Adding field 'Notification.object_created'
db.add_column(u'spa_notification', 'object_created',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now_add=True, blank=True),
keep_default=False)
# Adding field 'Notification.object_updated'
db.add_column(u'spa_notification', 'object_updated',
self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2014, 8, 19, 0, 0), auto_now=True, db_index=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Label.object_created'
db.delete_column(u'spa_label', 'object_created')
# Deleting field 'Label.object_updated'
db.delete_column(u'spa_label', 'object_updated')
# Deleting field 'Playlist.object_created'
db.delete_column(u'spa_playlist', 'object_created')
# Deleting field 'Playlist.object_updated'
db.delete_column(u'spa_playlist', 'object_updated')
# Deleting field 'Mix.object_created'
db.delete_column(u'spa_mix', 'object_created')
# Deleting field 'Mix.object_updated'
db.delete_column(u'spa_mix', 'object_updated')
# Deleting field 'Tracklist.object_created'
db.delete_column(u'spa_tracklist', 'object_created')
# Deleting field 'Tracklist.object_updated'
db.delete_column(u'spa_tracklist', 'object_updated')
# Deleting field 'ReleaseAudio.object_created'
db.delete_column(u'spa_releaseaudio', 'object_created')
# Deleting field 'ReleaseAudio.object_updated'
db.delete_column(u'spa_releaseaudio', 'object_updated')
# Deleting field 'Genre.object_created'
db.delete_column(u'spa_genre', 'object_created')
# Deleting field 'Genre.object_updated'
db.delete_column(u'spa_genre', 'object_updated')
# Deleting field 'UserProfile.object_created'
db.delete_column(u'spa_userprofile', 'object_created')
# Deleting field 'UserProfile.object_updated'
db.delete_column(u'spa_userprofile', 'object_updated')
# Deleting field 'Venue.object_created'
db.delete_column(u'spa_venue', 'object_created')
# Deleting field 'Venue.object_updated'
db.delete_column(u'spa_venue', 'object_updated')
# Deleting field '_Lookup.object_created'
db.delete_column(u'spa__lookup', 'object_created')
# Deleting field '_Lookup.object_updated'
db.delete_column(u'spa__lookup', 'object_updated')
# Deleting field 'Activity.object_created'
db.delete_column(u'spa_activity', 'object_created')
# Deleting field 'Activity.object_updated'
db.delete_column(u'spa_activity', 'object_updated')
# Deleting field 'Release.object_created'
db.delete_column(u'spa_release', 'object_created')
# Deleting field 'Release.object_updated'
db.delete_column(u'spa_release', 'object_updated')
# Deleting field 'PurchaseLink.object_created'
db.delete_column(u'spa_purchaselink', 'object_created')
# Deleting field 'PurchaseLink.object_updated'
db.delete_column(u'spa_purchaselink', 'object_updated')
# Deleting field 'ChatMessage.object_created'
db.delete_column(u'spa_chatmessage', 'object_created')
# Deleting field 'ChatMessage.object_updated'
db.delete_column(u'spa_chatmessage', 'object_updated')
# Deleting field 'Comment.object_created'
db.delete_column(u'spa_comment', 'object_created')
# Deleting field 'Comment.object_updated'
db.delete_column(u'spa_comment', 'object_updated')
# Deleting field 'Notification.object_created'
db.delete_column(u'spa_notification', 'object_created')
# Deleting field 'Notification.object_updated'
db.delete_column(u'spa_notification', 'object_updated')
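    # To apply or reverse this migration with South (assuming the usual
    # manage.py setup):
    #   ./manage.py migrate spa 0056
    #   ./manage.py migrate spa 0055   # roll back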
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'schedule.calendar': {
'Meta': {'object_name': 'Calendar'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '200'})
},
'schedule.event': {
'Meta': {'object_name': 'Event'},
'calendar': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['schedule.Calendar']", 'null': 'True', 'blank': 'True'}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {}),
'end_recurring_period': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['schedule.Rule']", 'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'schedule.rule': {
'Meta': {'object_name': 'Rule'},
'description': ('django.db.models.fields.TextField', [], {}),
'frequency': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'params': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'spa._lookup': {
'Meta': {'object_name': '_Lookup'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'spa.activity': {
'Meta': {'object_name': 'Activity'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.UserProfile']", 'null': 'True', 'blank': 'True'})
},
'spa.activitycomment': {
'Meta': {'object_name': 'ActivityComment', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_comments'", 'to': "orm['spa.Mix']"})
},
'spa.activitydownload': {
'Meta': {'object_name': 'ActivityDownload', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_downloads'", 'to': "orm['spa.Mix']"})
},
'spa.activityfavourite': {
'Meta': {'object_name': 'ActivityFavourite', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_favourites'", 'to': "orm['spa.Mix']"})
},
'spa.activityfollow': {
'Meta': {'object_name': 'ActivityFollow', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_follow'", 'to': "orm['spa.UserProfile']"})
},
'spa.activitylike': {
'Meta': {'object_name': 'ActivityLike', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_likes'", 'to': "orm['spa.Mix']"})
},
'spa.activityplay': {
'Meta': {'object_name': 'ActivityPlay', '_ormbases': ['spa.Activity']},
u'activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa.Activity']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'activity_plays'", 'to': "orm['spa.Mix']"})
},
'spa.chatmessage': {
'Meta': {'object_name': 'ChatMessage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'chat_messages'", 'null': 'True', 'to': "orm['spa.UserProfile']"})
},
'spa.comment': {
'Meta': {'object_name': 'Comment'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['spa.Mix']"}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'time_index': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'spa.genre': {
'Meta': {'object_name': 'Genre'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'spa.label': {
'Meta': {'object_name': 'Label'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'})
},
'spa.mix': {
'Meta': {'object_name': 'Mix'},
'description': ('django.db.models.fields.TextField', [], {}),
'download_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'duration': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'favourites': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'favourites'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['spa.UserProfile']"}),
'filetype': ('django.db.models.fields.CharField', [], {'default': "'mp3'", 'max_length': '10'}),
'genres': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['spa.Genre']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_featured': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'likes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['spa.UserProfile']"}),
'mix_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '1024', 'blank': 'True'}),
'mp3tags_updated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'uid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '38', 'blank': 'True'}),
'upload_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'mixes'", 'to': "orm['spa.UserProfile']"}),
'waveform_generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'spa.notification': {
'Meta': {'object_name': 'Notification'},
'accepted_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'notifications'", 'null': 'True', 'to': "orm['spa.UserProfile']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notification_html': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'notification_text': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'notification_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'target': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_notications'", 'to': "orm['spa.UserProfile']"}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
},
'spa.playlist': {
'Meta': {'object_name': 'Playlist'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mixes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['spa.Mix']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'playlists'", 'to': "orm['spa.UserProfile']"})
},
'spa.purchaselink': {
'Meta': {'object_name': 'PurchaseLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'track': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'purchase_link'", 'to': "orm['spa.Tracklist']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'spa.recurrence': {
'Meta': {'object_name': 'Recurrence', '_ormbases': ['spa._Lookup']},
u'_lookup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Lookup']", 'unique': 'True', 'primary_key': 'True'})
},
'spa.release': {
'Meta': {'object_name': 'Release'},
'embed_code': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'release_artist': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'release_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)'}),
'release_description': ('django.db.models.fields.TextField', [], {}),
'release_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'release_label': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.Label']"}),
'release_title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.UserProfile']"})
},
'spa.releaseaudio': {
'Meta': {'object_name': 'ReleaseAudio'},
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'release': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_audio'", 'null': 'True', 'to': "orm['spa.Release']"})
},
'spa.show': {
'Meta': {'object_name': 'Show', '_ormbases': ['schedule.Event']},
u'event_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['schedule.Event']", 'unique': 'True', 'primary_key': 'True'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'show'", 'to': "orm['spa.Mix']"})
},
'spa.tracklist': {
'Meta': {'object_name': 'Tracklist'},
'artist': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.SmallIntegerField', [], {}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tracklist'", 'to': "orm['spa.Mix']"}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'remixer': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'timeindex': ('django.db.models.fields.TimeField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'spa.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'activity_sharing': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'activity_sharing_networks': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'avatar_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '1024', 'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'social'", 'max_length': '15'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'following': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'followers'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['spa.UserProfile']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_known_session': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'default': 'None', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': u"orm['auth.User']"})
},
'spa.venue': {
'Meta': {'object_name': 'Venue'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now_add': 'True', 'blank': 'True'}),
'object_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 8, 19, 0, 0)', 'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'venue_address': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'venue_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'venue_name': ('django.db.models.fields.CharField', [], {'max_length': '250'})
}
}
complete_apps = ['spa'] | bsd-2-clause | -8,021,846,938,036,338,000 | 71.361423 | 205 | 0.571429 | false |
sirMackk/ZeroNet | src/Site/SiteStorage.py | 1 | 18469 | import os
import re
import shutil
import json
import time
import sys
import sqlite3
import gevent.event
from Db import Db
from Debug import Debug
from Config import config
from util import helper
from Plugin import PluginManager
@PluginManager.acceptPlugins
class SiteStorage(object):
def __init__(self, site, allow_create=True):
self.site = site
self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory
self.allowed_dir = os.path.abspath(self.directory.decode(sys.getfilesystemencoding())) # Only serve file within this dir
self.log = site.log
self.db = None # Db class
self.db_checked = False # Checked db tables since startup
self.event_db_busy = None # Gevent AsyncResult if db is working on rebuild
self.has_db = self.isFile("dbschema.json") # The site has schema
if not os.path.isdir(self.directory):
if allow_create:
os.mkdir(self.directory) # Create directory if not found
else:
raise Exception("Directory not exists: %s" % self.directory)
# Load db from dbschema.json
def openDb(self, check=True):
try:
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
except Exception, err:
raise Exception("dbschema.json is not a valid JSON: %s" % err)
if check:
if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Not exist or null
self.rebuildDb()
if not self.db:
self.db = Db(schema, db_path)
if check and not self.db_checked:
changed_tables = self.db.checkTables()
if changed_tables:
                self.rebuildDb(delete_db=False) # TODO: only update the changed table data
def closeDb(self):
if self.db:
self.db.close()
self.event_db_busy = None
self.db = None
# Return db class
def getDb(self):
if not self.db:
self.log.debug("No database, waiting for dbschema.json...")
self.site.needFile("dbschema.json", priority=3)
self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist
if self.has_db:
self.openDb()
return self.db
# Return possible db files for the site
def getDbFiles(self):
for content_inner_path, content in self.site.content_manager.contents.iteritems():
# content.json file itself
            if self.isFile(content_inner_path): # content.json file itself exists
yield self.getPath(content_inner_path), self.open(content_inner_path)
else:
self.log.error("[MISSING] %s" % content_inner_path)
# Data files in content.json
content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site
for file_relative_path in content["files"].keys():
if not file_relative_path.endswith(".json"):
                    continue # We are only interested in json files
file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
if self.isFile(file_inner_path):
yield self.getPath(file_inner_path), self.open(file_inner_path)
else:
self.log.error("[MISSING] %s" % file_inner_path)
# Rebuild sql cache
def rebuildDb(self, delete_db=True):
self.has_db = self.isFile("dbschema.json")
if not self.has_db:
return False
self.event_db_busy = gevent.event.AsyncResult()
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path) and delete_db:
if self.db:
self.db.close() # Close db if open
time.sleep(0.5)
self.log.info("Deleting %s" % db_path)
try:
os.unlink(db_path)
except Exception, err:
self.log.error("Delete error: %s" % err)
self.db = None
self.openDb(check=False)
self.log.info("Creating tables...")
self.db.checkTables()
self.log.info("Importing data...")
cur = self.db.getCursor()
cur.execute("BEGIN")
cur.logging = False
found = 0
s = time.time()
try:
for file_inner_path, file in self.getDbFiles():
try:
if self.db.loadJson(file_inner_path, file=file, cur=cur):
found += 1
except Exception, err:
self.log.error("Error importing %s: %s" % (file_inner_path, Debug.formatException(err)))
finally:
cur.execute("END")
self.log.info("Imported %s data file in %ss" % (found, time.time() - s))
self.event_db_busy.set(True) # Event done, notify waiters
self.event_db_busy = None # Clear event
# Execute sql query or rebuild on dberror
def query(self, query, params=None):
if self.event_db_busy: # Db not ready for queries
self.log.debug("Wating for db...")
self.event_db_busy.get() # Wait for event
try:
res = self.getDb().execute(query, params)
except sqlite3.DatabaseError, err:
if err.__class__.__name__ == "DatabaseError":
self.log.error("Database error: %s, query: %s, try to rebuilding it..." % (err, query))
self.rebuildDb()
res = self.db.cur.execute(query, params)
else:
raise err
return res
# Open file object
def open(self, inner_path, mode="rb"):
return open(self.getPath(inner_path), mode)
# Open file object
def read(self, inner_path, mode="r"):
return open(self.getPath(inner_path), mode).read()
# Write content to file
def write(self, inner_path, content):
file_path = self.getPath(inner_path)
# Create dir if not exist
file_dir = os.path.dirname(file_path)
if not os.path.isdir(file_dir):
os.makedirs(file_dir)
# Write file
if hasattr(content, 'read'): # File-like object
with open(file_path, "wb") as file:
shutil.copyfileobj(content, file) # Write buff to disk
else: # Simple string
with open(file_path, "wb") as file:
file.write(content)
del content
self.onUpdated(inner_path)
# Remove file from filesystem
def delete(self, inner_path):
file_path = self.getPath(inner_path)
os.unlink(file_path)
self.onUpdated(inner_path, file=False)
def deleteDir(self, inner_path):
dir_path = self.getPath(inner_path)
os.rmdir(dir_path)
def rename(self, inner_path_before, inner_path_after):
for retry in range(3):
            # To work around the "The process cannot access the file because it is being used by another process." error
try:
os.rename(self.getPath(inner_path_before), self.getPath(inner_path_after))
err = None
break
except Exception, err:
self.log.error("%s rename error: %s (retry #%s)" % (inner_path_before, err, retry))
time.sleep(0.1 + retry)
if err:
raise err
# List files from a directory
def list(self, dir_inner_path):
directory = self.getPath(dir_inner_path)
for root, dirs, files in os.walk(directory):
root = root.replace("\\", "/")
root_relative_path = re.sub("^%s" % re.escape(directory), "", root).lstrip("/")
for file_name in files:
if root_relative_path: # Not root dir
yield root_relative_path + "/" + file_name
else:
yield file_name
# Site content updated
def onUpdated(self, inner_path, file=None):
file_path = self.getPath(inner_path)
# Update Sql cache
if inner_path == "dbschema.json":
self.has_db = self.isFile("dbschema.json")
# Reopen DB to check changes
if self.has_db:
self.closeDb()
self.openDb()
elif not config.disable_db and inner_path.endswith(".json") and self.has_db: # Load json file to db
if config.verbose:
self.log.debug("Loading json file to db: %s" % inner_path)
try:
self.getDb().loadJson(file_path, file)
except Exception, err:
self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err)))
self.closeDb()
# Load and parse json file
def loadJson(self, inner_path):
with self.open(inner_path) as file:
return json.load(file)
# Write formatted json file
def writeJson(self, inner_path, data):
content = json.dumps(data, indent=1, sort_keys=True)
# Make it a little more compact by removing unnecessary white space
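        # Illustrative effect of the compaction below (hypothetical data):
        #   '{\n  "size": 776\n },\n'  ->  '{"size": 776},\n'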
def compact_dict(match):
if "\n" in match.group(0):
return match.group(0).replace(match.group(1), match.group(1).strip())
else:
return match.group(0)
content = re.sub("\{(\n[^,\[\{]{10,100}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL)
# Remove end of line whitespace
content = re.sub("(?m)[ ]+$", "", content)
# Write to disk
self.write(inner_path, content)
# Get file size
def getSize(self, inner_path):
path = self.getPath(inner_path)
try:
return os.path.getsize(path)
except:
return 0
# File exist
def isFile(self, inner_path):
return os.path.isfile(self.getPath(inner_path))
# File or directory exist
def isExists(self, inner_path):
return os.path.exists(self.getPath(inner_path))
# Dir exist
def isDir(self, inner_path):
return os.path.isdir(self.getPath(inner_path))
# Security check and return path of site's file
def getPath(self, inner_path):
inner_path = inner_path.replace("\\", "/") # Windows separator fix
if not inner_path:
return self.directory
file_path = u"%s/%s" % (self.directory, inner_path)
if ".." in file_path:
raise Exception(u"File not allowed: %s" % file_path)
return file_path
# Get site dir relative path
def getInnerPath(self, path):
if path == self.directory:
inner_path = ""
else:
inner_path = re.sub("^%s/" % re.escape(self.directory), "", path)
return inner_path
# Verify all files sha512sum using content.json
def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True):
bad_files = []
i = 0
if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first
self.log.debug("VerifyFile content.json not exists")
self.site.needFile("content.json", update=True) # Force update to fix corrupt file
self.site.content_manager.loadContent() # Reload content.json
for content_inner_path, content in self.site.content_manager.contents.items():
i += 1
if i % 50 == 0:
time.sleep(0.0001) # Context switch to avoid gevent hangs
if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file
self.log.debug("[MISSING] %s" % content_inner_path)
bad_files.append(content_inner_path)
for file_relative_path in content.get("files", {}).keys():
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if not os.path.isfile(file_path):
self.log.debug("[MISSING] %s" % file_inner_path)
bad_files.append(file_inner_path)
continue
if quick_check:
ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"]
else:
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
if not ok:
self.log.debug("[CHANGED] %s" % file_inner_path)
if add_changed or content.get("cert_user_id"): # If updating own site only add changed user files
bad_files.append(file_inner_path)
# Optional files
optional_added = 0
optional_removed = 0
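            # Keep the hashfield (the advertised set of locally available
            # optional files) in sync below: files that verify are registered,
            # files that are missing or corrupt are deregistered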
for file_relative_path in content.get("files_optional", {}).keys():
file_node = content["files_optional"][file_relative_path]
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
file_inner_path = file_inner_path.strip("/") # Strip leading /
file_path = self.getPath(file_inner_path)
if not os.path.isfile(file_path):
if self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"])
if add_optional:
bad_files.append(file_inner_path)
continue
if quick_check:
ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"]
else:
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb"))
if ok:
if not self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
self.site.content_manager.optionalDownloaded(file_inner_path, file_node["sha512"], file_node["size"])
optional_added += 1
else:
if self.site.content_manager.hashfield.hasHash(file_node["sha512"]):
self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"])
optional_removed += 1
bad_files.append(file_inner_path)
self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path)
if config.verbose:
self.log.debug(
"%s verified: %s, quick: %s, optionals: +%s -%s" %
(content_inner_path, len(content["files"]), quick_check, optional_added, optional_removed)
)
time.sleep(0.0001) # Context switch to avoid gevent hangs
return bad_files
# Check and try to fix site files integrity
def updateBadFiles(self, quick_check=True):
s = time.time()
bad_files = self.verifyFiles(
quick_check,
add_optional=self.site.isDownloadable(""),
add_changed=not self.site.settings.get("own") # Don't overwrite changed files if site owned
)
self.site.bad_files = {}
if bad_files:
for bad_file in bad_files:
self.site.bad_files[bad_file] = 1
self.log.debug("Checked files in %.2fs... Found bad files: %s, Quick:%s" % (time.time() - s, len(bad_files), quick_check))
    # Delete all of the site's files
def deleteFiles(self):
self.log.debug("Deleting files from content.json...")
files = [] # Get filenames
for content_inner_path in self.site.content_manager.contents.keys():
content = self.site.content_manager.contents[content_inner_path]
files.append(content_inner_path)
# Add normal files
for file_relative_path in content.get("files", {}).keys():
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)
# Add optional files
for file_relative_path in content.get("files_optional", {}).keys():
file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir
files.append(file_inner_path)
if self.isFile("dbschema.json"):
self.log.debug("Deleting db file...")
self.closeDb()
self.has_db = False
try:
schema = self.loadJson("dbschema.json")
db_path = self.getPath(schema["db_file"])
if os.path.isfile(db_path):
os.unlink(db_path)
except Exception, err:
self.log.error("Db file delete error: %s" % err)
for inner_path in files:
path = self.getPath(inner_path)
if os.path.isfile(path):
for retry in range(5):
try:
os.unlink(path)
break
except Exception, err:
self.log.error("Error removing %s: %s, try #%s" % (path, err, retry))
time.sleep(float(retry)/10)
self.onUpdated(inner_path, False)
self.log.debug("Deleting empty dirs...")
for root, dirs, files in os.walk(self.directory, topdown=False):
for dir in dirs:
path = os.path.join(root, dir)
if os.path.isdir(path) and os.listdir(path) == []:
os.removedirs(path)
self.log.debug("Removing %s" % path)
if os.path.isdir(self.directory) and os.listdir(self.directory) == []:
os.removedirs(self.directory) # Remove sites directory if empty
if os.path.isdir(self.directory):
self.log.debug("Some unknown file remained in site data dir: %s..." % self.directory)
return False # Some files not deleted
else:
self.log.debug("Site data directory deleted: %s..." % self.directory)
return True # All clean
| gpl-2.0 | -8,272,034,263,457,734,000 | 41.263158 | 130 | 0.561427 | false |
denys-duchier/kivy | kivy/uix/label.py | 1 | 25250 | '''Label
=====
The :class:`Label` widget is for rendering text. It supports ascii and unicode
strings::
# hello world text
l = Label(text='Hello world')
# unicode text; can only display glyphs that are available in the font
l = Label(text=u'Hello world ' + unichr(2764))
# multiline text
l = Label(text='Multi\\nLine')
# size
l = Label(text='Hello world', font_size='20sp')
Text alignment and wrapping
---------------------------
The :class:`Label` has :attr:`halign` and :attr:`valign` properties to
control the alignment of its text, but by default these have no effect
and the text is always centered within the Label. This is for
efficiency; the text is aligned only within the pixel drawing of the
characters, which should normally be as small as possible to minimise
the number of pixels pushed to the GPU. By default, this text image is
only just large enough to contain the characters and is positioned in the
center of the Label.
In order for the alignment properties to take effect, the simplest
solution is to set the :attr:`text_size`, which specifies the size of
the bounding box within which text is aligned. For instance, the
following code binds this size to the size of the Label, so text will
be aligned within the widget bounds. This will also automatically wrap
the text of the Label to remain within this area.
.. code-block:: python
# in Python
from kivy.uix.label import Label
class MyLabel(Label):
pass
# in kv
<MyLabel>:
text_size: self.size
halign: 'right'
valign: 'middle'
Markup text
-----------
.. versionadded:: 1.1.0
You can change the style of the text using :doc:`api-kivy.core.text.markup`.
The syntax is similar to the bbcode syntax but only the inline styling is
allowed::
# hello world with world in bold
l = Label(text='Hello [b]World[/b]', markup=True)
# hello in red, world in blue
l = Label(text='[color=ff3333]Hello[/color][color=3333ff]World[/color]',
markup = True)
If you need to escape the markup from the current text, use
:func:`kivy.utils.escape_markup`::
text = 'This is an important message [1]'
l = Label(text='[b]' + escape_markup(text) + '[/b]', markup=True)
The following tags are available:
``[b][/b]``
Activate bold text
``[i][/i]``
Activate italic text
``[font=<str>][/font]``
Change the font
``[size=<integer>][/size]``
Change the font size
``[color=#<color>][/color]``
Change the text color
``[ref=<str>][/ref]``
Add an interactive zone. The reference + bounding box inside the
reference will be available in :attr:`Label.refs`
``[anchor=<str>]``
Put an anchor in the text. You can get the position of your anchor within
the text with :attr:`Label.anchors`
``[sub][/sub]``
Display the text at a subscript position relative to the text before it.
``[sup][/sup]``
Display the text at a superscript position relative to the text before it.
If you want to render the markup text with a [ or ] or & character, you need to
escape them. We created a simple syntax::
[ -> &bl;
] -> &br;
    & -> &amp;
Then you can write::
"[size=24]Hello &bl;World&bt;[/size]"
Interactive Zone in Text
------------------------
.. versionadded:: 1.1.0
You can now have definable "links" using text markup. The idea is to be able
to detect when the user clicks on part of the text and to react.
The tag ``[ref=xxx]`` is used for that.
In this example, we are creating a reference on the word "World". When
this word is clicked, the function ``print_it`` will be called with the
name of the reference::
def print_it(instance, value):
print('User clicked on', value)
widget = Label(text='Hello [ref=world]World[/ref]', markup=True)
widget.bind(on_ref_press=print_it)
For prettier rendering, you could add a color for the reference. Replace the
``text=`` in the previous example with::
'Hello [ref=world][color=0000ff]World[/color][/ref]'
Usage example
-------------
The following example marks the anchors and references contained in a label::
from kivy.app import App
from kivy.uix.label import Label
from kivy.clock import Clock
from kivy.graphics import Color, Rectangle
class TestApp(App):
@staticmethod
def get_x(label, ref_x):
""" Return the x value of the ref/anchor relative to the canvas """
return label.center_x - label.texture_size[0] * 0.5 + ref_x
@staticmethod
def get_y(label, ref_y):
""" Return the y value of the ref/anchor relative to the canvas """
# Note the inversion of direction, as y values start at the top of
# the texture and increase downwards
return label.center_y + label.texture_size[1] * 0.5 - ref_y
def show_marks(self, label):
# Indicate the position of the anchors with a red top marker
for name, anc in label.anchors.items():
with label.canvas:
Color(1, 0, 0)
Rectangle(pos=(self.get_x(label, anc[0]),
self.get_y(label, anc[1])),
size=(3, 3))
# Draw a green surround around the refs. Note the sizes y inversion
for name, boxes in label.refs.items():
for box in boxes:
with label.canvas:
Color(0, 1, 0, 0.25)
Rectangle(pos=(self.get_x(label, box[0]),
self.get_y(label, box[1])),
size=(box[2] - box[0],
box[1] - box[3]))
def build(self):
label = Label(
text='[anchor=a]a\\nChars [anchor=b]b\\n[ref=myref]ref[/ref]',
markup=True)
Clock.schedule_once(lambda dt: self.show_marks(label), 1)
return label
TestApp().run()
'''
__all__ = ('Label', )
from functools import partial
from kivy.clock import Clock
from kivy.uix.widget import Widget
from kivy.core.text import Label as CoreLabel
from kivy.core.text.markup import MarkupLabel as CoreMarkupLabel
from kivy.properties import StringProperty, OptionProperty, \
NumericProperty, BooleanProperty, ReferenceListProperty, \
ListProperty, ObjectProperty, DictProperty
from kivy.utils import get_hex_from_color
class Label(Widget):
'''Label class, see module documentation for more information.
:Events:
`on_ref_press`
Fired when the user clicks on a word referenced with a
``[ref]`` tag in a text markup.
'''
__events__ = ['on_ref_press']
_font_properties = ('text', 'font_size', 'font_name', 'bold', 'italic',
'halign', 'valign', 'padding_x', 'padding_y',
'text_size', 'shorten', 'mipmap', 'markup',
'line_height', 'max_lines', 'strip', 'shorten_from',
'split_str', 'unicode_errors')
def __init__(self, **kwargs):
self._trigger_texture = Clock.create_trigger(self.texture_update, -1)
self._trigger_markup_color = partial(self._trigger_texture_update, 'color')
super(Label, self).__init__(**kwargs)
# bind all the property for recreating the texture
d = Label._font_properties
fbind = self.fast_bind
update = self._trigger_texture_update
for x in d:
fbind(x, update, x)
self._label = None
self._create_label()
# force the texture creation
self._trigger_texture()
def on_markup(self, inst, markup):
if markup:
self.fast_bind('color', self._trigger_markup_color)
else:
self.fast_unbind('color', self._trigger_markup_color)
def _create_label(self):
# create the core label class according to markup value
if self._label is not None:
cls = self._label.__class__
else:
cls = None
markup = self.markup
if (markup and cls is not CoreMarkupLabel) or \
(not markup and cls is not CoreLabel):
# markup have change, we need to change our rendering method.
d = Label._font_properties
dkw = dict(list(zip(d, [getattr(self, x) for x in d])))
if markup:
self._label = CoreMarkupLabel(**dkw)
else:
self._label = CoreLabel(**dkw)
def _trigger_texture_update(self, name=None, source=None, value=None):
# check if the label core class need to be switch to a new one
if name == 'markup':
self._create_label()
if source:
if name == 'text':
self._label.text = value
elif name == 'text_size':
self._label.usersize = value
elif name == 'font_size':
self._label.options[name] = value
else:
self._label.options[name] = value
self._trigger_texture()
def texture_update(self, *largs):
'''Force texture recreation with the current Label properties.
After this function call, the :attr:`texture` and :attr:`texture_size`
will be updated in this order.
'''
mrkup = self._label.__class__ is CoreMarkupLabel
self.texture = None
if (not self._label.text or (self.halign[-1] == 'y' or self.strip) and
not self._label.text.strip()):
self.texture_size = (0, 0)
if mrkup:
self.refs, self._label._refs = {}, {}
self.anchors, self._label._anchors = {}, {}
else:
if mrkup:
text = self.text
# we must strip here, otherwise, if the last line is empty,
# markup will retain the last empty line since it only strips
# line by line within markup
if self.halign[-1] == 'y' or self.strip:
text = text.strip()
self._label.text = ''.join(('[color=',
get_hex_from_color(self.color),
']', text, '[/color]'))
self._label.refresh()
# force the rendering to get the references
if self._label.texture:
self._label.texture.bind()
self.refs = self._label.refs
self.anchors = self._label.anchors
else:
self._label.refresh()
texture = self._label.texture
if texture is not None:
self.texture = self._label.texture
self.texture_size = list(self.texture.size)
def on_touch_down(self, touch):
if super(Label, self).on_touch_down(touch):
return True
if not len(self.refs):
return False
tx, ty = touch.pos
tx -= self.center_x - self.texture_size[0] / 2.
ty -= self.center_y - self.texture_size[1] / 2.
ty = self.texture_size[1] - ty
for uid, zones in self.refs.items():
for zone in zones:
x, y, w, h = zone
if x <= tx <= w and y <= ty <= h:
self.dispatch('on_ref_press', uid)
return True
return False
def on_ref_press(self, ref):
pass
#
# Properties
#
disabled_color = ListProperty([1, 1, 1, .3])
'''Text color, in the format (r, g, b, a)
.. versionadded:: 1.8.0
:attr:`disabled_color` is a :class:`~kivy.properties.ListProperty` and
    defaults to [1, 1, 1, .3].
'''
text = StringProperty('')
'''Text of the label.
Creation of a simple hello world::
widget = Label(text='Hello world')
If you want to create the widget with an unicode string, use::
widget = Label(text=u'My unicode string')
:attr:`text` is a :class:`~kivy.properties.StringProperty` and defaults to
''.
'''
text_size = ListProperty([None, None])
'''By default, the label is not constrained to any bounding box.
You can set the size constraint of the label with this property.
    The text will autoflow into the constraints. So although the font size
will not be reduced, the text will be arranged to fit into the box as best
as possible, with any text still outside the box clipped.
This sets and clips :attr:`texture_size` to text_size if not None.
.. versionadded:: 1.0.4
For example, whatever your current widget size is, if you want the label to
be created in a box with width=200 and unlimited height::
Label(text='Very big big line', text_size=(200, None))
.. note::
This text_size property is the same as the
:attr:`~kivy.core.text.Label.usersize` property in the
:class:`~kivy.core.text.Label` class. (It is named size= in the
constructor.)
:attr:`text_size` is a :class:`~kivy.properties.ListProperty` and
defaults to (None, None), meaning no size restriction by default.
'''
font_name = StringProperty('DroidSans')
'''Filename of the font to use. The path can be absolute or relative.
Relative paths are resolved by the :func:`~kivy.resources.resource_find`
function.
.. warning::
        Depending on your text provider, the font file can be ignored. However,
you can mostly use this without problems.
If the font used lacks the glyphs for the particular language/symbols
you are using, you will see '[]' blank box characters instead of the
actual glyphs. The solution is to use a font that has the glyphs you
need to display. For example, to display |unicodechar|, use a font such
as freesans.ttf that has the glyph.
.. |unicodechar| image:: images/unicode-char.png
:attr:`font_name` is a :class:`~kivy.properties.StringProperty` and
defaults to 'DroidSans'.
'''
font_size = NumericProperty('15sp')
'''Font size of the text, in pixels.
:attr:`font_size` is a :class:`~kivy.properties.NumericProperty` and
defaults to 15sp.
'''
line_height = NumericProperty(1.0)
'''Line Height for the text. e.g. line_height = 2 will cause the spacing
between lines to be twice the size.
:attr:`line_height` is a :class:`~kivy.properties.NumericProperty` and
defaults to 1.0.
.. versionadded:: 1.5.0
'''
bold = BooleanProperty(False)
'''Indicates use of the bold version of your font.
.. note::
        Depending on your font, the bold attribute may have no impact on your
text rendering.
:attr:`bold` is a :class:`~kivy.properties.BooleanProperty` and defaults to
False.
'''
italic = BooleanProperty(False)
'''Indicates use of the italic version of your font.
.. note::
        Depending on your font, the italic attribute may have no impact on your
text rendering.
:attr:`italic` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
padding_x = NumericProperty(0)
'''Horizontal padding of the text inside the widget box.
:attr:`padding_x` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.
.. versionchanged:: 1.9.0
`padding_x` has been fixed to work as expected.
In the past, the text was padded by the negative of its values.
'''
padding_y = NumericProperty(0)
'''Vertical padding of the text inside the widget box.
:attr:`padding_y` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.
.. versionchanged:: 1.9.0
`padding_y` has been fixed to work as expected.
In the past, the text was padded by the negative of its values.
'''
padding = ReferenceListProperty(padding_x, padding_y)
'''Padding of the text in the format (padding_x, padding_y)
:attr:`padding` is a :class:`~kivy.properties.ReferenceListProperty` of
(:attr:`padding_x`, :attr:`padding_y`) properties.
'''
halign = OptionProperty('left', options=['left', 'center', 'right',
'justify'])
'''Horizontal alignment of the text.
:attr:`halign` is an :class:`~kivy.properties.OptionProperty` and
defaults to 'left'. Available options are : left, center, right and
justify.
.. warning::
This doesn't change the position of the text texture of the Label
(centered), only the position of the text in this texture. You probably
want to bind the size of the Label to the :attr:`texture_size` or set a
:attr:`text_size`.
.. versionchanged:: 1.6.0
A new option was added to :attr:`halign`, namely `justify`.
'''
valign = OptionProperty('bottom', options=['bottom', 'middle', 'top'])
'''Vertical alignment of the text.
:attr:`valign` is an :class:`~kivy.properties.OptionProperty` and defaults
to 'bottom'. Available options are : bottom, middle and top.
.. warning::
This doesn't change the position of the text texture of the Label
(centered), only the position of the text within this texture. You
probably want to bind the size of the Label to the :attr:`texture_size`
or set a :attr:`text_size` to change this behavior.
'''
color = ListProperty([1, 1, 1, 1])
'''Text color, in the format (r, g, b, a)
:attr:`color` is a :class:`~kivy.properties.ListProperty` and defaults to
[1, 1, 1, 1].
'''
texture = ObjectProperty(None, allownone=True)
'''Texture object of the text.
The text is rendered automatically when a property changes. The OpenGL
texture created in this operation is stored in this property. You can use
this :attr:`texture` for any graphics elements.
Depending on the texture creation, the value will be a
:class:`~kivy.graphics.texture.Texture` or
:class:`~kivy.graphics.texture.TextureRegion` object.
.. warning::
The :attr:`texture` update is scheduled for the next frame. If you need
the texture immediately after changing a property, you have to call
the :meth:`texture_update` method before accessing :attr:`texture`::
l = Label(text='Hello world')
# l.texture is good
l.font_size = '50sp'
# l.texture is not updated yet
l.texture_update()
# l.texture is good now.
:attr:`texture` is an :class:`~kivy.properties.ObjectProperty` and defaults
to None.
'''
texture_size = ListProperty([0, 0])
'''Texture size of the text. The size is determined by the font size and
text. If :attr:`text_size` is [None, None], the texture will be the size
required to fit the text, otherwise it's clipped to fit :attr:`text_size`.
When :attr:`text_size` is [None, None], one can bind to texture_size
and rescale it proportionally to fit the size of the label in order to
make the text fit maximally in the label.
.. warning::
The :attr:`texture_size` is set after the :attr:`texture`
property. If you listen for changes to :attr:`texture`,
:attr:`texture_size` will not be up-to-date in your callback.
Bind to :attr:`texture_size` instead.
'''
mipmap = BooleanProperty(False)
'''Indicates whether OpenGL mipmapping is applied to the texture or not.
Read :ref:`mipmap` for more information.
.. versionadded:: 1.0.7
:attr:`mipmap` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
shorten = BooleanProperty(False)
'''
Indicates whether the label should attempt to shorten its textual contents
as much as possible if a :attr:`text_size` is given. Setting this to True
without an appropriately set :attr:`text_size` will lead to unexpected
results.
:attr:`shorten_from` and :attr:`split_str` control the direction from
which the :attr:`text` is split, as well as where in the :attr:`text` we
are allowed to split.
:attr:`shorten` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
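
    A minimal usage sketch (illustrative, not part of the original docs)::

        # shorten needs a width constraint to act on
        Label(text='A very long line of text', shorten=True,
              text_size=(100, None))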
'''
shorten_from = OptionProperty('center', options=['left', 'center',
'right'])
'''The side from which we should shorten the text from, can be left,
right, or center.
For example, if left, the ellipsis will appear towards the left side and we
will display as much text starting from the right as possible. Similar to
:attr:`shorten`, this option only applies when :attr:`text_size` [0] is
not None, In this case, the string is shortened to fit within the specified
width.
.. versionadded:: 1.9.0
:attr:`shorten_from` is a :class:`~kivy.properties.OptionProperty` and
defaults to `center`.
'''
split_str = StringProperty('')
'''The string used to split the :attr:`text` while shortening the string
when :attr:`shorten` is True.
For example, if it's a space, the string will be broken into words and as
many whole words that can fit into a single line will be displayed. If
:attr:`shorten_from` is the empty string, `''`, we split on every character
fitting as much text as possible into the line.
.. versionadded:: 1.9.0
:attr:`split_str` is a :class:`~kivy.properties.StringProperty` and
defaults to `''` (the empty string).
'''
unicode_errors = OptionProperty(
'replace', options=('strict', 'replace', 'ignore'))
'''How to handle unicode decode errors. Can be `'strict'`, `'replace'` or
`'ignore'`.
.. versionadded:: 1.9.0
:attr:`unicode_errors` is an :class:`~kivy.properties.OptionProperty` and
defaults to `'replace'`.
'''
markup = BooleanProperty(False)
'''
.. versionadded:: 1.1.0
If True, the text will be rendered using the
:class:`~kivy.core.text.markup.MarkupLabel`: you can change the
style of the text using tags. Check the
:doc:`api-kivy.core.text.markup` documentation for more information.
:attr:`markup` is a :class:`~kivy.properties.BooleanProperty` and defaults
to False.
'''
refs = DictProperty({})
'''
.. versionadded:: 1.1.0
List of ``[ref=xxx]`` markup items in the text with the bounding box of
all the words contained in a ref, available only after rendering.
For example, if you wrote::
Check out my [ref=hello]link[/ref]
The refs will be set with::
{'hello': ((64, 0, 78, 16), )}
The references marked "hello" have a bounding box at (x1, y1, x2, y2).
These co-ordinates are relative to the top left corner of the text, with
the y value increasing downwards. You can define multiple refs with the same
    name: each occurrence will be added as another (x1, y1, x2, y2) tuple to
this list.
The current Label implementation uses these references if they exist in
your markup text, automatically doing the collision with the touch and
dispatching an `on_ref_press` event.
You can bind a ref event like this::
def print_it(instance, value):
print('User click on', value)
widget = Label(text='Hello [ref=world]World[/ref]', markup=True)
widget.on_ref_press(print_it)
.. note::
This works only with markup text. You need :attr:`markup` set to
True.
'''
anchors = DictProperty({})
'''
.. versionadded:: 1.1.0
Position of all the ``[anchor=xxx]`` markup in the text.
These co-ordinates are relative to the top left corner of the text, with
the y value increasing downwards. Anchors names should be unique and only
    the first occurrence of any duplicate anchors will be recorded.
You can place anchors in your markup text as follows::
text = """
[anchor=title1][size=24]This is my Big title.[/size]
[anchor=content]Hello world
"""
Then, all the ``[anchor=]`` references will be removed and you'll get all
the anchor positions in this property (only after rendering)::
>>> widget = Label(text=text, markup=True)
>>> widget.texture_update()
>>> widget.anchors
{"content": (20, 32), "title1": (20, 16)}
.. note::
This works only with markup text. You need :attr:`markup` set to
True.
'''
max_lines = NumericProperty(0)
'''Maximum number of lines to use, defaults to 0, which means unlimited.
    Please note that :attr:`shorten` takes precedence over this property
    (with shorten, the text is always one line).
.. versionadded:: 1.8.0
:attr:`max_lines` is a :class:`~kivy.properties.NumericProperty` and
defaults to 0.
'''
strip = BooleanProperty(False)
'''Whether leading and trailing spaces and newlines should be stripped from
each displayed line. If True, every line will start at the right or left
edge, depending on :attr:`halign`. If :attr:`halign` is `justify` it is
implicitly True.
.. versionadded:: 1.9.0
:attr:`strip` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
| mit | -6,323,048,173,125,887,000 | 33.400545 | 83 | 0.617465 | false |
Weasyl/weasyl | weasyl/shout.py | 1 | 4702 | import arrow
from libweasyl import staff
from weasyl import define as d
from weasyl import frienduser
from weasyl import ignoreuser
from weasyl import macro as m
from weasyl import media
from weasyl import welcome
from weasyl.comment import thread
from weasyl.error import WeasylError
def select(userid, ownerid, limit=None, staffnotes=False):
statement = ["""
SELECT
sh.commentid, sh.parentid, sh.userid, pr.username,
sh.content, sh.unixtime, sh.settings, sh.hidden_by
FROM comments sh
INNER JOIN profile pr USING (userid)
WHERE sh.target_user = %i
AND sh.settings %s~ 's'
""" % (ownerid, "" if staffnotes else "!")]
# moderators get to view hidden comments
if userid not in staff.MODS:
statement.append(" AND sh.settings !~ 'h'")
if userid:
statement.append(m.MACRO_IGNOREUSER % (userid, "sh"))
statement.append(" ORDER BY sh.commentid")
query = d.execute("".join(statement))
result = thread(query, reverse_top_level=True)
if limit:
result = result[:limit]
media.populate_with_user_media(result)
return result
def count(ownerid, staffnotes=False):
db = d.connect()
sh = d.meta.tables['comments']
op = '~' if staffnotes else '!~'
q = (
d.sa.select([d.sa.func.count()])
.select_from(sh)
.where(sh.c.settings.op(op)('s'))
.where(sh.c.target_user == ownerid))
(ret,), = db.execute(q)
return ret
def insert(userid, target_user, parentid, content, staffnotes):
# Check invalid content
if not content:
raise WeasylError("commentInvalid")
elif not target_user or (not d.is_vouched_for(target_user) and not staffnotes):
raise WeasylError("Unexpected")
# Determine parent userid
if parentid:
parentuserid = d.engine.scalar(
"SELECT userid FROM comments WHERE commentid = %(parent)s",
parent=parentid,
)
if parentuserid is None:
raise WeasylError("shoutRecordMissing")
else:
parentuserid = None
# Check permissions
if userid not in staff.MODS:
if ignoreuser.check(target_user, userid):
raise WeasylError("pageOwnerIgnoredYou")
elif ignoreuser.check(userid, target_user):
raise WeasylError("youIgnoredPageOwner")
elif ignoreuser.check(parentuserid, userid):
raise WeasylError("replyRecipientIgnoredYou")
elif ignoreuser.check(userid, parentuserid):
raise WeasylError("youIgnoredReplyRecipient")
is_banned, _ = d.get_login_settings(target_user)
profile_config = d.get_config(target_user)
if is_banned or "w" in profile_config or "x" in profile_config and not frienduser.check(userid, target_user):
raise WeasylError("insufficientActionPermissions")
# Create comment
settings = 's' if staffnotes else ''
co = d.meta.tables['comments']
db = d.connect()
commentid = db.scalar(
co.insert()
.values(userid=userid, target_user=target_user, parentid=parentid or None, content=content,
unixtime=arrow.utcnow(), settings=settings)
.returning(co.c.commentid))
# Create notification
if parentid and userid != parentuserid:
if not staffnotes or parentuserid in staff.MODS:
welcome.shoutreply_insert(userid, commentid, parentuserid, parentid, staffnotes)
elif not staffnotes and target_user and userid != target_user:
welcome.shout_insert(userid, commentid, otherid=target_user)
d.metric('increment', 'shouts')
return commentid
def remove(userid, commentid=None):
query = d.engine.execute(
"SELECT userid, target_user, settings FROM comments WHERE commentid = %(id)s AND settings !~ 'h'",
id=commentid,
).first()
if not query or ('s' in query[2] and userid not in staff.MODS):
raise WeasylError("shoutRecordMissing")
if userid != query[1] and userid not in staff.MODS:
if userid != query[0]:
raise WeasylError("InsufficientPermissions")
# user is commenter
replies = d.execute(
"SELECT commentid FROM comments WHERE parentid = %d", [commentid])
if replies:
# a commenter cannot remove their comment if it has replies
raise WeasylError("InsufficientPermissions")
# remove notifications
welcome.comment_remove(commentid, 'shout')
d._page_header_info.invalidate(userid)
# hide comment
d.execute("UPDATE comments SET settings = settings || 'h', hidden_by = %i WHERE commentid = %i", [userid, commentid])
return query[1]
| apache-2.0 | -7,151,868,114,302,378,000 | 32.112676 | 121 | 0.64866 | false |
alsmirn/adist | extinction/tabular.py | 1 | 3610 | """
Copyright (c) 2009, Alexey Smirnov
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Saint-Petersburg State University nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY ALEXEY SMIRNOV ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ALEXEY SMIRNOV BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import os
STD_V = {}
STD_BV = {}
# Little bit modernized table of values from:
# http://vizier.cfa.harvard.edu/viz-bin/Cat?J/PAZh/34/21#sRM2.1
__file_with_standards = os.path.join(os.path.dirname(__file__),
"standards.dat")
try:
    standards_file = open(__file_with_standards, 'r')
except IOError:
    import sys
    print "File %s does not exist." % __file_with_standards
    sys.exit(1)
for row in [line.split()[1:] for line in standards_file]:
do_key = lambda lumin: "%s%d" % (row[0], lumin)
for lumin in (1, 3, 5):
key = do_key(lumin)
STD_V[key], STD_BV[key] = map(float, (row[lumin], row[lumin+1]))
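standards_file.close()
# Key format (illustrative): a row whose first token is 'G2' produces the
# keys 'G21', 'G23' and 'G25' -- spectral type G2 with luminosity class
# I, III or V respectively.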
def av_tabular(t_class, s_class, l_class, b_v):
"""
@param t_class: Temperature class, from list 'OBAFGKM'.
@param s_class: Temperature subclass, from 0 to 9.
@param l_class: Luminosity class, like 1, 3 or 5.
@param b_v: B-V value.
@return a_v: full extinction value in visual band.
@author: Alexey Smirnov
    @note: computations are based on the results of the following paper: "Inaccuracies in the
spectral classification of stars from the Tycho-2 Spectral Type Catalogue",
Tsvetkov, A. S.; Popov, A. V.; Smirnov, A. A.,
Astronomy Letters, Volume 34, Issue 1, pp.17-27
"""
t_class_basis = tuple('OBAFGKM')
s_class_basis = range(10)
l_class_basis = (1, 3, 5)
if t_class not in t_class_basis:
raise NameError("Temperature class %s is not in range %s" %
(t_class, t_class_basis))
if s_class not in s_class_basis:
raise NameError("Temperature subclass %s is not in range %s" %
(s_class, s_class_basis))
if l_class not in l_class_basis:
raise NameError("Luminosity class %s is not in range %s" %
(l_class, l_class_basis))
do_key = lambda *args: "%s%d%d" % args
key = do_key(t_class, s_class, l_class)
e_b_v = b_v - STD_BV[key]
r_const = 3.30 + 0.28 * STD_BV[key] + 0.4 * e_b_v
a_v = r_const * e_b_v
return a_v
| bsd-3-clause | -2,222,819,617,115,563,300 | 40.022727 | 80 | 0.667313 | false |
PedroMDuarte/thesis-hubbard-lda_evap | qmc.py | 1 | 16230 |
"""
This file provides a way to obtain thermodynamic quantities from an
interpolation of available QMC solutions
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from matplotlib import rc
rc('font', **{'family':'serif'})
rc('text', usetex=True)
import glob
import os
import ldaconf
basedir = ldaconf.basedir
from scipy.spatial import Delaunay
from scipy.interpolate import CloughTocher2DInterpolator, LinearNDInterpolator
from scipy.interpolate.interpnd import _ndim_coords_from_arrays
import logging
# create logger
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
#logger.disabled = True
def get_qty_mu( dat, mu, MUCOL, COL, **kwargs ):
# Control the interpolation between availble
# density points here
#~qtyinterp = 'nearest'
qtyinterp = 'linear'
msg = kwargs.get('msg', None)
DENSCOL = 1
ENTRCOL = 2
SPICOL = 3
CMPRCOL = 4
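    # Default fallback values used below when mu falls outside the tabulated
    # range: density goes to 0 far below the band and to 2 (band insulator)
    # far above it; the other quantities fall back to the constants below.
    # (The physical reading is our interpretation, not from the data files.)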
if COL == SPICOL:
default_minus = 1.0
default_plus = 0.0
elif COL == ENTRCOL:
default_minus = 0.0
default_plus = 0.0
elif COL == DENSCOL:
default_minus = 0.0
default_plus = 2.0
elif COL == CMPRCOL:
default_minus = 0.0
default_plus = 0.0
else:
raise ValueError("Column not defined: COL = {:d}".format(COL) )
CAREFUL = kwargs.get('careful', True)
if CAREFUL and (mu < -10. or mu > 60.):
CAREFUL = False
if qtyinterp == 'nearest':
index = np.argmin( np.abs(dat[:, MUCOL] - mu ))
qtyresult = dat[index,COL]
else:
# find the two closest chemical potentials that
        # straddle the point
mudat = dat[:,MUCOL]
verbose = False
if np.all(mu < mudat):
qtyresult = default_minus
if COL == DENSCOL or COL == ENTRCOL:
if verbose:
print "QTY=", COL,
print "===>>> mu={:0.2f} ".format(mu), msg
if dat[:,DENSCOL].min() < 0.1 :
qtyresult = default_minus
elif CAREFUL:
return 'out-of-bounds'
#print "====>>> BE CAREFUL : Using default density" + \
# " n=%.2f"%default_minus + \
# " at mu={:0.2f} ".format(mu),
#if msg is not None:
# print msg
#raise ValueError('density error')
elif np.all( mu > mudat):
qtyresult = default_plus
if COL == DENSCOL or COL == ENTRCOL:
if verbose:
print "QTY=", COL,
print "====>>> mu={:0.2f} ".format(mu), msg
if dat[:,DENSCOL].max() > 1.9 :
qtyresult = default_plus
elif CAREFUL:
return 'out-of-bounds'
#print "====>>> BE CAREFUL : Using default density" + \
# " n=%.2f"%default_plus + \
# " at mu={:0.2f} ".format(mu),
#if msg is not None:
# print msg
#raise ValueError('density error')
else:
# since the mu's are ordered we can do:
index0 = np.where( mudat <=mu )[0][-1]
index1 = np.where( mudat > mu )[0][0]
qty0 = dat[ index0, COL ]
qty1 = dat[ index1, COL ]
mu0 = dat[ index0, MUCOL ]
mu1 = dat[ index1, MUCOL ]
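            # Standard linear interpolation between the two straddling points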
qtyresult = qty0 + (mu-mu0) * (qty1-qty0) / (mu1-mu0)
return qtyresult
#print
#print " mu = ", mu
#print "index0 = ", index0
#print "index1 = ", index1
#print "Doing linear interpolation for the qty"
#print " mu0 = ", mu0
#print " mu1 = ", mu1
#print "qty0 = ", qty0
#print "qty1 = ", qty1
#print "qtyresult = ", qtyresult
def find_closest_qmc( U=8, T=0.67, mu=4.0, **kwargs):
"""
This function finds the closest values of U and T in the QMC data
that straddle the values U and T given as arguments.
"""
nUs = 4
nTs = 3
ALLPTS = kwargs.get('ALLPTS', False)
# select which quantity will be returned, options are
# spi and entropy
QTY = kwargs.get('QTY', 'spi' )
if QTY == 'spi':
datadir = basedir + 'COMB_Final_Spi/'
elif QTY == 'entropy':
datadir = basedir + 'COMB_Final_Entr/'
elif QTY == 'density':
datadir = basedir + 'COMB_Final_Spi/'
elif QTY == 'kappa':
datadir = basedir + 'COMB_Final_Spi/'
else:
raise ValueError('Quantity not defined:' + str(QTY) )
fname = datadir + 'U*'
us = [ float(u.split('/U')[-1]) for u in glob.glob(fname) ]
du = [ np.abs(U-u) for u in us ]
index = np.argsort(du)
if ALLPTS:
Ulist0 = range(len(index))
else:
Ulist0 = range( nUs )
us = [ us[index[i]] for i in Ulist0]
#print us
#print du
#print index
#print "Closest Us = ", us
datfiles = []
for u in us:
# For the Spi and Stheta data
if QTY == 'spi' or QTY == 'density' or QTY == 'kappa':
fname = datadir + 'U{U:02d}/T*dat'.format(U=int(u))
fs = sorted(glob.glob(fname))
Ts = [ float(f.split('T')[1].split('.dat')[0]) for f in fs ]
elif QTY=='entropy':
fname = datadir + 'U{U:02d}/S*dat'.format(U=int(u))
fs = sorted(glob.glob(fname))
Ts = [ float(f.split('S')[1].split('.dat')[0]) for f in fs ]
Ts_g = [] ; Ts_l = [];
for t in Ts:
if t > T:
Ts_g.append(t)
else:
Ts_l.append(t)
order_g = np.argsort( [ np.abs( T -t ) for t in Ts_g ] )
order_l = np.argsort( [ np.abs( T -t ) for t in Ts_l ] )
try:
Tpts = [ Ts_g[ order_g[0]] , Ts_l[ order_l[0]] ]
except:
#print
#print "problem adding U=",u, "T=",Ts
#print "available T data does not stride the point"
#print "T =", T
#print "Ts =", Ts
#print "will add nearest Ts nevertheless"
Tpts = [ ]
#raise ValueError("QMC data not available.")
dT = [ np.abs( T - t) for t in Ts ]
index = np.argsort(dT)
if ALLPTS:
Tlist0 = range(len(Ts))
else:
Tlist0 = range( min(nTs , len(Ts)))
for i in Tlist0:
Tnew = Ts[index[i]]
if Tnew not in Tpts:
Tpts.append(Tnew)
for Tpt in Tpts:
index = Ts.index( Tpt )
try:
datfiles.append( [ fs[ index ], u, Ts[index] ] )
except:
print "problem adding U=",u, "T=",Ts
raise
        # Need to make sure that selected T values straddle both
# sides of the point
#print
#print u
#print Ts
#print dT
#print index
#print fs
# for i in range(min(3, len(Ts))):
# try:
# datfiles.append( [ fs[index[i]], u, Ts[index[i]] ] )
# except:
# print "problem adding U=",u, "T=",Ts
# raise
#
#datfiles.append( [ fs[index[1]], u, Ts[index[1]] ] )
#print datfiles
MUCOL = 0
DENSCOL = 1
ENTRCOL = 2
SPICOL = 3
CMPRCOL = 4
if QTY == 'spi':
COL = SPICOL
elif QTY == 'entropy':
COL = ENTRCOL
elif QTY == 'density':
COL = DENSCOL
elif QTY == 'kappa':
COL = CMPRCOL
msg0 = 'U={:0.2f}, T={:0.2f}'.format(U,T)
logger.debug("number of nearby points = " + str(len(datfiles)))
basedat = []
basedaterr = []
datserr = []
for mm, f in enumerate(datfiles):
# f[0] is the datafile name
# f[1] is U
# f[2] is T
radius = kwargs.get('radius', np.nan )
msg = 'U={:0.2f}, T={:0.2f}'.format(U,T) + \
' mu={:0.2f}, r={:0.2f}, Upt={:0.3f}, Tpt={:0.3f}'.\
format(mu, radius, f[1], f[2])
try:
dat = np.loadtxt(f[0])
spival = get_qty_mu( dat, mu, MUCOL, COL, msg=msg )
# Toggle the false here to plot all of the out of bounds
if spival == 'out-of-bounds':
#spival_symmetry =
logger.info('qty is out of bounds')
basedaterr.append( [f[1], f[2], np.nan] )
datserr.append( dat )
if False:
fig = plt.figure( figsize=(3.5,3.5))
gs = matplotlib.gridspec.GridSpec( 1,1 ,\
left=0.15, right=0.96, bottom=0.12, top=0.88)
ax = fig.add_subplot( gs[0] )
ax.grid(alpha=0.5)
ax.plot( dat[:,MUCOL], dat[:,COL], '.-')
ax.axvline( mu )
ax.text( 0.5, 1.05, msg, ha='center', va='bottom', \
transform=ax.transAxes, fontsize=6.)
if matplotlib.get_backend() == 'agg':
fig.savefig('err_mu_%02d.png'%mm, dpi=200)
plt.close(fig)
else:
plt.show()
plt.close(fig)
continue
else:
basedat.append( [f[1], f[2], spival] )
except Exception as e :
print "Failed to get data from file = ", f
# toggle plotting, not implemented yet:
if True:
fig = plt.figure( figsize=(3.5,3.5))
gs = matplotlib.gridspec.GridSpec( 1,1 ,\
left=0.15, right=0.96, bottom=0.12, top=0.88)
ax = fig.add_subplot( gs[0] )
ax.grid(alpha=0.5)
ax.plot( dat[:,MUCOL], dat[:,COL], '.-')
ax.axvline( mu )
ax.text( 0.5, 1.05, msg, ha='center', va='bottom', \
transform=ax.transAxes)
if matplotlib.get_backend() == 'agg':
fig.savefig('err_mu_%02d.png'%mm, dpi=200)
else:
plt.show()
raise e
logger.debug("number of nearby valid points = " + str(len(basedat)))
error = False
points = None
# MAKE THE TRIANGULATION
basedat = np.array(basedat)
Us = np.unique(basedat[:,0] )
Ts = np.unique(basedat[:,1] )
validTriang = not ( len(Us) ==1 or len(Ts) == 1 )
#print "#Us={:d}, #Ts={:d}".format( len(Us), len(Ts) )
#print msg
if validTriang:
points = _ndim_coords_from_arrays(( basedat[:,0] , basedat[:,1]))
#print "Closest dat = ", basedat
#finterp = CloughTocher2DInterpolator(points, basedat[:,2])
finterp = LinearNDInterpolator( points, basedat[:,2] )
else:
logerr = 'not enough finterp points, QTY=%s'%QTY + '\n' + msg + '\n' \
+ "number of basedat pts = " + str(len(basedat))
print basedat
print "len Us = ", len(Us)
print "len Ts = ", len(Ts)
print "len 'out-of-bounds' = ", len( basedaterr )
if len( basedaterr ) > 0:
for bb, bdaterr in enumerate(basedaterr):
msgbb = 'U={:0.2f}, T={:0.2f}'.format(U,T) +\
' mu={:0.2f}, r={:0.2f}, Upt={:0.3f}, Tpt={:0.3f}'.\
format(mu, radius, basedaterr[bb][0], basedaterr[bb][1] )
daterr = datserr[bb]
fig = plt.figure( figsize=(3.5,3.5))
gs = matplotlib.gridspec.GridSpec( 1,1 ,\
left=0.15, right=0.96, bottom=0.12, top=0.88)
ax = fig.add_subplot( gs[0] )
ax.grid(alpha=0.5)
ax.plot( daterr[:,MUCOL], daterr[:,COL], '.-')
ax.axvline( mu )
ax.text( 0.5, 1.05, msgbb, ha='center', va='bottom', \
transform=ax.transAxes, fontsize=6.)
if matplotlib.get_backend() == 'agg':
fig.savefig('err_mu_%02d.png'%bb, dpi=200)
plt.close(fig)
else:
plt.show()
plt.close(fig)
logger.exception(logerr)
raise ValueError('finterp')
    if points is None:
logger.warning( "points object is None" )
    if not error:
try:
result = finterp( U,T )
if np.isnan(result):
if U >= 30.0 and U <=32.5:
result = finterp( 29.99, T )
logger.warning(" qmc: U={:0.1f} replaced to U=29.99 ".\
format(U) )
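                    # (Assumption: the tabulated QMC data stops just below
                    # U=30, so queries with 30 <= U <= 32.5 are served by the
                    # last available column at U=29.99 as a crude fallback.)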
if np.isnan(result):
raise Exception("\n!!!! qmc: Invalid result, QTY:%s!!!!\n"%QTY \
+ msg0)
except Exception as e:
if kwargs.get('error_nan', False):
return np.nan
else:
error = True
logger.exception("Invalid QTY result!")
    if not error:
if result >= 8. and QTY == 'spi' :
print " Obtained Spi > 8. : U={:0.2f}, T={:0.2f}, mu={:0.2f}".\
format( U, T, mu ),
print " ==> Spi={:0.2f}".format(float(result))
error = True
elif result >=4. and QTY == 'entropy':
print " Obtained Ent > 4. : U={:0.2f}, T={:0.2f}, mu={:0.2f}".\
format( U, T, mu ),
print " ==> Result={:0.2f}".format(float(result))
error = True
logger.debug("error status = " + str(error))
if error or kwargs.get('showinterp',False):
logger.debug("Inside error if statement...")
if kwargs.get('error_nan', False):
pass
#return np.nan
#print "Interp points:"
#print basedat
if len(basedat) == 0 and len(basedaterr) > 0 :
basedaterr = np.array(basedaterr)
Userr = np.unique(basedaterr[:,0] )
Tserr = np.unique(basedaterr[:,1] )
validTriangerr = not ( len(Userr) ==1 or len(Tserr) == 1 )
points = _ndim_coords_from_arrays(( basedaterr[:,0] , basedaterr[:,1]))
tri = Delaunay(points)
else:
tri = Delaunay(points)
fig = plt.figure( figsize=(3.5,3.5))
gs = matplotlib.gridspec.GridSpec( 1,1 ,\
left=0.15, right=0.96, bottom=0.12, top=0.88)
ax = fig.add_subplot( gs[0] )
ax.grid(alpha=0.5)
ax.triplot(points[:,0], points[:,1], tri.simplices.copy())
ax.plot(points[:,0], points[:,1], 'o')
ax.plot( U, T, 'o', ms=6., color='red')
xlim = ax.get_xlim()
dx = (xlim[1]-xlim[0])/10.
ax.set_xlim( xlim[0]-dx, xlim[1]+dx )
ylim = ax.get_ylim()
dy = (ylim[1]-ylim[0])/10.
ax.set_ylim( ylim[0]-dy, ylim[1]+dy )
ax.set_xlabel('$U/t$')
ax.set_ylabel('$T/t$',rotation=0,labelpad=8)
tt = kwargs.get('title_text','')
ax.set_title( tt + '$U/t={:.2f}$'.format(U) + \
',\ \ ' + '$T/t={:.2f}$'.format(T), \
ha='center', va='bottom', fontsize=10)
save_err = kwargs.get('save_err',None)
if save_err is not None:
print "Saving png."
fig.savefig( save_err, dpi=300)
if matplotlib.get_backend() == 'agg':
fig.savefig('err.png', dpi=200)
print "Saved error to err.png"
else:
plt.show()
if not kwargs.get('single', False):
raise ValueError("Could not interpolate using QMC data.")
if ALLPTS:
if 'savepath' in kwargs.keys():
fig.savefig( kwargs.get('savepath',None) , dpi=300)
if error:
raise
return result
| mit | -3,425,785,279,829,014,000 | 31.721774 | 83 | 0.463648 | false |
charityscience/csh-sms | tests/jobs/test_text_reminder_job.py | 1 | 2487 | import mock
from mock import patch, call
from freezegun import freeze_time
from datetime import datetime
from django.test import TestCase
from tests.fixtures import contact_object
from modules.text_reminder import TextReminder
from jobs import text_reminder_job
FAKE_NOW = datetime(2017, 7, 17, 0, 0)
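# freeze_time pins "now" so the contacts' ages, and therefore which reminders
# are due, stay deterministic across test runs.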
class TextReminderJobTests(TestCase):
@freeze_time(FAKE_NOW)
@patch("logging.info")
@patch("modules.text_reminder.Texter.send")
def test_remind_two_people(self, mocked_send_text, mocked_logger):
c1 = contact_object(name="Roland",
phone_number="1-111-1111",
date_of_birth="12/6/2017") # 7 days before 6 week appointment
c2 = contact_object(name="Sai",
phone_number="1-112-1111",
date_of_birth="12/6/2017",
language="Hindi")
text_reminder_job.remind_all()
calls = [call(message=TextReminder(c1).get_reminder_msg(),
phone_number=c1.phone_number),
call(message=TextReminder(c2).get_reminder_msg(),
phone_number=c2.phone_number)]
mocked_send_text.assert_has_calls(calls, any_order=True)
self.assertEqual(mocked_send_text.call_count, 2)
@freeze_time(FAKE_NOW)
@patch("logging.info")
@patch("modules.text_reminder.Texter.send")
def test_remind_two_people_but_not_the_cancelled_one(self, mocked_send_text, mocked_logger):
c1 = contact_object(name="Roland",
phone_number="1-111-1111",
date_of_birth="12/6/2017") # 7 days before 6 week appointment
c2 = contact_object(name="Sai",
phone_number="1-112-1111",
date_of_birth="12/6/2017",
language="Hindi")
c3 = contact_object(name="Cancelled",
phone_number="1-111-1112",
date_of_birth="12/6/2017")
c3.cancelled = True
c3.save()
text_reminder_job.remind_all()
calls = [call(message=TextReminder(c1).get_reminder_msg(),
phone_number=c1.phone_number),
call(message=TextReminder(c2).get_reminder_msg(),
phone_number=c2.phone_number)]
mocked_send_text.assert_has_calls(calls, any_order=True)
self.assertEqual(mocked_send_text.call_count, 2)
| gpl-3.0 | -2,241,891,620,633,596,700 | 44.218182 | 96 | 0.572577 | false |
hajicj/safire | scripts/text_preprocessing_explorer.py | 1 | 5761 | #!/usr/bin/env python
import argparse
from copy import deepcopy
import itertools
import logging
import operator
import os
import random
import webbrowser
from safire.data.text_browser import TextBrowser
import safire.utils
from safire.data.image_browser import ImageBrowser
from safire.data.loaders import MultimodalDatasetLoader, IndexLoader, \
ModelLoader, MultimodalShardedDatasetLoader
__author__ = 'Jan Hajic jr.'
##############################################################################
def build_argument_parser():
parser = argparse.ArgumentParser(description=__doc__, add_help=True)
parser.add_argument('-r', '--root', action='store', default=None,
required=True, help='The path to'+
' the directory which is the root of a dataset.' +
' (Will be passed to a Loader as a root.)')
parser.add_argument('-n', '--name', help='The dataset name passed to the' +
' Loader. Has to correspond to the *.vtlist file name.')
parser.add_argument('-l', '--labels', nargs='+',
help='The corpus labels.')
parser.add_argument('--first_n_sentences', type=int, default=10,
help='Display only this many sentences from the '
'beginning of a text.')
parser.add_argument('-v', '--verbose', action='store_true', help='Turn on'+
' INFO logging messages.')
parser.add_argument('--debug', action='store_true', help='Turn on debug '+
'prints.')
return parser
def print_interactive_help():
"""Prints the help message for interactive mode."""
    print 'Text preprocessing explorer interactive mode help\n' \
          '=================================================\n' \
          '\n' \
          'Commands:\n' \
          '   h ... help\n' \
          '   c N ... compare representations for N-th document in vtlist\n' \
          '   q|e ... exit (will ask for confirmation)\n' \
          '\n' \
          'On the \'c\' command, will show the raw text followed by one\n' \
          'column per model of its highest-scoring words and their weights.'
def run_interactive(vtlist, raw_corpus, raw_browser,
corpora, browsers, labels):
exit_commands = frozenset(['q', 'e'])
compare_commands = frozenset(['c'])
help_commands = frozenset(['h'])
# Starting settings
highest_scoring = 10
exit_interactive = False
while not exit_interactive:
# Parse command
user_input = raw_input('--> ')
split_input = user_input.split(' ', 1)
if len(split_input) > 1:
command, options = split_input
else:
command = split_input[0]
options = None
# Execute command
if command in help_commands:
print_interactive_help()
continue
elif command in compare_commands:
N = int(options)
text = raw_browser.get_text(N)
btext = text + '\n[end of text]\n'
#print btext
representations = []
for label, browser in zip(labels, browsers):
representation = browser.get_word_representation(N,
highest_scoring=highest_scoring)
# Add headers to representation
representation = [('model', label), ('-----', '-----')] \
+ representation
representations.append(representation)
all_representations = list(itertools.chain(*representations))
# ???
formatted_repr = raw_browser.format_representation(
all_representations, n_cols=len(representations))
output = text + '\n\n' + formatted_repr
raw_browser.text_to_window(output)
elif command in exit_commands:
confirmation = raw_input('-[y/n]-> ')
if confirmation in exit_commands or confirmation == '' \
or confirmation == 'y':
exit_interactive = True
continue
else:
print 'Invalid command %s' % command
def main(args):
logging.info('Initializing loaders with root %s, name %s' % (
args.root, args.name))
dloader = MultimodalShardedDatasetLoader(args.root, args.name)
vtlist_file = dloader.layout.vtlist
with open(os.path.join(args.root, vtlist_file)) as vtlist_handle:
vtlist = [ l.strip() for l in vtlist_handle ]
# The corpus and browser used for displaying the raw texts
raw_text_corpus = dloader.load_text_corpus()
raw_text_browser = TextBrowser(args.root, raw_text_corpus,
first_n_sentences=args.first_n_sentences)
# The browsers from which we pull representations
text_corpora = [ dloader.load_text_corpus(label) for label in args.labels]
text_browsers = [ TextBrowser(args.root, corpus,
first_n_sentences=args.first_n_sentences)
for corpus in text_corpora ]
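    # One corpus/browser pair per label lets the interactive 'c' command show
    # the same document under several preprocessing models side by side.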
run_interactive(vtlist, raw_text_corpus, raw_text_browser,
text_corpora, text_browsers, args.labels)
# Explicit delete
del raw_text_browser
for browser in text_browsers:
del browser
if __name__ == '__main__':
parser = build_argument_parser()
args = parser.parse_args()
if args.debug:
logging.basicConfig(format='%(levelname)s : %(message)s',
level=logging.DEBUG)
elif args.verbose:
logging.basicConfig(format='%(levelname)s : %(message)s',
level=logging.INFO)
main(args) | gpl-3.0 | -4,264,359,828,596,876,000 | 34.349693 | 82 | 0.559799 | false |
indexofire/cdc | cdc/contrib/cache/helpers.py | 1 | 12245 | # -*- coding: utf-8 -*-
import types
import hashlib
import logging
import cPickle as pickle
from django.conf import settings
from django.core.cache import cache
from django.utils.encoding import smart_str
from keyedcache.utils import is_string_like, is_list_or_tuple
log = logging.getLogger('cdc_cache')
# The debugging variable CACHED_KEYS is exact only with the Django
# debugging server (or any single worker process server) and without restarting
# the server between restarts of the main cache (memcached).
# Keys in CACHED_KEYS variable never expire and can eat much memory on long
# running servers. Currently it is not confirmed in Satchmo.
# If more worker processes are used, the reported values of the following three
# variables can skip randomly upwards or downwards.
CACHED_KEYS = {}
CACHE_CALLS = 0
CACHE_HITS = 0
KEY_DELIM = "::"
REQUEST_CACHE = {'enabled' : False}
try:
CACHES = getattr(settings, "CACHES")
CACHE_BACKEND = CACHES['default']['BACKEND']
try:
CACHE_PREFIX = CACHES['default']['KEY_PREFIX']
except KeyError:
CACHE_PREFIX = str(settings.SITE_ID)
log.warn("No KEY_PREFIX found in settings.CACHES['default'], using SITE_ID. Please update your settings to add a CACHES")
try:
CACHE_TIMEOUT = CACHES['default']['TIMEOUT']
except KeyError:
CACHE_TIMEOUT = getattr(settings, 'CACHE_TIMEOUT', 0)
log.warn("No TIMEOUT found in settings.CACHES['default'], so we used %s%s. "
"Please update your settings to add a TIMEOUT and avoid this warning.",
CACHE_TIMEOUT,
CACHE_TIMEOUT == 0 and ", disabling the cache system" or "")
except AttributeError:
try:
CACHE_BACKEND = settings.CACHE_BACKEND
except AttributeError:
CACHE_BACKEND = "locmem://"
log.warn("No cache settings are set. Using default locmem. Please update your settings")
try:
CACHE_PREFIX = settings.CACHE_PREFIX
except AttributeError:
CACHE_PREFIX = str(settings.SITE_ID)
log.warn("No CACHE_PREFIX found in settings, using SITE_ID. Please update your settings to add a CACHE_PREFIX")
try:
CACHE_TIMEOUT = settings.CACHE_TIMEOUT
except AttributeError:
CACHE_TIMEOUT = 0
log.warn("No CACHE_TIMEOUT found in settings, so we used 0, disabling the cache system. Please update your settings to add a CACHE_TIMEOUT and avoid this warning.")
_CACHE_ENABLED = CACHE_TIMEOUT > 0
class CacheWrapper(object):
def __init__(self, val, inprocess=False):
self.val = val
self.inprocess = inprocess
def __str__(self):
return str(self.val)
def __repr__(self):
return repr(self.val)
def wrap(cls, obj):
if isinstance(obj, cls):
return obj
else:
return cls(obj)
wrap = classmethod(wrap)
class MethodNotFinishedError(Exception):
def __init__(self, f):
self.func = f
class NotCachedError(Exception):
def __init__(self, k):
self.key = k
class CacheNotRespondingError(Exception):
pass
def cache_delete(*keys, **kwargs):
"""
Deletes the object identified by all ``keys`` from the cache.
keys:
        Parameters of general type which are convertible to a string or hashable
unambiguously.
kwargs:
children:
If it is True more objects starting with these keys are deleted.
other kwargs:
            Unknown key=val is interpreted like two additional keys: (key, val)
If no keys are present, all cached objects are to be deleted.
    Deleting multiple or all objects is usually not complete if the
project is running with multiple worker processes.
(It is reliable e.g. with a development server.)
"""
removed = []
if cache_enabled():
global CACHED_KEYS
log.debug('cache_delete')
children = kwargs.pop('children', False)
if (keys or kwargs):
key = cache_key(*keys, **kwargs)
if CACHED_KEYS.has_key(key):
del CACHED_KEYS[key]
removed.append(key)
cache.delete(key)
if children:
key = key + KEY_DELIM
children = [x for x in CACHED_KEYS.keys() if x.startswith(key)]
for k in children:
del CACHED_KEYS[k]
cache.delete(k)
removed.append(k)
else:
key = "All Keys"
deleteneeded = _cache_flush_all()
removed = CACHED_KEYS.keys()
if deleteneeded:
for k in CACHED_KEYS:
cache.delete(k)
CACHED_KEYS = {}
if removed:
log.debug("Cache delete: %s", removed)
else:
log.debug("No cached objects to delete for %s", key)
return removed
def cache_delete_function(func):
return cache_delete(['func', func.__name__, func.__module__], children=True)
def cache_enabled():
global _CACHE_ENABLED
return _CACHE_ENABLED
def cache_enable(state=True):
global _CACHE_ENABLED
_CACHE_ENABLED=state
def _cache_flush_all():
if is_memcached_backend():
cache._cache.flush_all()
return False
return True
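# (_cache_flush_all returns False when the memcached backend flushed
# everything natively, and True when the caller still has to delete the
# cached keys one by one.)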
def cache_function(length=CACHE_TIMEOUT):
"""
A variant of the snippet posted by Jeff Wheeler at
http://www.djangosnippets.org/snippets/109/
Caches a function, using the function and its arguments as the key, and the return
value as the value saved. It passes all arguments on to the function, as
it should.
The decorator itself takes a length argument, which is the number of
seconds the cache will keep the result around.
It will put a temp value in the cache while the function is
processing. This should not matter in most cases, but if the app is using
threads, you won't be able to get the previous value, and will need to
wait until the function finishes. If this is not desired behavior, you can
remove the first two lines after the ``else``.
"""
def decorator(func):
def inner_func(*args, **kwargs):
if not cache_enabled():
value = func(*args, **kwargs)
else:
try:
value = cache_get('func', func.__name__, func.__module__, args, kwargs)
except NotCachedError, e:
# This will set a temporary value while ``func`` is being
# processed. When using threads, this is vital, as otherwise
# the function can be called several times before it finishes
# and is put into the cache.
funcwrapper = CacheWrapper(".".join([func.__module__, func.__name__]), inprocess=True)
cache_set(e.key, value=funcwrapper, length=length, skiplog=True)
value = func(*args, **kwargs)
cache_set(e.key, value=value, length=length)
except MethodNotFinishedError, e:
value = func(*args, **kwargs)
return value
return inner_func
return decorator
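# A minimal usage sketch for cache_function (illustrative only; the function
# name and its helper below are hypothetical, not part of this module):
#
#     @cache_function(length=300)
#     def expensive_lookup(product_id):
#         return do_slow_query(product_id)
#
# The first call stores the result under a key built from the function's
# module, name and arguments; repeat calls within 300 seconds are served by
# cache_get() without re-running the function.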
def cache_get(*keys, **kwargs):
"""
Gets the object identified by all ``keys`` from the cache.
kwargs:
default:
Default value used if the object is not in the cache. If the object
is not found and ``default`` is not set or is None, the exception
``NotCachedError`` is raised with the attribute ``.key = keys``.
other kwargs:
            Unknown key=val is interpreted like two additional keys: (key, val)
"""
if kwargs.has_key('default'):
default_value = kwargs.pop('default')
use_default = True
else:
use_default = False
key = cache_key(keys, **kwargs)
if not cache_enabled():
raise NotCachedError(key)
else:
global CACHE_CALLS, CACHE_HITS, REQUEST_CACHE
CACHE_CALLS += 1
if CACHE_CALLS == 1:
cache_require()
obj = None
tid = -1
if REQUEST_CACHE['enabled']:
tid = cache_get_request_uid()
if tid > -1:
try:
obj = REQUEST_CACHE[tid][key]
log.debug('Got from request cache: %s', key)
except KeyError:
pass
if obj == None:
obj = cache.get(key)
if obj and isinstance(obj, CacheWrapper):
CACHE_HITS += 1
CACHED_KEYS[key] = True
log.debug('got cached [%i/%i]: %s', CACHE_CALLS, CACHE_HITS, key)
if obj.inprocess:
raise MethodNotFinishedError(obj.val)
cache_set_request(key, obj, uid=tid)
return obj.val
else:
try:
del CACHED_KEYS[key]
except KeyError:
pass
if use_default:
return default_value
raise NotCachedError(key)
def cache_set(*keys, **kwargs):
"""Set the object identified by all ``keys`` into the cache.
kwargs:
value:
The object to be cached.
length:
Timeout for the object. Default is CACHE_TIMEOUT.
skiplog:
If it is True the call is never logged. Default is False.
other kwargs:
            Unknown key=val is interpreted like two additional keys: (key, val)
"""
if cache_enabled():
global CACHED_KEYS, REQUEST_CACHE
obj = kwargs.pop('value')
length = kwargs.pop('length', CACHE_TIMEOUT)
skiplog = kwargs.pop('skiplog', False)
key = cache_key(keys, **kwargs)
val = CacheWrapper.wrap(obj)
if not skiplog:
log.debug('setting cache: %s', key)
cache.set(key, val, length)
CACHED_KEYS[key] = True
if REQUEST_CACHE['enabled']:
cache_set_request(key, val)
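# Typical paired usage of cache_set/cache_get (illustrative; user_id,
# profile and load_profile are hypothetical):
#
#     cache_set('user', user_id, value=profile, length=600)
#     try:
#         profile = cache_get('user', user_id)
#     except NotCachedError:
#         profile = load_profile(user_id)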
def _hash_or_string(key):
if is_string_like(key) or isinstance(key, (types.IntType, types.LongType, types.FloatType)):
return smart_str(key)
else:
try:
#if it has a PK, use it.
return str(key._get_pk_val())
except AttributeError:
return md5_hash(key)
def cache_key(*keys, **pairs):
"""Smart key maker, returns the object itself if a key, else a list
delimited by ':', automatically hashing any non-scalar objects."""
if len(keys) == 1 and is_list_or_tuple(keys[0]):
keys = keys[0]
if pairs:
keys = list(keys)
for k in sorted(pairs.keys()):
keys.extend((k, pairs[k]))
key = KEY_DELIM.join([_hash_or_string(x) for x in keys])
prefix = CACHE_PREFIX + KEY_DELIM
if not key.startswith(prefix):
key = prefix+key
return key.replace(" ", ".")
def md5_hash(obj):
pickled = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
return hashlib.md5(pickled).hexdigest()
def is_memcached_backend():
try:
return cache._cache.__module__.endswith('memcache')
except AttributeError:
return False
def cache_require():
"""Error if keyedcache isn't running."""
if cache_enabled():
key = cache_key('require_cache')
cache_set(key,value='1')
v = cache_get(key, default = '0')
if v != '1':
raise CacheNotRespondingError()
else:
log.debug("Cache responding OK")
return True
def cache_clear_request(uid):
"""Clears all locally cached elements with that uid"""
global REQUEST_CACHE
try:
del REQUEST_CACHE[uid]
log.debug('cleared request cache: %s', uid)
except KeyError:
pass
def cache_use_request_caching():
global REQUEST_CACHE
REQUEST_CACHE['enabled'] = True
def cache_get_request_uid():
from threaded_multihost import threadlocals
return threadlocals.get_thread_variable('request_uid', -1)
def cache_set_request(key, val, uid=None):
if uid == None:
uid = cache_get_request_uid()
if uid>-1:
global REQUEST_CACHE
if not uid in REQUEST_CACHE:
REQUEST_CACHE[uid] = {key:val}
else:
REQUEST_CACHE[uid][key] = val
| mit | 9,166,052,249,214,150,000 | 30.559278 | 173 | 0.596815 | false |
diplomacy/research | diplomacy_research/scripts/render.py | 1 | 9835 | #!/usr/bin/env python3
# ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Renders a same tournament game
Argument: File path to .json in history folder
"""
import argparse
import os
import multiprocessing
import shutil
from diplomacy import Game
import ujson as json
from diplomacy_research.proto.diplomacy_proto.game_pb2 import SavedGame as SavedGameProto
from diplomacy_research.utils.proto import proto_to_dict, read_next_proto
def render_saved_game(saved_game, output_dir, prefix=''):
""" Renders a specific saved game
:param saved_game: The saved game to render
:param output_dir: The output directory where to save the rendering
:param prefix: An optional prefix to add before the game id
"""
if prefix:
output_dir = os.path.join(output_dir, prefix + '_' + saved_game['id'])
else:
output_dir = os.path.join(output_dir, saved_game['id'])
nb_phases = len(saved_game['phases'])
svg_count = 0
# Checking if already generated
# Otherwise, regenerating completely
if os.path.exists(output_dir):
nb_svg = len([os.path.join(output_dir, file) for file in os.listdir(output_dir) if file[-4:] == '.svg'])
if nb_svg == 2 * nb_phases:
print('Rendered {} (Skipped)'.format(saved_game['id']))
return
shutil.rmtree(output_dir, ignore_errors=True)
os.makedirs(output_dir, exist_ok=True)
# Creating a Game to replay all orders, and a new Game object per phase to validate
entire_game = Game()
if saved_game['phases']:
entire_game.set_state(saved_game['phases'][0]['state'])
# Rendering
for phase in saved_game['phases']:
phase_game = Game()
# Setting state
state = phase['state']
phase_game.set_state(state)
entire_game.note = phase_game.note
# Setting orders
phase_game.clear_orders()
orders = phase['orders']
for power_name in orders:
phase_game.set_orders(power_name, orders[power_name])
entire_game.set_orders(power_name, orders[power_name])
# Validating that we are at the same place
for power_name in orders:
assert sorted(phase_game.get_units(power_name)) == sorted(entire_game.get_units(power_name))
assert sorted(phase_game.get_centers(power_name)) == sorted(entire_game.get_centers(power_name))
# Rendering with and without orders
with open(os.path.join(output_dir, '%03d%s' % (svg_count, '.svg')), 'w') as file:
file.write(entire_game.render(incl_orders=False))
svg_count += 1
with open(os.path.join(output_dir, '%03d%s' % (svg_count, '.svg')), 'w') as file:
file.write(entire_game.render(incl_orders=True))
# Processing (for entire game)
svg_count += 1
entire_game.process()
print('Rendered {}'.format(saved_game['id']))
# =========================================
# ------- JSON RENDERING ----------
# =========================================
def render_json(file_path):
""" Renders a specific json file
:param file_path: The full path to the json file
:return: Nothing, but creates a directory (file_path without '.json') containing the rendered images
"""
dir_path = os.path.dirname(file_path)
# Aborting if file doesn't exist
if not os.path.exists(file_path):
print('File {} does not exist.'.format(file_path))
return
# Loading saved game
file_content = open(file_path, 'r').read()
saved_game = json.loads(file_content)
# Rendering
render_saved_game(saved_game, dir_path)
def render_multi_json_per_folder(history_dir, nb_json_per_folder):
""" Finds all subfolders under history and renders 'nb_jsons' games in each subfolder found
:param history_dir: The full path to the history folder
:param nb_json_per_folder: The number of jsons to render per subfolder
:return: Nothing
"""
jsons_to_render = []
# Finding files to render
subfolders = [os.path.join(history_dir, path)
for path in os.listdir(history_dir)
if os.path.isdir(os.path.join(history_dir, path))]
for folder in subfolders:
json_games = sorted([os.path.join(folder, json_filename)
for json_filename in os.listdir(folder)
if json_filename[-5:] == '.json'])
json_games = json_games[:nb_json_per_folder]
for json_path in json_games:
jsons_to_render += [json_path]
# Running over multiple processes
nb_cores = multiprocessing.cpu_count()
with multiprocessing.Pool(nb_cores) as pool:
pool.map(render_json, jsons_to_render)
# =========================================
# ------- PROTO RENDERING ----------
# =========================================
def render_saved_game_proto(saved_game_proto, output_dir, prefix='', json_only=False):
""" Renders a saved game proto
:param saved_game_proto: A `.proto.game.SavedGame` object
:param output_dir: The output directory where the save the renderings
:param prefix: An optional prefix to add before the game id
:param json_only: Indicates we only want to extract the underlying JSON
"""
saved_game = proto_to_dict(saved_game_proto)
if json_only:
os.makedirs(os.path.join(output_dir, 'json'), exist_ok=True)
output_path = os.path.join(output_dir, 'json', prefix + '_' + saved_game['id'] + '.json')
with open(output_path, 'w') as file:
file.write(json.dumps(saved_game))
print('Saved JSON for {}'.format(saved_game['id']))
else:
render_saved_game(saved_game, output_dir, prefix)
def render_proto_file(file_path, args, compressed=True):
""" Renders all saved game proto in a proto file
:param file_path: The path to the proto file
:param args: The parsed command line arguments
:param compressed: Boolean that indicates if compression was used.
"""
dir_path = os.path.dirname(file_path)
game_count = 0
# Aborting if file doesn't exist
if not os.path.exists(file_path):
print('File {} does not exist.'.format(file_path))
return
# Processing filter
games_to_render = []
if args.filter:
for part in args.filter.split(','):
if '-' in part:
start, stop = part.split('-')
games_to_render += list(range(int(start), int(stop) + 1))
elif ':' in part:
start, stop, step = part.split(':')
games_to_render += list(range(int(start), int(stop) + 1, int(step)))
else:
games_to_render += [int(part)]
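        # e.g. (illustrative) --filter '1-3,7,10:14:2' yields
        # games_to_render == [1, 2, 3, 7, 10, 12, 14]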
# Rendering each game in the proto file
with open(file_path, 'rb') as file:
while True:
saved_game_proto = read_next_proto(SavedGameProto, file, compressed)
if saved_game_proto is None:
break
game_count += 1
if game_count in games_to_render or (not games_to_render and not args.count):
print('(Game #%d) ' % game_count, end='')
render_saved_game_proto(saved_game_proto, dir_path, prefix='%05d' % game_count, json_only=args.json)
if game_count % 100 == 0 and args.count:
print('... %d games found so far.' % game_count)
# Printing the number of games in the proto file
if args.count:
print('Found %d games in the proto file.' % game_count)
# =========================================
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(description='Render some saved games.')
PARSER.add_argument('--count', action='store_true', help='Count the number of games in the file')
PARSER.add_argument('--json', action='store_true', help='Only extract jsons without rendering the games')
PARSER.add_argument('--filter', help='Only render some games e.g. 1-5,6,8,10:100:2')
PARSER.add_argument('--nb_per_folder', type=int, default=0, help='The number of games per folder to generate')
PARSER.add_argument('file_path', help='The file path containing the saved games.')
ARGS = PARSER.parse_args()
# Rendering a single JSON
# Syntax: render.py <json path>
if ARGS.file_path[-5:] == '.json':
render_json(ARGS.file_path)
exit(0)
# Render a series of game in a .pb file
# Syntax: render.py <pb path>
if ARGS.file_path[-3:] == '.pb':
render_proto_file(ARGS.file_path, ARGS, compressed=False)
exit(0)
if ARGS.file_path[-4:] == '.pbz':
render_proto_file(ARGS.file_path, ARGS, compressed=True)
exit(0)
# Rendering a certain number of JSON per folder
# Syntax: render.py <history/> --nb_per_folder <# of json per folder to generate>
if os.path.exists(ARGS.file_path) and ARGS.nb_per_folder:
render_multi_json_per_folder(ARGS.file_path, ARGS.nb_per_folder)
exit(0)
# Invalid syntax
PARSER.print_help()
exit(-1)
| mit | 5,406,339,455,071,944,000 | 40.673729 | 116 | 0.604677 | false |
Shinao/SmartMirror | Motion/main.py | 1 | 1803 | import cv2
import time, os
from motion import Motion
from tornado import web, ioloop
import threading
import json
import requests
from config import config
import logging
logging.getLogger("requests").setLevel(logging.WARNING)  # keep only warnings and errors from requests

take_photo = False
photo_filepath = ""

# Send gesture to node server
def SendGesture(gesture):
try:
requests.get("http://localhost:3000/motion/gesture", params=json.dumps(gesture.properties))
except Exception as ex:
print("Could not send gesture: " + str(ex))
# Received command from node server to take a photo
def ManageCommands(motion):
global take_photo
if not take_photo:
return
print("Taking photo: " + photo_filepath)
cv2.imwrite("../public/" + photo_filepath, motion.currentFrame)
take_photo = False
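# (take_photo / photo_filepath are simple cross-thread flags set by the
# /takePhoto handler below; the frame is written under ../public, presumably
# the node server's static directory.)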
# Main loop - get gestures and send them
def ManageMotion():
motion = Motion()
while motion.IsActive():
ManageCommands(motion)
# Manage motion and gestures
motion.GetInformationOnNextFrame()
if motion.TimeElapsedSinceLastMotion() > config['timeToWaitWhenNoMovementBeforeSleep']:
time.sleep(config['timeToSleepWhenNoMovement'])
gesture = motion.GetGesture()
threading.Thread(target=SendGesture, args=(gesture,)).start()
motion.Dispose()
os._exit(1)
class CommandHandler(web.RequestHandler):
def get(self):
global take_photo, photo_filepath
filepath = self.get_argument('filepath', 'public/frame.jpg')
take_photo = True
photo_filepath = filepath
if __name__ == '__main__':
threading.Thread(target=ManageMotion).start()
application = web.Application([
(r"/takePhoto", CommandHandler),
])
application.listen(3001)
ioloop.IOLoop.current().start()
| mit | 3,999,531,328,340,736,000 | 26.738462 | 99 | 0.687188 | false |
iceslide/Lili | formatter.py | 1 | 2383 | # -*- coding: utf-8 -*-
import constants
__author__ = constants.__author__
__copyright__ = constants.__copyright__
__license__ = constants.__license__
def format(block):
""" Apply formatting to a block. """
wideopeningchars = constants.WIDE_OPENING_CHARS
text = block.gettext()
newtext = []
isquote = False
for i in range(len(text)):
line = text[i]
lineno = i + 1
if (lineno == 1 and len(line) > 0 and line[0] in wideopeningchars):
isquote = True
newtext.append(_formatline(line, lineno, isquote))
block.settext(newtext)
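# (A first line that starts with a wide opening quote character switches the
# whole block into quotation mode, which changes the indentation and
# closing-character rules applied per line in _formatline below.)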
# =================================================================
def _formatline(line, lineno, isquote):
""" Apply formatting to a line. """
widewhitespace = constants.WIDE_WHITESPACE
wideopeningchars = constants.WIDE_OPENING_CHARS
wideclosingchars = constants.WIDE_CLOSING_CHARS
newline = constants.NEWLINE
#has_newline = line.endswith(newline)
if(line.strip() == ''):
# Empty line or filled with whitespaces
return line
line = line.rstrip()
#
# Indentation rules
#
# Remove leading normal white spaces
while (line.startswith(' ')):
line = line[1:]
#
if (lineno == 1 and isquote):
while (line[0] not in wideopeningchars):
line = line[1:]
if (lineno == 1 and not isquote):
if (not line.startswith(widewhitespace)):
line = widewhitespace + line
# Insert double width whitespace to align lines/paragraph
if (lineno > 1 and isquote):
if (not line.startswith(widewhitespace)):
line = widewhitespace + line
# If no quotation, the lines/paragraph is not aligned
if (lineno > 1 and not isquote):
if (line.startswith(widewhitespace)):
line = line[1:]
# A quote cannot end in dot '.', except in the case of ellipsis "..."
if (isquote):
for c in wideclosingchars:
i = line.find(c)
while(i != -1):
if(line[i - 1] == '.' and not line.endswith('...')):
line = line[:i - 1] + line[i:]
i = line.find(c, i+1)
#if (has_newline):
# line = line + constants.NEWLINE
return line | gpl-3.0 | 1,472,706,962,389,387,500 | 27.380952 | 75 | 0.535879 | false |
Castronova/EMIT | wrappers/odm2.py | 1 | 2672 | __author__ = 'tonycastronova'
import wrappers
import stdlib
from wrappers import base
from utilities import geometry
from utilities.status import Status
from api_old.ODM2.Core.services import readCore
from api_old.ODM2.Results.services import readResults
class wrapper(base.BaseWrapper):
def __init__(self, args):
super(wrapper, self).__init__()
self.args = args
session = self.args['session']
resultid = self.args['resultid']
# get result object and result timeseries
core = readCore(session)
obj = core.getResultByID(resultID=int(resultid))
readres = readResults(session)
results = readres.getTimeSeriesValuesByResultId(resultId=int(resultid))
# separate the date and value pairs in the timeseries
dates = [date.ValueDateTime for date in results]
values = [val.DataValue for val in results]
# basic exchange item info
name = obj.VariableObj.VariableCode
desc = obj.VariableObj.VariableDefinition
type = stdlib.ExchangeItemType.OUTPUT
start = min(dates)
end = max(dates)
# build variable
variable = stdlib.Variable()
variable.VariableDefinition(obj.VariableObj.VariableDefinition)
variable.VariableNameCV(obj.VariableObj.VariableNameCV)
# build unit
unit = stdlib.Unit()
unit.UnitAbbreviation(obj.UnitObj.UnitsAbbreviation)
unit.UnitName(obj.UnitObj.UnitsName)
unit.UnitTypeCV(obj.UnitObj.UnitsTypeCV)
# build geometries
# todo: need to specify srs and elevation
wkb = str(obj.FeatureActionObj.SamplingFeatureObj.FeatureGeometry.data)
geom = geometry.fromWKB(wkb)
# build exchange item object
oei = stdlib.ExchangeItem( name=name,
desc=desc,
geometry=geom,
unit=unit,
variable=variable,type=type )
# set global parameters
self.name(name)
self.simulation_start(start)
self.simulation_end(end)
self.outputs(name=name, value=oei)
self.description(obj.VariableObj.VariableDefinition)
self.current_time(start)
# self.__obj = obj
# self.__resultid = obj.ResultID
# self.__session = session
# set model status
self.status(Status.Loaded)
def type(self):
return wrappers.Types().ODM2
def finish(self):
return
def prepare(self):
self.status(Status.Ready)
def run(self, inputs):
self.status(Status.Finished)
| gpl-2.0 | 3,811,784,181,105,661,000 | 29.022472 | 79 | 0.623503 | false |
ClearingHouse/clearinghoused | docs/conf.py | 1 | 7820 | # -*- coding: utf-8 -*-
#
# clearinghoused documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 20 15:45:40 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'clearinghoused'
copyright = u'2014, Clearinghouse Team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'clearinghouseddoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'clearinghoused.tex', u'clearinghoused Documentation',
u'ClearingHouse Team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'clearinghoused', u'clearinghoused Documentation',
[u'ClearingHouse Team'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'clearinghoused', u'clearinghoused Documentation',
u'ClearingHouse Team', 'clearinghoused', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| mit | 9,053,527,023,928,959,000 | 31.31405 | 80 | 0.707033 | false |
garthg/petitions-dataverse | merge_doi_maps.py | 1 | 2439 | '''merge_doi_maps.py
Copyright 2018 Garth Griffin
Distributed under the GNU GPL v3. For full terms see the file LICENSE.
This file is part of PetitionsDataverse.
PetitionsDataverse is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
PetitionsDataverse is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
PetitionsDataverse. If not, see <http://www.gnu.org/licenses/>.
________________________________________________________________________________
Author: Garth Griffin (http://garthgriffin.com)
Date: February 23 2018
'''
import sys
import collections
import tsvfile
merge_into_tsv = sys.argv[1]
merge_new_tsvs = sys.argv[2:]
def merge(merge_into_tsv, merge_new_tsv):
print 'Merge %s <-- %s' % (merge_into_tsv, merge_new_tsv)
rows = tsvfile.ReadDicts(merge_into_tsv)
update_rows = tsvfile.ReadDicts(merge_new_tsv)
prev_map_id = dict([(x['Local ID'], x['DOI']) for x in rows])
prev_map_doi = dict([(x['DOI'], x['Local ID']) for x in rows])
if len(prev_map_id) != len(rows):
raise ValueError('Non-unique local IDs in %s' % merge_into_tsv)
if len(prev_map_doi) != len(rows):
raise ValueError('Non-unique DOIs in %s' % merge_into_tsv)
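  # prev_map_id / prev_map_doi give O(1) lookups in both directions; the two
  # length checks above guarantee the existing file maps IDs and DOIs 1:1.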
counters = collections.defaultdict(int)
for row in update_rows:
counters['total'] += 1
local_id = row['Local ID']
doi = row['DOI']
needs_update = True
if local_id in prev_map_id:
if prev_map_id[local_id] != doi:
raise ValueError('Conflicted local ID in %s: %s' % (
merge_new_tsv, local_id))
needs_update = False
if doi in prev_map_doi:
if prev_map_doi[doi] != local_id:
raise ValueError('Conflicted DOI in %s: %s' % (merge_new_tsv, doi))
needs_update = False
if needs_update:
counters['update'] += 1
prev_map_id[local_id] = doi
prev_map_doi[doi] = local_id
rows.append(row)
else:
counters['preexisting'] += 1
print str(dict(counters))
tsvfile.WriteDicts(merge_into_tsv, rows)
for f in merge_new_tsvs:
merge(merge_into_tsv, f)
| gpl-3.0 | 7,386,181,290,347,419,000 | 32.410959 | 80 | 0.664207 | false |
k4ml/Marimorepy | mamopublic/common.py | 1 | 2741 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009, MARIMORE Inc Tokyo, Japan.
# Contributed by
# Iqbal Abdullah <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the MARIMORE Inc nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This module defines commonly used code for the mamo package
"""
__author__ = "Iqbal Abdullah <[email protected]>"
__date__ = "$LastChangedDate$"
__version__ = "$LastChangedRevision$"
import re
class BaseClass(object):
"""
BaseClass contains very common functions
    and implementations for mamo classes. All of the mamo package's classes
    use BaseClass as their parent class
"""
def read_only_property(self):
        raise AttributeError("Read-only attribute")
def _prop_set_classname(self, value):
self.read_only_property()
def _prop_get_classname(self):
compiled_re = re.compile("'.*'")
clsname = compiled_re.search("%s" % (self.__class__)).group()
clsname = clsname.replace("'","")
clsname = clsname.replace("%s" % (self.__module__), "")
clsname = clsname.replace(".","")
return clsname
myclassname = property(_prop_get_classname, _prop_set_classname,
doc="Returns the name of the class")
| bsd-3-clause | 641,584,754,417,837,600 | 41.828125 | 85 | 0.69938 | false |
sympsi/sympsi | sympsi/commutator.py | 1 | 7156 | """The commutator: [A,B] = A*B - B*A."""
from __future__ import print_function, division
from sympy import S, Expr, Mul, Add
from sympy.core.compatibility import u
from sympy.integrals.integrals import Integral
from sympy.printing.pretty.stringpict import prettyForm
from sympsi.dagger import Dagger
from sympsi.operator import Operator
__all__ = [
'Commutator'
]
#-----------------------------------------------------------------------------
# Commutator
#-----------------------------------------------------------------------------
class Commutator(Expr):
"""The standard commutator, in an unevaluated state.
Evaluating a commutator is defined [1]_ as: ``[A, B] = A*B - B*A``. This
class returns the commutator in an unevaluated form. To evaluate the
commutator, use the ``.doit()`` method.
    Canonical ordering of a commutator is ``[A, B]`` for ``A < B``. The
arguments of the commutator are put into canonical order using ``__cmp__``.
If ``B < A``, then ``[B, A]`` is returned as ``-[A, B]``.
Parameters
==========
A : Expr
The first argument of the commutator [A,B].
B : Expr
The second argument of the commutator [A,B].
Examples
========
>>> from sympsi import Commutator, Dagger, Operator
>>> from sympy.abc import x, y
>>> A = Operator('A')
>>> B = Operator('B')
>>> C = Operator('C')
Create a commutator and use ``.doit()`` to evaluate it:
>>> comm = Commutator(A, B)
>>> comm
[A,B]
>>> comm.doit()
A*B - B*A
The commutator orders it arguments in canonical order:
>>> comm = Commutator(B, A); comm
-[A,B]
Commutative constants are factored out:
>>> Commutator(3*x*A, x*y*B)
3*x**2*y*[A,B]
Using ``.expand(commutator=True)``, the standard commutator expansion rules
can be applied:
>>> Commutator(A+B, C).expand(commutator=True)
[A,C] + [B,C]
>>> Commutator(A, B+C).expand(commutator=True)
[A,B] + [A,C]
>>> Commutator(A*B, C).expand(commutator=True)
[A,C]*B + A*[B,C]
>>> Commutator(A, B*C).expand(commutator=True)
[A,B]*C + B*[A,C]
Adjoint operations applied to the commutator are properly applied to the
arguments:
>>> Dagger(Commutator(A, B))
-[Dagger(A),Dagger(B)]
References
==========
.. [1] http://en.wikipedia.org/wiki/Commutator
"""
is_commutative = False
def __new__(cls, A, B):
r = cls.eval(A, B)
if r is not None:
return r
obj = Expr.__new__(cls, A, B)
return obj
@classmethod
def eval(cls, a, b):
if not (a and b):
return S.Zero
if a == b:
return S.Zero
if a.is_commutative or b.is_commutative:
return S.Zero
# [xA,yB] -> xy*[A,B]
# from sympy.physics.qmul import QMul
ca, nca = a.args_cnc()
cb, ncb = b.args_cnc()
c_part = ca + cb
if c_part:
return Mul(Mul(*c_part), cls(Mul._from_args(nca), Mul._from_args(ncb)))
# Canonical ordering of arguments
# The Commutator [A, B] is in canonical form if A < B.
if a.compare(b) == 1:
return S.NegativeOne*cls(b, a)
def _eval_expand_commutator(self, **hints):
A = self.args[0]
B = self.args[1]
if isinstance(A, Add):
# [A + B, C] -> [A, C] + [B, C]
sargs = []
for term in A.args:
comm = Commutator(term, B)
if isinstance(comm, Commutator):
comm = comm._eval_expand_commutator()
sargs.append(comm)
return Add(*sargs)
elif isinstance(B, Add):
# [A, B + C] -> [A, B] + [A, C]
sargs = []
for term in B.args:
comm = Commutator(A, term)
if isinstance(comm, Commutator):
comm = comm._eval_expand_commutator()
sargs.append(comm)
return Add(*sargs)
elif isinstance(A, Mul):
# [A*B, C] -> A*[B, C] + [A, C]*B
a = A.args[0]
b = Mul(*A.args[1:])
c = B
comm1 = Commutator(b, c)
comm2 = Commutator(a, c)
if isinstance(comm1, Commutator):
comm1 = comm1._eval_expand_commutator()
if isinstance(comm2, Commutator):
comm2 = comm2._eval_expand_commutator()
first = Mul(a, comm1)
second = Mul(comm2, b)
return Add(first, second)
elif isinstance(B, Mul):
# [A, B*C] -> [A, B]*C + B*[A, C]
a = A
b = B.args[0]
c = Mul(*B.args[1:])
comm1 = Commutator(a, b)
comm2 = Commutator(a, c)
if isinstance(comm1, Commutator):
comm1 = comm1._eval_expand_commutator()
if isinstance(comm2, Commutator):
comm2 = comm2._eval_expand_commutator()
first = Mul(comm1, c)
second = Mul(b, comm2)
return Add(first, second)
elif isinstance(A, Integral):
# [∫adx, B] -> ∫[a, B]dx
func, lims = A.function, A.limits
new_args = [Commutator(func, B)]
for lim in lims:
new_args.append(lim)
return Integral(*new_args)
elif isinstance(B, Integral):
# [A, ∫bdx] -> ∫[A, b]dx
func, lims = B.function, B.limits
new_args = [Commutator(A, func)]
for lim in lims:
new_args.append(lim)
return Integral(*new_args)
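        # (Both Integral branches use linearity: the commutator with an
        # integral equals the integral of the commutator over the same
        # limits.)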
# No changes, so return self
return self
def doit(self, **hints):
""" Evaluate commutator """
A = self.args[0]
B = self.args[1]
if isinstance(A, Operator) and isinstance(B, Operator):
try:
comm = A._eval_commutator(B, **hints)
except NotImplementedError:
try:
comm = -1*B._eval_commutator(A, **hints)
except NotImplementedError:
comm = None
if comm is not None:
return comm.doit(**hints)
return (A*B - B*A).doit(**hints)
def _eval_adjoint(self):
return Commutator(Dagger(self.args[1]), Dagger(self.args[0]))
def _sympyrepr(self, printer, *args):
return "%s(%s,%s)" % (
self.__class__.__name__, printer._print(
self.args[0]), printer._print(self.args[1])
)
def _sympystr(self, printer, *args):
return "[%s,%s]" % (self.args[0], self.args[1])
def _pretty(self, printer, *args):
pform = printer._print(self.args[0], *args)
pform = prettyForm(*pform.right((prettyForm(u(',')))))
pform = prettyForm(*pform.right((printer._print(self.args[1], *args))))
pform = prettyForm(*pform.parens(left='[', right=']'))
return pform
def _latex(self, printer, *args):
return "\\left[%s,%s\\right]" % tuple([
printer._print(arg, *args) for arg in self.args])
| bsd-3-clause | -1,256,890,188,336,245,500 | 30.488987 | 83 | 0.504896 | false |
MangoMangoDevelopment/neptune | lib/ros_comm-1.12.0/tools/rosmaster/test/test_rosmaster_paramserver.py | 2 | 37540 | # Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import unittest
import time
import random
import datetime
from rosgraph.names import make_global_ns, ns_join
# mock thread pool used by the subscription tests
class ThreadPoolMock(object):
def queue_task(*args): pass
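    # (queue_task drops queued notification tasks instead of running them
    # asynchronously, which keeps the parameter-server tests synchronous.)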
## Unit tests for rosmaster.paramserver module
class TestRospyParamServer(unittest.TestCase):
def test_compute_param_updates(self):
from rosmaster.registrations import Registrations
from rosmaster.paramserver import compute_param_updates
# spec requires that subscriptions always have a trailing slash
tests = [
# [correct val], (subscribers, param_key, param_value)
([],({}, '/foo', 1)),
([],({'/bar': 'barapi'}, '/foo/', 1)),
([],({'/bar/': 'barapi'}, '/foo/', 1)),
# make sure that it's robust to aliases
([('fooapi', '/foo/', 1)], ({'/foo/': 'fooapi'}, '/foo', 1)),
([('fooapi', '/foo/', 1)], ({'/foo/': 'fooapi'}, '/foo/', 1)),
# check namespace subscription
([('fooapi', '/foo/val/', 1)], ({'/foo/': 'fooapi'}, '/foo/val', 1)),
# check against dictionary param values
([],({'/bar/': 'barapi'}, '/foo/', {'bar': 2})),
([('fooapi', '/foo/val/', 1)], ({'/foo/val/': 'fooapi'}, '/foo', {'val' : 1})),
([('fooapi', '/foo/bar/val/', 1)], ({'/foo/bar/val/': 'fooapi'}, '/foo', {'bar' : {'val' : 1}})),
([('fooapi', '/foo/bar/', {'val': 1})], ({'/foo/bar/': 'fooapi'}, '/foo', {'bar' : {'val' : 1}})),
([('fooapi', '/foo/', {'bar':{'val': 1}})], ({'/foo/': 'fooapi'}, '/foo', {'bar' : {'val' : 1}})),
([('fooapi', '/foo/', {'bar': 1, 'baz': 2}), ('foobazapi', '/foo/baz/', 2)],
({'/foo/': 'fooapi', '/foo/baz/': 'foobazapi'}, '/foo', {'bar' : 1, 'baz': 2})),
([('foobarapi', '/foo/bar/', 1), ('foobazapi', '/foo/baz/', 2)],
({'/foo/bar/': 'foobarapi', '/foo/baz/': 'foobazapi'}, '/foo', {'bar' : 1, 'baz': 2})),
# deletion of higher level tree
([('delapi', '/del/bar/', {})],
({'/del/bar/': 'delapi'}, '/del', {})),
]
for correct, args in tests:
reg = Registrations(Registrations.PARAM_SUBSCRIPTIONS)
reg.map = args[0]
param_key = args[1]
param_val = args[2]
val = compute_param_updates(reg, param_key, param_val)
self.assertEquals(len(correct), len(val), "Failed: \n%s \nreturned \n%s\nvs correct\n%s"%(str(args), str(val), str(correct)))
for c in correct:
self.assert_(c in val, "Failed: \n%s \ndid not include \n%s. \nIt returned \n%s"%(str(args), c, val))
def notify_task(self, updates):
self.last_update = updates
def test_subscribe_param_simple(self):
from rosmaster.registrations import RegistrationManager
from rosmaster.paramserver import ParamDictionary
# setup node and subscriber data
reg_manager = RegistrationManager(ThreadPoolMock())
param_server = ParamDictionary(reg_manager)
# subscribe to parameter that has not been set yet
self.last_update = None
self.assertEquals({}, param_server.subscribe_param('/foo', ('node1', 'http://node1:1')))
param_server.set_param('/foo', 1, notify_task=self.notify_task)
self.assertEquals([([('node1', 'http://node1:1')], '/foo/', 1), ], self.last_update)
# resubscribe
self.assertEquals(1, param_server.subscribe_param('/foo', ('node1', 'http://node1:1')))
param_server.set_param('/foo', 2, notify_task=self.notify_task)
self.assertEquals([([('node1', 'http://node1:1')], '/foo/', 2), ], self.last_update)
# resubscribe (test canonicalization of parameter name)
self.assertEquals(2, param_server.subscribe_param('/foo/', ('node1', 'http://node1:1')))
param_server.set_param('/foo', 'resub2', notify_task=self.notify_task)
self.assertEquals([([('node1', 'http://node1:1')], '/foo/', 'resub2'), ], self.last_update)
# change the URI
self.assertEquals('resub2', param_server.subscribe_param('/foo', ('node1', 'http://node1b:1')))
self.assertEquals('http://node1b:1', reg_manager.get_node('node1').api)
param_server.set_param('/foo', 3, notify_task=self.notify_task)
self.assertEquals([([('node1', 'http://node1b:1')], '/foo/', 3), ], self.last_update)
# multiple subscriptions to same param
self.assertEquals(3, param_server.subscribe_param('/foo', ('node2', 'http://node2:2')))
self.assertEquals('http://node2:2', reg_manager.get_node('node2').api)
param_server.set_param('/foo', 4, notify_task=self.notify_task)
self.assertEquals([([('node1', 'http://node1b:1'), ('node2', 'http://node2:2')], '/foo/', 4), ], self.last_update)
def test_subscribe_param_tree(self):
from rosmaster.registrations import RegistrationManager
from rosmaster.paramserver import ParamDictionary
# setup node and subscriber data
reg_manager = RegistrationManager(ThreadPoolMock())
param_server = ParamDictionary(reg_manager)
# Test Parameter Tree Subscriptions
# simple case - subscribe and set whole tree
gains = {'p': 'P', 'i': 'I', 'd' : 'D'}
self.assertEquals({}, param_server.subscribe_param('/gains', ('ptnode', 'http://ptnode:1')))
param_server.set_param('/gains', gains.copy(), notify_task=self.notify_task)
self.assertEquals([([('ptnode', 'http://ptnode:1')], '/gains/', gains), ], self.last_update)
# - test with trailing slash
param_server.set_param('/gains/', gains.copy(), notify_task=self.notify_task)
self.assertEquals([([('ptnode', 'http://ptnode:1')], '/gains/', gains), ], self.last_update)
# change params within tree
param_server.set_param('/gains/p', 'P2', notify_task=self.notify_task)
self.assertEquals([([('ptnode', 'http://ptnode:1')], '/gains/p/', 'P2'), ], self.last_update)
param_server.set_param('/gains/i', 'I2', notify_task=self.notify_task)
self.assertEquals([([('ptnode', 'http://ptnode:1')], '/gains/i/', 'I2'), ], self.last_update)
# test overlapping subscriptions
self.assertEquals('P2', param_server.subscribe_param('/gains/p', ('ptnode2', 'http://ptnode2:2')))
param_server.set_param('/gains', gains.copy(), notify_task=self.notify_task)
self.assertEquals([([('ptnode', 'http://ptnode:1')], '/gains/', gains), \
([('ptnode2', 'http://ptnode2:2')], '/gains/p/', 'P'), \
], self.last_update)
# - retest with trailing slash on subscribe
self.last_update = None
self.assertEquals('P', param_server.subscribe_param('/gains/p/', ('ptnode2', 'http://ptnode2:2')))
param_server.set_param('/gains', gains.copy(), notify_task=self.notify_task)
self.assertEquals([([('ptnode', 'http://ptnode:1')], '/gains/', gains), \
([('ptnode2', 'http://ptnode2:2')], '/gains/p/', 'P'), \
], self.last_update)
# test with overlapping (change to sub param)
param_server.set_param('/gains/p', 'P3', notify_task=self.notify_task)
        # - this is a bit over-specified, as a more optimal param server could use one update
ptnode2 = ([('ptnode2', 'http://ptnode2:2')], '/gains/p/', 'P3')
ptnode = ([('ptnode', 'http://ptnode:1')], '/gains/p/', 'P3')
self.assertTrue(len(self.last_update) == 2)
self.assertTrue(ptnode2 in self.last_update)
self.assertTrue(ptnode in self.last_update)
# virtual deletion: subscribe to subparam, parameter tree reset
self.last_update = None
param_server.set_param('/gains2', gains.copy(), notify_task=self.notify_task)
self.assertEquals('P', param_server.subscribe_param('/gains2/p/', ('ptnode3', 'http://ptnode3:3')))
# - erase the sub parameters
param_server.set_param('/gains2', {}, notify_task=self.notify_task)
self.assertEquals([([('ptnode3', 'http://ptnode3:3')], '/gains2/p/', {}), ], self.last_update)
#Final test: test subscription to entire tree
self.last_update = None
param_server.delete_param('/gains')
param_server.delete_param('/gains2')
self.assertEquals({}, param_server.get_param('/'))
self.assertEquals({}, param_server.subscribe_param('/', ('allnode', 'http://allnode:1')))
param_server.set_param('/one', 1, notify_task=self.notify_task)
self.assertEquals([([('allnode', 'http://allnode:1')], '/one/', 1), ], self.last_update)
param_server.set_param('/two', 2, notify_task=self.notify_task)
self.assertEquals([([('allnode', 'http://allnode:1')], '/two/', 2), ], self.last_update)
param_server.set_param('/foo/bar', 'bar', notify_task=self.notify_task)
self.assertEquals([([('allnode', 'http://allnode:1')], '/foo/bar/', 'bar'), ], self.last_update)
# verify that subscribe_param works with parameter deletion
def test_subscribe_param_deletion(self):
from rosmaster.registrations import RegistrationManager
from rosmaster.paramserver import ParamDictionary
# setup node and subscriber data
reg_manager = RegistrationManager(ThreadPoolMock())
param_server = ParamDictionary(reg_manager)
        # subscribe to, then delete, a parameter
self.assertEquals({}, param_server.subscribe_param('/foo', ('node1', 'http://node1:1')))
param_server.set_param('/foo', 1, notify_task=self.notify_task)
param_server.delete_param('/foo', notify_task=self.notify_task)
self.assertEquals([([('node1', 'http://node1:1')], '/foo/', {}), ], self.last_update)
# subscribe to and delete whole tree
gains = {'p': 'P', 'i': 'I', 'd' : 'D'}
self.assertEquals({}, param_server.subscribe_param('/gains', ('deltree', 'http://deltree:1')))
param_server.set_param('/gains', gains.copy(), notify_task=self.notify_task)
param_server.delete_param('/gains', notify_task=self.notify_task)
self.assertEquals([([('deltree', 'http://deltree:1')], '/gains/', {}), ], self.last_update)
# subscribe to and delete params within subtree
self.assertEquals({}, param_server.subscribe_param('/gains2', ('deltree2', 'http://deltree2:2')))
param_server.set_param('/gains2', gains.copy(), notify_task=self.notify_task)
param_server.delete_param('/gains2/p', notify_task=self.notify_task)
self.assertEquals([([('deltree2', 'http://deltree2:2')], '/gains2/p/', {}), ], self.last_update)
param_server.delete_param('/gains2/i', notify_task=self.notify_task)
self.assertEquals([([('deltree2', 'http://deltree2:2')], '/gains2/i/', {}), ], self.last_update)
param_server.delete_param('/gains2', notify_task=self.notify_task)
self.assertEquals([([('deltree2', 'http://deltree2:2')], '/gains2/', {}), ], self.last_update)
# delete parent tree
k = '/ns1/ns2/ns3/key'
self.assertEquals({}, param_server.subscribe_param(k, ('del_parent', 'http://del_parent:1')))
param_server.set_param(k, 1, notify_task=self.notify_task)
param_server.delete_param('/ns1/ns2', notify_task=self.notify_task)
self.assertEquals([([('del_parent', 'http://del_parent:1')], '/ns1/ns2/ns3/key/', {}), ], self.last_update)
def test_unsubscribe_param(self):
from rosmaster.registrations import RegistrationManager
from rosmaster.paramserver import ParamDictionary
# setup node and subscriber data
reg_manager = RegistrationManager(ThreadPoolMock())
param_server = ParamDictionary(reg_manager)
# basic test
self.last_update = None
self.assertEquals({}, param_server.subscribe_param('/foo', ('node1', 'http://node1:1')))
param_server.set_param('/foo', 1, notify_task=self.notify_task)
self.assertEquals([([('node1', 'http://node1:1')], '/foo/', 1), ], self.last_update)
# - return value is actually generated by Registrations
code, msg, val = param_server.unsubscribe_param('/foo', ('node1', 'http://node1:1'))
self.assertEquals(1, code)
self.assertEquals(1, val)
self.last_update = None
param_server.set_param('/foo', 2, notify_task=self.notify_task)
self.assertEquals(None, self.last_update)
# - repeat the unsubscribe
code, msg, val = param_server.unsubscribe_param('/foo', ('node1', 'http://node1:1'))
self.assertEquals(1, code)
self.assertEquals(0, val)
self.last_update = None
param_server.set_param('/foo', 2, notify_task=self.notify_task)
self.assertEquals(None, self.last_update)
# verify that stale unsubscribe has no effect on active subscription
self.last_update = None
self.assertEquals({}, param_server.subscribe_param('/bar', ('barnode', 'http://barnode:1')))
param_server.set_param('/bar', 3, notify_task=self.notify_task)
self.assertEquals([([('barnode', 'http://barnode:1')], '/bar/', 3), ], self.last_update)
code, msg, val = param_server.unsubscribe_param('/foo', ('barnode', 'http://notbarnode:1'))
self.assertEquals(1, code)
self.assertEquals(0, val)
param_server.set_param('/bar', 4, notify_task=self.notify_task)
self.assertEquals([([('barnode', 'http://barnode:1')], '/bar/', 4), ], self.last_update)
def _set_param(self, ctx, my_state, test_vals, param_server):
ctx = make_global_ns(ctx)
for type, vals in test_vals:
try:
caller_id = ns_join(ctx, "node")
count = 0
for val in vals:
key = ns_join(caller_id, "%s-%s"%(type,count))
param_server.set_param(key, val)
self.assert_(param_server.has_param(key))
true_key = ns_join(ctx, key)
my_state[true_key] = val
count += 1
            except Exception:
                self.fail("set_param failed on type[%s], val[%s]" % (type, val))
#self._check_param_state(my_state)
def _check_param_state(self, param_server, my_state):
for (k, v) in my_state.items():
assert param_server.has_param(k)
#print "verifying parameter %s"%k
try:
v2 = param_server.get_param(k)
            except Exception:
                raise Exception("Exception raised while calling param_server.get_param(%s): %s" % (k, traceback.format_exc()))
self.assertEquals(v, v2)
param_names = my_state.keys()
ps_param_names = param_server.get_param_names()
assert not set(param_names) ^ set(ps_param_names), "parameter server keys do not match local: %s"%(set(param_names)^set(ps_param_names))
# test_has_param: test has_param API
def test_has_param(self):
from rosmaster.paramserver import ParamDictionary
param_server = ParamDictionary(None)
self.failIf(param_server.has_param('/new_param'))
param_server.set_param('/new_param', 1)
self.assert_(param_server.has_param('/new_param'))
# test with param in sub-namespace
self.failIf(param_server.has_param('/sub/sub2/new_param2'))
# - verify that parameter tree does not exist yet (#587)
for k in ['/sub/sub2/', '/sub/sub2', '/sub/', '/sub']:
self.failIf(param_server.has_param(k))
param_server.set_param('/sub/sub2/new_param2', 1)
self.assert_(param_server.has_param('/sub/sub2/new_param2'))
# - verify that parameter tree now exists (#587)
for k in ['/sub/sub2/', '/sub/sub2', '/sub/', '/sub']:
self.assert_(param_server.has_param(k))
## test ^param naming, i.e. upwards-looking get access
## @param self
def test_search_param(self):
from rosmaster.paramserver import ParamDictionary
param_server = ParamDictionary(None)
caller_id = '/node'
# vals are mostly identical, save some randomness. we want
# identical structure in order to stress lookup rules
val1 = { 'level1_p1': random.randint(0, 10000),
'level1_p2' : { 'level2_p2': random.randint(0, 10000) }}
val2 = { 'level1_p1': random.randint(0, 10000),
'level1_p2' : { 'level2_p2': random.randint(0, 10000) }}
val3 = { 'level1_p1': random.randint(0, 10000),
'level1_p2' : { 'level2_p2': random.randint(0, 10000) }}
val4 = { 'level1_p1': random.randint(0, 10000),
'level1_p2' : { 'level2_p2': random.randint(0, 10000) }}
full_dict = {}
# test invalid input
for k in ['', None, '~param']:
try:
param_server.search_param('/level1/level2', k)
self.fail("param_server search should have failed on [%s]"%k)
except ValueError: pass
for ns in ['', None, 'relative', '~param']:
try:
param_server.search_param(ns, 'param')
self.fail("param_server search should have failed on %s"%k)
except ValueError: pass
# set the val parameter at four levels so we can validate search
# - set val1
self.failIf(param_server.has_param('/level1/param'))
self.failIf(param_server.search_param('/level1/node', 'param'))
param_server.set_param('/level1/param', val1)
# - test param on val1
for ns in ['/level1/node', '/level1/level2/node', '/level1/level2/level3/node']:
self.assertEquals('/level1/param', param_server.search_param(ns, 'param'), "failed with ns[%s]"%ns)
self.assertEquals('/level1/param/', param_server.search_param(ns, 'param/'))
self.assertEquals('/level1/param/level1_p1', param_server.search_param(ns, 'param/level1_p1'))
self.assertEquals('/level1/param/level1_p2/level2_p2', param_server.search_param(ns, 'param/level1_p2/level2_p2'))
self.assertEquals(None, param_server.search_param('/root', 'param'))
self.assertEquals(None, param_server.search_param('/root', 'param/'))
# - set val2
self.failIf(param_server.has_param('/level1/level2/param'))
param_server.set_param('/level1/level2/param', val2)
# - test param on val2
for ns in ['/level1/level2/node', '/level1/level2/level3/node', '/level1/level2/level3/level4/node']:
self.assertEquals('/level1/level2/param', param_server.search_param(ns, 'param'))
self.assertEquals('/level1/level2/param/', param_server.search_param(ns, 'param/'))
self.assertEquals('/level1/param', param_server.search_param('/level1/node', 'param'))
self.assertEquals('/level1/param/', param_server.search_param('/level1/node', 'param/'))
self.assertEquals(None, param_server.search_param('/root', 'param'))
# - set val3
self.failIf(param_server.has_param('/level1/level2/level3/param'))
param_server.set_param('/level1/level2/level3/param', val3)
# - test param on val3
for ns in ['/level1/level2/level3/node', '/level1/level2/level3/level4/node']:
self.assertEquals('/level1/level2/level3/param', param_server.search_param(ns, 'param'))
self.assertEquals('/level1/level2/param', param_server.search_param('/level1/level2/node', 'param'))
self.assertEquals('/level1/param', param_server.search_param('/level1/node', 'param'))
# test subparams before we set val4 on the root
# - test looking for param/sub_param
self.assertEquals(None, param_server.search_param('/root', 'param'))
self.assertEquals(None, param_server.search_param('/root', 'param/level1_p1'))
self.assertEquals(None, param_server.search_param('/not/level1/level2/level3/level4/node', 'param/level1_p1'))
tests = [
('/level1/node', '/level1/param/'),
('/level1/level2/', '/level1/level2/param/'),
('/level1/level2', '/level1/level2/param/'),
('/level1/level2/node', '/level1/level2/param/'),
('/level1/level2/notlevel3', '/level1/level2/param/'),
('/level1/level2/notlevel3/node', '/level1/level2/param/'),
('/level1/level2/level3/level4', '/level1/level2/level3/param/'),
('/level1/level2/level3/level4/', '/level1/level2/level3/param/'),
('/level1/level2/level3/level4/node', '/level1/level2/level3/param/'),
]
for ns, pbase in tests:
self.assertEquals(pbase+'level1_p1',
param_server.search_param(ns, 'param/level1_p1'))
retval = param_server.search_param(ns, 'param/level1_p2/level2_p2')
self.assertEquals(pbase+'level1_p2/level2_p2', retval,
"failed with ns[%s] pbase[%s]: %s"%(ns, pbase, retval))
# - set val4 on the root
self.failIf(param_server.has_param('/param'))
param_server.set_param('/param', val4)
self.assertEquals('/param', param_server.search_param('/root', 'param'))
self.assertEquals('/param', param_server.search_param('/notlevel1/node', 'param'))
self.assertEquals('/level1/param', param_server.search_param('/level1/node', 'param'))
self.assertEquals('/level1/param', param_server.search_param('/level1', 'param'))
self.assertEquals('/level1/param', param_server.search_param('/level1/', 'param'))
# make sure that partial match works
val5 = { 'level1_p1': random.randint(0, 10000),
'level1_p2' : { }}
self.failIf(param_server.has_param('/partial1/param'))
param_server.set_param('/partial1/param', val5)
self.assertEquals('/partial1/param', param_server.search_param('/partial1', 'param'))
self.assertEquals('/partial1/param/level1_p1',
param_server.search_param('/partial1', 'param/level1_p1'))
# - this is the important check, should return key even if it doesn't exist yet based on stem match
self.assertEquals('/partial1/param/non_existent',
param_server.search_param('/partial1', 'param/non_existent'))
self.assertEquals('/partial1/param/level1_p2/non_existent',
param_server.search_param('/partial1', 'param/level1_p2/non_existent'))
# test_get_param: test basic getParam behavior. Value encoding verified separately by testParamValues
def test_get_param(self):
from rosmaster.paramserver import ParamDictionary
param_server = ParamDictionary(None)
val = random.randint(0, 10000)
full_dict = {}
# very similar to has param sequence
self.failIf(param_server.has_param('/new_param'))
self.failIf(param_server.has_param('/new_param/'))
self.assertGetParamFail(param_server, '/new_param')
param_server.set_param('/new_param', val)
full_dict['new_param'] = val
self.assertEquals(val, param_server.get_param('/new_param'))
self.assertEquals(val, param_server.get_param('/new_param/'))
# - test homonym
self.assertEquals(val, param_server.get_param('/new_param//'))
# test full get
self.assertEquals(full_dict, param_server.get_param('/'))
# test with param in sub-namespace
val = random.randint(0, 10000)
self.failIf(param_server.has_param('/sub/sub2/new_param2'))
self.assertGetParamFail(param_server, '/sub/sub2/new_param2')
param_server.set_param('/sub/sub2/new_param2', val)
full_dict['sub'] = {'sub2': { 'new_param2': val }}
self.assertEquals(val, param_server.get_param('/sub/sub2/new_param2'))
# - test homonym
self.assertEquals(val, param_server.get_param('/sub///sub2/new_param2/'))
# test full get
self.assertEquals(full_dict, param_server.get_param('/'))
        # test that the parameter server supports namespace-get (#587)
val1 = random.randint(0, 10000)
val2 = random.randint(0, 10000)
val3 = random.randint(0, 10000)
for k in ['/gains/P', '/gains/I', '/gains/D', '/gains']:
self.assertGetParamFail(param_server, k)
self.failIf(param_server.has_param(k))
param_server.set_param('/gains/P', val1)
param_server.set_param('/gains/I', val2)
param_server.set_param('/gains/D', val3)
pid = {'P': val1, 'I': val2, 'D': val3}
full_dict['gains'] = pid
self.assertEquals(pid,
param_server.get_param('/gains'))
self.assertEquals(pid,
param_server.get_param('/gains/'))
self.assertEquals(full_dict,
param_server.get_param('/'))
self.failIf(param_server.has_param('/ns/gains/P'))
self.failIf(param_server.has_param('/ns/gains/I'))
self.failIf(param_server.has_param('/ns/gains/D'))
self.failIf(param_server.has_param('/ns/gains'))
param_server.set_param('/ns/gains/P', val1)
param_server.set_param('/ns/gains/I', val2)
param_server.set_param('/ns/gains/D', val3)
full_dict['ns'] = {'gains': pid}
self.assertEquals(pid,
param_server.get_param('/ns/gains'))
self.assertEquals({'gains': pid},
param_server.get_param('/ns/'))
self.assertEquals({'gains': pid},
param_server.get_param('/ns'))
self.assertEquals(full_dict,
param_server.get_param('/'))
def test_delete_param(self):
from rosmaster.paramserver import ParamDictionary
param_server = ParamDictionary(None)
try:
param_server.delete_param('/fake')
self.fail("delete_param of non-existent should have failed")
except: pass
try:
param_server.delete_param('/')
self.fail("delete_param of root should have failed")
except: pass
param_server.set_param('/foo', 'foo')
param_server.set_param('/bar', 'bar')
self.assert_(param_server.has_param('/foo'))
self.assert_(param_server.has_param('/bar'))
param_server.delete_param('/foo')
self.failIf(param_server.has_param('/foo'))
# - test with trailing slash
param_server.delete_param('/bar/')
self.failIf(param_server.has_param('/bar'))
# test with namespaces
param_server.set_param("/sub/key/x", 1)
param_server.set_param("/sub/key/y", 2)
try:
param_server.delete_param('/sub/key/z')
self.fail("delete_param of non-existent should have failed")
except: pass
try:
param_server.delete_param('/sub/sub2/z')
self.fail("delete_param of non-existent should have failed")
except: pass
self.assert_(param_server.has_param('/sub/key/x'))
self.assert_(param_server.has_param('/sub/key/y'))
self.assert_(param_server.has_param('/sub/key'))
param_server.delete_param('/sub/key')
self.failIf(param_server.has_param('/sub/key'))
self.failIf(param_server.has_param('/sub/key/x'))
self.failIf(param_server.has_param('/sub/key/y'))
# test with namespaces (dictionary vals)
param_server.set_param('/sub2', {'key': { 'x' : 1, 'y' : 2}})
self.assert_(param_server.has_param('/sub2/key/x'))
self.assert_(param_server.has_param('/sub2/key/y'))
self.assert_(param_server.has_param('/sub2/key'))
param_server.delete_param('/sub2/key')
self.failIf(param_server.has_param('/sub2/key'))
self.failIf(param_server.has_param('/sub2/key/x'))
self.failIf(param_server.has_param('/sub2/key/y'))
# test with namespaces: treat value as if its a namespace
# - try to get the dictionary-of-dictionary code to fail
# by descending a value key as if it is a namespace
param_server.set_param('/a', 'b')
self.assert_(param_server.has_param('/a'))
try:
param_server.delete_param('/a/b/c')
            self.fail("should have raised KeyError")
except: pass
# test_set_param: test basic set_param behavior. Value encoding verified separately by testParamValues
def test_set_param(self):
from rosmaster.paramserver import ParamDictionary
param_server = ParamDictionary(None)
caller_id = '/node'
val = random.randint(0, 10000)
# verify error behavior with root
try:
param_server.set_param('/', 1)
self.fail("ParamDictionary allowed root to be set to non-dictionary")
except: pass
# very similar to has param sequence
self.failIf(param_server.has_param('/new_param'))
param_server.set_param('/new_param', val)
self.assertEquals(val, param_server.get_param('/new_param'))
self.assertEquals(val, param_server.get_param('/new_param/'))
self.assert_(param_server.has_param('/new_param'))
# test with param in sub-namespace
val = random.randint(0, 10000)
self.failIf(param_server.has_param('/sub/sub2/new_param2'))
param_server.set_param('/sub/sub2/new_param2', val)
self.assertEquals(val, param_server.get_param('/sub/sub2/new_param2'))
# test with param type mutation
vals = ['a', {'a': 'b'}, 1, 1., 'foo', {'c': 'd'}, 4, {'a': {'b': 'c'}}, 3]
for v in vals:
param_server.set_param('/multi/multi_param', v)
self.assertEquals(v, param_server.get_param('/multi/multi_param'))
# - set value within subtree that mutates higher level value
param_server.set_param('/multi2/multi_param', 1)
self.assertEquals(1, param_server.get_param('/multi2/multi_param'))
param_server.set_param('/multi2/multi_param/a', 2)
self.assertEquals(2, param_server.get_param('/multi2/multi_param/a'))
self.assertEquals({'a': 2}, param_server.get_param('/multi2/multi_param/'))
param_server.set_param('/multi2/multi_param/a/b', 3)
self.assertEquals(3, param_server.get_param('/multi2/multi_param/a/b'))
self.assertEquals({'b': 3}, param_server.get_param('/multi2/multi_param/a/'))
self.assertEquals({'a': {'b': 3}}, param_server.get_param('/multi2/multi_param/'))
        # test that the parameter server supports namespace-set (#587)
self.failIf(param_server.has_param('/gains/P'))
self.failIf(param_server.has_param('/gains/I'))
self.failIf(param_server.has_param('/gains/D'))
self.failIf(param_server.has_param('/gains'))
pid = {'P': random.randint(0, 10000), 'I': random.randint(0, 10000), 'D': random.randint(0, 10000)}
param_server.set_param('/gains', pid)
self.assertEquals(pid, param_server.get_param('/gains'))
self.assertEquals(pid['P'], param_server.get_param('/gains/P'))
self.assertEquals(pid['I'], param_server.get_param('/gains/I'))
self.assertEquals(pid['D'], param_server.get_param('/gains/D'))
subns = {'gains1': pid, 'gains2': pid}
param_server.set_param('/ns', subns)
self.assertEquals(pid['P'], param_server.get_param('/ns/gains1/P'))
self.assertEquals(pid['I'], param_server.get_param('/ns/gains1/I'))
self.assertEquals(pid['D'], param_server.get_param('/ns/gains1/D'))
self.assertEquals(pid, param_server.get_param('/ns/gains1'))
self.assertEquals(pid, param_server.get_param('/ns/gains2'))
self.assertEquals(subns, param_server.get_param('/ns/'))
# test empty dictionary set
param_server.set_param('/ns', {})
# - param should still exist
self.assert_(param_server.has_param('/ns/'))
# - value should remain dictionary
self.assertEquals({}, param_server.get_param('/ns/'))
# - value2 below /ns/ should be erased
self.failIf(param_server.has_param('/ns/gains1'))
self.failIf(param_server.has_param('/ns/gains1/P'))
# verify that root can be set and that it erases all values
param_server.set_param('/', {})
self.failIf(param_server.has_param('/new_param'))
param_server.set_param('/', {'foo': 1, 'bar': 2, 'baz': {'a': 'a'}})
self.assertEquals(1, param_server.get_param('/foo'))
self.assertEquals(1, param_server.get_param('/foo/'))
self.assertEquals(2, param_server.get_param('/bar'))
self.assertEquals(2, param_server.get_param('/bar/'))
self.assertEquals('a', param_server.get_param('/baz/a'))
self.assertEquals('a', param_server.get_param('/baz/a/'))
    # test_param_values: test storage of all XML-RPC compatible types
def test_param_values(self):
import math
from rosmaster.paramserver import ParamDictionary
param_server = ParamDictionary(None)
test_vals = [
['int', [0, 1024, 2147483647, -2147483647]],
['boolean', [True, False]],
#no longer testing null char
#['string', ['', '\0', 'x', 'hello', ''.join([chr(n) for n in range(0, 255)])]],
['unicode-string', [u'', u'hello', u'Andr\302\202'.encode('utf-8'), u'\377\376A\000n\000d\000r\000\202\000'.encode('utf-16')]],
['string-easy-ascii', [chr(n) for n in range(32, 128)]],
#['string-mean-ascii-low', [chr(n) for n in range(9, 10)]], #separate for easier book-keeping
#['string-mean-ascii-low', [chr(n) for n in range(1, 31)]], #separate for easier book-keeping
#['string-mean-signed', [chr(n) for n in range(129, 256)]],
['string', ['', 'x', 'hello-there', 'new\nline', 'tab\t']],
['double', [0.0, math.pi, -math.pi, 3.4028235e+38, -3.4028235e+38]],
#TODO: microseconds?
['datetime', [datetime.datetime(2005, 12, 6, 12, 13, 14), datetime.datetime(1492, 12, 6, 12, 13, 14)]],
['array', [[], [1, 2, 3], ['a', 'b', 'c'], [0.0, 0.1, 0.2, 2.0, 2.1, -4.0],
[1, 'a', True], [[1, 2, 3], ['a', 'b', 'c'], [1.0, 2.1, 3.2]]]
],
]
print("Putting parameters onto the server")
# put our params into the parameter server
contexts = ['', 'scope1', 'scope/subscope1', 'scope/sub1/sub2']
my_state = {}
failures = []
for ctx in contexts:
self._set_param(ctx, my_state, test_vals, param_server)
self._check_param_state(param_server, my_state)
print("Deleting all of our parameters")
# delete all of our parameters
count = 0
for key in list(my_state.keys()):
count += 1
param_server.delete_param(key)
del my_state[key]
# far too intensive to check every time
if count % 50 == 0:
self._check_param_state(param_server, my_state)
self._check_param_state(param_server, my_state)
def assertGetParamFail(self, param_server, param):
try:
param_server.get_param(param)
self.fail("get_param[%s] did not raise KeyError"%(param))
except KeyError: pass
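
# Minimal standalone entry point. This is an assumption added for convenience:
# upstream ROS normally runs this suite through rosunit/rostest rather than
# plain unittest, so treat this as a sketch of how to execute it directly.
if __name__ == '__main__':
    unittest.main()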
sveetch/boussole | tests/100_compiler/021_source_map.py

# -*- coding: utf-8 -*-
import os
import io
import json
from boussole.conf.model import Settings
def test_source_map_path_001(compiler, temp_builds_dir):
"""
    Check the source map path from 'sourceMappingURL' with a simple path.
"""
basic_settings = Settings(initial={
"SOURCES_PATH": ".",
"TARGET_PATH": "css",
"SOURCE_MAP": True,
"OUTPUT_STYLES": "compact",
})
basedir = temp_builds_dir.join("compiler_source_map_path_001").strpath
sourcedir = os.path.normpath(
os.path.join(basedir, basic_settings.SOURCES_PATH)
)
targetdir = os.path.normpath(
os.path.join(basedir, basic_settings.TARGET_PATH)
)
os.makedirs(sourcedir)
os.makedirs(targetdir)
src = os.path.join(sourcedir, "app.scss")
dst = os.path.join(targetdir, "app.css")
src_map = os.path.join(targetdir, "app.map")
# Create sample source to compile
with io.open(src, "w", encoding="utf-8") as f:
f.write("""#content{ color:#ff0000; font-weight:bold; }""")
# Compile
success, message = compiler.safe_compile(basic_settings, src, dst)
assert os.path.exists(dst)
assert os.path.exists(src_map)
with io.open(dst, "r", encoding="utf-8") as f:
content = f.read()
with io.open(src_map, "r", encoding="utf-8") as f:
sourcemap = json.load(f)
# Assert compiled file is ok
assert content == (
"""#content { color: #ff0000; font-weight: bold; }\n\n"""
"""/*# sourceMappingURL=app.map */"""
)
    # Drop keys we don't care about for this test
del sourcemap["version"]
del sourcemap["mappings"]
del sourcemap["names"]
# Assert source map is ok
assert sourcemap == {
"file": "app.css",
"sources": [
"../app.scss"
],
}
def test_source_map_path_002(compiler, temp_builds_dir):
"""
Check about source map path from "sourceMappingURL" with CSS dir below
Sass source dir
"""
basic_settings = Settings(initial={
"SOURCES_PATH": "scss",
"TARGET_PATH": "project/css",
"SOURCE_MAP": True,
"OUTPUT_STYLES": "compact",
})
basedir = temp_builds_dir.join("compiler_source_map_path_002").strpath
sourcedir = os.path.normpath(
os.path.join(basedir, basic_settings.SOURCES_PATH)
)
targetdir = os.path.normpath(
os.path.join(basedir, basic_settings.TARGET_PATH)
)
os.makedirs(sourcedir)
os.makedirs(targetdir)
src = os.path.join(sourcedir, "app.scss")
dst = os.path.join(targetdir, "app.css")
src_map = os.path.join(targetdir, "app.map")
# Create sample source to compile
with io.open(src, "w", encoding="utf-8") as f:
f.write("""#content{ color:#ff0000; font-weight:bold; }""")
# Compile
success, message = compiler.safe_compile(basic_settings, src, dst)
assert os.path.exists(dst)
assert os.path.exists(src_map)
with io.open(dst, "r", encoding="utf-8") as f:
content = f.read()
with io.open(src_map, "r", encoding="utf-8") as f:
sourcemap = json.load(f)
# Assert compiled file is ok
assert content == (
"""#content { color: #ff0000; font-weight: bold; }\n\n"""
"""/*# sourceMappingURL=app.map */"""
)
    # Drop keys we don't care about for this test
del sourcemap["version"]
del sourcemap["mappings"]
del sourcemap["names"]
# Assert source map is ok
assert sourcemap == {
"file": "app.css",
"sources": [
"../../scss/app.scss"
],
}
def test_source_map_content(compiler, temp_builds_dir):
"""
    Check the source map content.
"""
basic_settings = Settings(initial={
"SOURCES_PATH": ".",
"TARGET_PATH": "css",
"SOURCE_MAP": True,
"OUTPUT_STYLES": "compact",
})
basedir = temp_builds_dir.join("compiler_source_map_content").strpath
sourcedir = os.path.normpath(os.path.join(basedir, basic_settings.SOURCES_PATH))
targetdir = os.path.normpath(os.path.join(basedir, basic_settings.TARGET_PATH))
os.makedirs(sourcedir)
os.makedirs(targetdir)
src = os.path.join(sourcedir, "app.scss")
dst = os.path.join(targetdir, "app.css")
src_map = os.path.join(targetdir, "app.map")
# Create sample source to compile
with io.open(src, "w", encoding="utf-8") as f:
f.write("""#content{ color:#ff0000; font-weight:bold; }""")
# Compile
success, message = compiler.safe_compile(basic_settings, src, dst)
assert os.path.exists(dst)
assert os.path.exists(src_map)
with io.open(dst, "r", encoding="utf-8") as f:
content = f.read()
with io.open(src_map, "r", encoding="utf-8") as f:
sourcemap = json.load(f)
# Assert compiled file is ok
assert content == (
"""#content { color: #ff0000; font-weight: bold; }\n\n"""
"""/*# sourceMappingURL=app.map */"""
)
    # Drop the 'version' key since it would cause problems with future
    # libsass versions
del sourcemap["version"]
# Assert source map is ok
assert sourcemap == {
"file": "app.css",
"sources": [
"../app.scss"
],
"mappings": ("AAAA,AAAA,QAAQ,CAAA,EAAE,KAAK,EAAC,OAAO,EAAE,WAAW,EAAC,"
"IAAI,GAAI"),
"names": []
}
google-research/ravens | ravens/tasks/align_box_corner.py

# coding=utf-8
# Copyright 2021 The Ravens Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Aligning task."""
import os
import numpy as np
from ravens.tasks.task import Task
from ravens.utils import utils
class AlignBoxCorner(Task):
"""Aligning task."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.max_steps = 3
def reset(self, env):
super().reset(env)
# Generate randomly shaped box.
box_size = self.get_random_size(0.05, 0.15, 0.05, 0.15, 0.01, 0.06)
# Add corner.
dimx = (box_size[0] / 2 - 0.025 + 0.0025, box_size[0] / 2 + 0.0025)
dimy = (box_size[1] / 2 + 0.0025, box_size[1] / 2 - 0.025 + 0.0025)
corner_template = 'corner/corner-template.urdf'
replace = {'DIMX': dimx, 'DIMY': dimy}
corner_urdf = self.fill_template(corner_template, replace)
corner_size = (box_size[0], box_size[1], 0)
corner_pose = self.get_random_pose(env, corner_size)
env.add_object(corner_urdf, corner_pose, 'fixed')
os.remove(corner_urdf)
# Add possible placing poses.
theta = utils.quatXYZW_to_eulerXYZ(corner_pose[1])[2]
    flip_rot = utils.eulerXYZ_to_quatXYZW((0, 0, theta + np.pi))
    pose1 = (corner_pose[0], flip_rot)
alt_x = (box_size[0] / 2) - (box_size[1] / 2)
alt_y = (box_size[1] / 2) - (box_size[0] / 2)
alt_pos = (alt_x, alt_y, 0)
alt_rot0 = utils.eulerXYZ_to_quatXYZW((0, 0, np.pi / 2))
alt_rot1 = utils.eulerXYZ_to_quatXYZW((0, 0, 3 * np.pi / 2))
pose2 = utils.multiply(corner_pose, (alt_pos, alt_rot0))
pose3 = utils.multiply(corner_pose, (alt_pos, alt_rot1))
# Add box.
box_template = 'box/box-template.urdf'
box_urdf = self.fill_template(box_template, {'DIM': box_size})
box_pose = self.get_random_pose(env, box_size)
box_id = env.add_object(box_urdf, box_pose)
os.remove(box_urdf)
self.color_random_brown(box_id)
# Goal: box is aligned with corner (1 of 4 possible poses).
self.goals.append(([(box_id, (2 * np.pi, None))], np.int32([[1, 1, 1, 1]]),
[corner_pose, pose1, pose2, pose3],
False, True, 'pose', None, 1))
FedoraScientific/salome-paravis | test/VisuPrs/Plot3D/F1.py

# Copyright (C) 2010-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
# This case corresponds to: /visu/Plot3D/F1 case
# Create Plot3D for all data of the given MED file
import sys
from paravistest import datadir, pictureext, get_picture_dir
from presentations import CreatePrsForFile, PrsTypeEnum
import pvserver as paravis
# Create presentations
myParavis = paravis.myParavis
# Directory for saving snapshots
picturedir = get_picture_dir("Plot3D/F1")
file = datadir + "ml.med"
print " --------------------------------- "
print "file ", file
print " --------------------------------- "
print "CreatePrsForFile..."
CreatePrsForFile(myParavis, file, [PrsTypeEnum.PLOT3D], picturedir, pictureext)
jeblair/GitPython | git/test/test_docs.py

# -*- coding: utf-8 -*-
# test_git.py
# Copyright (C) 2008, 2009 Michael Trier ([email protected]) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import os
from git.test.lib import TestBase
from git.test.lib.helper import with_rw_directory
import os.path as osp
class Tutorials(TestBase):
def tearDown(self):
import gc
gc.collect()
# @skipIf(HIDE_WINDOWS_KNOWN_ERRORS, ## ACTUALLY skipped by `git.submodule.base#L869`.
# "FIXME: helper.wrapper fails with: PermissionError: [WinError 5] Access is denied: "
# "'C:\\Users\\appveyor\\AppData\\Local\\Temp\\1\\test_work_tree_unsupportedryfa60di\\master_repo\\.git\\objects\\pack\\pack-bc9e0787aef9f69e1591ef38ea0a6f566ec66fe3.idx") # noqa E501
@with_rw_directory
def test_init_repo_object(self, rw_dir):
# [1-test_init_repo_object]
from git import Repo
join = osp.join
# rorepo is a Repo instance pointing to the git-python repository.
# For all you know, the first argument to Repo is a path to the repository
# you want to work with
repo = Repo(self.rorepo.working_tree_dir)
assert not repo.bare
# ![1-test_init_repo_object]
# [2-test_init_repo_object]
bare_repo = Repo.init(join(rw_dir, 'bare-repo'), bare=True)
assert bare_repo.bare
# ![2-test_init_repo_object]
# [3-test_init_repo_object]
repo.config_reader() # get a config reader for read-only access
with repo.config_writer(): # get a config writer to change configuration
pass # call release() to be sure changes are written and locks are released
# ![3-test_init_repo_object]
# [4-test_init_repo_object]
assert not bare_repo.is_dirty() # check the dirty state
repo.untracked_files # retrieve a list of untracked files
# ['my_untracked_file']
# ![4-test_init_repo_object]
# [5-test_init_repo_object]
cloned_repo = repo.clone(join(rw_dir, 'to/this/path'))
assert cloned_repo.__class__ is Repo # clone an existing repository
assert Repo.init(join(rw_dir, 'path/for/new/repo')).__class__ is Repo
# ![5-test_init_repo_object]
# [6-test_init_repo_object]
with open(join(rw_dir, 'repo.tar'), 'wb') as fp:
repo.archive(fp)
# ![6-test_init_repo_object]
# repository paths
# [7-test_init_repo_object]
assert osp.isdir(cloned_repo.working_tree_dir) # directory with your work files
assert cloned_repo.git_dir.startswith(cloned_repo.working_tree_dir) # directory containing the git repository
assert bare_repo.working_tree_dir is None # bare repositories have no working tree
# ![7-test_init_repo_object]
# heads, tags and references
# heads are branches in git-speak
# [8-test_init_repo_object]
self.assertEqual(repo.head.ref, repo.heads.master, # head is a sym-ref pointing to master
"It's ok if TC not running from `master`.")
self.assertEqual(repo.tags['0.3.5'], repo.tag('refs/tags/0.3.5')) # you can access tags in various ways too
self.assertEqual(repo.refs.master, repo.heads['master']) # .refs provides all refs, ie heads ...
if 'TRAVIS' not in os.environ:
self.assertEqual(repo.refs['origin/master'], repo.remotes.origin.refs.master) # ... remotes ...
self.assertEqual(repo.refs['0.3.5'], repo.tags['0.3.5']) # ... and tags
# ![8-test_init_repo_object]
# create a new head/branch
# [9-test_init_repo_object]
new_branch = cloned_repo.create_head('feature') # create a new branch ...
assert cloned_repo.active_branch != new_branch # which wasn't checked out yet ...
self.assertEqual(new_branch.commit, cloned_repo.active_branch.commit) # pointing to the checked-out commit
# It's easy to let a branch point to the previous commit, without affecting anything else
# Each reference provides access to the git object it points to, usually commits
assert new_branch.set_commit('HEAD~1').commit == cloned_repo.active_branch.commit.parents[0]
# ![9-test_init_repo_object]
# create a new tag reference
# [10-test_init_repo_object]
past = cloned_repo.create_tag('past', ref=new_branch,
message="This is a tag-object pointing to %s" % new_branch.name)
self.assertEqual(past.commit, new_branch.commit) # the tag points to the specified commit
assert past.tag.message.startswith("This is") # and its object carries the message provided
now = cloned_repo.create_tag('now') # This is a tag-reference. It may not carry meta-data
assert now.tag is None
# ![10-test_init_repo_object]
# Object handling
# [11-test_init_repo_object]
assert now.commit.message != past.commit.message
# You can read objects directly through binary streams, no working tree required
assert (now.commit.tree / 'VERSION').data_stream.read().decode('ascii').startswith('2')
# You can traverse trees as well to handle all contained files of a particular commit
file_count = 0
tree_count = 0
tree = past.commit.tree
for item in tree.traverse():
file_count += item.type == 'blob'
tree_count += item.type == 'tree'
assert file_count and tree_count # we have accumulated all directories and files
self.assertEqual(len(tree.blobs) + len(tree.trees), len(tree)) # a tree is iterable on its children
# ![11-test_init_repo_object]
# remotes allow handling push, pull and fetch operations
# [12-test_init_repo_object]
from git import RemoteProgress
class MyProgressPrinter(RemoteProgress):
def update(self, op_code, cur_count, max_count=None, message=''):
print(op_code, cur_count, max_count, cur_count / (max_count or 100.0), message or "NO MESSAGE")
# end
self.assertEqual(len(cloned_repo.remotes), 1) # we have been cloned, so should be one remote
self.assertEqual(len(bare_repo.remotes), 0) # this one was just initialized
origin = bare_repo.create_remote('origin', url=cloned_repo.working_tree_dir)
assert origin.exists()
for fetch_info in origin.fetch(progress=MyProgressPrinter()):
print("Updated %s to %s" % (fetch_info.ref, fetch_info.commit))
# create a local branch at the latest fetched master. We specify the name statically, but you have all
        # information to do it programmatically as well.
bare_master = bare_repo.create_head('master', origin.refs.master)
bare_repo.head.set_reference(bare_master)
assert not bare_repo.delete_remote(origin).exists()
# push and pull behave very similarly
# ![12-test_init_repo_object]
# index
# [13-test_init_repo_object]
self.assertEqual(new_branch.checkout(), cloned_repo.active_branch) # checking out branch adjusts the wtree
self.assertEqual(new_branch.commit, past.commit) # Now the past is checked out
new_file_path = osp.join(cloned_repo.working_tree_dir, 'my-new-file')
open(new_file_path, 'wb').close() # create new file in working tree
cloned_repo.index.add([new_file_path]) # add it to the index
# Commit the changes to deviate masters history
cloned_repo.index.commit("Added a new file in the past - for later merege")
# prepare a merge
master = cloned_repo.heads.master # right-hand side is ahead of us, in the future
        merge_base = cloned_repo.merge_base(new_branch, master)  # allows for a three-way merge
cloned_repo.index.merge_tree(master, base=merge_base) # write the merge result into index
cloned_repo.index.commit("Merged past and now into future ;)",
parent_commits=(new_branch.commit, master.commit))
# now new_branch is ahead of master, which probably should be checked out and reset softly.
# note that all these operations didn't touch the working tree, as we managed it ourselves.
# This definitely requires you to know what you are doing :) !
assert osp.basename(new_file_path) in new_branch.commit.tree # new file is now in tree
master.commit = new_branch.commit # let master point to most recent commit
cloned_repo.head.reference = master # we adjusted just the reference, not the working tree or index
# ![13-test_init_repo_object]
# submodules
# [14-test_init_repo_object]
# create a new submodule and check it out on the spot, setup to track master branch of `bare_repo`
# As our GitPython repository has submodules already that point to github, make sure we don't
# interact with them
for sm in cloned_repo.submodules:
assert not sm.remove().exists() # after removal, the sm doesn't exist anymore
sm = cloned_repo.create_submodule('mysubrepo', 'path/to/subrepo', url=bare_repo.git_dir, branch='master')
# .gitmodules was written and added to the index, which is now being committed
cloned_repo.index.commit("Added submodule")
        assert sm.exists() and sm.module_exists()  # this submodule is definitely available
sm.remove(module=True, configuration=False) # remove the working tree
assert sm.exists() and not sm.module_exists() # the submodule itself is still available
# update all submodules, non-recursively to save time, this method is very powerful, go have a look
cloned_repo.submodule_update(recursive=False)
assert sm.module_exists() # The submodules working tree was checked out by update
# ![14-test_init_repo_object]
@with_rw_directory
def test_references_and_objects(self, rw_dir):
# [1-test_references_and_objects]
import git
repo = git.Repo.clone_from(self._small_repo_url(), osp.join(rw_dir, 'repo'), branch='master')
heads = repo.heads
master = heads.master # lists can be accessed by name for convenience
master.commit # the commit pointed to by head called master
master.rename('new_name') # rename heads
master.rename('master')
# ![1-test_references_and_objects]
# [2-test_references_and_objects]
tags = repo.tags
tagref = tags[0]
tagref.tag # tags may have tag objects carrying additional information
tagref.commit # but they always point to commits
repo.delete_tag(tagref) # delete or
repo.create_tag("my_tag") # create tags using the repo for convenience
# ![2-test_references_and_objects]
# [3-test_references_and_objects]
head = repo.head # the head points to the active branch/ref
master = head.reference # retrieve the reference the head points to
master.commit # from here you use it as any other reference
# ![3-test_references_and_objects]
#
# [4-test_references_and_objects]
log = master.log()
log[0] # first (i.e. oldest) reflog entry
log[-1] # last (i.e. most recent) reflog entry
# ![4-test_references_and_objects]
# [5-test_references_and_objects]
new_branch = repo.create_head('new') # create a new one
new_branch.commit = 'HEAD~10' # set branch to another commit without changing index or working trees
repo.delete_head(new_branch) # delete an existing head - only works if it is not checked out
# ![5-test_references_and_objects]
# [6-test_references_and_objects]
new_tag = repo.create_tag('my_new_tag', message='my message')
# You cannot change the commit a tag points to. Tags need to be re-created
self.failUnlessRaises(AttributeError, setattr, new_tag, 'commit', repo.commit('HEAD~1'))
repo.delete_tag(new_tag)
# ![6-test_references_and_objects]
# [7-test_references_and_objects]
new_branch = repo.create_head('another-branch')
repo.head.reference = new_branch
# ![7-test_references_and_objects]
# [8-test_references_and_objects]
hc = repo.head.commit
hct = hc.tree
hc != hct # @NoEffect
hc != repo.tags[0] # @NoEffect
hc == repo.head.reference.commit # @NoEffect
# ![8-test_references_and_objects]
# [9-test_references_and_objects]
self.assertEqual(hct.type, 'tree') # preset string type, being a class attribute
assert hct.size > 0 # size in bytes
assert len(hct.hexsha) == 40
assert len(hct.binsha) == 20
# ![9-test_references_and_objects]
# [10-test_references_and_objects]
self.assertEqual(hct.path, '') # root tree has no path
assert hct.trees[0].path != '' # the first contained item has one though
self.assertEqual(hct.mode, 0o40000) # trees have the mode of a linux directory
self.assertEqual(hct.blobs[0].mode, 0o100644) # blobs have specific mode, comparable to a standard linux fs
# ![10-test_references_and_objects]
# [11-test_references_and_objects]
hct.blobs[0].data_stream.read() # stream object to read data from
hct.blobs[0].stream_data(open(osp.join(rw_dir, 'blob_data'), 'wb')) # write data to given stream
# ![11-test_references_and_objects]
# [12-test_references_and_objects]
repo.commit('master')
repo.commit('v0.8.1')
repo.commit('HEAD~10')
# ![12-test_references_and_objects]
# [13-test_references_and_objects]
fifty_first_commits = list(repo.iter_commits('master', max_count=50))
assert len(fifty_first_commits) == 50
# this will return commits 21-30 from the commit list as traversed backwards master
ten_commits_past_twenty = list(repo.iter_commits('master', max_count=10, skip=20))
assert len(ten_commits_past_twenty) == 10
assert fifty_first_commits[20:30] == ten_commits_past_twenty
# ![13-test_references_and_objects]
# [14-test_references_and_objects]
headcommit = repo.head.commit
assert len(headcommit.hexsha) == 40
assert len(headcommit.parents) > 0
assert headcommit.tree.type == 'tree'
assert len(headcommit.author.name) != 0
assert isinstance(headcommit.authored_date, int)
assert len(headcommit.committer.name) != 0
assert isinstance(headcommit.committed_date, int)
assert headcommit.message != ''
# ![14-test_references_and_objects]
# [15-test_references_and_objects]
import time
time.asctime(time.gmtime(headcommit.committed_date))
time.strftime("%a, %d %b %Y %H:%M", time.gmtime(headcommit.committed_date))
# ![15-test_references_and_objects]
# [16-test_references_and_objects]
assert headcommit.parents[0].parents[0].parents[0] == repo.commit('master^^^')
# ![16-test_references_and_objects]
# [17-test_references_and_objects]
tree = repo.heads.master.commit.tree
assert len(tree.hexsha) == 40
# ![17-test_references_and_objects]
# [18-test_references_and_objects]
assert len(tree.trees) > 0 # trees are subdirectories
assert len(tree.blobs) > 0 # blobs are files
assert len(tree.blobs) + len(tree.trees) == len(tree)
# ![18-test_references_and_objects]
# [19-test_references_and_objects]
self.assertEqual(tree['smmap'], tree / 'smmap') # access by index and by sub-path
for entry in tree: # intuitive iteration of tree members
print(entry)
blob = tree.trees[0].blobs[0] # let's get a blob in a sub-tree
assert blob.name
assert len(blob.path) < len(blob.abspath)
self.assertEqual(tree.trees[0].name + '/' + blob.name, blob.path) # this is how relative blob path generated
self.assertEqual(tree[blob.path], blob) # you can use paths like 'dir/file' in tree
# ![19-test_references_and_objects]
# [20-test_references_and_objects]
assert tree / 'smmap' == tree['smmap']
assert tree / blob.path == tree[blob.path]
# ![20-test_references_and_objects]
# [21-test_references_and_objects]
# This example shows the various types of allowed ref-specs
assert repo.tree() == repo.head.commit.tree
past = repo.commit('HEAD~5')
assert repo.tree(past) == repo.tree(past.hexsha)
self.assertEqual(repo.tree('v0.8.1').type, 'tree') # yes, you can provide any refspec - works everywhere
# ![21-test_references_and_objects]
# [22-test_references_and_objects]
assert len(tree) < len(list(tree.traverse()))
# ![22-test_references_and_objects]
# [23-test_references_and_objects]
index = repo.index
# The index contains all blobs in a flat list
assert len(list(index.iter_blobs())) == len([o for o in repo.head.commit.tree.traverse() if o.type == 'blob'])
# Access blob objects
for (path, stage), entry in index.entries.items(): # @UnusedVariable
pass
new_file_path = osp.join(repo.working_tree_dir, 'new-file-name')
open(new_file_path, 'w').close()
index.add([new_file_path]) # add a new file to the index
index.remove(['LICENSE']) # remove an existing one
assert osp.isfile(osp.join(repo.working_tree_dir, 'LICENSE')) # working tree is untouched
self.assertEqual(index.commit("my commit message").type, 'commit') # commit changed index
repo.active_branch.commit = repo.commit('HEAD~1') # forget last commit
from git import Actor
author = Actor("An author", "[email protected]")
committer = Actor("A committer", "[email protected]")
# commit by commit message and author and committer
index.commit("my commit message", author=author, committer=committer)
# ![23-test_references_and_objects]
# [24-test_references_and_objects]
from git import IndexFile
# loads a tree into a temporary index, which exists just in memory
IndexFile.from_tree(repo, 'HEAD~1')
# merge two trees three-way into memory
merge_index = IndexFile.from_tree(repo, 'HEAD~10', 'HEAD', repo.merge_base('HEAD~10', 'HEAD'))
# and persist it
merge_index.write(osp.join(rw_dir, 'merged_index'))
# ![24-test_references_and_objects]
# [25-test_references_and_objects]
empty_repo = git.Repo.init(osp.join(rw_dir, 'empty'))
origin = empty_repo.create_remote('origin', repo.remotes.origin.url)
assert origin.exists()
assert origin == empty_repo.remotes.origin == empty_repo.remotes['origin']
origin.fetch() # assure we actually have data. fetch() returns useful information
# Setup a local tracking branch of a remote branch
empty_repo.create_head('master', origin.refs.master) # create local branch "master" from remote "master"
        empty_repo.heads.master.set_tracking_branch(origin.refs.master)  # set local "master" to track remote "master"
empty_repo.heads.master.checkout() # checkout local "master" to working tree
# Three above commands in one:
empty_repo.create_head('master', origin.refs.master).set_tracking_branch(origin.refs.master).checkout()
# rename remotes
origin.rename('new_origin')
# push and pull behaves similarly to `git push|pull`
origin.pull()
origin.push()
# assert not empty_repo.delete_remote(origin).exists() # create and delete remotes
# ![25-test_references_and_objects]
# [26-test_references_and_objects]
assert origin.url == repo.remotes.origin.url
with origin.config_writer as cw:
cw.set("pushurl", "other_url")
# Please note that in python 2, writing origin.config_writer.set(...) is totally safe.
# In py3 __del__ calls can be delayed, thus not writing changes in time.
# ![26-test_references_and_objects]
# [27-test_references_and_objects]
hcommit = repo.head.commit
hcommit.diff() # diff tree against index
hcommit.diff('HEAD~1') # diff tree against previous tree
hcommit.diff(None) # diff tree against working tree
index = repo.index
index.diff() # diff index against itself yielding empty diff
index.diff(None) # diff index against working copy
index.diff('HEAD') # diff index against current HEAD tree
# ![27-test_references_and_objects]
# [28-test_references_and_objects]
# Traverse added Diff objects only
for diff_added in hcommit.diff('HEAD~1').iter_change_type('A'):
print(diff_added)
# ![28-test_references_and_objects]
# [29-test_references_and_objects]
# Reset our working tree 10 commits into the past
past_branch = repo.create_head('past_branch', 'HEAD~10')
repo.head.reference = past_branch
assert not repo.head.is_detached
# reset the index and working tree to match the pointed-to commit
repo.head.reset(index=True, working_tree=True)
# To detach your head, you have to point to a commit directly
repo.head.reference = repo.commit('HEAD~5')
assert repo.head.is_detached
# now our head points 15 commits into the past, whereas the working tree
# and index are 10 commits in the past
# ![29-test_references_and_objects]
# [30-test_references_and_objects]
# checkout the branch using git-checkout. It will fail as the working tree appears dirty
self.failUnlessRaises(git.GitCommandError, repo.heads.master.checkout)
repo.heads.past_branch.checkout()
# ![30-test_references_and_objects]
# [31-test_references_and_objects]
git = repo.git
git.checkout('HEAD', b="my_new_branch") # create a new branch
git.branch('another-new-one')
git.branch('-D', 'another-new-one') # pass strings for full control over argument order
git.for_each_ref() # '-' becomes '_' when calling it
# ![31-test_references_and_objects]
repo.git.clear_cache()
def test_submodules(self):
# [1-test_submodules]
repo = self.rorepo
sms = repo.submodules
assert len(sms) == 1
sm = sms[0]
self.assertEqual(sm.name, 'gitdb') # git-python has gitdb as single submodule ...
self.assertEqual(sm.children()[0].name, 'smmap') # ... which has smmap as single submodule
# The module is the repository referenced by the submodule
assert sm.module_exists() # the module is available, which doesn't have to be the case.
assert sm.module().working_tree_dir.endswith('gitdb')
# the submodule's absolute path is the module's path
assert sm.abspath == sm.module().working_tree_dir
self.assertEqual(len(sm.hexsha), 40) # Its sha defines the commit to checkout
assert sm.exists() # yes, this submodule is valid and exists
# read its configuration conveniently
assert sm.config_reader().get_value('path') == sm.path
self.assertEqual(len(sm.children()), 1) # query the submodule hierarchy
# ![1-test_submodules]
@with_rw_directory
def test_add_file_and_commit(self, rw_dir):
import git
repo_dir = osp.join(rw_dir, 'my-new-repo')
file_name = osp.join(repo_dir, 'new-file')
r = git.Repo.init(repo_dir)
# This function just creates an empty file ...
open(file_name, 'wb').close()
r.index.add([file_name])
r.index.commit("initial commit")
# ![test_add_file_and_commit]
| bsd-3-clause | -603,388,717,642,298,800 | 50.230769 | 196 | 0.607397 | false |
karies/root | tutorials/roofit/rf106_plotdecoration.py | 6 | 2263 | ## \file
## \ingroup tutorial_roofit
## \notebook
## Basic functionality: adding boxes with parameters to RooPlots and decorating with arrows, etc...
##
## \macro_code
##
## \author Clemens Lange, Wouter Verkerke (C++ version)
import ROOT
# Set up model
# ---------------------
# Create observables
x = ROOT.RooRealVar("x", "x", -10, 10)
# Create Gaussian
sigma = ROOT.RooRealVar("sigma", "sigma", 1, 0.1, 10)
mean = ROOT.RooRealVar("mean", "mean", -3, -10, 10)
gauss = ROOT.RooGaussian("gauss", "gauss", x, mean, sigma)
# Generate a sample of 1000 events in x from the Gaussian pdf
data = gauss.generate(ROOT.RooArgSet(x), 1000)
# Fit pdf to data
gauss.fitTo(data)
# Plot p.d.f. and data
# -------------------------------------
# Overlay projection of gauss on data
frame = x.frame(ROOT.RooFit.Name("xframe"), ROOT.RooFit.Title(
"RooPlot with decorations"), ROOT.RooFit.Bins(40))
data.plotOn(frame)
gauss.plotOn(frame)
# Add box with pdf parameters
# -----------------------------------------------------
# Left edge of box starts at 55% of the X axis range
gauss.paramOn(frame, ROOT.RooFit.Layout(0.55))
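# Layout's full signature is (xmin, xmax=0.99, ymax=0.95), all as fractions of
# the frame, so e.g. ROOT.RooFit.Layout(0.55, 0.9, 0.9) would also pull the box
# away from the top-right corner.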
# Add box with data statistics
# -------------------------------------------------------
# X size of box is from 55% to 99% of the X axis range; top of box is at 80%
# of the Y axis range
data.statOn(frame, ROOT.RooFit.Layout(0.55, 0.99, 0.8))
# Add text and arrow
# -----------------------------------
# Add text to frame
txt = ROOT.TText(2, 100, "Signal")
txt.SetTextSize(0.04)
txt.SetTextColor(ROOT.kRed)
frame.addObject(txt)
# Add arrow to frame
arrow = ROOT.TArrow(2, 100, -1, 50, 0.01, "|>")
arrow.SetLineColor(ROOT.kRed)
arrow.SetFillColor(ROOT.kRed)
arrow.SetLineWidth(3)
frame.addObject(arrow)
# Persist frame with all decorations in ROOT file
# ---------------------------------------------------------------------------------------------
f = ROOT.TFile("rf106_plotdecoration.root", "RECREATE")
frame.Write()
f.Close()
# To read back and plot frame with all decorations in clean root session do
# root> TFile f("rf106_plotdecoration.root")
# root> xframe.Draw()
c = ROOT.TCanvas("rf106_plotdecoration", "rf106_plotdecoration", 600, 600)
ROOT.gPad.SetLeftMargin(0.15)
frame.GetYaxis().SetTitleOffset(1.6)
frame.Draw()
c.SaveAs("rf106_plotdecoration.png")
| lgpl-2.1 | -1,037,350,025,528,606,300 | 26.26506 | 99 | 0.615996 | false |
chenchiyuan/yajiong | applications/posts/management/commands/parse_weixin.py | 1 | 1199 | # -*- coding: utf-8 -*-
# __author__ = chenchiyuan
from __future__ import division, unicode_literals, print_function
from django.core.management import BaseCommand
import requests
from bs4 import BeautifulSoup
from applications.posts.models import Post
import time
headers = {
"referer": "http://weixin.sogou.com/",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36",
}
def smart_print(text):
print(text.encode("utf-8"))
class Command(BaseCommand):
def handle(self, *args, **options):
posts = list(Post.objects.all())
for post in posts:
if post.url and not post.content:
try:
self.parse(post)
except Exception, err:
smart_print(err.message)
continue
time.sleep(0.2)
def parse(self, post):
smart_print(post.title)
content = requests.get(post.url, headers=headers).content
soup = BeautifulSoup(content)
page_content_tag = soup.find(id="page-content")
        # extract() must actually be called - assigning the bound method would
        # store garbage in the text field
        post.content = unicode(page_content_tag.extract())
post.save() | bsd-3-clause | 7,479,947,682,873,907,000 | 28.268293 | 141 | 0.614679 | false |
metaist/hebphonics | test/test_unicode.py | 1 | 1048 | #!/usr/bin/env python
# coding: utf-8
"""Unicode tests."""
# pkg
from hebphonics import tokens as T
def test_normalize():
"""normalize unicode symbols"""
want = T.LETTER_ALEF + T.POINT_DAGESH_OR_MAPIQ
test = T.normalize(T.LETTER_ALEF_WITH_MAPIQ)
assert test == want
want = T.LETTER_AYIN
test = T.normalize(T.LETTER_ALTERNATIVE_AYIN)
assert test == want
want = T.LETTER_ALEF
test = T.normalize(T.LETTER_WIDE_ALEF)
assert test == want
want = T.PUNCTUATION_NUN_HAFUKHA
test = T.normalize(T.PUNCTUATION_NUN_HAFUKHA)
assert test == want
def test_names():
"""unicode symbol names"""
test = [T.uniname(char, mode="const") for char in u"בְּ/רֵאשִׁית"]
want = [
"LETTER_BET",
"POINT_DAGESH_OR_MAPIQ",
"POINT_SHEVA",
"SOLIDUS",
"LETTER_RESH",
"POINT_TSERE",
"LETTER_ALEF",
"LETTER_SHIN",
"POINT_SHIN_DOT",
"POINT_HIRIQ",
"LETTER_YOD",
"LETTER_TAV",
]
assert test == want
| mit | -3,226,109,340,321,127,400 | 22.044444 | 70 | 0.580521 | false |
Vaidyanath/tempest | tempest/scenario/utils.py | 1 | 5983 | # Copyright 2013 Hewlett-Packard, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import re
import string
import unicodedata
import testscenarios
import testtools
from tempest import clients
from tempest.common import cred_provider
from tempest.common.utils import misc
from tempest import config
from tempest import exceptions
CONF = config.CONF
@misc.singleton
class ImageUtils(object):
default_ssh_user = 'root'
def __init__(self):
# Load configuration items
self.ssh_users = json.loads(CONF.input_scenario.ssh_user_regex)
self.non_ssh_image_pattern = \
CONF.input_scenario.non_ssh_image_regex
# Setup clients
os = clients.Manager()
self.images_client = os.images_client
self.flavors_client = os.flavors_client
def ssh_user(self, image_id):
_image = self.images_client.get_image(image_id)
for regex, user in self.ssh_users:
# First match wins
if re.match(regex, _image['name']) is not None:
return user
else:
return self.default_ssh_user
def _is_sshable_image(self, image):
return not re.search(pattern=self.non_ssh_image_pattern,
string=str(image['name']))
def is_sshable_image(self, image_id):
_image = self.images_client.get_image(image_id)
return self._is_sshable_image(_image)
def _is_flavor_enough(self, flavor, image):
return image['minDisk'] <= flavor['disk']
def is_flavor_enough(self, flavor_id, image_id):
_image = self.images_client.get_image(image_id)
_flavor = self.flavors_client.get_flavor_details(flavor_id)
return self._is_flavor_enough(_flavor, _image)
@misc.singleton
class InputScenarioUtils(object):
"""
Example usage:
import testscenarios
(...)
load_tests = testscenarios.load_tests_apply_scenarios
class TestInputScenario(manager.ScenarioTest):
scenario_utils = utils.InputScenarioUtils()
scenario_flavor = scenario_utils.scenario_flavors
scenario_image = scenario_utils.scenario_images
scenarios = testscenarios.multiply_scenarios(scenario_image,
scenario_flavor)
def test_create_server_metadata(self):
name = rand_name('instance')
self.servers_client.create_server(name=name,
flavor_ref=self.flavor_ref,
image_ref=self.image_ref)
"""
validchars = "-_.{ascii}{digit}".format(ascii=string.ascii_letters,
digit=string.digits)
def __init__(self):
os = clients.Manager(
cred_provider.get_configured_credentials('user', fill_in=False))
self.images_client = os.images_client
self.flavors_client = os.flavors_client
self.image_pattern = CONF.input_scenario.image_regex
self.flavor_pattern = CONF.input_scenario.flavor_regex
def _normalize_name(self, name):
nname = unicodedata.normalize('NFKD', name).encode('ASCII', 'ignore')
nname = ''.join(c for c in nname if c in self.validchars)
return nname
@property
def scenario_images(self):
"""
:return: a scenario with name and uuid of images
"""
if not CONF.service_available.glance:
return []
if not hasattr(self, '_scenario_images'):
try:
images = self.images_client.list_images()
self._scenario_images = [
(self._normalize_name(i['name']), dict(image_ref=i['id']))
for i in images if re.search(self.image_pattern,
str(i['name']))
]
except Exception:
self._scenario_images = []
return self._scenario_images
@property
def scenario_flavors(self):
"""
:return: a scenario with name and uuid of flavors
"""
if not hasattr(self, '_scenario_flavors'):
try:
flavors = self.flavors_client.list_flavors()
self._scenario_flavors = [
(self._normalize_name(f['name']), dict(flavor_ref=f['id']))
for f in flavors if re.search(self.flavor_pattern,
str(f['name']))
]
except Exception:
self._scenario_flavors = []
return self._scenario_flavors
def load_tests_input_scenario_utils(*args):
"""
Wrapper for testscenarios to set the scenarios to avoid running a getattr
on the CONF object at import.
"""
if getattr(args[0], 'suiteClass', None) is not None:
loader, standard_tests, pattern = args
else:
standard_tests, module, loader = args
try:
scenario_utils = InputScenarioUtils()
scenario_flavor = scenario_utils.scenario_flavors
scenario_image = scenario_utils.scenario_images
except exceptions.InvalidConfiguration:
return standard_tests
for test in testtools.iterate_tests(standard_tests):
setattr(test, 'scenarios', testscenarios.multiply_scenarios(
scenario_image,
scenario_flavor))
return testscenarios.load_tests_apply_scenarios(*args)
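# Typical usage sketch, assuming the standard unittest ``load_tests`` protocol:
# assigning this function at module scope makes the loader multiply the tests
# by the configured image/flavor scenarios:
#
#   load_tests = load_tests_input_scenario_utils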
| apache-2.0 | -6,174,245,329,511,433,000 | 33.988304 | 79 | 0.602206 | false |
laurentb/weboob | modules/ipapi/module.py | 1 | 2091 | # -*- coding: utf-8 -*-
# Copyright(C) 2015 Julien Veyssier
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.geolocip import CapGeolocIp, IpLocation
from weboob.tools.backend import Module
from weboob.browser.browsers import Browser
from weboob.tools.json import json
__all__ = ['IpapiModule']
class IpapiModule(Module, CapGeolocIp):
NAME = 'ipapi'
MAINTAINER = u'Julien Veyssier'
EMAIL = '[email protected]'
VERSION = '2.1'
LICENSE = 'AGPLv3+'
DESCRIPTION = u"IP-API Geolocation API"
BROWSER = Browser
def get_location(self, ipaddr):
res = self.browser.location(u'http://ip-api.com/json/%s' % ipaddr)
jres = json.loads(res.text)
if "status" in jres and jres["status"] == "fail":
raise Exception("IPAPI failure : %s" % jres["message"])
iploc = IpLocation(ipaddr)
iploc.city = u'%s'%jres['city']
iploc.region = u'%s'%jres['regionName']
iploc.zipcode = u'%s'%jres['zip']
iploc.country = u'%s'%jres['country']
if jres['lat'] != '':
iploc.lt = float(jres['lat'])
else:
iploc.lt = 0.0
if jres['lon'] != '':
iploc.lg = float(jres['lon'])
else:
iploc.lg = 0.0
#iploc.host = 'NA'
#iploc.tld = 'NA'
if 'isp' in jres:
iploc.isp = u'%s'%jres['isp']
return iploc
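    # Usage sketch (the module is normally instantiated by the weboob core,
    # so the construction below is only illustrative):
    #   backend = IpapiModule(weboob)
    #   loc = backend.get_location('8.8.8.8')
    #   print(loc.city, loc.country)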
| lgpl-3.0 | 1,998,065,906,981,396,000 | 31.671875 | 77 | 0.636538 | false |
BdEINSALyon/adhesion | adhesion/settings.py | 1 | 3232 | """
Django settings for adhesion project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n2t^s0x1j$*+jjj#7&n#x715n#s(o_ejmo3p&w7hi2s^%(9uiz'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
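# NOTE: SECURE_PROXY_SSL_HEADER is only safe when every request passes through
# a proxy that unconditionally sets (or strips) X-Forwarded-Proto; otherwise a
# client could spoof the header and appear to be on HTTPS.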
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'adhesion.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'adhesion.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| gpl-3.0 | 3,136,685,628,160,387,600 | 25.491803 | 91 | 0.690285 | false |
McStasMcXtrace/McCode | tools/Python/mcgui/viewclasses.py | 1 | 42464 | '''
mcgui UI.
'''
import sys
import os
import re
from widgets import *
from PyQt5 import Qsci, QtWidgets
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from mccodelib import mccode_config
from mccodelib.utils import ComponentParser, ComponentParInfo
'''
View class containing windows and dialogs as delegates.
All ui widget updates are handled here.
'''
class McView(object):
def __init__(self):
# create main window
self.mw = McMainWindow()
self.mw.ui.lblInstrument.setText("")
self.ew = McCodeEditorWindow()
# a hack to enable mw to close ew
        self.mw.ew = self.ew
        # the start-simulation dialog is created lazily in showStartSimDialog
        self.__ssd = None
def initMainWindowDynamicElements(self, args, callback):
self.mw.initDynamicView(args, callback)
def initCodeEditorComponentMenu(self, args):
self.ew.initComponentMenu(args)
def showMainWindow(self):
self.mw.show()
self.mw.raise_()
def showErrorDialogue(self, title, message):
msg = QtWidgets.QMessageBox()
msg.setIcon(QtWidgets.QMessageBox.Critical)
msg.setWindowTitle(title)
msg.setText(message)
msg.exec_()
def showCodeEditorWindow(self, instr):
self.ew.initCodeEditor(instr)
self.ew.show()
self.ew.raise_()
def closeCodeEditorWindow(self):
return self.ew.close()
''' Update UI data
'''
def updateInstrument(self, labels, instr):
''' labels: <instrument path>, <work dir> '''
self.mw.ui.lblInstrument.setText(labels[0])
if str(labels[0]) == '':
self.__ssd = None
self.ew.initCodeEditor(instr)
def updateStatus(self, text=''):
self.mw.ui.statusbar.showMessage(text)
def updateLog(self, text='', error=False, gui=False):
if error:
self.mw.ui.txtbrwMcgui.setTextColor(QtGui.QColor('red'))
elif gui:
self.mw.ui.txtbrwMcgui.setTextColor(QtGui.QColor('blue'))
else:
self.mw.ui.txtbrwMcgui.setTextColor(QtGui.QColor('green'))
self.mw.ui.txtbrwMcgui.append(text)
def disableRunBtn(self):
self.mw.ui.btnRun.setEnabled(False)
def enableRunBtn(self):
self.mw.ui.btnRun.setEnabled(True)
def updateSimState(self, state=[]):
enableRun = state[0] == 'True'
enablePlot = state[1] == 'True'
        enableInterrupt = False
if len(state)>2:
enableInterrupt = state[2] == 'True'
# clear start simulation dialog
if not enableRun:
self.__ssd = None
# set enabled/disabled states on menus and buttons
ui = self.mw.ui
ui.btnRun.setEnabled(enableRun)
ui.btnEdit.setEnabled(enableRun)
ui.btnPlot.setEnabled(enablePlot)
if enableRun:
ui.lblInstrument.setStyleSheet('color: green')
else:
ui.lblInstrument.setStyleSheet('color: red')
ui.actionClose_Instrument.setEnabled(enableRun)
ui.actionPlot.setEnabled(enablePlot)
ui.actionDisplay.setEnabled(enableRun)
ui.actionDisplay_2d.setEnabled(enableRun)
ui.actionRun_Simulation.setEnabled(enableRun)
ui.actionSave_As.setEnabled(enableRun)
ui.actionOpen_instrument.setEnabled(True)
ui.actionNew_Instrument.setEnabled(True)
ui.menuNew_From_Template.setEnabled(True)
ui.actionEdit_Instrument.setEnabled(enableRun)
ui.actionCompile_Instrument.setEnabled(enableRun)
ui.actionCompile_Instrument_MPI.setEnabled(enableRun)
# set action of run button:
if enableInterrupt:
ui.btnRun.setText('Halt')
ui.btnRun.setToolTip('Interrupt current simulation')
ui.actionRun_Simulation.setEnabled(False)
ui.actionCompile_Instrument.setEnabled(False)
ui.actionCompile_Instrument_MPI.setEnabled(False)
ui.actionClose_Instrument.setEnabled(False)
ui.actionSave_As.setEnabled(False)
ui.actionOpen_instrument.setEnabled(False)
ui.actionNew_Instrument.setEnabled(False)
ui.menuNew_From_Template.setEnabled(False)
else:
ui.btnRun.setText('Run...')
ui.btnRun.setToolTip('')
''' UI actions
'''
def showOpenInstrumentDlg(self, lookDir):
dlg = QtWidgets.QFileDialog()
dlg.setDirectory(lookDir)
        dlg.setNameFilters([mccode_config.configuration["MCCODE"]+" instruments (*.instr)", "All files (*)"])
dlg.selectNameFilter(mccode_config.configuration["MCCODE"]+" instruments (*.instr)")
if dlg.exec_():
return dlg.selectedFiles()[0]
def showOpenPlotDirDlg(self, lookDir):
dlg = QtWidgets.QFileDialog()
dlg.setDirectory(lookDir)
        dlg.setOption(QtWidgets.QFileDialog.ShowDirsOnly)
return dlg.getExistingDirectory(self.mw,"Open a folder")
def showChangeWorkDirDlg(self, lookDir):
dlg = QtWidgets.QFileDialog()
dlg.setFileMode(QtWidgets.QFileDialog.Directory)
dlg.setDirectory(lookDir)
if dlg.exec_():
return dlg.selectedFiles()[0]
def showStartSimDialog(self, params, comps, mcdisplays, mcplots):
if self.__ssd == None:
self.__ssd = McStartSimDialog()
self.__ssd.createParamsWidgets(params)
self.__ssd.set_components(comps)
self.__ssd.set_mcdisplays(mcdisplays)
self.__ssd.set_mcplots(mcplots)
if self.__ssd.exec_():
return self.__ssd.getValues()
else:
return None, None, None, None, None
def showNewInstrDialog(self, lookdir):
dlg = QtWidgets.QFileDialog()
dlg.setDirectory(lookdir)
        dlg.setNameFilter(mccode_config.configuration["MCCODE"]+" instruments (*.instr)")
return dlg.getSaveFileName(parent=None, caption='Create Instrument file...')[0]
def showNewInstrFromTemplateDialog(self, instr):
dlg = QtWidgets.QFileDialog()
return dlg.getSaveFileName(parent=None, caption='Create Instrument file from Template...', directory=instr)[0]
def showSaveAsDialog(self, instr):
dlg = QtWidgets.QFileDialog()
dlg.setFileMode(QtWidgets.QFileDialog.AnyFile)
return dlg.getSaveFileName(parent=None, caption='Save Instrument As...', directory=instr)[0]
def showConfigDialog(self):
dlg = McConfigDialog()
dlg.initConfigData(None)
dlg.exec_()
def showAboutBox(self, text):
if mccode_config.configuration["MCCODE"] == "mcstas":
prefix = "mc"
else:
prefix = "mx"
QtWidgets.QMessageBox.about(self.mw, prefix+'gui: About', text)
''' Main Window widgets wrapper class
Events callbacks are hooked elsewhere.
'''
class McMainWindow(QtWidgets.QMainWindow):
def __init__(self, parent=None):
super(McMainWindow, self).__init__(parent)
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.ui.dynamicMenuClicked = QtCore.pyqtSignal(str)
# set main window title depending on flavour
prefix = 'mx'
if mccode_config.configuration["MCCODE"] == "mcstas":
prefix = 'mc'
self.setWindowTitle(prefix + 'gui-py')
self.ui.actionMcdoc.setText(prefix + "doc Component Reference")
self.ui.actionMcDocCurrent.setText(prefix +"doc current instrument")
mccode = mccode_config.configuration["MCCODE"]
self.ui.actionMcstas_User_Manual.setText(mccode + " User Manual")
self.ui.actionMcstas_Component_Manual.setText(mccode + " Component Manual")
self.ui.actionMcstas_Web_Page.setText(mccode + " Web Page")
self.ui.lblIcon.setPixmap(QtGui.QPixmap(os.path.join(mccode_config.configuration["MCCODE_LIB_DIR"],'tools','Python',prefix + 'gui',mccode + "-py.png")))
def initDynamicView(self, args, callback):
''' - args ([str, [], []]): list of triplets consisting of site name,
[instrument names], [instrument file paths]
- callback (func(str)): function which takes a single string parameter, call with full path
name of selected instrument
'''
class InfoHider:
def __init__(self, itm, cb):
self.itm = itm
self.cb = cb
def handle(self):
self.cb(self.itm)
self.ui.menuNew_From_Template.clear()
for i in range(len(args)):
site = args[i][0]
instrs = args[i][1]
instrs_fulpath = args[i][2]
menu = self.ui.menuNew_From_Template.addMenu(site)
for j in range(len(instrs)):
action = menu.addAction(instrs[j])
h = InfoHider(instrs_fulpath[j], callback)
action.h = h
action.triggered.connect(h.handle)
def add_conf_menu(self,label):
confmenu = QtWidgets.QAction(self)
self.ui.menuFile.addAction(confmenu)
confmenu.setText(QtWidgets.QApplication.translate("MainWindow", label, None))
confmenu.setToolTip(QtWidgets.QApplication.translate("MainWindow", "mccode " + label, None))
return confmenu
def closeEvent(self, event):
''' allow close down only if editor window did not reject '''
if not self.ew.close():
event.ignore()
''' Code editor window widgets wrapper class
'''
class McCodeEditorWindow(QtWidgets.QMainWindow):
volatileDataExists = False
volatileDataTransition = QtCore.pyqtSignal(bool)
saveRequest = QtCore.pyqtSignal(str)
def __init__(self, parent=None):
super(McCodeEditorWindow, self).__init__(parent)
self.ui = Ui_EditorWindow()
self.ui.setupUi(self)
sheight = QtWidgets.QDesktopWidget().availableGeometry().height()
if sheight < 1080:
self.resize(920, sheight)
# dynamically added widgets
self.__scintilla = None
self.__edtSearch = None
self.__initScintilla()
self.__initCallbacks()
self.__initSearchbar()
def __initSearchbar(self):
''' set focus, search action events '''
def __sbEventFilter(subject, object, event):
''' focus event handler '''
edt = QtWidgets.QLineEdit()
edt = subject
# handle focus on
if event.type() == QtCore.QEvent.FocusIn:
if edt.text() == 'search...':
edt.setText('')
font = QtGui.QFont()
font.setItalic(False)
self.__edtSearch.setFont(font)
edt.setStyleSheet("color: black;")
# handle focus off
elif event.type() == QtCore.QEvent.FocusOut:
if edt.text() == '':
font = QtGui.QFont()
font.setItalic(True)
self.__edtSearch.setFont(font)
edt.setStyleSheet("color: grey;")
edt.setText('search...')
# handle enter keypress (search)
elif event.type() == QtCore.QEvent.KeyPress:
# return & enter
if event.key() in [0x01000004, 0x01000005]:
self.__search(subject.text())
# escape
elif event.key() == 0x01000000:
subject.setText('')
self.__scintilla.setFocus()
# tab
#elif event.key() == 0x01000001:
# print "tab"
return False
self.__edtSearch = QtWidgets.QLineEdit()
self.__edtSearch.setObjectName("edtSearch")
font = QtGui.QFont()
font.setItalic(True)
self.__edtSearch.setFont(font)
self.__edtSearch.setText("search...")
# set events
edts = self.__edtSearch
edts.eventFilter = lambda o, e: __sbEventFilter(edts, o, e)
edts.installEventFilter(edts)
self.ui.vlayout.addWidget(self.__edtSearch)
def __search(self, search):
''' implements a search action in scintilla '''
# get cursor position
i, j = self.__scintilla.getCursorPosition()
curs = self.__scintilla.positionFromLineIndex(i, j)
# get match position after cursor
text = self.__scintilla.text()
pos = str(text)[curs:].find(search)
# get match position before cursor
if pos == -1:
pos = str(text).find(search)
else:
pos = pos + curs
if not pos == -1:
self.__setCursorPos(pos + len(search))
self.__selectText(pos, pos + len(search))
def __setCursorPos(self, pos):
k, l = self.__scintilla.lineIndexFromPosition(pos)
self.__scintilla.setCursorPosition(k, l)
def __selectText(self, pos1, pos2):
if not pos1 < pos2:
raise Exception('__selectText: pos2 must be larger than pos1.')
self.__scintilla.selectAll(False)
k1, l1 = self.__scintilla.lineIndexFromPosition(pos1)
k2, l2 = self.__scintilla.lineIndexFromPosition(pos2)
self.__scintilla.setSelection(k1, l1, k2, l2)
def initComponentMenu(self, args):
''' args - [category, comp_names[], comp_parsers[]]
'''
class InfoHider:
def __init__(self, comp_parser, cb):
self.comp_parser = comp_parser
self.cb = cb
def handle(self):
self.cb(self.comp_parser)
all_comp_names = []
for i in range(len(args)):
category = args[i][0]
comp_names = args[i][1]
for name in comp_names:
all_comp_names.append(name)
comp_parsers = args[i][2]
menu = self.ui.menuInsert.addMenu(category)
for j in range(len(comp_names)):
h = InfoHider(comp_parsers[j], self.__handleComponentClicked)
action = menu.addAction(comp_names[j])
action.h = h
action.triggered.connect(h.handle)
self.setLexerComps(self.__scintilla.__myApi, all_comp_names)
def initCodeEditor(self, instr):
if instr != '':
self.__scintilla.setText(open(instr, encoding='utf-8', errors='ignore').read())
else:
self.__scintilla.setText('')
self.setWindowTitle(mccode_config.configuration["MCCODE"] + ": " + instr)
self.assumeDataSaved()
def assumeDataSaved(self):
self.volatileDataTransition.emit(False)
def save(self):
''' external save text hook '''
self.__handleSaveAction()
def closeEvent(self, event):
''' hook to display a "save changes?" dialog if there are unsaved changes
'''
if self.volatileDataExists:
reply = QtWidgets.QMessageBox.question(self,
'The instrument has been modified.',
'Do you want to save changes?',
QtWidgets.QMessageBox.Save | QtWidgets.QMessageBox.Discard | QtWidgets.QMessageBox.Cancel,
QtWidgets.QMessageBox.Cancel)
if reply == QtWidgets.QMessageBox.Save:
self.saveRequest.emit(self.__scintilla.text())
self.assumeDataSaved()
event.accept()
elif reply == QtWidgets.QMessageBox.Discard:
self.assumeDataSaved()
event.accept()
elif reply == QtWidgets.QMessageBox.Cancel:
event.ignore()
else:
event.accept()
def __handleComponentClicked(self, comp_parser):
dlg = McInsertComponentDialog()
dlg.initComponentData(comp_parser)
if dlg.exec_():
comp_type, inst_name, params, atrel = dlg.getValues()
else:
return
text = "COMPONENT " + inst_name + " = " + comp_type + "("
i_max = len(params)-1
for i in range(len(params)):
p = params[i]
text += "\n " + p[0] + "=" + p[1]
if i < i_max:
text += ", "
text += ")"
text += "\nAT (" + atrel[0] + ", " + atrel[1] + ", " + atrel[2] + ") RELATIVE " + atrel[3]
# NOTE: the ROTATED line may be missing
if len(atrel) > 4:
text += "\nROTATED (" + atrel[4] + ", " + atrel[5] + ", " + atrel[6] + ") RELATIVE " + atrel[7]
self.__scintilla.insert(text)
# set cursor position
i, j = self.__scintilla.getCursorPosition()
pos = self.__scintilla.positionFromLineIndex(i, j)
k, l = self.__scintilla.lineIndexFromPosition(pos + len(text))
self.__scintilla.setCursorPosition(k, l)
def __initScintilla(self):
# delete text editor placeholder
scintilla = Qsci.QsciScintilla(self)
########################
# setup scintilla
# set default font
font = QtGui.QFont()
font.setFamily('Deja Vu Sans Mono')
font.setFixedPitch(True)
font.setPointSize(11)
# brace matching
scintilla.setBraceMatching(Qsci.QsciScintilla.SloppyBraceMatch)
# set lexer
lexer = Qsci.QsciLexerCPP()
lexer.setDefaultFont(font)
lexer.setFont(font)
        scintilla.setLexer(lexer)
scintilla.__myLexer = lexer # save reference to retain scope
# auto-completion api
scintilla.__myApi = Qsci.QsciAPIs(lexer)
scintilla.setAutoCompletionThreshold(1)
scintilla.setAutoCompletionSource(Qsci.QsciScintilla.AcsAPIs)
# remove horizontal scrollbar
scintilla.SendScintilla(Qsci.QsciScintilla.SCI_SETHSCROLLBAR, 0)
# display default line numbers
fm = QtGui.QFontMetrics(font)
scintilla.setMarginWidth(0, fm.width( "00000" ))
scintilla.setMarginLineNumbers(0, True)
########################
# insert widget into main vlayout
self.ui.vlayout.addWidget(scintilla)
self.__scintilla = scintilla
@staticmethod
def setLexerComps(api, all_comp_names):
api.clear()
# add mcstas meta keywords
api.add("ABSOLUTE")
api.add("AT")
api.add("COMPONENT")
api.add("DECLARE")
api.add("DEFINE")
api.add("DEFINITION")
api.add("END")
api.add("MCDISPLAY")
api.add("FINALLY")
api.add("INITIALIZE")
api.add("INSTRUMENT")
api.add("OUTPUT")
api.add("PARAMETERS")
api.add("RELATIVE")
api.add("ROTATED")
api.add("PREVIOUS")
api.add("SETTING")
api.add("STATE")
api.add("POLARISATION")
api.add("TRACE")
api.add("SHARE")
api.add("EXTEND")
api.add("GROUP")
api.add("SAVE")
api.add("JUMP")
api.add("WHEN")
api.add("NEXT")
api.add("ITERATE")
api.add("MYSELF")
api.add("COPY")
api.add("SPLIT")
api.add("REMOVABLE")
api.add("DEPENDENCY")
# add components
for name in all_comp_names:
api.add(name)
api.prepare()
def __initCallbacks(self):
# connect menu items to corresponding scintilla slots
ui = self.ui
ui.actionUndo.triggered.connect(self.__scintilla.undo)
ui.actionRedo.triggered.connect(self.__scintilla.redo)
ui.actionSelect_All.triggered.connect(lambda: self.__scintilla.selectAll()) # why is l. expr. needed here?
ui.actionCopy.triggered.connect(self.__scintilla.copy)
ui.actionCut.triggered.connect(self.__scintilla.cut)
ui.actionPaste.triggered.connect(self.__scintilla.paste)
ui.actionSave.triggered.connect(self.__handleSaveAction)
ui.actionClose_Instrument_Editor.triggered.connect(self.close)
ui.actionFind.triggered.connect(lambda: self.__edtSearch.setFocus())
ui.actionComponent_Browser.triggered.connect(self.__handleComponentBrowser)
# TODO: create a ctr-a on a menu to __scintilla.selectAll(bool select)
def __keyEventFilterFct(subject, object, event):
if event.type() == QtCore.QEvent.KeyRelease:
# ctrl-q
if event.key() == 81 and int(event.modifiers()) == 67108864:
self.close()
return False
self.eventFilter = lambda o, e: __keyEventFilterFct(self.ui, o, e)
self.installEventFilter(self)
# connect "text changed" signal to our handler to detect unsaved changes
self.__scintilla.textChanged.connect(self.__handleTextChanged)
self.volatileDataTransition.connect(self.__handleVolatileDataPresent)
def __handleComponentBrowser(self):
dlg = QtWidgets.QFileDialog()
dlg.setDirectory(mccode_config.configuration["MCCODE_LIB_DIR"])
        dlg.setNameFilter(mccode_config.configuration["MCCODE"]+" component files (*.comp)")
if dlg.exec_():
comp_file = dlg.selectedFiles()[0]
parser = ComponentParser(comp_file)
self.__handleComponentClicked(parser)
def __handleTextChanged(self):
if not self.volatileDataExists:
self.volatileDataTransition.emit(True)
def __handleSaveAction(self):
if self.volatileDataExists:
self.saveRequest.emit(self.__scintilla.text())
def __handleVolatileDataPresent(self, volatileDataExists=False):
if volatileDataExists:
title = self.windowTitle()
self.setWindowTitle('*' + title)
else:
title = str(self.windowTitle())
self.setWindowTitle(title.replace('*', ''))
self.volatileDataExists = volatileDataExists
self.ui.actionSave.setEnabled(volatileDataExists)
''' Start simulation widgets wrapper class
    Programmatically alters the dialog to match the current instrument.
    Supports reuse of widgets from sim to sim, to retain input values.
    Works as a dialog - call exec_(), probe for return behavior and
state to proceed.
'''
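# Usage sketch (mirrors McView.showStartSimDialog above):
#   dlg = McStartSimDialog()
#   dlg.createParamsWidgets(params)
#   dlg.set_components(comps)
#   if dlg.exec_():
#       fixed_params, params, inspect, mcdisplay, mcplot = dlg.getValues()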
class McStartSimDialog(QtWidgets.QDialog):
def __init__(self, parent=None):
super(McStartSimDialog, self).__init__(parent)
self._last_inspect_compnames = None
self._last_mcdisplays = None
self._last_mcplots = None
self.ui = Ui_dlgStartSim()
self.ui.setupUi(self)
self.ui.btnStart.clicked.connect(self.accept)
self.ui.btnCancel.clicked.connect(self.reject)
self._set_inspect_visible(False)
self.ui.cbxSimTrace.currentIndexChanged.connect(lambda i: self._set_inspect_visible(i))
def set_components(self, compnames):
if compnames == self._last_inspect_compnames:
return
self._last_inspect_compnames = compnames
self.ui.cbxInspect.clear()
self.ui.cbxInspect.addItem("-- None --")
for c in compnames:
self.ui.cbxInspect.addItem(c)
def set_mcdisplays(self, mcdisplays):
if mcdisplays == self._last_mcdisplays:
return
self._last_mcdisplays = mcdisplays
self.ui.cbxMcdisplays.clear()
for m in mcdisplays:
self.ui.cbxMcdisplays.addItem(m)
def set_mcplots(self, mcplots):
if mcplots == self._last_mcplots:
return
self._last_mcplots = mcplots
self.ui.cbxAutoPlotters.clear()
self.ui.cbxAutoPlotters.addItem("-- None --")
for m in mcplots:
self.ui.cbxAutoPlotters.addItem(m)
def _set_inspect_visible(self, sim_run_idx):
visible = False
if sim_run_idx == 1:
visible = True
self.ui.lblInspect.setVisible(visible)
self.ui.cbxInspect.setVisible(visible)
self.ui.lblMcdisplays.setVisible(visible)
self.ui.cbxMcdisplays.setVisible(visible)
self.ui.lblAutoPlot.setVisible(not visible)
self.ui.cbxAutoPlotters.setVisible(not visible)
def getValues(self):
''' Return values:
fixed_params[]:
0 - simulation = 0, trace = 1
1 - neutron count (int)
2 - steps count (int)
3 - gravity (bool)
4 - clustering 0/1/2 (single/MPI/MPIrecompile) (int)
5 - clustering # nodes (int)
6 - random seed (int)
            7 - output directory (str)
            8 - autoplot enabled (bool)
params[]:
[<par_name>, <value>] pairs
'''
# simulation or trace option
p0 = None
if self.ui.cbxSimTrace.currentIndex() == 0:
p0 = SimTraceEnum.SIM
else:
p0 = SimTraceEnum.TRACE
# neutron count
p1 = self.ui.edtNeutronCnt.text()
# steps
p2 = self.ui.edtSteps.text()
# gravity
p3 = self.ui.cbxGravity.currentIndex() == 1
# clustering option
p4 = None
if self.ui.cbxClustering.currentIndex() == 0:
p4 = ClusteringEnum.SINGLE
if self.ui.cbxClustering.currentIndex() == 1:
p4 = ClusteringEnum.MPI
if self.ui.cbxClustering.currentIndex() == 2:
p4 = ClusteringEnum.MPI_RC
        # clustering: number of nodes
p5 = self.ui.edtNodes.text()
# seed
p6 = self.ui.edtRandomSeed.text()
# output dir
p7 = str(self.ui.edtOutputDir.text())
# autoplot
mcplot = None
idx = self.ui.cbxAutoPlotters.currentIndex()
p8 = idx > 0
if idx > 0:
mcplot = self._last_mcplots[idx-1]
fixed_params =[p0, p1, p2, p3, p4, p5, p6, p7, p8]
# get dynamic params
params = []
for w in self._wParams:
p = []
p.append(str(w[0].text()).rstrip(':'))
p.append(str(w[1].text()))
params.append(p)
inspect = None
idx = self.ui.cbxInspect.currentIndex()
if idx > 0:
            inspect = self._last_inspect_compnames[idx - 1]  # skip the "-- None --" entry
mcdisplay = None
idx = self.ui.cbxMcdisplays.currentIndex()
if p0 == SimTraceEnum.TRACE:
mcdisplay = self._last_mcdisplays[idx]
return fixed_params, params, inspect, mcdisplay, mcplot
_wParams = []
__oldParams = []
def createParamsWidgets(self, params):
# this logics keeps params values of existing/previous non-dummy widgets, for value reuse
self.__oldParams = []
for w in self._wParams:
old_name = 'no_re_match'
name_match = re.search('(.*):', w[0].text())
if name_match:
old_name = name_match.group(1)
old_value = w[1].text()
self.__oldParams.append([old_name, old_value])
# clear the containing grid
grd = self.ui.gridGrid
for i in reversed(range(grd.count())):
grd.itemAt(i).widget().setParent(None)
# prepare new params widgets
self._wParams = []
# insert custom params widgets
i = -1
x = 0
y = 0
p_index = 0
for p in params:
# get param name, value
name = p[0]
value = p[1]
# reuse old param values, if matching position in grid (p_index) and param name
if len(self.__oldParams) > p_index:
old_name = self.__oldParams[p_index][0]
old_value = self.__oldParams[p_index][1]
if str(old_name) == str(name):
value = old_value
i = i + 1
x = i % (int(mccode_config.configuration["GUICOLS"])*2)
            y = i // (int(mccode_config.configuration["GUICOLS"])*2)  # integer division, py2/py3 safe
lbl = QtWidgets.QLabel(self.ui.gbxGrid)
lbl.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
lbl.setObjectName("lbl" + name)
lbl.setText(name + ':')
self.ui.gridGrid.addWidget(lbl, y, x, 1, 1)
i = i + 1
x = i % (int(mccode_config.configuration["GUICOLS"])*2)
edt = QtWidgets.QLineEdit(self.ui.gbxGrid)
edt.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
edt.setObjectName("edt" + name)
edt.setText(value)
self.ui.gridGrid.addWidget(edt, y, x, 1, 1)
self._wParams.append([lbl, edt])
p_index += 1
self.ui.btnStart.setFocus()
class SimTraceEnum:
SIM = 0
TRACE = 1
class ClusteringEnum:
SINGLE = 0
MPI = 1
MPI_RC = 2
''' Insert component dialog widgets wrapper class.
    Programmatically alters the dialog to match the selected component.
    Works as a dialog - call exec_(), probe for return behavior and
    state to proceed.
'''
class McInsertComponentDialog(QtWidgets.QDialog):
__standard_le_style = None
def __init__(self, parent=None):
super(McInsertComponentDialog, self).__init__(parent)
self.ui = Ui_dlgInsertComponent()
self.ui.setupUi(self)
self.ui.btnInsert.clicked.connect(self.accept)
self.ui.btnCancel.clicked.connect(self.reject)
self.__standard_le_style = self.ui.edtInstanceName.styleSheet()
def accept(self):
# detect missing default values
dirty = False
# mark/unmark params dynamic lineedits
first_params_hit = True
for w in self._wParams:
if w[1].text() == '':
w[1].setStyleSheet("border: 3px solid red;")
dirty = True
if first_params_hit:
w[1].setFocus()
first_params_hit = False
else:
w[1].setStyleSheet(self.__standard_le_style)
# mark/unmark instance name lineedit
if self.ui.edtInstanceName.text() == '':
self.ui.edtInstanceName.setStyleSheet("border: 3px solid red;")
if not dirty:
self.ui.edtInstanceName.setFocus()
dirty = True
else:
self.ui.edtInstanceName.setStyleSheet(self.__standard_le_style)
# exit if all lineedit text boxes are filled out
if not dirty:
super(McInsertComponentDialog, self).accept()
_wParams = []
def initComponentData(self, comp_parser):
# parse component info
comp_parser.parse()
# window title
self.setWindowTitle("Component: " + comp_parser.name)
# info & description docstrings - make sure newlines work in case doc includes html
info_description = comp_parser.info + '\n\n' + comp_parser.description
info_description_html = str(info_description).replace('\n', '<br>')
self.ui.lblDescr.setText(info_description_html)
# clear params grd
grd = self.ui.gridParameters
for i in reversed(range(grd.count())):
grd.itemAt(i).widget().setParent(None)
# populate and init params grd
self._wParams = None
self._wParams = []
for i in range(len(comp_parser.pars)):
par = ComponentParInfo(comp_parser.pars[i])
if par.par_name == "string filename":
par.par_name = "filename"
# i'th line/row of the UI
y = i
# parameter name label
x = 0
lbl = QtWidgets.QLabel()
lbl.setObjectName("lbl" + par.par_name)
lbl.setText(par.par_name + ':')
self.ui.gridParameters.addWidget(lbl, y, x, 1, 1)
# parameter value line-edit
x = 1
edt = QtWidgets.QLineEdit()
edt.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
edt.setObjectName("edt" + par.par_name)
edt.defval = par.default_value
self._initTbwFocusEvents(edt)
if par.par_name == "filename":
edt.setText('"' + par.default_value + '"')
else:
edt.setText(par.default_value)
self.ui.gridParameters.addWidget(edt, y, x, 1, 1)
# save widget references for use in self.getValues (also save the par default value)
self._wParams.append([lbl, edt, edt.text()])
# parameter docstring label
x = 2
lbl = QtWidgets.QLabel()
lbl.setWordWrap(True)
lbl.setObjectName("lbl" + par.par_name + "_doc")
lbl.setText(par.doc_and_unit)
self.ui.gridParameters.addWidget(lbl, y, x, 1, 1)
# fix tab-order
q = self.ui.btnInsert
for i in range(len(self._wParams)):
w = self._wParams[i][1]
self.setTabOrder(q, w)
q = w
self.setTabOrder(q, self.ui.edtAtX)
# init instance-name field with an example, mark the text
tbx = self.ui.edtInstanceName
tbx.setText(str.lower(comp_parser.name))
tbx.setFocus()
tbx.selectAll()
def _initTbwFocusEvents(self, w):
''' we assume that w_edt has the member defval, which contains the default value '''
def _wEventFilter(subject, object, event):
''' focus event handler '''
edt = QtWidgets.QLineEdit()
edt = subject
# handle focus on
if event.type() == QtCore.QEvent.FocusIn:
if edt.text() == edt.defval:
edt.setText('')
font = QtGui.QFont()
font.setItalic(False)
edt.setFont(font)
edt.setStyleSheet("color: black;")
edt.setCursorPosition(0)
# handle focus off
elif event.type() == QtCore.QEvent.FocusOut:
if edt.text() == '':
font = QtGui.QFont()
font.setItalic(True)
edt.setFont(font)
edt.setStyleSheet("color: grey;")
edt.setText(edt.defval)
elif edt.text() == edt.defval:
edt.setText(edt.defval)
font = QtGui.QFont()
font.setItalic(True)
edt.setFont(font)
edt.setStyleSheet("color: grey;")
return False
# init
font = QtGui.QFont()
font.setItalic(True)
w.setStyleSheet("color: grey;")
w.setFont(font)
w.setText(w.defval)
# set events
w.eventFilter = lambda o, e: _wEventFilter(w, o, e)
w.installEventFilter(w)
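        # (Qt's QLineEdit.setPlaceholderText() offers similar grey hint text,
        # but it clears as soon as the user types; the filter above instead
        # keeps the default value as real, editable text, hence the manual
        # grey/italic styling.)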
def getValues(self):
'''
inst_name : contents of instance name field
params : list of [name, value] pairs matching component parameters
'''
if not self.ui.cbxVerbose.isChecked():
return self.__getValuesReduced()
# instance name
inst_name = self.ui.edtInstanceName.text()
m = re.match("Component: (.*)", self.windowTitle())
comp_type = m.group(1)
# get dynamic params
params = []
for w in self._wParams:
p = []
p.append(str(w[0].text()).rstrip(':'))
p.append(str(w[1].text()))
params.append(p)
# get values for AT(x,y,z), RELATIVE <posrel>, ROTATED(x,y,z), RELATIVE <rotrel>
atrel = []
atrel.append(self.ui.edtAtX.text())
atrel.append(self.ui.edtAtY.text())
atrel.append(self.ui.edtAtZ.text())
atrel.append(self.ui.edtAtRel.text())
atrel.append(self.ui.edtRotX.text())
atrel.append(self.ui.edtRotY.text())
atrel.append(self.ui.edtRotZ.text())
atrel.append(self.ui.edtRotRel.text())
return comp_type, inst_name, params, atrel
def __getValuesReduced(self):
'''
inst_name : contents of instance name field
params : list of [name, value] pairs matching component parameters
'''
# instance name
inst_name = self.ui.edtInstanceName.text()
m = re.match("Component: (.*)", self.windowTitle())
comp_type = m.group(1)
# get dynamic params
params = []
for w in self._wParams:
# proceed if typed value differs from the default value (also counting empty default values)
if w[1].text() != w[2]:
p = []
p.append(str(w[0].text()).rstrip(':'))
p.append(str(w[1].text()))
params.append(p)
# get values for AT(x,y,z), RELATIVE <posrel>, ROTATED(x,y,z), RELATIVE <rotrel>
atrel = []
atrel.append(self.ui.edtAtX.text())
atrel.append(self.ui.edtAtY.text())
atrel.append(self.ui.edtAtZ.text())
atrel.append(self.ui.edtAtRel.text())
if self.ui.edtRotX.text() != '0' or self.ui.edtRotY.text() != '0' or self.ui.edtRotZ.text() != '0':
atrel.append(self.ui.edtRotX.text())
atrel.append(self.ui.edtRotY.text())
atrel.append(self.ui.edtRotZ.text())
atrel.append(self.ui.edtRotRel.text())
return comp_type, inst_name, params, atrel
''' mcgui config widgets wrapper class
'''
class McConfigDialog(QtWidgets.QDialog):
__standard_le_style = None
def __init__(self, parent=None):
super(McConfigDialog, self).__init__(parent)
self.ui = Ui_dlgConfig()
self.ui.setupUi(self)
self.ui.btnOk.clicked.connect(self.accept)
self.ui.btnSave.clicked.connect(self.save)
self.ui.btnCancel.clicked.connect(self.reject)
# set labels mccode-prefix
prefix = mccode_config.get_mccode_prefix()
self.ui.lblMcrun.setText("%srun" % prefix)
self.ui.lblMcplot.setText("%splot" % prefix)
self.ui.lblMcdisplay.setText("%sdisplay" % prefix)
def initConfigData(self, args):
# comboboxes
mcrun_lst, mcplot_lst, mcdisplay_lst = mccode_config.get_options()
# mcrun combobox
selected_val = mccode_config.configuration["MCRUN"]
i = 0
for val in mcrun_lst:
self.ui.cbxMcrun.addItem(val)
if val == selected_val:
self.ui.cbxMcrun.setCurrentIndex(i)
i += 1
self.ui.cbxMcrun.conf_var = "MCRUN"
self.ui.cbxMcrun.conf_org_value = mccode_config.configuration["MCRUN"]
self.ui.cbxMcrun.conf_options_lst = mcrun_lst
# mcplot combobox
selected_val = mccode_config.configuration["MCPLOT"]
i = 0
for val in mcplot_lst:
self.ui.cbxMcPlot.addItem(val)
if val == selected_val:
self.ui.cbxMcPlot.setCurrentIndex(i)
i += 1
self.ui.cbxMcPlot.conf_var = "MCPLOT"
self.ui.cbxMcPlot.conf_org_value = mccode_config.configuration["MCPLOT"]
self.ui.cbxMcPlot.conf_options_lst = mcplot_lst
# mcdisplay combobox
selected_val = mccode_config.configuration["MCDISPLAY"]
i = 0
for val in mcdisplay_lst:
self.ui.cbxMcdisplay.addItem(val)
if val == selected_val:
self.ui.cbxMcdisplay.setCurrentIndex(i)
i += 1
self.ui.cbxMcdisplay.conf_var = "MCDISPLAY"
self.ui.cbxMcdisplay.conf_org_value = mccode_config.configuration["MCDISPLAY"]
self.ui.cbxMcdisplay.conf_options_lst = mcdisplay_lst
# line edits
self.ui.edtCC.setText(mccode_config.compilation["CC"])
self.ui.edtCC.conf_var = "CC"
self.ui.edtCflags.setText(mccode_config.compilation["CFLAGS"])
self.ui.edtCflags.conf_var = "CFLAGS"
self.ui.edtMpicc.setText(mccode_config.compilation["MPICC"])
self.ui.edtMpicc.conf_var = "MPICC"
self.ui.edtMPIrun.setText(mccode_config.compilation["MPIRUN"])
self.ui.edtMPIrun.conf_var = "MPIRUN"
self.ui.edtNumNodes.setText(mccode_config.compilation["MPINODES"])
self.ui.edtNumNodes.conf_var = "MPINODES"
self.ui.edtNumCols.setText(mccode_config.configuration["GUICOLS"])
self.ui.edtNumCols.conf_var = "GUICOLS"
def __pullValuesTo_mccode_config(self):
# mcrun combobox
i = self.ui.cbxMcrun.currentIndex()
mccode_config.configuration["MCRUN"] = self.ui.cbxMcrun.conf_options_lst[i]
# mcrun combobox
i = self.ui.cbxMcPlot.currentIndex()
mccode_config.configuration["MCPLOT"] = self.ui.cbxMcPlot.conf_options_lst[i]
# mcrun combobox
i = self.ui.cbxMcdisplay.currentIndex()
mccode_config.configuration["MCDISPLAY"] = self.ui.cbxMcdisplay.conf_options_lst[i]
# line edits
mccode_config.compilation[str(self.ui.edtCC.conf_var)] = str(self.ui.edtCC.text())
mccode_config.compilation[str(self.ui.edtCflags.conf_var)] = str(self.ui.edtCflags.text())
mccode_config.compilation[str(self.ui.edtMpicc.conf_var)] = str(self.ui.edtMpicc.text())
mccode_config.compilation[str(self.ui.edtMPIrun.conf_var)] = str(self.ui.edtMPIrun.text())
mccode_config.compilation[str(self.ui.edtNumNodes.conf_var)] = str(self.ui.edtNumNodes.text())
mccode_config.configuration[str(self.ui.edtNumCols.conf_var)] = str(self.ui.edtNumCols.text())
# Export selected variables to the system / mcrun
target_mccode=mccode_config.configuration["MCCODE"].upper()
# CFLAGS and CC:
os.environ[target_mccode + '_CFLAGS_OVERRIDE']=mccode_config.compilation[str(self.ui.edtCflags.conf_var)]
os.environ[target_mccode + '_CC_OVERRIDE']=mccode_config.compilation[str(self.ui.edtCC.conf_var)]
# MPIRUN and MPICC:
os.environ[target_mccode + '_MPICC_OVERRIDE']=mccode_config.compilation[str(self.ui.edtMpicc.conf_var)]
os.environ[target_mccode + '_MPIRUN_OVERRIDE']=mccode_config.compilation[str(self.ui.edtMPIrun.conf_var)]
def accept(self):
self.__pullValuesTo_mccode_config()
# finally
super(McConfigDialog, self).accept()
def save(self):
self.__pullValuesTo_mccode_config()
mccode_config.save_user_config()
# finally
super(McConfigDialog, self).accept()
| gpl-2.0 | -2,626,172,253,893,456,000 | 35.294017 | 160 | 0.57922 | false |
marmyshev/transitions | openlp/plugins/bibles/lib/biblestab.py | 1 | 26262 | # -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2013 Raoul Snyman #
# Portions copyright (c) 2008-2013 Tim Bentley, Gerald Britton, Jonathan #
# Corwin, Samuel Findlay, Michael Gorven, Scott Guerrieri, Matthias Hub, #
# Meinert Jordan, Armin Köhler, Erik Lundin, Edwin Lunando, Brian T. Meyer. #
# Joshua Miller, Stevan Pettit, Andreas Preikschat, Mattias Põldaru, #
# Christian Richter, Philip Ridout, Simon Scudder, Jeffrey Smith, #
# Maikel Stuivenberg, Martin Thompson, Jon Tibble, Dave Warnock, #
# Frode Woldsund, Martin Zibricky, Patrick Zimmermann #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
import logging
from PyQt4 import QtCore, QtGui
from openlp.core.lib import Receiver, SettingsTab, Settings, UiStrings, translate
from openlp.core.lib.ui import find_and_set_in_combo_box
from openlp.plugins.bibles.lib import LayoutStyle, DisplayStyle, update_reference_separators, \
get_reference_separator, LanguageSelection
log = logging.getLogger(__name__)
class BiblesTab(SettingsTab):
"""
BiblesTab is the Bibles settings tab in the settings dialog.
"""
log.info(u'Bible Tab loaded')
def __init__(self, parent, title, visible_title, icon_path):
self.paragraph_style = True
self.show_new_chapters = False
self.display_style = 0
SettingsTab.__init__(self, parent, title, visible_title, icon_path)
def setupUi(self):
self.setObjectName(u'BiblesTab')
SettingsTab.setupUi(self)
self.verseDisplayGroupBox = QtGui.QGroupBox(self.leftColumn)
self.verseDisplayGroupBox.setObjectName(u'verseDisplayGroupBox')
self.verseDisplayLayout = QtGui.QFormLayout(self.verseDisplayGroupBox)
self.verseDisplayLayout.setObjectName(u'verseDisplayLayout')
self.newChaptersCheckBox = QtGui.QCheckBox(self.verseDisplayGroupBox)
self.newChaptersCheckBox.setObjectName(u'newChaptersCheckBox')
self.verseDisplayLayout.addRow(self.newChaptersCheckBox)
self.displayStyleLabel = QtGui.QLabel(self.verseDisplayGroupBox)
self.displayStyleLabel.setObjectName(u'displayStyleLabel')
self.displayStyleComboBox = QtGui.QComboBox(self.verseDisplayGroupBox)
self.displayStyleComboBox.addItems([u'', u'', u'', u''])
self.displayStyleComboBox.setObjectName(u'displayStyleComboBox')
self.verseDisplayLayout.addRow(self.displayStyleLabel, self.displayStyleComboBox)
self.layoutStyleLabel = QtGui.QLabel(self.verseDisplayGroupBox)
self.layoutStyleLabel.setObjectName(u'layoutStyleLabel')
self.layoutStyleComboBox = QtGui.QComboBox(self.verseDisplayGroupBox)
self.layoutStyleComboBox.setObjectName(u'layoutStyleComboBox')
self.layoutStyleComboBox.addItems([u'', u'', u''])
self.verseDisplayLayout.addRow(self.layoutStyleLabel, self.layoutStyleComboBox)
self.bibleSecondCheckBox = QtGui.QCheckBox(self.verseDisplayGroupBox)
self.bibleSecondCheckBox.setObjectName(u'bibleSecondCheckBox')
self.verseDisplayLayout.addRow(self.bibleSecondCheckBox)
self.bibleThemeLabel = QtGui.QLabel(self.verseDisplayGroupBox)
self.bibleThemeLabel.setObjectName(u'BibleThemeLabel')
self.bibleThemeComboBox = QtGui.QComboBox(self.verseDisplayGroupBox)
self.bibleThemeComboBox.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToMinimumContentsLength)
self.bibleThemeComboBox.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
self.bibleThemeComboBox.addItem(u'')
self.bibleThemeComboBox.setObjectName(u'BibleThemeComboBox')
self.verseDisplayLayout.addRow(self.bibleThemeLabel, self.bibleThemeComboBox)
self.changeNoteLabel = QtGui.QLabel(self.verseDisplayGroupBox)
self.changeNoteLabel.setWordWrap(True)
self.changeNoteLabel.setObjectName(u'changeNoteLabel')
self.verseDisplayLayout.addRow(self.changeNoteLabel)
self.leftLayout.addWidget(self.verseDisplayGroupBox)
self.scriptureReferenceGroupBox = QtGui.QGroupBox(self.leftColumn)
self.scriptureReferenceGroupBox.setObjectName(u'scriptureReferenceGroupBox')
self.scriptureReferenceLayout = QtGui.QGridLayout(self.scriptureReferenceGroupBox)
self.verseSeparatorCheckBox = QtGui.QCheckBox(self.scriptureReferenceGroupBox)
self.verseSeparatorCheckBox.setObjectName(u'verseSeparatorCheckBox')
self.scriptureReferenceLayout.addWidget(self.verseSeparatorCheckBox, 0, 0)
self.verseSeparatorLineEdit = QtGui.QLineEdit(self.scriptureReferenceGroupBox)
# self.verseSeparatorLineEdit.setPalette
self.verseSeparatorLineEdit.setObjectName(u'verseSeparatorLineEdit')
self.scriptureReferenceLayout.addWidget(self.verseSeparatorLineEdit, 0, 1)
self.rangeSeparatorCheckBox = QtGui.QCheckBox(self.scriptureReferenceGroupBox)
self.rangeSeparatorCheckBox.setObjectName(u'rangeSeparatorCheckBox')
self.scriptureReferenceLayout.addWidget(self.rangeSeparatorCheckBox, 1, 0)
self.rangeSeparatorLineEdit = QtGui.QLineEdit(self.scriptureReferenceGroupBox)
self.rangeSeparatorLineEdit.setObjectName(u'rangeSeparatorLineEdit')
self.scriptureReferenceLayout.addWidget(self.rangeSeparatorLineEdit, 1, 1)
self.listSeparatorCheckBox = QtGui.QCheckBox(self.scriptureReferenceGroupBox)
self.listSeparatorCheckBox.setObjectName(u'listSeparatorCheckBox')
self.scriptureReferenceLayout.addWidget(self.listSeparatorCheckBox, 2, 0)
self.listSeparatorLineEdit = QtGui.QLineEdit(self.scriptureReferenceGroupBox)
self.listSeparatorLineEdit.setObjectName(u'listSeparatorLineEdit')
self.scriptureReferenceLayout.addWidget(self.listSeparatorLineEdit, 2, 1)
self.endSeparatorCheckBox = QtGui.QCheckBox(self.scriptureReferenceGroupBox)
self.endSeparatorCheckBox.setObjectName(u'endSeparatorCheckBox')
self.scriptureReferenceLayout.addWidget(self.endSeparatorCheckBox, 3, 0)
self.endSeparatorLineEdit = QtGui.QLineEdit(self.scriptureReferenceGroupBox)
self.endSeparatorLineEdit.setObjectName(u'endSeparatorLineEdit')
self.endSeparatorLineEdit.setValidator(QtGui.QRegExpValidator(QtCore.QRegExp(r'[^0-9]*'),
self.endSeparatorLineEdit))
self.scriptureReferenceLayout.addWidget(self.endSeparatorLineEdit, 3, 1)
self.leftLayout.addWidget(self.scriptureReferenceGroupBox)
self.rightColumn.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
self.languageSelectionGroupBox = QtGui.QGroupBox(self.rightColumn)
self.languageSelectionGroupBox.setObjectName(u'languageSelectionGroupBox')
self.languageSelectionLayout = QtGui.QVBoxLayout(self.languageSelectionGroupBox)
self.languageSelectionLabel = QtGui.QLabel(self.languageSelectionGroupBox)
self.languageSelectionLabel.setObjectName(u'languageSelectionLabel')
self.languageSelectionComboBox = QtGui.QComboBox(self.languageSelectionGroupBox)
self.languageSelectionComboBox.setObjectName(u'languageSelectionComboBox')
self.languageSelectionComboBox.addItems([u'', u'', u''])
self.languageSelectionLayout.addWidget(self.languageSelectionLabel)
self.languageSelectionLayout.addWidget(self.languageSelectionComboBox)
self.rightLayout.addWidget(self.languageSelectionGroupBox)
self.leftLayout.addStretch()
self.rightLayout.addStretch()
# Signals and slots
QtCore.QObject.connect(self.newChaptersCheckBox, QtCore.SIGNAL(u'stateChanged(int)'),
self.onNewChaptersCheckBoxChanged)
QtCore.QObject.connect(self.displayStyleComboBox, QtCore.SIGNAL(u'activated(int)'),
self.onDisplayStyleComboBoxChanged)
QtCore.QObject.connect(self.bibleThemeComboBox, QtCore.SIGNAL(u'activated(int)'),
self.onBibleThemeComboBoxChanged)
QtCore.QObject.connect(self.layoutStyleComboBox, QtCore.SIGNAL(u'activated(int)'),
self.onLayoutStyleComboBoxChanged)
QtCore.QObject.connect(self.bibleSecondCheckBox, QtCore.SIGNAL(u'stateChanged(int)'),
self.onBibleSecondCheckBox)
QtCore.QObject.connect(self.verseSeparatorCheckBox, QtCore.SIGNAL(u'clicked(bool)'),
self.onVerseSeparatorCheckBoxClicked)
QtCore.QObject.connect(self.verseSeparatorLineEdit, QtCore.SIGNAL(u'textEdited(QString)'),
self.onVerseSeparatorLineEditEdited)
QtCore.QObject.connect(self.verseSeparatorLineEdit, QtCore.SIGNAL(u'editingFinished()'),
self.onVerseSeparatorLineEditFinished)
QtCore.QObject.connect(self.rangeSeparatorCheckBox, QtCore.SIGNAL(u'clicked(bool)'),
self.onRangeSeparatorCheckBoxClicked)
QtCore.QObject.connect(self.rangeSeparatorLineEdit, QtCore.SIGNAL(u'textEdited(QString)'),
self.onRangeSeparatorLineEditEdited)
QtCore.QObject.connect(self.rangeSeparatorLineEdit, QtCore.SIGNAL(u'editingFinished()'),
self.onRangeSeparatorLineEditFinished)
QtCore.QObject.connect(self.listSeparatorCheckBox, QtCore.SIGNAL(u'clicked(bool)'),
self.onListSeparatorCheckBoxClicked)
QtCore.QObject.connect(self.listSeparatorLineEdit, QtCore.SIGNAL(u'textEdited(QString)'),
self.onListSeparatorLineEditEdited)
QtCore.QObject.connect(self.listSeparatorLineEdit, QtCore.SIGNAL(u'editingFinished()'),
self.onListSeparatorLineEditFinished)
QtCore.QObject.connect(self.endSeparatorCheckBox, QtCore.SIGNAL(u'clicked(bool)'),
self.onEndSeparatorCheckBoxClicked)
QtCore.QObject.connect(self.endSeparatorLineEdit, QtCore.SIGNAL(u'textEdited(QString)'),
self.onEndSeparatorLineEditEdited)
QtCore.QObject.connect(self.endSeparatorLineEdit, QtCore.SIGNAL(u'editingFinished()'),
self.onEndSeparatorLineEditFinished)
QtCore.QObject.connect(Receiver.get_receiver(), QtCore.SIGNAL(u'theme_update_list'), self.updateThemeList)
QtCore.QObject.connect(self.languageSelectionComboBox, QtCore.SIGNAL(u'activated(int)'),
self.onLanguageSelectionComboBoxChanged)
def retranslateUi(self):
self.verseDisplayGroupBox.setTitle(translate('BiblesPlugin.BiblesTab', 'Verse Display'))
self.newChaptersCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'Only show new chapter numbers'))
self.layoutStyleLabel.setText(UiStrings().LayoutStyle)
self.displayStyleLabel.setText(UiStrings().DisplayStyle)
self.bibleThemeLabel.setText(translate('BiblesPlugin.BiblesTab', 'Bible theme:'))
self.layoutStyleComboBox.setItemText(LayoutStyle.VersePerSlide, UiStrings().VersePerSlide)
self.layoutStyleComboBox.setItemText(LayoutStyle.VersePerLine, UiStrings().VersePerLine)
self.layoutStyleComboBox.setItemText(LayoutStyle.Continuous, UiStrings().Continuous)
self.displayStyleComboBox.setItemText(DisplayStyle.NoBrackets,
translate('BiblesPlugin.BiblesTab', 'No Brackets'))
self.displayStyleComboBox.setItemText(DisplayStyle.Round,
translate('BiblesPlugin.BiblesTab', '( And )'))
self.displayStyleComboBox.setItemText(DisplayStyle.Curly,
translate('BiblesPlugin.BiblesTab', '{ And }'))
self.displayStyleComboBox.setItemText(DisplayStyle.Square,
translate('BiblesPlugin.BiblesTab', '[ And ]'))
self.changeNoteLabel.setText(translate('BiblesPlugin.BiblesTab',
'Note:\nChanges do not affect verses already in the service.'))
self.bibleSecondCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'Display second Bible verses'))
self.scriptureReferenceGroupBox.setTitle(translate('BiblesPlugin.BiblesTab', 'Custom Scripture References'))
self.verseSeparatorCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'Verse Separator:'))
self.rangeSeparatorCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'Range Separator:'))
self.listSeparatorCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'List Separator:'))
self.endSeparatorCheckBox.setText(translate('BiblesPlugin.BiblesTab', 'End Mark:'))
        #@todo these four tooltips are nearly identical, so move the common text to UiStrings and reuse it.
self.verseSeparatorLineEdit.setToolTip(
translate('BiblesPlugin.BiblesTab', 'Multiple alternative '
'verse separators may be defined.\nThey have to be separated '
'by a vertical bar "|".\nPlease clear this edit line to use '
'the default value.'))
self.rangeSeparatorLineEdit.setToolTip(
translate('BiblesPlugin.BiblesTab', 'Multiple alternative '
'range separators may be defined.\nThey have to be separated '
'by a vertical bar "|".\nPlease clear this edit line to use '
'the default value.'))
self.listSeparatorLineEdit.setToolTip(
translate('BiblesPlugin.BiblesTab', 'Multiple alternative '
'list separators may be defined.\nThey have to be separated '
'by a vertical bar "|".\nPlease clear this edit line to use '
'the default value.'))
self.endSeparatorLineEdit.setToolTip(
translate('BiblesPlugin.BiblesTab', 'Multiple alternative '
'end marks may be defined.\nThey have to be separated by a '
'vertical bar "|".\nPlease clear this edit line to use the '
'default value.'))
self.languageSelectionGroupBox.setTitle(translate('BiblesPlugin.BiblesTab', 'Default Bible Language'))
self.languageSelectionLabel.setText(translate('BiblesPlugin.BiblesTab',
'Book name language in search field,\nsearch results and on display:'))
self.languageSelectionComboBox.setItemText(LanguageSelection.Bible,
translate('BiblesPlugin.BiblesTab', 'Bible Language'))
self.languageSelectionComboBox.setItemText(LanguageSelection.Application,
translate('BiblesPlugin.BiblesTab', 'Application Language'))
self.languageSelectionComboBox.setItemText(LanguageSelection.English,
translate('BiblesPlugin.BiblesTab', 'English'))
def onBibleThemeComboBoxChanged(self):
self.bible_theme = self.bibleThemeComboBox.currentText()
def onDisplayStyleComboBoxChanged(self):
self.display_style = self.displayStyleComboBox.currentIndex()
def onLayoutStyleComboBoxChanged(self):
self.layout_style = self.layoutStyleComboBox.currentIndex()
def onLanguageSelectionComboBoxChanged(self):
self.language_selection = self.languageSelectionComboBox.currentIndex()
def onNewChaptersCheckBoxChanged(self, check_state):
self.show_new_chapters = False
        # The check state is a Qt enum value; convert it to True/False.
if check_state == QtCore.Qt.Checked:
self.show_new_chapters = True
def onBibleSecondCheckBox(self, check_state):
self.second_bibles = False
        # The check state is a Qt enum value; convert it to True/False.
if check_state == QtCore.Qt.Checked:
self.second_bibles = True
def onVerseSeparatorCheckBoxClicked(self, checked):
if checked:
self.verseSeparatorLineEdit.setFocus()
else:
self.verseSeparatorLineEdit.setText(get_reference_separator(u'sep_v_default'))
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(not checked))
def onVerseSeparatorLineEditEdited(self, text):
self.verseSeparatorCheckBox.setChecked(True)
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
def onVerseSeparatorLineEditFinished(self):
if self.verseSeparatorLineEdit.isModified():
text = self.verseSeparatorLineEdit.text()
if text == get_reference_separator(u'sep_v_default') or not text.replace(u'|', u''):
self.verseSeparatorCheckBox.setChecked(False)
self.verseSeparatorLineEdit.setText(get_reference_separator(u'sep_v_default'))
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
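    # The onVerseSeparatorLineEditFinished handler above and its three
    # siblings below share one pattern: if the edited text equals the default
    # separator, or is empty once the "|" delimiters are stripped, the custom
    # value is discarded, the checkbox is unchecked and the line edit reverts
    # to the greyed-out default.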
def onRangeSeparatorCheckBoxClicked(self, checked):
if checked:
self.rangeSeparatorLineEdit.setFocus()
else:
self.rangeSeparatorLineEdit.setText(get_reference_separator(u'sep_r_default'))
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(not checked))
def onRangeSeparatorLineEditEdited(self, text):
self.rangeSeparatorCheckBox.setChecked(True)
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
def onRangeSeparatorLineEditFinished(self):
if self.rangeSeparatorLineEdit.isModified():
text = self.rangeSeparatorLineEdit.text()
if text == get_reference_separator(u'sep_r_default') or not text.replace(u'|', u''):
self.rangeSeparatorCheckBox.setChecked(False)
self.rangeSeparatorLineEdit.setText(get_reference_separator(u'sep_r_default'))
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
def onListSeparatorCheckBoxClicked(self, checked):
if checked:
self.listSeparatorLineEdit.setFocus()
else:
self.listSeparatorLineEdit.setText(get_reference_separator(u'sep_l_default'))
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(not checked))
def onListSeparatorLineEditEdited(self, text):
self.listSeparatorCheckBox.setChecked(True)
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
def onListSeparatorLineEditFinished(self):
if self.listSeparatorLineEdit.isModified():
text = self.listSeparatorLineEdit.text()
if text == get_reference_separator(u'sep_l_default') or not text.replace(u'|', u''):
self.listSeparatorCheckBox.setChecked(False)
self.listSeparatorLineEdit.setText(get_reference_separator(u'sep_l_default'))
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
def onEndSeparatorCheckBoxClicked(self, checked):
if checked:
self.endSeparatorLineEdit.setFocus()
else:
self.endSeparatorLineEdit.setText(get_reference_separator(u'sep_e_default'))
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(not checked))
def onEndSeparatorLineEditEdited(self, text):
self.endSeparatorCheckBox.setChecked(True)
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
def onEndSeparatorLineEditFinished(self):
if self.endSeparatorLineEdit.isModified():
text = self.endSeparatorLineEdit.text()
if text == get_reference_separator(u'sep_e_default') or not text.replace(u'|', u''):
self.endSeparatorCheckBox.setChecked(False)
self.endSeparatorLineEdit.setText(get_reference_separator(u'sep_e_default'))
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
def load(self):
settings = Settings()
settings.beginGroup(self.settingsSection)
self.show_new_chapters = settings.value(u'display new chapter')
self.display_style = settings.value(u'display brackets')
self.layout_style = settings.value(u'verse layout style')
self.bible_theme = settings.value(u'bible theme')
self.second_bibles = settings.value(u'second bibles')
self.newChaptersCheckBox.setChecked(self.show_new_chapters)
self.displayStyleComboBox.setCurrentIndex(self.display_style)
self.layoutStyleComboBox.setCurrentIndex(self.layout_style)
self.bibleSecondCheckBox.setChecked(self.second_bibles)
verse_separator = settings.value(u'verse separator')
if (verse_separator.strip(u'|') == u'') or (verse_separator == get_reference_separator(u'sep_v_default')):
self.verseSeparatorLineEdit.setText(get_reference_separator(u'sep_v_default'))
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
self.verseSeparatorCheckBox.setChecked(False)
else:
self.verseSeparatorLineEdit.setText(verse_separator)
self.verseSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
self.verseSeparatorCheckBox.setChecked(True)
range_separator = settings.value(u'range separator')
if (range_separator.strip(u'|') == u'') or (range_separator == get_reference_separator(u'sep_r_default')):
self.rangeSeparatorLineEdit.setText(get_reference_separator(u'sep_r_default'))
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
self.rangeSeparatorCheckBox.setChecked(False)
else:
self.rangeSeparatorLineEdit.setText(range_separator)
self.rangeSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
self.rangeSeparatorCheckBox.setChecked(True)
list_separator = settings.value(u'list separator')
if (list_separator.strip(u'|') == u'') or (list_separator == get_reference_separator(u'sep_l_default')):
self.listSeparatorLineEdit.setText(get_reference_separator(u'sep_l_default'))
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
self.listSeparatorCheckBox.setChecked(False)
else:
self.listSeparatorLineEdit.setText(list_separator)
self.listSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
self.listSeparatorCheckBox.setChecked(True)
end_separator = settings.value(u'end separator')
if (end_separator.strip(u'|') == u'') or (end_separator == get_reference_separator(u'sep_e_default')):
self.endSeparatorLineEdit.setText(get_reference_separator(u'sep_e_default'))
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(True))
self.endSeparatorCheckBox.setChecked(False)
else:
self.endSeparatorLineEdit.setText(end_separator)
self.endSeparatorLineEdit.setPalette(self.getGreyTextPalette(False))
self.endSeparatorCheckBox.setChecked(True)
self.language_selection = settings.value(u'book name language')
self.languageSelectionComboBox.setCurrentIndex(self.language_selection)
settings.endGroup()
def save(self):
settings = Settings()
settings.beginGroup(self.settingsSection)
settings.setValue(u'display new chapter', self.show_new_chapters)
settings.setValue(u'display brackets', self.display_style)
settings.setValue(u'verse layout style', self.layout_style)
settings.setValue(u'book name language', self.language_selection)
settings.setValue(u'second bibles', self.second_bibles)
settings.setValue(u'bible theme', self.bible_theme)
if self.verseSeparatorCheckBox.isChecked():
settings.setValue(u'verse separator', self.verseSeparatorLineEdit.text())
else:
settings.remove(u'verse separator')
if self.rangeSeparatorCheckBox.isChecked():
settings.setValue(u'range separator', self.rangeSeparatorLineEdit.text())
else:
settings.remove(u'range separator')
if self.listSeparatorCheckBox.isChecked():
settings.setValue(u'list separator', self.listSeparatorLineEdit.text())
else:
settings.remove(u'list separator')
if self.endSeparatorCheckBox.isChecked():
settings.setValue(u'end separator', self.endSeparatorLineEdit.text())
else:
settings.remove(u'end separator')
update_reference_separators()
Receiver.send_message(u'bibles_load_list')
settings.endGroup()
def updateThemeList(self, theme_list):
"""
Called from ThemeManager when the Themes have changed.
        ``theme_list``
            The list of available themes::

                [u'Bible Theme', u'Song Theme']
"""
self.bibleThemeComboBox.clear()
self.bibleThemeComboBox.addItem(u'')
self.bibleThemeComboBox.addItems(theme_list)
find_and_set_in_combo_box(self.bibleThemeComboBox, self.bible_theme)
def getGreyTextPalette(self, greyed):
"""
Returns a QPalette with greyed out text as used for placeholderText.
"""
palette = QtGui.QPalette()
color = self.palette().color(QtGui.QPalette.Active, QtGui.QPalette.Text)
if greyed:
color.setAlpha(128)
palette.setColor(QtGui.QPalette.Active, QtGui.QPalette.Text, color)
return palette
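    # Usage sketch: lineEdit.setPalette(self.getGreyTextPalette(True)) greys
    # the text to mark a default value; passing False restores normal contrast.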
| gpl-2.0 | -5,386,206,835,964,329,000 | 59.229358 | 116 | 0.700419 | false |
jerome-nexedi/dream | dream/simulation/CoreObject.py | 1 | 47086 | # ===========================================================================
# Copyright 2013 University of Limerick
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DREAM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DREAM. If not, see <http://www.gnu.org/licenses/>.
# ===========================================================================
'''
Created on 12 Jul 2012
@author: George
'''
'''
Abstract class; it should have no instances. All core objects should inherit from it.
'''
# from SimPy.Simulation import Process, Resource, now, SimEvent, waitevent
import simpy
from ManPyObject import ManPyObject
# ===========================================================================
# the core object
# ===========================================================================
class CoreObject(ManPyObject):
class_name = 'Dream.CoreObject'
def __init__(self, id, name, **kw):
ManPyObject.__init__(self,id,name)
self.objName = name
# lists that hold the previous and next objects in the flow
self.next=[] #list with the next objects in the flow
self.previous=[] #list with the previous objects in the flow
self.nextIds=[] #list with the ids of the next objects in the flow
self.previousIds=[] #list with the ids of the previous objects in the flow
#lists to hold statistics of multiple runs
self.Failure=[]
self.Working=[]
self.Blockage=[]
self.Waiting=[]
self.OffShift=[]
self.WaitingForOperator=[]
self.WaitingForLoadOperator=[]
self.Loading = []
self.SettingUp =[]
# list that holds the objectInterruptions that have this element as victim
self.objectInterruptions=[]
#default attributes set so that the CoreObject has them
self.isPreemptive=False
self.resetOnPreemption=False
self.interruptCause=None
self.gatherWipStat=False
# flag used to signal that the station waits for removeEntity event
self.waitEntityRemoval=False
# attributes/indices used for printing the route, hold the cols corresponding to the object (entities route and operators route)
self.station_col_inds=[]
self.op_col_indx=None
# if there is input in a dictionary parse from it
from Globals import G
G.ObjList.append(self) # add object to ObjList
        # list of expected signals of a station (values can be used as flags to inform on which signals the station is currently yielding)
self.expectedSignals={
"isRequested":0,
"canDispose":0,
"interruptionStart":0,
"interruptionEnd":0,
"loadOperatorAvailable":0,
"initialWIP":0,
"brokerIsSet":0,
"preemptQueue":0,
"entityRemoved":0,
"entityCreated":0,
"moveEnd":0,
"processOperatorUnavailable":0
}
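        # Usage sketch (illustrative): a station raises a flag just before
        # yielding on the matching event and clears it afterwards, e.g.
        #   self.expectedSignals['entityRemoved'] = 1
        #   yield self.entityRemoved
        #   self.expectedSignals['entityRemoved'] = 0
        # so that senders can check the flag (as removeEntity below does)
        # before delivering a signal.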
        # flag notifying that the station can deliver entities that ended their processing while interrupted
self.canDeliverOnInterruption=False
# keep wip stats for every replication
self.WipStat=[]
def initialize(self):
from Globals import G
self.env=G.env
self.Up=True #Boolean that shows if the object is in failure ("Down") or not ("up")
self.onShift=True
self.currentEntity=None
# ============================== total times ===============================================
        self.totalOperationTime=0                   #dummy variable to hold totalWorking/SetupTime during an interruption (yield ...(self.operation('setup'))
self.totalBlockageTime=0 #holds the total blockage time
self.totalFailureTime=0 #holds the total failure time
self.totalWaitingTime=0 #holds the total waiting time
self.totalWorkingTime=0 #holds the total working time
self.totalOffShiftTime=0 #holds the total off-shift time
self.completedJobs=0 #holds the number of completed jobs
# ============================== Entity related attributes =================================
self.timeLastEntityEnded=0 #holds the last time that an entity ended processing in the object
self.nameLastEntityEnded="" #holds the name of the last entity that ended processing in the object
self.timeLastEntityEntered=0 #holds the last time that an entity entered in the object
self.nameLastEntityEntered="" #holds the name of the last entity that entered in the object
# ============================== shift related times =====================================
self.timeLastShiftStarted=0 #holds the time that the last shift of the object started
self.timeLastShiftEnded=0 #holds the time that the last shift of the object ended
self.offShiftTimeTryingToReleaseCurrentEntity=0 #holds the time that the object was off-shift while trying
#to release the current entity
# ============================== failure related times =====================================
self.timeLastFailure=0 #holds the time that the last failure of the object started
self.timeLastFailureEnded=0 #holds the time that the last failure of the object ended
#processing the current entity
self.downTimeInTryingToReleaseCurrentEntity=0 #holds the time that the object was down while trying
                                                    #to release the current entity. This might be due to failure, off-shift, etc
self.timeLastEntityLeft=0 #holds the last time that an entity left the object
self.processingTimeOfCurrentEntity=0 #holds the total processing time that the current entity required
# ============================== waiting flag ==============================================
self.waitToDispose=False #shows if the object waits to dispose an entity
self.isWorkingOnTheLast=False #shows if the object is performing the last processing before scheduled interruption
# ============================== the below are currently used in Jobshop =======================
self.giver=None #the CoreObject that the activeObject will take an Entity from
if len(self.previous)>0:
self.giver=self.previous[0]
self.receiver=None #the CoreObject that the activeObject will give an Entity to
if len(self.next)>0:
self.receiver=self.next[0]
# ============================== variable that is used for the loading of objects =============
self.exitAssignedToReceiver = None # by default the objects are not blocked
# when the entities have to be loaded to operated objects
# then the giverObjects have to be blocked for the time
# that the object is being loaded
# ============================== variable that is used signalling of objects ==================
self.entryAssignedToGiver = None # by default the objects are not blocked
# when the entities have to be received by objects
# then the objects have to be blocked after the first signal they receive
# in order to avoid signalling the same object
# while it has not received the entity it has been originally signalled for
# ============================== lists to hold statistics of multiple runs =====================
self.totalTimeWaitingForOperator=0
self.operatorWaitTimeCurrentEntity=0
self.totalTimeInCurrentEntity=0
self.operatorWaitTimeCurrentEntity=0
self.totalProcessingTimeInCurrentEntity=0
# self.failureTimeInCurrentEntity=0
self.setupTimeCurrentEntity=0
# the time that the object started/ended its wait for the operator
self.timeWaitForOperatorStarted=0
self.timeWaitForOperatorEnded=0
# the time that the object started/ended its wait for the operator
self.timeWaitForLoadOperatorStarted=0
self.timeWaitForLoadOperatorEnded=0
self.totalTimeWaitingForLoadOperator=0
# the time that the operator started/ended loading the object
self.timeLoadStarted=0
self.timeLoadEnded=0
self.totalLoadTime=0
# the time that the operator started/ended setting-up the object
self.timeSetupStarted=0
self.timeSetupEnded=0
self.totalSetupTime=0
# Current entity load/setup/loadOperatorwait/operatorWait related times
self.operatorWaitTimeCurrentEntity=0 # holds the time that the object was waiting for the operator
self.loadOperatorWaitTimeCurrentEntity = 0 # holds the time that the object waits for operator to load the it
self.loadTimeCurrentEntity = 0 # holds the time to load the current entity
self.setupTimeCurrentEntity = 0 # holds the time to setup the object before processing the current entity
self.shouldPreempt=False #flag that shows that the object should preempt or not
        self.isProcessingInitialWIP=False           #flag that is used only when an object has initial wip
self.lastGiver=None # variable that holds the last giver of the object, used by object in case of preemption
# initialize the wipStatList -
# TODO, think what to do in multiple runs
# TODO, this should be also updated in Globals.setWIP (in case we have initial wip)
import numpy as np
self.wipStatList=np.array([[0,0]])
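        # each row of wipStatList is a [time, queue_length] pair,
        # e.g. [[0, 0], [5.2, 3]] (illustrative values)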
self.isRequested=self.env.event()
self.canDispose=self.env.event()
self.interruptionEnd=self.env.event()
self.interruptionStart=self.env.event()
self.interruptedBy=None
self.entityRemoved=self.env.event()
self.initialWIP=self.env.event()
# flag used to signal that the station waits for removeEntity event
self.waitEntityRemoval=False
# attributes/indices used for printing the route, hold the cols corresponding to the object (entities route and operators route)
self.station_col_inds=[]
self.op_col_indx=None
# flag that locks the entry of an object so that it cannot receive entities
self.isLocked=False
        # flag that shows if the object is in processing state at any given time
self.isProcessing=False
# variable that shows what kind of operation is the station performing at the moment
'''
it can be Processing or Setup
XXX: others not yet implemented
'''
self.currentlyPerforming=None
        # flag that shows if the object is in blocked state at any given time
self.isBlocked=False
self.timeLastBlockageStarted=None
        # list of expected signals of a station (values can be used as flags to inform on which signals the station is currently yielding)
self.expectedSignals={
"isRequested":0,
"canDispose":0,
"interruptionStart":0,
"interruptionEnd":0,
"loadOperatorAvailable":0,
"initialWIP":0,
"brokerIsSet":0,
"preemptQueue":0,
"entityRemoved":0,
"entityCreated":0,
"moveEnd":0
}
# lists that keep the start/endShiftTimes of the victim
self.endShiftTimes=[]
self.startShiftTimes=[]
# =======================================================================
# the main process of the core object
# this is dummy, every object must have its own implementation
# =======================================================================
def run(self):
raise NotImplementedError("Subclass must define 'run' method")
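    # A minimal override sketch (hypothetical subclass, for illustration only;
    # real subclasses such as machines and queues implement far richer logic):
    #
    #   class DummyStation(CoreObject):
    #       def run(self):
    #           while 1:
    #               self.expectedSignals['isRequested'] = 1
    #               yield self.isRequested          # wait for a giver's signal
    #               self.expectedSignals['isRequested'] = 0
    #               self.isRequested = self.env.event()   # rearm the event
    #               self.getEntity()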
# =======================================================================
# sets the routing in and out elements for the Object
# =======================================================================
def defineRouting(self, predecessorList=[], successorList=[]):
self.next=successorList
self.previous=predecessorList
# =======================================================================
    # checks if there is anything set as WIP at the beginning of the simulation
# and sends an event to initialize the simulation
# =======================================================================
def initialSignalReceiver(self):
if self.haveToDispose():
self.signalReceiver()
def initialAllocationRequest(self):
# TODO if the station is operated, and the operators have skills defined then the SkilledOperatorRouter should be signalled
# XXX: there may be a case where one object is not assigned an operator, in that case we do not want to invoke the allocation routine
if self.checkForDedicatedOperators():
allocationNeeded=False
from Globals import G
for obj in G.MachineList:
if obj.operatorPool!='None':
if obj.operatorPool.operators:
allocationNeeded=False
break
else:
allocationNeeded=True
if allocationNeeded:
self.requestAllocation()
# =======================================================================
    # removes an Entity from the Object; the Entity to be removed is passed
# as argument by getEntity of the receiver
# =======================================================================
def removeEntity(self, entity=None, resetFlags=True, addBlockage=True):
if addBlockage and self.isBlocked:
# add the blocking time
self.addBlockage()
# reset flags
if resetFlags:
self.isBlocked=False
self.isProcessing=False
activeObjectQueue=self.Res.users
activeObjectQueue.remove(entity) #remove the Entity from the queue
if self.receiver:
self.receiver.appendEntity(entity)
self.downTimeInTryingToReleaseCurrentEntity=0
self.offShiftTimeTryingToReleaseCurrentEntity=0
self.timeLastEntityLeft=self.env.now
self.outputTrace(entity.name, "released "+self.objName)
#append the time to schedule so that it can be read in the result
        #remember that every entity has its schedule, which is supposed to be updated every time
        # the entity enters a new object
if entity.schedule:
entity.schedule[-1]["exitTime"] = self.env.now
# update wipStatList
if self.gatherWipStat:
import numpy
self.wipStatList=numpy.concatenate((self.wipStatList,[[self.env.now, len(activeObjectQueue)]]))
if self.expectedSignals['entityRemoved']:
self.printTrace(self.id, signal='(removedEntity)')
self.sendSignal(receiver=self, signal=self.entityRemoved)
return entity
#===========================================================================
    # appends entity to the receiver object. To be called by the removeEntity of the giver;
# this method is created to be overridden by the Assembly class in its getEntity where Frames are loaded
#===========================================================================
def appendEntity(self,entity=None):
activeObjectQueue=self.Res.users
activeObjectQueue.append(entity)
# =======================================================================
    # called by getEntity; it identifies the Entity
# to be obtained so that
# getEntity gives it to removeEntity as argument
# =======================================================================
def identifyEntityToGet(self):
giverObjectQueue=self.getGiverObjectQueue()
return giverObjectQueue[0]
# =======================================================================
# adds the blockage time to totalBlockageTime
# each time an Entity is removed
# =======================================================================
def addBlockage(self):
if self.timeLastBlockageStarted:
self.totalBlockageTime+=self.env.now-self.timeLastBlockageStarted
# =======================================================================
# gets an entity from the giver
# =======================================================================
def getEntity(self):
# get active object and its queue, as well as the active (to be) entity
#(after the sorting of the entities in the queue of the giver object)
# activeObject=self.getActiveObject()
activeObjectQueue=self.Res.users
# get giver object, its queue, and sort the entities according to this object priorities
giverObject=self.giver
giverObject.sortEntities() #sort the Entities of the giver
#according to the scheduling rule if applied
giverObject.sortEntitiesForReceiver(self)
giverObjectQueue=giverObject.Res.users
# if the giverObject is blocked then unBlock it
if giverObject.exitIsAssignedTo():
giverObject.unAssignExit()
# if the activeObject entry is blocked then unBlock it
if self.entryIsAssignedTo():
self.unAssignEntry()
activeEntity=self.identifyEntityToGet()
activeEntity.currentStation=self
# update the receiver of the giverObject
giverObject.receiver=self
# remove entity from the giver
activeEntity = giverObject.removeEntity(entity=self.identifyEntityToGet())
# variable that holds the last giver; used in case of preemption
self.lastGiver=self.giver
# #get the entity from the previous object and put it in front of the activeQ
# activeObjectQueue.append(activeEntity)
#append the time to schedule so that it can be read in the result
        #remember that every entity has its schedule, which is supposed to be updated every time
# the entity enters a new object
activeEntity.schedule.append({"station": self,
"entranceTime": self.env.now})
#update variables
activeEntity.currentStation=self
self.timeLastEntityEntered=self.env.now
self.nameLastEntityEntered=activeEntity.name # this holds the name of the last entity that got into object
# update the next list of the object
self.updateNext(activeEntity)
self.outputTrace(activeEntity.name, "got into "+self.objName)
self.printTrace(activeEntity.name, enter=self.id)
# # if there are entities with requiredParts then check whether the requirements are fulfilled for them to proceed
        # # as soon as a "buffer" receives an entity it checks whether the entity is requested elsewhere,
        # # then it checks if there are other requested entities by the same requesting entity.
# # Finally, it is controlled whether all the requested parts have concluded
# # their sequences for the requesting entity
# from Globals import G
# # for all the entities in the EntityList
# for entity in G.EntityList:
# requiredParts=entity.getRequiredParts()
# if requiredParts:
        # #             if the activeEntity is in the requiredParts of the entity
# if activeEntity in requiredParts:
# # if the entity that requires the activeEntity can proceed then signal the currentStation of the entity
# if entity.checkIfRequiredPartsReady() and entity.currentStation.expectedSignals['canDispose']:
# entity.mayProceed=True
# self.sendSignal(receiver=entity.currentStation, signal=entity.currentStation.canDispose)
# if the object (eg Queue) canAccept then signal the Giver
if self.canAccept():
self.signalGiver()
return activeEntity
#===========================================================================
# updates the next list of the object
#===========================================================================
def updateNext(self, entity=None):
pass
#===========================================================================
# check whether there is a critical entity to be disposed
# and if preemption is required
#===========================================================================
def preemptReceiver(self):
activeObjectQueue=self.Res.users
# find a critical order if any
critical=False
for entity in activeObjectQueue:
if entity.isCritical:
activeEntity=entity
critical=True
break
if critical:
# pick a receiver
receiver=None
if any(object for object in self.next if object.isPreemptive and object.checkIfActive()):
receiver=next(object for object in self.next if object.isPreemptive and object.checkIfActive())
# if there is any receiver that can be preempted check if it is operated
if receiver:
receiverOperated=False # local variable to inform if the receiver is operated for Loading
try:
from MachineJobShop import MachineJobShop
from MachineManagedJob import MachineManagedJob
# TODO: implement preemption for simple machines
if receiver.operatorPool\
and isinstance(receiver, MachineJobShop) or\
isinstance(receiver, MachineManagedJob):
# and the operationType list contains Load, the receiver is operated
if (receiver.operatorPool!="None")\
and any(type=="Load" for type in receiver.multOperationTypeList):
receiverOperated=True
except:
pass
# if the obtained Entity is critical and the receiver is preemptive and not operated
# in the case that the receiver is operated the preemption is performed by the operators
# if the receiver is not Up then no preemption will be performed
if not receiverOperated and len(receiver.Res.users)>0:
#if the receiver does not hold an Entity that is also critical
if not receiver.Res.users[0].isCritical:
receiver.shouldPreempt=True
self.printTrace(self.id, preempt=receiver.id)
receiver.preempt()
                        receiver.timeLastEntityEnded=self.env.now #required to count blockage correctly in the preempted station
# sort so that the critical entity is placed in front
activeObjectQueue.sort(key=lambda x: x==activeEntity, reverse=True)
# if there is a critical entity and the possible receivers are operated then signal the Router
elif receiverOperated:
self.signalRouter(receiver)
activeObjectQueue.sort(key=lambda x: x==activeEntity, reverse=True)
# update wipStatList
if self.gatherWipStat:
import numpy
self.wipStatList=numpy.concatenate((self.wipStatList,[[self.env.now, len(activeObjectQueue)]]))
#===========================================================================
# find possible receivers
#===========================================================================
@staticmethod
def findReceiversFor(activeObject):
receivers=[]
for object in [x for x in activeObject.next if x.canAccept(activeObject) and not x.isRequested.triggered and x.expectedSignals['isRequested']]:
receivers.append(object)
return receivers
# =======================================================================
# signal the successor that the object can dispose an entity
# =======================================================================
def signalReceiver(self):
possibleReceivers=self.findReceiversFor(self)
if possibleReceivers:
receiver=self.selectReceiver(possibleReceivers)
receiversGiver=self
# perform the checks that canAcceptAndIsRequested used to perform and update activeCallersList or assignExit and operatorPool
while not receiver.canAcceptAndIsRequested(receiversGiver):
possibleReceivers.remove(receiver)
if not possibleReceivers:
receiversGiver=None
receiver=None
                    # if no receiver can accept then try to preempt a receiver if the station holds a critical order
self.preemptReceiver()
return False
receiver=self.selectReceiver(possibleReceivers)
receiversGiver=self
# sorting the entities of the object for the receiver
self.sortEntitiesForReceiver(receiver)
# signalling the Router if the receiver is operated and not assigned an operator
if self.signalRouter(receiver):
return False
self.receiver=receiver
self.receiver.giver=self
self.printTrace(self.id, signalReceiver=self.receiver.id)
# assign the entry of the receiver
self.receiver.assignEntryTo()
# assign the exit of the current object to the receiver
self.assignExitTo(self.receiver)
if self.receiver.expectedSignals['isRequested']:
self.sendSignal(receiver=self.receiver, signal=self.receiver.isRequested)
return True
        # if no receiver can accept then try to preempt a receiver if the station holds a critical order
self.preemptReceiver()
return False
# =======================================================================
# select a receiver Object
# =======================================================================
@staticmethod
def selectReceiver(possibleReceivers=[]):
candidates=possibleReceivers
        # dummy variables that help prioritize the objects requesting to receive entities from the object (activeObject)
maxTimeWaiting=0 # dummy variable counting the time a successor is waiting
receiver=None
from Globals import G
for object in candidates:
timeWaiting=G.env.now-object.timeLastEntityLeft # the time it has been waiting is updated and stored in dummy variable timeWaiting
if(timeWaiting>maxTimeWaiting or maxTimeWaiting==0):# if the timeWaiting is the maximum among the ones of the successors
maxTimeWaiting=timeWaiting
receiver=object # set the receiver as the longest waiting possible receiver
return receiver
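    # Illustrative example of the policy above: with candidates A (last entity
    # left at t=3) and B (last entity left at t=7) evaluated at now=10, A has
    # been waiting 7 time units versus B's 3, so A is selected.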
#===========================================================================
# sort the entities of the queue for the receiver
#===========================================================================
def sortEntitiesForReceiver(self, receiver=None):
pass
#===========================================================================
# find possible givers
#===========================================================================
@staticmethod
def findGiversFor(activeObject):
givers=[]
for object in [x for x in activeObject.previous if(not x is activeObject) and not x.canDispose.triggered and
(x.expectedSignals['canDispose'] or
                        (x.canDeliverOnInterruption and x.timeLastShiftEnded==x.env.now))]: # extra check. If the shift ended right now and the object
# can unload we relax the canDispose flag
if object.haveToDispose(activeObject):
givers.append(object)
return givers
# =======================================================================
# signal the giver that the entity is removed from its internalQueue
# =======================================================================
def signalGiver(self):
possibleGivers=self.findGiversFor(self)
if possibleGivers:
giver=self.selectGiver(possibleGivers)
giversReceiver=self
# perform the checks that canAcceptAndIsRequested used to perform and update activeCallersList or assignExit and operatorPool
while not self.canAcceptAndIsRequested(giver):
possibleGivers.remove(giver)
if not possibleGivers:
return False
giver=self.selectGiver(possibleGivers)
giversReceiver=self
self.giver=giver
self.giver.receiver=self
if self.giver.expectedSignals['canDispose'] or (self.giver.canDeliverOnInterruption
                                                            and self.giver.timeLastShiftEnded==self.env.now): # extra check. If the shift ended right now and the object
# can unload we relax the canDispose flag
self.sendSignal(receiver=self.giver, signal=self.giver.canDispose)
self.printTrace(self.id, signalGiver=self.giver.id)
return True
return False
# =======================================================================
# select a giver Object
# =======================================================================
@staticmethod
def selectGiver(possibleGivers=[]):
candidates=possibleGivers
# dummy variables that help prioritize the objects requesting to give objects to the object (activeObject)
maxTimeWaiting=0 # dummy variable counting the time a predecessor is blocked
giver=None
from Globals import G
# loop through the possible givers to see which have to dispose and which is the one blocked for longer
for object in candidates:
# calculate how much the giver is waiting
timeWaiting=G.env.now-object.timeLastEntityEnded
if(timeWaiting>=maxTimeWaiting):
giver=object # the object to deliver the Entity to the activeObject is set to the ith member of the previous list
maxTimeWaiting=timeWaiting
return giver
# =======================================================================
# actions to be taken after the simulation ends
# =======================================================================
    def postProcessing(self, MaxSimtime=None):
        # G is needed below (e.g. G.ExitList) even when MaxSimtime is given
        from Globals import G
        if MaxSimtime==None:
            MaxSimtime=G.maxSimTime
activeObject=self.getActiveObject()
activeObjectQueue=self.getActiveObjectQueue()
import numpy
self.wipStatList=numpy.concatenate((self.wipStatList,[[self.env.now, len(activeObjectQueue)]]))
#calculate the offShift time for current entity
offShiftTimeInCurrentEntity=0
if self.interruptedBy:
if self.onShift==False: # and self.interruptedBy=='ShiftScheduler':
offShiftTimeInCurrentEntity=self.env.now-activeObject.timeLastShiftEnded
if self.isBlocked:
self.addBlockage()
#if object is currently processing an entity we should count this working time
if self.isProcessing:
'''XXX currentlyPerforming can be Setup or Processing '''
if self.currentlyPerforming:
if self.currentlyPerforming=='Setup':
activeObject.totalSetupTime+=self.env.now-self.timeLastOperationStarted
else:
activeObject.totalWorkingTime+=self.env.now-self.timeLastOperationStarted
else:
activeObject.totalWorkingTime+=self.env.now-self.timeLastProcessingStarted
# activeObject.totalTimeWaitingForOperator+=activeObject.operatorWaitTimeCurrentEntity
# if object is down we have to add this failure time to its total failure time
if self.Up==False:
if self.onShift:
activeObject.totalFailureTime+=self.env.now-activeObject.timeLastFailure
# if object is off shift add only the fail time before the shift ended
if not self.onShift and self.timeLastFailure < self.timeLastShiftEnded:
self.totalFailureTime+=self.timeLastShiftEnded-self.timeLastFailure
        #if the object is off shift, add this to the off-shift time
if activeObject.onShift==False:
# if we ran the simulation for infinite time we have to identify the last event
now=self.env.now
if now==float('inf'):
now=0
lastExits=[]
for object in G.ExitList:
lastExits.append(object.timeLastEntityEntered)
if lastExits:
now=max(lastExits)
self.totalOffShiftTime+=now-self.timeLastShiftEnded
        #object was idle when it was not in any other state
        activeObject.totalWaitingTime=(MaxSimtime-activeObject.totalWorkingTime-activeObject.totalBlockageTime-
                                       activeObject.totalFailureTime-activeObject.totalLoadTime-
                                       activeObject.totalSetupTime-self.totalOffShiftTime)
        if activeObject.totalBlockageTime<0 and activeObject.totalBlockageTime>-0.00001:  #to avoid slightly negative values caused by rounding precision
            self.totalBlockageTime=0
        if activeObject.totalWaitingTime<0 and activeObject.totalWaitingTime>-0.00001: #to avoid slightly negative values caused by rounding precision
            self.totalWaitingTime=0
activeObject.Failure.append(100*self.totalFailureTime/MaxSimtime)
activeObject.Blockage.append(100*self.totalBlockageTime/MaxSimtime)
activeObject.Waiting.append(100*self.totalWaitingTime/MaxSimtime)
activeObject.Working.append(100*self.totalWorkingTime/MaxSimtime)
activeObject.WaitingForOperator.append(100*self.totalTimeWaitingForOperator/MaxSimtime)
activeObject.WaitingForLoadOperator.append(100*self.totalTimeWaitingForLoadOperator/MaxSimtime)
activeObject.Loading.append(100*self.totalLoadTime/MaxSimtime)
activeObject.SettingUp.append(100*self.totalSetupTime/MaxSimtime)
activeObject.OffShift.append(100*self.totalOffShiftTime/MaxSimtime)
activeObject.WipStat.append(self.wipStatList.tolist())
# =======================================================================
# outputs results to JSON File
# =======================================================================
def outputResultsJSON(self):
pass
# =======================================================================
# checks if the Object can dispose an entity to the following object
# =======================================================================
def haveToDispose(self, callerObject=None):
activeObjectQueue=self.Res.users
return len(activeObjectQueue)>0
# =======================================================================
# checks if the Object can accept an entity and there is an entity
# in some possible giver waiting for it
# =======================================================================
def canAcceptAndIsRequested(self,callerObject=None):
pass
# =======================================================================
# checks if the Object can accept an entity
# =======================================================================
def canAccept(self, callerObject=None):
pass
#===========================================================================
# method used to check whether the station is a successor of the caller
#===========================================================================
def isInRouteOf(self, callerObject=None):
thecaller=callerObject
# if the caller is not defined then return True. We are only interested in checking whether
# the station can accept whatever entity from whichever giver
if not thecaller:
return True
        #check if the caller object is a predecessor of the activeObject
if thecaller in self.previous:
return True
return False
# =======================================================================
# sorts the Entities in the activeQ of the objects
# =======================================================================
def sortEntities(self):
pass
# =======================================================================
# get the active object. This always returns self
# =======================================================================
def getActiveObject(self):
return self
# =======================================================================
# get the activeQ of the active object.
# =======================================================================
def getActiveObjectQueue(self):
return self.Res.users
# =======================================================================
# get the giver object in a getEntity transaction.
# =======================================================================
def getGiverObject(self):
return self.giver
# =======================================================================
# get the giver object queue in a getEntity transaction.
# =======================================================================
def getGiverObjectQueue(self):
return self.giver.Res.users
# =======================================================================
# get the receiver object in a removeEntity transaction.
# =======================================================================
def getReceiverObject(self):
return self.receiver
# =======================================================================
# get the receiver object queue in a removeEntity transaction.
# =======================================================================
def getReceiverObjectQueue(self):
return self.receiver.Res.users
# =======================================================================
# calculates the processing time
# =======================================================================
def calculateProcessingTime(self):
# this is only for processing of the initial wip
if self.isProcessingInitialWIP:
activeEntity=self.getActiveObjectQueue()[0]
if activeEntity.remainingProcessingTime:
remainingProcessingTime=activeEntity.remainingProcessingTime
from RandomNumberGenerator import RandomNumberGenerator
initialWIPrng=RandomNumberGenerator(self, remainingProcessingTime)
return initialWIPrng.generateNumber()
return self.rng.generateNumber() # this is if we have a default processing time for all the entities
#===========================================================================
# calculates time (running through a dictionary) according to the type of processing given as argument
#===========================================================================
def calculateTime(self,type='Processing'):
return {
'Load': self.loadRng.generateNumber,
'Setup': self.stpRng.generateNumber,
'Processing': self.calculateProcessingTime
}[type]()
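    # Usage sketch: self.calculateTime(type='Setup') dispatches to
    # self.stpRng.generateNumber(); an unknown type raises a KeyError.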
# =======================================================================
    # checks if the exit of the object is assigned to a receiver
# =======================================================================
def exitIsAssignedTo(self):
return self.exitAssignedToReceiver
# =======================================================================
# assign Exit of the object
# =======================================================================
def assignExitTo(self, callerObject=None):
self.exitAssignedToReceiver=callerObject
# =======================================================================
    # unassign the exit of the object
# =======================================================================
def unAssignExit(self):
self.exitAssignedToReceiver = None
# =======================================================================
    # checks if the entry of the object is assigned to a giver
# =======================================================================
def entryIsAssignedTo(self):
return self.entryAssignedToGiver
# =======================================================================
    # assign the entry of the object to the giver
# =======================================================================
def assignEntryTo(self):
self.entryAssignedToGiver = self.giver
# =======================================================================
    # unassign the entry of the object
# =======================================================================
def unAssignEntry(self):
self.entryAssignedToGiver = None
# =======================================================================
# actions to be carried whenever the object is interrupted
# (failure, break, preemption, etc)
# =======================================================================
def interruptionActions(self):
pass
# =======================================================================
# actions to be carried whenever the object recovers
# control after an interruption (failure, break, preemption, etc)
# =======================================================================
def postInterruptionActions(self):
pass
# =======================================================================
# method to execute preemption
# =======================================================================
def preempt(self):
        # TODO: make a generic method
pass
# =======================================================================
# checks if the object is in an active position
# =======================================================================
def checkIfActive(self):
return self.Up and self.onShift
#===========================================================================
# filter that returns True if the activeObject Queue is empty and
# false if object holds entities in its queue
#===========================================================================
def activeQueueIsEmpty(self):
return len(self.Res.users)==0
# =======================================================================
# actions to be carried out when the processing of an Entity ends
# =======================================================================
def endOperationActions(self):
pass
#===========================================================================
# check if an entity is in the internal Queue of the object
#===========================================================================
def isInActiveQueue(self, entity=None):
activeObjectQueue = self.Res.users
return any(x==entity for x in activeObjectQueue)
| gpl-3.0 | -88,395,979,137,123,230 | 53.687573 | 217 | 0.514102 | false |
winhamwr/neckbeard | neckbeard/cloud_resource.py | 1 | 19469 | import logging
import time
import boto.exception
import dateutil.parser
import requests
from boto.ec2 import elb
from requests.exceptions import (
ConnectionError,
Timeout,
RequestException,
)
from simpledb import models
from neckbeard.output import fab_out_opts
NODE_AWS_TYPES = ['ec2', 'rds', 'elb']
EC2_RETIRED_STATES = ['shutting-down', 'terminated']
RDS_RETIRED_STATES = ['deleted']
logger = logging.getLogger('cloud_resource')
fab_output_hides = fab_out_opts[logger.getEffectiveLevel()]
fab_quiet = fab_output_hides + ['stderr']
# This is just a non-functional place to track configuration options to provide
# a starting point once we add actual validation
REQUIRED_CONFIGURATION = {
'ec2': [
'aws.keypair',
],
}
OPTIONAL_CONFIGURATION = {
'ec2': [
'aws.elastic_ip',
],
}
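# Rough shape of the deployment configuration these dotted keys point into
# (hypothetical values; the actual schema is defined by the deployment config):
#
#   {
#       'aws': {
#           'keypair': 'my-keypair',
#           'elastic_ip': '203.0.113.10',
#       },
#       'health_check': {
#           'status_url': '/health/',
#       },
#   }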
class InfrastructureNode(models.Model):
nodename = models.ItemName()
generation_id = models.NumberField(required=True)
# The environment name. Eg. test, beta, staging, live
deployment_name = models.Field(required=True)
# Type of node. Eg. ec2, rds, elb
aws_type = models.Field(required=True)
# Unique AWS id. Eg. `i-xxxxxx`
aws_id = models.Field(required=True)
# Unique ID within this generation of a deployment
# This determine which configuration is pulled
name = models.Field(required=True)
creation_date = models.DateTimeField(required=True)
is_running = models.NumberField(default=1, required=True)
# Is this generation the currently-active generation
is_active_generation = models.NumberField(default=0, required=True)
# Whether or not we've completed the first deploy on this node
# Used to allow the first deploy to differ from subsequent deploys
# for one-time operations per node. Idempotency is preferred, but this is a
# shortcut towards some speed improvements. We only need to do EBS volume
# mounting on the first run, for example.
initial_deploy_complete = models.NumberField(default=0, required=True)
def __init__(self, *args, **kwargs):
self.ec2conn = None
self.rdsconn = None
self.elbconn = None
self._boto_instance = None
self._deployment_info = None
super(InfrastructureNode, self).__init__(*args, **kwargs)
def __str__(self):
if self.aws_type in NODE_AWS_TYPES:
output_str = '%s:%s[%s]<%s>' % (
self.aws_type,
self.name,
self.aws_id,
self.creation_date,
)
return output_str
return super(InfrastructureNode, self).__str__()
def save(self):
# Until this is well-tested, I don't want anyone running this code and
# actually writing to a SimpleDB Domain. This is a "permanent mock"
# until we think this functionality is safe/stable
logger.critical("Called save on %s", self)
return
def get_status_output(self):
"""
Provide a detailed string representation of the instance with its
current operational/health status.
"""
if self.aws_type in NODE_AWS_TYPES:
status_str = ''
if not self.is_running:
status_str += 'RETIRED-'
else:
if self.is_operational:
status_str += 'UP-'
else:
status_str += 'INACTIVE-'
if not self.is_healthy:
status_str += 'UNHEALTHY-'
return "%s-%s" % (status_str, self)
return "UNKNOWN-%s" % self
def set_aws_conns(self, ec2conn, rdsconn):
self.ec2conn = ec2conn
self.rdsconn = rdsconn
def set_deployment_info(self, deployment_info):
self._deployment_info = deployment_info
def is_actually_running(self):
"""
Checks AWS to ensure this node hasn't been terminated.
"""
if self.aws_type == 'ec2':
if self.boto_instance:
if self.boto_instance.state not in EC2_RETIRED_STATES:
return True
elif self.aws_type == 'rds':
if self.boto_instance:
if self.boto_instance.status not in RDS_RETIRED_STATES:
return True
return False
def terminate(self):
if (self.is_active_generation and self.is_operational):
raise Exception("Can't hard-terminate an active, operational node")
if self.aws_type == 'ec2':
if self.is_actually_running():
self.boto_instance.terminate()
elif self.aws_type == 'rds':
if self.is_actually_running():
final_snapshot = self._deployment_info.get(
'final_snapshot',
None,
)
if final_snapshot:
self.boto_instance.stop(
skip_final_snapshot=False,
final_snapshot_id=final_snapshot,
)
else:
self.boto_instance.stop(
skip_final_snapshot=True, final_snapshot_id=None)
self.is_running = 0
self.save()
def retire(self):
"""
Mark this node as retired and no longer used. Useful for hung nodes.
"""
if (self.is_active_generation and self.is_operational):
raise Exception("Can't retire an active, operational node")
self.is_running = 0
self.save()
def make_temporarily_inoperative(self):
"""
        Make the given node temporarily inoperative in preparation for putting
        it back into operation shortly after.
This is the call to use for things like rotating in and out of the
loadbalancer. ``make_fully_inoperative`` should be used for planned
long-term inoperability.
"""
if self.aws_type == 'ec2':
self._remove_from_loadbalancer()
elif self.aws_type == 'rds':
pass
def _remove_from_loadbalancer(self):
"""
If this node is in a loadbalancer, remove it from that loadbalancer.
"""
if self.aws_type != 'ec2':
return
loadbalancer = self.get_loadbalancer()
if not loadbalancer:
return
# Check if this instance is even in the load balancer
if not self._instance_in_load_balancer():
logger.debug(
"_remove_from_loadbalancer: Instance %s not in loadbalancer",
self.boto_instance,
)
return
logger.info(
"Removing node from loadbalancer: %s",
loadbalancer,
)
loadbalancer.deregister_instances([self.aws_id])
def make_fully_inoperative(self):
"""
Make the given node fully inoperative. This is the call to use for
planned long-term inoperability. ``make_temporarily_inoperative``
is more useful for temporary inoperability (such as rotating in
and out of the loadbalancer).
"""
if self.aws_type == 'ec2':
elastic_ip = self.get_elastic_ip()
if elastic_ip and elastic_ip.instance_id:
if elastic_ip.instance_id == self.boto_instance.id:
logger.info(
"Dissociating elastic IP %s from instance %s",
elastic_ip,
elastic_ip.instance_id,
)
self.ec2conn.disassociate_address(elastic_ip.public_ip)
self._remove_from_loadbalancer()
elif self.aws_type == 'rds':
pass
def refresh_boto_instance(self):
self._boto_instance = None
@property
def boto_instance(self):
if not self._boto_instance:
if self.aws_type == 'ec2':
reservations = self.ec2conn.get_all_instances(
instance_ids=[self.aws_id])
if len(reservations) == 1:
self._boto_instance = reservations[0].instances[0]
elif self.aws_type == 'rds':
try:
db_instances = self.rdsconn.get_all_dbinstances(
instance_id=self.aws_id)
except boto.exception.BotoServerError:
return self._boto_instance
if len(db_instances) == 1:
self._boto_instance = db_instances[0]
return self._boto_instance
@property
def launch_time(self):
if not self.boto_instance:
return None
if self.aws_type == 'ec2':
return dateutil.parser.parse(self.boto_instance.launch_time)
elif self.aws_type == 'rds':
return dateutil.parser.parse(self.boto_instance.create_time)
def _instance_in_load_balancer(self):
"""
Determine if this instance is in its current loadbalancer.
"""
loadbalancer = self.get_loadbalancer()
if self.boto_instance is None:
return False
if loadbalancer is None:
return False
        # The comparator between instances does not necessarily work; compare
        # by id instead.
ids_in_lb = [i.id for i in loadbalancer.instances]
return self.boto_instance.id in ids_in_lb
@property
def is_operational(self):
"""
        Is this instance fully operational as defined by the deployment info,
        i.e., is it in the loadbalancer with the correct IP, or is it active
        with no pending RDS config values?
"""
if not self.boto_instance:
return False
if not self._deployment_info:
logger.critical(
"No deployment configuration found for node: %s",
self,
)
logger.critical(
"Unable to determine operational status. "
"Assuming NOT operational."
)
return False
if self.aws_type == 'ec2':
key_name = self._deployment_info['aws']['keypair']
elastic_ip = self.get_elastic_ip()
loadbalancer = self.get_loadbalancer()
if self.boto_instance.state != 'running':
logger.debug(
"is_operational: Instance %s not running",
self.boto_instance,
)
return False
if self.boto_instance.key_name != key_name:
logger.debug(
"is_operational: Instance %s has wrong key",
self.boto_instance,
)
return False
if elastic_ip:
if self.boto_instance.id != elastic_ip.instance_id:
logger.debug(
"is_operational: Instance %s has wrong elastic ip",
self.boto_instance,
)
return False
if loadbalancer:
if not self._instance_in_load_balancer():
logger.debug(
"is_operational: Instance %s not in loadbalancer",
self.boto_instance,
)
logger.debug(
'Instances in loadbalancer: %s',
loadbalancer.instances,
)
return False
health_list = loadbalancer.get_instance_health(
instances=[self.aws_id])
assert len(health_list) == 1
if health_list[0].state != 'InService':
logger.debug(
"is_operational: Node %s not healthy in loadbalancer.",
self.boto_instance,
)
logger.debug("LB health state: %s", health_list[0].state)
return False
return True
elif self.aws_type == 'rds':
if self.boto_instance.status != 'available':
logger.debug(
"is_operational: Instance %s not available",
self.boto_instance,
)
return False
# TODO: add checks for pending values and matching params
return True
return False
def get_health_check_url(self):
if 'health_check' not in self._deployment_info:
return None
if not self.boto_instance.public_dns_name:
logger.debug(
"No health check url due to no public dns name",
)
return None
health_check = self._deployment_info['health_check']
status_url = health_check['status_url']
status_url = 'http://%s%s' % (
self.boto_instance.public_dns_name,
status_url,
)
return status_url
def passes_health_check(self):
"""
Does this node currently pass the `health_check` as defined in its
configuration.
If no `health_check` is defined, returns True.
"""
status_url = self.get_health_check_url()
if not status_url:
logger.info("No health check defined. Assuming healthy.")
return True
health_check = self._deployment_info['health_check']
status_success_string = health_check['status_contains']
timeout = health_check['status_check_timeout']
try:
site_status = requests.get(status_url, timeout=timeout)
except ConnectionError:
logger.info("health_check unavailable for %s", self)
logger.debug("status url: %s", status_url)
return False
except Timeout:
logger.info("health_check timed out for %s", self)
logger.debug("status url: %s", status_url)
return False
        except RequestException as e:
logger.info("health_check raised exception for %s", self)
logger.debug("status url: %s", status_url)
logger.debug("Exception: %s", e)
return False
if status_success_string not in site_status.text:
logger.debug(
"Required string not present in health_check for %s",
self,
)
logger.debug("status url: %s", status_url)
logger.debug("Required string: %s", status_success_string)
return False
return True
@property
def is_healthy(self):
"""
Is this instance healthy according to its status checks. Healthy nodes
        are ready to perform their function, regardless of whether or not
they're currently in operation (in the Loadbalancer, with the proper
IP, etc).
"""
if not self.boto_instance:
return False
if not self._deployment_info:
logger.critical(
"No deployment configuration found for node: %s",
self,
)
logger.critical(
"Unable to determine health status. "
"Assuming NOT healthy."
)
return False
if self.aws_type == 'ec2':
key_name = self._deployment_info['aws']['keypair']
if self.boto_instance.state != 'running':
logger.debug(
"is_healthy: Instance %s not running",
self.boto_instance,
)
return False
elif self.boto_instance.key_name != key_name:
logger.debug(
"is_healthy: Instance %s has wrong key",
self.boto_instance,
)
return False
return self.passes_health_check()
elif self.aws_type == 'rds':
if self.boto_instance.status != 'available':
logger.debug("Instance %s not available" % self.boto_instance)
return False
# TODO: Check to ensure no pending values and that params match
return True
return False
def make_operational(self, force_operational=False):
if not force_operational:
if not self.is_healthy or not self.is_active_generation:
raise Exception(
"Only health nodes in the active generation "
"can be made operational"
)
if self.aws_type == 'ec2':
elastic_ip = self.get_elastic_ip()
loadbalancer = self.get_loadbalancer()
if elastic_ip and elastic_ip.instance_id:
if elastic_ip.instance_id != self.boto_instance.id:
logger.info(
"Dissociating elastic IP %s from instance %s",
elastic_ip,
elastic_ip.instance_id,
)
self.ec2conn.disassociate_address(elastic_ip.public_ip)
# Switch the elastic IP
if elastic_ip and elastic_ip.instance_id != self.boto_instance.id:
logger.info(
"Pointing IP %s to %s",
elastic_ip.public_ip,
self.boto_instance,
)
while elastic_ip.instance_id != self.boto_instance.id:
self.boto_instance.use_ip(elastic_ip)
elastic_ip = self.get_elastic_ip()
logger.info(
"Waiting 5s for ip %s to associated to %s",
elastic_ip,
self.boto_instance,
)
time.sleep(5)
logger.info(
"IP %s succesfully associated to %s",
elastic_ip,
self.boto_instance,
)
# Stick the instance in the loadbalancer
if loadbalancer:
logger.info(
"Placing node <%s> in to loadbalancer <%s>",
self,
loadbalancer,
)
loadbalancer.register_instances([self.boto_instance.id])
elif self.aws_type == 'rds':
pass
def get_loadbalancer(self):
if not self.elbconn:
self.elbconn = elb.ELBConnection(
self.ec2conn.aws_access_key_id,
self.ec2conn.aws_secret_access_key)
if not self._deployment_info.get('loadbalancer', None):
return None
elb_list = self.elbconn.get_all_load_balancers(
load_balancer_names=[self._deployment_info['loadbalancer']])
assert len(elb_list) == 1
return elb_list[0]
def get_elastic_ip(self):
configured_ip = self._deployment_info['aws'].get('elastic_ip')
if not configured_ip:
return None
ips = self.ec2conn.get_all_addresses(
[configured_ip],
)
assert len(ips) == 1
return ips[0]
def set_initial_deploy_complete(self):
"""
Record that the initial deployment operation has completed
        successfully.
"""
self.initial_deploy_complete = 1
self.save()
def verify_running_state(self):
if self.is_running == 1 and not self.is_actually_running():
self.is_running = 0
self.save()
| bsd-3-clause | 5,361,584,043,886,388,000 | 33.519504 | 79 | 0.538805 | false |
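# --- Illustrative sketch (not part of the repository above) ---
# The status-string composition from the node class, restated as a
# self-contained helper, assuming the three flags are plain booleans.
# The function name is hypothetical, not the project's API.
def status_prefix(is_running, is_operational, is_healthy):
    """Mirror the RETIRED/UP/INACTIVE/UNHEALTHY prefix logic."""
    status_str = ''
    if not is_running:
        status_str += 'RETIRED-'
    else:
        status_str += 'UP-' if is_operational else 'INACTIVE-'
        if not is_healthy:
            status_str += 'UNHEALTHY-'
    return status_str

assert status_prefix(True, True, False) == 'UP-UNHEALTHY-'
assert status_prefix(False, True, True) == 'RETIRED-'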
tjcsl/cslbot | cslbot/helpers/handler.py | 1 | 28231 | # -*- coding: utf-8 -*-
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
import base64
import collections
import configparser
import copy
import logging
import random
import re
import threading
import time
from datetime import datetime, timedelta
from typing import Callable, Dict, List
import irc
from . import (acl, arguments, control, identity, misc, orm, registry, sql,
textutils, workers)
logger = logging.getLogger(__name__)
class BotHandler(object):
def __init__(self, config: configparser.ConfigParser, connection: irc.client.ServerConnection, channels: List[str], confdir: str, idx: int):
"""Set everything up.
| kick_enabled controls whether the bot will kick people or not.
| abuselist is a dict keeping track of how many times nicks have used
| rate-limited commands.
| modules is a dict containing the commands the bot supports.
| confdir is the path to the directory where the bot's config is stored.
| db - Is a db wrapper for data storage.
"""
self.connection = connection
self.channels = channels
self.config = config
self.idx = idx
self.db = sql.Sql(config, confdir)
# FIXME: don't pass in self
self.workers = workers.Workers(self)
self.guarded: List[str] = []
self.voiced: Dict[str, Dict[str, bool]] = collections.defaultdict(dict)
self.opers: Dict[str, Dict[str, bool]] = collections.defaultdict(dict)
self.features = {'account-notify': False, 'extended-join': False, 'whox': False}
start = datetime.now()
self.uptime = {'start': start, 'reloaded': start}
self.abuselist: Dict[str, Dict[str, datetime]] = {}
self.ping_map: Dict[str, str] = {}
self.outputfilter: Dict[str, List[Callable[[str], str]]] = collections.defaultdict(list)
self.kick_enabled = True
self.who_map: Dict[int, str] = {}
self.flood_lock = threading.Lock()
self.data_lock = threading.RLock()
self.last_msg_time = datetime.now()
self.confdir = confdir
self.log_to_ctrlchan = False
def get_data(self):
"""Saves the handler's data for :func:`.reloader.do_reload`"""
data = {}
data['guarded'] = self.guarded[:]
data['voiced'] = copy.deepcopy(self.voiced)
data['opers'] = copy.deepcopy(self.opers)
data['features'] = self.features.copy()
data['uptime'] = self.uptime.copy()
data['abuselist'] = self.abuselist.copy()
data['who_map'] = self.who_map.copy()
return data
def set_data(self, data):
"""Called from :func:`.reloader.do_reload` to restore the handler's data."""
for key, val in data.items():
setattr(self, key, val)
self.uptime['reloaded'] = datetime.now()
def update_authstatus(self, nick):
if self.features['whox']:
tag = random.randint(0, 999)
self.who_map[tag] = nick
self.send_who(nick, tag)
elif self.config['feature']['servicestype'] == "ircservices":
self.rate_limited_send('privmsg', 'NickServ', 'STATUS %s' % nick)
elif self.config['feature']['servicestype'] == "atheme":
self.rate_limited_send('privmsg', 'NickServ', 'ACC %s' % nick)
def send_who(self, target, tag):
# http://faerion.sourceforge.net/doc/irc/whox.var
# n(show nicknames), a(show nickserv status), f(show channel status/modes), t(show tag)
self.rate_limited_send('who', '{} %naft,{}'.format(target, tag))
def is_admin(self, send, nick, required_role='admin'):
"""Checks if a nick is a admin.
If NickServ hasn't responded yet, then the admin is unverified,
so assume they aren't a admin.
"""
# If the required role is None, bypass checks.
if not required_role:
return True
# Current roles are admin and owner, which is a superset of admin.
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == nick).first()
if admin is None:
return False
# owner implies admin, but not the other way around.
if required_role == "owner" and admin.role != "owner":
return False
# no nickserv support, assume people are who they say they are.
if not self.config['feature'].getboolean('nickserv'):
return True
if not admin.registered:
self.update_authstatus(nick)
# We don't necessarily want to complain in all cases.
if send is not None:
send("Unverified admin: %s" % nick, target=self.config['core']['channel'])
return False
else:
if not self.features['account-notify']:
# reverify every 5min if we don't have the notification feature.
if datetime.now() - admin.time > timedelta(minutes=5):
self.update_authstatus(nick)
return True
def get_admins(self):
"""Check verification for all admins."""
# no nickserv support, assume people are who they say they are.
if not self.config['feature'].getboolean('nickserv'):
return
with self.db.session_scope() as session:
for a in session.query(orm.Permissions).all():
if not a.registered:
self.update_authstatus(a.nick)
def abusecheck(self, send, nick, target, limit, cmd):
""" Rate-limits commands.
| If a nick uses commands with the limit attr set, record the time
| at which they were used.
| If the command is used more than `limit` times in a
| minute, ignore the nick.
"""
if nick not in self.abuselist:
self.abuselist[nick] = {}
if cmd not in self.abuselist[nick]:
self.abuselist[nick][cmd] = [datetime.now()]
else:
self.abuselist[nick][cmd].append(datetime.now())
count = 0
for x in self.abuselist[nick][cmd]:
            # 60 seconds - arbitrary cutoff
if datetime.now() - x < timedelta(seconds=60):
count = count + 1
if count > limit:
msg = "%s: don't abuse scores!" if cmd == 'scores' else "%s: stop abusing the bot!"
send(msg % nick, target=target)
with self.db.session_scope() as session:
send(misc.ignore(session, nick))
return True
@staticmethod
def build_split_msg(msg, max_len):
msgs = []
msg_enc = [x.encode() for x in msg]
while sum(map(len, msg_enc)) > max_len:
split, msg_enc = misc.split_msg(msg_enc, max_len)
msgs.append(split)
msgs.append(''.join([x.decode() for x in msg_enc]).strip())
return msgs
def send(self, target, nick, msg, msgtype, ignore_length=False, filters=None):
"""Send a message.
Records the message in the log.
"""
if not isinstance(msg, str):
raise Exception("Trying to send a %s to irc, only strings allowed." % type(msg).__name__)
if filters is None:
filters = self.outputfilter[target]
for i in filters:
if target != self.config['core']['ctrlchan']:
msg = i(msg)
# Avoid spam from commands that produce excessive output.
if not ignore_length:
# Ignore everything after the first 800 chars.
msg = misc.truncate_msg(msg, 800)
# We can't send messages > 512 bytes to irc.
max_len = misc.get_max_length(target, msgtype)
msgs = self.build_split_msg(msg, max_len)
for i in msgs:
self.do_log(target, nick, i, msgtype)
if msgtype == 'action':
self.rate_limited_send('action', target, i)
else:
self.rate_limited_send('privmsg', target, i)
def rate_limited_send(self, mtype, target, msg=None):
with self.flood_lock:
elapsed = datetime.now() - self.last_msg_time
            # Don't send messages more than once every 0.5 sec.
time.sleep(max(0, 0.5 - elapsed.total_seconds()))
if msg is None:
getattr(self.connection, mtype)(target)
else:
getattr(self.connection, mtype)(target, msg)
self.last_msg_time = datetime.now()
def do_log(self, target, nick, msg, msgtype):
"""Handles logging.
| Logs to a sql db.
"""
if not isinstance(msg, str):
raise Exception("IRC doesn't like it when you send it a %s" % type(msg).__name__)
target = target.lower()
flags = 0
# Properly handle /msg +#channel
if target.startswith(('+', '@')):
target = target[1:]
with self.data_lock:
if target in self.channels:
if self.opers[target].get(nick, False):
flags |= 1
if self.voiced[target].get(nick, False):
flags |= 2
else:
target = 'private'
# FIXME: should we special-case this?
# strip ctrl chars from !creffett
msg = msg.replace('\x02\x038,4', '<rage>')
self.db.log(nick, target, flags, msg, msgtype, self.connection.server)
if self.log_to_ctrlchan:
ctrlchan = self.config['core']['ctrlchan']
if target != ctrlchan:
ctrlmsg = "%s:%s:%s:%s" % (target, msgtype, nick, msg)
                # If we call self.send, we'll get an infinite loop.
self.connection.privmsg(ctrlchan, ctrlmsg.strip())
def do_part(self, cmdargs, nick, target, msgtype, send, c):
"""Leaves a channel.
Prevent user from leaving the primary channel.
"""
channel = self.config['core']['channel']
botnick = self.config['core']['nick']
if not cmdargs:
# don't leave the primary channel
if target == channel:
send("%s must have a home." % botnick)
return
else:
cmdargs = target
if not cmdargs.startswith(('#', '+', '@')):
cmdargs = '#' + cmdargs
# don't leave the primary channel
if cmdargs == channel:
send("%s must have a home." % botnick)
return
# don't leave the control channel
if cmdargs == self.config['core']['ctrlchan']:
send("%s must remain under control, or bad things will happen." % botnick)
return
self.send(cmdargs, nick, "Leaving at the request of %s" % nick, msgtype)
c.part(cmdargs)
def do_join(self, cmdargs, nick, msgtype, send, c):
"""Join a channel.
| Checks if bot is already joined to channel.
"""
if not cmdargs:
send("Join what?")
return
if cmdargs == '0':
send("I'm sorry, Dave. I'm afraid I can't do that.")
return
if not cmdargs.startswith(('#', '+', '@')):
cmdargs = '#' + cmdargs
cmd = cmdargs.split()
# FIXME: use argparse
if cmd[0] in self.channels and not (len(cmd) > 1 and cmd[1] == "force"):
send("%s is already a member of %s" % (self.config['core']['nick'], cmd[0]))
return
c.join(cmd[0])
self.send(cmd[0], nick, "Joined at the request of " + nick, msgtype)
def check_mode(self, mode):
if mode[2] != self.connection.real_nickname:
return False
if (mode[0], mode[1]) == ('-', 'o'):
return True
elif (mode[0], mode[1]) == ('+', 'b'):
return True
return False
def do_mode(self, target, msg, nick, send):
"""reop and handle guard violations."""
mode_changes = irc.modes.parse_channel_modes(msg)
with self.data_lock:
for change in mode_changes:
if change[1] == 'v':
self.voiced[target][change[2]] = True if change[0] == '+' else False
if change[1] == 'o':
self.opers[target][change[2]] = True if change[0] == '+' else False
# reop
# FIXME: handle -o+o msbobBot msbobBot
if [x for x in mode_changes if self.check_mode(x)]:
send("%s: :(" % nick, target=target)
# Assume bot admins know what they're doing.
if not self.is_admin(None, nick):
send("OP %s" % target, target='ChanServ')
send("UNBAN %s" % target, target='ChanServ')
if len(self.guarded) > 0:
# if user is guarded and quieted, devoiced, or deopped, fix that
regex = r"(.*(-v|-o|\+q|\+b)[^ ]*) (%s)" % "|".join(self.guarded)
match = re.search(regex, msg)
if match and nick not in [match.group(3), self.connection.real_nickname]:
modestring = "+voe-qb %s" % (" ".join([match.group(3)] * 5))
self.connection.mode(target, modestring)
send('Mode %s on %s by the guard system' % (modestring, target), target=self.config['core']['ctrlchan'])
def do_kick(self, send, target, nick, msg, slogan=True):
"""Kick users.
- If kick is disabled, don't do anything.
- If the bot is not a op, rage at a op.
- Kick the user.
"""
if not self.kick_enabled:
return
if target not in self.channels:
send("%s: you're lucky, private message kicking hasn't been implemented yet." % nick)
return
with self.data_lock:
ops = [k for k, v in self.opers[target].items() if v]
botnick = self.config['core']['nick']
if botnick not in ops:
ops = ['someone'] if not ops else ops
send(textutils.gen_creffett("%s: /op the bot" % random.choice(ops)), target=target)
elif random.random() < 0.01 and msg == "shutting caps lock off":
if nick in ops:
send("%s: HUEHUEHUE GIBE CAPSLOCK PLS I REPORT U" % nick, target=target)
else:
self.connection.kick(target, nick, "HUEHUEHUE GIBE CAPSLOCK PLS I REPORT U")
else:
msg = textutils.gen_slogan(msg).upper() if slogan else msg
if nick in ops:
send("%s: %s" % (nick, msg), target=target)
else:
self.connection.kick(target, nick, msg)
def do_args(self, modargs, send, nick, target, source, name, msgtype):
"""Handle the various args that modules need."""
realargs = {}
args = {
'nick': nick,
'handler': self,
'db': None,
'config': self.config,
'source': source,
'name': name,
'type': msgtype,
'botnick': self.connection.real_nickname,
'target': target if target[0] == "#" else "private",
'do_kick': lambda target, nick, msg: self.do_kick(send, target, nick, msg),
'is_admin': lambda nick: self.is_admin(send, nick),
'abuse': lambda nick, limit, cmd: self.abusecheck(send, nick, target, limit, cmd)
}
for arg in modargs:
if arg in args:
realargs[arg] = args[arg]
else:
raise Exception("Invalid Argument: %s" % arg)
return realargs
def do_welcome(self):
"""Do setup when connected to server.
- Join the primary channel.
- Join the control channel.
"""
self.rate_limited_send('join', self.config['core']['channel'])
self.rate_limited_send('join', self.config['core']['ctrlchan'], self.config['auth']['ctrlkey'])
# We use this to pick up info on admins who aren't currently in a channel.
self.workers.defer(5, False, self.get_admins)
extrachans = self.config['core']['extrachans']
if extrachans:
for chan in [x.strip() for x in extrachans.split(',')]:
self.rate_limited_send('join', chan)
def is_ignored(self, nick):
with self.db.session_scope() as session:
return session.query(orm.Ignore).filter(orm.Ignore.nick == nick).count()
def get_filtered_send(self, cmdargs, send, target):
"""Parse out any filters."""
parser = arguments.ArgParser(self.config)
parser.add_argument('--filter')
try:
filterargs, remainder = parser.parse_known_args(cmdargs)
except arguments.ArgumentException as ex:
return str(ex), None
cmdargs = ' '.join(remainder)
if filterargs.filter is None:
return cmdargs, send
filter_list, output = textutils.append_filters(filterargs.filter)
if filter_list is None:
return output, None
# define a new send to handle filter chaining
def filtersend(msg, mtype='privmsg', target=target, ignore_length=False):
self.send(target, self.connection.real_nickname, msg, mtype, ignore_length, filters=filter_list)
return cmdargs, filtersend
def do_rejoin(self, c, e):
# If we're still banned, this will trigger a bannedfromchan event so we'll try again.
if e.arguments[0] not in self.channels:
c.join(e.arguments[0])
def handle_event(self, msg, send, c, e):
if e.type == 'whospcrpl':
self.handle_who(e)
elif e.type == 'account':
self.handle_account(e)
elif e.type == 'authenticate':
self.handle_authenticate(e)
elif e.type == 'bannedfromchan':
self.workers.defer(5, False, self.do_rejoin, c, e)
elif e.type == 'cap':
self.handle_cap(e)
elif e.type in ['ctcpreply', 'nosuchnick']:
misc.ping(self.ping_map, c, e, datetime.now())
elif e.type == 'error':
logger.error(e.target)
elif e.type == 'featurelist':
if 'WHOX' in e.arguments:
self.features['whox'] = True
elif e.type == 'nick':
self.handle_nick(send, e)
elif e.type == 'nicknameinuse':
self.connection.nick('Guest%d' % random.getrandbits(20))
elif e.type == 'privnotice':
if e.source.nick == 'NickServ':
# FIXME: don't pass self
acl.set_admin(msg, self)
elif e.type == 'welcome':
self.handle_welcome()
@property
def serverpass(self):
return self.config['auth']['serverpass'].split(',')[self.idx].strip()
def handle_authenticate(self, e):
user = self.config['core']['nick']
if e.target == '+':
token = base64.b64encode('\0'.join([user, user, self.serverpass]).encode())
self.connection.send_raw('AUTHENTICATE %s' % token.decode())
self.connection.cap('END')
def handle_account(self, e):
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.source.nick).first()
if admin is not None:
if e.target == '*':
admin.registered = False
else:
admin.registered = True
admin.time = datetime.now()
def handle_welcome(self):
user = self.config['core']['nick']
logger.info("Connected to server %s", self.connection.server)
if self.config.getboolean('feature', 'nickserv') and self.connection.real_nickname != self.config['core']['nick']:
self.connection.privmsg('NickServ', 'REGAIN %s %s' % (user, self.serverpass))
self.do_welcome()
def handle_who(self, e):
# arguments: tag,nick,modes,account
# modes = H(here) or G(away), +(voice), @(oper)
        # account is the nickserv account if authed, else 0
# properly track voiced status.
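        # e.g. a reply for tag 42 might carry e.arguments == ['42', 'alice', 'H@', 'alice']
        # (illustrative values: here + oper, authenticated as 'alice')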
location = self.who_map[int(e.arguments[0])]
# FIXME: devoice if G in modes
self.voiced[location][e.arguments[1]] = '+' in e.arguments[2]
self.opers[location][e.arguments[1]] = '@' in e.arguments[2]
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.arguments[1]).first()
if admin is not None:
if e.arguments[1] == e.arguments[3]:
admin.registered = True
admin.time = datetime.now()
def handle_cap(self, e):
if e.arguments[0] == 'ACK':
if e.arguments[1].strip() == 'sasl':
self.connection.send_raw('AUTHENTICATE PLAIN')
elif e.arguments[1].strip() == 'account-notify':
self.features['account-notify'] = True
elif e.arguments[1].strip() == 'extended-join':
self.features['extended-join'] = True
def handle_nick(self, send, e):
with self.data_lock:
for channel in misc.get_channels(self.channels, e.target):
self.do_log(channel, e.source.nick, e.target, 'nick')
# Move the voice+op status to the new nick
if e.source.nick in self.voiced[channel].keys(): # In case we somehow didn't set the voice state on the old nick
self.voiced[channel][e.target] = self.voiced[channel].pop(e.source.nick)
if e.source.nick in self.opers[channel].keys(): # As above, for ops
self.opers[channel][e.target] = self.opers[channel].pop(e.source.nick)
if identity.handle_nick(self, e):
for x in misc.get_channels(self.channels, e.target):
self.do_kick(send, x, e.target, "identity crisis")
def handle_join(self, c, e, target, send):
# Get status for all nicks in-channel when we join, or the new nick when somebody else joins.
if self.features['whox']:
tag = random.randint(0, 999)
self.who_map[tag] = target
if e.source.nick == c.real_nickname:
self.send_who(target, tag)
else:
self.send_who(e.source.nick, tag)
if e.source.nick == c.real_nickname:
send("Joined channel %s" % target, target=self.config['core']['ctrlchan'])
elif self.features['extended-join']:
with self.db.session_scope() as session:
admin = session.query(orm.Permissions).filter(orm.Permissions.nick == e.source.nick).first()
if admin is not None:
if e.arguments[0] == e.source.nick:
admin.registered = True
admin.time = datetime.now()
else:
admin.registered = False
def get_cmd(self, msg):
cmd = msg.split()[0]
cmdchar = self.config['core']['cmdchar']
cmdlen = len(cmd) + 1
# FIXME: figure out a better way to handle !s
if cmd.startswith('%ss' % cmdchar):
# escape special regex chars
raw_cmdchar = '\\' + cmdchar if re.match(r'[\[\].^$*+?]', cmdchar) else cmdchar
match = re.match(r'%ss(\W)' % raw_cmdchar, cmd)
if match:
cmd = cmd.split(match.group(1))[0]
cmdlen = len(cmd)
cmdargs = msg[cmdlen:]
cmd_name = cmd[len(cmdchar):].lower() if cmd.startswith(cmdchar) else None
return cmd_name, cmdargs
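        # Worked examples (illustrative), assuming cmdchar is '!':
        #   self.get_cmd('!morse hi')   -> ('morse', 'hi')
        #   self.get_cmd('!s/foo/bar/') -> ('s', '/foo/bar/')  # substitution shorthand
        #   self.get_cmd('!scores 5')   -> ('scores', '5')     # \W in the regex keeps this intact
        #   self.get_cmd('hello there') -> (None, 'there')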
def run_cmd(self, send, nick, target, cmd_name, cmdargs, e):
cmdargs, filtersend = self.get_filtered_send(cmdargs, send, target)
if filtersend is None:
send(cmdargs)
return
cmd_obj = registry.command_registry.get_command(cmd_name)
if cmd_obj.is_limited() and self.abusecheck(send, nick, target, cmd_obj.limit, cmd_name):
return
if not self.is_admin(send, nick, cmd_obj.required_role):
send("Insufficent privileges for command.")
return
args = self.do_args(cmd_obj.args, send, nick, target, e.source, cmd_name, e.type)
cmd_obj.run(filtersend, cmdargs, args, cmd_name, nick, target, self)
def handle_kick(self, c, e, target, send):
if e.arguments[0] == c.real_nickname:
send("Kicked from channel %s" % target, target=self.config['core']['ctrlchan'])
# Auto-rejoin after 5 seconds.
self.workers.defer(5, False, self.connection.join, target)
def handle_hooks(self, send, nick, target, e, msg):
if self.config['feature'].getboolean('hooks'):
for h in registry.hook_registry.get_hook_objects():
realargs = self.do_args(h.args, send, nick, target, e.source, h, e.type)
h.run(send, msg, e.type, self, target, realargs)
def handle_msg(self, c, e):
"""The Heart and Soul of IrcBot."""
if e.type not in ['authenticate', 'error', 'join', 'part', 'quit']:
nick = e.source.nick
else:
nick = e.source
if e.arguments is None:
msg = ""
else:
msg = " ".join(e.arguments).strip()
# Send the response to private messages to the sending nick.
target = nick if e.type == 'privmsg' else e.target
def send(msg, mtype='privmsg', target=target, ignore_length=False):
self.send(target, self.connection.real_nickname, msg, mtype, ignore_length)
if e.type in [
'account', 'authenticate', 'bannedfromchan', 'cap', 'ctcpreply', 'error', 'featurelist', 'nosuchnick', 'nick', 'nicknameinuse',
'privnotice', 'welcome', 'whospcrpl'
]:
self.handle_event(msg, send, c, e)
return
# ignore empty messages
if not msg and e.type != 'join':
return
self.do_log(target, nick, msg, e.type)
if e.type == 'mode':
self.do_mode(target, msg, nick, send)
return
if e.type == 'join':
self.handle_join(c, e, target, send)
return
if e.type == 'part':
if nick == c.real_nickname:
send("Parted channel %s" % target, target=self.config['core']['ctrlchan'])
return
if e.type == 'kick':
self.handle_kick(c, e, target, send)
return
if e.target == self.config['core']['ctrlchan'] and self.is_admin(None, nick):
control.handle_ctrlchan(self, msg, nick, send)
if self.is_ignored(nick) and not self.is_admin(None, nick):
return
self.handle_hooks(send, nick, target, e, msg)
# We only process hooks for notices, not commands.
if e.type == 'pubnotice':
return
msg = misc.get_cmdchar(self.config, c, msg, e.type)
cmd_name, cmdargs = self.get_cmd(msg)
if registry.command_registry.is_registered(cmd_name):
self.run_cmd(send, nick, target, cmd_name, cmdargs, e)
# special commands
elif cmd_name == 'reload':
with self.db.session_scope() as session:
if session.query(orm.Permissions).filter(orm.Permissions.nick == nick).count():
send("Aye Aye Capt'n")
| gpl-2.0 | 8,572,865,770,078,463,000 | 40.516176 | 144 | 0.562715 | false |
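# --- Illustrative sketch (not part of the repository above) ---
# The same byte-budget idea behind build_split_msg(), restated without the
# cslbot-internal misc helpers: split a unicode message so that every chunk
# encodes to at most max_len bytes (IRC lines are capped at 512 bytes).
# The helper name is hypothetical.
def split_for_irc(msg, max_len):
    chunks, current, size = [], [], 0
    for ch in msg:
        b = len(ch.encode('utf-8'))
        if size + b > max_len and current:
            # flush before the byte budget would be exceeded
            chunks.append(''.join(current))
            current, size = [], 0
        current.append(ch)
        size += b
    if current:
        chunks.append(''.join(current))
    return chunks

assert all(len(c.encode('utf-8')) <= 20 for c in split_for_irc('héllo wörld ' * 10, 20))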
frank-und-freunde/Lunchez | functions.py | 1 | 1594 | import math
from datetime import datetime
def weekDay(year, month, day):
offset = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
week = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
afterFeb = 1
if month > 2:
afterFeb = 0
aux = year - 1700 - afterFeb
dayOfWeek = 5
dayOfWeek += (aux + afterFeb) * 365
dayOfWeek += aux / 4 - aux / 100 + (aux + 100) / 400
dayOfWeek += offset[month - 1] + (day - 1)
dayOfWeek %= 7
return week[math.floor(dayOfWeek)]
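# Worked example (illustrative): weekDay(2015, 6, 1) -> 'Monday'
# (1 June 2015 was indeed a Monday; the fractional leap-year terms are
# truncated by the final math.floor()).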
Today = weekDay(int(str(datetime.now())[:4]), int(str(datetime.now())[5:7].lstrip('0')), int(str(datetime.now())[8:10]))
def restaurants(spots):
destination = ''
for x in range(0, len(spots)):
entry = ''
if 'dayoff' in spots[x] and spots[x]['dayoff'] == Today:
entry = ''
elif 'vacationFrom' in spots[x] and spots[x]['vacationFrom'] < str(datetime.now()) < spots[x]['vacationTo']:
entry = ''
else:
            if 'menu' in spots[x] and 'credit' in spots[x]:  # lunch spot accepts a payment method other than cash, so show the card emoji
entry = "<" + spots[x]['location'] + "|:" + spots[x]['number'] + ":> <" + spots[x]['menu'] + "|" + spots[x]['restaurant'] + "> :credit_card:\n"
elif 'menu' in spots[x]:
entry = "<" + spots[x]['location'] + "|:" + spots[x]['number'] + ":> <" + spots[x]['menu'] + "|" + spots[x]['restaurant'] + ">\n"
else:
entry = "<" + spots[x]['location'] + "|:" + spots[x]['number'] + ":> " + spots[x]['restaurant'] + "\n"
destination += entry
return destination
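# An illustrative, hypothetical entry for restaurants(); the keys below are
# exactly the ones the loop reads:
# spot = {
#     'restaurant': 'Pasta Place',            # display name
#     'number': 'one',                        # slack emoji name for the index
#     'location': 'https://example.com/map',  # link target
#     'menu': 'https://example.com/menu',     # optional
#     'credit': True,                         # optional: accepts cards
#     'dayoff': 'Monday',                     # optional weekly closing day
#     'vacationFrom': '2015-08-01 00:00',     # optional, compared as strings
#     'vacationTo': '2015-08-15 00:00',       # against str(datetime.now())
# }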
| mit | 5,577,577,409,626,868,000 | 40.947368 | 151 | 0.557089 | false |
webrecorder/warcio | test/test_limitreader.py | 1 | 1457 | from warcio.limitreader import LimitReader
from contextlib import closing
from io import BytesIO
class TestLimitReader(object):
def test_limit_reader_1(self):
assert b'abcdefghji' == LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 10).read(26)
def test_limit_reader_2(self):
assert b'abcdefgh' == LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 8).readline(26)
def test_limit_reader_3(self):
reader = LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 8)
new_reader = LimitReader.wrap_stream(reader, 4)
assert reader == new_reader
assert b'abcd' == new_reader.readline(26)
#assert b'abcd' == LimitReader.wrap_stream(LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 8), 4).readline(26)
def test_limit_reader_multiple_read(self):
reader = LimitReader(BytesIO(b'abcdefghjiklmnopqrstuvwxyz'), 10)
string = None
for x in [2, 2, 20]:
string = reader.read(x)
assert b'efghji' == string
def test_limit_reader_zero(self):
assert b'' == LimitReader(BytesIO(b'a'), 0).readline(0)
def test_limit_reader_invalid_wrap(self):
b = BytesIO(b'some data')
assert LimitReader.wrap_stream(b, 'abc') == b
def test_limit_reader_close(self):
reader = LimitReader(BytesIO(b'abcdefg'), 3)
with closing(reader):
assert b'abc' == reader.read(10)
assert reader.tell() == 3
| apache-2.0 | 2,581,903,342,736,774,000 | 36.358974 | 122 | 0.654084 | false |
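# --- Illustrative sketch (not part of the repository above) ---
# Typical use of LimitReader, assuming warcio is installed: cap reads on a
# larger stream, e.g. to consume exactly Content-Length bytes of an HTTP body
# without overrunning into the next record. Only the API exercised by the
# tests above is used here.
from io import BytesIO
from warcio.limitreader import LimitReader

stream = BytesIO(b'HTTP body bytes|next record...')
body = LimitReader(stream, 15)    # pretend Content-Length: 15
assert body.read(1024) == b'HTTP body bytes'
assert stream.read(1) == b'|'     # the underlying stream is left in place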
saintdragon2/python-3-lecture-2015 | homework_checker/civil_hw_personal_list/civil_hw_list.py | 1 | 2177 | from __future__ import print_function
import glob
import hw_sungyong_list
import os
# os.chdir("homework_01")
list_a = [10, 30, 40, -20, 15]
list_b = [-90, 20, 50, 2, 4]
list_c = ['hello', 34, 0, 12]
num_p = 3
num_q = 7
sy_a = hw_sungyong_list.square_of_list(list_a, num_p)
sy_b = hw_sungyong_list.square_of_list(list_b, num_q)
sy_x = hw_sungyong_list.gap(list_c)
print(sy_a)
print(sy_b)
print(sy_x)
f = open('result.txt', 'w')
for file in glob.glob("hw_civil_list_*.py"):
point = 0
name = file.replace('.py', '')
print(name)
mode = __import__(name)
a = mode.square_of_list(list_a, num_p)
b = mode.square_of_list(list_b, num_p)
c = mode.gap(list_c)
message = ''
point = 0
if a == sy_a:
point += 3
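    # '문자열이 있습니다' translates to "there is a string" (expected output,
    # left untranslated so it still matches the graded submissions)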
if b == '문자열이 있습니다':
point += 3
if c == sy_x:
point += 3
print(point)
f.write(name + '\t' + str( point ) + '\n')
f.close()
'''
dan_num = 4
five_num = 35
three_num = 369
fifteen_num = 15 * 7
sungyong_dan = sungyong_dan_gg.dan(dan_num)
five = sungyong_dan_gg.baesoo(five_num)
three = sungyong_dan_gg.baesoo(three_num)
fifteen = sungyong_dan_gg.baesoo(fifteen_num)
f = open('result.txt', 'w')
for file in glob.glob("hw_zest*.py"):
point = 0
name = file.replace('.py', '')
print(name)
mode = __import__(name)
a = mode.dan(dan_num)
message = ''
if type(a) is str and sungyong_dan in a:
point += 5
else:
message += 'dan failed!\t'
five_result = mode.baesoo(five_num)
if type(five_result) is str and '5의 배수입니다' in five_result:
point += 2
else:
message += '5 failed!\t'
three_result = mode.baesoo(three_num)
if type(three_result) is str and '3의 배수입니다' in three_result:
point += 2
else:
message += '3 failed!\t'
fifteen_result = mode.baesoo(fifteen_num)
if type(fifteen_result) is str and '3과 5의 공배수입니다' in mode.baesoo(fifteen_num):
point += 2
else:
message += '3 & 5 failed!\t'
f.write(name +'\t'+ str(point) + '\t' + message + '\n')
# from homework_01 import eval(name)
f.close()
''' | mit | 4,742,109,069,305,678,000 | 19.403846 | 82 | 0.573314 | false |
hschilling/CADRE-1 | src/CADRE/test/test_assembly.py | 1 | 9717 | """ Run the CADRE model and make sure the value compare to the saved pickle."""
from __future__ import print_function
import os
import pickle
import unittest
import warnings
import numpy as np
from openmdao.core.problem import Problem
from CADRE.CADRE_group import CADRE
# Ignore the numerical warnings from performing the rel error calc.
warnings.simplefilter("ignore")
idx = '0'
setd = {}
fpath = os.path.dirname(os.path.realpath(__file__))
data = pickle.load(open(fpath + "/data1346.pkl", 'rb'))
for key in data.keys():
if key[0] == idx or not key[0].isdigit():
if not key[0].isdigit():
shortkey = key
else:
shortkey = key[2:]
# set floats correctly
if data[key].shape == (1,) and shortkey != "iSOC":
setd[shortkey] = data[key][0]
else:
setd[shortkey] = data[key]
n = setd['P_comm'].size
m = setd['CP_P_comm'].size
assembly = Problem(root=CADRE(n, m))
#assembly.setup()
assembly.setup(check=False)
setd['r_e2b_I0'] = np.zeros(6)
setd['r_e2b_I0'][:3] = data[idx + ":r_e2b_I0"]
setd['r_e2b_I0'][3:] = data[idx + ":v_e2b_I0"]
setd['Gamma'] = data[idx + ":gamma"]
assembly['CP_P_comm'] = setd['CP_P_comm']
assembly['LD'] = setd['LD']
assembly['cellInstd'] = setd['cellInstd']
assembly['CP_gamma'] = setd['CP_gamma']
assembly['finAngle'] = setd['finAngle']
assembly['lon'] = setd['lon']
assembly['CP_Isetpt'] = setd['CP_Isetpt']
assembly['antAngle'] = setd['antAngle']
assembly['t'] = setd['t']
assembly['r_e2b_I0'] = setd['r_e2b_I0']
assembly['lat'] = setd['lat']
assembly['alt'] = setd['alt']
assembly['iSOC'] = setd['iSOC']
assembly.run()
class Testcase_CADRE_assembly(unittest.TestCase):
""" Tests the CADRE assembly. """
def compare(self, compname, inputs, outputs):
for var in inputs + outputs:
computed = assembly[var]
actual = setd[var]
if isinstance(computed, np.ndarray):
rel = np.linalg.norm(
actual - computed) / np.linalg.norm(actual)
else:
rel = np.abs(actual - computed) / np.abs(actual)
if np.mean(actual) > 1e-3 or np.mean(computed) > 1e-3:
#print(var)
#print(computed)
#print(actual)
assert rel <= 1e-3
def test_Comm_DataDownloaded(self):
compname = 'Comm_DataDownloaded'
inputs = ['Dr']
outputs = ['Data']
self.compare(compname, inputs, outputs)
def test_Comm_AntRotation(self):
compname = 'Comm_AntRotation'
inputs = ['antAngle']
outputs = ['q_A']
self.compare(compname, inputs, outputs)
def test_Comm_BitRate(self):
compname = 'Comm_BitRate'
inputs = ['P_comm', 'gain', 'GSdist', 'CommLOS']
outputs = ['Dr']
self.compare(compname, inputs, outputs)
def test_Comm_Distance(self):
compname = 'Comm_Distance'
inputs = ['r_b2g_A']
outputs = ['GSdist']
self.compare(compname, inputs, outputs)
def test_Comm_EarthsSpin(self):
compname = 'Comm_EarthsSpin'
inputs = ['t']
outputs = ['q_E']
self.compare(compname, inputs, outputs)
def test_Comm_EarthsSpinMtx(self):
compname = 'Comm_EarthsSpinMtx'
inputs = ['q_E']
outputs = ['O_IE']
self.compare(compname, inputs, outputs)
def test_Comm_GainPattern(self):
compname = 'Comm_GainPattern'
inputs = ['azimuthGS', 'elevationGS']
outputs = ['gain']
self.compare(compname, inputs, outputs)
def test_Comm_GSposEarth(self):
compname = 'Comm_GSposEarth'
inputs = ['lon', 'lat', 'alt']
outputs = ['r_e2g_E']
self.compare(compname, inputs, outputs)
def test_Comm_GSposECI(self):
compname = 'Comm_GSposECI'
inputs = ['O_IE', 'r_e2g_E']
outputs = ['r_e2g_I']
self.compare(compname, inputs, outputs)
def test_Comm_LOS(self):
compname = 'Comm_LOS'
inputs = ['r_b2g_I', 'r_e2g_I']
outputs = ['CommLOS']
self.compare(compname, inputs, outputs)
def test_Comm_VectorAnt(self):
compname = 'Comm_VectorAnt'
inputs = ['r_b2g_B', 'O_AB']
outputs = ['r_b2g_A']
self.compare(compname, inputs, outputs)
def test_Comm_VectorBody(self):
compname = 'Comm_VectorBody'
inputs = ['r_b2g_I', 'O_BI']
outputs = ['r_b2g_B']
self.compare(compname, inputs, outputs)
def test_Comm_VectorECI(self):
compname = 'Comm_VectorECI'
inputs = ['r_e2g_I', 'r_e2b_I']
outputs = ['r_b2g_I']
self.compare(compname, inputs, outputs)
def test_Comm_VectorSpherical(self):
compname = 'Comm_VectorSpherical'
inputs = ['r_b2g_A']
outputs = ['azimuthGS', 'elevationGS']
self.compare(compname, inputs, outputs)
def test_ThermalTemperature(self):
compname = 'ThermalTemperature'
inputs = ['exposedArea', 'cellInstd', 'LOS', 'P_comm']
outputs = ['temperature']
self.compare(compname, inputs, outputs)
def test_Attitude_Angular(self):
compname = 'Attitude_Angular'
inputs = ['O_BI', 'Odot_BI']
outputs = ['w_B']
self.compare(compname, inputs, outputs)
def test_Attitude_AngularRates(self):
compname = 'Attitude_AngularRates'
inputs = ['w_B']
outputs = ['wdot_B']
self.compare(compname, inputs, outputs)
def test_Attitude_Attitude(self):
compname = 'Attitude_Attitude'
inputs = ['r_e2b_I']
outputs = ['O_RI']
self.compare(compname, inputs, outputs)
def test_Attitude_Roll(self):
compname = 'Attitude_Roll'
inputs = ['Gamma']
outputs = ['O_BR']
self.compare(compname, inputs, outputs)
def test_Attitude_RotationMtx(self):
compname = 'Attitude_RotationMtx'
inputs = ['O_BR', 'O_RI']
outputs = ['O_BI']
self.compare(compname, inputs, outputs)
def test_Attitude_RotationMtxRates(self):
compname = 'Attitude_RotationMtxRates'
inputs = ['O_BI']
outputs = ['Odot_BI']
self.compare(compname, inputs, outputs)
# def test_Attitude_Sideslip(self):
# compname = 'Attitude_Sideslip'
# inputs = ['r_e2b_I', 'O_BI']
# outputs = ['v_e2b_B']
# self.compare(compname, inputs, outputs)
def test_Attitude_Torque(self):
compname = 'Attitude_Torque'
inputs = ['w_B', 'wdot_B']
outputs = ['T_tot']
self.compare(compname, inputs, outputs)
def test_Sun_LOS(self):
compname = 'Sun_LOS'
inputs = ['r_e2b_I', 'r_e2s_I']
outputs = ['LOS']
self.compare(compname, inputs, outputs)
def test_Sun_PositionBody(self):
compname = 'Sun_PositionBody'
inputs = ['O_BI', 'r_e2s_I']
outputs = ['r_e2s_B']
self.compare(compname, inputs, outputs)
def test_Sun_PositionECI(self):
compname = 'Sun_PositionECI'
inputs = ['t', 'LD']
outputs = ['r_e2s_I']
self.compare(compname, inputs, outputs)
def test_Sun_PositionSpherical(self):
compname = 'Sun_PositionSpherical'
inputs = ['r_e2s_B']
outputs = ['azimuth', 'elevation']
self.compare(compname, inputs, outputs)
def test_Solar_ExposedArea(self):
compname = 'Solar_ExposedArea'
inputs = ['finAngle', 'azimuth', 'elevation']
outputs = ['exposedArea']
self.compare(compname, inputs, outputs)
def test_Power_CellVoltage(self):
compname = 'Power_CellVoltage'
inputs = ['LOS', 'temperature', 'exposedArea', 'Isetpt']
outputs = ['V_sol']
self.compare(compname, inputs, outputs)
def test_Power_SolarPower(self):
compname = 'Power_SolarPower'
inputs = ['V_sol', 'Isetpt']
outputs = ['P_sol']
self.compare(compname, inputs, outputs)
def test_Power_Total(self):
compname = 'Power_Total'
inputs = ['P_sol', 'P_comm', 'P_RW']
outputs = ['P_bat']
self.compare(compname, inputs, outputs)
# def test_ReactionWheel_Motor(self):
# compname = 'ReactionWheel_Motor'
# inputs = ['T_RW', 'w_B', 'w_RW']
# outputs = ['T_m']
# self.compare(compname, inputs, outputs)
def test_ReactionWheel_Dynamics(self):
compname = 'ReactionWheel_Dynamics'
inputs = ['w_B', 'T_RW']
outputs = ['w_RW']
self.compare(compname, inputs, outputs)
def test_ReactionWheel_Power(self):
compname = 'ReactionWheel_Power'
inputs = ['w_RW', 'T_RW']
outputs = ['P_RW']
self.compare(compname, inputs, outputs)
def test_ReactionWheel_Torque(self):
compname = 'ReactionWheel_Torque'
inputs = ['T_tot']
outputs = ['T_RW']
self.compare(compname, inputs, outputs)
def test_BatterySOC(self):
compname = 'BatterySOC'
inputs = ['P_bat', 'temperature']
outputs = ['SOC']
self.compare(compname, inputs, outputs)
def test_BatteryPower(self):
compname = 'BatteryPower'
inputs = ['SOC', 'temperature', 'P_bat']
outputs = ['I_bat']
self.compare(compname, inputs, outputs)
def test_BatteryConstraints(self):
compname = 'BatteryConstraints'
inputs = ['I_bat', 'SOC']
outputs = ['ConCh', 'ConDs', 'ConS0', 'ConS1']
self.compare(compname, inputs, outputs)
if __name__ == "__main__":
unittest.main()
| apache-2.0 | -7,118,911,251,664,067,000 | 23.979434 | 79 | 0.571164 | false |
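# --- Illustrative sketch (not part of the repository above) ---
# The acceptance criterion used in Testcase_CADRE_assembly.compare(),
# restated as a stand-alone helper: a quantity passes if its relative error
# against the reference is at most 1e-3, and near-zero pairs are skipped.
# The helper name is hypothetical.
import numpy as np

def passes(actual, computed, tol=1e-3):
    actual, computed = np.asarray(actual), np.asarray(computed)
    rel = np.linalg.norm(actual - computed) / np.linalg.norm(actual)
    if np.mean(actual) > 1e-3 or np.mean(computed) > 1e-3:
        return rel <= tol
    return True  # both effectively zero: nothing to compare

assert passes([1.0, 2.0], [1.0005, 2.001])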
yostashiro/awo-custom | sale_line_quant_extended/wizard/stock_return_picking.py | 1 | 1645 | # -*- coding: utf-8 -*-
# Odoo, Open Source Management Solution
# Copyright (C) 2016 Rooms For (Hong Kong) Limited T/A OSCG
# <https://www.odoo-asia.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp import models, api
class StockReturnPicking(models.TransientModel):
_inherit = "stock.return.picking"
@api.model
def default_get(self, fields):
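        """Pre-fill return lines with the lot of the original move.

        For products whose category enforces quantity 1, look up the quant
        produced by the original move and copy its lot onto the return line,
        so the exact serialized unit is sent back.
        """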
return_pick = super(StockReturnPicking, self).default_get(fields)
if 'product_return_moves' in return_pick:
return_moves = return_pick['product_return_moves']
for move in return_moves:
if self.env['product.product'].browse(move['product_id']).\
product_tmpl_id.categ_id.enforce_qty_1:
quant = self.env['stock.quant'].search(
[('history_ids', 'in', move['move_id'])])
if quant and quant.lot_id:
move['lot_id'] = quant.lot_id.id
return return_pick
| lgpl-3.0 | -1,805,168,410,514,851,300 | 43.459459 | 77 | 0.642553 | false |
steelsoul/hide_to_tray | interface5.py | 1 | 5039 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'interface.ui'
#
# Created by: PyQt5 UI code generator 5.14.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.setWindowModality(QtCore.Qt.NonModal)
MainWindow.setEnabled(True)
MainWindow.resize(401, 251)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(401, 251))
MainWindow.setMaximumSize(QtCore.QSize(401, 251))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("../../../.designer/backup/bomb.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
icon.addPixmap(QtGui.QPixmap("bomb.png"), QtGui.QIcon.Normal, QtGui.QIcon.On)
MainWindow.setWindowIcon(icon)
MainWindow.setWindowOpacity(1.0)
self.centralwidget = QtWidgets.QWidget(MainWindow)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
self.centralwidget.setSizePolicy(sizePolicy)
self.centralwidget.setObjectName("centralwidget")
self.layoutWidget = QtWidgets.QWidget(self.centralwidget)
self.layoutWidget.setGeometry(QtCore.QRect(10, 10, 381, 191))
self.layoutWidget.setObjectName("layoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label = QtWidgets.QLabel(self.layoutWidget)
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.lineEdit = QtWidgets.QLineEdit(self.layoutWidget)
self.lineEdit.setInputMask("")
self.lineEdit.setText("")
self.lineEdit.setMaxLength(3)
self.lineEdit.setCursorPosition(0)
self.lineEdit.setObjectName("lineEdit")
self.horizontalLayout.addWidget(self.lineEdit)
self.label_2 = QtWidgets.QLabel(self.layoutWidget)
self.label_2.setObjectName("label_2")
self.horizontalLayout.addWidget(self.label_2)
self.verticalLayout.addLayout(self.horizontalLayout)
self.lcdNumber = QtWidgets.QLCDNumber(self.layoutWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lcdNumber.sizePolicy().hasHeightForWidth())
self.lcdNumber.setSizePolicy(sizePolicy)
self.lcdNumber.setProperty("value", 0.0)
self.lcdNumber.setObjectName("lcdNumber")
self.verticalLayout.addWidget(self.lcdNumber)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.pushButton = QtWidgets.QPushButton(self.layoutWidget)
self.pushButton.setObjectName("pushButton")
self.horizontalLayout_2.addWidget(self.pushButton)
self.pushButton_2 = QtWidgets.QPushButton(self.layoutWidget)
self.pushButton_2.setObjectName("pushButton_2")
self.horizontalLayout_2.addWidget(self.pushButton_2)
self.verticalLayout.addLayout(self.horizontalLayout_2)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 401, 25))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setEnabled(False)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MiniTimer"))
self.label.setText(_translate("MainWindow", "Enter timeout:"))
self.label_2.setText(_translate("MainWindow", "minutes"))
self.pushButton.setText(_translate("MainWindow", "Start"))
self.pushButton_2.setText(_translate("MainWindow", "Reset"))
| gpl-3.0 | 8,543,905,054,595,443,000 | 50.418367 | 112 | 0.71919 | false |
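# --- Illustrative sketch (not part of the repository above) ---
# Typical pattern for using a pyuic5-generated class such as Ui_MainWindow:
# build the widget tree onto a plain QMainWindow. Assumes PyQt5 is installed
# and that the generated module above is importable as interface5.
import sys
from PyQt5 import QtWidgets
from interface5 import Ui_MainWindow

app = QtWidgets.QApplication(sys.argv)
window = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(window)   # wires up widgets, the menu bar and the status bar
window.show()
sys.exit(app.exec_())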
alex3287/PyCharmProjects | project/parser_1.py | 1 | 2603 | # Парсер для сбора внешних ссылок с сайта
from urllib.request import urlopen
from urllib.error import HTTPError
from bs4 import BeautifulSoup
url = 'http://gymn11.ru'
def get_html(url):
    '''Read the page and return its raw HTML'''
try:
html = urlopen(url)
except:
        print('no such site')
return None
else:
return html.read()
def all_links(html):
"""Находит все ссылки на страницы
и помещает их в список"""
suop = BeautifulSoup(html, "html.parser")
links = suop.body.find_all("a")
mas = []
for link in links:
if 'href' in link.attrs:
mas.append(link.attrs['href'])
return mas
def print_l(links):
    '''Print each link on its own line'''
k=0
for i in links:
k+=1
print(k, i)
def type_links(mas):
global url
"""Делит список ссылок на 2 категории:
1. внешнии
2. внутренние"""
input2 = []
output2 = []
for i in mas:
if ('http:' in i) or ('https:' in i):
if url not in i:
output2.append(i)
elif (len(i)>2) and ('java' not in i):
input2.append(i)
return output2, input2
def sort2(mas):
"""Отбрасывает одинаковые ссылки"""
b=[]
for i in mas:
if i[-1]!='/':
k=i+'/'
b.append(k)
else: b.append(i)
links1 = set(b)
links=list(links1)
return links
def out_link(links):
"""Создает настаящую ссылку из внутренней ссылки"""
global url
out_li = []
for i in links:
link = url+i
out_li.append(link)
return out_li
def search_links(links):
links_list = []
n = 0
for i in links:
htm = get_html(i)
if htm:
n += 1
            print('done', n)
links5 = all_links(htm)
links6 = type_links(links5)
links7 = sort2(links6[0])
for k in links7:
# print(k)
links_list.append(k)
return sort2(links_list)
if __name__ == "__main__":
    # url = input("Enter the site to parse \n>>>")
html = get_html(url)
links = all_links(html)
links2 = type_links(links)
links3 = out_link(sort2(links2[1]))
print_l(links3)
print('*'*150)
    print_l(search_links(links3))
print('used') | gpl-3.0 | 583,739,816,110,222,500 | 22.2 | 55 | 0.539025 | false |
cobbler/cobbler | tests/template_api_test.py | 1 | 2372 | import pytest
from cobbler.template_api import CobblerTemplate
class TestCobblerTemplate:
def test_compile(self):
# Arrange
# Act
compiled_template = CobblerTemplate(searchList=[{"autoinstall_snippets_dir": "/var/lib/cobbler/snippets"}]) \
.compile(source="$test")
result = str(compiled_template(namespaces={"test": 5}))
# Assert
assert result == "5"
def test_no_snippets_dir(self):
# Arrange
test_template = CobblerTemplate()
# Act & Assert
with pytest.raises(AttributeError):
test_template.read_snippet("nonexistingsnippet")
def test_read_snippet_none(self):
# Arrange
test_template = CobblerTemplate(searchList=[{"autoinstall_snippets_dir": "/var/lib/cobbler/snippets"}])
# Act
result = test_template.read_snippet("nonexistingsnippet")
# Assert
assert result is None
def test_read_snippet(self):
# Arrange
test_template = CobblerTemplate(searchList=[{"autoinstall_snippets_dir": "/var/lib/cobbler/snippets"}])
expected = "#errorCatcher ListErrors\n" + "set -x -v\n" + "exec 1>/root/ks-post.log 2>&1\n"
# Act
result = test_template.read_snippet("log_ks_post")
# Assert
assert result == expected
def test_nonexisting_snippet(self):
# Arrange
test_template = CobblerTemplate(searchList=[{"autoinstall_snippets_dir": "/var/lib/cobbler/snippets"}])
# Act
result = test_template.SNIPPET("preseed_early_default")
# Assert
assert result == "# Error: no snippet data for preseed_early_default\n"
def test_snippet(self):
# Arrange
test_template = CobblerTemplate(searchList=[{"autoinstall_snippets_dir": "/var/lib/cobbler/snippets"}])
# Act
result = test_template.SNIPPET("post_run_deb")
# Assert
assert result == "# A general purpose snippet to add late-command actions for preseeds\n"
def test_sedesc(self):
# Arrange
test_input = "This () needs [] to ^ be * escaped {}."
expected = "This \\(\\) needs \\[\\] to \\^ be \\* escaped \\{\\}\\."
test_template = CobblerTemplate()
# Act
result = test_template.sedesc(test_input)
# Assert
assert result == expected
| gpl-2.0 | 8,403,558,537,794,590,000 | 29.805195 | 117 | 0.605396 | false |
TheLazyHase/dragon_dice_simulator | business/dice/face/save_with_special/counter.py | 1 | 1773 | # -*- coding: utf-8 *-*
# Copyright (c) 2013 Tisserant Pierre
#
# This file is part of Dragon dice simulator.
#
# Dragon dice simulator is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Dragon dice simulator is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Dragon dice simulator. If not, see <http://www.gnu.org/licenses/>.
from business.dice.face import Face, SAI, Melee, Save
from business.effect import UnsaveableDamageEffect
class Counter(SAI, Melee, Save):
@property
def name(self):
return '%s Counter' % self.amount
def icon_by_type(self, icon_type):
value = 0
if (icon_type == Face.ICON_MELEE):
if (self.type_roll.is_melee):
value = self.amount
elif (icon_type == Face.ICON_SAVE):
if (self.type_roll.is_save):
value = self.amount
return value
@property
def special_effect(self):
value = None
#@TODO : restrict back damage to missile saving throw
if (self.type_roll.is_melee_save):
value = UnsaveableDamageEffect(self.amount)
return value
icon = {
Face.ICON_MELEE: 1,
Face.ICON_MISSILE: 0,
Face.ICON_MANEUVER: 0,
Face.ICON_MAGIC: 0,
Face.ICON_SAVE: 1,
}
| gpl-3.0 | -6,605,072,641,197,626,000 | 33.764706 | 83 | 0.646926 | false |
jimstorch/tokp | tokp_lib/system_rules.py | 1 | 1980 | #------------------------------------------------------------------------------
# File: system_rules.py
# Purpose:
# Author: James Mynderse
# Revised:
# License: GPLv3 see LICENSE.TXT
#------------------------------------------------------------------------------
import datetime
# defined by loot system rules:
SystemStartDate = datetime.datetime(2008,11,13,6,0)
RaidWeekStart = 2
PartFactor = {0.5:0.00, 1:0.10, 2:0.25, 3:0.50, 4:0.75}
PointsPerDay = {0.5:0.00, 1:0.82, 2:1.29, 3:1.68, 4:2.00}
PointDecay = {0:0.0, 1:0.0, 2:2.0, 3:4.0, 4:8.0, 5:10.0}
ValueLabels = {"epic":1, "rare":2, "uncommon":3, "zg":4, "special":5}
RevValueLabels = {1:"epic", 2:"rare", 3:"uncommon", 4:"zg", 5:"special"}
ValueCosts = {1:20 , 2:6, 3:3, 4:1, 5:0}
MinCost = 20
MaxCost = 50
ResetPercent = 0.75
MinPoints = -50
MaxPoints = 150
SkipRepeatParticipation = 1
def subtract_loot(OldScores, LootValueIndex):
# reset equal and less valuable scores
# subtract from more valuable scores
NewScores = {}
#print OldScores
#print LootValueIndex
for index in OldScores.keys():
#print index
if index >= LootValueIndex:
NewScores[index] = reset_score(OldScores[index])
else:
NewScores[index] = OldScores[index] - ValueCosts[LootValueIndex]
if NewScores[index] < MinPoints:
NewScores[index] = MinPoints
#print OldScores, LootValueIndex, NewScores
return NewScores
def reset_score(OldScore):
if 1:
# this is the old system, here for posterity
# reset cost
ResetCost = ResetPercent * OldScore
        # choose which cost to use
if ResetCost < MinCost:
NewScore = OldScore - MinCost
elif ResetCost > MaxCost:
NewScore = OldScore - MaxCost
else:
NewScore = OldScore - ResetCost
else:
NewScore = OldScore
return NewScore | gpl-3.0 | 7,329,093,138,148,140,000 | 32.172414 | 79 | 0.554545 | false |
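# --- Illustrative worked example (not part of the repository above) ---
# reset_score() clamps the 75% reset cost into [MinCost, MaxCost] = [20, 50]:
#   OldScore = 100 -> ResetCost = 75 -> capped at 50  -> NewScore = 50
#   OldScore =  40 -> ResetCost = 30 -> within range  -> NewScore = 10
#   OldScore =  20 -> ResetCost = 15 -> floored at 20 -> NewScore = 0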
WaveBlocks/WaveBlocksND | WaveBlocksND/IOM_plugin_lincombwp.py | 1 | 13903 | """The WaveBlocks Project
IOM plugin providing functions for handling
linear combinations of general wavepackets.
@author: R. Bourquin
@copyright: Copyright (C) 2013, 2016 R. Bourquin
@license: Modified BSD License
"""
import numpy as np
def add_lincombwp(self, parameters, timeslots=None, lincombsize=None, blockid=0):
r"""Add storage for the linear combination of general wavepackets.
:param parameters: An :py:class:`ParameterProvider` instance with at
least the key ``ncomponents``.
:param timeslots: The number of time slots we need. Can be set to ``None``
to get automatically growing datasets.
:param lincombsize: The (maximal) size ``J`` of the linear combination of wavepackets. If specified
this remains fixed for all timeslots. Can be set to ``None`` (default)
to get automatically growing datasets.
:param blockid: The ID of the data block to operate on.
"""
N = parameters["ncomponents"]
# TODO: Handle multi-component packets
assert N == 1
if timeslots is None:
T = 0
Ts = None
else:
T = timeslots
Ts = timeslots
if lincombsize is None:
J = 0
Js = None
csJs = 32
else:
J = lincombsize
Js = lincombsize
csJs = min(32, Js)
# The overall group containing all lincombwp data
grp_lc = self._srf[self._prefixb + str(blockid)].require_group("lincombwp")
# Create the dataset with appropriate parameters
daset_tg_c = grp_lc.create_dataset("timegrid_coefficients", (T,), dtype=np.integer, chunks=True, maxshape=(Ts,), fillvalue=-1)
daset_tg_p = grp_lc.create_dataset("timegrid_packets", (T,), dtype=np.integer, chunks=True, maxshape=(Ts,), fillvalue=-1)
grp_lc.create_dataset("lincomb_size", (T,), dtype=np.integer, chunks=True, maxshape=(Ts,))
# Coefficients
grp_lc.create_dataset("coefficients", (T, J), dtype=np.complexfloating, chunks=(1, csJs), maxshape=(Ts, Js))
# Packet IDs (32 characters is the length of a 'md5' digest in hex representation)
daset_refs = grp_lc.create_dataset("packet_refs", (T, J), dtype=np.dtype((str, 32)), chunks=(1, csJs), maxshape=(Ts, Js))
gid = self.create_group(groupid="wavepacketsLCblock" + str(blockid))
daset_refs.attrs["packet_gid"] = gid
# Attach pointer to timegrid
daset_tg_c.attrs["pointer"] = 0
daset_tg_p.attrs["pointer"] = 0
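
# Hedged usage sketch (not in the original file): the expected call order
# for these plugin functions, mirroring save_lincombwp() further below and
# assuming `iom` is an IOManager instance with this plugin attached:
#
#     iom.add_lincombwp(parameters, blockid=0)
#     iom.save_lincombwp_description(lincomb.get_description(), blockid=0)
#     iom.save_lincombwp_wavepackets(lincomb.get_wavepackets(), timestep=n, blockid=0)
#     iom.save_lincombwp_coefficients(lincomb.get_coefficients(), timestep=n, blockid=0)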
def delete_lincombwp(self, blockid=0):
r"""Remove the stored linear combination.
:param blockid: The ID of the data block to operate on.
"""
try:
del self._srf[self._prefixb + str(blockid) + "/lincombwp"]
except KeyError:
pass
def has_lincombwp(self, blockid=0):
r"""Ask if the specified data block has the desired data tensor.
:param blockid: The ID of the data block to operate on.
"""
return "lincombwp" in self._srf[self._prefixb + str(blockid)].keys()
def save_lincombwp_description(self, descr, blockid=0):
r"""Save the description of this linear combination.
:param descr: The description.
:param blockid: The ID of the data block to operate on.
"""
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp"
# Save the description
for key, value in descr.items():
self._srf[pathd].attrs[key] = self._save_attr_value(value)
def save_lincombwp_coefficients(self, coefficients, timestep=None, blockid=0):
r"""Save the coefficients of the linear combination to a file.
:param coefficients: The coefficients of the linear combination of wavepackets.
:type coefficients: A single, suitable :py:class:`ndarray`.
:param timestep: The timestep at which we save the data.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_coefficients"
pathlcs = "/" + self._prefixb + str(blockid) + "/lincombwp/lincomb_size"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/coefficients"
timeslot = self._srf[pathtg].attrs["pointer"]
# Write the data
self.must_resize(pathlcs, timeslot)
J = np.size(coefficients)
self._srf[pathlcs][timeslot] = J
self.must_resize(pathd, timeslot)
if not J == 0:
self.must_resize(pathd, J - 1, axis=1)
self._srf[pathd][timeslot, :J] = np.squeeze(coefficients)
# Write the timestep to which the stored values belong into the timegrid
self.must_resize(pathtg, timeslot)
self._srf[pathtg][timeslot] = timestep
# Update the pointer
self._srf[pathtg].attrs["pointer"] += 1
def save_lincombwp_wavepackets(self, packetlist, timestep=None, blockid=0):
r"""Save the wavepackets being part of this linear combination.
.. warning:: This is quite an expensive operation.
    :param timestep: The timestep at which we save the data.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_packets"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/packet_refs"
gid = self._srf[pathd].attrs["packet_gid"]
timeslot = self._srf[pathtg].attrs["pointer"]
# Book keeping
self.must_resize(pathd, timeslot)
K = len(packetlist)
if not K == 0:
self.must_resize(pathd, K - 1, axis=1)
# Save the packets
known_packets = self.get_block_ids(groupid=gid)
for k, packet in enumerate(packetlist):
bid = "LC" + str(blockid) + "WP" + str(packet.get_id())
if bid not in known_packets:
bid = self.create_block(blockid=bid, groupid=gid)
descr = packet.get_description()
self.add_genericwp(descr, blockid=bid)
self.save_genericwp(packet, timestep=timestep, blockid=bid)
# Book keeping
self._srf[pathd][timeslot, k] = packet.get_id()
# Write the timestep to which the stored packets belong into the timegrid
self.must_resize(pathtg, timeslot)
self._srf[pathtg][timeslot] = timestep
# Update the pointer
self._srf[pathtg].attrs["pointer"] += 1
def load_lincombwp_description(self, blockid=0):
r"""Load the description of this linear combination.
:param blockid: The ID of the data block to operate on.
"""
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp"
# Load and return all descriptions available
descr = {}
for key, value in self._srf[pathd].attrs.items():
descr[key] = self._load_attr_value(value)
return descr
def load_lincombwp_timegrid(self, blockid=0, key=("coeffs", "packets")):
r"""Load the timegrid of this linear combination.
:param blockid: The ID of the data block to operate on.
:param key: Specify which linear combination timegrids to load. All are independent.
:type key: Tuple of valid identifier strings that are ``coeffs`` and ``packets``.
Default is ``("coeffs", "packets")``.
"""
tg = []
for item in key:
if item == "coeffs":
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_coefficients"
tg.append(self._srf[pathtg][:])
elif item == "packets":
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_packets"
tg.append(self._srf[pathtg][:])
if len(tg) == 1:
return tg[0]
else:
return tuple(tg)
def load_lincombwp_size(self, timestep=None, blockid=0):
r"""Load the size (number of packets) of this linear combination.
:param timestep: Load only the data of this timestep.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_coefficients"
pathlcs = "/" + self._prefixb + str(blockid) + "/lincombwp/lincomb_size"
if timestep is not None:
index = self.find_timestep_index(pathtg, timestep)
return self._srf[pathlcs][index]
else:
index = slice(None)
return self._srf[pathlcs][index]
def load_lincombwp_coefficients(self, timestep=None, blockid=0):
r"""Load the coefficients of this linear combination.
:param timestep: Load only the data of this timestep.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_coefficients"
pathlcs = "/" + self._prefixb + str(blockid) + "/lincombwp/lincomb_size"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/coefficients"
if timestep is not None:
index = self.find_timestep_index(pathtg, timestep)
J = self._srf[pathlcs][index]
return self._srf[pathd][index, :J]
else:
index = slice(None)
return self._srf[pathd][index, :]
def load_lincombwp_wavepackets(self, timestep, packetindex=None, blockid=0):
r"""Load the wavepackets being part of this linear combination.
Note that this is quite an expensive operation.
:param timestep: Load only the data of this timestep.
:param packetindex: Load only the packet with this index. If ``None``
then load all packets for the given timestep.
:param blockid: The ID of the data block to operate on.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_packets"
pathlcs = "/" + self._prefixb + str(blockid) + "/lincombwp/lincomb_size"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/packet_refs"
index = self.find_timestep_index(pathtg, timestep)
J = self._srf[pathlcs][index]
refs = self._srf[pathd][index, :J]
if packetindex is None:
packets = []
for ref in refs:
bid = "LC" + str(blockid) + "WP" + str(ref)
packets.append(self.load_genericwp(timestep=timestep, blockid=bid))
return tuple(packets)
else:
if packetindex >= J:
raise ValueError("Packet index is invalid.")
bid = "LC" + str(blockid) + "WP" + str(refs[packetindex])
return self.load_genericwp(timestep=timestep, blockid=bid)
def load_lincombwp_wavepacket_refs(self, timestep=None, blockid=0):
r"""Load the references of the wavepackets being part of
this linear combination. References can be used as ``blockid``
for loading selected wavepackets manually. If for example a
``ref`` obtained through this method is:
>>> refs = anIom.load_lincombwp_wavepacket_refs(timestep=4)
>>> refs
array(['673290fd36a0fa80f28973ae31f10378',
'075dc9d7d2c558c97608e2fe08a7d53d',
'0aed8bf3e21b5894bf89ef894d3f7d0c'],
dtype='|S32')
    >>> ref = refs[0]
    >>> ref
    '673290fd36a0fa80f28973ae31f10378'
    then the corresponding block ID is:
    >>> bid = "LC" + str(blockid) + "WP" + ref
    >>> bid
    'LC0WP673290fd36a0fa80f28973ae31f10378'
with ``blockid`` the block ID where the linear combination
was stored. With that ``bid`` we can now for example load
data of a selected wavepacket:
>>> Pi = anIom.load_wavepacket_parameters(timestep=4, blockid=bid)
in case of a Hagedorn wavepacket.
:param timestep: Load only the data of this timestep.
:param blockid: The ID of the data block to operate on.
:return: A :py:class:`ndarray` of strings.
"""
pathtg = "/" + self._prefixb + str(blockid) + "/lincombwp/timegrid_packets"
pathd = "/" + self._prefixb + str(blockid) + "/lincombwp/packet_refs"
if timestep is not None:
index = self.find_timestep_index(pathtg, timestep)
else:
index = slice(None)
return self._srf[pathd][index, :]
#
# The following two methods are only for convenience and are NOT particularly efficient.
#
def load_lincombwp(self, timestep, blockid=0):
r"""Load a linear combination at a given timestep and return a fully configured
:py:class:`LinearCombinationOfWPs` instance. This method just calls some other
:py:class:`IOManager` methods in the correct order. It is included only for
convenience and is not particularly efficient.
:param timestep: The timestep :math:`n` we load the wavepacket.
:param blockid: The ID of the data block to operate on.
:return: A :py:class:`LinearCombinationOfWPs` instance.
"""
from WaveBlocksND.LinearCombinationOfWPs import LinearCombinationOfWPs
descr = self.load_lincombwp_description(blockid=blockid)
J = self.load_lincombwp_size(timestep=timestep, blockid=blockid)
if J == 0:
return None
# Load the data
c = self.load_lincombwp_coefficients(timestep=timestep, blockid=blockid)
psi = self.load_lincombwp_wavepackets(timestep=timestep, blockid=blockid)
# Assemble the linear combination
LC = LinearCombinationOfWPs(descr["dimension"], descr["ncomponents"])
LC.add_wavepackets(psi, c)
return LC
def save_lincombwp(self, lincomb, timestep, blockid=0):
r"""Save a linear combination of general wavepackets at a given timestep and read
all data to save from the :py:class:`LinearCombinationOfWPs` instance provided. This
method just calls some other :py:class:`IOManager` methods in the correct order.
It is included only for convenience and is not particularly efficient. We assume
the linear combination is already set up with the correct :py:meth:`add_lincombwp`
method call.
:param lincomb: The :py:class:`LinearCombinationOfWPs` instance we want to save.
:param timestep: The timestep :math:`n` at which we save the linear combination.
:param blockid: The ID of the data block to operate on.
"""
# Description
self.save_lincombwp_description(lincomb.get_description(), blockid=blockid)
# Wavepackets
self.save_lincombwp_wavepackets(lincomb.get_wavepackets(), timestep=timestep, blockid=blockid)
# Coefficients
self.save_lincombwp_coefficients(lincomb.get_coefficients(), timestep=timestep, blockid=blockid)
| bsd-3-clause | -8,754,087,416,531,690,000 | 36.474394 | 130 | 0.662519 | false |
Chaffleson/blupy | blupy/settings.py | 1 | 4657 | """
Django settings for blupy project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from confresolver import *
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = DJANGO_SECRET
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['localhost', '127.0.0.1', DEV_SITE_URL]
# Celery Config
# http://celery.readthedocs.org/en/latest/django/first-steps-with-django.html
# Broker URL for CloudAMQP integration
BROKER_URL = BOUND_SERVICES['CloudAMQP']['credentials']['uri']
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_HIJACK_ROOT_LOGGER = False
CELERY_TIMEZONE = 'Europe/London'
CELERY_DISABLE_RATE_LIMITS = True
# CloudAMQP recommended settings
BROKER_POOL_LIMIT = 1 # Will decrease connection usage
BROKER_CONNECTION_TIMEOUT = 30 # May require a long timeout due to Linux DNS timeouts etc
BROKER_HEARTBEAT = 30 # Will detect stale connections faster
CELERY_SEND_EVENTS = False # Will not create celeryev.* queues
CELERY_EVENT_QUEUE_EXPIRES = 60 # Will delete all celeryev. queues without consumers after 1 minute.
# Using Finalware to auto create the super user for convenience
# http://stackoverflow.com/a/11210730/4717963
SITE_SUPERUSER_USERNAME = 'admin'
SITE_SUPERUSER_EMAIL = '[email protected]'
SITE_SUPERUSER_PASSWORD = SUPER_USER_PASSWORD # this is set in settings_local
SITE_SUPERUSER_ID = '48'
SITE_OBJECTS_INFO_DICT = {
'1': {
'name': 'development',
'domain': DEV_SITE_URL,
}
}
SITE_ID = 1
# Application definition
INSTALLED_APPS = (
'django.contrib.sites',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djcelery',
'example',
'finalware',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'blupy.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'finalware.context_processors.contextify',
],
},
},
]
WSGI_APPLICATION = 'blupy.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'example01',
'USER': BOUND_SERVICES['PostgreSQL']['credentials']['username'],
'PASSWORD': BOUND_SERVICES['PostgreSQL']['credentials']['password'],
'HOST': BOUND_SERVICES['PostgreSQL']['credentials']['public_hostname'].split(':')[0],
'PORT': BOUND_SERVICES['PostgreSQL']['credentials']['public_hostname'].split(':')[1]
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
| gpl-2.0 | -8,794,081,472,193,870,000 | 29.84106 | 101 | 0.704316 | false |
sburnett/seattle | production_nat_new/test/ut_newnatdeployed_legacywithnormalclient.py | 1 | 1401 | """
One server connects to a deployed forwarder
One legacy client connects to the server
One new client connects to the server
A few messages are exchanged
"""
#pragma repy restrictions.normal
include NatForwardingShim.repy
include NATLayer_rpc.repy
def response(remote_ip,remote_port,sock,th,listenhandle):
try:
while True:
msg = sock.recv(1024)
sock.send('got'+msg)
except:
sock.close()
if callfunc == 'initialize':
serverkey = 'NAT$BLAHBLAHBLAH'
ip = '127.0.0.1'
port = 12345
# use the nat shim
server_shim = ShimStack('(NatForwardingShim)(NullShim)')
handle = server_shim.waitforconn(serverkey,12347,response)
sleep(10) # need to sleep while the value is advertised
# CLIENT LOGIC
# open connection using the legacy client
# manually enter the forwarder info
legacy_sock = nat_openconn(serverkey, 12347)
#client_shim = ShimStack('(NatForwardingShim)(NullShim)')
#sock = client_shim.openconn(serverkey,12347)
sock = nat_openconn(serverkey, 12347)
for i in range(10):
legacy_sock.send(str(i))
sock.send(str(i))
legacy_msg = legacy_sock.recv(10)
msg1 = sock.recv(10)
if msg1 != 'got'+str(i):
print 'GOT WRONG MSG FROM SHIM SOCK'
elif legacy_msg != 'got'+str(i):
print 'GOT WRONG MSG FROM LEGACY SOCK'
legacy_sock.close()
sock.close()
exitall()
| mit | 6,065,202,797,805,868,000 | 18.191781 | 60 | 0.670236 | false |
ikumen/project-euler | solutions/013.py | 1 | 1047 | #!/usr/bin/env python
'''
013.py: https://projecteuler.net/problem=13
Large Sum
Work out the first ten digits of the sum of the following one-hundred 50-digit numbers.
37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
...
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690
'''
import os
import pytest
import time
def first_ten_digits_of_sum(n, numbers):
    '''Finds the first n digits of the sum of the numbers list.'''
    return str(sum(numbers))[:n]
def test_first_ten_digits_of_sum():
'''Test'''
assert '5537376230' == first_ten_digits_of_sum(10, load_numbers())
def load_numbers():
with open(os.path.join(os.path.dirname(__file__), 'data/013.txt')) as input:
return list(map(int, input.readlines()))
def main():
'''Main runner, delegates to solution.'''
print(first_ten_digits_of_sum(10, load_numbers()))
if __name__ == '__main__':
start_time = time.time()
main()
print("--- %s seconds ---" % (time.time() - start_time))
| mit | -689,761,490,315,390,700 | 22.795455 | 87 | 0.723018 | false |
cokelaer/colormap | src/colormap/colors.py | 1 | 32584 | # -*- python -*-
# -*- coding: utf-8 -*-
#
# This file is part of the colormap software
#
# Copyright (c) 2011-20134
#
# File author(s): Thomas Cokelaer <[email protected]>
#
# Distributed under the GPLv3 License.
# See accompanying file LICENSE.txt or copy at
# http://www.gnu.org/licenses/gpl-3.0.html
#
# Website: https://github.com/cokelaer/colormap
# Documentation: http://packages.python.org/colormap
#
##############################################################################
"""Utilities provided in this module can be found either in the
standard Python module called :mod:`colorsys` or in matplotlib.colors
(e.g. rgb2hex) or are original to this module (e.g., rgb2yuv)
"""
# matplotlib dependence is only inside Colormap class
import colorsys
from easydev.tools import check_param_in_list, swapdict, check_range
from colormap.xfree86 import XFree86_colors
__all__ = ["HEX", "Color", "hex2web", "web2hex", "hex2rgb", "hex2dec",
"rgb2hex", "rgb2hsv", "hsv2rgb", "rgb2hls", "hls2rgb","yuv2rgb", "rgb2yuv",
"to_intensity", "yuv2rgb_int", "rgb2yuv_int", "Colormap"
]
def hex2web(hexa):
"""Convert hexadecimal string (6 digits) into *web* version (3 digits)
.. doctest::
>>> from colormap.colors import hex2web
>>> hex2web("#FFAA11")
'#FA1'
.. seealso:: :func:`web2hex`, :func:`hex2rgb`
:func:`rgb2hex`, :func:`rgb2hsv`, :func:`hsv2rgb`, :func:`rgb2hls`,
:func:`hls2rgb`
"""
hexa = HEX().get_standard_hex_color(hexa)
return "#" + hexa[1::2]
def web2hex(web):
"""Convert *web* hexadecimal string (3 digits) into 6 digits version
.. doctest::
>>> from colormap.colors import web2hex
>>> web2hex("#FA1")
'#FFAA11'
.. seealso:: :func:`hex2web`, :func:`hex2rgb`
:func:`rgb2hex`, :func:`rgb2hsv`, :func:`hsv2rgb`, :func:`rgb2hls`,
:func:`hls2rgb`
"""
return HEX().get_standard_hex_color(web)
def hex2rgb(hexcolor, normalise=False):
"""This function converts a hex color triplet into RGB
Valid hex code are:
* #FFF
* #0000FF
* 0x0000FF
* 0xFA1
.. doctest::
>>> from colormap.colors import hex2rgb
>>> hex2rgb("#FFF", normalise=False)
(255, 255, 255)
>>> hex2rgb("#FFFFFF", normalise=True)
(1.0, 1.0, 1.0)
.. seealso:: :func:`hex2web`, :func:`web2hex`,
:func:`rgb2hex`, :func:`rgb2hsv`, :func:`hsv2rgb`, :func:`rgb2hls`,
:func:`hls2rgb`
"""
hexcolor = HEX().get_standard_hex_color(hexcolor)[1:]
r, g, b = int(hexcolor[0:2], 16), int(hexcolor[2:4], 16), int(hexcolor[4:6], 16)
if normalise:
r, g, b = _normalise(r, g, b)
return r, g, b
def rgb2hex(r, g, b, normalised=False):
"""Convert RGB to hexadecimal color
:param: can be a tuple/list/set of 3 values (R,G,B)
    :return: a hex version of the RGB 3-tuple
.. doctest::
>>> from colormap.colors import rgb2hex
>>> rgb2hex(0,0,255, normalised=False)
'#0000FF'
>>> rgb2hex(0,0,1, normalised=True)
'#0000FF'
.. seealso:: :func:`hex2web`, :func:`web2hex`, :func:`hex2rgb`
, :func:`rgb2hsv`, :func:`hsv2rgb`, :func:`rgb2hls`,
:func:`hls2rgb`
"""
if normalised:
r, g, b = _denormalise(r, g, b, mode="rgb")
r = int(r)
g = int(g)
b = int(b)
check_range(r, 0, 255)
check_range(g, 0, 255)
check_range(b, 0, 255)
return '#%02X%02X%02X' % (r, g, b)
def rgb2hls(r, g, b, normalised=True):
"""Convert an RGB value to an HLS value.
:param bool normalised: if *normalised* is True, the input RGB triplet
should be in the range 0-1 (0-255 otherwise)
:return: the HLS triplet. If *normalised* parameter is True, the output
triplet is in the range 0-1; otherwise, H in the range 0-360 and LS
in the range 0-100.
.. doctest::
>>> from colormap.colors import rgb2hls
>>> rgb2hls(255,255,255, normalised=False)
(0.0, 1.0, 0.0)
.. seealso:: :func:`hex2web`, :func:`web2hex`, :func:`hex2rgb`
:func:`rgb2hex`, :func:`hsv2rgb`,
:func:`hls2rgb`
"""
# rgb_to_hsv expects normalised values !
if normalised:
upper = 1
else:
upper = 255
check_range(r, 0, upper)
check_range(g, 0, upper)
check_range(b, 0, upper)
if normalised==False:
r, g, b = _normalise(r, g, b)
h, l, s = colorsys.rgb_to_hls(r, g, b)
return h, l, s
def rgb2hsv(r, g, b, normalised=True):
"""Convert an RGB value to an HSV value.
:param bool normalised: if *normalised* is True, the input RGB triplet
should be in the range 0-1 (0-255 otherwise)
:return: the HSV triplet. If *normalised* parameter is True, the output
triplet is in the range 0-1; otherwise, H in the range 0-360 and LS
in the range 0-100.
.. doctest::
>>> from colormap.colors import rgb2hsv
>>> rgb2hsv(0.5,0,1)
(0.75, 1, 1)
.. seealso:: :func:`hex2web`, :func:`web2hex`, :func:`hex2rgb`
:func:`rgb2hex`, :func:`hsv2rgb`, :func:`rgb2hls`,
:func:`hls2rgb`
"""
# rgb_to_hsv expects normalised values !
if normalised:
upper = 1
else:
upper = 255
check_range(r, 0, upper)
check_range(g, 0, upper)
check_range(b, 0, upper)
if normalised==False:
r, g, b = _normalise(r, g, b)
h, s, v = colorsys.rgb_to_hsv(r, g, b)
return h,s,v
def hsv2rgb(h, s, v, normalised=True):
"""Convert a hue-saturation-value (HSV) value to a red-green-blue (RGB).
:param bool normalised: If *normalised* is True, the input HSV triplet
should be in the range 0-1; otherwise, H in the range 0-360 and LS
in the range 0-100.
:return: the RGB triplet. The output
triplet is in the range 0-1 whether the input is normalised or not.
.. doctest::
>>> from colormap.colors import hsv2rgb
>>> hsv2rgb(0.5,1,1, normalised=True) # doctest: +SKIP
(0, 1, 1)
.. seealso:: :func:`hex2web`, :func:`web2hex`, :func:`hex2rgb`
:func:`rgb2hex`, :func:`rgb2hsv`, :func:`rgb2hls`,
:func:`hls2rgb`
.. seealso:: :func:`rgb2hex`
"""
if normalised:
upper = 1
else:
upper = 100
if normalised:
uppera = 1
else:
uppera = 360
check_range(h, 0, uppera)
check_range(s, 0, upper)
check_range(v, 0, upper)
if normalised == False:
h, s, v = _normalise(h, s, v, mode="hsv")
return colorsys.hsv_to_rgb(h, s, v)
def hls2rgb(h, l, s, normalised=True):
"""Convert an HLS value to a RGB value.
:param bool normalised: If *normalised* is True, the input HLS triplet
should be in the range 0-1; otherwise, H in the range 0-360 and LS
in the range 0-100.
:return: the RGB triplet. The output
triplet is in the range 0-1 whether the input is normalised or not.
.. doctest::
>>> from colormap.colors import hls2rgb
>>> hls2rgb(360, 50, 60, normalised=False) # doctest: +SKIP
(0.8, 0.2, 0.2)
.. seealso:: :func:`hex2web`, :func:`web2hex`, :func:`hex2rgb`
:func:`rgb2hex`, :func:`rgb2hsv`, :func:`hsv2rgb`, :func:`rgb2hls`,
"""
if normalised:
upper = 1
else:
upper = 100
if normalised:
uppera = 1
else:
uppera = 360
check_range(h, 0, uppera)
check_range(s, 0, upper)
check_range(l, 0, upper)
if normalised == False:
h, l, s = _normalise(h, l, s, mode="hls")
return colorsys.hls_to_rgb(h, l, s)
def hex2dec(data):
"""convert hexadecimal string (data) into a float in the [0-65536] inclusive range"""
if data[0] == '#':
data.replace('#', '')
return int(data, 16)/255.
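
# Hedged usage sketch for hex2dec (illustration only):
#     >>> hex2dec('#FF')
#     1.0
#     >>> hex2dec('00')
#     0.0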
def rgb2yuv(r, g, b):
"""Convert RGB triplet into YUV
:return: YUV triplet with values between 0 and 1
`YUV wikipedia <http://en.wikipedia.org/wiki/YUV>`_
.. warning:: expected input must be between 0 and 1
.. note:: the constants referenc used is Rec. 601
"""
check_range(r, 0, 1)
check_range(g, 0, 1)
check_range(b, 0, 1)
#y = int(0.299 * r + 0.587 * g + 0.114 * b)
#u = int(-0.14713 * r + -0.28886 * g + 0.436 * b)
#v = int(0.615 * r + -0.51499 * g + -0.10001 * b)
y = 0.299 * r + 0.587 * g + 0.114 * b
u = -32591.0/221500.0 * r + -63983.0/221500.0 * g + 0.436 * b
v = 0.615 * r + -72201./140200 * g + -7011/70100. * b
return (y, u, v)
def yuv2rgb(y, u, v):
"""Convert YUV triplet into RGB
`YUV <http://en.wikipedia.org/wiki/YUV>`_
    .. warning:: expected input must be between 0 and 1 (normalised)
"""
check_range(y, 0,1)
check_range(u, 0, 1)
check_range(v, 0, 1)
A, B, C, D = 701.0/615.0, 25251.0/63983.0, 209599.0/361005.0, 443.0/218.0
r = y + A * v
g = y - B * u - C * v
b = y + D * u
return (r, g, b)
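
# Hedged illustration: both YUV helpers above operate on the normalised
# 0-1 range enforced by check_range. A zero-chroma input maps to grey:
#     >>> yuv2rgb(0.5, 0.0, 0.0)
#     (0.5, 0.5, 0.5)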
def rgb2yuv_int(r, g, b):
"""Convert RGB triplet into YUV
`YUV wikipedia <http://en.wikipedia.org/wiki/YUV>`_
.. warning:: expected input must be between 0 and 255 (not normalised)
"""
check_range(r, 0, 255)
check_range(g, 0, 255)
check_range(b, 0, 255)
y = int(0.299 * r + 0.587 * g + 0.114 * b)
u = int(-32591.0/221500.0 * r + -63983.0/221500.0 * g + 0.436 * b)
v = int(0.615 * r + -72201./140200 * g + -7011/70100. * b)
return (y, u, v)
def yuv2rgb_int(y, u, v):
"""Convert YUV triplet into RGB
`YUV <http://en.wikipedia.org/wiki/YUV>`_
.. warning:: expected input must be between 0 and 255 (not normalised)
"""
check_range(y, 0, 255)
check_range(u, 0, 255)
check_range(v, 0, 255)
r = int(y + 1.13983 * v)
g = int(y - 0.39465 * u - 0.58060 * v)
b = int(y + 2.03211 * u)
return (r, g, b)
def _denormalise(r, g, b, mode="rgb"):
check_param_in_list(mode, ["rgb", "hls", "hsv"])
if mode == "rgb":
return r*255., g*255., b*255.
elif mode in ["hls", "hsv"]:
return r*360., g*100., b*100.
def _normalise(r, g, b, mode="rgb"):
check_param_in_list(mode, ["rgb", "hls", "hsv"])
if mode == "rgb":
return r/255., g/255., b/255.
elif mode in ["hls", "hsv"]:
return r/360., g/100., b/100.
def to_intensity(n):
"""Return intensity
:param n: value between 0 and 1
:return: value between 0 and 255; round(n*127.5+127.5)
"""
check_range(n, 0, 1)
return int(round(n * 127.5 + 127.5))
class HEX(object):
"""Class to check the validity of an hexadecimal string and get standard string
By standard, we mean #FFFFFF (6 digits)
::
>>> h = HEX()
>>> h.is_valid_hex_color("#FFFF00")
True
"""
def __init__(self):
pass
def is_valid_hex_color(self, value, verbose=True):
"""Return True is the string can be interpreted as hexadecimal color
Valid formats are
* #FFF
* #0000FF
* 0x0000FF
* 0xFA1
"""
try:
self.get_standard_hex_color(value)
return True
except Exception as err:
if verbose:
print(err)
return False
def get_standard_hex_color(self, value):
"""Return standard hexadecimal color
By standard, we mean a string that starts with # sign followed by 6
character, e.g. #AABBFF
"""
if isinstance(value, str)==False:
raise TypeError("value must be a string")
if len(value) <= 3:
raise ValueError("input string must be of type 0xFFF, 0xFFFFFF or #FFF or #FFFFFF")
if value.startswith("0x") or value.startswith("0X"):
value = value[2:]
elif value.startswith("#"):
value = value[1:]
else:
raise ValueError("hexa string must start with a '#' sign or '0x' string")
value = value.upper()
# Now, we have either FFFFFF or FFF
# now check the length
for x in value:
if x not in "0123456789ABCDEF":
raise ValueError("Found invalid hexa character {0}".format(x))
if len(value) == 6 or len(value) == 8:
value = "#" + value[0:6]
elif len(value) == 3:
value = "#" + value[0]*2 + value[1]*2 + value[2]*2
else:
raise ValueError("hexa string should be 3, 6 or 8 digits. if 8 digits, last 2 are ignored")
return value
class Color(HEX):
"""Class to ease manipulation and conversion between color codes
    You can create an instance in many different ways. You can either use a
human-readable name as long as it is part of the
`XFree86 list <http://en.wikipedia.org/wiki/X11_color_names>`_
You can also provide a hexadecimal string (either 3 or 6 digits). You can
use triplets of values corresponding to the RGB, HSV or HLS conventions.
Here are some examples:
.. doctest::
from colormap import Color
Color("red") # human XFree86 compatible representation
Color("#f00") # standard 3 hex digits
Color("#ff0000") # standard 6 hex digits
Color(hsv=(0,1,0.5))
Color(hls=(0, 1, 0.5)) # HLS triplet
Color(rgb=(1, 0, 0)) # RGB triplet
Color(Color("red")) # using an instance of :class:`Color`
Note that the RGB, HLS and HSV triplets use normalised values. If you need
to normalise the triplet, you can use :mod:`colormap.colors._normalise` that
provides a function to normalise RGB, HLS and HSV triplets::
colors._normalise(*(255, 255, 0), mode="rgb")
colors._normalise(*(360, 50, 100), mode="hls")
If you provide a string, it has to be a valid string from XFree86.
In addition to the official names, the lower case names are valid. Besides,
there are names with spaces. The equivalent names without space are also
valid. Therefore the name "Spring Green", which is an official name can be
provided as "Spring Green", "spring green", "springgreen" or "SpringGreen".
"""
# Get official color names
colors = XFree86_colors.copy()
# add color names without spaces
aliases = dict([(x.replace(" ", ""),x) for x in colors.keys() if " " in x])
# add color names without spaces in lower cases
aliases.update([(x.replace(" ", "").lower(),x) for x in colors.keys() if " " in x])
# add color names in lower case
aliases.update(dict([(x.lower(),x) for x in colors.keys()]))
aliases.update(dict([(x,x) for x in colors.keys()]))
# keep track of all possible names
color_names = sorted(list(set(list(colors.keys()) +list( aliases.keys()))))
def __init__(self, name=None, rgb=None, hls=None, hsv=None):
super(Color, self).__init__()
self._name = None
self._mode = None
self._rgb = None
# Does the user provided the name argument (first one) as a string ?
if isinstance(name, str):
# if so, it can be a valid human name (e.g., red) or an hex
# assuming that valid hexadecimal starts with # or 0x,
# if we can interpret the string as an hexadecimal, we are done
if self.is_valid_hex_color(name, verbose=False):
self.hex = name
else:
# if not, then, the user probably provided a valid color name
# the property will check the validity.
self.name = name[:]
#all other input parameters are ignored
elif name == None:
if rgb:
self.rgb = rgb
elif hls:
self.hls = hls
elif hsv:
self.hsv = hsv
else:
raise ValueError("You must set one of the parameter")
elif isinstance(name, Color):
self.rgb = name.rgb
else:
raise ValueError("name parameter must be a string")
def _get_name(self):
return self._name
def _set_name(self, name):
check_param_in_list(name, self.color_names)
name = self.aliases[name]
self._name = name
# set hex and rgb at the same time based on the name
self.hex = self.colors[name]
name = property(_get_name, _set_name)
color = property(_get_name, _set_name)
def _get_hex(self):
return self._hex
def _set_hex(self, value):
# hex is an approximation made of 255 bits so do not define rgb here
if self.is_valid_hex_color(value):
value = self.get_standard_hex_color(value)
self._hex = value
if self._hex in self.colors.values():
self._name = swapdict(self.colors, check_ambiguity=False)[self._hex]
else:
self._name = "undefined"
self._rgb = hex2rgb(self._hex, normalise=True)
else:
# just to warn the user
self.get_standard_hex_color(value)
hex = property(_get_hex, _set_hex,
doc="getter/setter the hexadecimal value.")
def _get_rgb(self):
return self._rgb
def _set_rgb(self, value):
# set name, hex and rgb
self.hex = rgb2hex(*value , normalised=True)
# must reset rgb with its real value (set_hex may round the rgb)
# in _set_hex
self._rgb = value
rgb = property(_get_rgb, _set_rgb,
doc="getter/setter the RGB values (3-length tuple)")
def _get_hsv(self):
hsv = rgb2hsv(*self.rgb)
return hsv
def _set_hsv(self, value):
# TODO: value must be normalised
self.rgb = hsv2rgb(*value)
hsv = property(_get_hsv, _set_hsv,
doc="getter/setter the HSV values (3-length tuple)")
def _get_hls(self):
hls = rgb2hls(*self.rgb)
return hls
def _set_hls(self, value):
#hls = _normalise(*value, mode="hls")
#else:
hls = value
self.rgb = hls2rgb(*hls)
hls = property(_get_hls, _set_hls,
doc="getter/setter the HLS values (3-length tuple)")
def _get_lightness(self):
return self.hls[1]
def _set_lightness(self, lightness):
h, l, s = self.hls
self.hls = (h, lightness, s)
lightness = property(_get_lightness, _set_lightness,
doc="getter/setter the lightness in the HLS triplet")
def _get_saturation_hls(self):
return self.hls[2]
def _set_saturation_hls(self, saturation):
h, l, s = self.hls
self.hls = (h, l, saturation)
saturation_hls = property(_get_saturation_hls, _set_saturation_hls,
doc="getter/setter the saturation in the HLS triplet")
def _get_hue(self):
return self.hls[0]
def _set_hue(self, hue):
h, l, s = self.hls
self.hls = (hue, l, s)
hue = property(_get_hue, _set_hue,
doc="getter/setter the saturation in the HLS triplet")
def _get_red(self):
return self.rgb[0]
def _set_red(self, red):
r, g, b = self.rgb
self.rgb = (red,g,b)
red = property(_get_red, _set_red,
doc="getter/setter for the red color in RGB triplet")
def _get_green(self):
return self.rgb[1]
def _set_green(self, green):
r, g, b = self.rgb
self.rgb = (r, green, b)
green = property(_get_green, _set_green,
doc="getter/setter for the green color in RGB triplet")
def _get_blue(self):
return self.rgb[2]
def _set_blue(self, blue):
r, g, b = self.rgb
self.rgb = (r, g, blue)
blue = property(_get_blue, _set_blue,
doc="getter/setter for the blue color in RGB triplet")
def _get_value(self):
        return self.hsv[2]
def _set_value(self, value):
h, s, v = self.hsv
self.hsv = (h, s, value)
value = property(_get_value, _set_value,
doc="getter/setter the value in the HSV triplet")
def _get_yiq(self):
return colorsys.rgb_to_yiq(*self.rgb)
yiq = property(_get_yiq, doc="Getter for the YIQ triplet")
def __str__(self):
txt = 'Color {0}\n'.format(self.name)
txt+= ' hexa code: {0}\n'.format(self.hex)
txt+= ' RGB code: {0}\n'.format(self.rgb)
txt+= ' RGB code (un-normalised): {0}\n\n'.format([x*255 for x in self.rgb])
txt+= ' HSV code: {0}\n'.format(self.hsv)
txt+= ' HSV code: (un-normalised) {0} {1} {2}\n\n'.format(self.hsv[0]*360, self.hsv[1]*100, self.hsv[2]*100)
txt+= ' HLS code: {0}\n'.format(self.hls)
txt+= ' HLS code: (un-normalised) {0} {1} {2}\n\n'.format(self.hls[0]*360, self.hls[1]*100, self.hls[2]*100)
return txt
class Colormap(object):
"""Class to create matplotlib colormap
This example show how to get the pre-defined colormap called *heat*
.. plot::
:include-source:
from pylab import *
from colormap.colors import Colormap
c = Colormap()
cmap = c.get_cmap_heat()
c.test_colormap(cmap)
You may be more interested in building your own colormap::
# design your own colormap
d = {'blue': [0,0,0,1,1,1,0],
'green':[0,1,1,1,0,0,0],
'red': [1,1,0,0,0,1,1]}
cmap = c.cmap(d, reverse=False)
# see the results
c.test_colormap(cmap)
If you want a simple linear colormap, you can use the example above,
or use the :meth:`cmap_linear`. For instance for a diverging colormap
from red to green (with with color in between)::
cmap = c.cmap_linear("red", "white", "green")
c.test_colormap(cmap)
Even simpler, you can use a bicolor colormap :meth:`cmap_bicolor`. For instance
for a red to green colormap::
cmap = c.cmap_bicolor("red", "green")
c.test_colormap(cmap)
From matplotlib documentation, colormaps falls into 4 categories:
#. Sequential schemes for unipolar data that progresses from low to high
#. Diverging schemes for bipolar data that emphasizes positive or
       negative deviations from a central value
#. Cyclic schemes meant for plotting values that wrap around at the
endpoints, such as phase angle, wind direction, or time of day
#. Qualitative schemes for nominal data that has no inherent ordering,
where color is used only to distinguish categories
:references: matplotlib documentation and examples
http://matplotlib.org/examples/color/colormaps_reference.html
"""
def _get_colormap_mpl(self):
try:
from matplotlib.pyplot import colormaps as _cmaps
return _cmaps()
except:
return []
colormaps = property(_get_colormap_mpl)
def _get_sequentials(self):
return ['Blues', 'BuGn', 'BuPu', 'GnBu', 'Greens', 'Greys', 'OrRd',
'Oranges', 'PuBu', 'PuBuGn', 'PuRd', 'Purples', 'RdPu',
'Reds', 'YlGn', 'YlGnBu', 'YlOrBr', 'YlOrRd']
sequentials = property(_get_sequentials)
def _get_sequentials2(self):
return ['afmhot', 'autumn', 'bone', 'cool', 'copper',
'gist_heat', 'gray', 'hot', 'pink',
'spring', 'summer', 'winter']
sequentials2 = property(_get_sequentials2)
def _get_diverging(self):
return ['BrBG', 'PRGn', 'PiYG', 'PuOr', 'RdBu', 'RdGy', 'RdYlBu',
'RdYlGn', 'Spectral', 'bwr', 'coolwarm', 'seismic']
diverging = property(_get_diverging)
def _get_diverging_black(self):
return ['red_black_sky', 'red_black_blue', 'red_black_green', 'yellow_black_blue',
'yellow_black_sky', 'red_black_orange', 'pink_black_green(w3c)'
]
diverging_black = property(_get_diverging_black)
def _get_qualitative(self):
return ['Accent', 'Dark2', 'Paired', 'Pastel1', 'Pastel2',
'Set1', 'Set2', 'Set3']
qualitative = property(_get_qualitative)
def _get_misc(self):
return ['gist_earth', 'terrain', 'ocean', 'gist_stern',
'brg', 'CMRmap', 'cubehelix', 'gnuplot', 'gnuplot2', 'gist_ncar',
'nipy_spectral', 'jet', 'rainbow', 'gist_rainbow', 'hsv', 'flag', 'prism']
misc = property(_get_misc)
def plot_rgb_from_hex_list(self, cols):
"""This functions takes a list of hexadecimal values and plots
the RGB curves. This can be handy to figure out the RGB functions
to be used in the :meth:`get_cmap`.
.. plot::
:include-source:
:width: 60%
from colormap.colors import Colormap
c = Colormap()
t = ['#FF0000FF', '#FF4D00FF', '#FF9900FF', '#FFE500FF',
'#CCFF00FF', '#80FF00FF', '#33FF00FF', '#00FF19FF',
'#00FF66FF', '#00FFB2FF', '#00FFFFFF', '#00B3FFFF',
'#0066FFFF', '#001AFFFF', '#3300FFFF', '#7F00FFFF',
'#CC00FFFF','#FF00E6FF','#FF0099FF', '#FF004DFF']
c.plot_rgb_from_hex_list(t)
"""
import pylab
red = [hex2rgb(x)[0]/255. for x in cols]
blue = [hex2rgb(x)[2]/255. for x in cols]
green = [hex2rgb(x)[1]/255. for x in cols]
x = pylab.linspace(0, 1, len(cols))
pylab.clf()
pylab.plot(x, red, 'ro-', alpha=0.5)
pylab.plot(x, green, 'gs-', alpha=0.5, markersize=15)
pylab.plot(x, blue, 'bx-', alpha=0.5, markersize=15)
pylab.ylim([-0.1, 1.1])
def cmap_bicolor(self, color1, color2, reverse=False, N=256):
"""Provide 3 colors in format accepted by :class:`Color`
::
>>> red = Color('red')
>>> white = Color('white')
        >>> cmap = Colormap().cmap_bicolor(red, white)
"""
c1 = Color(color1)
c2 = Color(color2)
dico = {'red': [c1.red, c2.red],
'green':[c1.green, c2.green],
'blue':[c1.blue, c2.blue]}
return self.cmap(dico, reverse=reverse, N=N)
def cmap_linear(self, color1, color2, color3, reverse=False, N=256):
"""Provide 3 colors in format accepted by :class:`Color`
::
red = Color('red')
            cmap = Colormap().cmap_linear(red, 'white', '#0000FF')
"""
c1 = Color(color1)
c2 = Color(color2)
c3 = Color(color3)
dico = {'red': [c1.red, c2.red, c3.red],
'green':[c1.green, c2.green, c3.green],
'blue':[c1.blue, c2.blue, c3.blue]}
return self.cmap(dico, reverse=reverse, N=N)
def cmap(self, colors=None, reverse=False, N=256):
"""Return a colormap object to be used within matplotlib
:param dict colors: a dictionary that defines the RGB colors to be
used in the colormap. See :meth:`get_cmap_heat` for an example.
:param bool reverse: reverse the colormap is set to True (defaults to False)
:param int N: Defaults to 50
"""
# matplotlib colormaps
if colors in self.colormaps:
if reverse and colors.endswith("_r") is False:
colors += "_r"
from matplotlib.cm import get_cmap
return get_cmap(colors)
# custom ones
elif colors in self.diverging_black:
c1, c2, c3 = colors.split("_")
# special case of sky, which does not exists
c3 = c3.replace("sky", "deep sky blue")
return self.cmap_linear(c1, c2, c3)
elif colors == 'heat':
return self.get_cmap_heat()
elif colors == 'heat_r':
return self.get_cmap_heat_r()
# Keep these dependencies inside the function to allow
# installation of colormap without those dependencies
# FIXME remove numpy dependencies
import numpy as np
# extracted from R, heat.colors(20)
if reverse:
for k in colors.keys():
colors[k].reverse()
# If index not given, RGB colors are evenly-spaced in colormap.
index = np.linspace(0, 1, len(colors['red']))
# Adapt color_data to the form expected by LinearSegmentedColormap.
color_data = dict((key, [(x, y, y) for x, y in zip(index, value)])
for key, value in list(colors.items()))
import matplotlib
f = matplotlib.colors.LinearSegmentedColormap
m = f('my_color_map', color_data, N)
return m
def get_cmap_heat(self):
"""Return a heat colormap matplotlib-compatible colormap
This heat colormap should be equivalent to heat.colors() in R.
::
>>> from colormap.colors import Colormap
>>> cmap = Colormap.get_cmap_heat()
You can generate the colormap based solely on this information for the RGB
functions along::
d= { 'blue':[0,0,0,0,1],
'green':[0,.35,.7,1,1],
'red':[1,1,1,1,1]}
cmap = Colormap.get_cmap(d)
"""
return self.cmap(
{ 'blue':[0, 0, 0, 0, 1],
'green':[0, .35, .7, 1, 1],
'red':[1, 1, 1, 1, 1]}, reverse=False)
def get_cmap_heat_r(self):
"""Return a heat colormap matplotlib-compatible colormap
Same as :meth:`get_cmap_heat` but reversed
"""
return self.cmap(
{ 'blue':[0, 0, 0, 0, 1],
'green':[0, .35, .7, 1, 1],
'red':[1, 1, 1, 1, 1]}, reverse=True)
def get_cmap_rainbow(self):
"""colormap similar to rainbow colormap from R
.. note:: The red is actually appearing on both sides... Yet
this looks like what is coded in R 3.0.1
"""
return self.cmap(
{ 'blue': [0, 0, 0, 1, 1, 1, 0],
'green':[0, 1, 1, 1, 0, 0, 0],
'red': [1, 1, 0, 0, 0, 1, 1]}, reverse=False)
def get_cmap_red_green(self):
return self.cmap(
{ 'green': [0, 0.4, 0.6, .75, .8, .9, 1, .9, .8, .6],
'blue' : [0, .4, .6, .75, .8, .7, .6, .35, .17, .1],
'red': [1, 1, 1, 1, 1, .9, .8, .6, .3, .1]}, reverse=True)
def test_colormap(self, cmap=None):
"""plot one colormap for testing
By default, test the :meth:`get_cmap_heat`
"""
if cmap is None:
cmap = self.get_cmap_heat()
import numpy as np
from pylab import clf, pcolor, colorbar, show, linspace, axis
A, B = np.meshgrid(linspace(0, 10, 100), linspace(0, 10, 100))
clf()
pcolor((A-5)**2+(B-5)**2, cmap=cmap)
colorbar()
show()
axis('off')
def plot_colormap(self, cmap_list=None):
"""cmap_list list of valid cmap or name of a set (sequential,
diverging,)
if none, plot all known colors
.. .. plot::
.. :width:80%
.. :include-source:
.. from colormap import Colormap
.. c = Colormap()
.. c.plot_colormap('sequential')
"""
from pylab import subplots
if isinstance(cmap_list, str):
if cmap_list in ['sequentials','sequentials2','qualitative',
'misc','diverging', 'diverging_black']:
cmap_list = getattr(self, cmap_list)
else:
cmap_list = [cmap_list]
if isinstance(cmap_list, list) is not True:
            raise TypeError("""input must be a list of strings or a single string. Each string should be a known colormap name. For a user-defined cmap, use test_colormap""")
for this in cmap_list:
if this not in self.colormaps and this not in self.diverging_black:
raise ValueError("unknown colormap name. Please check valid names in colormaps attribute")
nrows = len(cmap_list)
gradient = [x/255. for x in range(0,256)]
gradient = [gradient, gradient]
#np.vstack((gradient, gradient))
fig, axes = subplots(nrows=nrows)
fig.subplots_adjust(top=0.95, bottom=0.05, left=0.05, right=0.8)
for ax, name in zip(axes, cmap_list):
ax.imshow(gradient, aspect='auto', cmap=self.cmap(name))
pos = list(ax.get_position().bounds)
x_text = pos[2] + 0.08
y_text = pos[1] + pos[3]/2.
fig.text(x_text, y_text, name, va='center', ha='left', fontsize=10)
# Turn off *all* ticks & spines, not just the ones with colormaps.
for ax in axes:
ax.set_axis_off()
| bsd-3-clause | -6,709,208,497,947,145,000 | 31.879919 | 158 | 0.56049 | false |
UltronAI/Deep-Learning | Pattern-Recognition/hw2-Feature-Selection/skfeature/example/test_chi_square.py | 1 | 1627 | import scipy.io
from sklearn.metrics import accuracy_score
from sklearn import cross_validation
from sklearn import svm
from skfeature.function.statistical_based import chi_square
def main():
# load data
mat = scipy.io.loadmat('../data/BASEHOCK.mat')
X = mat['X'] # data
X = X.astype(float)
y = mat['Y'] # label
y = y[:, 0]
n_samples, n_features = X.shape # number of samples and number of features
# split data into 10 folds
ss = cross_validation.KFold(n_samples, n_folds=10, shuffle=True)
# perform evaluation on classification task
num_fea = 100 # number of selected features
clf = svm.LinearSVC() # linear SVM
correct = 0
for train, test in ss:
# obtain the chi-square score of each feature
score = chi_square.chi_square(X, y)
# rank features in descending order according to score
idx = chi_square.feature_ranking(score)
# obtain the dataset on the selected features
selected_features = X[:, idx[0:num_fea]]
# train a classification model with the selected features on the training dataset
clf.fit(selected_features[train], y[train])
# predict the class labels of test data
y_predict = clf.predict(selected_features[test])
# obtain the classification accuracy on the test data
acc = accuracy_score(y[test], y_predict)
correct = correct + acc
# output the average classification accuracy over all 10 folds
print 'Accuracy:', float(correct)/10
if __name__ == '__main__':
main() | mit | 7,051,745,803,490,119,000 | 31.244898 | 89 | 0.635526 | false |
ekarlso/partizan | tests/functional/v1/test_category.py | 1 | 2709 | # -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
from oslo_serialization import jsonutils
from tests.functional import base
from partizan.db import models
class CategoryTest(base.BaseTest):
def test_create(self):
data = {
"name": "IC"
}
response = self.simulate_post('/categories', body=data)
body = jsonutils.loads(response[0])
self.assertEqual(self.srmock.status, falcon.HTTP_201)
self.assertEqual(data["name"], body["name"])
self.assertIn("id", body)
self.assertIn("updated_at", body)
self.assertIn("created_at", body)
self.assertIn("created_at", body)
def test_get(self):
data = {
"name": "IC"
}
obj = models.Category.create(data)
response = self.simulate_get('/categories/%s' % obj.id)
body = jsonutils.loads(response[0])
self.assertEqual(self.srmock.status , falcon.HTTP_200)
self.assertEqual(data["name"], body["name"])
self.assertIn("id", body)
self.assertIn("updated_at", body)
self.assertIn("created_at", body)
self.assertIn("created_at", body)
def test_list(self):
items = [
{
"name": "Resistors"
},
{
"name": "IC"
}
]
for i in items:
obj = models.Category.create(i)
response = self.simulate_get('/categories')
body = jsonutils.loads(response[0])
self.assertEqual(self.srmock.status , falcon.HTTP_200)
for i in xrange(0, len(items)):
item = body["categories"][i]
self.assertEqual(items[i]["name"], item["name"])
self.assertIn("id", item)
self.assertIn("updated_at", item)
self.assertIn("created_at", item)
self.assertIn("created_at", item)
def test_delete(self):
data = {
"name": "IC"
}
obj = models.Category.create(data)
response = self.simulate_delete('/categories/%s' % obj.id)
self.assertEqual(self.srmock.status, falcon.HTTP_204)
| apache-2.0 | -6,247,913,441,934,495,000 | 29.438202 | 78 | 0.58804 | false |
ngsxfem/ngsxfem | demos/fictdom.py | 1 | 4669 | """
In this example we solve a scalar *unfitted* PDE problem. As a
discretisation method we use a level set based geometry description and
a Cut (or Fictitious) Finite element method with a Nitsche formulation
to impose boundary conditions. For stability we add a ghost penalty
stabilization.
Domain:
-------
The domain is [-1,1]^2 while the interface is a ring described by a
level set function. In the discretisation the level set function is
approximated with a piecewise linear interpolation. This approximate
geometry is then mapped by applying a mesh deformation resulting in
a higher order geometry approximation.
PDE problem:
------------
- (u_xx + u_yy) = f in Omega (where levelset is negative)
u = 0 on dOmega (where levelset is zero)
The r.h.s. term f is chosen according to a manufactured solution.
Discretisation:
---------------
* Background finite element space restricted to active domain (CutFEM)
* Nitsche formulation to impose boundary conditions, see. e.g. [1]
* Ghost penalty stabilization to deal with bad cuts (version as in [2])
Implementational aspects:
-------------------------
* Geometry approximation using isoparametric unfitted FEM
* A (sparse) direct solver is applied to solve the arising linear systems.
References:
-----------
All concepts that are used here are explained in the jupyter tutorials
`basics.ipynb`, `intlset.ipynb` and `cutfem.ipynb`.
Literature:
-----------
[1] E. Burman, P. Hansbo, Fictitious domain finite element methods using
cut elements: II. A stabilized Nitsche method, Appl. Num. Math.
62(4):328-341, 2012.
[2] J. Preuß, Higher order unfitted isoparametric space-time FEM on
moving domains. Master's thesis, NAM, University of Göttingen, 2018.
"""
# ------------------------------ LOAD LIBRARIES -------------------------------
from netgen.geom2d import SplineGeometry
from ngsolve import *
from ngsolve.internal import *
from xfem import *
from xfem.lsetcurv import *
ngsglobals.msg_level = 2
# -------------------------------- PARAMETERS ---------------------------------
# Quadrilateral (or simplicial mesh)
quad_mesh = False
# Mesh diameter
maxh = 0.1
# Finite element space order
order = 3
# Stabilization parameter for ghost-penalty
gamma_stab = 0.1
# Stabilization parameter for Nitsche
lambda_nitsche = 10 * order * order
# ----------------------------------- MAIN ------------------------------------
# Geometry and Mesh
square = SplineGeometry()
square.AddRectangle((-1, -1), (1, 1), bc=1)
ngmesh = square.GenerateMesh(maxh=maxh, quad_dominated=quad_mesh)
mesh = Mesh(ngmesh)
# Manufactured exact solution for monitoring the error
r2 = 3 / 4 # outer radius
r1 = 1 / 4 # inner radius
rc = (r1 + r2) / 2.0
rr = (r2 - r1) / 2.0
r = sqrt(x**2 + y**2)
levelset = IfPos(r - rc, r - rc - rr, rc - r - rr)
exact = (20 * (r2 - sqrt(x**2 + y**2)) * (sqrt(x**2 + y**2) - r1)).Compile()
coeff_f = - (exact.Diff(x).Diff(x) + exact.Diff(y).Diff(y)).Compile()
# Higher order level set approximation
lsetmeshadap = LevelSetMeshAdaptation(mesh, order=order, threshold=0.1,
discontinuous_qn=True)
deformation = lsetmeshadap.CalcDeformation(levelset)
lsetp1 = lsetmeshadap.lset_p1
# Element, facet and dof marking w.r.t. boundary approximation with lsetp1:
ci = CutInfo(mesh, lsetp1)
hasneg = ci.GetElementsOfType(HASNEG)
hasif = ci.GetElementsOfType(IF)
# facets used for stabilization:
ba_facets = GetFacetsWithNeighborTypes(mesh, a=hasneg, b=hasif)
Vhbase = H1(mesh, order=order, dirichlet=[], dgjumps=True)
Vh = Restrict(Vhbase, hasneg)
gfu = GridFunction(Vh)
u, v = Vh.TrialFunction(), Vh.TestFunction()
h = specialcf.mesh_size
n = Normalize(grad(lsetp1))
# integration domains:
dx = dCut(lsetp1, NEG, definedonelements=hasneg, deformation=deformation)
ds = dCut(lsetp1, IF, definedonelements=hasif, deformation=deformation)
dw = dFacetPatch(definedonelements=ba_facets, deformation=deformation)
a = BilinearForm(Vh, symmetric=False)
# Diffusion term
a += grad(u) * grad(v) * dx
# Nitsche term
a += -grad(u) * n * v * ds
a += -grad(v) * n * u * ds
a += (lambda_nitsche / h) * u * v * ds
# Ghost penalty stabilization (near the boundary)
a += gamma_stab / h**2 * (u - u.Other()) * (v - v.Other()) * dw
# R.h.s. term:
f = LinearForm(Vh)
f += coeff_f * v * dx
# Assemble system
a.Assemble()
f.Assemble()
# Solve linear system
gfu.vec.data = a.mat.Inverse(Vh.FreeDofs()) * f.vec
# Measure the error
l2error = sqrt(Integrate((gfu - exact)**2*dx, mesh))
print("L2 Error: {0}".format(l2error))
# visualization:
Draw(deformation, mesh, "deformation")
DrawDC(lsetp1, gfu, 0, mesh, "uh", deformation=deformation)
| lgpl-3.0 | 5,536,156,285,712,531,000 | 30.533784 | 79 | 0.674095 | false |
Esri/raster-functions | functions/PercentAboveThreshold.py | 1 | 5988 | import numpy as np
from datetime import timedelta
import datetime
#import sys
#import os
#import pickle
#debug_logs_directory =
class PercentAboveThreshold():
def __init__(self):
self.name = 'Percent Above or Below Threshold'
        self.description = 'Calculates the percentage of pixels that are above or below ' \
                           'a threshold value. The threshold value is set in the raster function. ' \
                           'The raster function can be applied to a time-enabled stack of rasters in ' \
                           'a mosaic dataset.'
self.times = []
self.start_year = None
self.end_year = None
self.threshold = 50
def getParameterInfo(self):
return [
{
'name': 'rasters',
'dataType': 'rasters',
'value': None,
'required': True,
'displayName': 'Rasters',
'description': 'The collection of rasters to analyze.',
},
{
'name': 'start_date',
'dataType': 'string',
'value': '1/1/2019 12:30:00',
'required': True,
'displayName': 'Start Date',
'description': 'The beginning date of analysis (inclusive of entire year).',
},
{
'name': 'end_date',
'dataType': 'string',
'value': '12/31/2019 23:30:00',
'required': True,
'displayName': 'End Date',
'description': 'The final date of analysis (inclusive of entire year).',
},
{
'name': 'threshold',
'dataType': 'numeric',
'value': 45,
'required': True,
'displayName': 'Value Threshold',
'description': 'Value Threshold.',
}
]
def getConfiguration(self, **scalars):
return {
'inheritProperties': 4 | 8, # inherit everything but the pixel type (1) and NoData (2)
'invalidateProperties': 2 | 4, # invalidate histogram and statistics because we are modifying pixel values
'inputMask': True, # need raster mask of all input rasters in .updatePixels().
'resampling': False, # process at native resolution
'keyMetadata': ['AcquisitionDate']
}
def updateRasterInfo(self, **kwargs):
# outStats = {'minimum': -1, 'maximum': 1}
# outStatsTuple = tuple(outStats for i in range(outBandCount))
kwargs['output_info']['pixelType'] = 'f4' # output pixels are floating-point values
kwargs['output_info']['histogram'] = () # no statistics/histogram for output raster specified
kwargs['output_info']['statistics'] = () # outStatsTuple
#kwargs['output_info'][
# 'bandCount'] = outBandCount # number of output bands. 7 time bands, 3 TC bands, creates 21 bands
self.times = kwargs['rasters_keyMetadata']
self.start_date = kwargs['start_date']
self.end_date = kwargs['end_date']
self.threshold = int(kwargs['threshold'])
return kwargs
def updateKeyMetadata(self, names, bandIndex, **keyMetadata):
return keyMetadata
def updatePixels(self, tlc, shape, props, **pixelBlocks):
#fname = '{:%Y_%b_%d_%H_%M_%S}_t.txt'.format(datetime.datetime.now())
#filename = os.path.join(debug_logs_directory, fname)
#file = open(filename,"w")
#file.write("File Open.\n")
pix_time = [j['acquisitiondate'] for j in self.times]
#pickle_filename = os.path.join(debug_logs_directory, fname)
#pickle.dump(pix_time, open(pickle_filename[:-4]+'pix_time.p',"wb"))
#file.write(str(len(pix_time))+ "\n")
pix_blocks = pixelBlocks['rasters_pixels']
pix_array = np.asarray(pix_blocks)
#pickle_filename = os.path.join(debug_logs_directory, fname)
#pickle.dump(pix_array, open(pickle_filename[:-4]+'pix_blocks.p',"wb"))
pix_array_dim = pix_array.shape
num_squares_x = pix_array_dim[2]
num_squares_y = pix_array_dim[3]
#file.write("Filtering Based on Time\n")
# This worked before I added time Filtering:
#pix_as_array = np.reshape(pix_array, -1)
#total_count = np.size(pix_as_array)
#vals_above_thresh_count = np.size(np.where(pix_as_array <= self.threshold))
#outBlock = np.ones((num_squares_x, num_squares_y)) * (vals_above_thresh_count / total_count) * 100
t_array = []
ind_array = []
start_date = self.start_date #"1/1/2019 12:30:00"
end_date = self.end_date #"7/7/2019 12:30:00"
start_datetime = datetime.datetime.strptime(start_date, '%m/%d/%Y %H:%M:%S') # %p')
end_datetime = datetime.datetime.strptime(end_date, '%m/%d/%Y %H:%M:%S') # %p')
for ind, time in enumerate(pix_time):
temp_t = datetime.datetime(1900, 1, 1) + timedelta(time - 2)
if temp_t >= start_datetime and temp_t <= end_datetime:
t_array.append(temp_t)
ind_array.append(ind)
#time_within = [pix_time[x] for x in ind_array]
pix_array_within = pix_array[ind_array, :, :, :]
#threshold = 50
pix_as_array = np.reshape(pix_array_within, -1)
total_count = np.size(pix_as_array)
vals_above_thresh_count = np.size(np.where(pix_as_array <= self.threshold)) #< below, > above
        outBlock = np.ones((num_squares_x, num_squares_y)) * (float(vals_above_thresh_count) / total_count) * 100
#file.write("DONE\n")
#file.close()
pixelBlocks['output_pixels'] = outBlock.astype(props['pixelType'], copy=False)
#masks = np.array(pixelBlocks['rasters_mask'], copy=False)
#pixelBlocks['output_mask'] = np.all(masks, axis=0).astype('u1', copy=False)
return pixelBlocks
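# --- Added sketch (not part of the original raster function): the core
# percent-below-threshold computation from updatePixels(), run on a small
# synthetic (time, band, rows, cols) stack.
if __name__ == '__main__':
    demo_stack = np.linspace(0, 100, 2 * 1 * 4 * 4).reshape((2, 1, 4, 4))
    demo_threshold = 45
    flat = np.reshape(demo_stack, -1)
    pct = 100.0 * np.size(np.where(flat <= demo_threshold)) / np.size(flat)
    print('{0:.1f}% of pixels are <= {1}'.format(pct, demo_threshold))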
| apache-2.0 | 2,665,880,069,612,288,500 | 38.92 | 119 | 0.559619 | false |
jarvisqi/nlp_learn | gensim/text.py | 1 | 2054 | import jieba
import pandas as pd
from gensim import corpora, models, similarities
# Training samples (short Chinese legal-topic sentences; kept in Chinese on
# purpose, since jieba segments Chinese text)
raw_documents = [
'0无偿居间介绍买卖毒品的行为应如何定性',
'1吸毒男动态持有大量毒品的行为该如何认定',
'2如何区分是非法种植毒品原植物罪还是非法制造毒品罪',
'3为毒贩贩卖毒品提供帮助构成贩卖毒品罪',
'4将自己吸食的毒品原价转让给朋友吸食的行为该如何认定',
'5为获报酬帮人购买毒品的行为该如何认定',
'6毒贩出狱后再次够买毒品途中被抓的行为认定',
'7虚夸毒品功效劝人吸食毒品的行为该如何认定',
'8妻子下落不明丈夫又与他人登记结婚是否为无效婚姻',
'9一方未签字办理的结婚登记是否有效',
'10夫妻双方1990年按农村习俗举办婚礼没有结婚证 一方可否起诉离婚',
'11结婚前对方父母出资购买的住房写我们二人的名字有效吗',
'12身份证被别人冒用无法登记结婚怎么办?',
'13同居后又与他人登记结婚是否构成重婚罪',
'14未办登记只举办结婚仪式可起诉离婚吗',
'15同居多年未办理结婚登记,是否可以向法院起诉要求离婚'
]
def main():
corpora_documents = []
for item_text in raw_documents:
item_str = list(jieba.cut(item_text))
corpora_documents.append(item_str)
dictionary = corpora.Dictionary(corpora_documents)
corpus = [dictionary.doc2bow(text) for text in corpora_documents]
    similarity = similarities.Similarity('-Similarity-index', corpus, num_features=400)
test_data_1 = '你好,我想问一下我想离婚他不想离,孩子他说不要,是六个月就自动生效离婚'
test_cut_raw_1 = jieba.cut(test_data_1)
test_corpus_1 = dictionary.doc2bow(test_cut_raw_1)
similarity.num_best = 5
    # Return the most similar samples as (index_of_document, similarity) tuples
print(similarity[test_corpus_1])
if __name__ == '__main__':
main()
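# Added note (not in the original): similarities.Similarity shards its index
# to disk under the '-Similarity-index' prefix used above; for a toy corpus
# like this, the in-memory similarities.MatrixSimilarity(corpus,
# num_features=400) would work just as well.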
| mit | -1,880,925,002,496,043,800 | 27.755556 | 86 | 0.704791 | false |
FlemingResearch/Indra | pkg/eventsProcessor.py | 1 | 4484 | #!/usr/bin/env python
#
# developed by Sergey Markelov (2013)
#
import random
import subprocess
from config import Config, BingRewardsReportItem
class EventsProcessor:
"""
Processes events declared in config.xml
"""
OK = 0
RETRY = 1
def __init__(self, config, reportItem):
if config is None: raise ValueError("config is None")
if not isinstance(config, Config): raise TypeError("config is not of Config type")
if reportItem is None: raise ValueError("reportItem is None")
if not isinstance(reportItem, BingRewardsReportItem): raise TypeError("reportItem is not of BingRewardsReportItem type")
self.config = config
self.reportItem = reportItem
def __processRetry(self, retry):
"""
Returns number of seconds to sleep before the next retry,
or negative value, indicating no retry should be done
"""
if self.reportItem.retries >= retry.count:
return -1
if retry.ifStatement and not retry.ifStatement.evaluate(self.reportItem):
return -1
t = retry.interval + random.uniform(0, retry.salt)
return t
def __processNotify(self, notify):
if notify.ifStatement and not notify.ifStatement.evaluate(self.reportItem):
return
self.reportItem.notify = notify
self.__processCmd(notify.cmd)
def __processCmd(self, cmd):
command = cmd
for specifier in Config.Event.Specifier.Dictionary.keys():
val = Config.Event.Specifier.evaluate(specifier, self.reportItem)
command = command.replace(specifier, '"' + str(val) + '"')
# TODO: check return code from subprocess.call() ?
subprocess.call(command, shell = True)
def __processEventOnReportItem(self, eventType):
event = self.config.getEvent(eventType, self.reportItem)
if event:
if event.retry:
result = self.__processRetry(event.retry)
if result >= 0:
return ( EventsProcessor.RETRY, result )
if event.notifies:
for notify in event.notifies:
self.__processNotify(notify)
return (EventsProcessor.OK, 0)
def processReportItem(self):
"""
Processes events from self.config based on the result in self.reportItem
returns a tuple of (resultCode, extra):
(OK, 0) - nothing needs to be done, _extra_ can be ignored
(RETRY, sec) - an account should be retried in _sec_ seconds
"""
if not self.config.events:
return (EventsProcessor.OK, 0)
if self.reportItem.error:
return self.__processEventOnReportItem(Config.Event.onError)
return self.__processEventOnReportItem(Config.Event.onComplete)
@staticmethod
def onScriptComplete(config):
"""
Processes Config.Event.onScriptComplete
_config_ an instance of Config
returns nothing
"""
if config is None: raise ValueError("config is None")
if not isinstance(config, Config): raise TypeError("config is not of Config type")
event = config.getEvent(Config.Event.onScriptComplete)
if event is None:
return
for notify in event.notifies:
# TODO: check return code from subprocess.call() ?
subprocess.call(notify.cmd, shell = True)
@staticmethod
def onScriptFailure(config, exception):
"""
Processes Config.Event.onScriptComplete
_config_ an instance of Config
_exception_ is an exception derived from BaseException which caused the script to fail
By the nature of this function, it won't fail if _exception_ is None or
is not of the class BaseException, but it's better to supply one
This function won't fail if _config_ is not supplied. In that case it will simply
reraise the exception
returns nothing
"""
if config is None: raise
if not isinstance(config, Config): raise
event = config.getEvent(Config.Event.onScriptFailure)
if event is None:
raise
description = str(exception) if exception else "No exception was supplied"
description = "\"" + description.replace("\"", "") + "\""
for notify in event.notifies:
cmd = notify.cmd + " " + description
subprocess.call(cmd, shell = True)
| mit | -6,217,829,494,796,182,000 | 32.969697 | 128 | 0.627342 | false |
frmdstryr/enamlx | enamlx/qt/qt_tree_view.py | 1 | 8321 | # -*- coding: utf-8 -*-
"""
Copyright (c) 2015, Jairus Martin.
Distributed under the terms of the MIT License.
The full license is in the file COPYING.txt, distributed with this software.
Created on Aug 28, 2015
"""
from atom.api import (
Typed, Instance, Property, Int
)
from enamlx.qt.qt_abstract_item_view import (
QtAbstractItemView, QAbstractAtomItemModel, IS_QT4
)
from enamlx.widgets.tree_view import (
ProxyTreeViewItem, ProxyTreeView, ProxyTreeViewColumn, AbstractWidgetItem
)
from enamlx.qt.qt_abstract_item import AbstractQtWidgetItem, RESIZE_MODES
from qtpy.QtWidgets import QTreeView
from qtpy.QtCore import QAbstractItemModel, QModelIndex
from enaml.core.pattern import Pattern
from enaml.qt.qt_widget import QtWidget
from enaml.application import timed_call
class QAtomTreeModel(QAbstractAtomItemModel, QAbstractItemModel):
def rowCount(self, parent):
d = self.declaration
if d.vertical_headers:
return len(d.vertical_headers)
elif parent.isValid():
item = parent.internalPointer()
d = item.declaration
return len(d.items) if d and not d.is_destroyed else 0
def columnCount(self, parent):
d = self.declaration
if d.horizontal_headers:
return len(d.horizontal_headers)
elif parent.isValid():
item = parent.internalPointer()
d = item.declaration
return len(d._columns) if d and not d.is_destroyed else 0
def index(self, row, column, parent):
""" The index should point to the corresponding QtControl in the
enaml object hierarchy.
"""
item = parent.internalPointer()
#: If the parent is None
d = self.declaration if item is None else item.declaration
if row < len(d._items):
proxy = d._items[row].proxy
assert isinstance(proxy, QtTreeViewItem), \
"Invalid item {}".format(proxy)
else:
proxy = d.proxy
return self.createIndex(row, column, proxy)
def parent(self, index):
if not index.isValid():
return QModelIndex()
item = index.internalPointer()
if not isinstance(item, QtTreeViewItem) or item.is_destroyed:
return QModelIndex()
parent = item.parent()
if not isinstance(parent, QtTreeViewItem) or parent.is_destroyed:
return QModelIndex()
d = parent.declaration
return self.createIndex(d.row, 0, parent)
def itemAt(self, index=None):
if not index or not index.isValid():
return
item = index.internalPointer()
assert isinstance(item, QtTreeViewItem), \
"Invalid index: {} at ({},{}) {}".format(
index, index.row(), index.column(), item)
d = item.declaration
try:
c = index.column() # - d.visible_column
return d._columns[c].proxy
except IndexError:
return
class QtTreeView(QtAbstractItemView, ProxyTreeView):
#: Tree widget
widget = Typed(QTreeView)
#: Root index
index = Instance(QModelIndex, ())
def create_widget(self):
self.widget = QTreeView(self.parent_widget())
def init_widget(self):
super(QtTreeView, self).init_widget()
d = self.declaration
self.set_show_root(d.show_root)
def init_model(self):
self.set_model(QAtomTreeModel(parent=self.widget))
# -------------------------------------------------------------------------
# Widget Setters
# -------------------------------------------------------------------------
def set_show_root(self, show):
self.widget.setRootIsDecorated(show)
def set_cell_padding(self, padding):
self.widget.setStyleSheet(
"QTreeView::item { padding: %ipx }" % padding)
def set_horizontal_minimum_section_size(self, size):
self.widget.header().setMinimumSectionSize(size)
def set_horizontal_stretch(self, stretch):
self.widget.header().setStretchLastSection(stretch)
def set_horizontal_headers(self, headers):
self.widget.header().model().layoutChanged.emit()
def set_resize_mode(self, mode):
if IS_QT4:
self.widget.header().setResizeMode(RESIZE_MODES[mode])
else:
self.widget.header().setSectionResizeMode(RESIZE_MODES[mode])
def set_show_horizontal_header(self, show):
header = self.widget.header()
header.show() if show else header.hide()
# -------------------------------------------------------------------------
# View refresh handlers
# -------------------------------------------------------------------------
def _refresh_visible_column(self, value):
self._pending_column_refreshes -= 1
if self._pending_column_refreshes == 0:
d = self.declaration
# TODO: What about parents???
try:
cols = self.model.columnCount(self.index)-d.visible_columns
d.visible_column = max(0, min(value, cols))
except RuntimeError:
#: Since refreshing is deferred several ms later
pass
def _refresh_visible_row(self, value):
self._pending_row_refreshes -= 1
if self._pending_row_refreshes == 0:
d = self.declaration
try:
rows = self.model.rowCount(self.index)-d.visible_rows
d.visible_row = max(0, min(value, rows))
except RuntimeError:
pass
class AbstractQtTreeViewItem(AbstractQtWidgetItem):
""" Base TreeViewItem class """
#: Pending refreshes when loading widgets
_refresh_count = Int(0)
#: Time to wait before loading widget
_loading_interval = Int(100)
def create_widget(self):
if self.declaration:
for child in self.children():
if isinstance(child, (Pattern, QtWidget)):
self.delegate = child
def set_row(self, row):
self._update_index()
def set_column(self, column):
self._update_index()
def _default_index(self):
d = self.declaration
return self.view.model.index(d.row, d.column, self.parent().index)
def _update_index(self):
self.index = self._default_index()
if self.delegate:
            self._refresh_count += 1
timed_call(self._loading_interval, self._update_delegate)
def _update_delegate(self):
""" Update the delegate cell widget. This is deferred so it
does not get called until the user is done scrolling.
"""
self._refresh_count -= 1
if self._refresh_count != 0:
return
try:
delegate = self.delegate
if not self._is_visible():
return
# The table destroys when it goes out of view
# so we always have to make a new one
delegate.create_widget()
delegate.init_widget()
# Set the index widget
self.view.widget.setIndexWidget(self.index, delegate.widget)
except RuntimeError:
# Since this is deferred, the table could be deleted already
# and a RuntimeError is possible
pass
def _is_visible(self):
return self.index.isValid()
def data_changed(self, change):
""" Notify the model that data has changed in this cell! """
self.view.model.dataChanged.emit(self.index, self.index)
class QtTreeViewItem(AbstractQtTreeViewItem, ProxyTreeViewItem):
def _default_view(self):
""" If this is the root item, return the parent
which must be a TreeView, otherwise return the
parent Item's view.
"""
parent = self.parent()
if isinstance(parent, QtTreeView):
return parent
return parent.view
class QtTreeViewColumn(AbstractQtTreeViewItem, ProxyTreeViewColumn):
def _default_view(self):
""" Since the TreeViewColumn must be a child of a TreeViewItem,
simply return the Item's view.
"""
return self.parent().view
def _default_index(self):
d = self.declaration
return self.view.model.index(d.row, d.column, self.parent().index)
| mit | -3,384,336,813,083,965,000 | 32.552419 | 79 | 0.592717 | false |
soarlab/FPTaylor | benchmarks/toplas/print_results.py | 1 | 4016 | #!/usr/bin/env python
import sys
import os
import glob
import decimal
import argparse
parser = argparse.ArgumentParser(
description="Prints out results of FPTaylor experiments (from the log directory)")
parser.add_argument('--prec', type=int, default=2,
help="precision of printed results")
parser.add_argument('--alt-order', action='store_true',
help="alternative order of printed results")
parser.add_argument('--no-name', action='store_true',
help="do not print names of benchmarks")
parser.add_argument('--no-time', action='store_true',
help="do not print times of benchmarks")
parser.add_argument('--log-dir', default="log",
help="the log directory")
args = parser.parse_args()
if args.alt_order:
benchmark_list = [
"carbon_gas",
"doppler1",
"doppler2",
"doppler3",
"jet",
"predatorPrey",
"rigidBody1",
"rigidBody2",
"sine",
"sineOrder3",
"sqroot",
"t_div_t1",
"turbine1",
"turbine2",
"turbine3",
"verhulst",
"azimuth",
"logexp",
"sphere",
"kepler0",
"kepler1",
"kepler2",
"himmilbeau",
"hartman3",
"hartman6",
"floudas1",
"floudas2",
"floudas3"
]
else:
benchmark_list = [
"t_div_t1",
"sine",
"sqroot",
"sineOrder3",
"carbon_gas",
"verhulst",
"predatorPrey",
"rigidBody1",
"rigidBody2",
"doppler1",
"doppler2",
"doppler3",
"turbine1",
"turbine2",
"turbine3",
"jet",
"logexp",
"sphere",
"azimuth",
"kepler0",
"kepler1",
"kepler2",
"himmilbeau",
"hartman3",
"hartman6",
"floudas1",
"floudas2",
"floudas3"
]
class Problem:
def __init__(self, name, error, time):
self.name = name
self.error_str = "{0:.{prec}e}".format(
decimal.Context(prec=args.prec + 1, rounding=decimal.ROUND_UP).create_decimal(error),
prec=args.prec)
self.time_str = "{0:.1f}".format(time)
def __str__(self):
out = ""
if not args.no_name:
out += self.name + ", "
out += self.error_str
if not args.no_time:
out += ", " + self.time_str
return out
def problem_from_file(fname):
name = None
err_abs = None
err_rel = None
time = None
with open(fname, 'r') as f:
for line in f:
if line.startswith("Problem: "):
name = line[len("Problem: "):].strip()
elif line.startswith("Absolute error (exact): "):
err_abs = line[len("Absolute error (exact): "):].strip()
elif line.startswith("Absolute error (approximate): "):
err_abs = line[len("Absolute error (approximate): "):].strip()
elif line.startswith("Relative error (exact): "):
err_rel = line[len("Relative error (exact): "):].strip()
elif line.startswith("Relative error (approximate): "):
err_rel = line[len("Relative error (approximate): "):].strip()
elif line.startswith("Elapsed time: "):
time = float(line[len("Elapsed time: "):].strip())
if name and (err_abs or err_rel) and time:
return Problem(name, err_abs if err_abs else err_rel, time)
else:
return None
base_dir = args.log_dir
results = {}
for fname in glob.glob(os.path.join(base_dir, "*.log")):
result = problem_from_file(fname)
if result:
results[result.name] = result
for name in benchmark_list:
if name in results:
print(results[name])
del results[name]
if len(results) > 0:
print("\nUnsorted results:")
    for _, result in results.items():
print(result)
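# Added usage note (not in the original): run from a benchmark directory as
#   ./print_results.py --prec 3 --log-dir log --no-time
# to print one "name, error[, time]" line per benchmark found in log/*.log.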
| mit | -7,073,710,044,295,486,000 | 25.596026 | 97 | 0.525149 | false |
scrapinghub/dateparser | dateparser/data/date_translation_data/sg.py | 1 | 2622 | info = {
"name": "sg",
"date_order": "DMY",
"january": [
"nye",
"nyenye"
],
"february": [
"ful",
"fulundïgi"
],
"march": [
"mbä",
"mbängü"
],
"april": [
"ngu",
"ngubùe"
],
"may": [
"bêl",
"bêläwü"
],
"june": [
"fön",
"föndo"
],
"july": [
"len",
"lengua"
],
"august": [
"kük",
"kükürü"
],
"september": [
"mvu",
"mvuka"
],
"october": [
"ngb",
"ngberere"
],
"november": [
"nab",
"nabändüru"
],
"december": [
"kak",
"kakauka"
],
"monday": [
"bk2",
"bïkua-ûse"
],
"tuesday": [
"bk3",
"bïkua-ptâ"
],
"wednesday": [
"bk4",
"bïkua-usïö"
],
"thursday": [
"bk5",
"bïkua-okü"
],
"friday": [
"lâp",
"lâpôsö"
],
"saturday": [
"lây",
"lâyenga"
],
"sunday": [
"bikua-ôko",
"bk1"
],
"am": [
"nd"
],
"pm": [
"lk"
],
"year": [
"ngû"
],
"month": [
"nze"
],
"week": [
"dimâsi"
],
"day": [
"lâ"
],
"hour": [
"ngbonga"
],
"minute": [
"ndurü ngbonga"
],
"second": [
"nzîna ngbonga"
],
"relative-type": {
"0 day ago": [
"lâsô"
],
"0 hour ago": [
"this hour"
],
"0 minute ago": [
"this minute"
],
"0 month ago": [
"this month"
],
"0 second ago": [
"now"
],
"0 week ago": [
"this week"
],
"0 year ago": [
"this year"
],
"1 day ago": [
"bîrï"
],
"1 month ago": [
"last month"
],
"1 week ago": [
"last week"
],
"1 year ago": [
"last year"
],
"in 1 day": [
"kêkerêke"
],
"in 1 month": [
"next month"
],
"in 1 week": [
"next week"
],
"in 1 year": [
"next year"
]
},
"locale_specific": {},
"skip": [
" ",
"'",
",",
"-",
".",
"/",
";",
"@",
"[",
"]",
"|",
","
]
}
| bsd-3-clause | 7,045,914,109,901,481,000 | 14.242604 | 26 | 0.26514 | false |
dcramer/taskmaster | src/taskmaster/progressbar.py | 1 | 1033 | """
taskmaster.progressbar
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from __future__ import absolute_import
from progressbar import ProgressBar, UnknownLength, Counter, Timer
from progressbar.widgets import Widget
class Speed(Widget):
'Widget for showing the rate.'
format = 'Rate: %6.2f/s'
def __init__(self):
self.startval = 0
def update(self, pbar):
'Updates the widget with the current SI prefixed speed.'
if self.startval == 0:
self.startval = pbar.currval
return 'Rate: --/s'
speed = (pbar.currval - self.startval) / pbar.seconds_elapsed
return self.format % speed
class Value(Widget):
def __init__(self, label=None, callback=None):
assert not (label and callback)
self.label = label
self.callback = callback
def update(self, pbar):
if self.callback:
return self.callback(pbar)
return self.label
| apache-2.0 | 8,523,526,974,073,646,000 | 21.955556 | 69 | 0.617619 | false |
Pyomo/PyomoGallery | test_notebooks.py | 1 | 4863 | #
# Jupyter notebook testing logic adapted from
# https://gist.github.com/lheagy/f216db7220713329eb3fc1c2cd3c7826
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Lindsey Heagy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Raw
import unittest
import sys
import os
import subprocess
try:
import jupyter
jupyter_available = True
except ImportError:
jupyter_available = False
try:
import pandas
pandas_available = True
except ImportError:
pandas_available = False
try:
import networkx
networkx_available = True
except ImportError:
networkx_available = False
timeout=120
requires_pandas = set(['max_flow_interdict', 'min_cost_flow_interdict', 'multi_commodity_flow_interdict', 'sp_interdict', 'min_cost_flow', 'mst'])
requires_networkx = set(['mst'])
# Testing for the notebooks - use nbconvert to execute all cells of the
# notebook
# For testing on TravisCI, be sure to include a requirements.txt that
# includes jupyter so that you run on the most up-to-date version.
# Where are the notebooks?
TESTDIR = os.path.dirname(os.path.abspath(__file__))
#NBDIR = os.path.sep.join(TESTDIR.split(os.path.sep)[:-2] + ['notebooks/']) # where are the notebooks?
def setUp():
nbpaths = [] # list of notebooks, with file paths
nbnames = [] # list of notebook names (for making the tests)
print(TESTDIR)
# walk the test directory and find all notebooks
for dirname, dirnames, filenames in os.walk(TESTDIR):
for filename in filenames:
if filename.endswith('.ipynb') and not filename.endswith('-checkpoint.ipynb'):
nbpaths.append(os.path.abspath(dirname) + os.path.sep + filename) # get abspath of notebook
nbnames.append(''.join(filename[:-6])) # strip off the file extension
return nbpaths, nbnames
def get(nbname, nbpath):
# use nbconvert to execute the notebook
def test_func(self):
print('\n--------------- Testing {0} ---------------'.format(nbname))
print(' {0}'.format(nbpath))
if not jupyter_available:
self.skipTest("Jupyter unavailable")
if nbname in requires_pandas and not pandas_available:
self.skipTest("Pandas unavailable")
if nbname in requires_networkx and not networkx_available:
self.skipTest("Networkx unavailable")
# execute the notebook using nbconvert to generate html
dir_=os.path.dirname(nbpath)
os.chdir(dir_)
nbexe = subprocess.Popen(
[ 'jupyter', 'nbconvert', '{0}'.format(nbpath),
'--execute',
'--inplace',
'--ExecutePreprocessor.kernel_name=python%s' % (
{2:"",3:"3"}[sys.version_info[0]], ),
'--ExecutePreprocessor.timeout='+str(timeout)],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, err = nbexe.communicate()
check = nbexe.returncode
if check == 0:
print('\n ..... {0} Passed ..... \n'.format(nbname))
# if passed remove the generated html file
#subprocess.call(['rm', '{0}.html'.format( os.path.sep.join(os.getcwd().split(os.path.sep) + [nbpath.split(os.path.sep)[-1][:-6]]))])
else:
print('\n <<<<< {0} FAILED >>>>> \n'.format(nbname))
print('Captured Output: \n {0}'.format(err))
self.assertEqual(check, 0)
return test_func
class TestNotebooks(unittest.TestCase):
pass
nbpaths, nbnames = setUp()
# Check for duplicates
tmp = set()
for name in nbnames:
if name in tmp:
raise IOError("ERROR: duplicate test name %s" % name)
tmp.add(name)
# build test for each notebook
for i, nb in enumerate(nbnames):
#print((i,nb,nbpaths[i]))
setattr(TestNotebooks, 'test_'+nb, get(nb, nbpaths[i]))
if __name__ == '__main__':
unittest.main()
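# Added note (not in the original): each generated test effectively shells out
# to a command of the form
#   jupyter nbconvert <notebook>.ipynb --execute --inplace \
#       --ExecutePreprocessor.kernel_name=python3 --ExecutePreprocessor.timeout=120
# and passes iff nbconvert exits with status 0.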
| bsd-2-clause | 7,526,504,506,299,258,000 | 33.985612 | 146 | 0.660703 | false |
max291/RLScore | rlscore/measure/auc_measure.py | 1 | 2659 | import operator
import numpy as np
from rlscore.measure.measure_utilities import UndefinedPerformance
from rlscore.measure.measure_utilities import multitask
from rlscore.utilities import array_tools
def auc_singletask(Y, P):
    #the implementation has O(n*log(n)) time complexity
#P: predicted labels
#Y: true labels, y_i \in {-1,1} for each y_i \in Y
#
Y = np.array(Y).T[0]
P = np.array(P).T[0]
size = len(P)
#form a list of prediction-label pairs
I = np.argsort(P)
Y = Y[I]
P = P[I]
poscount = 0.
#The number of positive labels that have the same prediction
#as the current P[i] value
posties = 0.
#Number of pairwise mistakes this far
errors = 0.
j = 0
for i in range(size):
#j points always to the next entry in P for which
#P[j] > P[i]. In the end j will point outside of P
if j == i:
poscount += posties
posties = 0.
while j< size and P[i]==P[j]:
if Y[j]==1:
posties += 1
j+=1
if Y[i] == -1:
#every pairwise inversion of positive-negative pair
#incurs one error, except for ties where it incurs 0.5
#errors
errors += poscount+0.5*posties
    #fold the positives of the final tie group into the total positive count
    poscount += posties
#the number of positive-negative pairs
paircount = poscount*(size-poscount)
#AUC is 1 - number of pairwise errors
if paircount == 0:
raise UndefinedPerformance("AUC undefined if both classes not present")
AUC = 1. - errors/paircount
return AUC
def auc_multitask(Y, P):
return multitask(Y, P, auc_singletask)
def auc(Y, P):
"""Area under the ROC curve (AUC).
A performance measure for binary classification problems.
Can be interpreted as an estimate of the probability, that
the classifier is able to discriminate between a randomly
drawn positive and negative training examples. An O(n*log(n))
time implementation, with correction for tied predictions.
If 2-dimensional arrays are supplied as arguments, then AUC
is separately computed for each column, after which the AUCs
are averaged.
Parameters
----------
Y: {array-like}, shape = [n_samples] or [n_samples, n_labels]
Correct labels, must belong to set {-1,1}
P: {array-like}, shape = [n_samples] or [n_samples, n_labels]
Predicted labels, can be any real numbers.
Returns
-------
auc: float
number between 0 and 1
"""
Y = array_tools.as_labelmatrix(Y)
P = array_tools.as_labelmatrix(P)
return np.mean(auc_multitask(Y,P))
auc.iserror = False
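if __name__ == '__main__':
    # Added self-check (not part of the original module): a small worked
    # example. Positives {0.35, 0.8} vs. negatives {0.1, 0.4}: three of the
    # four positive-negative pairs are ranked correctly, so AUC = 0.75.
    Y = np.array([[-1.], [-1.], [1.], [1.]])
    P = np.array([[0.1], [0.4], [0.35], [0.8]])
    print(auc_singletask(Y, P))  # expected: 0.75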
| mit | -3,690,062,520,798,114,300 | 31.426829 | 79 | 0.618654 | false |
fahadkaleem/DataStructures | LinkedList/Problems/nthNodeFromEndOfLinkedList/Solution2.py | 1 | 2551 | """
Author: Mohammed Fahad Kaleem
Problem: Find the nth node from the end of Linked List
Method:
Maintain two pointers
1. Reference pointer and main pointer.
2. Initialize both reference and main pointers to head.
3. First move reference pointer to n nodes from head.
4. Move both pointers one by one until reference pointer reaches end.
5. Main pointer will point to nth node from the end. Return main pointer.
"""
class Node:
def __init__(self, data, next_node=None):
self.data = data
self.next_node = next_node
def get_data(self):
return self.data
def set_data(self, data):
self.data = data
def get_next_node(self):
return self.next_node
def set_next_node(self, next_node):
self.next_node = next_node
class LinkedList(object):
def __init__(self):
self.head = None
self.length = 0
def insert(self, data):
new_node = Node(data)
if self.length == 0:
self.head = new_node
self.length += 1
else:
current_node = self.head
while current_node.get_next_node() is not None:
current_node = current_node.get_next_node()
current_node.set_next_node(new_node)
self.length += 1
def print_linked_list(self):
if self.length == 0:
print("Linked List is empty")
else:
current_node = self.head
while current_node:
print("[%s]" % current_node.get_data(), end=" ==> ")
current_node = current_node.get_next_node()
print()
    def nth_node(self, n):
        if self.length == 0:
            print("Linked List is empty")
            return False
        if n < 1 or n > self.length:
            print("Value of n is out of range for the Linked List")
            return False
        reference_pointer = self.head
        main_pointer = self.head
        # Move the reference pointer n nodes ahead of the main pointer.
        for i in range(n):
            reference_pointer = reference_pointer.get_next_node()
        # Advance both pointers in lockstep until the reference pointer walks
        # off the end; the main pointer then rests on the nth node from the end.
        while reference_pointer:
            reference_pointer = reference_pointer.get_next_node()
            main_pointer = main_pointer.get_next_node()
        print(main_pointer.get_data())
        return main_pointer.get_data()
if __name__ == "__main__":
linked_list = LinkedList()
linked_list.insert(12)
linked_list.insert(16)
linked_list.insert(3)
linked_list.insert(15)
linked_list.print_linked_list()
linked_list.nth_node(0)
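    # Added checks (not in the original demo): nth_node(0) above is rejected
    # as out of range; valid queries use the two-pointer walk documented in
    # the module docstring.
    linked_list.nth_node(1)  # prints 15 (the last node)
    linked_list.nth_node(4)  # prints 12 (the head)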
| mit | 3,686,608,701,181,479,400 | 28.321839 | 73 | 0.588397 | false |
pashango2/sphinx-explorer | sphinx_explorer/python_venv/tasks.py | 1 | 3544 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import, unicode_literals
from qtpy.QtCore import *
# from qtpy.QtWidgets import *
import re
import logging
from sphinx_explorer.util.commander import Commander
logger = logging.getLogger(__name__)
class PipInstallTask(QObject):
finished = Signal(bool, str, str)
def __init__(self, packages, update_flag=False, commander=Commander(), callback=None, parent=None):
super(PipInstallTask, self).__init__(parent)
self.packages = packages or []
self.callback = callback
self.commander = commander
self.update_flag = update_flag
def run(self):
for package in self.packages:
update_flag = "-U" if self.update_flag else ""
result = self.commander.call("pip install -q {} {}".format(update_flag, package), shell=True)
if not result:
logger.warning("pip failed.")
package_info = self.commander.check_output("pip show {}".format(package), shell=True)
version = self.get_version(package_info)
self.finished.emit(True, package, version)
@staticmethod
def get_version(msg):
if not msg:
return None
for line in msg.splitlines():
if line.startswith("Version: "):
version = line[len("Version: "):].strip()
break
else:
version = None
return version
class PipListTask(QObject):
finished = Signal(list)
PARSE_RE = re.compile(r"([^\s]+)\s+\(([^\s]+)\)")
def __init__(self, commander=Commander(), callback=None, parent=None):
super(PipListTask, self).__init__(parent)
self.packages = []
self.callback = callback
self.commander = commander
@staticmethod
def filter(output):
for line in output.splitlines():
g = PipListTask.PARSE_RE.match(line)
if g:
package, version = g.groups()
yield package, version, None
def run(self):
self._run()
self.finished.emit(self.packages)
def _run(self):
output = self.commander.check_output("pip list --format=legacy", shell=True)
if not output:
logger.warning("pip failed.")
else:
for package, version, latest in PipListTask.filter(output):
self.packages.append((package, version, latest))
class PipListOutDateTask(PipListTask):
OUTDATE_PARSE_RE = re.compile(r"([^\s]+)\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)")
@staticmethod
def outdate_filter(output):
if not output:
return
for line in output.splitlines():
g = PipListOutDateTask.OUTDATE_PARSE_RE.match(line)
if g:
package, version, latest, pack_type = g.groups()
if not package or package[0] == "-" or package == "Package":
continue
yield package, version, latest, pack_type
def run(self):
# noinspection PyBroadException
try:
output = self.commander.check_output("pip list -o --format=columns", shell=True)
if not output:
logger.warning("pip failed.")
except:
self.finished.emit(self.packages)
return
for package, version, latest, _ in self.outdate_filter(output):
self.packages.append((package, version, latest))
self.finished.emit(self.packages)
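if __name__ == '__main__':
    # Added self-check (not part of the original module): PipListTask.filter
    # pulls (name, version, latest) triples out of legacy `pip list` output.
    sample = "requests (2.18.4)\nsix (1.11.0)\n"
    print(list(PipListTask.filter(sample)))
    # -> [('requests', '2.18.4', None), ('six', '1.11.0', None)]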
| mit | 8,971,301,953,340,769,000 | 30.642857 | 105 | 0.582111 | false |
gratipay/gratipay.com | gratipay/elsewhere/__init__.py | 1 | 12580 | """This subpackage contains functionality for working with accounts elsewhere.
"""
from __future__ import division, print_function, unicode_literals
from collections import OrderedDict
from datetime import datetime
import hashlib
import json
import logging
from urllib import quote
from urlparse import urlparse, urlunparse
import xml.etree.ElementTree as ET
from aspen import log, Response
from aspen.utils import to_age, utc
from oauthlib.oauth2 import TokenExpiredError
from requests_oauthlib import OAuth1Session, OAuth2Session
from gratipay.elsewhere._extractors import not_available
from gratipay.utils.i18n import LocalizedErrorResponse
ACTIONS = {'opt-in', 'connect'}
PLATFORMS = 'facebook google bitbucket bountysource github openstreetmap twitter venmo'.split()
class UnknownAccountElsewhere(Exception): pass
class PlatformRegistry(object):
"""Registry of platforms we support connecting to Gratipay accounts.
"""
def __init__(self, platforms):
self.__dict__ = OrderedDict((p.name, p) for p in platforms)
def __contains__(self, platform):
return platform.name in self.__dict__
def __iter__(self):
return iter(self.__dict__.values())
class UserInfo(object):
"""A simple container for a user's info.
Accessing a non-existing attribute returns `None`.
"""
def __init__(self, **kw):
self.__dict__.update(kw)
def __getattr__(self, key):
return self.__dict__.get(key, None)
def __setattr__(self, key, value):
if value is None:
self.__dict__.pop(key, None)
else:
self.__dict__[key] = value
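# Added note (not in the original): because of __getattr__/__setattr__ above,
# UserInfo(user_name='alice').email is simply None rather than raising an
# AttributeError, and assigning None to an attribute removes it again.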
class Platform(object):
allows_team_connect = False
# "x" stands for "extract"
x_user_info = not_available
x_user_id = not_available
x_user_name = not_available
x_display_name = not_available
x_email = not_available
x_gravatar_id = not_available
x_avatar_url = not_available
x_is_team = not_available
required_attrs = ( 'account_url'
, 'display_name'
, 'name'
)
def __init__(self, api_key, api_secret, callback_url, api_url=None, auth_url=None):
self.api_key = api_key
self.api_secret = api_secret
self.callback_url = callback_url
if api_url:
self.api_url = api_url
if auth_url:
self.auth_url = auth_url
elif not getattr(self, 'auth_url', None):
self.auth_url = self.api_url
# Determine the appropriate response parser using `self.api_format`
api_format = getattr(self, 'api_format', None)
if api_format == 'json':
self.api_parser = lambda r: r.json()
elif api_format == 'xml':
self.api_parser = lambda r: ET.fromstring(r.content)
elif api_format:
raise ValueError('unknown API format: '+str(api_format))
# Make sure the subclass was implemented properly.
missing_attrs = [a for a in self.required_attrs if not hasattr(self, a)]
if missing_attrs:
msg = "The class %s is missing these required attributes: %s"
msg %= self.__class__.__name__, ', '.join(missing_attrs)
raise AttributeError(msg)
def api_get(self, path, sess=None, **kw):
"""
Given a `path` (e.g. /users/foo), this function sends a GET request to
the platform's API (e.g. https://api.github.com/users/foo).
The response is returned, after checking its status code and ratelimit
headers.
"""
is_user_session = bool(sess)
if not sess:
sess = self.get_auth_session()
response = sess.get(self.api_url+path, **kw)
limit, remaining, reset = self.get_ratelimit_headers(response)
if not is_user_session:
self.log_ratelimit_headers(limit, remaining, reset)
# Check response status
status = response.status_code
if status == 401 and isinstance(self, PlatformOAuth1):
# https://tools.ietf.org/html/rfc5849#section-3.2
if is_user_session:
raise TokenExpiredError
raise Response(500)
if status == 404:
raise Response(404, response.text)
if status == 429 and is_user_session:
def msg(_, to_age):
if remaining == 0 and reset:
return _("You've consumed your quota of requests, you can try again in {0}.", to_age(reset))
else:
return _("You're making requests too fast, please try again later.")
raise LocalizedErrorResponse(status, msg)
if status != 200:
log('{} api responded with {}:\n{}'.format(self.name, status, response.text)
, level=logging.ERROR)
msg = lambda _: _("{0} returned an error, please try again later.",
self.display_name)
raise LocalizedErrorResponse(502, msg)
return response
def get_ratelimit_headers(self, response):
limit, remaining, reset = None, None, None
prefix = getattr(self, 'ratelimit_headers_prefix', None)
if prefix:
limit = response.headers.get(prefix+'limit')
remaining = response.headers.get(prefix+'remaining')
reset = response.headers.get(prefix+'reset')
try:
limit, remaining, reset = int(limit), int(remaining), int(reset)
reset = datetime.fromtimestamp(reset, tz=utc)
except (TypeError, ValueError):
d = dict(limit=limit, remaining=remaining, reset=reset)
log('Got weird rate headers from %s: %s' % (self.name, d))
limit, remaining, reset = None, None, None
return limit, remaining, reset
def log_ratelimit_headers(self, limit, remaining, reset):
"""Emit log messages if we're running out of ratelimit.
"""
if None in (limit, remaining, reset):
return
percent_remaining = remaining/limit
if percent_remaining < 0.5:
log_msg = (
'{0} API: {1:.1%} of ratelimit has been consumed, '
'{2} requests remaining, resets {3}.'
).format(self.name, 1 - percent_remaining, remaining, to_age(reset))
log_lvl = logging.WARNING
if percent_remaining < 0.2:
log_lvl = logging.ERROR
elif percent_remaining < 0.05:
log_lvl = logging.CRITICAL
log(log_msg, log_lvl)
def extract_user_info(self, info):
"""
Given a user_info object of variable type (depending on the platform),
extract the relevant information by calling the platform's extractors
(`x_user_name`, `x_user_id`, etc).
Returns a `UserInfo`. The `user_id` attribute is guaranteed to have a
unique non-empty value.
"""
r = UserInfo(platform=self.name)
info = self.x_user_info(r, info, info)
r.user_name = self.x_user_name(r, info, None)
if self.x_user_id.__func__ is not_available:
r.user_id = r.user_name
else:
r.user_id = self.x_user_id(r, info)
assert r.user_id is not None
r.user_id = unicode(r.user_id)
assert len(r.user_id) > 0
r.display_name = self.x_display_name(r, info, None)
r.email = self.x_email(r, info, None)
r.avatar_url = self.x_avatar_url(r, info, None)
if not r.avatar_url:
gravatar_id = self.x_gravatar_id(r, info, None)
if r.email and not gravatar_id:
gravatar_id = hashlib.md5(r.email.strip().lower()).hexdigest()
if gravatar_id:
r.avatar_url = 'https://secure.gravatar.com/avatar/'+gravatar_id
r.is_team = self.x_is_team(r, info, False)
r.extra_info = info
return r
def get_team_members(self, account, page_url=None):
"""Given an AccountElsewhere, return its membership list from the API.
"""
if not page_url:
page_url = self.api_team_members_path.format(
user_id=quote(account.user_id),
user_name=quote(account.user_name or ''),
)
r = self.api_get(page_url)
members, count, pages_urls = self.api_paginator(r, self.api_parser(r))
members = [self.extract_user_info(m) for m in members]
return members, count, pages_urls
def get_user_info(self, key, value, sess=None):
"""Given a user_name or user_id, get the user's info from the API.
"""
if key == 'user_id':
path = 'api_user_info_path'
else:
assert key == 'user_name'
path = 'api_user_name_info_path'
path = getattr(self, path, None)
if not path:
raise Response(400)
path = self._format_path(path, {key: value})
info = self.api_parser(self.api_get(path, sess=sess))
return self.extract_user_info(info)
def _format_path(self, path, values):
parsed = urlparse(path)
quoted_values = {k: quote(v) for k, v in values.items()}
parsed = parsed._replace(path=parsed.path.format(**values))
parsed = parsed._replace(query=parsed.query.format(**quoted_values))
return urlunparse(parsed)
def get_user_self_info(self, sess):
"""Get the authenticated user's info from the API.
"""
r = self.api_get(self.api_user_self_info_path, sess=sess)
info = self.extract_user_info(self.api_parser(r))
token = getattr(sess, 'token', None)
if token:
info.token = json.dumps(token)
return info
def get_friends_for(self, account, page_url=None, sess=None):
if not page_url:
page_url = self.api_friends_path.format(
user_id=quote(account.user_id),
user_name=quote(account.user_name or ''),
)
r = self.api_get(page_url, sess=sess)
friends, count, pages_urls = self.api_paginator(r, self.api_parser(r))
friends = [self.extract_user_info(f) for f in friends]
if count == -1 and hasattr(self, 'x_friends_count'):
count = self.x_friends_count(None, account.extra_info, -1)
return friends, count, pages_urls
class PlatformOAuth1(Platform):
request_token_path = '/oauth/request_token'
authorize_path = '/oauth/authorize'
access_token_path = '/oauth/access_token'
def get_auth_session(self, token=None):
args = ()
if token:
args = (token['token'], token['token_secret'])
return OAuth1Session(self.api_key, self.api_secret, *args,
callback_uri=self.callback_url)
def get_auth_url(self, **kw):
sess = self.get_auth_session()
r = sess.fetch_request_token(self.auth_url+self.request_token_path)
url = sess.authorization_url(self.auth_url+self.authorize_path)
return url, r['oauth_token'], r['oauth_token_secret']
def get_query_id(self, querystring):
return querystring['oauth_token']
def handle_auth_callback(self, url, token, token_secret):
sess = self.get_auth_session(dict(token=token, token_secret=token_secret))
sess.parse_authorization_response(url)
r = sess.fetch_access_token(self.auth_url+self.access_token_path)
sess.token = dict(token=r['oauth_token'],
token_secret=r['oauth_token_secret'])
return sess
class PlatformOAuth2(Platform):
oauth_default_scope = None
oauth_email_scope = None
oauth_payment_scope = None
def get_auth_session(self, state=None, token=None, token_updater=None):
return OAuth2Session(self.api_key, state=state, token=token,
token_updater=token_updater,
redirect_uri=self.callback_url,
scope=self.oauth_default_scope)
def get_auth_url(self, **kw):
sess = self.get_auth_session()
url, state = sess.authorization_url(self.auth_url)
return url, state, ''
def get_query_id(self, querystring):
return querystring['state']
def handle_auth_callback(self, url, state, unused_arg):
sess = self.get_auth_session(state=state)
sess.fetch_token(self.access_token_url,
client_secret=self.api_secret,
authorization_response=url)
return sess
| mit | 8,522,709,110,932,166,000 | 36.440476 | 112 | 0.592289 | false |
jamesmcm/cryptopals | basic.py | 1 | 9677 | from math import sqrt
from numpy import mean
from Crypto.Cipher import AES
import operator
hexd={"0":0,"1":1,"2":2,"3":3,"4":4,"5":5,"6":6,"7":7,"8":8,"9":9,"a":10,"b":11,"c":12,"d":13,"e":14,"f":15}
b64d={0:"A",16:"Q",32:"g",48:"w",1:"B",17:"R",33:"h",49:"x",2:"C",18:"S",34:"i",50:"y",3:"D",19:"T",35:"j",51:"z",4:"E",20:"U",36:"k",52:"0",5:"F",21:"V",37:"l",53:"1",6:"G",22:"W",38:"m",54:"2",7:"H",23:"X",39:"n",55:"3",8:"I",24:"Y",40:"o",56:"4",9:"J",25:"Z",41:"p",57:"5",10:"K",26:"a",42:"q",58:"6",11:"L",27:"b",43:"r",59:"7",12:"M",28:"c",44:"s",60:"8",13:"N",29:"d",45:"t",61:"9",14:"O",30:"e",46:"u",62:"+",15:"P",31:"f",47:"v",63:"/"}
nhexd = dict (zip(hexd.values(),hexd.keys()))
nb64d= dict (zip(b64d.values(),b64d.keys()))
lf={"a":0.08167,"b":0.01492,"c":0.02782,"d":0.04253,"e":0.12702,"f":0.02228,"g":0.02015,"h":0.06094,"i":0.06966,"j":0.00153,"k":0.00772,"l":0.04025,"m":0.02406,"n":0.06749,"o":0.07507,"p":0.01929,"q":0.00095,"r":0.05987,"s":0.06327,"t":0.09056,"u":0.02758,"v":0.00978,"w":0.02360,"x":0.00150,"y":0.01974,"z":0.00074}
def encode(js):
z = (js[0] << 8) | js[1]
z = (z<<8) | js[2]
js=[]
oc1=16515072&z
oc1=oc1>>18
oc2=258048&z
oc2=oc2>>12
oc3=4032&z
oc3=oc3>>6
oc4=63&z
return [oc1,oc2,oc3,oc4]
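#Worked example (added; not in the original): encode([77, 97, 110]) -- the
#bytes of "Man" -- packs 0x4D616E into the 6-bit groups [19, 22, 5, 46],
#which b64d maps to "TWFu".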
def decodehex(s):
out=[]
for i in xrange(len(s)/2):
c=s[2*i:(2*i)+2]
j=16*hexd[c[0]]+hexd[c[1]]
out.append(j)
return out
def hex2b64(s):
out=""
tc=0
js=[]
    for i in xrange(len(s)/2):
        c=s[2*i:(2*i)+2]
j=16*hexd[c[0]]+hexd[c[1]]
js.append(j)
tc+=1
if tc==3:
ocs=encode(js)
js=[]
tc=0
#print ocs
for oc in ocs:
out=out+str(b64d[oc])
if tc!=0:
for v in range(3-tc):
js.append(0)
ocs = encode(js)
for oc in ocs:
out=out+str(b64d[oc])
pass
mys=""
for i in range(3-tc):
mys=mys+"="
out=out[:-(3-tc)]+mys
return out
def encodehex(n):
out=""
trigger=False
for i in range(64):
if n/(16**(63-i))>=1 or trigger==True:
trigger=True
#print i, n
if i!=63:
out+=str(nhexd[n/(16**(63-i))])
else:
out+=str(nhexd[n])
n=n-((n/(16**(63-i)))*(16**(63-i)))
if n<0:
n=0
#print out
return out
def createbinary(sl):
out=0
for i in range(len(sl)):
out=out<<8 | sl[i]
return out
def hexstring2ascii(s):
out=""
for i in xrange(len(s)/2):
c=s[2*i:(2*i)+2]
j=16*hexd[c[0]]+hexd[c[1]]
out+=str(chr(j))
return out
def ascii2hex(c):
o=encodehex(c)
if len(o)==1:
o="0"+o
return o
def repeatkeyxor(key,s, tohex=True):
sl=list(s)
out=[]
for i in xrange(len(sl)):
out.append(ord(sl[i])^ord(key[i%len(key)]))
if tohex==True:
return "".join(map(ascii2hex,out))
else:
return "".join(map(chr,out))
def xorstrings(s1,s2):
out=[]
for i in xrange(len(s1)):
out.append(chr(ord(s1[i])^ord(s2[i])))
return "".join(out)
def b642ascii(s):
out=[]
for i in xrange(len(s)/4):
c=s[4*i:(4*i)+4]
#print c
n=0
nulls=0
for z in c:
if z!="=":
n=n<<6 | nb64d[z]
else:
nulls+=1
n=n<<6 | 0
c1=(n&16711680)>>16
c2=(n&65280)>>8
c3=n&255
cs=[c1,c2,c3]
for i in range(3-nulls):
out.append(chr(cs[i]))
return "".join(out)
def hamming(s1,s2):
b1=str2bin(s1)
b2=str2bin(s2)
b=b1^b2
return ones(b)
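#Sanity check (added; not in the original): the challenge's documented
#test vector for the bitwise Hamming distance.
assert hamming("this is a test", "wokka wokka!!!") == 37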
def computehistogram(block):
myhist={}
chars=0
for k in lf:
myhist[k]=0
for c in block:
c=c.lower()
if c in myhist:
chars+=1
myhist[c]+=1
for k in myhist:
myhist[k]=myhist[k]/float(chars)
return(myhist)
def ascii2hexstring(msg):
return ''.join(x.encode('hex') for x in msg)
def comparehist(hist):
rmse=0
for k in hist:
rmse+=(lf[k]-hist[k])**2
return rmse
def str2bin(s):
o=0
for c in s:
o=o << 8 | ord(c)
return o
def ones(n):
w = 0
while (n):
w += 1
n &= n - 1
return w
def decryptxor(k,s):
return repeatkeyxor(k,s,tohex=False)
def decryptECBAES(k,s):
cipher = AES.new(k, AES.MODE_ECB, "ignoreIV")
msg = cipher.decrypt(s)
return msg
def encryptECBAES(k,s):
cipher = AES.new(k, AES.MODE_ECB, "ignoreIV")
msg = cipher.encrypt(s)
return msg
def splitblocks(s,keysize):
blocks=[]
for i in xrange((len(s)/keysize)+1):
if i!=len(s)/keysize:
blocks.append(s[i*keysize:(i+1)*keysize])
else:
if len(s[i*keysize:])>0:
blocks.append(s[i*keysize:])
return blocks
if __name__=="__main__":
#Q1
print "Q1"
inputs="49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d"
print hex2b64(inputs)
#Q2
print "Q2"
s1=decodehex("1c0111001f010100061a024b53535009181c")
s2=decodehex("686974207468652062756c6c277320657965")
print encodehex(createbinary(s1)^createbinary(s2))
#Q3
print "Q3"
s=decodehex("1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736")
for i in range(20,120):
cur=map(chr,map(lambda x: x^i, s))
if all(map(lambda x: x>=32 and x<=126, map(ord, cur))):
if cur.count("a")/float(len(cur))>0.03 and cur.count("e")/float(len(cur))>0.01 and cur.count(" ")/float(len(cur))>0.01:
print "".join(cur)
print "Key: " + chr(i)
#Q4
print "Q4"
f=open("4.txt","r")
for line in f:
s=decodehex(line)
for i in range(20,120):
cur=map(chr,map(lambda x: x^i, s))
if sum(map(lambda x: x>=32 and x<=126, map(ord, cur)))/float(len(cur))>0.96:
if cur.count("t")+cur.count("T")>cur.count("p")+cur.count("P") and cur.count("e")+cur.count("E")>cur.count("z")+cur.count("Z") and cur.count("e")+cur.count("E")>cur.count("L")+cur.count("l"):
if cur.count("a")/float(len(cur))>0.03 and cur.count("e")/float(len(cur))>0.01 and cur.count(" ")/float(len(cur))>0.01:
print "".join(cur)
print "Key: " + str(chr(i)) + ", Line: " + line
#Q5
print "Q5"
s="Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal"
k="ICE"
out=repeatkeyxor(k,s)
print repeatkeyxor(k,s)
print hexstring2ascii(repeatkeyxor(k,hexstring2ascii(out)))
#Q6
out=""
f=open("6.txt","r")
for line in f:
out+=line.strip()
s=b642ascii(out)
ksd={}
for keysize in xrange(1,40):
numbytes=8*keysize
numchars=(1+(keysize/4))*4
c1=s[:keysize]
c2=s[keysize:2*keysize]
c3=s[2*keysize:3*keysize]
c4=s[3*keysize:4*keysize]
c5=s[4*keysize:5*keysize]
diff=mean([hamming(c1,c2)/float(keysize),hamming(c1,c3)/float(keysize),hamming(c2,c3)/float(keysize),hamming(c4,c5)/float(keysize),hamming(c2,c4)/float(keysize),hamming(c1,c5)/float(keysize)])
ksd[keysize]=diff
    #From ksd we see the keysize is 29 (not 5 or 2!)
s=b642ascii(out)
keysize=29
#split string to blocks
blocks=[]
for i in xrange((len(s)/keysize)+1):
if i!=len(s)/keysize:
blocks.append(s[i*keysize:(i+1)*keysize])
else:
if len(s[i*keysize:])>0:
blocks.append(s[i*keysize:])
#transpose blocks
newblocks=[]
for i in xrange(keysize):
newblocks.append([])
for block in blocks:
for j in xrange(len(block)):
newblocks[j].append(block[j])
key=[]
keyds=[]
for block in newblocks:
minscore=float("infinity")
bestc=None
keyd={}
for keyc in range(32,123):
decrypt=map(lambda x: chr(ord(x)^keyc),block)
score=comparehist(computehistogram(decrypt))
keyd[chr(keyc)]=score
#print score
if score<minscore:
minscore=score
bestc=chr(keyc)
key.append(bestc)
keyds.append(keyd)
print "Key: " + "".join(key)
#After fixing case:
key="Terminator X: Bring the noise"
#can we fix this automatically?
print decryptxor("".join(key),s)
#Q7
#OpenSSL example
#echo -n "0123456789abcdef0123456789abcdef" | openssl aes-128-ecb -nosalt -nopad -K "59454c4c4f57205355424d4152494e45" | xxd
key = b'YELLOW SUBMARINE'
cipher = AES.new(key, AES.MODE_ECB, "")
f=open("7.txt","r")
s=b""
for line in f:
s+=line.strip()
s=b642ascii(s)
f.close()
key = b'YELLOW SUBMARINE'
cipher = AES.new(key, AES.MODE_ECB, "ignoreIV")
msg = cipher.decrypt(s)
#print msg
#Q8
f=open("8.txt","r")
cps=[]
for line in f:
cps.append(line.strip())
f.close()
lenblock=32
simd={}
for z in xrange(len(cps)):
c=cps[z]
count=0
for i in xrange(len(c)/lenblock):
for j in xrange(i+1,(len(c)/lenblock)):
if c[i*lenblock:(i+1)*lenblock] == c[j*lenblock:(j+1)*lenblock]:
count+=1
simd[z]=count
sorted_x = sorted(simd.items(), key=operator.itemgetter(1), reverse=True) #here we see 132 has the most repeats (entirely repeats)
#print cps[132]
| gpl-2.0 | -7,439,320,829,853,359,000 | 27.461765 | 444 | 0.521959 | false |
gprakhar/scripts-biopython | hmmer-pipeline-DPM_Dicty.py | 1 | 1173 | #Script to run HMMER on Dicty proteins
#Run it keeping in mind that HMM(s) from a particular DPM human homolog protein are all checked against every protein from Dicty
#Author : prakhar gaur
#date : Wed 16 July IST 2015
import os
import argparse
import csv
parser = argparse.ArgumentParser()
parser.add_argument('-C', '--numberofcores', help='Number of cores to run the blast on', type=int)
parser.add_argument('hmmfile', metavar='F', help='csv file with hmm entries, with first entry in each row as Uniprot id')
args = parser.parse_args()
cores = args.numberofcores
inputfileName = str(args.hmmfile)
hmmerExe = r'/home/interns/CHG_Nunjundiah-Project/local-bin/hmmer/hmmer-3.1b2-linux-intel-x86_64/binaries/'
pfamid = list()
with open(inputfileName) as inputfileHandle:
keywordString = csv.reader(inputfileHandle)
for row in keywordString:
pfamid.append(row)
for idlist in pfamid:
for item in idlist[1:]:
hmmsearch_cmd = '%shmmsearch --cpu %d /home/interns/CHG_Nunjundiah-Project/raw-data/DPM-prot-HMM/%s.hmm dicty_primary_protein.fa >> %s.out' % (hmmerExe, cores, item, idlist[0])
print hmmsearch_cmd
os.system(hmmsearch_cmd)
| gpl-3.0 | -1,187,990,765,733,363,000 | 38.1 | 178 | 0.736573 | false |
kindkaktus/CcPy | ccpy/ccpystate.py | 1 | 4899 | #
# Andrei Korostelev <andrei at korostelev dot net>
#
# Before using this product in any way please read the license agreement.
# If you do not agree to the terms in this agreement you are not allowed
# to use this product or parts of it. You can read this license in the
# file named LICENSE.
#
"""
Ccpy state
"""
import os
import logging
import sys
import xml.dom.minidom
from .enum import Enum
from .common import LoggerName
DefCcPyStateConfigFileName = '/etc/ccpy.state'
PrjStates = Enum('OK', 'FAILED', 'UNKNOWN')
Logger = logging.getLogger(LoggerName)
def str2PrjState(anStr):
for s in PrjStates:
if str(s) == anStr:
return s
raise Exception("State '%s' does not exist in enum" % anStr)
class CcPyState:
_rootElem = 'ccpystate'
_prjElem = 'project'
_prjNameAttrName = 'name'
_prjStateAttrName = 'state'
def __init__(self, aFileName=DefCcPyStateConfigFileName):
self._fileName = aFileName
def getPrjState(self, aName):
if not os.path.exists(self._fileName):
Logger.debug(
"File '%s' does not exist, defaulting project state to %s" %
(self._fileName, PrjStates.UNKNOWN))
return PrjStates.UNKNOWN
if not os.path.isfile(self._fileName):
raise IOError("'%s' exists but is not a regular file" % self._fileName)
myDom = xml.dom.minidom.parse(self._fileName)
if myDom.documentElement.tagName != CcPyState._rootElem:
raise RuntimeError(
"'%s' is ill-formed ccpystate config (incorrect root element)" %
self._fileName)
myProjects = myDom.getElementsByTagName(CcPyState._prjElem)
myRequestedProjects = [
prj for prj in myProjects if prj.getAttribute(
CcPyState._prjNameAttrName) == aName]
if (len(myRequestedProjects) > 1):
            raise RuntimeError(
                "'%s' is ill-formed ccpystate config (more than one '%s' projects found)" %
                (self._fileName, aName))
if (len(myRequestedProjects) == 0):
Logger.debug(
"'%s' does not contain project '%s', defaulting project state to %s" %
(self._fileName, aName, PrjStates.UNKNOWN))
return PrjStates.UNKNOWN
myStateStr = myRequestedProjects[0].getAttribute(CcPyState._prjStateAttrName)
myState = str2PrjState(myStateStr)
return myState
def setPrjState(self, aName, aVal):
if aVal not in PrjStates:
raise TypeError("'%s' in not valid project state" % aVal)
if os.path.exists(self._fileName):
if not os.path.isfile(self._fileName):
raise IOError("'%s' exists but is not a regular file" % self._fileName)
myDom = xml.dom.minidom.parse(self._fileName)
if myDom.documentElement.tagName != CcPyState._rootElem:
raise RuntimeError(
"'%s' is ill-formed ccpystate config (incorrect root element)" %
self._fileName)
myProjects = myDom.getElementsByTagName(CcPyState._prjElem)
myProjects2Change = [
prj for prj in myProjects if prj.getAttribute(
CcPyState._prjNameAttrName) == aName]
if (len(myProjects2Change) > 1):
                raise RuntimeError(
                    "'%s' is ill-formed ccpystate config (more than one '%s' projects found)" %
                    (self._fileName, aName))
if (len(myProjects2Change) == 0):
Logger.debug(
"'%s' does not contain project '%s', adding project with state %s" %
(self._fileName, aName, aVal))
myElem = myDom.createElement(CcPyState._prjElem)
myElem.setAttribute(CcPyState._prjNameAttrName, aName)
myElem.setAttribute(CcPyState._prjStateAttrName, str(aVal))
myDom.documentElement.appendChild(myElem)
else:
Logger.debug(
"'%s' contains project '%s', setting project state to %s" %
(self._fileName, aName, aVal))
myElem = myProjects2Change[0]
myElem.setAttribute(CcPyState._prjStateAttrName, str(aVal))
else:
myDom = xml.dom.minidom.parseString('<%s>\n<%s %s="%s" %s="%s"/>\n</%s>\n' %
(CcPyState._rootElem,
CcPyState._prjElem,
CcPyState._prjNameAttrName, aName,
CcPyState._prjStateAttrName, aVal,
CcPyState._rootElem))
myFp = open(self._fileName, 'w+')
myDom.writexml(myFp)
myFp.close()
prjState = property(getPrjState, setPrjState)
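if __name__ == '__main__':
    # Added smoke test (not part of the original module): round-trip a
    # project state through a throwaway state file.
    import tempfile
    myFileName = os.path.join(tempfile.mkdtemp(), 'ccpy.state')
    myCcPyState = CcPyState(myFileName)
    myCcPyState.setPrjState('demo', PrjStates.OK)
    assert myCcPyState.getPrjState('demo') == PrjStates.OK
    print('Round-trip OK')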
| bsd-3-clause | -6,498,245,015,245,778,000 | 40.168067 | 95 | 0.569708 | false |
ocelot-collab/ocelot | unit_tests/ebeam_test/dba_tracking/dba_tracking_conf.py | 1 | 1026 | """Test parameters description"""
import pytest
from ocelot import *
"""lattice elements descripteion"""
Q1 = Quadrupole(l=0.4, k1=-1.3, eid="Q1")
Q2 = Quadrupole(l=0.8, k1=1.4, eid="Q2")
Q3 = Quadrupole(l=0.4, k1=-1.7, eid="Q3")
Q4 = Quadrupole(l=0.5, k1=1.19250444829, eid="Q4")
B = Bend(l=2.7, k1=-.06, angle=2*pi/16., e1=pi/16., e2=pi/16., eid= "B")
SF = Sextupole(l=0.01, k2=150.0, eid="SF") #random value
SD = Sextupole(l=0.01, k2=-150.0, eid="SD") #random value
D1 = Drift(l=2., eid= "D1")
D2 = Drift(l=0.6, eid= "D2")
D3 = Drift(l=0.3, eid= "D3")
D4 = Drift(l=0.7, eid= "D4")
D5 = Drift(l=0.9, eid= "D5")
D6 = Drift(l=0.2, eid= "D6")
"""pytest fixtures description"""
@pytest.fixture(scope='module')
def cell():
return (D1, Q1, D2, Q2, D3, Q3, D4, B, D5, SD, D5, SF, D6, Q4, D6, SF, D5, SD,D5, B, D4, Q3, D3, Q2, D2, Q1, D1)
@pytest.fixture(scope='module')
def method():
return MethodTM()
@pytest.fixture(scope='module')
def lattice(cell, method):
return MagneticLattice(cell, method=method)
| gpl-3.0 | 2,305,508,052,010,516,700 | 24.02439 | 116 | 0.61306 | false |
kepstin/picard | picard/util/tags.py | 1 | 3767 | # -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2007 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
TAG_NAMES = {
'album': N_('Album'),
'artist': N_('Artist'),
'title': N_('Title'),
'date': N_('Date'),
'originaldate': N_('Original Release Date'),
'albumartist': N_('Album Artist'),
'tracknumber': N_('Track Number'),
'totaltracks': N_('Total Tracks'),
'discnumber': N_('Disc Number'),
'totaldiscs': N_('Total Discs'),
'albumartistsort': N_('Album Artist Sort Order'),
'artistsort': N_('Artist Sort Order'),
'titlesort': N_('Title Sort Order'),
'albumsort': N_('Album Sort Order'),
'asin': N_('ASIN'),
'grouping': N_('Grouping'),
'version': N_('Version'),
'isrc': N_('ISRC'),
'mood': N_('Mood'),
'bpm': N_('BPM'),
'copyright': N_('Copyright'),
'license': N_('License'),
'composer': N_('Composer'),
'writer': N_('Writer'),
'conductor': N_('Conductor'),
'lyricist': N_('Lyricist'),
'arranger': N_('Arranger'),
'producer': N_('Producer'),
'engineer': N_('Engineer'),
'subtitle': N_('Subtitle'),
'discsubtitle': N_('Disc Subtitle'),
'remixer': N_('Remixer'),
'musicbrainz_trackid': N_('MusicBrainz Recording Id'),
'musicbrainz_albumid': N_('MusicBrainz Release Id'),
'musicbrainz_artistid': N_('MusicBrainz Artist Id'),
'musicbrainz_albumartistid': N_('MusicBrainz Release Artist Id'),
'musicbrainz_workid': N_('MusicBrainz Work Id'),
'musicbrainz_releasegroupid': N_('MusicBrainz Release Group Id'),
'musicbrainz_discid': N_('MusicBrainz Disc Id'),
'musicbrainz_sortname': N_('MusicBrainz Sort Name'),
'musicip_puid': N_('MusicIP PUID'),
'musicip_fingerprint': N_('MusicIP Fingerprint'),
'acoustid_id': N_('AcoustID'),
'acoustid_fingerprint': N_('AcoustID Fingerprint'),
'discid': N_('Disc Id'),
'website': N_('Website'),
'compilation': N_('Compilation'),
'comment:': N_('Comment'),
'genre': N_('Genre'),
'encodedby': N_('Encoded By'),
'performer:': N_('Performer'),
'releasetype': N_('Release Type'),
'releasestatus': N_('Release Status'),
'releasecountry': N_('Release Country'),
'label': N_('Record Label'),
'barcode': N_('Barcode'),
'catalognumber': N_('Catalog Number'),
'format': N_('Format'),
'djmixer': N_('DJ-Mixer'),
'media': N_('Media'),
'lyrics:': N_('Lyrics'),
'mixer': N_('Mixer'),
'language': N_('Language'),
'script': N_('Script'),
'~length': N_('Length'),
'~rating': N_('Rating'),
}
def display_tag_name(name):
if ':' in name:
name, desc = name.split(':', 1)
        name = TAG_NAMES.get(name + ':', name)  # translate once, below
return '%s [%s]' % (_(name), desc)
else:
new_name = TAG_NAMES.get(name)
if new_name is None:
new_name = TAG_NAMES.get(name + ':')
if new_name is None:
return _(name)
else:
return '%s []' % (_(new_name),)
else:
return _(new_name)
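# Examples, derived from the mapping above (`N_` and `_` are gettext markers
# provided by Picard's i18n setup; translations are assumed to be identity
# here):
#   display_tag_name('album')            ->  'Album'
#   display_tag_name('performer:guitar') ->  'Performer [guitar]'
#   display_tag_name('lyrics:')          ->  'Lyrics []'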
| gpl-2.0 | 5,036,923,362,161,006,000 | 35.192308 | 80 | 0.591923 | false |
mjsauvinen/P4UL | pyLib/netcdfTools.py | 1 | 11278 | #!/usr/bin/env python3
import netCDF4 as nc
import sys
import argparse
import numpy as np
from utilities import partialMatchFromList
debug = True
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def asciiEncode(uList, uStr):
n = len(uList)
if(n > 0):
uList = list(uList) # This might be a tuple coming in
for i in range(len(uList)):
if isinstance(uList[i], bytes): uList[i] = uList[i].decode()
else:
print(' Dictionary {} has zero length. Exiting ...'.format(uStr))
sys.exit(1)
return uList
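# Example: bytes entries are decoded in place, e.g.
#   asciiEncode([b'time', 'zu_3d'], 'Variables')  ->  ['time', 'zu_3d']
# An empty input list is treated as a fatal error and exits the program.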
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def netcdfDataset(filename, verbose=True):
# Create Dataset
ds = nc.Dataset(filename)
# Generate a list of variables and independent variables contained in the file.
varList = asciiEncode(ds.variables.keys(), 'Variables')
dimList = asciiEncode(ds.dimensions.keys(), 'Dimensions')
if(verbose):
print(' Variable List : {} '.format(varList))
print(' Dimension List : {} '.format(dimList))
return ds, varList, dimList
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def netcdfOutputDataset(filename, mode='w'):
if( isinstance( filename, bytes ) ):
filename = filename.decode()
dso = nc.Dataset(filename, mode, format='NETCDF4')
return dso
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def netcdfWriteAndClose(dso, verbose=True):
if(verbose):
print('Writing of output data .... ')
dso.close()
if(verbose):
print(' ... done. File closed.')
dso = None
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def read1DVariableFromDataset( dimStr, varStr, ds, iLOff=0, iROff=0, cl=1):
# iLOff: left offset
# iROff: right offset
# cl : coarsening level
if(varStr in ds.variables.keys()):
vs = ds.variables[varStr]
dimList = vs.dimensions # return a list of variable dimensions ('time', 'x', 'y', etc.)
print(' dimList = {} '.format( dimList ))
vdim = partialMatchFromList( dimStr, dimList )
try:
print(' Reading variable {} ... '.format(vdim))
if(iROff == 0 or (iROff is None) ):
var = ds.variables[vdim][(0 + iLOff):]
else:
var = ds.variables[vdim][(0 + iLOff):-abs(iROff)]
print(' ... done.')
except:
print(' Cannot read the array of variable: {}.'.format(varStr))
sys.exit(1)
else:
print(' Variable {} not in list {}.'.format(varStr, ds.variables.keys()))
sys.exit(1)
return var[::cl], np.shape(var[::cl])
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def readVariableFromDataset(varStr, ds, cl=1 ):
if( varStr in ds.variables.keys() ):
vdims = asciiEncode(ds.variables[varStr].dimensions, ' Variable dimensions ')
if( len(vdims) == 4 ):
var = ds.variables[varStr][:,::cl,::cl,::cl]
elif( len(vdims) == 3 and 'time' not in vdims ):
var = ds.variables[varStr][::cl,::cl,::cl]
elif( len(vdims) == 3 and 'time' in vdims ):
var = ds.variables[varStr][:,::cl,::cl]
elif( len(vdims) == 2 and 'time' not in vdims ):
var = ds.variables[varStr][::cl,::cl]
elif( len(vdims) == 2 and 'time' in vdims ):
print(' {} {} '.format(varStr, ds.variables[varStr][:].shape ))
var = ds.variables[varStr][:,::cl]
elif( len(vdims) == 1 and 'time' in vdims ):
var = ds.variables[varStr]
else:
var = ds.variables[varStr][::cl]
# Load the independent variables and wrap them into a dict
dDict = dict()
for dname in vdims:
dData = ds.variables[dname][:]
if( 'time' in dname ): dDict[dname] = dData
else: dDict[dname] = dData[::cl]
dData = None
else:
sys.exit(' Variable {} not in list {}.'.format(varStr, ds.variables.keys()))
return var, dDict
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def read3DVariableFromDataset(varStr, ds, iTOff=0, iLOff=0, iROff=0, cl=1, meanOn=False):
# iLOff: left offset
# iROff: right offset
# cl : coarsening level
varStr = partialMatchFromList( varStr , ds.variables.keys() )
print(' Reading variable {} ... '.format(varStr))
  var, dDict = readVariableFromDataset(varStr, ds, cl=cl )  # honor the requested coarsening level
print(' ... done.')
iL = 0 + int(iLOff/cl)
iR = int(abs(iROff/cl))
iT = 0 + int(iTOff)
if(iR == 0):
# Param list (time, z, y, x )
if(meanOn):
vo = var[iL:, iL:, iL:]
else:
vo = var[iT:, iL:, iL:, iL:]
else:
if(meanOn):
vo = var[iL:-iR, iL:-iR, iL:-iR]
else:
vo = var[iT:, iL:-iR, iL:-iR, iL:-iR]
var = None
return vo, np.array(vo.shape)
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def read3dDataFromNetCDF( fname, varStr, cl=1, zeroNans=True ):
'''
Establish two boolean variables which indicate whether the created variable is an
independent or dependent variable in function createNetcdfVariable().
'''
parameter = True; variable = False
'''
Create a NETCDF input dataset (ds), and its associated lists of dependent (varList)
and independent (dimList) variables.
'''
ds, varList, paramList = netcdfDataset(fname)
varStr = partialMatchFromList( varStr , varList )
print(' Extracting {} from dataset in {} ... '.format( varStr, fname ))
var, dDict = readVariableFromDataset(varStr, ds, cl )
print(' {}_dims = {}\n Done!'.format(varStr, var.shape ))
# Rename the keys in dDict to simplify the future postprocessing
  for dn in list(dDict.keys()):  # snapshot the keys; the dict is mutated below
if( zeroNans ):
idNan = np.isnan(dDict[dn]); dDict[dn][idNan] = 0.
if( 'time' in dn and 'time' != dn ):
dDict['time'] = dDict.pop( dn )
elif( 'x' == dn[0] and 'x' != dn ):
dDict['x'] = dDict.pop( dn )
elif( 'y' == dn[0] and 'y' != dn ):
dDict['y'] = dDict.pop( dn )
elif( 'z' == dn[0] and 'z' != dn ):
dDict['z'] = dDict.pop( dn )
else: pass
# Append the variable into the dict.
dDict['v'] = var
return dDict
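# Hedged usage sketch (file name and variable name are illustrative
# assumptions):
#   dDict = read3dDataFromNetCDF('PALM_OUT_3D.nc', 'u', cl=2)
#   u = dDict['v']                              # the data itself
#   x, y, z, t = dDict['x'], dDict['y'], dDict['z'], dDict['time']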
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def interpolatePalmVectors(v0, vc_dims, cmpStr, meanOn=False):
icmp = int()
iOn = False
jOn = False
kOn = False
kCopy = False
if(cmpStr == 'i'):
icmp = 3
iOn = True
elif(cmpStr == 'j'):
icmp = 2
jOn = True
elif(cmpStr == 'k'):
icmp = 1
kOn = True
elif(cmpStr == 'kc'):
icmp = 1
kCopy = True
else:
print('Invalid component string: {}. Exiting ...'.format(cmpStr))
sys.exit(1)
vc = np.zeros(vc_dims)
if(meanOn):
vm = np.zeros(vc_dims[1:])
else:
vm = np.array([]) # Empty array.
# Create index arrays for interpolation.
jl = np.arange(0, vc_dims[icmp]); jr = jl + 1 # x,y,z: left < right
nTo, nzo, nyo, nxo = np.shape(v0)
nTimes, nz, ny, nx = vc_dims
if( nz == nzo ): k1 = 0
else: k1 = 1
for i in range(nTimes):
tmp0 = v0[i, :, :, :].copy()
if(iOn):
tmp1 = (tmp0[:, :, jl] + tmp0[:, :, jr]) * 0.5; tmp0 = None
tmp2 = tmp1[k1:, 0:-1, :]
if(jOn):
tmp1 = (tmp0[:, jl, :] + tmp0[:, jr, :]) * 0.5; tmp0 = None
tmp2 = tmp1[k1:, :, 0:-1]
if(kOn):
tmp1 = (tmp0[jl, :, :] + tmp0[jr, :, :]) * 0.5; tmp0 = None
tmp2 = tmp1[:, 0:-1, 0:-1]
if( kCopy ):
tmp1 = tmp0[jl, :, :]; tmp0 = None
tmp2 = tmp1[:, 0:-1, 0:-1]
tmp1 = None
vc[i, :, :, :] = tmp2
if(meanOn):
vm += tmp2.copy()
# Clear memory.
tmp0 = None
tmp1 = None
tmp2 = None
if(meanOn):
vm /= float(nTimes)
print(' Interpolation along the {}^th direction completed.'.format(cmpStr))
return vc, vm # vm is empty if meanOn=False.
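# Hedged usage sketch: interpolate a staggered PALM u-component onto cell
# centers (shapes are illustrative assumptions):
#   vc_dims = np.array([nT, nz, ny - 1, nx - 1])
#   uc, um = interpolatePalmVectors(u_stag, vc_dims, 'i', meanOn=True)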
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def vectorPrimeComponent(vc, vm):
vc_dims = np.shape(vc)
vp = np.zeros(np.shape(vc))
nTimes = vc_dims[0]
print(' Computing primes for {} times ... '.format(nTimes))
for i in range(nTimes):
vp[i, :, :, :] = vc[i, :, :, :] - vm[:, :, :]
print(' ... done.')
return vp
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def createNetcdfVariable(dso, v, vName, vLen, vUnits, vType, vTuple, parameter, zlib=False, fill_value=None,verbose=True):
if(parameter):
dso.createDimension(vName, vLen)
var = dso.createVariable(vName, vType, vTuple, zlib=zlib, fill_value=fill_value)
var.units = vUnits
var[:] = v
v = None
if(parameter):
pStr = 'parameter'
else:
pStr = 'variable'
if(verbose):
print(' NetCDF {} {} successfully created. '.format(pStr, vName))
return var
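# Hedged usage sketch, following this file's parameter/variable convention
# (values are illustrative assumptions):
#   dso = netcdfOutputDataset('out.nc')
#   xv = createNetcdfVariable(dso, xarr, 'x', len(xarr), 'm', 'f4', ('x',), True)
#   uv = createNetcdfVariable(dso, u, 'u', None, 'm/s', 'f4', ('time','z','y','x'), False)
#   netcdfWriteAndClose(dso)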
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def createCoordinateAxis(dso, Rdims, Rdpx, axis, varname, formatstr, unit, parameter, zlib=False, verbose=True, offset=0.0):
arr = np.empty(Rdims[axis])
for i in range(Rdims[axis]):
# dpx is in [N,E], see getGeoTransform() in gdalTools.py
arr[i] = np.maximum(0.0, i + offset) * Rdpx[axis]
axvar = createNetcdfVariable( \
dso, arr, varname, len(arr), unit, formatstr, (varname,), parameter, zlib, verbose=verbose )
arr = None
return axvar
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def fillTopographyArray(Rtopo, Rdims, Rdpx, datatype):
topodims = np.array([Rdims[2], Rdims[0], Rdims[1]])
topo = np.zeros(topodims, dtype=datatype)
print(' \n Filling 3D array from topography data...')
print(' Dimensions [z,y,x]: [{}, {}, {}]'.format(*topodims))
print(' Total number of data points: {}'.format(np.prod(topodims)))
for x in range(Rdims[1]):
for y in range(Rdims[0]):
# Reverse the y-axis because of the top-left origo in raster
maxind = int(round(Rtopo[-y - 1][x] / Rdpx[2]))+1
if(maxind>1):
topo[0:maxind, y, x] = 1
print(' ...done. \n')
return topo
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
def read3dDictVarFromNetCDF( fname, nameDict, cl=1 ):
'''
Establish two boolean variables which indicate whether the created variable is an
independent or dependent variable in function createNetcdfVariable().
'''
parameter = True; variable = False
'''
Create a NETCDF input dataset (ds), and its associated lists of dependent (varList)
and independent (dimList) variables.
'''
ds, varList, paramList = netcdfDataset(fname)
'''
Read cell center coordinates and time.
Create the output independent variables right away and empty memory.
'''
time, time_dims = read1DVariableFromDataset('time', nameDict['varname'], ds, 0, 0, 1 ) # All values.
x, x_dims = read1DVariableFromDataset(nameDict['xname'], nameDict['varname'], ds, 0, 0, cl )
y, y_dims = read1DVariableFromDataset(nameDict['yname'], nameDict['varname'], ds, 0, 0, cl )
z, z_dims = read1DVariableFromDataset(nameDict['zname'], nameDict['varname'], ds, 0, 0, cl )
x[np.isnan(x)] = 0. # Clear away NaNs
y[np.isnan(y)] = 0. #
z[np.isnan(z)] = 0. #
'''
Read in the velocity components.
PALM netCDF4:
u(time, zu_3d, y, xu)
v(time, zu_3d, yv, x)
w(time, zw_3d, y, x)
'''
print(' Extracting {} from dataset ... '.format( nameDict['varname'] ))
v, v_dims = read3DVariableFromDataset(nameDict['varname'], ds, 0, 0, 0, cl) # All values.
print(' {}_dims = {}\n Done!'.format(nameDict['varname'], v_dims ))
dataDict = dict()
dataDict['v'] = v
dataDict['x'] = x
dataDict['y'] = y
dataDict['z'] = z
dataDict['time'] = time
return dataDict
# =*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
| mit | -815,513,492,057,849,600 | 27.917949 | 124 | 0.561092 | false |
clemsos/mitras | tests/test_clustering.py | 1 | 1207 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import numpy as np
from test_helpers import TestHelpers
helpers=TestHelpers()
helpers.add_relative_path()
from lib.api import Similarity_API
from lib.clusters import get_linkage_matrix
from time import time
# from scipy.cluster.hierarchy import linkage, dendrogram, leaves_list
import fastcluster
t0=time()
path="/home/clemsos/Dev/mitras/data/tmp"
chunk_size=2500 # cut the whole dataset into chunks so it can be processed
protomemes_count= 43959#db["hashtags"].count()
api=Similarity_API(path,protomemes_count,chunk_size)
print
sims=api.get_similarity_matrix()
print sims.shape
similarity_treshold = 0.7 # minimum value of similarity between protomemes
similar_protomemes_treshold=20
print 'getting rows with %d protomemes that are at least %.3fx similar'%(similar_protomemes_treshold,similarity_treshold)
# get row numbers
remarquable_rows=np.where((sims > similarity_treshold).sum(axis=1) >= similar_protomemes_treshold)[0]
# print type(remarquable_rows)
print "%d memes found"%len(remarquable_rows)
print remarquable_rows
# get memes data
print " done in %.3fs"%(time()-t0)
print
print
print " done in %.3fs"%(time()-t0) | mit | -5,519,701,892,024,718,000 | 23.16 | 121 | 0.763877 | false |
tecnologiaenegocios/tn.plonehtmlimagecache | src/tn/plonehtmlimagecache/tests/test_behaviors.py | 1 | 5640 | from Products.CMFDefault.Document import Document
from plone.behavior.interfaces import IBehavior
from plone.behavior.interfaces import IBehaviorAssignable
from plone.directives.form import IFormFieldProvider
from tn.plonehtmlimagecache import behaviors
from tn.plonehtmlimagecache import interfaces
from tn.plonehtmlimagecache.tests import base
from zope.annotation.interfaces import IAttributeAnnotatable
from zope.app.testing import placelesssetup
import stubydoo
import unittest
import zope.annotation
import zope.component
import zope.interface
def set_default_attr(obj, attr, default=None):
if not hasattr(obj, attr):
setattr(obj, attr, default)
return getattr(obj, attr)
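# Example: returns the existing attribute, or creates it with the default:
#   obj = stubydoo.double()
#   set_default_attr(obj, 'count', 0)   # -> 0, and obj.count now exists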
@zope.component.adapter(None)
@zope.interface.implementer(zope.annotation.interfaces.IAnnotations)
def annotations(context):
return set_default_attr(context, '_annotations', dict())
class TestCase(unittest.TestCase):
def setUp(self):
placelesssetup.setUp(self)
zope.component.provideAdapter(annotations)
self.context = stubydoo.double()
self.adapted = behaviors.HTMLImageCacheableFromContent(self.context)
def tearDown(self):
placelesssetup.tearDown()
class TestHTMLImageCacheableFromContentBehavior(TestCase):
def test_behavior_provides_form_fields(self):
self.assertTrue(IFormFieldProvider.providedBy(
behaviors.IHTMLImageCacheableFromContent
))
class TestHTMLImageCacheableFromContentSaveImagesLocallyFlag(TestCase):
def test_save_images_locally_defaults_to_false(self):
self.assertFalse(self.adapted.save_images_locally)
def test_save_images_locally_can_be_set_to_true(self):
self.adapted.save_images_locally = True
self.assertTrue(self.adapted.save_images_locally)
def test_save_images_locally_is_persisted(self):
self.adapted.save_images_locally = False
self.adapted.save_images_locally = True
new_adapted = behaviors.HTMLImageCacheableFromContent(self.context)
self.assertTrue(new_adapted.save_images_locally)
class TestHTMLImageCacheableFromContentHTML(TestCase):
def setUp(self):
super(TestHTMLImageCacheableFromContentHTML, self).setUp()
class HTMLAttribute(object):
zope.component.adapts(None)
zope.interface.implements(interfaces.IHTMLAttribute)
def __init__(self, context): self.context = context
def _set_html(self, value): self.context._html = value
def _get_html(self):
return set_default_attr(self.context, '_html', 'no-value')
html = property(_get_html, _set_html)
zope.component.provideAdapter(HTMLAttribute)
def test_gets_html_from_adapter(self):
self.assertEquals(self.adapted.html, 'no-value')
def test_sets_html_to_adapter(self):
self.adapted.html = 'other-value'
self.assertEquals(self.adapted.html, 'other-value')
class TestHTMLImageCacheableFromContentBehaviorRegistration(base.TestCase):
def afterSetUp(self):
super(TestHTMLImageCacheableFromContentBehaviorRegistration, self).\
afterSetUp()
self.context = Document('document')
zope.interface.alsoProvides(self.context, IAttributeAnnotatable)
self.behavior_assignable_factory = None
# This will enable the behavior for our document.
class BehaviorAssignable(object):
zope.component.adapts(Document)
zope.interface.implements(IBehaviorAssignable)
def __init__(self, context):
self.context = context
def supports(self, behavior_interface):
return behavior_interface is \
behaviors.IHTMLImageCacheableFromContent
def enumerate_behaviors(self):
i = behaviors.IHTMLImageCacheableFromContent
yield zope.component.queryUtility(IBehavior,
name=i.__identifier__)
zope.component.provideAdapter(BehaviorAssignable)
self.behavior_assignable_factory = BehaviorAssignable
def beforeTearDown(self):
zope.component.getGlobalSiteManager().\
unregisterAdapter(self.behavior_assignable_factory)
def test_behavior_is_registered(self):
self.assertTrue(zope.component.queryUtility(
IBehavior,
name=behaviors.IHTMLImageCacheableFromContent.__identifier__
) is not None)
def test_behavior_has_correct_marker(self):
behavior = zope.component.queryUtility(
IBehavior,
name=behaviors.IHTMLImageCacheableFromContent.__identifier__
)
if behavior is None:
self.fail('behavior not registered')
else:
self.assertTrue(behavior.marker is
interfaces.IPossibleHTMLImageCacheable)
def test_behavior_is_usable(self):
adapted = behaviors.IHTMLImageCacheableFromContent(self.context, None)
self.assertTrue(adapted is not None)
def test_adaptation_to_html_image_cacheable_uses_behavior(self):
adapted = interfaces.IHTMLImageCacheable(self.context, None)
self.assertTrue(interfaces.IHTMLImageCacheable.providedBy(adapted))
def test_adaptation_to_html_image_cacheable_fails_if_cant_adapt_behavior(self):
context = stubydoo.double()
zope.interface.alsoProvides(context,
interfaces.IPossibleHTMLImageCacheable)
adapted = interfaces.IHTMLImageCacheable(context, None)
self.assertTrue(adapted is None)
| bsd-3-clause | -311,616,254,257,620,030 | 36.350993 | 83 | 0.694326 | false |
nafitzgerald/allennlp | allennlp/modules/alternating_highway_lstm.py | 1 | 13051 | from typing import Tuple
from overrides import overrides
import torch
from torch.autograd import Function, Variable
from torch.nn import Parameter
from torch.nn.utils.rnn import PackedSequence, pad_packed_sequence, pack_padded_sequence
from allennlp.nn.initializers import block_orthogonal
from allennlp.custom_extensions._ext import highway_lstm_layer
class _AlternatingHighwayLSTMFunction(Function):
def __init__(self, input_size: int, hidden_size: int, num_layers: int, train: bool) -> None:
super(_AlternatingHighwayLSTMFunction, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layers = num_layers
self.train = train
@overrides
def forward(self, # pylint: disable=arguments-differ
inputs: torch.Tensor,
weight: torch.Tensor,
bias: torch.Tensor,
state_accumulator: torch.Tensor,
memory_accumulator: torch.Tensor,
dropout_mask: torch.Tensor,
lengths: torch.Tensor,
gates: torch.Tensor) -> Tuple[torch.Tensor, None]:
sequence_length, batch_size, input_size = inputs.size()
tmp_i = inputs.new(batch_size, 6 * self.hidden_size)
tmp_h = inputs.new(batch_size, 5 * self.hidden_size)
is_training = 1 if self.train else 0
highway_lstm_layer.highway_lstm_forward_cuda(input_size, # type: ignore # pylint: disable=no-member
self.hidden_size,
batch_size,
self.num_layers,
sequence_length,
inputs,
lengths,
state_accumulator,
memory_accumulator,
tmp_i,
tmp_h,
weight,
bias,
dropout_mask,
gates,
is_training)
self.save_for_backward(inputs, lengths, weight, bias, state_accumulator,
memory_accumulator, dropout_mask, gates)
# The state_accumulator has shape: (num_layers, sequence_length + 1, batch_size, hidden_size)
# so for the output, we want the last layer and all but the first timestep, which was the
# initial state.
output = state_accumulator[-1, 1:, :, :]
return output, state_accumulator[:, 1:, :, :]
@overrides
def backward(self, grad_output, grad_hy): # pylint: disable=arguments-differ
(inputs, lengths, weight, bias, state_accumulator, # pylint: disable=unpacking-non-sequence
memory_accumulator, dropout_mask, gates) = self.saved_tensors
inputs = inputs.contiguous()
sequence_length, batch_size, input_size = inputs.size()
parameters_need_grad = 1 if self.needs_input_grad[1] else 0 # pylint: disable=unsubscriptable-object
grad_input = inputs.new().resize_as_(inputs).zero_()
grad_state_accumulator = inputs.new().resize_as_(state_accumulator).zero_()
grad_memory_accumulator = inputs.new().resize_as_(memory_accumulator).zero_()
grad_weight = inputs.new()
grad_bias = inputs.new()
grad_dropout = None
grad_lengths = None
grad_gates = None
if parameters_need_grad:
grad_weight.resize_as_(weight).zero_()
grad_bias.resize_as_(bias).zero_()
tmp_i_gates_grad = inputs.new().resize_(batch_size, 6 * self.hidden_size).zero_()
tmp_h_gates_grad = inputs.new().resize_(batch_size, 5 * self.hidden_size).zero_()
is_training = 1 if self.train else 0
highway_lstm_layer.highway_lstm_backward_cuda(input_size, # pylint: disable=no-member
self.hidden_size,
batch_size,
self.num_layers,
sequence_length,
grad_output,
lengths,
grad_state_accumulator,
grad_memory_accumulator,
inputs,
state_accumulator,
memory_accumulator,
weight,
gates,
dropout_mask,
tmp_h_gates_grad,
tmp_i_gates_grad,
grad_hy,
grad_input,
grad_weight,
grad_bias,
is_training,
parameters_need_grad)
return (grad_input, grad_weight, grad_bias, grad_state_accumulator,
grad_memory_accumulator, grad_dropout, grad_lengths, grad_gates)
class AlternatingHighwayLSTM(torch.nn.Module):
"""
A stacked LSTM with LSTM layers which alternate between going forwards over
the sequence and going backwards, with highway connections between each of
the alternating layers. This implementation is based on the description in
`Deep Semantic Role Labelling - What works and what's next
<https://homes.cs.washington.edu/~luheng/files/acl2017_hllz.pdf>`_ .
Parameters
----------
input_size : int, required
The dimension of the inputs to the LSTM.
hidden_size : int, required
The dimension of the outputs of the LSTM.
num_layers : int, required
The number of stacked LSTMs to use.
recurrent_dropout_probability: float, optional (default = 0.0)
The dropout probability to be used in a dropout scheme as stated in
`A Theoretically Grounded Application of Dropout in Recurrent Neural Networks
<https://arxiv.org/abs/1512.05287>`_ .
Returns
-------
output : PackedSequence
The outputs of the interleaved LSTMs per timestep. A tensor of shape
(batch_size, max_timesteps, hidden_size) where for a given batch
element, all outputs past the sequence length for that batch are
zero tensors.
"""
def __init__(self,
input_size: int,
hidden_size: int,
num_layers: int = 1,
recurrent_dropout_probability: float = 0) -> None:
super(AlternatingHighwayLSTM, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.num_layers = num_layers
self.recurrent_dropout_probability = recurrent_dropout_probability
self.training = True
# Input dimensions consider the fact that we do
# all of the LSTM projections (and highway parts)
# in a single matrix multiplication.
input_projection_size = 6 * hidden_size
state_projection_size = 5 * hidden_size
bias_size = 5 * hidden_size
# Here we are creating a single weight and bias with the
# parameters for all layers unfolded into it. This is necessary
# because unpacking and re-packing the weights inside the
# kernel would be slow, as it would happen every time it is called.
total_weight_size = 0
total_bias_size = 0
for layer in range(num_layers):
layer_input_size = input_size if layer == 0 else hidden_size
input_weights = input_projection_size * layer_input_size
state_weights = state_projection_size * hidden_size
total_weight_size += input_weights + state_weights
total_bias_size += bias_size
self.weight = Parameter(torch.FloatTensor(total_weight_size))
self.bias = Parameter(torch.FloatTensor(total_bias_size))
self.reset_parameters()
def reset_parameters(self) -> None:
self.bias.data.zero_()
weight_index = 0
bias_index = 0
for i in range(self.num_layers):
input_size = self.input_size if i == 0 else self.hidden_size
# Create a tensor of the right size and initialize it.
init_tensor = self.weight.data.new(input_size, self.hidden_size * 6).zero_()
block_orthogonal(init_tensor, [input_size, self.hidden_size])
# Copy it into the flat weight.
self.weight.data[weight_index: weight_index + init_tensor.nelement()]\
.view_as(init_tensor).copy_(init_tensor)
weight_index += init_tensor.nelement()
# Same for the recurrent connection weight.
init_tensor = self.weight.data.new(self.hidden_size, self.hidden_size * 5).zero_()
block_orthogonal(init_tensor, [self.hidden_size, self.hidden_size])
self.weight.data[weight_index: weight_index + init_tensor.nelement()]\
.view_as(init_tensor).copy_(init_tensor)
weight_index += init_tensor.nelement()
# Set the forget bias to 1.
self.bias.data[bias_index + self.hidden_size:bias_index + 2 * self.hidden_size].fill_(1)
bias_index += 5 * self.hidden_size
def forward(self, inputs: PackedSequence, # pylint: disable=arguments-differ
# pylint: disable=unused-argument
initial_state: torch.Tensor = None)-> Tuple[PackedSequence, torch.Tensor]:
"""
Parameters
----------
inputs : ``PackedSequence``, required.
A batch first ``PackedSequence`` to run the stacked LSTM over.
initial_state : Tuple[torch.Tensor, torch.Tensor], optional, (default = None)
Currently, this is ignored.
Returns
-------
output_sequence : ``PackedSequence``
The encoded sequence of shape (batch_size, sequence_length, hidden_size)
final_states: ``torch.Tensor``
The per-layer final (state, memory) states of the LSTM, each with shape
(num_layers, batch_size, hidden_size).
"""
inputs, lengths = pad_packed_sequence(inputs, batch_first=True)
# Kernel takes sequence length first tensors.
inputs = inputs.transpose(0, 1)
sequence_length, batch_size, _ = inputs.size()
accumulator_shape = [self.num_layers, sequence_length + 1, batch_size, self.hidden_size]
state_accumulator = Variable(inputs.data.new(*accumulator_shape).zero_(), requires_grad=False)
memory_accumulator = Variable(inputs.data.new(*accumulator_shape).zero_(), requires_grad=False)
dropout_weights = inputs.data.new().resize_(self.num_layers, batch_size, self.hidden_size).fill_(1.0)
if self.training:
# Normalize by 1 - dropout_prob to preserve the output statistics of the layer.
dropout_weights.bernoulli_(1 - self.recurrent_dropout_probability)\
.div_((1 - self.recurrent_dropout_probability))
dropout_weights = Variable(dropout_weights, requires_grad=False)
gates = Variable(inputs.data.new().resize_(self.num_layers,
sequence_length,
batch_size, 6 * self.hidden_size))
lengths_variable = Variable(torch.IntTensor(lengths))
implementation = _AlternatingHighwayLSTMFunction(self.input_size,
self.hidden_size,
num_layers=self.num_layers,
train=self.training)
output, _ = implementation(inputs, self.weight, self.bias, state_accumulator,
memory_accumulator, dropout_weights, lengths_variable, gates)
# TODO(Mark): Also return the state here by using index_select with the lengths so we can use
# it as a Seq2VecEncoder.
output = output.transpose(0, 1)
output = pack_padded_sequence(output, lengths, batch_first=True)
return output, None
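# Hedged usage sketch (requires the custom CUDA highway-LSTM kernel to be
# built; sizes are illustrative assumptions):
#   lstm = AlternatingHighwayLSTM(input_size=200, hidden_size=300,
#                                 num_layers=8,
#                                 recurrent_dropout_probability=0.1).cuda()
#   packed = pack_padded_sequence(inputs, lengths, batch_first=True)
#   output, _ = lstm(packed)   # output is a batch-first PackedSequence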
| apache-2.0 | -5,288,858,517,103,349,000 | 49.003831 | 109 | 0.534365 | false |
felgari/k2 | report.py | 1 | 10700 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Felipe Gallego. All rights reserved.
#
# This is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Script to report results.
"""
import sys
import os
import csv
import numpy as np
from ctes import *
from avpos import AvPos
from resdifpos import ResDiffPos
from aptrend import ApTrend
from kfiles import read_input_file, read_res_file
from utils import get_matchings
def report_file_name(index):
return REP_OUT_FILE_PREFIX + index + REP_OUT_FILE_EXT
def do_report(index, k_data, cl, b1_res, a2_res, b1_per, a2_per, extd,
pre_rf = None, sco_rf = None, pre_df = None, sco_df = None):
print("Generating report ...")
rep_ap = []
trend_1 = []
trend_2 = []
the_trend = []
out_file_name = os.path.join(DATA_PATH, report_file_name(index))
avp = AvPos()
avp.calculate()
rdp = ResDiffPos(cl)
rdp.calculate()
aptr = ApTrend()
print("Saving to file: %s" % out_file_name)
try:
with open(out_file_name, 'w') as f:
idx = 0
for k_elt in k_data:
k_name_1 = k_elt[K_NAME_1_COL]
k_name_2 = k_elt[K_NAME_2_COL]
if k_name_1 != K_UNKNOWN_NAME and k_name_2 != K_UNKNOWN_NAME:
data = b1_res
elt_type = TYPE_1_COL
cl_1 = cl.b1_data(k_name_1)
cl_2 = cl.b1_data(k_name_2)
per = b1_per
if not len(cl_1):
data = a2_res
elt_type = TYPE_2_COL
cl_1 = cl.a2_data(k_name_1)
cl_2 = cl.a2_data(k_name_2)
per = a2_per
mat1, val_trend1 = get_matchings(k_name_1, data, True)
mat2, val_trend2 = get_matchings(k_name_2, data, False)
trend_1.append(val_trend1)
trend_2.append(val_trend2)
f.write("%s\n" % GEN_SEP)
f.write("-> %s (%s) - %s (%s)\n" % \
(k_name_1, cl_1[CL_POS_COL], k_name_2, cl_2[CL_POS_COL]))
f.write("Ext %s\n" % extd.mean[idx])
dif_pos = cl_1[CL_POS_COL] - cl_2[CL_POS_COL]
f.write("Dif: %d\n" % (dif_pos))
sum_ran = None
for i in range(dif_pos - DIF_RANGE, dif_pos + DIF_RANGE + 1):
try:
if sum_ran:
sum_ran = [ sum_ran[j] + per[i][j] for j in range(len(sum_ran))]
else:
sum_ran = per[i]
f.write("%d %s\n" % (i, per[i]))
except KeyError:
f.write("%d No disp\n" % i)
dif = cl_1[CL_POS_COL] - cl_2[CL_POS_COL]
trend = rdp.trend(cl_1[CL_POS_COL], cl_2[CL_POS_COL], elt_type)
the_trend.append(trend)
f.write("Sm %s -> %s \n" % (sum_ran, trend))
name_1_trend = avp.trend(k_name_1)
name_2_trend = avp.trend(k_name_2)
avg_1 = np.mean(avp.avpos(k_name_1)[-LAST_POS:])
if avg_1 > avp.avpos(k_name_1)[-1]:
name_1_curr = AVPOS_TREND_DOWN
else:
name_1_curr = AVPOS_TREND_UP
avg_2 = np.mean(avp.avpos(k_name_2)[-LAST_POS:])
if avg_2 > avp.avpos(k_name_2)[-1]:
name_2_curr = AVPOS_TREND_DOWN
else:
name_2_curr = AVPOS_TREND_UP
f.write("Pos. %s: %s\n(AVG: %d) - Current %s - Trend %s\n" % \
(k_name_1, avp.avpos(k_name_1),
avg_1, name_1_curr, name_1_trend))
f.write("Pos. %s: %s\n(AVG: %d) - Current %s - Trend %s\n" % \
(k_name_2, avp.avpos(k_name_2),
avg_2, name_2_curr, name_2_trend))
if len(trend) > 0:
ap_t = aptr.calculate_ap(trend, name_1_trend,
name_2_trend, int(cl_1[CL_POS_COL]),
int(cl_2[CL_POS_COL]))
rep_ap.append(ap_t)
f.write("Ap trend: %s -> %s %s\n" % \
(ap_t, val_trend1, val_trend2))
else:
rep_ap.append(TREND_IG)
if pre_rf and sco_rf:
f.write("Pre RF (%.1f): %s\n" % (sco_rf[idx], pre_rf[idx]))
if pre_df and sco_df:
f.write("Pre DF (%.1f): %s\n" % (sco_df[idx], pre_df[idx]))
f.write("%s\n" % FIRST_SEP)
the_sco = []
for m in mat1:
if elt_type == TYPE_1_COL:
mat_cl = cl.b1_data(NAMES_CONVERT[m[MAT_NAME_2_COL]])
else:
mat_cl = cl.a2_data(NAMES_CONVERT[m[MAT_NAME_2_COL]])
m[MAT_RES_COL] = CHR_TO_RES[m[MAT_RES_COL]]
the_sco.append(int(m[-1][:m[-1].find('-')]))
the_mark = ''
if m[2] == MAX_IS_FIRST:
if mat_cl[CL_POS_COL] + DIFF_POS_THIRD < cl_1[CL_POS_COL]:
the_mark = THE_MARK
elif m[2] == MAX_IS_SECOND:
if cl_1[CL_POS_COL] + DIFF_POS_SECOND < mat_cl[CL_POS_COL]:
the_mark = THE_MARK
elif cl_1[CL_POS_COL] + DIFF_POS_THIRD < mat_cl[CL_POS_COL]:
the_mark = THE_MARK
if mat_cl[CL_POS_COL] < cl_2[CL_POS_COL] + REF_LEVEL:
the_ref = "%s [%s] " % (THE_REF, m[2])
else:
the_ref = ''
f.write("%s (%s) %s %s\n" % (m, mat_cl[CL_POS_COL],
the_ref ,the_mark))
if not len(the_sco):
print(m[MAT_NAME_2_COL])
print(elt_type)
the_sco.remove(max(the_sco))
the_sco.remove(min(the_sco))
f.write("%s\n" % SECOND_SEP)
the_sco2 = []
for m in mat2:
if elt_type == TYPE_1_COL:
mat_cl = cl.b1_data(NAMES_CONVERT[m[MAT_NAME_1_COL]])
else:
mat_cl = cl.a2_data(NAMES_CONVERT[m[MAT_NAME_1_COL]])
m[MAT_RES_COL] = CHR_TO_RES[m[MAT_RES_COL]]
the_sco2.append(int(m[-1][m[-1].find('-')+1:]))
the_mark = ''
if m[2] == MAX_IS_FIRST:
if cl_2[CL_POS_COL] + DIFF_POS_THIRD < mat_cl[CL_POS_COL]:
the_mark = THE_MARK
elif m[2] == MAX_IS_SECOND:
if mat_cl[CL_POS_COL] + DIFF_POS_SECOND < cl_2[CL_POS_COL]:
the_mark = THE_MARK
elif mat_cl[CL_POS_COL] + DIFF_POS_THIRD < cl_2[CL_POS_COL]:
the_mark = THE_MARK
if mat_cl[CL_POS_COL] < cl_1[CL_POS_COL] + REF_LEVEL:
the_ref = "%s [%s] " % (THE_REF, m[2])
else:
the_ref = ''
f.write("%s (%s) %s %s\n" % (m, mat_cl[CL_POS_COL],
the_ref ,the_mark))
f.write("%s\n" % SECOND_SEP)
the_sco2.remove(max(the_sco2))
the_sco2.remove(min(the_sco2))
f.write("%0.1f - %0.1f\n" % (np.mean(the_sco), np.mean(the_sco2)))
else:
trend_1.append(TREND_IG)
trend_2.append(TREND_IG)
rep_ap.append(TREND_IG)
idx += 1
aptr.write_data(index)
except IOError as ioe:
print("IOError saving file: '%s'" % out_file_name)
except KeyError as ke:
print("KeyError saving file: '%s'" % out_file_name)
except IndexError as ie:
print("IndexError saving file: '%s'" % out_file_name)
return rep_ap, trend_1, trend_2, the_trend
def report_generated(index):
return os.path.exists(report_file_name(index))
if __name__ == "__main__":
if len(sys.argv) == NUM_ARGS:
sys.exit(do_report(sys.argv[1]))
else:
print("The index is needed as argument.") | gpl-3.0 | -4,561,189,203,595,888,600 | 38.487085 | 96 | 0.38028 | false |
nvbn/coviolations_web | projects/forms.py | 1 | 1067 | from pymongo import DESCENDING
from django import forms
from tasks.models import Tasks
from tasks.exceptions import TaskDoesNotExists
from .models import Project
class FindTaskForBadgeForm(forms.Form):
"""Find task for badge form"""
project = forms.ModelChoiceField(
Project.objects.all(), required=True, to_field_name='name',
)
commit = forms.CharField(required=False)
branch = forms.CharField(required=False)
def get_task(self):
"""Get task"""
filter_spec = {
'project': self.cleaned_data['project'].name,
}
if self.cleaned_data.get('commit'):
filter_spec['commit.hash'] = self.cleaned_data['commit']
if self.cleaned_data.get('branch'):
filter_spec['commit.branch'] = self.cleaned_data['branch']
task = Tasks.find_one(
filter_spec, sort=[('created', DESCENDING)], fields={
'status': True,
},
)
if task:
return task
else:
raise TaskDoesNotExists(filter_spec)
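    # Hedged usage sketch (field values are illustrative assumptions):
    #   form = FindTaskForBadgeForm({'project': 'name', 'branch': 'master'})
    #   if form.is_valid():
    #       task = form.get_task()   # {'status': ...} or TaskDoesNotExists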
| mit | -7,728,021,554,287,230,000 | 31.333333 | 70 | 0.605436 | false |
aliceinwire/virt-manager | virtManager/inspection.py | 1 | 9120 | #
# Copyright (C) 2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
from Queue import Queue, Empty
from threading import Thread
import logging
import os
from guestfs import GuestFS # pylint: disable=F0401
from virtManager.baseclass import vmmGObject
from virtManager.domain import vmmInspectionData
class vmmInspection(vmmGObject):
# Can't find a way to make Thread release our reference
_leak_check = False
def __init__(self):
vmmGObject.__init__(self)
self._thread = Thread(name="inspection thread", target=self._run)
self._thread.daemon = True
self._wait = 5 * 1000 # 5 seconds
self._q = Queue()
self._conns = {}
self._vmseen = {}
self._cached_data = {}
def _cleanup(self):
self._thread = None
self._q = Queue()
self._conns = {}
self._vmseen = {}
self._cached_data = {}
# Called by the main thread whenever a connection is added or
# removed. We tell the inspection thread, so it can track
# connections.
def conn_added(self, engine_ignore, conn):
obj = ("conn_added", conn)
self._q.put(obj)
def conn_removed(self, engine_ignore, uri):
obj = ("conn_removed", uri)
self._q.put(obj)
# Called by the main thread whenever a VM is added to vmlist.
def vm_added(self, conn, uuid):
ignore = conn
ignore = uuid
obj = ("vm_added")
self._q.put(obj)
def start(self):
# Wait a few seconds before we do anything. This prevents
# inspection from being a burden for initial virt-manager
# interactivity (although it shouldn't affect interactivity at
# all).
def cb():
self._thread.start()
return 0
logging.debug("waiting")
self.timeout_add(self._wait, cb)
def _run(self):
while True:
self._process_queue()
self._process_vms()
# Process everything on the queue. If the queue is empty when
# called, block.
def _process_queue(self):
first_obj = self._q.get()
self._process_queue_item(first_obj)
self._q.task_done()
try:
while True:
obj = self._q.get(False)
self._process_queue_item(obj)
self._q.task_done()
except Empty:
pass
def _process_queue_item(self, obj):
if obj[0] == "conn_added":
conn = obj[1]
if conn and not (conn.is_remote()):
uri = conn.get_uri()
self._conns[uri] = conn
conn.connect("vm-added", self.vm_added)
elif obj[0] == "conn_removed":
uri = obj[1]
del self._conns[uri]
elif obj[0] == "vm_added":
# Nothing - just a signal for the inspection thread to wake up.
pass
# Any VMs we've not seen yet? If so, process them.
def _process_vms(self):
for conn in self._conns.itervalues():
for vmuuid in conn.list_vm_uuids():
if not conn.is_active():
break
prettyvm = vmuuid
try:
vm = conn.get_vm(vmuuid)
prettyvm = conn.get_uri() + ":" + vm.get_name()
if vmuuid in self._vmseen:
data = self._cached_data.get(vmuuid)
if not data:
continue
if vm.inspection != data:
logging.debug("Found cached data for %s", prettyvm)
self._set_vm_inspection_data(vm, data)
continue
# Whether success or failure, we've "seen" this VM now.
self._vmseen[vmuuid] = True
self._process(conn, vm, vmuuid)
except:
logging.exception("%s: exception while processing",
prettyvm)
def _process(self, conn, vm, vmuuid):
g = GuestFS()
prettyvm = conn.get_uri() + ":" + vm.get_name()
ignore = vmuuid
disks = []
for disk in vm.get_disk_devices():
if (disk.path and
(disk.type == "block" or disk.type == "file") and
not disk.device == "cdrom"):
disks.append(disk)
if not disks:
logging.debug("%s: nothing to inspect", prettyvm)
return
# Add the disks. Note they *must* be added with readonly flag set.
for disk in disks:
path = disk.path
driver_type = disk.driver_type
if not (os.path.exists(path) and os.access(path, os.R_OK)):
logging.debug("%s: cannot access '%s', skipping inspection",
prettyvm, path)
return
g.add_drive_opts(path, readonly=1, format=driver_type)
g.launch()
# Inspect the operating system.
roots = g.inspect_os()
if len(roots) == 0:
logging.debug("%s: no operating systems found", prettyvm)
return
# Arbitrarily pick the first root device.
root = roots[0]
# Inspection results.
typ = g.inspect_get_type(root) # eg. "linux"
distro = g.inspect_get_distro(root) # eg. "fedora"
major_version = g.inspect_get_major_version(root) # eg. 14
minor_version = g.inspect_get_minor_version(root) # eg. 0
hostname = g.inspect_get_hostname(root) # string
product_name = g.inspect_get_product_name(root) # string
product_variant = g.inspect_get_product_variant(root) # string
# For inspect_list_applications and inspect_get_icon we
# require that the guest filesystems are mounted. However
# don't fail if this is not possible (I'm looking at you,
# FreeBSD).
filesystems_mounted = False
try:
# Mount up the disks, like guestfish --ro -i.
# Sort keys by length, shortest first, so that we end up
# mounting the filesystems in the correct order.
mps = list(g.inspect_get_mountpoints(root))
def compare(a, b):
if len(a[0]) > len(b[0]):
return 1
elif len(a[0]) == len(b[0]):
return 0
else:
return -1
mps.sort(compare)
for mp_dev in mps:
try:
g.mount_ro(mp_dev[1], mp_dev[0])
except:
logging.exception("%s: exception mounting %s on %s "
"(ignored)",
prettyvm, mp_dev[1], mp_dev[0])
filesystems_mounted = True
except:
logging.exception("%s: exception while mounting disks (ignored)",
prettyvm)
icon = None
apps = None
if filesystems_mounted:
# string containing PNG data
icon = g.inspect_get_icon(root, favicon=0, highquality=1)
if icon == "":
icon = None
# Inspection applications.
apps = g.inspect_list_applications(root)
# Force the libguestfs handle to close right now.
del g
# Log what we found.
logging.debug("%s: detected operating system: %s %s %d.%d (%s)",
prettyvm, typ, distro, major_version, minor_version,
product_name)
logging.debug("hostname: %s", hostname)
if icon:
logging.debug("icon: %d bytes", len(icon))
if apps:
logging.debug("# apps: %d", len(apps))
data = vmmInspectionData()
        data.type = str(typ)
data.distro = str(distro)
data.major_version = int(major_version)
data.minor_version = int(minor_version)
data.hostname = str(hostname)
data.product_name = str(product_name)
data.product_variant = str(product_variant)
data.icon = icon
data.applications = list(apps)
self._set_vm_inspection_data(vm, data)
def _set_vm_inspection_data(self, vm, data):
vm.inspection = data
vm.inspection_data_updated()
self._cached_data[vm.get_uuid()] = data
| gpl-2.0 | 184,982,375,710,807,000 | 33.157303 | 79 | 0.539035 | false |
scikit-garden/scikit-garden | skgarden/quantile/tests/test_tree.py | 1 | 2933 | import numpy as np
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
from numpy.testing import assert_array_almost_equal
from skgarden.quantile import DecisionTreeQuantileRegressor
from skgarden.quantile import ExtraTreeQuantileRegressor
from skgarden.quantile.utils import weighted_percentile
boston = load_boston()
X, y = boston.data, boston.target
X_train, X_test, y_train, y_test = train_test_split(
X, y, train_size=0.6, test_size=0.4, random_state=0)
X_train = np.array(X_train, dtype=np.float32)
X_test = np.array(X_test, dtype=np.float32)
estimators = [
DecisionTreeQuantileRegressor(random_state=0),
ExtraTreeQuantileRegressor(random_state=0)
]
def test_quantiles():
# Test with max depth 1.
for est in estimators:
est.set_params(max_depth=1)
est.fit(X_train, y_train)
tree = est.tree_
for q in [20, 40, 50, 60, 80, 90]:
left_ind = X_train[:, tree.feature[0]] <= tree.threshold[0]
right_ind = X_train[:, tree.feature[0]] > tree.threshold[0]
# fixme
left_q = weighted_percentile(y_train[left_ind], q)
right_q = weighted_percentile(y_train[right_ind], q)
for curr_X, curr_y in [[X_train, y_train], [X_test, y_test]]:
actual_q = np.zeros(curr_X.shape[0])
left_ind = curr_X[:, tree.feature[0]] <= tree.threshold[0]
actual_q[left_ind] = left_q
right_ind = curr_X[:, tree.feature[0]] > tree.threshold[0]
actual_q[right_ind] = right_q
expected_q = est.predict(curr_X, quantile=q)
assert_array_almost_equal(expected_q, actual_q)
def test_max_depth_None():
# Since each leaf is pure and has just one unique value.
# the mean equals any quantile.
for est in estimators:
est.set_params(max_depth=None)
est.fit(X_train, y_train)
for quantile in [20, 40, 50, 60, 80, 90]:
for curr_X in [X_train, X_test]:
assert_array_almost_equal(
est.predict(curr_X, quantile=None),
est.predict(curr_X, quantile=quantile), 1)
def test_tree_toy_data():
rng = np.random.RandomState(0)
x1 = rng.randn(1, 10)
X1 = np.tile(x1, (10000, 1))
x2 = 20.0 * rng.randn(1, 10)
X2 = np.tile(x2, (10000, 1))
X = np.vstack((X1, X2))
y1 = rng.randn(10000)
y2 = 5.0 + rng.randn(10000)
y = np.concatenate((y1, y2))
for est in estimators:
est.set_params(max_depth=1)
est.fit(X, y)
for quantile in [20, 30, 40, 50, 60, 70, 80]:
assert_array_almost_equal(
est.predict(x1, quantile=quantile),
[np.percentile(y1, quantile)], 3)
assert_array_almost_equal(
est.predict(x2, quantile=quantile),
[np.percentile(y2, quantile)], 3)
| bsd-3-clause | 3,495,671,320,194,699,300 | 33.916667 | 74 | 0.595295 | false |
chunshen1987/superMC | scripts/generateEbeprofiles.py | 1 | 11849 | #! /usr/bin/env python
import sys, shutil
from numpy import *
from os import path, makedirs
import subprocess
import re
from glob import glob
class color:
"""
define colors in the terminal
"""
purple = '\033[95m'
cyan = '\033[96m'
darkcyan = '\033[36m'
blue = '\033[94m'
green = '\033[92m'
yellow = '\033[93m'
red = '\033[91m'
bold = '\033[1m'
underline = '\033[4m'
end = '\033[0m'
#dictionary for parameter list in superMC
superMCParameters = {
'which_mc_model' : 5,
'sub_model' : 1,
'Npmin' : 2,
'Npmax' : 1000,
'bmin' : 0,
'bmax' : 20,
'cutdSdy' : 1,
'cutdSdy_lowerBound' : 551.864,
'cutdSdy_upperBound' : 1000000.0,
'Aproj' : 197,
'Atarg' : 197,
'ecm' : 200,
'finalFactor' : 1.0,
'use_ed' : 0,
'use_sd' : 1,
'alpha' : 0.14,
'lambda' : 0.138,
'operation' : 2,
'cc_fluctuation_model' : 6,
'output_TATB' : 0,
'output_rho_binary' : 0,
'output_TA' : 1,
'output_rhob' : 0,
'output_spectator_density' : 1,
'generate_reaction_plane_avg_profile' : 0,
'nev' : 1000,
'average_from_order' : 2,
'average_to_order' : 2,
}
nucleus_name_dict = {
208: 'Pb',
197: 'Au',
238: 'U',
63: 'Cu',
1: 'p',
2: 'd',
3: 'He3',
}
nucleus_number_dict = {
'Pb': 208,
'Au': 197,
'U': 238,
'Cu': 63,
'p': 1,
'd': 2,
'He3': 3,
}
def form_assignment_string_from_dict(adict):
"""
Generate a parameter-equals-value string from the given dictionary. The
generated string has a leading blank.
"""
result = ""
for aparameter in adict.keys():
result += " {}={}".format(aparameter, adict[aparameter])
return result
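# Example:
#   form_assignment_string_from_dict({'ecm': 200, 'nev': 10})
#   ->  ' ecm=200 nev=10'   (order follows dict iteration order)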
def translate_centrality_cut(centrality_bound, cut_type='total_entropy'):
"""
translate the centrality boundaries to Npart, dS/dy, b values and update
the parameter lists for simulations
"""
centrality_lower_bound = centrality_bound[0]
centrality_upper_bound = centrality_bound[1]
if superMCParameters['which_mc_model'] == 5:
model_name = 'MCGlb'
elif superMCParameters['which_mc_model'] == 1:
model_name = 'MCKLN'
if superMCParameters['cc_fluctuation_model'] != 0:
multiplicity_fluctuation = 'withMultFluct'
else:
multiplicity_fluctuation = 'noMultFluct'
collision_energy = '%g' % superMCParameters['ecm']
Aproj = superMCParameters['Aproj']
Atrag = superMCParameters['Atarg']
if Aproj == Atrag: #symmetric collision
nucleus_name = nucleus_name_dict[Aproj]+nucleus_name_dict[Atrag]
else: # asymmetric collision
nucleus_name = (nucleus_name_dict[min(Aproj, Atrag)]
+ nucleus_name_dict[max(Aproj, Atrag)])
centrality_cut_file_name = (
'iebe_centralityCut_%s_%s_sigmaNN_gauss_d0.9_%s.dat'
% (cut_type, model_name + nucleus_name + collision_energy,
multiplicity_fluctuation)
)
try:
centrality_cut_file = loadtxt(
path.join(path.abspath('./centrality_cut_tables'),
centrality_cut_file_name))
except IOError:
print("Can not find the centrality cut table for the collision system")
print(centrality_cut_file_name)
exit(1)
lower_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_lower_bound+1e-30))
upper_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_upper_bound))
cut_value_upper = (
(centrality_cut_file[lower_idx-1, 1]
- centrality_cut_file[lower_idx, 1])
/(centrality_cut_file[lower_idx-1, 0]
- centrality_cut_file[lower_idx, 0])
*(centrality_lower_bound - centrality_cut_file[lower_idx-1, 0])
+ centrality_cut_file[lower_idx-1, 1]
)
cut_value_low = (
(centrality_cut_file[upper_idx-1, 1]
- centrality_cut_file[upper_idx, 1])
/(centrality_cut_file[upper_idx-1, 0]
- centrality_cut_file[upper_idx, 0])
*(centrality_upper_bound - centrality_cut_file[upper_idx-1, 0])
+ centrality_cut_file[upper_idx-1, 1]
)
if cut_type == 'total_entropy':
superMCParameters['cutdSdy'] = 1
npart_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
npart_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 4])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 5])
superMCParameters['cutdSdy_lowerBound'] = cut_value_low
superMCParameters['cutdSdy_upperBound'] = cut_value_upper
elif cut_type == 'Npart':
superMCParameters['cutdSdy'] = 0
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
npart_min = cut_value_low
npart_max = cut_value_upper
superMCParameters['Npmax'] = npart_max
superMCParameters['Npmin'] = npart_min
superMCParameters['bmax'] = b_max
superMCParameters['bmin'] = b_min
#print out information
print('-'*80)
print('%s collisions at sqrt{s} = %s A GeV with %s initial conditions'
% (nucleus_name , collision_energy, model_name))
print("Centrality : %g - %g"
% (centrality_lower_bound, centrality_upper_bound) + r"%")
print('centrality cut on ', cut_type)
if cut_type == 'total_entropy':
print('dS/dy :', cut_value_low, '-', cut_value_upper)
print("Npart: ", npart_min, '-', npart_max)
print("b: ", b_min, '-', b_max, ' fm')
print('-'*80)
return
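# Hedged usage sketch: after update_superMC_dict() has configured the
# collision system, translate a 20-30% centrality bin (requires the matching
# table under ./centrality_cut_tables):
#   translate_centrality_cut([20.0, 30.0], cut_type='total_entropy')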
def update_superMC_dict(model, ecm, collsys, nev):
"""
update the superMCParameters dictionary with users input settings
"""
superMCParameters['nev'] = nev
if model == 'MCGlb':
superMCParameters['which_mc_model'] = 5
superMCParameters['sub_model'] = 1
elif model == 'MCKLN':
superMCParameters['which_mc_model'] = 1
superMCParameters['sub_model'] = 7
superMCParameters['cc_fluctuation_model'] = 0
else:
print(sys.argv[0], ': invalid initial model type', model)
print_help_message()
sys.exit(1)
superMCParameters['ecm'] = ecm
if ecm == 2760:
if model == 'MCGlb':
superMCParameters['alpha'] = 0.118
elif model == 'MCKLN':
superMCParameters['lambda'] = 0.138
if ecm <= 200:
if model == 'MCGlb':
superMCParameters['alpha'] = 0.14
elif model == 'MCKLN':
superMCParameters['lambda'] = 0.218
superMCParameters['Aproj'] = nucleus_number_dict[collsys[0]]
superMCParameters['Atarg'] = nucleus_number_dict[collsys[1]]
# for checking
#for x in superMCParameters.keys():
# print x + ': ' + str(superMCParameters[x])
return
def generateEbeprofiles(output_path, centrality_bounds,
cut_type='total_entropy'):
runRecord = open('./runRecord.dat', 'a')
errRecord = open('./errRecord.dat', 'a')
if not path.exists(output_path):
makedirs(output_path)
translate_centrality_cut(centrality_bounds, cut_type)
cen_string = '%g-%g' %(centrality_bounds[0], centrality_bounds[1])
option = form_assignment_string_from_dict(superMCParameters)
cmd = './superMC.e' + option
superMC_folder = path.abspath('./')
print(cmd)
runRecord.write(cmd)
p = subprocess.Popen(cmd, shell=True, stdout=runRecord,
stderr=errRecord, cwd=superMC_folder)
p.wait()
# save files
store_folder = output_path
filelist = glob(path.join(superMC_folder, "data", "*"))
for ifile, filepath in enumerate(filelist):
filename = filepath.split("/")[-1]
if "block" in filename:
newfilename = re.sub("event", "C{}_event".format(cen_string),
filename)
newfilename = re.sub("_block", "".format(cen_string), newfilename)
shutil.move(filepath, path.join(store_folder, newfilename))
shutil.move('./runRecord.dat', path.join(store_folder, 'runRecord.dat'))
shutil.move('./errRecord.dat', path.join(store_folder, 'errRecord.dat'))
def print_help_message():
print("Usage : ")
print(color.bold
+ "{} -ecm ecm ".format(sys.argv[0])
+ "-cen cen_bounds"
+ "[-model model -collision_system collsys -cut_type cut_type]"
+ color.end)
print("Usage of {} command line arguments: ".format(sys.argv[0]))
print(color.bold + "-cen" + color.end
+ " centrality bounds(%): "
+ color.purple + "20-30" + color.end)
print(color.bold + "-ecm" + color.end
+ " collision energy (GeV): "
+ color.purple + "7.7, 11.5, 19.6, 27, 39, 62.4, 200, 2760, 5500"
+ color.end)
print(color.bold + "-cut_type" + color.end
+ " centrality cut type: "
+ color.purple + color.bold + "total_entropy[default]" + color.end
+ color.purple + ", Npart" + color.end)
print(color.bold + "-model" + color.end + " initial condition model: "
+ color.purple + color.bold + " MCGlb[default]" + color.end
+ color.purple + ", MCKLN" + color.end)
print(color.bold + "-collision_system" + color.end
+ " type of collision system: "
+ color.purple + color.bold + " Pb+Pb[default]" + color.end
+ color.purple + ", Au+Au, Cu+Au, U+U, p+Pb, p+Au, d+Au, He3+Au"
+ color.end)
if __name__ == "__main__":
# set default values
model = 'MCGlb'
cut_type = 'total_entropy'
#cut_type = 'Npart'
collsys = 'Au+Au'.split('+')
output_path = path.abspath('./RESULTS/')
nev = 100
while len(sys.argv) > 1:
option = sys.argv[1]
del sys.argv[1]
if option == '-model':
model = str(sys.argv[1])
del sys.argv[1]
elif option == '-collision_system':
collsys = str(sys.argv[1]).split('+')
del sys.argv[1]
elif option == "-nev":
nev = int(sys.argv[1])
del sys.argv[1]
elif option == '-cut_type':
cut_type = str(sys.argv[1])
del sys.argv[1]
if cut_type not in ['total_entropy', 'Npart']:
print(sys.argv[0], ': invalid centrality cut type', cut_type)
print_help_message()
sys.exit(1)
elif option == '-cen':
centrality_bounds = [float(istr) for istr in str(sys.argv[1]).split('-')]
del sys.argv[1]
elif option == '-ecm':
ecm = float(sys.argv[1])
del sys.argv[1]
elif option == '-output':
            folder = str(sys.argv[1])
output_path = path.join(path.abspath('./'), folder)
del sys.argv[1]
elif option == '-h':
print_help_message()
sys.exit(0)
else:
print(sys.argv[0], ': invalid option ', option)
print_help_message()
sys.exit(1)
try:
update_superMC_dict(model, ecm, collsys, nev)
generateEbeprofiles(output_path, centrality_bounds, cut_type)
except NameError:
print_help_message()
sys.exit(1)
| gpl-3.0 | 1,107,092,717,768,073,200 | 34.160237 | 85 | 0.545025 | false |
sonusz/PhasorToolBox | examples/freq_meter.py | 1 | 1820 | #!/usr/bin/env python3
"""
This is a real-time frequency meter for two PMUs.
This code connects to two PMUs, plots the frequency of the past 300 time-stamps, and updates the plot in real-time.
"""
from phasortoolbox import PDC,Client
import matplotlib.pyplot as plt
import numpy as np
import gc
import logging
logging.basicConfig(level=logging.DEBUG)
class FreqMeter(object):
def __init__(self):
x = np.linspace(-10.0, 0.0, num=300, endpoint=False)
y = [60.0]*300
plt.ion()
self.fig = plt.figure()
self.ax1 = self.fig.add_subplot(211)
self.line1, = self.ax1.plot(x, y)
plt.title('PMU1 Frequency Plot')
plt.xlabel('Time (s)')
plt.ylabel('Freq (Hz)')
self.ax2 = self.fig.add_subplot(212)
self.line2, = self.ax2.plot(x, y)
plt.title('PMU2 Frequency Plot')
plt.xlabel('Time (s)')
plt.ylabel('Freq (Hz)')
plt.tight_layout()
def update_plot(self, synchrophasors):
y_data = [[],[]]
for synchrophasor in synchrophasors:
for i, msg in enumerate(synchrophasor):
y_data[i].append(msg.data.pmu_data[0].freq)
self.line1.set_ydata(y_data[0])
self.line2.set_ydata(y_data[1])
self.ax1.set_ylim(min(y_data[0]),max(y_data[0]))
self.ax2.set_ylim(min(y_data[1]),max(y_data[1]))
self.fig.canvas.draw()
self.fig.canvas.flush_events()
del(synchrophasors)
gc.collect()
if __name__ == '__main__':
pmu_client1 = Client(remote_ip='10.0.0.1', remote_port=4722, idcode=1, mode='TCP')
pmu_client2 = Client(remote_ip='10.0.0.2', remote_port=4722, idcode=2, mode='TCP')
fm = FreqMeter()
pdc = PDC(clients=[pmu_client1,pmu_client2],history=300)
pdc.callback = fm.update_plot
pdc.run()
| mit | 5,007,648,745,987,173,000 | 29.847458 | 112 | 0.606044 | false |
KenKundert/quantiphy | tests/test_extract.py | 1 | 11090 | # encoding: utf8
import math
from math import pi
from textwrap import dedent
import pytest
from quantiphy import Quantity, add_constant
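# These tests exercise Quantity.extract(), which scans free-form text for
# assignments of the form (as the fixtures below show):
#     name (display name) = value_or_expression 'units' -- description
# The separator may be '=' or ':', units may be quoted with ' or ", the
# comment leader may be --, // or #, and non-conforming lines are ignored.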
def test_workout():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Fclk = 50MHz -- clock frequency
This is an arbitrary line of text.
This is an line of text that: triggers the line processing but still should be ignored..
"""
)
f_clk = qs.pop('Fclk')
assert f_clk.is_close(Quantity(5e7, 'Hz'), check_units=True)
assert f_clk.is_close(5e7)
assert f_clk.units == 'Hz'
assert f_clk.name == 'Fclk'
assert f_clk.desc == 'clock frequency'
assert not qs
def test_roomful():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Fclk: 50MHz // clock frequency
"""
)
f_clk = qs.pop('Fclk')
assert f_clk.is_close(Quantity(5e7, 'Hz'), check_units=True)
assert f_clk.is_close(5e7)
assert f_clk.units == 'Hz'
assert f_clk.name == 'Fclk'
assert f_clk.desc == 'clock frequency'
assert not qs
def test_bulletin():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Fclk = 50MHz # clock frequency
"""
)
f_clk = qs.pop('Fclk')
assert f_clk.is_close(Quantity(5e7, 'Hz'), check_units=True)
assert f_clk.is_close(5e7)
assert f_clk.units == 'Hz'
assert f_clk.name == 'Fclk'
assert f_clk.desc == 'clock frequency'
assert not qs
def test_deduce():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Fclk = 50MHz
"""
)
f_clk = qs.pop('Fclk')
assert f_clk.is_close(Quantity(5e7, 'Hz'), check_units=True)
assert f_clk.is_close(5e7)
assert f_clk.units == 'Hz'
assert f_clk.name == 'Fclk'
assert f_clk.desc == ''
assert not qs
def test_proof():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
$F_{\rm clk}$ = 50MHz -- clock frequency
"""
)
f_clk = qs.pop(r'$F_{\rm clk}$')
assert f_clk.is_close(Quantity(5e7, 'Hz'), check_units=True)
assert f_clk.is_close(5e7)
assert f_clk.units == 'Hz'
assert f_clk.name == r'$F_{\rm clk}$'
assert f_clk.desc == 'clock frequency'
assert not qs
def test_wager():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Fclk ($F_{\rm clk}$) = 50MHz -- clock frequency
"""
)
f_clk = qs.pop('Fclk')
assert f_clk.is_close(Quantity(5e7, 'Hz'), check_units=True)
assert f_clk.is_close(5e7)
assert f_clk.units == 'Hz'
assert f_clk.name == r'$F_{\rm clk}$'
assert f_clk.desc == 'clock frequency'
assert not qs
def test_disallow():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
rate = 64GiB/s -- bit rate
""",
binary = True,
)
rate = qs.pop('rate')
assert float(rate) == 68719476736
assert rate.units == 'B/s'
assert not qs
def test_anatomy():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Rin = ∞Ω -- input resistance
""",
)
Rin = qs.pop('Rin')
assert float(Rin) == float('inf')
assert Rin.units == 'Ω'
assert Rin.is_infinite() == 'inf'
assert not qs
def test_billow():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
C3 = 250.7nF
f_corner (f₀) = 500mHz
w_corner (ω₀) = 2*pi*f_corner 'rads/s'
Aw = C3*sqrt(w_corner) '√Ω'
""",
predefined = dict(sqrt=math.sqrt)
)
C3 = qs.pop('C3')
assert str(C3) == '250.7 nF'
assert C3.units == 'F'
assert C3.name == 'C3'
f_corner = qs.pop('f_corner')
assert str(f_corner) == '500 mHz'
assert f_corner.units == 'Hz'
assert f_corner.name == 'f₀'
w_corner = qs.pop('w_corner')
assert str(w_corner) == '3.1416 rads/s'
assert w_corner.units == 'rads/s'
assert w_corner.name == 'ω₀'
Aw = qs.pop('Aw')
assert str(Aw) == '444.35 n√Ω'
assert Aw.units == '√Ω'
assert Aw.name == 'Aw'
assert not qs
def test_invention():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Fin = 10MHz -- input frequency
Tstop = 5/Fin -- stop time
"""
)
f_in = qs.pop('Fin')
assert f_in.is_close(Quantity(1e7, 'Hz'), check_units=True)
assert f_in.is_close(1e7)
assert f_in.units == 'Hz'
assert f_in.name == 'Fin'
assert f_in.desc == 'input frequency'
t_stop = qs.pop('Tstop')
assert t_stop.is_close(Quantity(5/f_in, ''), check_units=True)
assert t_stop.is_close(5/f_in)
assert t_stop.units == ''
assert t_stop.name == 'Tstop'
assert t_stop.desc == 'stop time'
assert not qs
def test_route():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Fin = 10MHz -- input frequency
Tstop = 5/Fin "s" -- stop time
"""
)
f_in = qs.pop('Fin')
assert f_in.is_close(Quantity(1e7, 'Hz'), check_units=True)
assert f_in.is_close(1e7)
assert f_in.units == 'Hz'
assert f_in.name == 'Fin'
assert f_in.desc == 'input frequency'
t_stop = qs.pop('Tstop')
assert t_stop.is_close(Quantity(5/f_in, 's'), check_units=True)
assert t_stop.is_close(5/f_in)
assert t_stop.units == 's'
assert t_stop.name == 'Tstop'
assert t_stop.desc == 'stop time'
assert not qs
def test_basilica():
Quantity.reset_prefs()
qs = Quantity.extract('XAU = 1.9 k$/oz # price of gold on 23 July 2020')
xau = qs.pop('XAU')
assert xau.is_close(Quantity(1900, '$/oz'), check_units=True)
assert xau.is_close(1900)
assert xau.units == '$/oz'
assert xau.name == 'XAU'
assert xau.desc == 'price of gold on 23 July 2020'
assert not qs
def test_critique():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
-- Fclk = 50MHz -- clock frequency
"""
)
assert not qs
def test_socialist():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
# Fclk = 50MHz -- clock frequency
"""
)
assert not qs
def test_stumble():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
// Fclk = 50MHz -- clock frequency
"""
)
assert not qs
def test_guardian():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
This is a non conforming line.
"""
)
assert not qs
def test_hussy():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
This is a non conforming line.
"""
)
assert not qs
def test_affiliate():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
This is a non conforming line.
Fin = 10MHz -- input frequency
-- Fin = 10MHz -- input frequency
Tstop = 5/Fin "s" -- stop time
This is a non conforming line.
"""
)
f_in = qs.pop('Fin')
assert f_in.is_close(Quantity(1e7, 'Hz'), check_units=True)
assert f_in.is_close(1e7)
assert f_in.units == 'Hz'
assert f_in.name == 'Fin'
assert f_in.desc == 'input frequency'
t_stop = qs.pop('Tstop')
assert t_stop.is_close(Quantity(5/f_in, 's'), check_units=True)
assert t_stop.is_close(5/f_in)
assert t_stop.units == 's'
assert t_stop.name == 'Tstop'
assert t_stop.desc == 'stop time'
assert not qs
def test_sagan():
Quantity.reset_prefs()
qs = Quantity.extract(
r"""
Carl Sagan's frequencies
-- These are the frequencies that Carl Sagan asserted were of
-- high interest to SETI.
f_hy ($f_{\rm hy}$) = 1420.405751786 MHz -- Hydrogen line frequency
f_sagan1 ($f_{\rm sagan1}$) = pi*f_hy "Hz" -- Sagan's first frequency
f_sagan2 ($f_{\rm sagan2}$) = 2*pi*f_hy "Hz" -- Sagan's second frequency
f_sagan2x ($f_{\rm sagan2}$) = tau*f_hy "Hz" -- Sagan's second frequency
half_c ($\frac{c}{2}$) = c/2 "m/s" -- Half the speed of light
a_string (a string) = 'a string' -- yep, its a string
a_dict (a dict) = {0:0, 1:1} -- yep, its a dict
"""
)
f_hy = qs.pop('f_hy')
assert f_hy.is_close(Quantity(1.420405751786e9, 'Hz'), check_units=True)
assert f_hy.is_close(1.420405751786e9)
assert f_hy.units == 'Hz'
assert f_hy.name == r'$f_{\rm hy}$'
assert f_hy.desc == 'Hydrogen line frequency'
f_sagan1 = qs.pop('f_sagan1')
assert f_sagan1.is_close(Quantity(pi*1.420405751786e9, 'Hz'), check_units=True)
assert f_sagan1.is_close(pi*1.420405751786e9)
assert f_sagan1.units == 'Hz'
assert f_sagan1.name == r'$f_{\rm sagan1}$'
assert f_sagan1.desc == "Sagan's first frequency"
f_sagan2 = qs.pop('f_sagan2')
assert f_sagan2.is_close(Quantity(2*pi*1.420405751786e9, 'Hz'), check_units=True)
assert f_sagan2.is_close(2*pi*1.420405751786e9)
assert f_sagan2.units == 'Hz'
assert f_sagan2.name == r'$f_{\rm sagan2}$'
assert f_sagan2.desc == "Sagan's second frequency"
f_sagan2x = qs.pop('f_sagan2x')
assert f_sagan2x.is_close(Quantity(2*pi*1.420405751786e9, 'Hz'), check_units=True)
assert f_sagan2x.is_close(2*pi*1.420405751786e9)
assert f_sagan2x.units == 'Hz'
assert f_sagan2x.name == r'$f_{\rm sagan2}$'
assert f_sagan2x.desc == "Sagan's second frequency"
half_c = qs.pop('half_c')
assert half_c.is_close(Quantity('c')/2, check_units=True)
assert half_c.is_close(Quantity('c')/2)
assert half_c.units == 'm/s'
assert half_c.name == r'$\frac{c}{2}$'
assert half_c.desc == "Half the speed of light"
a_string = qs.pop('a_string')
assert a_string == 'a string'
a_dict = qs.pop('a_dict')
assert a_dict == {0:0, 1:1}
assert not qs
def test_assign_rec():
Quantity.reset_prefs()
with Quantity.prefs(
assign_rec=r'(?P<name>\w+?)\s*=\s*(?P<val>\w*)(\s+(--)\s*(?P<desc>.*?))?\Z'
):
qs = Quantity.extract(
r"""
-- The Hydrogen Line
bad = -- Also known as the 21 cm line
= bad -- The spectral line associated with a spin flip.
f_hy = 1420MHz -- Hydrogen line frequency
"""
)
f_hy = qs.pop('f_hy')
assert f_hy.is_close(Quantity(1.42e9, 'Hz'), check_units=True)
assert f_hy.is_close(1.42e9)
assert f_hy.units == 'Hz'
assert f_hy.name == 'f_hy'
assert f_hy.desc == 'Hydrogen line frequency'
assert not qs
if __name__ == '__main__':
# As a debugging aid allow the tests to be run on their own, outside pytest.
    # This makes it easier to see and interpret the textual output.
defined = dict(globals())
for k, v in defined.items():
if callable(v) and k.startswith('test_'):
print()
print('Calling:', k)
print((len(k)+9)*'=')
v()
| gpl-3.0 | 5,471,605,463,591,412,000 | 28.830189 | 100 | 0.552905 | false |
Nexedi/neoppod | neo/master/handlers/client.py | 1 | 6146 | #
# Copyright (C) 2006-2019 Nexedi SA
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from neo.lib.handler import DelayEvent
from neo.lib.protocol import Packets, ProtocolError, MAX_TID, Errors
from ..app import monotonic_time
from . import MasterHandler
class ClientServiceHandler(MasterHandler):
""" Handler dedicated to client during service state """
def handlerSwitched(self, conn, new):
assert new
super(ClientServiceHandler, self).handlerSwitched(conn, new)
def _connectionLost(self, conn):
# cancel its transactions and forgot the node
app = self.app
node = app.nm.getByUUID(conn.getUUID())
assert node is not None, conn
for x in app.tm.clientLost(node):
app.notifyTransactionAborted(*x)
node.setUnknown()
app.broadcastNodesInformation([node])
def askBeginTransaction(self, conn, tid):
"""
        A client requests a TID; nothing is kept about it until the finish.
"""
app = self.app
# Delay new transaction as long as we are waiting for NotifyReady
# answers, otherwise we can't know if the client is expected to commit
# the transaction in full to all these storage nodes.
if app.storage_starting_set:
raise DelayEvent
node = app.nm.getByUUID(conn.getUUID())
tid = app.tm.begin(node, app.storage_readiness, tid)
conn.answer(Packets.AnswerBeginTransaction(tid))
def askNewOIDs(self, conn, num_oids):
conn.answer(Packets.AnswerNewOIDs(self.app.tm.getNextOIDList(num_oids)))
def getEventQueue(self):
# for askBeginTransaction & failedVote
return self.app.tm
def failedVote(self, conn, *args):
app = self.app
conn.answer((Errors.Ack if app.tm.vote(app, *args) else
Errors.IncompleteTransaction)())
def askFinishTransaction(self, conn, ttid, oid_list, checked_list):
app = self.app
tid, node_list = app.tm.prepare(
app,
ttid,
oid_list,
checked_list,
conn.getPeerId(),
)
if tid:
p = Packets.AskLockInformation(ttid, tid)
for node in node_list:
node.ask(p)
else:
conn.answer(Errors.IncompleteTransaction())
# It's simpler to abort automatically rather than asking the client
            # to send a notification on tpc_abort, since it would have kept the
            # transaction longer in the list of transactions.
            # This should happen so rarely that we don't try to minimize the
            # number of abort notifications by looking at the modified partitions.
self.abortTransaction(conn, ttid, app.getStorageReadySet())
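    # Two-phase commit sketch (as implemented above): tm.prepare() fixes the
    # final tid and returns the storage nodes involved; each one is asked to
    # lock the transaction, and the answer to the client is deferred until the
    # storages confirm (that notification path is handled outside this method).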
def askFinalTID(self, conn, ttid):
tm = self.app.tm
if tm.getLastTID() < ttid:
# Invalid ttid, or aborted transaction.
tid = None
elif ttid in tm:
# Transaction is being finished.
# We'll answer when it is unlocked.
tm[ttid].registerForNotification(conn.getUUID())
return
else:
# Transaction committed ? Tell client to ask storages.
tid = MAX_TID
conn.answer(Packets.AnswerFinalTID(tid))
def askPack(self, conn, tid):
app = self.app
if app.packing is None:
storage_list = app.nm.getStorageList(only_identified=True)
app.packing = (conn, conn.getPeerId(),
{x.getUUID() for x in storage_list})
p = Packets.AskPack(tid)
for storage in storage_list:
storage.getConnection().ask(p)
else:
conn.answer(Packets.AnswerPack(False))
def abortTransaction(self, conn, tid, uuid_list):
# Consider a failure when the connection between the storage and the
# client breaks while the answer to the first write is sent back.
# In other words, the client can not know the exact set of nodes that
# know this transaction, and it sends us all nodes it considered for
# writing.
# We must also add those that are waiting for this transaction to be
        # finished (returned by tm.abort), because they may have joined the
        # cluster after the client started to abort.
app = self.app
involved = app.tm.abort(tid, conn.getUUID())
involved.update(uuid_list)
app.notifyTransactionAborted(tid, involved)
# like ClientServiceHandler but read-only & only for tid <= backup_tid
class ClientReadOnlyServiceHandler(ClientServiceHandler):
def _readOnly(self, conn, *args, **kw):
conn.answer(Errors.ReadOnlyAccess(
'read-only access because cluster is in backuping mode'))
askBeginTransaction = _readOnly
askNewOIDs = _readOnly
askFinishTransaction = _readOnly
askFinalTID = _readOnly
askPack = _readOnly
abortTransaction = _readOnly
# XXX LastIDs is not used by client at all, and it requires work to determine
# last_oid up to backup_tid, so just make it non-functional for client.
askLastIDs = _readOnly
# like in MasterHandler but returns backup_tid instead of last_tid
def askLastTransaction(self, conn):
assert self.app.backup_tid is not None # we are in BACKUPING mode
backup_tid = self.app.pt.getBackupTid(min)
conn.answer(Packets.AnswerLastTransaction(backup_tid))
| gpl-2.0 | 2,060,158,760,011,242,200 | 39.701987 | 81 | 0.642857 | false |
silinternational/idp-pw-ui | selenium-test/tests/test_se.py | 1 | 7262 | from .base_test import *
import time
@on_platforms(browsers)
class SeTest(BaseTest):
def assertTextByTag(self, tag, text):
max_attempts = 60
while (max_attempts > 0):
try:
name = self.driver.find_element_by_tag_name(tag)
assert text in name.text
return True
            except Exception:
time.sleep(1)
max_attempts -= 1
raise AssertionError
def assertTextById(self, html_id, text):
max_attempts = 60
while (max_attempts > 0):
try:
name = self.driver.find_element_by_id(html_id)
assert text in name.text
return True
            except Exception:
time.sleep(1)
max_attempts -= 1
raise AssertionError
def clickButtonById(self, html_id):
max_attempts = 60
while (max_attempts > 0):
try:
self.driver.find_element_by_id(html_id).click()
return True
            except Exception:
time.sleep(1)
max_attempts -= 1
raise AssertionError
def clickButtonByCssSelector(self, css_selector, index):
max_attempts = 60
while (max_attempts > 0):
try:
self.driver.find_elements_by_css_selector(css_selector)[index].click()
return True
            except Exception:
time.sleep(1)
max_attempts -= 1
raise AssertionError
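    # The helpers above all follow the same polling pattern: retry once per
    # second for up to 60 seconds, return on the first success, and raise
    # AssertionError only after the timeout. This absorbs page-load and
    # animation delays without sprinkling explicit waits through the tests.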
@classmethod
def setup_class(cls):
BaseTest.setup_class()
global profileUrl
global forgotUrl
profileUrl = os.environ.get('PROFILE_URL', 'http://localhost:9000/#/profile')
forgotUrl = os.environ.get('FORGOT_URL', 'http://localhost:9000/#/forgot')
# TEST - CHANGE PASSWORD
def test_changePassword(self):
self.driver.get(profileUrl)
self.clickButtonById('change-btn')
self.assertTextByTag('h2', 'Change password')
element = self.driver.find_element_by_id("password")
element.send_keys("askldjfiaweurxvk234")
element = self.driver.find_element_by_id("pwagain")
element.send_keys("askldjfiaweurxvk234")
self.clickButtonById('change-btn')
self.assertTextById('successful-header', 'Update successful')
self.clickButtonById('ok-btn')
self.assertTextById('name-header', 'Lorem Ipsum')
self.clickButtonById('change-btn')
self.clickButtonById('cancel-btn')
self.assertTextById('name-header', 'Lorem Ipsum')
# TEST - RESET PASSWORD (USE ALTERNATE METHODS)
def test_resetPassword1(self):
self.driver.get(forgotUrl)
self.assertTextById('forgot-description', 'ACME Inc')
self.assertTextById('hint-label', 'Acme username, ex: first_last')
element = self.driver.find_element_by_id("username")
element.send_keys("username1")
self.clickButtonById('continue-btn')
self.assertTextById('reset-header', 'Password reset email sent')
self.clickButtonById('methods-btn')
self.assertTextByTag('h2', 'Alternate verification')
self.clickButtonByCssSelector('button.md-icon-button', 0)
self.clickButtonById('close-dialog')
self.clickButtonByCssSelector('button.md-icon-button', 0)
self.assertTextById('verification-header', 'Verification code sent')
element = self.driver.find_element_by_name("verificationCode")
element.send_keys("123")
self.clickButtonById('verify-btn')
self.assertTextByTag('h2', 'Change password')
# TEST - RESET PASSWORD (RESEND)
def test_resetPassword2(self):
self.driver.get(forgotUrl)
self.assertTextById('forgot-description', 'ACME Inc')
element = self.driver.find_element_by_id("username")
element.send_keys("username1")
self.clickButtonById('continue-btn')
self.assertTextById('reset-header', 'Password reset email sent')
self.clickButtonById('resend-btn')
self.assertTextById('reset-header', 'Password reset email sent')
# TEST - RESET PASSWORD
def test_resetPassword3(self):
self.driver.get(forgotUrl)
self.assertTextById('forgot-description', 'ACME Inc')
element = self.driver.find_element_by_id("username")
element.send_keys("username1")
self.clickButtonById('continue-btn')
self.assertTextById('reset-header', 'Password reset email sent')
self.clickButtonById('done-btn')
self.assertTextById('name-header', 'Lorem Ipsum')
# TEST - ADD RECOVERY METHOD (EMAIL)
def test_addEmailRecoveryMethod(self):
self.driver.get(profileUrl)
self.clickButtonById('add-btn')
self.assertTextByTag('h2', 'Add recovery method')
element = self.driver.find_element_by_id("email")
element.send_keys("[email protected]")
time.sleep(1)
self.clickButtonById('add-btn')
self.assertTextByTag('h2', 'Verify code')
self.clickButtonById('resend-btn')
self.assertTextByTag('h2', 'Verify code')
element = self.driver.find_element_by_name("verificationCode")
element.send_keys("123")
self.clickButtonById('verify-btn')
self.assertTextById('successful-header', 'Update successful')
self.clickButtonById('ok-btn')
self.assertTextById('name-header', 'Lorem Ipsum')
self.clickButtonById('add-btn')
self.clickButtonById('cancel-btn')
self.assertTextById('name-header', 'Lorem Ipsum')
# TEST - ADD RECOVERY METHOD (PHONE)
def test_addPhoneRecoveryMethod(self):
self.driver.get(profileUrl)
self.clickButtonById('add-btn')
self.assertTextByTag('h2', 'Add recovery method')
self.driver.find_elements_by_tag_name('md-radio-button')[1].click()
element = self.driver.find_element_by_id("phone")
element.send_keys("7777777777")
self.clickButtonById('add-btn')
self.assertTextByTag('h2', 'Verify code')
element = self.driver.find_element_by_name("verificationCode")
element.send_keys("123")
self.clickButtonById('verify-btn')
self.assertTextById('successful-header', 'Update successful')
self.clickButtonById('ok-btn')
self.assertTextById('name-header', 'Lorem Ipsum')
# TEST - DELETE RECOVERY METHOD
def test_deleteRecoveryMethod(self):
self.driver.get(profileUrl)
self.clickButtonByCssSelector('button.md-icon-button', 0)
self.assertTextById('sure-header', 'Are you sure?')
self.driver.find_element_by_css_selector('button.md-warn').click()
self.assertTextById('name-header', 'Lorem Ipsum')
self.clickButtonByCssSelector('button.md-icon-button', 0)
self.assertTextById('sure-header', 'Are you sure?')
self.driver.find_element_by_css_selector('button.md-accent').click()
self.assertTextById('name-header', 'Lorem Ipsum')
# TEST - HELP CENTER
def test_helpCenter(self):
self.driver.get(profileUrl)
self.clickButtonByCssSelector('a.md-icon-button', 0)
self.assertTextById('help-header', 'Help center')
if __name__ == '__main__':
unittest.main()
| mit | -1,470,806,640,687,275,300 | 39.797753 | 86 | 0.626274 | false |
alex-dot/upwdchg | backend/UPwdChg/Util/__init__.py | 2 | 1247 | #!/usr/bin/env python
# -*- mode:python; tab-width:4; c-basic-offset:4; intent-tabs-mode:nil; -*-
# ex: filetype=python tabstop=4 softtabstop=4 shiftwidth=4 expandtab autoindent smartindent
#
# Universal Password Changer (UPwdChg)
# Copyright (C) 2014-2018 Cedric Dufour <http://cedric.dufour.name>
# Author: Cedric Dufour <http://cedric.dufour.name>
#
# The Universal Password Changer (UPwdChg) is free software:
# you can redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation, Version 3.
#
# The Universal Password Changer (UPwdChg) is distributed in the hope
# that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License for more details.
#
# SPDX-License-Identifier: GPL-3.0
# License-Filename: LICENSE/GPL-3.0.txt
#
#------------------------------------------------------------------------------
# DEPENDENCIES
#------------------------------------------------------------------------------
# UPwdChg
from .upwdchg_token import Token, TokenMain
from .upwdchg_process import Process, ProcessMain
from .upwdchg_daemon import Daemon, DaemonMain
| gpl-3.0 | -1,586,562,051,378,027,300 | 39.225806 | 91 | 0.668805 | false |
Code4SA/municipal-data | bin/materialised_views.py | 1 | 12424 | """
A script to build a set of files of materialised views of the data presented
in municipality profiles on the Municipal Money website.
Municipality-specific profile data is stored in municipality-specific files
since producing them takes a lot of time with many queries against the API.
Storing municipality-specific data separately from comparisons to other
municipalities based on this data (e.g. medians, number of similar
municipalities in norm bounds) allows quick iteration on the latter without
recalculating muni-specifics from the API each time.
By storing this data in files instead of a database, version control helps to
show what changed as the code changes and avoids unintended changes to
calculations. It also allows deploying template and data changes synchronously
and avoids the data/code structure mismatch that could occur if the data were
in a database and not upgraded during deployment - potentially leading to
downtime.
Keeping this script separate from the Municipal Money website django app means
this data can be recalculated without a more complex environment setup.
"""
import sys
sys.path.append('.')
from collections import defaultdict
from itertools import groupby
from scorecard.profile_data import (
APIData,
MuniApiClient,
Demarcation,
get_indicators,
get_indicator_calculators,
)
import argparse
import json
API_URL = 'https://municipaldata.treasury.gov.za/api'
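# Typical usage (run in this order, since the later steps read the stored
# profiles):
#   python bin/materialised_views.py --profiles-from-api
#   python bin/materialised_views.py --calc-medians
#   python bin/materialised_views.py --calc-rating-counts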
def main():
parser = argparse.ArgumentParser(
description='Tool to dump the materialised views of the municipal finance data used on the Municipal Money website.')
parser.add_argument(
'--api-url',
help='API URL to use. Default: ' + API_URL)
command_group = parser.add_mutually_exclusive_group(required=True)
command_group.add_argument(
'--profiles-from-api',
action='store_true',
help='Fetch profile data from API, generate and store profiles.')
command_group.add_argument(
'--calc-medians',
action='store_true',
help='Calculate medians from stored profiles and store.')
command_group.add_argument(
'--calc-rating-counts',
action='store_true',
help='Calculate the number of items with each rating from stored profiles and store.')
parser.add_argument(
'--print-sets',
action='store_true',
help='Print the distribution sets')
parser.add_argument(
'--skip',
nargs='?',
default=0,
help='The number of municipalities to skip')
args = parser.parse_args()
if args.api_url:
api_url = args.api_url
else:
api_url = API_URL
if args.profiles_from_api:
generate_profiles(args, api_url)
elif args.calc_medians:
calculate_medians(args, api_url)
elif args.calc_rating_counts:
calculate_rating_counts(args, api_url)
def generate_profiles(args, api_url):
api_client = MuniApiClient(api_url)
munis = get_munis(api_client)
for muni in munis[int(args.skip):]:
demarcation_code = muni.get('municipality.demarcation_code')
api_data = APIData(api_client.API_URL, demarcation_code, client=api_client)
api_data.fetch_data()
indicators = get_indicators(api_data)
profile = {
'mayoral_staff': api_data.mayoral_staff(),
'muni_contact': api_data.muni_contact(),
'audit_opinions': api_data.audit_opinions(),
'indicators': indicators,
'demarcation': Demarcation(api_data).as_dict(),
}
filename = "scorecard/materialised/profiles/%s.json" % demarcation_code
with open(filename, 'w', encoding="utf8") as f:
json.dump(profile, f, sort_keys=True, indent=4, separators=(',', ': '))
def calculate_medians(args, api_url):
api_client = MuniApiClient(api_url)
munis = get_munis(api_client)
for muni in munis:
demarcation_code = muni.get('municipality.demarcation_code')
filename = "scorecard/materialised/profiles/%s.json" % demarcation_code
with open(filename, 'r') as f:
profile = json.load(f)
indicators = profile['indicators']
muni.update(indicators)
nat_sets, nat_medians = calc_national_medians(munis)
prov_sets, prov_medians = calc_provincial_medians(munis)
if args.print_sets:
print("Indicator value sets by MIIF category nationally")
print(json.dumps(nat_sets, sort_keys=True, indent=4, separators=(',', ': ')))
print
print("Indicator value sets by MIIF category and province")
print(json.dumps(prov_sets, sort_keys=True, indent=4, separators=(',', ': ')))
# write medians
filename = "scorecard/materialised/indicators/distribution/median.json"
medians = {
'provincial': prov_medians,
'national': nat_medians,
}
with open(filename, 'w', encoding="utf8") as f:
json.dump(medians, f, sort_keys=True, indent=4, separators=(',', ': '))
def calc_national_medians(munis):
nat_sets = get_national_miif_sets(munis)
nat_medians = defaultdict(lambda: defaultdict(dict))
# calculate national median per MIIF category and year for each indicator
for name in nat_sets.keys():
for dev_cat in nat_sets[name].keys():
for year in nat_sets[name][dev_cat].keys():
results = [period['result'] for period in nat_sets[name][dev_cat][year]]
nat_medians[name][dev_cat][year] = median(results)
return nat_sets, nat_medians
def calc_provincial_medians(munis):
prov_sets = get_provincial_miif_sets(munis)
prov_medians = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
# calculate provincial median per province, MIIF category and year for each indicator
for name in prov_sets.keys():
for prov_code in prov_sets[name].keys():
for dev_cat in prov_sets[name][prov_code].keys():
for year in prov_sets[name][prov_code][dev_cat].keys():
results = [period['result'] for period in prov_sets[name][prov_code][dev_cat][year]]
prov_medians[name][prov_code][dev_cat][year] = median(results)
return prov_sets, prov_medians
def median(items):
sorted_items = sorted(items)
count = len(sorted_items)
if count % 2 == 1:
# middle item of odd set is floor of half of count
return sorted_items[count//2]
else:
# middle item of even set is mean of middle two items
return (sorted_items[(count-1)//2] + sorted_items[(count+1)//2])/2.0
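# e.g. median([3, 1, 2]) == 2 and median([4, 1, 3, 2]) == 2.5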
def calculate_rating_counts(args, api_url):
api_client = MuniApiClient(api_url)
munis = get_munis(api_client)
for muni in munis:
demarcation_code = muni.get('municipality.demarcation_code')
filename = "scorecard/materialised/profiles/%s.json" % demarcation_code
with open(filename, 'r') as f:
profile = json.load(f)
indicators = profile['indicators']
muni.update(indicators)
nat_sets, nat_rating_counts = calc_national_rating_counts(munis)
prov_sets, prov_rating_counts = calc_provincial_rating_counts(munis)
if args.print_sets:
print("Indicator value sets by MIIF category nationally")
print(json.dumps(nat_sets, sort_keys=True, indent=4, separators=(',', ': ')))
print
print("Indicator value sets by MIIF category and province")
print(json.dumps(prov_sets, sort_keys=True, indent=4, separators=(',', ': ')))
# write rating counts
filename = "scorecard/materialised/indicators/distribution/rating_counts.json"
rating_counts = {
'provincial': prov_rating_counts,
'national': nat_rating_counts,
}
with open(filename, 'w', encoding="utf8") as f:
json.dump(rating_counts, f, sort_keys=True, indent=4, separators=(',', ': '))
def calc_national_rating_counts(munis):
"""
Calculate the number of munis with each norm rating per MIIF category
and year for each indicator
"""
nat_sets = get_national_miif_sets(munis)
nat_rating_counts = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
rating_key = lambda period: period['rating']
for name in nat_sets.keys():
for dev_cat in nat_sets[name].keys():
for year in nat_sets[name][dev_cat].keys():
rating_sorted = sorted(nat_sets[name][dev_cat][year], key=rating_key)
for rating, rating_group in groupby(rating_sorted, rating_key):
nat_rating_counts[name][dev_cat][year][rating] = len(list(rating_group))
return nat_sets, nat_rating_counts
def calc_provincial_rating_counts(munis):
"""
Calculate the number of munis with each norm rating per province,
MIIF category and year for each indicator
"""
prov_sets = get_provincial_miif_sets(munis)
prov_rating_counts = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(dict))))
rating_key = lambda period: period['rating']
for name in prov_sets.keys():
for prov_code in prov_sets[name].keys():
for dev_cat in prov_sets[name][prov_code].keys():
for year in prov_sets[name][prov_code][dev_cat].keys():
rating_sorted = sorted(prov_sets[name][prov_code][dev_cat][year], key=rating_key)
for rating, rating_group in groupby(rating_sorted, rating_key):
prov_rating_counts[name][prov_code][dev_cat][year][rating] = len(list(rating_group))
return prov_sets, prov_rating_counts
def get_national_miif_sets(munis):
"""
collect set of indicator values for each MIIF category and year
returns dict of the form {
'cash_coverage': {
'B1': {
'2015': [{'result': ...}]
}
}
}
"""
nat_sets = defaultdict(lambda: defaultdict(lambda: defaultdict(list)))
dev_cat_key = lambda muni: muni['municipality.miif_category']
dev_cat_sorted = sorted(munis, key=dev_cat_key)
for calculator in get_indicator_calculators(has_comparisons=True):
name = calculator.indicator_name
for dev_cat, dev_cat_group in groupby(dev_cat_sorted, dev_cat_key):
for muni in dev_cat_group:
for period in muni[name]['values']:
if period['result'] is not None:
nat_sets[name][dev_cat][period['date']].append(period)
return nat_sets
def get_provincial_miif_sets(munis):
"""
collect set of indicator values for each province, MIIF category and year
returns dict of the form {
'cash_coverage': {
'FS': {
'B1': {
'2015': [{'result': ...}]
}
}
}
}
"""
prov_sets = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(list))))
dev_cat_key = lambda muni: muni['municipality.miif_category']
dev_cat_sorted = sorted(munis, key=dev_cat_key)
prov_key = lambda muni: muni['municipality.province_code']
for calculator in get_indicator_calculators(has_comparisons=True):
name = calculator.indicator_name
for dev_cat, dev_cat_group in groupby(dev_cat_sorted, dev_cat_key):
prov_sorted = sorted(dev_cat_group, key=prov_key)
for prov_code, prov_group in groupby(prov_sorted, prov_key):
for muni in prov_group:
for period in muni[name]['values']:
if period['result'] is not None:
prov_sets[name][prov_code][dev_cat][period['date']].append(period)
return prov_sets
def get_munis(api_client):
query = api_client.api_get({'query_type': 'facts',
'cube': 'municipalities',
'fields': [
'municipality.demarcation_code',
'municipality.name',
'municipality.miif_category',
'municipality.province_code',
],
'value_label': '',
})
result = query.result()
result.raise_for_status()
body = result.json()
if body.get("total_cell_count") == body.get("page_size"):
raise Exception("should page municipalities")
return body.get("data")
if __name__ == "__main__":
main()
| mit | 7,838,789,445,039,154,000 | 38.44127 | 125 | 0.632325 | false |
damoxc/vmail | vmail/tests/test_core.py | 1 | 6682 | import time
from nose.tools import raises
from vmail.error import DomainNotFoundError
from vmail.error import ForwardNotFoundError
from vmail.error import UserNotFoundError
from vmail.error import VmailCoreError
from vmail.tests import test
class TestCoreSMTP(test.DaemonUnitTest):
def test_authenticate(self):
self.assertTrue(self.client.core.authenticate('[email protected]', 'daisychain').get())
def test_authenticate_fails(self):
self.assertFalse(self.client.core.authenticate('[email protected]', 'password').get())
def test_blocking_host(self):
self.assertNone(self.client.core.block_host('10.20.30.40').get())
result = self.client.core.check_host('10.20.30.40').get()
self.assertNotNone(result)
(action, comment) = result
self.assertEqual(action, 'DENY_DISCONNECT')
def test_blocking_host_already_exists(self):
with self.assertRaises(VmailCoreError) as _:
self.client.core.block_host('43.52.175.8').get()
def test_check_host(self):
result = self.client.core.check_host('97.38.123.17').get()
(action, comment) = result
self.assertEqual(action, 'DENY_DISCONNECT')
self.assertEqual(comment, 'Suspected spam source')
def test_check_host_unknown(self):
self.assertNone(self.client.core.check_host('1.2.3.4').get())
def test_check_whitelist(self):
self.assertTrue(self.client.core.check_whitelist('[email protected]').get())
def test_check_whitelist_unknown(self):
self.assertFalse(self.client.core.check_whitelist('[email protected]').get())
def test_get_usage_domain(self):
self.assertEqual(self.client.core.get_usage('example.com').get(), 20656946)
def test_get_usage_domain_unknown(self):
with self.assertRaises(DomainNotFoundError) as _:
self.client.core.get_usage('higglepuddle.com').get()
def test_get_usage_user(self):
self.assertEqual(self.client.core.get_usage('testing.com', 'fred').get(), 81998643)
def test_get_usage_user_unknown(self):
with self.assertRaises(UserNotFoundError) as _:
self.client.core.get_usage('higglepuddle.com', 'yankeedoodle').get()
def test_get_quota_domain(self):
self.assertEqual(self.client.core.get_quota('example.com').get(),
52428800)
def test_get_quota_domain_unknown(self):
with self.assertRaises(DomainNotFoundError) as _:
self.client.core.get_quota('higglepuddle.com').get()
def test_last_login(self):
self.assertTrue(self.client.core.last_login('[email protected]', 'imap', '1.2.3.4').get())
def test_last_login_unknown(self):
with self.assertRaises(UserNotFoundError) as _:
self.client.core.last_login('[email protected]', 'imap', '1.2.3.4').get()
def test_last_login_mixed_case(self):
self.assertTrue(self.client.core.last_login('[email protected]', 'imap', '1.2.3.4').get())
class TestCoreManagement(test.DaemonUnitTest):
def test_delete_forward(self):
self.assertNone(self.client.core.delete_forward('[email protected]').get())
with self.assertRaises(ForwardNotFoundError) as _:
self.client.core.get_forward('[email protected]').get()
def test_delete_user(self):
self.assertNone(self.client.core.delete_user('[email protected]').get())
with self.assertRaises(UserNotFoundError) as _:
self.client.core.get_user('[email protected]').get()
with self.assertRaises(VmailCoreError) as _:
self.client.core.get_vacation('[email protected]').get()
def test_delete_user_unknown(self):
with self.assertRaises(UserNotFoundError) as _:
self.client.core.delete_user('[email protected]').get()
def test_get_forward(self):
forwards = ['[email protected]']
self.assertEqual(self.client.core.get_forward('[email protected]').get(), forwards)
def test_get_forward_unknown(self):
with self.assertRaises(ForwardNotFoundError) as _:
self.client.core.get_forward('[email protected]').get()
def test_get_forwards(self):
forwards = {
u'[email protected]': [u'[email protected]'],
u'[email protected]': [u'[email protected]'],
u'[email protected]': [u'[email protected]'],
u'[email protected]': [u'[email protected]'],
u'[email protected]': [u'[email protected]']
}
self.assertEqual(self.client.core.get_forwards('example.com').get(),
forwards)
def test_get_forwards_unknown(self):
with self.assertRaises(DomainNotFoundError) as _:
self.client.core.get_forwards('higglepuddle.com').get()
def test_get_user(self):
user = self.client.core.get_user('[email protected]').get()
self.assertNotNone(user)
self.assertTrue(user['enabled'])
self.assertEqual(user['email'], '[email protected]')
def test_get_user_unknown(self):
with self.assertRaises(UserNotFoundError) as _:
self.client.core.get_user('[email protected]').get()
def test_get_vacation(self):
vacation = self.client.core.get_vacation('[email protected]').get()
self.assertNotNone(vacation)
self.assertEqual(vacation['email'], '[email protected]')
def test_get_vacation_unicode(self):
vacation = self.client.core.get_vacation('[email protected]').get()
self.assertNotNone(vacation)
self.assertEqual(vacation['email'], '[email protected]')
# def test_get_vacation_missing(self):
# with self.assertRaises(VmailCoreError) as _:
# print self.client.core.get_vacation('[email protected]').get()
def test_get_vacation_unknown(self):
with self.assertRaises(VmailCoreError) as _:
self.client.core.get_vacation('[email protected]').get()
def test_save_forward(self):
source = '[email protected]'
destinations = ['[email protected]']
def on_added_forward(source_):
self.assertEqual(source_, source)
return self.client.core.get_forward(source_
).addCallback(self.assertEqual, destinations
).addErrback(self.fail)
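        # NOTE: on_added_forward above is never attached as a callback, so the
        # save below is only verified by not raising.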
self.client.core.save_forward(1, source, destinations).get()
def test_save_forward_unknown(self):
source = '[email protected]'
destinations = ['[email protected]']
with self.assertRaises(DomainNotFoundError) as _:
self.client.core.save_forward(5, source, destinations).get() | gpl-3.0 | 1,304,521,197,937,086,000 | 40 | 97 | 0.658186 | false |
jdsika/TUM_HOly | openrave/sandbox/debugik.py | 1 | 53228 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# random code that helps with debugging/testing the python interfaces and examples
# this is not meant to be run by normal users
from __future__ import with_statement # for python 2.5
__copyright__ = 'Copyright (C) 2009-2010'
__license__ = 'Apache License, Version 2.0'
from openravepy import *
import openravepy.examples
from openravepy.interfaces import *
from numpy import *
import numpy,time
# extra imports required by the routines below; sympy's star import
# deliberately comes after numpy's so the symbolic Symbol/Poly/zeros/eye win
from sympy import *
from sympy import mpmath  # mpmath was bundled with older sympy (an assumption for this era of code)
from itertools import izip
def solvefailed1(self):
coupledvars = RightEquations[0].symbols[0:2] + RightEquations[0].symbols[4:]
leftvars = RightEquations[0].symbols[2:4]
unknownmonoms = [(1,0,1,0),(1,0,0,1),(0,1,1,0),(0,1,0,1),(1,0,0,0),(0,1,0,0)]
unknownvars = [Symbol('dummy%d'%i) for i in range(len(unknownmonoms))]
unknownsubs = []
for var,monom in izip(unknownvars,unknownmonoms):
unknownsubs.append((var,Poly(S.Zero,*coupledvars).add_term(S.One,monom).as_basic()))
leftsideeqs = []
rightsideeqs = []
for peq in RightEquations:
peq = Poly(peq,*coupledvars)
leftside = Poly(S.Zero,*unknownvars)
rightside = S.Zero
for c,m in peq.iter_terms():
if m in unknownmonoms:
leftside += c.subs(psubs)*unknownvars[unknownmonoms.index(m)]
else:
rightside -= c.subs(psubs)*Poly(S.Zero,*coupledvars).add_term(S.One,m).as_basic()
leftsideeqs.append(leftside)
rightsideeqs.append(rightside)
#reducedeqs2 = self.reduceBothSides(leftsideeqs,rightsideeqs,maxsymbols=32,usesymbols=False)
Mtemp = Matrix(6,6,[S.Zero]*36)
for ipeq,peq in enumerate(leftsideeqs):
for imonom in range(len(unknownmonoms)):
monom = [S.Zero]*len(unknownmonoms)
monom[imonom] = 1
Mtemp[ipeq,imonom] = peq.coeff(*monom)
P,L,DD,U= self.LUdecompositionFF(Mtemp,*RightEquations[0].symbols[2:])
finalnums = S.One
finaldenoms = S.One
for i in range(6):
n,d = self.recursiveFraction(L[i,i]*U[i,i]/DD[i,i])
finalnums *= n
finaldenoms *= d
q,r = div(finalnums,finaldenoms,RightEquations[0].symbols[2:])
q=q.subs(leftvars[1]**2,1-leftvars[0]**2).expand()
x=Symbol('x')
soleq=self.solveHighDegreeEquationHalfAngle(q.subs([(leftvars[0],cos(x)),(leftvars[1],sin(x))]),x)
coeffs = []
for i in range(soleq.poly.degree,-1,-1):
coeffs.append(soleq.poly.coeff(i))
roots = mpmath.polyroots(coeffs)
sols = [2*atan(root) for root in roots]
return soleq
def solvefailed2(self):
testeqs = [eq.as_basic().subs(psubs+invsubs) for eq in RightEquations]
testeqs2=self.solveSingleVariableLinearly(testeqs,usedvars[0],usedvars[1:],maxnumeqs=10,douniquecheck=True)
testeqs3 = [Poly(eq.subs(symbolsubs),*symbols[2:]) for eq in testeqs2]
# choose which leftvar can determine the singularity of the following equations!
testeqs4 = []
for ipeq,peq in enumerate(testeqs3):
maxdenom = [0]*(len(testeqs3[0].symbols)/2)
for monoms in peq.iter_monoms():
for i in range(len(maxdenom)):
maxdenom[i] = max(maxdenom[i],monoms[2*i]+monoms[2*i+1])
eqnew = S.Zero
#if ipeq >= 4: maxdenom[-1] = 0;
for c,monoms in peq.iter_terms():
term = c
for i in range(len(testeqs3[0].symbols)):
num,denom = fraction(dummysubs[i][1])
term *= num**monoms[i]
# the denoms for 0,1 and 2,3 are the same
for i in range(len(maxdenom)):
denom = fraction(dummysubs[2*i][1])[1]
term *= denom**(maxdenom[i]-monoms[2*i]-monoms[2*i+1])
eqnew += simplify(term)
testeqs4.append(Poly(eqnew,*dummys[0:2]))
# testeqs4[0] has 81 monomials!
def solvefailed3(self):
if len(leftsideeqs0) >= 2:
# can solve linearly!
p0 = leftsideeqs0[0]
p1 = leftsideeqs0[1]
M = Matrix(2,3,[p0.coeff(1,0,0,0),p0.coeff(0,1,0,0),p0.coeff(0,0,0,0)-rightsideeqs0[0].as_basic(),p1.coeff(1,0,0,0),p1.coeff(0,1,0,0),p1.coeff(0,0,0,0)-rightsideeqs0[1].as_basic()])
partialsolution = [-M[1,1]*M[0,2]+M[0,1]*M[1,2],M[1,0]*M[0,2]-M[0,0]*M[1,2],M[0,0]*M[1,1]-M[0,1]*M[1,0]]
for i in range(3):
partialsolution[i] = partialsolution[i].expand()
for i in range(len(leftsideeqs)):
left = leftsideeqs[i]
right = rightsideeqs[i]
c = left.coeff(1,0,0,0)
if c != S.Zero:
left = Poly(partialsolution[2]*left.sub_term(c,(1,0,0,0)).as_basic(),*left.symbols)
right = Poly(partialsolution[2]*right.as_basic() - c*partialsolution[0],*right.symbols)
c = left.coeff(0,1,0,0)
if c != S.Zero:
left = Poly(partialsolution[2]*left.sub_term(c,(0,1,0,0)).as_basic(),*left.symbols)
right = Poly(partialsolution[2]*right.as_basic() - c*partialsolution[1],*right.symbols)
leftsideeqs[i] = left
rightsideeqs[i] = right
unknownmonoms = [(1, 0, 1, 0), (1, 0, 0, 1), (0, 1, 1, 0), (0, 1, 0, 1)]
Mtemp = Matrix(4,4,[S.Zero]*16)
for ipeq,peq in enumerate(leftsideeqs):
for imonom in range(len(unknownmonoms)):
monom = [S.Zero]*len(unknownmonoms)
monom[imonom] = 1
Mtemp[ipeq,imonom] = peq.coeff(*unknownmonoms[imonom])
#P,L,DD,U= self.LUdecompositionFF(Mtemp,*Tee[0:12])
def simplifyPolynomial(self,peq,leftvar,symbolgen,localsymbols,localsymbols_reduced=None,localsymbols_mult=None,Tee=None):
neweq = Poly(S.Zero,*peq.symbols)
if Tee is not None:
othervars = list(Tee[0:3,3])
if leftvar is not None:
othervars.append(leftvar)
for c,m in peq.iter_terms():
if c != S.Zero and not c.is_number:
if Tee is not None:
c = self.simplifyTransform(c,Tee,othervars=othervars).expand()
if leftvar is None:
leftpolyterms = [(c,None)]
else:
leftpoly = Poly(c,leftvar)
leftpolyterms = [t for t in leftpoly.iter_terms()]
neweq2=S.Zero
for c2,m2 in leftpolyterms:
if localsymbols_reduced is not None:
c2clean,c2common = self.removecommonexprs(c2,returncommon=True,onlynumbers=True)
index = self.getCommonExpression(localsymbols_reduced,c2clean)
if index is None:
index = self.getCommonExpression(localsymbols_reduced,-c2clean)
if index is not None:
c2common = -c2common
if index is not None:
v = localsymbols[index][0]*c2common/localsymbols_mult[index]
else:
index = None
if index is None:
v=symbolgen.next()
localsymbols.append((v,c2))
if localsymbols_reduced is not None:
localsymbols_reduced.append(c2clean)
localsymbols_mult.append(c2common)
if m2 is None:
neweq2 += v
else:
neweq2 += v * leftvar**m2[0]
neweq = neweq.add_term(neweq2,m)
else:
neweq = neweq.add_term(c,m)
return neweq
def solveDialytically2(self,reducedeqs,ileftvar):
allmonoms = set()
for peq in reducedeqs:
for m in peq.iter_monoms():
mlist = list(m)
degree=mlist.pop(ileftvar)
allmonoms.add(tuple(mlist))
mlist[0] += 1
allmonoms.add(tuple(mlist))
allmonoms = list(allmonoms)
allmonoms.sort()
assert(len(allmonoms)<=2*len(reducedeqs))
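    # Dialytic elimination: each equation contributes two rows, once as-is and
    # once multiplied by the first remaining variable (the mlist[0] += 1
    # below), so the doubled system spans the monomial basis and the left
    # variable appears only inside the matrix coefficients.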
leftvar = reducedeqs[0].symbols[ileftvar]
Mall = zeros((2*len(reducedeqs),len(allmonoms)))
for i,peq in enumerate(reducedeqs):
for c,m in peq.iter_terms():
mlist = list(m)
degree = mlist.pop(ileftvar)
c = c*leftvar**degree
Mall[len(reducedeqs)+i,allmonoms.index(tuple(mlist))] += c
mlist[0] += 1
Mall[i,allmonoms.index(tuple(mlist))] += c
Mconst = Matrix(len(newreducedeqs),len(allmonoms),[S.Zero]*len(newreducedeqs)*len(allmonoms))
for i in range(len(newreducedeqs)):
for j in range(len(allmonoms)):
Mconst[i,j] = Poly(Mall[i,j],leftvar).coeff(2)
alpha=Symbol('alpha')
beta=Symbol('beta')
x=[Symbol('x%d'%i) for i in range(11)]
ns0=[Symbol('ns0_%d'%i) for i in range(11)]
ns1=[Symbol('ns1_%d'%i) for i in range(11)]
final = [x[i]+ns0[i]*alpha+ns1[i]*beta for i in range(11)]
nexteqs = [final[0]*final[0]-final[1],final[0]*final[2]-final[3],final[1]*final[2]-final[4],final[2]*final[2]-final[5],final[0]*final[5]-final[6],final[1]*final[5]-final[7],final[2]*final[5]-final[8],final[0]*final[8]-final[9],final[1]*final[8]-final[10]]
polyeqs = [Poly(eq,alpha,beta) for eq in nexteqs]
newmonoms = [(2,0),(1,1),(0,2),(1,0),(0,1)]
Mnew = Matrix(len(polyeqs),len(newmonoms),[S.Zero]*(len(polyeqs)*len(newmonoms)))
Mconst = Matrix(len(polyeqs),1,[S.Zero]*len(polyeqs))
for i in range(len(polyeqs)):
Mconst[i] = -polyeqs[i].coeff(0,0)
for j in range(len(newmonoms)):
Mnew[i,j] = polyeqs[i].coeff(*newmonoms[j])
    # should check if the determinant vanishes for all values of the polynomial variable
# set determinant of M = 0 and solve for leftvar
#characteristic_poly = Mall.det_bareis()
raise self.CannotSolveError('dialytic solution not implemented')
def solveDialytically3(self,reducedeqs,ileftvar):
allmonoms = set()
for peq in reducedeqs:
for m in peq.iter_monoms():
mlist = list(m)
degree=mlist.pop(ileftvar)
allmonoms.add(tuple(mlist))
mlist[0] += 1
allmonoms.add(tuple(mlist))
allmonoms = list(allmonoms)
allmonoms.sort()
assert(len(allmonoms)<=2*len(reducedeqs))
leftvar = reducedeqs[0].symbols[ileftvar]
A = [zeros((2*len(reducedeqs),len(allmonoms))),zeros((2*len(reducedeqs),len(allmonoms))),zeros((2*len(reducedeqs),len(allmonoms)))]
for i,peq in enumerate(reducedeqs):
for c,m in peq.iter_terms():
mlist = list(m)
degree = mlist.pop(ileftvar)
A[degree][len(reducedeqs)+i,allmonoms.index(tuple(mlist))] += c
mlist[0] += 1
A[degree][i,allmonoms.index(tuple(mlist))] += c
Av = [A[i].subs(psubs).evalf() for i in range(3)]
M = zeros((24,24))
M[0:12,12:24] = eye(12)
M[12:24,0:12] = -Av[2].inv()*Av[0]
M[12:24,12:24] = -Av[2].inv()*Av[1]
w,v = linalg.eig(numpy.array(numpy.array(M),float64))
index = w[12]
v[:,12]
lf = [1.0,-1.0,1.0,1.0]
Av = [lf[1]*lf[1]*Av[2] + lf[1]*lf[3]*Av[1] + lf[3]*lf[3]*Av[0],
2*lf[0]*lf[1]*Av[2] + (lf[0]*lf[3]+lf[1]*lf[2])*Av[1] + 2*lf[2]*lf[3]*Av[0],
lf[0]*lf[0]*Av[2]+lf[0]*lf[2]*Av[1]+lf[2]*lf[2]*Av[0]]
def computeDixonResultant(self,polyeqs,othersolvedvars):
"""Computes the dixon resultant of the polynomial equations and find a non-singular sub-matrix such that its determinant can be used to get polynomial in one of the variables.
See:
Deepak Kapur, Tushar Saxena, and Lu Yang. "Algebraic and geometric reasoning using Dixon resultants". ISSAC '94 Proceedings of the international symposium on Symbolic and algebraic computation .
"""
allmonoms = set()
orgsymbols = polyeqs[0].symbols
dixsymbols = [Symbol('d_%s'%s.name) for s in orgsymbols]
dixmaxmonoms = [0]*len(orgsymbols)
for peq in polyeqs:
allmonoms = allmonoms.union(set(peq.monoms))
for m in peq.iter_monoms():
for j in range(len(dixmaxmonoms)):
dixmaxmonoms[j] = max(dixmaxmonoms[j],m[j])
allmonoms = list(allmonoms)
allmonoms.sort()
allmonoms.reverse() # this should put (0) at the end
assert(len(orgsymbols) < len(polyeqs))
neweqs = []
dixonsymbolgen = symbolgen = cse_main.numbered_symbols('dconst')
dixonsymbols = []
for eq in polyeqs:
neweq = Poly(S.Zero,*orgsymbols)
for c,m in eq.iter_terms():
v = symbolgen.next()
dixonsymbols.append((v,c))
neweq = neweq.add_term(v,m)
neweqs.append(neweq)
M = Matrix(len(orgsymbols)+1,len(orgsymbols)+1,[S.Zero]*((len(orgsymbols)+1)**2))
for isymbol in range(len(orgsymbols)+1):
subs = [(orgsymbols[i],dixsymbols[i]) for i in range(0,isymbol)]
for i in range(len(orgsymbols)+1):
M[isymbol,i] = neweqs[i].as_basic().subs(subs)
allsymbols = list(dixsymbols)+list(orgsymbols)
localvars = list(orgsymbols)+list(dixsymbols)+[s for s,v in dixonsymbols]
det = self.det_bareis(M,*localvars)
polydixon_raw = Poly(det,*dixsymbols)
quotient = S.One
for sym,dsym in izip(orgsymbols,dixsymbols):
quotient *= (sym-dsym)
polydixon,r = div(polydixon_raw,quotient)
polydixon = Poly(polydixon,*dixsymbols)
newmonoms = set()
for c,m in polydixon.iter_terms():
p = Poly(c,*orgsymbols)
newmonoms = newmonoms.union(set(p.monoms))
newmonoms = list(newmonoms)
newmonoms.sort()
newmonoms.reverse() # this should put (0) at the end
dixonsymbolsvalues = [(s,v.subs(newvalsubs+psubs)) for s,v in dixonsymbols]
Mdixon = Matrix(len(polydixon.monoms),len(newmonoms),[S.Zero]*(len(polydixon.monoms)*len(newmonoms)))
i = 0
for c,m in polydixon.iter_terms():
p = Poly(c,*orgsymbols)
for c2,m2 in p.iter_terms():
Mdixon[i,newmonoms.index(m2)] = c2.subs(dixonsymbols).expand()
i += 1
localvars2 = [var for var in self.pvars if self.has_any_symbols(Mdixon,var)]
for othersolvedvar in othersolvedvars:
for var in self.Variable(othersolvedvar).vars:
if self.has_any_symbols(Mdixon,var):
localvars2.append(var)
dfinal = self.det_bareis(Mdixon,*localvars2)
s=linalg.svd(Mdixon.subs(leftvar,0),compute_uv=0)
rank = numpy.sum(numpy.greater(abs(s),1e-14))
Mdettest = Matrix(Mdixon.shape[0],Mdixon.shape[1]-1,[S.Zero]*(Mdixon.shape[0]*(Mdixon.shape[1]-1)))
for j in range(Mdixon.shape[1]):
Mtemp = Mdixon.subs(leftvar,0)
Mtemp.col_del(j)
s=linalg.svd(Mtemp,compute_uv=0)
if numpy.sum(numpy.greater(abs(s),1e-14)) < rank:
print j
Mvalues = Mdixon.subs(dixonsymbolsvalues)
for i in range(Mvalues.shape[0]):
for j in range(Mvalues.shape[1]):
Mvalues[i,j] = Mvalues[i,j].expand()
dfinal = Mdixon.det_bareis()
solutionvector = Matrix(len(newmonoms),1,[Poly(S.Zero,*orgsymbols).add_term(S.One,m) for m in newmonoms])
def ComputeMatrix(self,eqs,allsubs,symbols=None):
if symbols is None:
symbols = eqs[0].symbols
unknownmonoms = list(Poly(eqs[0],*symbols).monoms)
unknownmonoms.sort()
unknownmonoms.pop(0) # remove 0,0
unknownvars = [Symbol('x%d'%i) for i in range(len(unknownmonoms))]
Mtemp = Matrix(len(eqs),len(unknownmonoms),[S.Zero]*(len(eqs)*len(unknownmonoms)))
for ipeq,peq in enumerate(eqs):
peq = Poly(peq,*symbols)
for im,m in enumerate(unknownmonoms):
Mtemp[ipeq,im] = peq.coeff(*m).subs(allsubs)
return Mtemp
def ComputeDeterminant(self,eqs,allsubs,symbols=None):
Mtemp = self.ComputeMatrix(eqs,allsubs,symbols)
Mtemp2 = Mtemp * Mtemp.transpose()
return Mtemp2.det()
def characteristic_poly(self,eqs):
#eqs=[eq.subs(psubs) for eq in reducedeqs[0:5]]
x=eqs[0].symbols[0]
ipower = 0
remainders = []
while True:
print ipower
changed = True
f = Poly(x**ipower,*eqs[0].symbols)
while changed:
changed = False
for eq in eqs:
q,r = div(f,eq)
if q != S.Zero:
print q
changed = True
f = Poly(f,*eqs[0].symbols)
print f
remainders.append(f)
ipower += 1
def using_solvedetdialyticpoly12(self):
name = 'solvedetdialyticpoly12'
checkconsistency12=self.using_checkconsistency12()
polyroots2=self.using_polyroots(2)
if not name in self.functions:
fcode = """
/// \\brief Solves a polynomial given its evaluation is the determinant of a matrix
///
/// matcoeffs is a 3*9*6 = 162 length vector
/// every 18 coeffs describe one equation in the following order:
/// [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2), (2, 0), (2, 1), (2, 2)]
/// let A have each equation in two rows: rows 0..5 carry the 9 coefficient
/// polynomials in columns 3..11, rows 6..11 carry the same values shifted to
/// columns 0..8, with zeros elsewhere.
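/// Equivalently (a reading of the layout, not stated by the author): with
/// A0, A1, A2 the 12x12 matrices assembled from the constant, linear and
/// quadratic coefficients, the routine finds the roots of
/// det(A0 + A1*x + A2*x^2) = 0.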
static inline void %s(const IKReal* matcoeffs, IKReal* rawroots, int& numroots)
{
using std::complex;
const IKReal tol = 128.0*std::numeric_limits<IKReal>::epsilon();
const int maxsteps = 100;
const int D = 24;
const int matrixdim = 12;
complex<IKReal> roots[D];
complex<IKReal> IKFAST_ALIGNED16(A[matrixdim*matrixdim]);
// can do this since A and Areal/Breal are used at different times
IKReal* Areal = (IKReal*)&A;
IKReal* Breal = &Areal[matrixdim*matrixdim];
int ipiv[matrixdim]={0};
numroots = 0;
IKReal err[D];
roots[0] = complex<IKReal>(1,0);
    roots[1] = complex<IKReal>(0.4,0.9); // any complex number that is not a root of unity works
err[0] = 1.0;
err[1] = 1.0;
for(int i = 2; i < D; ++i) {
roots[i] = roots[i-1]*roots[1];
err[i] = 1.0;
}
int info, coeffindex;
IKReal polymultiplier=1; // should be 1/lead coeff. Needed or otherwise convergence will be slow
{
coeffindex = 0;
for(int j = 0; j < 6; ++j) {
for(int k = 0; k < 9; ++k) {
Areal[j*matrixdim+k+3] = matcoeffs[coeffindex+2];
Areal[(j+6)*matrixdim+k] = Areal[j*matrixdim+k+3];
coeffindex += 3;
}
// fill the rest with 0s!
for(int k = 0; k < 3; ++k) {
Areal[j*matrixdim+k] = Areal[(j+6)*matrixdim+k+9] = 0;
}
}
dgetrf_ (&matrixdim, &matrixdim, Areal, &matrixdim, &ipiv[0], &info);
if( info != 0 ) {
return; // failed
}
polymultiplier = ipiv[0] != 1 ? -Areal[0] : Areal[0];
for(int j = 1; j < matrixdim; ++j) {
polymultiplier *= Areal[j*matrixdim+j];
if (ipiv[j] != (j+1)) {
polymultiplier = -polymultiplier;
}
}
if( isnan(polymultiplier) || polymultiplier == 0 ) {
return;
}
polymultiplier = 1/polymultiplier;
}
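    // Root finding below is a Durand-Kerner style simultaneous iteration: each
    // candidate root is corrected by det(A(x)) (computed via LU factorization)
    // divided by the product of its distances to the other candidates, until
    // every correction drops below tol or maxsteps is exhausted.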
int step;
for(step = 0; step < maxsteps; ++step) {
bool changed = false;
for(int i = 0; i < D; ++i) {
if ( err[i] >= tol && !isinf(real(roots[i])) && !isinf(imag(roots[i])) ) {
changed = true;
// evaluate the determinant
complex<IKReal> x = roots[i], x2 = roots[i]*roots[i];
coeffindex = 0;
for(int j = 0; j < 6; ++j) {
for(int k = 0; k < 9; ++k) {
A[j*matrixdim+k+3] = matcoeffs[coeffindex]+matcoeffs[coeffindex+1]*x+matcoeffs[coeffindex+2]*x2;
A[(j+6)*matrixdim+k] = A[j*matrixdim+k+3];
coeffindex += 3;
}
// fill the rest with 0s!
A[j*matrixdim+0] = A[(j+6)*matrixdim+0+9] = 0;
A[j*matrixdim+1] = A[(j+6)*matrixdim+1+9] = 0;
A[j*matrixdim+2] = A[(j+6)*matrixdim+2+9] = 0;
}
zgetrf_ (&matrixdim, &matrixdim, A, &matrixdim, &ipiv[0], &info);
if( info != 0 ) {
continue; // failed
}
complex<IKReal> det = ipiv[0] != 1 ? -A[0] : A[0];
for(int j = 1; j < matrixdim; ++j) {
det *= A[j*matrixdim+j];
if (ipiv[j] != (j+1)) {
det = -det;
}
}
if( isnan(real(det)) ) {
continue; // failed;
}
det *= polymultiplier;
// have to divide by (1+roots[i]^2)^4 to get a 16th degree polynomial (actually this is not always the case!)
//complex<IKReal> denom = complex<IKReal>(1,0)+x2;
//denom *= denom;
//denom *= denom;
//det /= denom;
for(int j = 0; j < D; ++j) {
if( i != j ) {
if( roots[i] != roots[j] && !isinf(real(roots[j])) ) {
det /= (roots[i] - roots[j]);
}
}
}
roots[i] -= det;
err[i] = abs(det);
}
}
if( !changed ) {
break;
}
}
numroots = 0;
for(int i = 0; i < D; ++i) {
if( IKabs(imag(roots[i])) < 100*tol && IKabs(err[i]) < 100*tol ) {
// found a real solution!, now solve the linear system
IKReal curroot = real(roots[i]);
IKReal curroot2 = curroot*curroot;
coeffindex = 0;
for(int j = 0; j < 6; ++j) {
for(int k = 0; k < 9; ++k) {
IKReal x = matcoeffs[coeffindex]+matcoeffs[coeffindex+1]*curroot+matcoeffs[coeffindex+2]*curroot2;
Areal[j+matrixdim*(k+3-1)] = x;
if( k == 0 ) {
Breal[j+6] = -x;
}
else {
Areal[j+6+matrixdim*(k-1)] = x;
}
coeffindex += 3;
}
// fill the rest with 0s!
Breal[j] = 0;
Areal[j] = Areal[j+matrixdim*1] = 0;
Areal[(j+6)+matrixdim*8] = Areal[(j+6)+matrixdim*9] = Areal[(j+6)+matrixdim*10] = 0;
}
// perform LU decomposition to solve for null space and solution simultaneously
int n = matrixdim-1, nrhs=1;
dgetrf_(&matrixdim, &n, Areal, &matrixdim, &ipiv[0], &info);
if( info != 0 ) {
continue;
}
dgetrs_("No transpose", &n, &nrhs, Areal, &matrixdim, &ipiv[0], Breal, &matrixdim, &info);
if( info != 0 ) {
continue;
}
if(info == 0) {
// because Areal might have a null space, have to solve for it
int nullspacedim = 0;
for(int j = 0; j < matrixdim-1; ++j) {
if( IKabs(Areal[j+matrixdim*j]) < 1000*tol ) {
nullspacedim++;
}
}
if( nullspacedim >= 2 ) {
// have two nullspace vectors!
IKReal ns[2][matrixdim-1];
for(int ins = 0; ins < 2; ++ins) {
IKReal nsnum=1;
for(int j = matrixdim-2; j >= 0; --j) {
if( IKabs(Areal[j+matrixdim*j]) < 1000*tol ) {
ns[ins][j] = nsnum;
if( ins > 0 ) {
nsnum += 1;
}
}
else {
IKReal accum = 0;
for(int k = j+1; k < matrixdim-1; ++k) {
accum += ns[ins][k] * Areal[j+matrixdim*k];
}
ns[ins][j] = -accum/Areal[j+matrixdim*j];
}
}
}
// have to solve this with another least squares....
// [ns0_0**2, 2*ns1_0*ns0_0, ns1_0**2, -ns0_1 + 2*ns0_0*x0, -ns1_1 + 2*ns1_0*x0] [x1 - x0**2]
// [ns0_0*ns0_2, ns1_0*ns0_2 + ns1_2*ns0_0, ns1_0*ns1_2, -ns0_3 + ns0_0*x2 + ns0_2*x0, -ns1_3 + ns1_0*x2 + ns1_2*x0] [x3 - x0*x2]
// [ns0_1*ns0_2, ns1_1*ns0_2 + ns1_2*ns0_1, ns1_1*ns1_2, -ns0_4 + ns0_1*x2 + ns0_2*x1, -ns1_4 + ns1_1*x2 + ns1_2*x1] X = [x4 - x1*x2]
// [ns0_2**2, 2*ns1_2*ns0_2, ns1_2**2, -ns0_5 + 2*ns0_2*x2, -ns1_5 + 2*ns1_2*x2] [x5 - x2**2]
// [ns0_0*ns0_5, ns1_0*ns0_5 + ns1_5*ns0_0, ns1_0*ns1_5, -ns0_6 + ns0_0*x5 + ns0_5*x0, -ns1_6 + ns1_0*x5 + ns1_5*x0] [x6 - x0*x5]
Areal[0] = ns[0][0]*ns[0][0];
Areal[1] = ns[0][0]*ns[0][2];
Areal[2] = ns[0][1]*ns[0][2];
Areal[3] = ns[0][2]*ns[0][2];
Areal[4] = ns[0][0]*ns[0][5];
Areal[5] = 2*ns[1][0]*ns[0][0];
Areal[6] = ns[1][0]*ns[0][2] + ns[1][2]*ns[0][0];
Areal[7] = ns[1][1]*ns[0][2] + ns[1][2]*ns[0][1];
Areal[8] = 2*ns[1][2]*ns[0][2];
Areal[9] = ns[1][0]*ns[0][5] + ns[1][5]*ns[0][0];
Areal[10] = ns[1][0]*ns[1][0];
Areal[11] = ns[1][0]*ns[1][2];
Areal[12] = ns[1][1]*ns[1][2];
Areal[13] = ns[1][2]*ns[1][2];
Areal[14] = ns[1][0]*ns[1][5];
Areal[15] = -ns[0][1] + 2*ns[0][0]*Breal[0];
Areal[16] = -ns[0][3] + ns[0][0]*Breal[2] + ns[0][2]*Breal[0];
Areal[17] = -ns[0][4] + ns[0][1]*Breal[2] + ns[0][2]*Breal[1];
Areal[18] = -ns[0][5] + 2*ns[0][2]*Breal[2];
Areal[19] = -ns[0][6] + ns[0][0]*Breal[5] + ns[0][5]*Breal[0];
Areal[20] = -ns[1][1] + 2*ns[1][0]*Breal[0];
Areal[21] = -ns[1][3] + ns[1][0]*Breal[2] + ns[1][2]*Breal[0];
Areal[22] = -ns[1][4] + ns[1][1]*Breal[2] + ns[1][2]*Breal[1];
Areal[23] = -ns[1][5] + 2*ns[1][2]*Breal[2];
Areal[24] = -ns[1][6] + ns[1][0]*Breal[5] + ns[1][5]*Breal[0];
int startindex = 25;
Areal[startindex] = Breal[1]-Breal[0]*Breal[0];
Areal[startindex+1] = Breal[3]-Breal[0]*Breal[2];
Areal[startindex+2] = Breal[4]-Breal[1]*Breal[2];
Areal[startindex+3] = Breal[5]-Breal[2]*Breal[2];
Areal[startindex+4] = Breal[6]-Breal[0]*Breal[5];
int nn=5;
dgesv_(&nn, &nrhs, Areal, &nn, ipiv, &Areal[startindex], &nn, &info);
if( info == 0 ) {
if( 1 ) {//IKabs(Areal[startindex]-Areal[startindex+3]*Areal[startindex+3]) < 1000*tol && IKabs(Areal[startindex+2]-Areal[startindex+4]*Areal[startindex+4]) < 1000*tol ) {
for(int k = 0; k < matrixdim-1; ++k) {
Breal[k] += Areal[startindex+3]*ns[0][k]+Areal[startindex+4]*ns[1][k];
}
if( %s(Breal) ) {
rawroots[numroots++] = curroot;
rawroots[numroots++] = Breal[2];
rawroots[numroots++] = Breal[0];
}
}
}
}
else if( nullspacedim == 1 ) {
// solve an angle with quadratic equation
IKReal nullspace[matrixdim-1];
for(int j = matrixdim-2; j >= 0; --j) {
if( IKabs(Areal[j+matrixdim*j]) < 1000*tol ) {
nullspace[j] = 1;
}
else {
IKReal accum = 0;
for(int k = j+1; k < matrixdim-1; ++k) {
accum += nullspace[k] * Areal[j+matrixdim*k];
}
nullspace[j] = -accum/Areal[j+matrixdim*j];
}
}
// take the biggest abs value between [0],[1] and [2],[5]
IKReal f0 = IKabs(nullspace[0])+IKabs(nullspace[1]);
IKReal f1 = IKabs(nullspace[2])+IKabs(nullspace[5]);
int nsnumroots;
IKReal nsroots[2], nscoeffs[3];
if( f0 == 0 && f1 == 0 ) {
if( %s(Breal) ) {
rawroots[numroots++] = curroot;
rawroots[numroots++] = Breal[2];
rawroots[numroots++] = Breal[0];
}
}
else {
if( f0 > f1 ) {
nscoeffs[0] = nullspace[0]*nullspace[0];
nscoeffs[1] = 2*nullspace[0]*Breal[0]-nullspace[1];
nscoeffs[2] = Breal[0]*Breal[0]-Breal[1];
}
else if( f1 > 0 ) {
nscoeffs[0] = nullspace[2]*nullspace[2];
nscoeffs[1] = 2*nullspace[2]*Breal[2]-nullspace[5];
nscoeffs[2] = Breal[2]*Breal[2]-Breal[5];
}
%s(nscoeffs,nsroots,nsnumroots);
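// the quadratic solver above fills nsroots in place; Breal is updated
// incrementally in the loop below, so store the second root as a delta
// from the first.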
nsroots[1] -= nsroots[0];
for(int j = 0; j < nsnumroots; ++j) {
for(int k = 0; k < matrixdim-1; ++k) {
Breal[k] += nsroots[j]*nullspace[k];
}
if( %s(Breal) ) {
rawroots[numroots++] = curroot;
rawroots[numroots++] = Breal[2];
rawroots[numroots++] = Breal[0];
}
}
}
}
else {
if( %s(Breal) ) {
rawroots[numroots++] = curroot;
rawroots[numroots++] = Breal[2];
rawroots[numroots++] = Breal[0];
}
}
}
}
}
}
"""%(name,checkconsistency12,checkconsistency12,polyroots2,checkconsistency12,checkconsistency12)
self.functions[name] = fcode
return name
def solveFailed(self):
for c in C:
reducedeqs.append(Poly(simplify(c.subs(htvarsubs)*(1+htvar[0]**2)*(1+htvar[1]**2)),htvars[0],htvars[1],tvar))
x = Symbol('ht%s'%othersymbols[0].name[1:])
dummyeq = eq.coeff(0,0)*(1+x**2) + eq.coeff(1,0)*(1-x**2) + eq.coeff(0,1)*2*x
eq,symbolsubs = self.removeConstants(dummyeq,[x],symbolgen)
# create a new matrix using the coefficients of reducedeqs
newmonoms = set()
origmonoms = set()
maxdegree = 0
for peq in reducedeqs:
for m in peq.iter_monoms():
mlist = list(m)
newmonoms.add(tuple(mlist))
origmonoms.add(tuple(mlist))
mlist[0] += 1
newmonoms.add(tuple(mlist))
newmonoms = list(newmonoms)
newmonoms.sort()
origmonoms = list(origmonoms)
origmonoms.sort()
assert(len(newmonoms)<=2*len(reducedeqs))
symbolgen = cse_main.numbered_symbols('const')
localsymbols = []
localexprs = []
localcommon = []
M = zeros((2*len(reducedeqs),len(newmonoms)))
exportcoeffeqs = [S.Zero]*(len(reducedeqs)*len(newmonoms)*3)
x = Symbol('ht%s'%othersymbols[0].name[1:])
for ipeq,peq in enumerate(reducedeqs):
for c,m in peq.iter_terms():
#eq,symbolsubs = self.removeConstants(c,othersymbols[0:2],symbolgen)
eq = Poly(c,othersymbols[0],othersymbols[1])
assert(eq.degree<=1)
dummyeq = eq.coeff(0,0)*(1+x**2) + eq.coeff(1,0)*(1-x**2) + eq.coeff(0,1)*2*x
eq,symbolsubs = self.removeConstants(dummyeq,[x],symbolgen)
for s,expr in symbolsubs:
expr0,common0 = self.removecommonexprs(expr,returncommon=True)
index = self.getCommonExpression(localexprs,expr0)
if index is not None:
eq=eq.subs(s,localsymbols[index][0]/localcommon[index]*common0)
else:
index = self.getCommonExpression(localexprs,-expr0)
if index is not None:
eq=eq.subs(s,-localsymbols[index][0]/localcommon[index]*common0)
else:
localsymbols.append((s,expr))
localexprs.append(expr0)
localcommon.append(common0)
#eq = Poly(eq,othersymbols[0],othersymbols[1])
exportindex = len(newmonoms)*ipeq+newmonoms.index(m)
#exportcoeffeqs[exportindex] = eq.coeff(0,0)
#exportcoeffeqs[len(newmonoms)*len(reducedeqs)+exportindex] = eq.coeff(1,0)
#exportcoeffeqs[2*len(newmonoms)*len(reducedeqs)+exportindex] = eq.coeff(0,1)
M[ipeq+len(reducedeqs),newmonoms.index(m)] = eq.as_basic()
mlist = list(m)
mlist[0] += 1
M[ipeq,newmonoms.index(tuple(mlist))] = eq.as_basic()
Mpowers = [zeros(M.shape)]
for i in range(M.shape[0]):
for j in range(M.shape[0]):
Mpowers[0][i,j] = Poly(M[i,j],x)
Mpowers[0][i,i] += S.One
multcombs = [(0,0),(0,1),(1,1),(0,3),(1,3),(2,3),(3,3)]
for indices in multcombs:
print indices
Mnew = Mpowers[indices[0]]*Mpowers[indices[1]]
for i in range(M.shape[0]):
for j in range(M.shape[0]):
eq,symbolsubs = self.removeConstants(Mnew[i,j],[x],symbolgen)
for s,expr in symbolsubs:
localsymbols.append((s,expr))
localexprs.append(expr)
localcommon.append(S.One)
Mnew[i,j] = eq
Mpowers.append(Mnew)
# have M.shape[0] unknowns with constant term being 1
characteristiccoeffs = [Symbol('dummyc%d'%i) for i in range(M.shape[0]+1)]
characteristicpolys = []
for i in range(M.shape[0]):
for j in range(M.shape[0]):
print i,j
p = Poly(characteristiccoeffs[0],x,*characteristiccoeffs)
for k in range(M.shape[0]):
p = p + characteristiccoeffs[k+1]*Mpowers[k][i,j].as_basic()
characteristicpolys.append(p)
allmonoms = set()
for peq in characteristicpolys:
allmonoms = allmonoms.union(set(peq.monoms))
allmonoms = list(allmonoms)
allmonoms.sort()
localsymbolsvalues = []
for i in range(len(localsymbols)):
localsymbolsvalues.append((localsymbols[i][0],localsymbols[i][1].subs(localsymbolsvalues+psubs).evalf()))
Msub = [zeros(M.shape),zeros(M.shape),zeros(M.shape)]
for i in range(M.shape[0]):
for j in range(M.shape[1]):
peq = Poly(M[i,j],x)
for k in range(peq.degree+1):
Msub[k][i,j] = peq.coeff(k)
#allsymbols = self.pvars+[s for s,v in localsymbols]+[x]
#P,L,DD,U= self.LUdecompositionFF(M,*allsymbols)
#det=self.det_bareis(M,*(self.pvars+othersymbols[0:2]))
raise self.CannotSolveError('not implemented')
def isolatepair():
print 'attempting to isolate a variable'
finalsolutions = []
for i in [1,3]: # for every variable, used to be range(4) but it is never the case that [1] fails and [0] succeeds
# if variable ever appears, it should be alone
complementvar = unknownvars[[1,0,3,2][i]]
print 'var:',unknownvars[i]
varsol = None
for rank,eq in orgeqns:
if eq.has_any_symbols(unknownvars[i]):
# the equations can get big, so 'solve' does not work; also it doesn't make sense to solve for degree > 1
eq2=Poly(eq,unknownvars[i])
if eq2.degree == 1 and eq2.coeff(0) != S.Zero:
# dividing directly leaves common denominators, so use gcd
# long fractions can bog down the gcd computation, so first remove all numbers
tsymbolgen = cse_main.numbered_symbols('tconst')
coeff0,tsymbols0 = self.replaceNumbers(eq2.coeff(0),tsymbolgen)
coeff1,tsymbols1 = self.replaceNumbers(eq2.coeff(1),tsymbolgen)
common = gcd(coeff0,coeff1,unknownvars)
coeff0,r = div(coeff0,common,unknownvars)
coeff1,r = div(coeff1,common,unknownvars)
if not coeff1.has_any_symbols(complementvar):
varsol = (-coeff0/coeff1).subs(tsymbols0+tsymbols1)
break
if varsol is not None:
#eq=simplify(fraction(varsol)[0]**2 + fraction(varsol)[1]**2*(complementvar**2 - 1))
varsolvalid=fraction(varsol)[1]
valideqs = []
valideqscheck = []
for rank,eq in orgeqns:
# find the max degree that unknownvars[i] appears with
maxdegree = max([m[i] for m in eq.iter_monoms()])
if maxdegree <= 1:
eqsub = Symbol('tempsym')**maxdegree*eq.as_basic().subs(allsymbols+[(unknownvars[i],varsol)]).subs(fraction(varsol)[1],Symbol('tempsym'))
if self.codeComplexity(eqsub) < 70: # bobcat fk has an equation of complexity 75 that does not simplify
#print eqsub,'complexity: ',self.codeComplexity(eqsub)
eqsub = simplify(eqsub)
else:
eqsub=eqsub.expand()
print 'solvePairVariables: could not simplify eqsub: ',eqsub
eqnew = eqsub.subs(Symbol('tempsym'),fraction(varsol)[1]).expand()
if self.codeComplexity(eqnew) < 120:
eqnew = simplify(eqnew)
else:
print 'solvePairVariables: could not simplify eqnew: ',eqnew
eqnew = eqnew.expand().subs(reducesubs).expand()
if self.codeComplexity(eqnew) < 120:
eqnewcheck = self.removecommonexprs(eqnew)
else:
eqnewcheck = eqnew
if eqnew != S.Zero and self.isExpressionUnique(valideqscheck,eqnewcheck) and self.isExpressionUnique(valideqscheck,-eqnewcheck):
valideqs.append(eqnew)
valideqscheck.append(eqnewcheck)
if len(valideqs) <= 1:
continue
valideqs2 = []
for eq in valideqs:
eqnew, symbols = self.groupTerms(eq, unknownvars, symbolgen)
# only accept simple equations
if self.codeComplexity(eqnew) < 100:
allsymbols += symbols
valideqs2.append(eqnew)
if len(valideqs2) <= 1:
continue
self.sortComplexity(valideqs2)
complementvarsols = []
othervarpoly = None
othervars = unknownvars[0:2] if i >= 2 else unknownvars[2:4]
postcheckforzeros = []
postcheckforrange = []
postcheckfornonzeros = []
for eq in valideqs2:
try:
    peq = Poly(eq,complementvar)
except PolynomialError,e:
    # retrying the identical construction cannot succeed, so skip this equation
    print 'solvePairVariables: ',e
    continue
if peq.degree == 1: # degree > 1 adds sqrt's
solutions = [-peq.coeff(0).subs(allsymbols),peq.coeff(1).subs(allsymbols)]
if solutions[0] != S.Zero and solutions[1] != S.Zero and self.isValidSolution(solutions[0]/solutions[1]):
complementvarsols.append(solutions)
if len(complementvarsols) >= 2:
# test new solution with previous ones
eq0num,eq0denom = complementvarsols[-1]
for eq1num,eq1denom in complementvarsols[:-1]:
# although not apparent, this is actually a dangerous transformation that allows
# wrong solutions to pass through since complementvar is actually constrained, but the constraint
# is ignored. Therefore, this requires us to explicitly check the denominator for zero and
# that each solution is within the [-1,1] range.
neweq = eq0num*eq1denom-eq1num*eq0denom
if self.codeComplexity(neweq.expand()) < 700:
neweq = simplify(neweq)
neweq = neweq.expand() # added expand due to below Poly call failing
if neweq != S.Zero:
try:
othervarpoly = Poly(neweq,*othervars).subs(othervars[0]**2,1-othervars[1]**2).subs(othervars[1]**2,1-othervars[0]**2)
if othervarpoly.expand() != S.Zero:
postcheckforzeros = [varsolvalid, eq0denom, eq1denom]
postcheckfornonzeros = [(eq1num/eq1denom)**2+varsol.subs(complementvar,eq1num/eq1denom)**2-1]
break
else:
othervarpoly = None
except PolynomialError,e:
print e
if othervarpoly is not None:
break
if othervarpoly is not None:
# now we have one polynomial with only one variable (sin and cos)!
solution = self.solveHighDegreeEquationsHalfAngle([othervarpoly],varsym1 if i < 2 else varsym0)
solution.postcheckforzeros = [self.removecommonexprs(eq,onlygcd=False,onlynumbers=True) for eq in postcheckforzeros]
solution.postcheckfornonzeros = [self.removecommonexprs(eq,onlygcd=False,onlynumbers=True) for eq in postcheckfornonzeros]
solution.postcheckforrange = postcheckforrange
finalsolutions.append(solution)
if solution.poly.degree <= 2:
# found a really good solution, so choose it
break
else:
print 'othervarpoly too complex: ',othervarpoly
if len(finalsolutions) > 0:
# find the equation with the minimal degree, and the least code complexity
return [min(finalsolutions, key=lambda f: f.poly.degree*1e6 + self.codeComplexity(f.poly.as_basic()))]
def solveLinearly(self,raweqns,varsyms,othersolvedvars,maxdegree=1):
varsubs = []
unknownvars = []
for varsym in varsyms:
varsubs += varsym.subs
unknownvars += [varsym.cvar,varsym.svar,varsym.var]
polyeqs = [Poly(eq.subs(varsubs),*unknownvars) for eq in raweqns]
allmonoms = set()
newpolyeqs = []
for peq in polyeqs:
if peq.degree <= maxdegree:
allmonoms = allmonoms.union(set(peq.monoms))
newpolyeqs.append(peq)
allmonoms = list(allmonoms)
allmonoms.sort()
if len(allmonoms) > len(newpolyeqs):
raise self.CannotSolveError('not enough equations %d>%d'%(len(allmonoms),len(newpolyeqs)))
if __builtin__.sum(allmonoms[0]) != 0:
raise self.CannotSolveError('need null space')
# try to solve for all pairwise variables
systemofequations = []
for peq in newpolyeqs:
if peq.degree <= maxdegree:
arr = [S.Zero]*len(allmonoms)
for c,m in peq.iter_terms():
arr[allmonoms.index(m)] = c
systemofequations.append(arr)
singleeqs = None
M = zeros((len(allmonoms),len(allmonoms)))
for eqs in combinations(systemofequations,len(allmonoms)):
for i,arr in enumerate(eqs):
for j in range(len(allmonoms)):
M[i,j] = arr[j]
if __builtin__.sum(allmonoms[0]) == 0:
# can solve directly
det = self.det_bareis(M)
if det != S.Zero:
break
X = M[1:,1:].inv()*M[1:,0]
print X
else:
# find a nullspace of M, this means that det(M) = 0
det = self.det_bareis(M,*(self.pvars+unknownvars)).subs(allsymbols)
if det.evalf() != S.Zero:
X = M.adjugate()*B
singleeqs = []
for i in range(4):
eq = (pairwisesubs[i][0]*det - X[i]).subs(allsymbols)
eqnew, symbols = self.groupTerms(eq, unknownvars, symbolgen)
allsymbols += symbols
singleeqs.append([self.codeComplexity(eq),Poly(eqnew,*unknownvars)])
break
if singleeqs is not None:
neweqns += singleeqs
neweqns.sort(lambda x, y: x[0]-y[0])
def detdialytically():
M = Mall[2]*leftvar**2+Mall[1]*leftvar+Mall[0]
tempsymbols = [Symbol('a%d'%i) for i in range(16)]
tempsubs = []
for i in range(16):
if M[i] != S.Zero:
tempsubs.append((tempsymbols[i],Poly(M[i],leftvar)))
else:
tempsymbols[i] = S.Zero
Mtemp = Matrix(4,4,tempsymbols)
dettemp=Mtemp.det()
log.info('multiplying all determinant coefficients')
eqadds = []
for arg in dettemp.args:
log.info('%s',arg)
eqmuls = [Poly(arg2.subs(tempsubs),leftvar) for arg2 in arg.args]
if eqmuls[0].degree == 0:
eq = eqmuls.pop(0)
eqmuls[0] = eqmuls[0]*eq
while len(eqmuls) > 1:
ioffset = 0
eqmuls2 = []
while ioffset < len(eqmuls)-1:
eqmuls2.append(eqmuls[ioffset]*eqmuls[ioffset+1])
ioffset += 2
eqmuls = eqmuls2
eqadds.append(eqmuls[0])
log.info('adding all determinant coefficients')
eqaddsorg=eqadds
eqadds2 = []
for eq in eqadds:
print 'yo'
eq2 = Poly(S.Zero,leftvar)
for c,m in eq.iter_terms():
eq2 = eq2.add_term(simplifyfn(c),m)
eqadds2.append(eq2)
# any further simplification will just freeze the generation process
det = Poly(S.Zero,leftvar)
for eq in eqadds2:
for c,m in eq.iter_terms():
sym=self.gsymbolgen.next()
dictequations.append([sym,c])
det += sym*leftvar**m[0]
@staticmethod
def _LUdecompositionFF(M,*vars):
"""
Function from sympy.
Returns 4 matrices P, L, D, U such that PA = L D**-1 U.
From the paper "fraction-free matrix factors..." by Zhou and Jeffrey
"""
n, m = M.rows, M.cols
U, L, P = M[:,:], eye(n), eye(n)
DD = zeros(n) # could be stored smarter since it's just diagonal
oldpivot = 1
for k in range(n-1):
if U[k,k] == S.Zero:
for kpivot in range(k+1, n):
if U[kpivot, k] != S.Zero:
break
else:
raise IKFastSolver.CannotSolveError("Matrix is not full rank")
U[k, k:], U[kpivot, k:] = U[kpivot, k:], U[k, k:]
L[k, :k], L[kpivot, :k] = L[kpivot, :k], L[k, :k]
P[k, :], P[kpivot, :] = P[kpivot, :], P[k, :]
L[k,k] = Ukk = U[k,k]
DD[k,k] = oldpivot * Ukk
for i in range(k+1, n):
L[i,k] = Uik = U[i,k]
for j in range(k+1, m):
if len(vars) == 0:
U[i,j] = (Ukk * U[i,j] - U[k,j]*Uik) / oldpivot
else:
log.debug('LU %d,%d: %s',i,j,oldpivot)
q,r = div(Poly(Ukk * U[i,j] - U[k,j]*Uik,*vars),oldpivot)
assert(r==S.Zero)
U[i,j] = q
U[i,k] = S.Zero
oldpivot = Ukk
DD[n-1,n-1] = oldpivot
return P, L, DD, U
def test_ik():
from sympy import *
from sympy import S, pi, sin, cos, PolynomialError, Symbol
import numpy
import __builtin__
from openravepy.ikfast import AST, combinations, fmod
from itertools import izip
from openravepy import axisAngleFromRotationMatrix
numpy.set_printoptions(15)
IkType=IkParameterizationType
ikmodel=self
self = solver
self.ikfast_module = ikmodel.ikfast
freeindices = ikmodel.freeindices
log = ikmodel.ikfast.log
log.setLevel(logging.DEBUG)
rawglobaldir = [1.0,0.0,0.0]
rawnormaldir = [0.0,0.0,1.0]
rawbasedir=dot(ikmodel.manip.GetLocalToolTransform()[0:3,0:3],ikmodel.manip.GetDirection())
rawbasepos=ikmodel.manip.GetLocalToolTransform()[0:3,3]
chaintree = solver.generateIkSolver(baselink=baselink,eelink=eelink,freeindices=freeindices,solvefn=solvefn,ikfastoptions=1)
code=ikmodel.ikfast.ikfast_generator_cpp.CodeGenerator(version=ikmodel.ikfast.__version__).generate(chaintree)
open(sourcefilename,'w').write(code)
#T0links.append(self.affineInverse(T1links.pop(-1)))
# get values
possibleangles = [S.Zero, pi.evalf()/2, asin(3.0/5).evalf(), asin(4.0/5).evalf(), asin(5.0/13).evalf(), asin(12.0/13).evalf()]
jointvalues = [S.Zero]*len(self._jointvars)
jointvalues[0] = possibleangles[2]
jointvalues[1] = possibleangles[3]
jointvalues[2] = possibleangles[2]
jointvalues[3] = possibleangles[3]
jointvalues[4] = possibleangles[3]
valsubs = []
freevalsubs = []
for var,value in izip(self._jointvars,jointvalues):
newsubs = [(var,Real(value)),(Symbol('c%s'%var.name),self.convertRealToRational(cos(value).evalf())),(Symbol('s%s'%var.name),self.convertRealToRational(sin(value).evalf())),(Symbol('t%s'%var.name),self.convertRealToRational(tan(value).evalf())),(Symbol('ht%s'%var.name),self.convertRealToRational(tan(value/2).evalf()))]
valsubs += newsubs
if not var in self._solvejointvars:
freevalsubs += newsubs
psubs = []
for i in range(12):
psubs.append((self.Tee[i],self.convertRealToRational(self.Tfinal[i].subs(valsubs).evalf())))
for s,v in self.ppsubs+self.npxyzsubs+self.rxpsubs:
psubs.append((s,v.subs(psubs)))
if len(self.globalsymbols) > 0:
psubs += [(s,v.subs(psubs+valsubs)) for s,v in self.globalsymbols]
if len(raghavansolutiontree) > 0:
psubs += [(s,v.subs(psubs)) for s,v in raghavansolutiontree[0].subs]
dummyvaluesubs = [(dvar,self.convertRealToRational(var.subs(valsubs).evalf())) for dvar,var in dummyvars]
allsubs = valsubs+psubs+dummyvaluesubs
localsymbolsvalues = [(var,value.subs(valsubs+psubs)) for var,value in localsymbols]
for var,value in izip(jointvars,jointvalues):
valsubs += [(var,value),(Symbol('c%s'%var.name),(cos(value).evalf())),(Symbol('s%s'%var.name),(sin(value).evalf())),(Symbol('t%s'%var.name),(tan(value).evalf())),(Symbol('ht%s'%var.name),(tan(value/2).evalf()))]
psubs = []
for i in range(12):
psubs.append((self.Tee[i],(Tfinal[i].subs(psubs+valsubs).evalf())))
for s,v in self.ppsubs+self.npxyzsubs+self.rxpsubs:
psubs.append((s,v.subs(psubs)))
newsubs=[(var,eq.subs(self.testconsistentvalues[1]).evalf()) for var,eq in coupledsolutions[0].dictequations]
mpmath.polyroots(coupledsolutions[0].poly.subs(newsubs).coeffs)
jointvalues = [-0.2898119639388401, 0.0, -5.913881500780583, 0.0, -3.116541584197247, 1.570796326794897]
"""
ikfast notes:
pr2 with conic sections:
success rate: 0.955000, wrong solutions: 0.000000, no solutions: 0.145000, missing solution: 0.281000
mean: 0.000006s, median: 0.000007s, min: 0.000001s, max: 0.000015s
pr2 with half-angle transformation:
success rate: 0.993000, wrong solutions: 0.000000, no solutions: 0.011000, missing solution: 0.081000
mean: 0.000009s, median: 0.000009s, min: 0.000005s, max: 0.000016s
"""
| mit | 2,666,996,518,185,427,500 | 45.446771 | 328 | 0.525626 | false |
jezdez/hirefire | hirefire/procs/hotqueue.py | 1 | 1845 | from __future__ import absolute_import
from hotqueue import HotQueue
from . import ClientProc
class HotQueueProc(ClientProc):
"""
A proc class for the `HotQueue
<http://richardhenry.github.com/hotqueue/>`_ library.
:param name: the name of the proc (required)
:param queues: list of queue names to check (required)
:param connection_params: the connection parameter to use by default
(optional)
:type name: str
:type queues: str or list
:type connection_params: dict
Example::
from hirefire.procs.hotqueue import HotQueueProc
class WorkerHotQueueProc(HotQueueProc):
name = 'worker'
queues = ['myqueue']
connection_params = {
'host': 'localhost',
'port': 6379,
'db': 0,
}
"""
#: The name of the proc (required).
name = None
#: The list of queues to check (required).
queues = []
#: The connection parameter to use by default (optional).
connection_params = {}
def __init__(self, connection_params=None, *args, **kwargs):
super(HotQueueProc, self).__init__(*args, **kwargs)
if connection_params is not None:
self.connection_params = connection_params
def client(self, queue):
"""
Given one of the configured queues returns a
:class:`hotqueue.HotQueue` instance with the
:attr:`~hirefire.procs.hotqueue.HotQueueProc.connection_params`.
"""
if isinstance(queue, HotQueue):
return queue
return HotQueue(queue, **self.connection_params)
def quantity(self, **kwargs):
"""
Returns the aggregated number of tasks of the proc queues.
"""
return sum([len(client) for client in self.clients])
| bsd-3-clause | -1,827,444,227,618,777,600 | 28.285714 | 72 | 0.598374 | false |
kootenpv/brightml | brightml/timer.py | 1 | 1228 | import time
class ZmqTimerManager(object):
def __init__(self):
self.timers = []
self.next_call = 0
def add_timer(self, timer):
self.timers.append(timer)
def check(self):
if time.time() > self.next_call:
for timer in self.timers:
timer.check()
def get_next_interval(self):
if time.time() >= self.next_call:
call_times = []
for timer in self.timers:
call_times.append(timer.get_next_call())
self.next_call = min(call_times)
if self.next_call < time.time():
val = 1
else:
val = (self.next_call - time.time()) * 1000
else:
val = (self.next_call - time.time()) * 1000
if val < 1:
val = 1
return val
class ZmqTimer(object):
def __init__(self, interval, callback):
self.interval = interval
self.callback = callback
self.last_call = 0
def check(self):
if time.time() > (self.interval + self.last_call):
self.callback()
self.last_call = time.time()
def get_next_call(self):
return self.last_call + self.interval
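

# Usage sketch (assumed wiring): get_next_interval() is meant to feed a
# poll timeout in milliseconds; check() then fires any timers that are due.
if __name__ == "__main__":
    def _tick():
        print("tick")  # hypothetical placeholder callback

    manager = ZmqTimerManager()
    manager.add_timer(ZmqTimer(5, _tick))  # fire roughly every 5 seconds
    while True:
        time.sleep(manager.get_next_interval() / 1000.0)
        manager.check()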
| mit | 6,674,582,009,593,183,000 | 25.695652 | 59 | 0.517101 | false |
larrybradley/astropy-helpers | astropy_helpers/sphinx/ext/edit_on_github.py | 1 | 5898 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This extension makes it easy to edit documentation on github.
It adds links associated with each docstring that go to the
corresponding view source page on Github. From there, the user can
push the "Edit" button, edit the docstring, and submit a pull request.
It has the following configuration options (to be set in the project's
``conf.py``):
* ``edit_on_github_project``
The name of the github project, in the form
"username/projectname".
* ``edit_on_github_branch``
The name of the branch to edit. If this is a released version,
this should be a git tag referring to that version. For a
dev version, it often makes sense for it to be "master". It
may also be a git hash.
* ``edit_on_github_source_root``
The location within the source tree of the root of the
Python package. Defaults to "lib".
* ``edit_on_github_doc_root``
The location within the source tree of the root of the
documentation source. Defaults to "doc", but it may make sense to
set it to "doc/source" if the project uses a separate source
directory.
* ``edit_on_github_docstring_message``
The phrase displayed in the links to edit a docstring. Defaults
to "[edit on github]".
* ``edit_on_github_page_message``
The phrase displayed in the links to edit a RST page. Defaults
to "[edit this page on github]".
* ``edit_on_github_help_message``
The phrase displayed as a tooltip on the edit links. Defaults to
"Push the Edit button on the next page"
* ``edit_on_github_skip_regex``
When the path to the .rst file matches this regular expression,
no "edit this page on github" link will be added. Defaults to
``"_.*"``.
"""
import inspect
import os
import re
import sys
from docutils import nodes
from sphinx import addnodes
def import_object(modname, name):
"""
Import the object given by *modname* and *name* and return it.
If not found, or the import fails, returns None.
"""
try:
__import__(modname)
mod = sys.modules[modname]
obj = mod
for part in name.split('.'):
obj = getattr(obj, part)
return obj
except:
return None
def get_url_base(app):
return 'http://github.com/%s/tree/%s/' % (
app.config.edit_on_github_project,
app.config.edit_on_github_branch)
def doctree_read(app, doctree):
# Get the configuration parameters
if app.config.edit_on_github_project == 'REQUIRED':
raise ValueError(
"The edit_on_github_project configuration variable must be "
"provided in the conf.py")
source_root = app.config.edit_on_github_source_root
url = get_url_base(app)
docstring_message = app.config.edit_on_github_docstring_message
# Handle the docstring-editing links
for objnode in doctree.traverse(addnodes.desc):
if objnode.get('domain') != 'py':
continue
names = set()
for signode in objnode:
if not isinstance(signode, addnodes.desc_signature):
continue
modname = signode.get('module')
if not modname:
continue
fullname = signode.get('fullname')
if fullname in names:
# only one link per name, please
continue
names.add(fullname)
obj = import_object(modname, fullname)
anchor = None
if obj is not None:
try:
lines, lineno = inspect.getsourcelines(obj)
except:
pass
else:
anchor = '#L%d' % lineno
if anchor:
real_modname = inspect.getmodule(obj).__name__
path = '%s%s%s.py%s' % (
url, source_root, real_modname.replace('.', '/'), anchor)
onlynode = addnodes.only(expr='html')
onlynode += nodes.reference(
reftitle=app.config.edit_on_github_help_message,
refuri=path)
onlynode[0] += nodes.inline(
'', '', nodes.raw('', ' ', format='html'),
nodes.Text(docstring_message),
classes=['edit-on-github', 'viewcode-link'])
signode += onlynode
def html_page_context(app, pagename, templatename, context, doctree):
if (templatename == 'page.html' and
not re.match(app.config.edit_on_github_skip_regex, pagename)):
doc_root = app.config.edit_on_github_doc_root
if doc_root != '' and not doc_root.endswith('/'):
doc_root += '/'
doc_path = os.path.relpath(doctree.get('source'), app.builder.srcdir)
url = get_url_base(app)
page_message = app.config.edit_on_github_page_message
context['edit_on_github'] = url + doc_root + doc_path
context['edit_on_github_page_message'] = (
app.config.edit_on_github_page_message)
def setup(app):
app.add_config_value('edit_on_github_project', 'REQUIRED', True)
app.add_config_value('edit_on_github_branch', 'master', True)
app.add_config_value('edit_on_github_source_root', 'lib', True)
app.add_config_value('edit_on_github_doc_root', 'doc', True)
app.add_config_value('edit_on_github_docstring_message',
'[edit on github]', True)
app.add_config_value('edit_on_github_page_message',
'Edit This Page on Github', True)
app.add_config_value('edit_on_github_help_message',
'Push the Edit button on the next page', True)
app.add_config_value('edit_on_github_skip_regex',
'_.*', True)
app.connect('doctree-read', doctree_read)
app.connect('html-page-context', html_page_context)
| bsd-3-clause | -1,403,147,038,377,882,000 | 34.745455 | 77 | 0.603934 | false |
google/rekall | rekall-core/rekall/plugins/windows/pfn.py | 1 | 19615 | from __future__ import division
# Rekall Memory Forensics
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Authors:
# Michael Cohen <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# References:
# http://www.codemachine.com/article_kernelstruct.html#MMPFN
# http://www.reactos.org/wiki/Techwiki:Memory_management_in_the_Windows_XP_kernel#MmPfnDatabase
# pylint: disable=protected-access
from future import standard_library
standard_library.install_aliases()
from builtins import zip
from builtins import range
from past.utils import old_div
import re
import io
from rekall import kb
from rekall import testlib
from rekall import plugin
from rekall.ui import text
from rekall.plugins import core
from rekall.plugins.addrspaces import intel
from rekall.plugins.windows import common
from rekall.plugins.windows import pagefile
from rekall_lib import utils
class VtoP(core.VtoPMixin, common.WinProcessFilter):
"""Prints information about the virtual to physical translation."""
class PFNInfo(common.WindowsCommandPlugin):
"""Prints information about an address from the PFN database."""
__name = "pfn"
# Size of page.
PAGE_SIZE = 0x1000
PAGE_BITS = 12
__args = [
dict(name="pfn", type="IntParser", positional=True, required=True,
help="The PFN to examine.")
]
table_header = [
dict(name="fact", width=25),
dict(name="Address", style="address"),
dict(name="Value"),
]
def collect(self):
pfn_obj = self.profile.get_constant_object("MmPfnDatabase")[
self.plugin_args.pfn]
yield "PFN", self.plugin_args.pfn
yield "PFN Record VA", pfn_obj.obj_offset
yield "Type", None, pfn_obj.Type
# In these states the other fields are meaningless.
if pfn_obj.Type in ("Zeroed", "Freed", "Bad"):
yield "Flink", pfn_obj.u1.Flink
yield "Blink", pfn_obj.u2.Blink
return
# The flags we are going to print.
flags = ["Modified",
"ParityError",
"ReadInProgress",
"WriteInProgress"]
long_flags_string = " ".join(
[v for v in flags if pfn_obj.u3.e1.m(v) != 0])  # report only flags that are set
yield "Flags", None, long_flags_string
containing_page = int(pfn_obj.u4.PteFrame)
pte_physical_address = ((containing_page << self.PAGE_BITS) |
(int(pfn_obj.PteAddress) & 0xFFF))
yield "Reference", None, pfn_obj.u3.e2.ReferenceCount
yield "ShareCount", None, pfn_obj.u2.ShareCount
yield "Color", None, pfn_obj.multi_m("u3.e1.PageColor", "u4.PageColor")
yield "Controlling PTE (VA)", pfn_obj.PteAddress
yield "Controlling PTE (PA)", pte_physical_address
yield ("Controlling PTE Type", None,
"Prototype" if pfn_obj.IsPrototype else "Hardware")
# PFN is actually a DTB.
if containing_page == self.plugin_args.pfn:
owning_process = pfn_obj.u1.Flink.cast(
"Pointer", target="_EPROCESS")
yield "Owning process", owning_process
# Now describe the PTE and Prototype PTE pointed to by this PFN entry.
collection = intel.DescriptorCollection(self.session)
# We read the controlling PTE from the physical space (via read_pte)
# since PteAddress refers to the process' address space, which we
# don't have here right now and would be more expensive to gather.
self.session.kernel_address_space.describe_pte(
collection, pfn_obj.PteAddress,
self.session.kernel_address_space.read_pte(pte_physical_address),
0)
yield "Controlling PTE", None, collection
if pfn_obj.OriginalPte:
collection = intel.DescriptorCollection(self.session)
self.session.kernel_address_space.describe_proto_pte(
collection, pfn_obj.OriginalPte.v(),
pfn_obj.OriginalPte.Long, 0)
yield "Original PTE", None, collection
class PtoV(common.WinProcessFilter):
"""Converts a physical address to a virtual address."""
__name = "ptov"
PAGE_SIZE = 0x1000
PAGE_BITS = 12
__args = [
dict(name="physical_address", type="IntParser", positional=True,
help="The Virtual Address to examine.")
]
def __init__(self, *args, **kwargs):
super(PtoV, self).__init__(*args, **kwargs)
if self.profile.metadata("arch") == "I386":
if self.profile.metadata("pae"):
self.table_names = ["Phys", "PTE", "PDE", "DTB"]
self.bit_divisions = [12, 9, 9, 2]
else:
self.table_names = ["Phys", "PTE", "PDE", "DTB"]
self.bit_divisions = [12, 10, 10]
elif self.profile.metadata("arch") == "AMD64":
self.table_names = ["Phys", "PTE", "PDE", "PDPTE", "PML4E", "DTB"]
self.bit_divisions = [12, 9, 9, 9, 9, 4]
else:
raise plugin.PluginError("Memory model not supported.")
def ptov(self, collection, physical_address):
pfn_obj = self.profile.get_constant_object("MmPfnDatabase")[
physical_address >> self.PAGE_BITS]
# The PFN points at a prototype PTE.
if pfn_obj.IsPrototype:
collection.add(pagefile.WindowsFileMappingDescriptor,
pte_address=pfn_obj.PteAddress.v(),
page_offset=physical_address & 0xFFF,
original_pte=pfn_obj.OriginalPte)
else:
# PTE is a system PTE, we can directly resolve the virtual address.
self._ptov_x64_hardware_PTE(collection, physical_address)
def _ptov_x64_hardware_PTE(self, collection, physical_address):
"""An implementation of ptov for x64."""
pfn_database = self.session.profile.get_constant_object("MmPfnDatabase")
# A list of PTEs and their physical addresses.
physical_addresses = dict(Phys=physical_address)
# The physical and virtual address of the pte that controls the named
# member.
phys_addresses_of_pte = {}
ptes = {}
p_addr = physical_address
pfns = {}
# Starting with the physical address climb the PFN database in reverse
# to reach the DTB. At each page table entry we store its physical
# offset. Then below we traverse the page tables in the forward order
# and add the bits into the virtual address.
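# Illustrative sketch (x64, hypothetical table indices): ignoring the
# canonical sign-extension restored by integer_to_address below, the two
# loops together reassemble
#   va = ((((pml4_idx << 9 | pdpt_idx) << 9 | pd_idx) << 9 | pt_idx)
#         << 12) | (physical_address & 0xFFF)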
for i, name in enumerate(self.table_names):
pfn = p_addr >> self.PAGE_BITS
pfns[name] = pfn_obj = pfn_database[pfn]
# The PTE which controls this pfn.
pte = pfn_obj.PteAddress
# PTE is not valid - this may be a large page. We dont currently
# know how to handle large pages.
#if not pte.u.Hard.Valid:
# return
if i > 0:
physical_addresses[name] = ptes[
self.table_names[i-1]].obj_offset
# The physical address of the PTE.
p_addr = ((pfn_obj.u4.PteFrame << self.PAGE_BITS) |
(pte.v() & 0xFFF))
phys_addresses_of_pte[name] = p_addr
# Hold on to the PTE in the physical AS. This is important as it
# ensures we can always access the correct PTE no matter the process
# context.
ptes[name] = self.session.profile._MMPTE(
p_addr, vm=self.session.physical_address_space)
self.session.logging.getChild("PageTranslation").debug(
"%s %#x is controlled by pte %#x (PFN %#x)",
name, physical_addresses[name], ptes[name], pfns[name])
# The DTB must be page aligned.
dtb = p_addr & ~0xFFF
# Now we construct the virtual address by locating the offset in each
# page table where the PTE is and deducing the bits covered within that
# range.
virtual_address = 0
start_of_page_table = dtb
size_of_pte = self.session.profile._MMPTE().obj_size
for name, bit_division in reversed(list(zip(
self.table_names, self.bit_divisions))):
pte = ptes[name]
virtual_address += old_div((
ptes[name].obj_offset - start_of_page_table), size_of_pte)
virtual_address <<= bit_division
# The physical address where the page table begins. The next
# iteration will find the offset of the next higher up page table
# level in this table.
start_of_page_table = pte.u.Hard.PageFrameNumber << self.PAGE_BITS
if name == "Phys":
collection.add(intel.PhysicalAddressDescriptor,
address=physical_address)
elif name == "DTB":
# The DTB must be page aligned.
collection.add(pagefile.WindowsDTBDescriptor,
dtb=physical_addresses["DTB"] & ~0xFFF)
else:
collection.add(pagefile.WindowsPTEDescriptor,
object_name=name, pte_value=pte.Long,
pte_addr=pte.obj_offset, session=self.session)
virtual_address = self.session.profile.integer_to_address(
virtual_address)
virtual_address += physical_address & 0xFFF
collection.add(intel.VirtualAddressDescriptor, dtb=dtb,
address=virtual_address)
def render(self, renderer):
if self.plugin_args.physical_address is None:
return
descriptors = intel.DescriptorCollection(self.session)
self.ptov(descriptors, self.plugin_args.physical_address)
for descriptor in descriptors:
descriptor.render(renderer)
class WinRammap(common.WindowsCommandPlugin):
"""Scan all physical memory and report page owners."""
name = "rammap"
__args = [
dict(name="start", type="IntParser", default=0, positional=True,
help="Physical memory address to start displaying."),
dict(name="end", type="IntParser",
help="Physical memory address to end displaying."),
]
table_header = [
dict(name="phys_offset", max_depth=1,
type="TreeNode", child=dict(style="address", align="l"),
width=16),
dict(name="List", width=10),
dict(name="Use", width=15),
dict(name="Pr", width=2),
dict(name="Process", type="_EPROCESS"),
dict(name="VA", style="address"),
dict(name="Offset", style="address"),
dict(name="Filename"),
]
def __init__(self, *args, **kwargs):
super(WinRammap, self).__init__(*args, **kwargs)
self.plugin_args.start &= ~0xFFF
self.ptov_plugin = self.session.plugins.ptov()
self.pfn_database = self.session.profile.get_constant_object(
"MmPfnDatabase")
self.pools = self.session.plugins.pools()
def describe_phys_addr(self, phys_off):
pfn_obj = self.pfn_database[phys_off >> 12]
collection = intel.DescriptorCollection(self.session)
self.ptov_plugin.ptov(collection, phys_off)
result = dict(phys_offset=phys_off,
List=pfn_obj.Type,
Pr=pfn_obj.Priority)
# Go through different kinds of use and display them in the table.
descriptor = collection["VirtualAddressDescriptor"]
if descriptor:
dtb_descriptor = collection["WindowsDTBDescriptor"]
# Address is in kernel space.
if descriptor.address > self.session.GetParameter(
"highest_usermode_address"):
_, _, pool = self.pools.is_address_in_pool(descriptor.address)
if pool:
yield dict(Use=pool.PoolType,
VA=descriptor.address, **result)
else:
yield dict(Use="Kernel",
VA=descriptor.address, **result)
else:
yield dict(Use="Private",
Process=dtb_descriptor.owner(),
VA=descriptor.address, **result)
return
descriptor = collection["WindowsFileMappingDescriptor"]
if descriptor:
subsection = descriptor.get_subsection()
filename, file_offset = descriptor.filename_and_offset(
subsection=subsection)
# First show the owner that mapped the file.
virtual_address = None
depth = 0
# A real mapped file.
for process, virtual_address in descriptor.get_owners(
subsection=subsection):
yield dict(Use="Mapped File",
Filename=filename,
Offset=file_offset,
depth=depth,
Process=process,
VA=virtual_address, **result)
if self.plugin_args.verbosity <= 1:
return
# If the user wants more, also show the other processes which
# map this file.
depth = 1
# We could not find a process owner so we just omit it.
if depth == 0:
yield dict(Use="Mapped File",
Filename=filename,
Offset=file_offset,
**result)
return
if pfn_obj.u3.e2.ReferenceCount == 0:
result["Use"] = "Unused"
yield result
return
yield result
def collect(self):
phys_off = self.plugin_args.start
end = self.plugin_args.end
if end is None or end < phys_off:
end = phys_off + 10 * 0x1000
for phys_off in utils.xrange(self.plugin_args.start, end, 0x1000):
for result in self.describe_phys_addr(phys_off):
yield result
# Re-run from here next invocation.
self.plugin_args.start = phys_off
def summary(self):
"""Return a multistring summary of the result."""
# We just use the WideTextRenderer to render the records.
fd = io.StringIO()
with text.WideTextRenderer(session=self.session, fd=fd) as renderer:
self.render(renderer)
return [_f for _f in re.split(r"(^|\n)\*+\n", fd.getvalue(), flags=re.S | re.M) if _f]
class TestWinRammap(testlib.SimpleTestCase):
PARAMETERS = dict(
commandline="rammap %(start)s",
start=0x4d7000,
)
class DTBScan(common.WinProcessFilter):
"""Scans the physical memory for DTB values.
This plugin can compare the DTBs found against the list of known processes
to find hidden processes.
"""
__name = "dtbscan"
__args = [
dict(name="limit", type="IntParser", default=2**64,
help="Stop scanning after this many mb.")
]
table_header = [
dict(name="DTB", style="address"),
dict(name="VA", style="address"),
dict(name="Owner", type="_EPROCESS"),
dict(name="Known", type="Bool"),
]
def collect(self):
ptov = self.session.plugins.ptov(session=self.session)
pslist = self.session.plugins.pslist(session=self.session)
pfn_database = self.session.profile.get_constant_object("MmPfnDatabase")
# Known tasks:
known_tasks = set()
for task in pslist.list_eprocess():
known_tasks.add(task.obj_offset)
seen_dtbs = set()
# Now scan all the physical address space for DTBs.
for run in self.physical_address_space.get_mappings():
for page in range(run.start, run.end, 0x1000):
self.session.report_progress("Scanning 0x%08X (%smb)" % (
page, page/1024/1024))
# Quit early if requested to.
if page > self.plugin_args.limit:
return
collection = intel.DescriptorCollection(self.session)
ptov.ptov(collection, page)
dtb_descriptor = collection["WindowsDTBDescriptor"]
if dtb_descriptor:
dtb = dtb_descriptor.dtb
if dtb not in seen_dtbs:
seen_dtbs.add(dtb)
pfn_obj = pfn_database[dtb >> 12]
# Report the VA of the DTB (since DTBs map themselves, this
# equals the VA of the DTB itself).
va = pfn_obj.PteAddress.v()
task = dtb_descriptor.owner()
yield (dtb, va, task,
task.obj_offset and task.obj_offset in known_tasks)
class TestDTBScan(testlib.SimpleTestCase):
PARAMETERS = dict(
commandline="dtbscan --limit 10mb",
)
class WinSubsectionProducer(kb.ParameterHook):
"""Produce all the subsection objects we know about.
Returns a dict keyed with subsection offsets with values being a details
dict. The details include the vad and the _EPROCESS address for this
process.
"""
name = "subsections"
def calculate(self):
result = {}
for task in self.session.plugins.pslist().filter_processes():
self.session.report_progress("Inspecting VAD for %s", task.name)
for vad in task.RealVadRoot.traverse():
subsection_list = vad.multi_m(
"Subsection", "ControlArea.FirstSubsection")
for subsection in subsection_list.walk_list(
"NextSubsection", include_current=True):
record = result.setdefault(subsection.obj_offset, [])
record.append(dict(task=task.obj_offset,
vad=vad.obj_offset,
type=vad.obj_type))
return result
class WinPrototypePTEArray(kb.ParameterHook):
"""A ranged collection for Prototype PTE arrays."""
name = "prototype_pte_array_subsection_lookup"
def calculate(self):
result = utils.RangedCollection()
for subsection_offset in self.session.GetParameter("subsections"):
subsection = self.session.profile._SUBSECTION(subsection_offset)
start = subsection.SubsectionBase.v()
# Pte Arrays are always allocated from kernel pools.
if start < self.session.GetParameter("highest_usermode_address"):
continue
end = start + (subsection.PtesInSubsection *
subsection.SubsectionBase[0].obj_size)
result.insert(start, end, subsection_offset)
return result
| gpl-2.0 | -8,307,783,502,107,839,000 | 35.056985 | 95 | 0.582462 | false |
socialsensor/community-evolution-analysis | matlab/python_data_parsing/json_mention_multifile_noDialog_crawler.py | 1 | 2244 | #-------------------------------------------------------------------------------
# Purpose: parsing data from the crawler's "rawmetadata.json.#" json files to a form:
# author \t mentioned1,mentioned2,... \t "creationTime" \t text \n
# creating as many txt files as there are json files.
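# e.g. one output line (hypothetical values; fields are tab-separated):
# someAuthor<TAB>friend1,friend2<TAB>"Fri May 31 12:00:00 +0000 2013"<TAB>some tweet text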
# This .py file does not present the user with a folder selection dialog.
# Required libs: unidecode
# Author: konkonst
#
# Created: 31/05/2013
# Copyright: (c) ITI (CERTH) 2013
# Licence: <apache licence 2.0>
#-------------------------------------------------------------------------------
import json
import os, glob
import codecs, unicodedata
from unidecode import unidecode
# Dataset folder is hardcoded (this variant has no selection dialog)
dataset_path = "E:/konkonst/retriever/crawler_temp/"
#Parsing commences###
counter=0
for filename in sorted(glob.glob(dataset_path+"/rawmetadata.json.*"),reverse=True):#json files
print(filename)
my_file=open(filename,"r")
counter+=1
my_txt=open(dataset_path+"/auth_ment_time_txt_"+str(counter)+".txt","w")#target files
read_line=my_file.readline()
ustr_to_load = unicode(read_line, 'iso-8859-15')
while read_line:
json_line=json.loads(ustr_to_load)##,encoding="cp1252")#.decode("utf-8","replace")
if "delete" in json_line or "scrub_geo" in json_line or "limit" in json_line:
read_line=my_file.readline()
ustr_to_load = unicode(read_line, 'iso-8859-15')
continue
else:
if json_line["entities"]["user_mentions"] and json_line["user"]["screen_name"]:
len_ment=len(json_line["entities"]["user_mentions"])
mentions=[]
for i in range(len_ment):
mentions.append(json_line["entities"]["user_mentions"][i]["screen_name"])
my_text=json_line["text"].replace("\n", "")
my_text=unidecode(my_text)
my_txt.write(json_line["user"]["screen_name"]+"\t" + ",".join(mentions)+"\t"+"\""+json_line["created_at"]+"\""+"\t"+my_text+"\n")
read_line=my_file.readline()
ustr_to_load = unicode(read_line, 'iso-8859-15')
else:
my_file.close()
my_txt.close()
| apache-2.0 | 3,282,412,746,953,365,500 | 43.88 | 145 | 0.559269 | false |
tyrylu/pyfmodex | pyfmodex/studio/event_description.py | 1 | 1936 | """The description for an FMOD Studio Event."""
from ctypes import byref, c_int, c_void_p, create_string_buffer
from .event_instance import EventInstance
from .studio_object import StudioObject
from .enums import LOADING_STATE
class EventDescription(StudioObject):
"""The description for an FMOD Studio Event.
Event descriptions belong to banks and can be queried after the relevant
bank has been loaded. Event descriptions may be retrieved via path or GUID
lookup, or by enumerating all descriptions in a bank.
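
Usage sketch (assumes ``desc`` was obtained from an already loaded
bank)::

    desc.load_sample_data()
    instance = desc.create_instance()
    print(desc.path, desc.parameter_description_count)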
"""
function_prefix = "FMOD_Studio_EventDescription"
@property
def path(self):
"""The path."""
required = c_int()
self._call("GetPath", None, 0, byref(required))
path_buffer = create_string_buffer(required.value)
self._call("GetPath", path_buffer, len(path_buffer), None)
return path_buffer.value.decode("utf-8")
def create_instance(self):
"""Create a playable instance."""
instance_ptr = c_void_p()
self._call("CreateInstance", byref(instance_ptr))
return EventInstance(instance_ptr)
@property
def parameter_description_count(self):
"""The number of parameters in the event."""
count = c_int()
self._call("GetParameterDescriptionCount", byref(count))
return count.value
@property
def user_property_count(self):
"""The number of user properties attached to the event."""
count = c_int()
self._call("GetUserPropertyCount", byref(count))
return count.value
def load_sample_data(self):
"""Loads non-streaming sample data used by the event."""
self._call("LoadSampleData")
@property
def sample_loading_state(self):
"""Retrieves the sample data loading state."""
state = c_int()
self._call("GetSampleLoadingState", byref(state))
return LOADING_STATE(state.value) | mit | -1,565,802,748,173,493,800 | 32.982456 | 78 | 0.657541 | false |