code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
---|---|---|---|---|---|
import threading
from collections import defaultdict
from funcy import once, decorator
from django.db import DEFAULT_DB_ALIAS, DatabaseError
from django.db.backends.utils import CursorWrapper
from django.db.transaction import Atomic, get_connection, on_commit
from .utils import monkey_mix
__all__ = ('queue_when_in_transaction', 'install_cacheops_transaction_support',
'transaction_states')
class TransactionState(list):
    """A stack of transaction/savepoint contexts for one database alias.

    Each context holds queued callbacks (``cbs``) and a ``dirty`` flag
    that records whether any data-modifying SQL ran inside it.
    """
    def begin(self):
        # Open a fresh context for a transaction or savepoint.
        self.append({'cbs': [], 'dirty': False})

    def commit(self):
        finished = self.pop()
        if not self:
            # Outermost transaction committed: fire the queued callbacks.
            for func, args, kwargs in finished['cbs']:
                func(*args, **kwargs)
        else:
            # Savepoint released: fold its state into the enclosing context.
            enclosing = self[-1]
            enclosing['cbs'].extend(finished['cbs'])
            if finished['dirty']:
                enclosing['dirty'] = True

    def rollback(self):
        # Discard the current context, dropping its queued callbacks.
        self.pop()

    def push(self, item):
        # Queue a (func, args, kwargs) triple to run on final commit.
        self[-1]['cbs'].append(item)

    def mark_dirty(self):
        self[-1]['dirty'] = True

    def is_dirty(self):
        # Dirty if any enclosing context saw a write.
        return any(ctx['dirty'] for ctx in self)
class TransactionStates(threading.local):
    """Thread-local mapping of database alias -> TransactionState."""
    def __init__(self):
        super(TransactionStates, self).__init__()
        self._states = defaultdict(TransactionState)

    def __getitem__(self, key):
        # Fall back to Django's default alias when no alias is given.
        alias = key or DEFAULT_DB_ALIAS
        return self._states[alias]

    def is_dirty(self, dbs):
        # Dirty if any of the given databases saw a write in its transaction.
        return any(self[alias].is_dirty() for alias in dbs)
# Process-wide (but thread-local) registry of transaction state, keyed by
# database alias.
transaction_states = TransactionStates()
@decorator
def queue_when_in_transaction(call):
    """Defer the wrapped call until commit when inside a transaction.

    Outside of a transaction the call runs (and returns) immediately;
    inside one it is queued and the decorator returns None.
    """
    state = transaction_states[call.using]
    if not state:
        # No active transaction - execute right away.
        return call()
    state.push((call, (), {}))
class AtomicMixIn(object):
    """Monkey-mix for ``django.db.transaction.Atomic``.

    Keeps ``transaction_states`` in step with Django's atomic blocks so
    queued callbacks only fire when the outermost transaction really
    commits. ``self._no_monkey`` proxies to the original Atomic methods.
    """
    def __enter__(self):
        # True only when entering the outermost atomic block for this alias.
        entering = not transaction_states[self.using]
        transaction_states[self.using].begin()
        self._no_monkey.__enter__(self)
        if entering:
            # Ask Django to flush our queued callbacks once it commits the
            # outermost transaction.
            on_commit(transaction_states[self.using].commit, self.using)

    def __exit__(self, exc_type, exc_value, traceback):
        connection = get_connection(self.using)
        try:
            self._no_monkey.__exit__(self, exc_type, exc_value, traceback)
        except DatabaseError:
            # The commit/rollback itself failed - treat as rolled back.
            transaction_states[self.using].rollback()
        else:
            if not connection.closed_in_transaction and exc_type is None and \
                    not connection.needs_rollback:
                # Guard: the state stack may already be empty (e.g. the
                # outer commit callback popped it) - only commit if not.
                if transaction_states[self.using]:
                    transaction_states[self.using].commit()
            else:
                transaction_states[self.using].rollback()
class CursorWrapperMixin(object):
    """Monkey-mix for Django's CursorWrapper.

    Marks the active transaction dirty whenever data-modifying SQL runs,
    so cacheops knows invalidation is needed on commit.
    """
    def callproc(self, procname, params=None):
        # Stored procedures may write anything, so always mark dirty.
        result = self._no_monkey.callproc(self, procname, params)
        state = transaction_states[self.db.alias]
        if state:
            state.mark_dirty()
        return result

    def execute(self, sql, params=None):
        result = self._no_monkey.execute(self, sql, params)
        state = transaction_states[self.db.alias]
        if state and is_sql_dirty(sql):
            state.mark_dirty()
        return result

    def executemany(self, sql, param_list):
        result = self._no_monkey.executemany(self, sql, param_list)
        state = transaction_states[self.db.alias]
        if state and is_sql_dirty(sql):
            state.mark_dirty()
        return result
# Characters that may appear inside an SQL identifier; used for the
# regex-free word-boundary check in is_sql_dirty().
CHARS = set('abcdefghijklmnopqrstuvwxyz_')

def is_sql_dirty(sql):
    """Return True if ``sql`` contains a data-modifying statement.

    Looks for the keywords update/insert/delete as whole words,
    case-insensitively. Accepts str or (against the db protocol) bytes.
    """
    # This should not happen as using bytes in Python 3 is against db protocol,
    # but some people will pass it anyway
    if isinstance(sql, bytes):
        sql = sql.decode()
    # NOTE: not using regex here for speed
    sql = sql.lower()
    for action in ('update', 'insert', 'delete'):
        # BUG FIX: scan every occurrence, not only the first one.
        # Previously "select last_update from t; update t set x = 1" was
        # reported clean because the first hit sat inside the identifier
        # "last_update" and later hits were never examined.
        p = sql.find(action)
        while p != -1:
            start, end = p - 1, p + len(action)
            # Whole-word check: neither neighbour is an identifier char.
            if (start < 0 or sql[start] not in CHARS) and \
                    (end >= len(sql) or sql[end] not in CHARS):
                return True
            p = sql.find(action, end)
    return False
@once
def install_cacheops_transaction_support():
    """Monkey-patch Django once to track transaction state.

    Mixes AtomicMixIn into ``django.db.transaction.Atomic`` and
    CursorWrapperMixin into ``CursorWrapper`` so cacheops can queue cache
    invalidation until commit and mark transactions dirty on writes.
    """
    monkey_mix(Atomic, AtomicMixIn)
    monkey_mix(CursorWrapper, CursorWrapperMixin)
| Suor/django-cacheops | cacheops/transaction.py | Python | bsd-3-clause | 4,124 |
# -*- coding: utf-8 -*-
"""
flaskbb.management.views
~~~~~~~~~~~~~~~~~~~~~~~~
This module handles the management views.
:copyright: (c) 2014 by the FlaskBB Team.
:license: BSD, see LICENSE for more details.
"""
import sys
from flask import (Blueprint, current_app, request, redirect, url_for, flash,
jsonify, __version__ as flask_version)
from flask_login import current_user, login_fresh
from flask_plugins import get_all_plugins, get_plugin, get_plugin_from_all
from flask_babelplus import gettext as _
from flask_allows import Permission, Not
from flaskbb import __version__ as flaskbb_version
from flaskbb._compat import iteritems
from flaskbb.forum.forms import UserSearchForm
from flaskbb.utils.settings import flaskbb_config
from flaskbb.utils.requirements import (IsAtleastModerator, IsAdmin,
CanBanUser, CanEditUser,
IsAtleastSuperModerator)
from flaskbb.extensions import db, allows
from flaskbb.utils.helpers import (render_template, time_diff, time_utcnow,
get_online_users)
from flaskbb.user.models import Guest, User, Group
from flaskbb.forum.models import Post, Topic, Forum, Category, Report
from flaskbb.management.models import Setting, SettingsGroup
from flaskbb.management.forms import (AddUserForm, EditUserForm, AddGroupForm,
EditGroupForm, EditForumForm,
AddForumForm, CategoryForm)
management = Blueprint("management", __name__)


@management.before_request
def check_fresh_login():
    """Checks if the login is fresh for the current user, otherwise the user
    has to reauthenticate."""
    # Runs before every request on this blueprint; a stale ("remembered")
    # session is bounced to Flask-Login's refresh view.
    if not login_fresh():
        return current_app.login_manager.needs_refresh()
@management.route("/")
@allows.requires(IsAtleastModerator)
def overview():
    """Render the management dashboard with user, forum and version stats."""
    # user and group stats
    banned_count = User.query.filter(
        Group.banned == True,
        Group.id == User.primary_group_id
    ).count()

    if current_app.config["REDIS_ENABLED"]:
        online_count = len(get_online_users())
    else:
        online_count = User.query.filter(User.lastseen >= time_diff()).count()

    return render_template(
        "management/overview.html",
        # user stats
        all_users=User.query.count(),
        banned_users=banned_count,
        online_users=online_count,
        all_groups=Group.query.count(),
        # forum stats
        report_count=Report.query.count(),
        topic_count=Topic.query.count(),
        post_count=Post.query.count(),
        # misc stats
        plugins=get_all_plugins(),
        python_version="%s.%s" % (sys.version_info[0], sys.version_info[1]),
        flask_version=flask_version,
        flaskbb_version=flaskbb_version
    )
@management.route("/settings", methods=["GET", "POST"])
@management.route("/settings/<path:slug>", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def settings(slug=None):
    """Shows and updates the settings of one settings group."""
    slug = slug or "general"

    # the group whose settings are being edited
    active_group = SettingsGroup.query.filter_by(key=slug).first_or_404()
    # all groups - used to build the navigation
    all_groups = SettingsGroup.query.all()

    SettingsForm = Setting.get_form(active_group)
    old_settings = Setting.get_settings(active_group)

    form = SettingsForm()
    if form.validate_on_submit():
        # collect only the settings whose value actually changed
        changed = {}
        for key, values in iteritems(old_settings):
            try:
                if values['value'] != form[key].data:
                    changed[key] = form[key].data
            except KeyError:
                # setting has no matching form field
                pass
        Setting.update(settings=changed, app=current_app)
        flash(_("Settings saved."), "success")
    else:
        # pre-populate the form with the stored values
        for key, values in iteritems(old_settings):
            try:
                form[key].data = values['value']
            except (KeyError, ValueError):
                pass
    return render_template("management/settings.html", form=form,
                           all_groups=all_groups, active_group=active_group)
# Users
@management.route("/users", methods=['GET', 'POST'])
@allows.requires(IsAtleastModerator)
def users():
    """Lists all users, optionally narrowed down by the search form."""
    page = request.args.get("page", 1, type=int)
    search_form = UserSearchForm()

    if search_form.validate():
        matched = search_form.get_results().\
            paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
        return render_template("management/users.html", users=matched,
                               search_form=search_form)

    paginated = User.query.\
        order_by(User.id.asc()).\
        paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
    return render_template("management/users.html", users=paginated,
                           search_form=search_form)
@management.route("/users/<int:user_id>/edit", methods=["GET", "POST"])
@allows.requires(IsAtleastModerator)
def edit_user(user_id):
    """Edits a user.

    The set of groups the editor may assign is restricted by their own
    permissions (moderators cannot hand out admin groups, etc.).
    """
    user = User.query.filter_by(id=user_id).first_or_404()

    if not Permission(CanEditUser, identity=current_user):
        flash(_("You are not allowed to edit this user."), "danger")
        return redirect(url_for("management.users"))

    # "plain member" groups: none of the special flags set
    member_group = db.and_(*[db.not_(getattr(Group, p)) for p in
                             ['admin', 'mod', 'super_mod', 'banned', 'guest']])

    # base choice: the editor's own groups plus plain member groups
    filt = db.or_(
        Group.id.in_(g.id for g in current_user.groups), member_group
    )

    # widen the choice step by step with the editor's privileges
    if Permission(IsAtleastSuperModerator, identity=current_user):
        filt = db.or_(filt, Group.mod)

    if Permission(IsAdmin, identity=current_user):
        filt = db.or_(filt, Group.admin, Group.super_mod)

    if Permission(CanBanUser, identity=current_user):
        filt = db.or_(filt, Group.banned)

    group_query = Group.query.filter(filt)

    form = EditUserForm(user)
    # limit the selectable groups in both group fields
    form.primary_group.query = group_query
    form.secondary_groups.query = group_query
    if form.validate_on_submit():
        form.populate_obj(user)
        user.primary_group_id = form.primary_group.data.id

        # Don't override the password
        if form.password.data:
            user.password = form.password.data

        user.save(groups=form.secondary_groups.data)

        flash(_("User updated."), "success")
        return redirect(url_for("management.edit_user", user_id=user.id))

    return render_template("management/user_form.html", form=form,
                           title=_("Edit User"))
@management.route("/users/delete", methods=["POST"])
@management.route("/users/<int:user_id>/delete", methods=["POST"])
@allows.requires(IsAdmin)
def delete_user(user_id=None):
    """Deletes a single user or, via ajax, a batch of users.

    The currently logged-in user can never be deleted.
    """
    # ajax request
    if request.is_xhr:
        ids = request.get_json()["ids"]

        data = []
        for user in User.query.filter(User.id.in_(ids)).all():
            # do not delete current user
            if current_user.id == user.id:
                continue

            if user.delete():
                data.append({
                    "id": user.id,
                    "type": "delete",
                    "reverse": False,
                    "reverse_name": None,
                    "reverse_url": None
                })

        return jsonify(
            message="{} users deleted.".format(len(data)),
            category="success",
            data=data,
            status=200
        )

    user = User.query.filter_by(id=user_id).first_or_404()

    if current_user.id == user.id:
        # BUG FIX: the category "danger" was previously passed to _()
        # instead of flash(), so the message got the default category.
        flash(_("You cannot delete yourself."), "danger")
        return redirect(url_for("management.users"))

    user.delete()
    flash(_("User deleted."), "success")
    return redirect(url_for("management.users"))
@management.route("/users/add", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def add_user():
    """Creates a new user."""
    form = AddUserForm()
    if not form.validate_on_submit():
        return render_template("management/user_form.html", form=form,
                               title=_("Add User"))

    form.save()
    flash(_("User added."), "success")
    return redirect(url_for("management.users"))
@management.route("/users/banned", methods=["GET", "POST"])
@allows.requires(IsAtleastModerator)
def banned_users():
    """Lists all banned users, optionally narrowed by the search form."""
    page = request.args.get("page", 1, type=int)
    search_form = UserSearchForm()

    if search_form.validate():
        # show only the users matching the search query
        users = search_form.get_results().\
            paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
    else:
        # everyone whose primary group carries the banned flag
        users = User.query.filter(
            Group.banned == True,
            Group.id == User.primary_group_id
        ).paginate(page, flaskbb_config['USERS_PER_PAGE'], False)

    return render_template("management/banned_users.html", users=users,
                           search_form=search_form)
@management.route("/users/ban", methods=["POST"])
@management.route("/users/<int:user_id>/ban", methods=["POST"])
@allows.requires(IsAtleastModerator)
def ban_user(user_id=None):
    """Bans a single user or, via ajax, a batch of users.

    Nobody can ban himself and a non-admin cannot ban an admin.
    """
    if not Permission(CanBanUser, identity=current_user):
        flash(_("You do not have the permissions to ban this user."), "danger")
        return redirect(url_for("management.overview"))

    # ajax request
    if request.is_xhr:
        ids = request.get_json()["ids"]

        data = []
        users = User.query.filter(User.id.in_(ids)).all()
        for user in users:
            # don't let a user ban himself and do not allow a moderator to ban
            # a admin user
            # BUG FIX: current_user must be passed via the identity keyword;
            # passed positionally it is treated as a requirement by
            # flask-allows, not as the identity to check.
            if (
                current_user.id == user.id or
                (Permission(IsAdmin, identity=user) and
                 Permission(Not(IsAdmin), identity=current_user))
            ):
                continue
            elif user.ban():
                data.append({
                    "id": user.id,
                    "type": "ban",
                    "reverse": "unban",
                    "reverse_name": _("Unban"),
                    "reverse_url": url_for("management.unban_user",
                                           user_id=user.id)
                })

        return jsonify(
            message="{} users banned.".format(len(data)),
            category="success",
            data=data,
            status=200
        )

    user = User.query.filter_by(id=user_id).first_or_404()

    # Do not allow moderators to ban admins
    if Permission(IsAdmin, identity=user) and \
            Permission(Not(IsAdmin), identity=current_user):
        flash(_("A moderator cannot ban an admin user."), "danger")
        return redirect(url_for("management.overview"))

    if not current_user.id == user.id and user.ban():
        flash(_("User is now banned."), "success")
    else:
        flash(_("Could not ban user."), "danger")
    return redirect(url_for("management.banned_users"))
@management.route("/users/unban", methods=["POST"])
@management.route("/users/<int:user_id>/unban", methods=["POST"])
@allows.requires(IsAtleastModerator)
def unban_user(user_id=None):
    """Unbans a single user or, via ajax, a batch of users."""
    if not Permission(CanBanUser, identity=current_user):
        flash(_("You do not have the permissions to unban this user."),
              "danger")
        return redirect(url_for("management.overview"))

    # ajax request
    if request.is_xhr:
        ids = request.get_json()["ids"]

        data = []
        for user in User.query.filter(User.id.in_(ids)).all():
            if not user.unban():
                continue
            data.append({
                "id": user.id,
                "type": "unban",
                "reverse": "ban",
                "reverse_name": _("Ban"),
                "reverse_url": url_for("management.ban_user",
                                       user_id=user.id)
            })

        return jsonify(
            message="{} users unbanned.".format(len(data)),
            category="success",
            data=data,
            status=200
        )

    user = User.query.filter_by(id=user_id).first_or_404()

    if user.unban():
        flash(_("User is now unbanned."), "success")
    else:
        flash(_("Could not unban user."), "danger")
    return redirect(url_for("management.banned_users"))
# Reports
@management.route("/reports")
@allows.requires(IsAtleastModerator)
def reports():
    """Lists all reports, oldest first."""
    page = request.args.get("page", 1, type=int)
    paginated = Report.query \
        .order_by(Report.id.asc()) \
        .paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
    return render_template("management/reports.html", reports=paginated)
@management.route("/reports/unread")
@allows.requires(IsAtleastModerator)
def unread_reports():
    """Lists the reports that have not been marked as read, newest first."""
    page = request.args.get("page", 1, type=int)
    # zapped is NULL while the report is unread
    paginated = Report.query \
        .filter(Report.zapped == None) \
        .order_by(Report.id.desc()) \
        .paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
    return render_template("management/unread_reports.html",
                           reports=paginated)
@management.route("/reports/<int:report_id>/markread", methods=["POST"])
@management.route("/reports/markread", methods=["POST"])
@allows.requires(IsAtleastModerator)
def report_markread(report_id=None):
    """Marks reports as read ("zapped").

    Three modes:
    * ajax request   - mark the posted list of report ids as read
    * ``report_id``  - mark that single report as read
    * neither        - mark every unread report as read
    """
    # AJAX request
    if request.is_xhr:
        ids = request.get_json()["ids"]
        data = []
        for report in Report.query.filter(Report.id.in_(ids)).all():
            report.zapped_by = current_user.id
            report.zapped = time_utcnow()
            report.save()
            data.append({
                "id": report.id,
                "type": "read",
                "reverse": False,
                "reverse_name": None,
                "reverse_url": None
            })

        return jsonify(
            message="{} reports marked as read.".format(len(data)),
            category="success",
            data=data,
            status=200
        )

    # mark single report as read
    if report_id:
        report = Report.query.filter_by(id=report_id).first_or_404()
        if report.zapped:
            flash(_("Report %(id)s is already marked as read.", id=report.id),
                  "success")
            return redirect(url_for("management.reports"))

        report.zapped_by = current_user.id
        report.zapped = time_utcnow()
        report.save()

        flash(_("Report %(id)s marked as read.", id=report.id), "success")
        return redirect(url_for("management.reports"))

    # mark all as read
    reports = Report.query.filter(Report.zapped == None).all()
    report_list = []
    for report in reports:
        report.zapped_by = current_user.id
        report.zapped = time_utcnow()
        report_list.append(report)

    # one commit for the whole batch instead of a save() per report
    db.session.add_all(report_list)
    db.session.commit()

    flash(_("All reports were marked as read."), "success")
    return redirect(url_for("management.reports"))
# Groups
@management.route("/groups")
@allows.requires(IsAdmin)
def groups():
    """Lists all groups."""
    page = request.args.get("page", 1, type=int)
    paginated = Group.query \
        .order_by(Group.id.asc()) \
        .paginate(page, flaskbb_config['USERS_PER_PAGE'], False)
    return render_template("management/groups.html", groups=paginated)
@management.route("/groups/<int:group_id>/edit", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def edit_group(group_id):
    """Edits an existing group."""
    group = Group.query.filter_by(id=group_id).first_or_404()

    form = EditGroupForm(group)
    if not form.validate_on_submit():
        return render_template("management/group_form.html", form=form,
                               title=_("Edit Group"))

    form.populate_obj(group)
    group.save()

    # the guest group is cached - drop the stale entry
    if group.guest:
        Guest.invalidate_cache()

    flash(_("Group updated."), "success")
    return redirect(url_for("management.groups", group_id=group.id))
@management.route("/groups/<int:group_id>/delete", methods=["POST"])
@management.route("/groups/delete", methods=["POST"])
@allows.requires(IsAdmin)
def delete_group(group_id=None):
    """Deletes a group; the five standard groups (ids 1-5) are protected."""
    if request.is_xhr:
        ids = request.get_json()["ids"]

        # refuse the whole batch if it touches a standard group
        if not (set(ids) & set(["1", "2", "3", "4", "5"])):
            data = []
            for group in Group.query.filter(Group.id.in_(ids)).all():
                group.delete()
                data.append({
                    "id": group.id,
                    "type": "delete",
                    "reverse": False,
                    "reverse_name": None,
                    "reverse_url": None
                })

            return jsonify(
                message="{} groups deleted.".format(len(data)),
                category="success",
                data=data,
                status=200
            )
        return jsonify(
            message=_("You cannot delete one of the standard groups."),
            category="danger",
            data=None,
            status=404
        )

    if group_id is not None:
        if group_id <= 5:  # there are 5 standard groups
            # BUG FIX: the category "danger" was previously passed to _()
            # instead of flash(), so the message got the default category.
            flash(_("You cannot delete the standard groups. "
                    "Try renaming it instead."), "danger")
            return redirect(url_for("management.groups"))

        group = Group.query.filter_by(id=group_id).first_or_404()
        group.delete()
        flash(_("Group deleted."), "success")
        return redirect(url_for("management.groups"))

    flash(_("No group chosen."), "danger")
    return redirect(url_for("management.groups"))
@management.route("/groups/add", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def add_group():
    """Creates a new group."""
    form = AddGroupForm()
    if not form.validate_on_submit():
        return render_template("management/group_form.html", form=form,
                               title=_("Add Group"))

    form.save()
    flash(_("Group added."), "success")
    return redirect(url_for("management.groups"))
# Forums and Categories
@management.route("/forums")
@allows.requires(IsAdmin)
def forums():
    """Shows all categories together with their forums."""
    all_categories = Category.query.order_by(Category.position.asc()).all()
    return render_template("management/forums.html",
                           categories=all_categories)
@management.route("/forums/<int:forum_id>/edit", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def edit_forum(forum_id):
    """Edits a forum; moderators are shown as comma separated usernames."""
    forum = Forum.query.filter_by(id=forum_id).first_or_404()

    form = EditForumForm(forum)
    if form.validate_on_submit():
        form.save()
        flash(_("Forum updated."), "success")
        return redirect(url_for("management.edit_forum", forum_id=forum.id))

    # GET request (or invalid POST): render the moderator list as text
    if forum.moderators:
        form.moderators.data = ",".join(
            user.username for user in forum.moderators)
    else:
        form.moderators.data = None

    return render_template("management/forum_form.html", form=form,
                           title=_("Edit Forum"))
@management.route("/forums/<int:forum_id>/delete", methods=["POST"])
@allows.requires(IsAdmin)
def delete_forum(forum_id):
    """Deletes a forum and hands the affected users to ``forum.delete()``.

    NOTE(review): the query relates Topic to the forum and Post to User,
    but never Post to Topic - presumably Forum.delete() only needs the
    set of users for post-count recalculation; verify against its
    implementation.
    """
    forum = Forum.query.filter_by(id=forum_id).first_or_404()

    involved_users = User.query.filter(Topic.forum_id == forum.id,
                                       Post.user_id == User.id).all()

    forum.delete(involved_users)

    flash(_("Forum deleted."), "success")
    return redirect(url_for("management.forums"))
@management.route("/forums/add", methods=["GET", "POST"])
@management.route("/forums/<int:category_id>/add", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def add_forum(category_id=None):
    """Creates a new forum, optionally pre-selecting a category."""
    form = AddForumForm()

    if form.validate_on_submit():
        form.save()
        flash(_("Forum added."), "success")
        return redirect(url_for("management.forums"))

    # sensible defaults for the GET request
    form.groups.data = Group.query.order_by(Group.id.asc()).all()
    if category_id:
        form.category.data = Category.query.filter_by(id=category_id).first()

    return render_template("management/forum_form.html", form=form,
                           title=_("Add Forum"))
@management.route("/category/add", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def add_category():
    """Creates a new category."""
    form = CategoryForm()
    if not form.validate_on_submit():
        return render_template("management/category_form.html", form=form,
                               title=_("Add Category"))

    form.save()
    flash(_("Category added."), "success")
    return redirect(url_for("management.forums"))
@management.route("/category/<int:category_id>/edit", methods=["GET", "POST"])
@allows.requires(IsAdmin)
def edit_category(category_id):
    """Edits an existing category."""
    category = Category.query.filter_by(id=category_id).first_or_404()

    form = CategoryForm(obj=category)
    if form.validate_on_submit():
        form.populate_obj(category)
        # save first so the flash only shows up once persisting succeeded
        category.save()
        flash(_("Category updated."), "success")
        # redirect after a successful POST, consistent with
        # edit_forum/edit_group (and avoiding double-submits)
        return redirect(url_for("management.edit_category",
                                category_id=category.id))

    return render_template("management/category_form.html", form=form,
                           title=_("Edit Category"))
@management.route("/category/<int:category_id>/delete", methods=["POST"])
@allows.requires(IsAdmin)
def delete_category(category_id):
    """Deletes a category including all forums below it.

    NOTE(review): as in delete_forum, Post is related to User but not to
    Topic in this filter - presumably Category.delete() only needs the
    user set for post-count recalculation; verify against its
    implementation.
    """
    category = Category.query.filter_by(id=category_id).first_or_404()

    involved_users = User.query.filter(Forum.category_id == category.id,
                                       Topic.forum_id == Forum.id,
                                       Post.user_id == User.id).all()

    category.delete(involved_users)

    flash(_("Category with all associated forums deleted."), "success")
    return redirect(url_for("management.forums"))
# Plugins
@management.route("/plugins")
@allows.requires(IsAdmin)
def plugins():
    """Lists all installed plugins."""
    return render_template("management/plugins.html",
                           plugins=get_all_plugins())
@management.route("/plugins/<path:plugin>/enable", methods=["POST"])
@allows.requires(IsAdmin)
def enable_plugin(plugin):
    """Enables a plugin; a FlaskBB restart is required afterwards."""
    plugin = get_plugin_from_all(plugin)

    if plugin.enabled:
        flash(_("Plugin %(plugin)s is already enabled.", plugin=plugin.name),
              "info")
        return redirect(url_for("management.plugins"))

    try:
        plugin.enable()
    except OSError:
        # the enabled/disabled state is toggled via a marker file on disk
        flash(_("It seems that FlaskBB does not have enough filesystem "
                "permissions. Try removing the 'DISABLED' file by "
                "yourself instead."), "danger")
    else:
        flash(_("Plugin %(plugin)s enabled. Please restart FlaskBB now.",
                plugin=plugin.name), "success")

    return redirect(url_for("management.plugins"))
@management.route("/plugins/<path:plugin>/disable", methods=["POST"])
@allows.requires(IsAdmin)
def disable_plugin(plugin):
    """Disables a plugin; a FlaskBB restart is required afterwards."""
    try:
        plugin = get_plugin(plugin)
    except KeyError:
        # BUG FIX: when the lookup fails, `plugin` is still the requested
        # name (a string), so the old `plugin.name` raised AttributeError.
        flash(_("Plugin %(plugin)s not found.", plugin=plugin), "danger")
        return redirect(url_for("management.plugins"))

    try:
        plugin.disable()
        flash(_("Plugin %(plugin)s disabled. Please restart FlaskBB now.",
                plugin=plugin.name), "success")
    except OSError:
        # the enabled/disabled state is toggled via a marker file on disk
        flash(_("It seems that FlaskBB does not have enough filesystem "
                "permissions. Try creating the 'DISABLED' file by "
                "yourself instead."), "danger")

    return redirect(url_for("management.plugins"))
@management.route("/plugins/<path:plugin>/uninstall", methods=["POST"])
@allows.requires(IsAdmin)
def uninstall_plugin(plugin):
    """Uninstalls a plugin if it supports uninstallation."""
    plugin = get_plugin_from_all(plugin)

    if not plugin.uninstallable:
        flash(_("Cannot uninstall plugin."), "danger")
        return redirect(url_for("management.plugins"))

    plugin.uninstall()
    # settings may have been removed - drop the cached ones
    Setting.invalidate_cache()
    flash(_("Plugin has been uninstalled."), "success")
    return redirect(url_for("management.plugins"))
@management.route("/plugins/<path:plugin>/install", methods=["POST"])
@allows.requires(IsAdmin)
def install_plugin(plugin):
    """Installs a plugin if it is installable and not yet installed."""
    plugin = get_plugin_from_all(plugin)

    if not (plugin.installable and not plugin.uninstallable):
        flash(_("Cannot install plugin."), "danger")
        return redirect(url_for("management.plugins"))

    plugin.install()
    # new settings may have been added - drop the cached ones
    Setting.invalidate_cache()
    flash(_("Plugin has been installed."), "success")
    return redirect(url_for("management.plugins"))
| realityone/flaskbb | flaskbb/management/views.py | Python | bsd-3-clause | 24,085 |
# -*- coding: utf-8 -*-
"""
Display current network and ip address for newer Huwei modems.
It is tested for Huawei E3276 (usb-id 12d1:1506) aka Telekom Speed
Stick LTE III but may work on other devices, too.
DEPENDENCIES:
- netifaces
- pyserial
Configuration parameters:
- baudrate : There should be no need to configure this, but
feel free to experiment.
Default is 115200.
- cache_timeout : How often we refresh this module in seconds.
Default is 5.
- consider_3G_degraded : If set to True, only 4G-networks will be
considered 'good'; 3G connections are shown
as 'degraded', which is yellow by default. Mostly
useful if you want to keep track of where there
is a 4G connection.
Default is False.
- format_down : What to display when the modem is not plugged in
Default is: 'WWAN: down'
- format_error : What to display when modem can't be accessed.
Default is 'WWAN: {error}'
- format_no_service : What to display when the modem does not have a
network connection. This allows to omit the then
meaningless network generation. Therefore the
default is 'WWAN: ({status}) {ip}'
- format_up : What to display upon regular connection
Default is 'WWAN: ({status}/{netgen}) {ip}'
- interface : The default interface to obtain the IP address
from. For wvdial this is most likely ppp0.
For netctl it can be different.
Default is: ppp0
- modem : The device to send commands to. Default is
- modem_timeout : The timespan betwenn querying the modem and
collecting the response.
Default is 0.4 (which should be sufficient)
@author Timo Kohorst [email protected]
PGP: B383 6AE6 6B46 5C45 E594 96AB 89D2 209D DBF3 2BB5
"""
import subprocess
import netifaces as ni
import os
import stat
import serial
from time import time, sleep
class Py3status:
    """py3status module showing WWAN network state for newer Huawei modems."""
    # configuration parameters (overridable from the i3status config)
    baudrate = 115200
    cache_timeout = 5
    consider_3G_degraded = False
    format_down = 'WWAN: down'
    format_error = 'WWAN: {error}'
    format_no_service = 'WWAN: {status} {ip}'
    format_up = 'WWAN: {status} ({netgen}) {ip}'
    interface = "ppp0"
    modem = "/dev/ttyUSB1"
    modem_timeout = 0.4

    def wwan_status(self, i3s_output_list, i3s_config):
        """Query the modem via an AT command and build the i3bar response."""
        query = "AT^SYSINFOEX"
        target_line = "^SYSINFOEX"

        # Set up the highest network generation to display as degraded
        if self.consider_3G_degraded:
            degraded_netgen = 3
        else:
            degraded_netgen = 2

        response = {}
        response['cached_until'] = time() + self.cache_timeout

        # Check if path exists and is a character device
        if os.path.exists(self.modem) and stat.S_ISCHR(os.stat(
                self.modem).st_mode):
            print("Found modem " + self.modem)
            try:
                ser = serial.Serial(
                    port=self.modem,
                    baudrate=self.baudrate,
                    # Values below work for my modem. Not sure if
                    # they neccessarily work for all modems
                    parity=serial.PARITY_ODD,
                    stopbits=serial.STOPBITS_ONE,
                    bytesize=serial.EIGHTBITS)
                if ser.isOpen():
                    ser.close()
                ser.open()
                ser.write((query + "\r").encode())
                print("Issued query to " + self.modem)
                sleep(self.modem_timeout)
                n = ser.inWaiting()
                modem_response = ser.read(n)
                ser.close()
            except (OSError, serial.SerialException):
                # BUG FIX: this was a bare `except:` followed by a stray,
                # no-op `PermissionError` expression statement; catch the
                # concrete I/O errors instead. This will happen...
                # 1) in the short timespan between the creation of the device
                # node and udev changing the permissions. If this message
                # persists, double check you are using the proper device file
                # 2) if/when you unplug the device
                print("Permission error")
                response['full_text'] = self.format_error.format(
                    error="no access to " + self.modem)
                response['color'] = i3s_config['color_bad']
                return response
            # Dissect response
            for line in modem_response.decode("utf-8").split('\n'):
                print(line)
                if line.startswith(target_line):
                    # Determine IP once the modem responds
                    ip = self._get_ip(self.interface)
                    if not ip:
                        ip = "no ip"
                    modem_answer = line.split(',')
                    netgen = len(modem_answer[-2]) + 1
                    netmode = modem_answer[-1].rstrip()[1:-1]
                    if netmode == "NO SERVICE":
                        response['full_text'] = self.format_no_service.format(
                            status=netmode,
                            ip=ip)
                        response['color'] = i3s_config['color_bad']
                    else:
                        response['full_text'] = self.format_up.format(
                            status=netmode,
                            netgen=str(netgen) + "G",
                            ip=ip)
                        if netgen <= degraded_netgen:
                            response['color'] = i3s_config['color_degraded']
                        else:
                            response['color'] = i3s_config['color_good']
                elif line.startswith("COMMAND NOT SUPPORT") or line.startswith(
                        "ERROR"):
                    response['color'] = i3s_config['color_bad']
                    response['full_text'] = self.format_error.format(
                        error="unsupported modem")
                else:
                    # Outputs can be multiline, so just try the next one
                    pass
        else:
            print(self.modem + " not found")
            response['color'] = i3s_config['color_bad']
            response['full_text'] = self.format_down
        return response

    def _get_ip(self, interface):
        """
        Returns the interface's IPv4 address if device exists and has a valid
        ip address. Otherwise, returns an empty string
        """
        if interface in ni.interfaces():
            addresses = ni.ifaddresses(interface)
            if ni.AF_INET in addresses:
                return addresses[ni.AF_INET][0]['addr']
        return ""
if __name__ == "__main__":
    # Ad-hoc manual test: poll the modem once per second and print the
    # rendered status dict, using stand-in i3status colors.
    from time import sleep
    x = Py3status()
    config = {
        'color_good': '#00FF00',
        'color_bad': '#FF0000',
        'color_degraded': '#FFFF00',
    }
    while True:
        print(x.wwan_status([], config))
        sleep(1)
| hburg1234/py3status | py3status/modules/wwan_status.py | Python | bsd-3-clause | 7,301 |
"""Schedule models.
Much of this module is derived from the work of Eldarion on the
`Symposion <https://github.com/pinax/symposion>`_ project.
Copyright (c) 2010-2014, Eldarion, Inc. and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Eldarion, Inc. nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from bisect import bisect_left
from itertools import tee
from cached_property import cached_property
from sqlalchemy import func
from pygotham.core import db
__all__ = ('Day', 'Room', 'Slot', 'Presentation')
def pairwise(iterable):
    """Yield consecutive overlapping pairs from ``iterable``.

    Equivalent to the itertools recipe at
    https://docs.python.org/3/library/itertools.html#itertools-recipes.
    """
    iterator = iter(iterable)
    previous = next(iterator, None)
    for current in iterator:
        yield (previous, current)
        previous = current
# Association table linking slots to the rooms they span; Day.__iter__
# counts these rows per slot to compute the schedule-grid colspan.
rooms_slots = db.Table(
    'rooms_slots',
    db.Column('slot_id', db.Integer, db.ForeignKey('slots.id')),
    db.Column('room_id', db.Integer, db.ForeignKey('rooms.id')),
)
class Day(db.Model):
    """Day of talks."""

    __tablename__ = 'days'

    id = db.Column(db.Integer, primary_key=True)
    date = db.Column(db.Date)

    event_id = db.Column(
        db.Integer, db.ForeignKey('events.id'), nullable=False)
    event = db.relationship(
        'Event', backref=db.backref('days', lazy='dynamic'))

    def __str__(self):
        """Return a printable representation."""
        return self.date.strftime('%B %d, %Y')

    @cached_property
    def rooms(self):
        """Return the rooms for the day."""
        return Room.query.join(rooms_slots, Slot).filter(
            Slot.day == self).order_by(Room.order).all()

    def __iter__(self):
        """Iterate over the schedule for the day."""
        if not self.rooms:
            # BUG FIX: this was `raise StopIteration`, which inside a
            # generator raises RuntimeError under PEP 479 (Python 3.7+).
            # A bare return ends the generator cleanly.
            return

        def rowspan(start, end):
            """Find the rowspan for an entry in the schedule table.

            This uses a binary search for the given end time from a
            sorted list of start times in order to find the index of the
            first start time that occurs after the given end time. This
            method is used to prevent issues that can occur with
            overlapping start and end times being included in the same
            list.
            """
            return bisect_left(times, end) - times.index(start)

        times = sorted({slot.start for slot in self.slots})
        # While we typically only care about the start times here, the
        # list is iterated over two items at a time. Without adding a
        # final element, the last time slot would be omitted. Any value
        # could be used here as bisect_left only assumes the list is
        # sorted, but using a meaningful value feels better.
        times.append(self.slots[-1].end)

        slots = db.session.query(
            Slot.id,
            Slot.content_override,
            Slot.kind,
            Slot.start,
            Slot.end,
            func.count(rooms_slots.c.slot_id).label('room_count'),
            func.min(Room.order).label('order'),
        ).join(rooms_slots, Room).filter(Slot.day == self).order_by(
            func.count(rooms_slots.c.slot_id), func.min(Room.order)
        ).group_by(
            Slot.id, Slot.content_override, Slot.kind, Slot.start, Slot.end
        ).all()

        for time, next_time in pairwise(times):
            row = {'time': time, 'slots': []}
            for slot in slots:
                if slot.start == time:
                    slot.rowspan = rowspan(slot.start, slot.end)
                    slot.colspan = slot.room_count
                    if not slot.content_override:
                        slot.presentation = Presentation.query.filter(
                            Presentation.slot_id == slot.id).first()
                    row['slots'].append(slot)
            if row['slots'] or next_time is None:
                yield row
class Room(db.Model):
    """Room of talks."""
    __tablename__ = 'rooms'
    id = db.Column(db.Integer, primary_key=True)
    # Human-readable room name shown in the schedule.
    name = db.Column(db.String(255), nullable=False)
    # Display ordering of rooms across the schedule table columns.
    order = db.Column(db.Integer, nullable=False)
    def __str__(self):
        """Return a printable representation."""
        return self.name
class Slot(db.Model):
    """Time slot."""
    __tablename__ = 'slots'
    id = db.Column(db.Integer, primary_key=True)
    # The kind of scheduled item occupying this slot.
    kind = db.Column(
        db.Enum(
            'break', 'meal', 'keynote', 'talk', 'tutorial', name='slotkind'),
        nullable=False,
    )
    # Optional literal text to display instead of a presentation title.
    content_override = db.Column(db.Text)
    start = db.Column(db.Time, nullable=False)
    end = db.Column(db.Time, nullable=False)
    day_id = db.Column(db.Integer, db.ForeignKey('days.id'), nullable=False)
    day = db.relationship('Day', backref=db.backref('slots', lazy='dynamic'))
    # Many-to-many: a slot may span several rooms (e.g. a keynote).
    rooms = db.relationship(
        'Room',
        secondary=rooms_slots,
        backref=db.backref('slots', lazy='dynamic'),
        order_by=Room.order,
    )
    def __str__(self):
        """Return a printable representation."""
        start = self.start.strftime('%I:%M %p')
        end = self.end.strftime('%I:%M %p')
        rooms = ', '.join(map(str, self.rooms))
        return '{} - {} on {}, {}'.format(start, end, self.day, rooms)
    @cached_property
    def duration(self):
        """Return the duration as a :class:`~datetime.timedelta`."""
        return self.end - self.start
class Presentation(db.Model):
    """Presentation of a talk."""
    __tablename__ = 'presentations'
    id = db.Column(db.Integer, primary_key=True)
    slot_id = db.Column(db.Integer, db.ForeignKey('slots.id'), nullable=False)
    slot = db.relationship(
        'Slot', backref=db.backref('presentation', uselist=False))
    talk_id = db.Column(db.Integer, db.ForeignKey('talks.id'), nullable=False)
    talk = db.relationship(
        'Talk', backref=db.backref('presentation', uselist=False))

    def __str__(self):
        """Return a printable representation."""
        return str(self.talk)

    def is_in_all_rooms(self):
        """Return whether the instance is in all rooms."""
        # Fix: ``number_of_rooms`` is defined on Presentation, not Slot, so
        # ``self.slot.number_of_rooms`` raised AttributeError at runtime.
        # NOTE(review): the total room count (4) is hard-coded — confirm it
        # matches the event's actual number of rooms.
        return self.number_of_rooms == 4

    @cached_property
    def number_of_rooms(self):
        """Return the number of rooms for the instance."""
        return len(self.slot.rooms)
| djds23/pygotham-1 | pygotham/schedule/models.py | Python | bsd-3-clause | 7,610 |
# proxy module
from apptools.logger.util import *
| enthought/etsproxy | enthought/logger/util.py | Python | bsd-3-clause | 50 |
import re
from django import template
from django.core.urlresolvers import NoReverseMatch
from django.core.urlresolvers import reverse
register = template.Library()
@register.simple_tag(takes_context=True)
def active(context, name):
    """Return ``'active'`` when the current request path matches the URL
    reversed from *name*, or ``''`` otherwise (including when *name* does
    not reverse to any URL)."""
    try:
        url_pattern = reverse(name)
    except NoReverseMatch:
        return ''
    # NOTE(review): the reversed URL is used as a regex anchored at the
    # start of the path; assumes it contains no regex metacharacters.
    request_path = context['request'].path
    return 'active' if re.match(url_pattern, request_path) else ''
| jbittel/django-signage | signage/templatetags/active.py | Python | bsd-3-clause | 413 |
import tests.model_control.test_ozone_custom_models_enabled as testmod
# Build one pyaf model combination: Difference transform, linear trend,
# hourly seasonality, and no autoregressive component.
testmod.build_model( ['Difference'] , ['LinearTrend'] , ['Seasonal_Hour'] , ['NoAR'] );
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-05-03 02:00
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Drops the legacy ``couch_id`` column from SQLCommtrackConfig now that
    # the couch-to-SQL data migration (0005) has populated the SQL models.
    dependencies = [
        ('commtrack', '0005_populate_config_models'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='sqlcommtrackconfig',
            name='couch_id',
        ),
    ]
| dimagi/commcare-hq | corehq/apps/commtrack/migrations/0006_remove_sqlcommtrackconfig_couch_id.py | Python | bsd-3-clause | 414 |
#!/usr/bin/env python
'''
isobands_matplotlib.py is a script for creating isobands.
Works in a similar way as gdal_contour, but creating polygons
instead of polylines
This version requires matplotlib, but there is another one,
isobands_gdal.py that uses only GDAL python
Originally created by Roger Veciana i Rovira, made available via his
blog post
http://geoexamples.blogspot.com.au/2013/08/creating-vectorial-isobands-with-python.html
and on Github at https://github.com/rveciana/geoexamples/tree/master/python/raster_isobands
'''
from numpy import arange
from numpy import meshgrid
from osgeo import ogr
from osgeo import gdal
from osgeo import osr
from math import floor
from math import ceil
from os.path import exists
from os import remove
from argparse import ArgumentParser
import matplotlib.pyplot as plt
def str2bool(v):
    """Interpret a command-line string as a boolean flag.

    Returns True only for (case-insensitive) "yes", "true", "t" or "1".
    """
    truthy_values = ("yes", "true", "t", "1")
    return v.lower() in truthy_values
def isobands(in_file, band, out_file, out_format, layer_name, attr_name,
        offset, interval, min_level = None, upper_val_output = False):
    '''
    Calculate isoband polygons from a raster band and write them to a
    vector file.

    in_file/band: source raster and band index to contour.
    out_file/out_format: destination vector file (overwritten if present)
        and its OGR driver name.
    layer_name/attr_name: output layer name and the real-valued field that
        receives each polygon's level.
    offset/interval: isoband levels are offset + k*interval.
    min_level: lowest level; derived from the raster minimum when None.
    upper_val_output: when True, label polygons with the band's upper
        bound instead of its lower bound.
    '''
    ds_in = gdal.Open(in_file)
    band_in = ds_in.GetRasterBand(band)
    xsize_in = band_in.XSize
    ysize_in = band_in.YSize
    geotransform_in = ds_in.GetGeoTransform()
    # Carry the source projection over to the output layer.
    srs = osr.SpatialReference()
    srs.ImportFromWkt( ds_in.GetProjectionRef() )
    #Creating the output vectorial file
    drv = ogr.GetDriverByName(out_format)
    if exists(out_file):
        remove(out_file)
    dst_ds = drv.CreateDataSource( out_file )
    dst_layer = dst_ds.CreateLayer(layer_name, geom_type = ogr.wkbPolygon,
        srs = srs)
    fdef = ogr.FieldDefn( attr_name, ogr.OFTReal )
    dst_layer.CreateField( fdef )
    # Use the geotransform pixel size value to avoid weird rounding errors in
    # original approach.
    x_pos = [geotransform_in[0]+geotransform_in[1]*ii \
        for ii in range(xsize_in)]
    y_pos = [geotransform_in[3]+geotransform_in[5]*ii \
        for ii in range(ysize_in)]
    #x_pos = arange(geotransform_in[0],
    #    geotransform_in[0] + xsize_in*geotransform_in[1], geotransform_in[1])
    #y_pos = arange(geotransform_in[3],
    #    geotransform_in[3] + ysize_in*geotransform_in[5], geotransform_in[5])
    x_grid, y_grid = meshgrid(x_pos, y_pos)
    raster_values = band_in.ReadAsArray(0, 0, xsize_in, ysize_in)
    #stats = band_in.GetStatistics(True, True)
    min_value, max_value = band_in.ComputeRasterMinMax()
    # Snap the lowest level onto the offset + k*interval grid.
    if min_level == None:
        #min_value = stats[0]
        min_level = offset + interval * floor((min_value - offset)/interval)
    #max_value = stats[1]
    #Due to range issues, a level is added
    max_level = offset + interval * (1 + ceil((max_value - offset)/interval))
    levels = arange(min_level, max_level, interval)
    # matplotlib does the actual isoband computation; each collection holds
    # the filled-contour paths for one level.
    contours = plt.contourf(x_grid, y_grid, raster_values, levels)
    for level in range(len(contours.collections)):
        paths = contours.collections[level].get_paths()
        for path in paths:
            feat_out = ogr.Feature( dst_layer.GetLayerDefn())
            if upper_val_output:
                out_val = contours.levels[level] + interval
            else:
                out_val = contours.levels[level]
            feat_out.SetField( attr_name, out_val )
            pol = ogr.Geometry(ogr.wkbPolygon)
            ring = None
            # Path code 1 (MOVETO) starts a new ring; other vertices extend
            # the current ring.
            for i in range(len(path.vertices)):
                point = path.vertices[i]
                if path.codes[i] == 1:
                    if ring != None:
                        pol.AddGeometry(ring)
                    ring = ogr.Geometry(ogr.wkbLinearRing)
                ring.AddPoint_2D(point[0], point[1])
            pol.AddGeometry(ring)
            feat_out.SetGeometry(pol)
            if dst_layer.CreateFeature(feat_out) != 0:
                print "Failed to create feature in shapefile.\n"
                exit( 1 )
            feat_out.Destroy()
if __name__ == "__main__":
PARSER = ArgumentParser(
description="Calculates the isobands from a raster into a vector file")
PARSER.add_argument("src_file", help="The raster source file")
PARSER.add_argument("out_file", help="The vectorial out file")
PARSER.add_argument("-b",
help="The band in the source file to process (default 1)",
type=int, default = 1, metavar = 'band')
PARSER.add_argument("-off",
help="The offset to start the isobands (default 0)",
type=float, default = 0.0, metavar = 'offset')
PARSER.add_argument("-i",
help="The interval (default 0)",
type=float, default = 0.0, metavar = 'interval')
PARSER.add_argument("-nln",
help="The out layer name (default bands)",
default = 'bands', metavar = 'layer_name')
PARSER.add_argument("-a",
help="The out layer attribute name (default h)",
default = 'h', metavar = 'attr_name')
PARSER.add_argument("-f",
help="The output file format name (default ESRI Shapefile)",
default = 'ESRI Shapefile', metavar = 'formatname')
PARSER.add_argument("-up",
help="In the output file, whether to use the upper value of an "
"isoband, as value name for polygons, rather than lower.",
default = "False", metavar='upper_val_output')
ARGS = PARSER.parse_args()
isobands(ARGS.src_file, ARGS.b, ARGS.out_file, ARGS.f, ARGS.nln, ARGS.a,
ARGS.off, ARGS.i, upper_val_output=str2bool(ARGS.up))
| PatSunter/pyOTPA | Isochrones/isobands_matplotlib.py | Python | bsd-3-clause | 5,552 |
from __future__ import division
from direct.showbase.ShowBase import ShowBase
from direct.actor.Actor import ActorNode
from panda3d.core import WindowProperties, NodePath, LVector3
from panda3d.core import LineSegs, OrthographicLens, CardMaker
from inputs import Inputs
from sys import path
import square
try:
path.insert(1, '../pydaq')
import pydaq
except ImportError:
pydaq = None
class ColorWorld(object):
    """Panda3D color-matching task.

    The subject steers an avatar whose position is mapped onto a color
    gradient; the window background changes with movement, and a reward is
    pumped when the background color is held within tolerance of a target
    match color. A second window shows a 2D map with the avatar's trace and
    the target square.
    """
    def __init__(self, config=None):
        """Load config, build the scene, and start the first trial."""
        # keep track of velocity, this allows me to counteract joystick with keyboard
        self.velocity = LVector3(0)
        if config is None:
            self.config = {}
            execfile('config.py', self.config)
        else:
            self.config = config
        # Hardware reward pump; only available when pydaq imported.
        self.reward = None
        if pydaq:
            self.reward = pydaq.GiveReward()
        self.reward_count = 0
        # self.color_map always corresponds to (r, g, b)
        # does not change during game, each game uses a particular color space
        self.color_dict = square.make_color_map(self.config['colors'])
        # sets the range of colors for this map
        self.c_range = self.config['c_range']
        # color variables (make dictionary?)
        # color_list is set in beginning, and then after that this is only
        # called again for non-random (training)
        self.color_list = square.set_start_position_colors(self.config)
        self.color_match = [0, 0, 0]
        self.color_tolerance = []
        self.last_avt, self.avt_factor = square.translate_color_map(self.config, self.color_dict, self.color_list)
        print 'starting avt position', self.last_avt
        print 'map avatar factor', self.avt_factor
        self.random = True
        if self.config.get('match_direction'):
            self.random = False
        # adjustment to speed so corresponds to gobananas task
        # 7 seconds to cross original environment
        # speed needs to be adjusted to both speed in original
        # environment and c_range of colors
        # self.speed = 0.05 * (self.c_range[1] - self.c_range[0])
        # speed is own variable, so can be changed during training.
        self.speed = self.config['speed']
        # map avatar variables
        self.render2d = None
        self.match_square = None
        self.map_avt_node = []
        # need a multiplier to the joystick output to tolerable speed
        self.vel_base = 3
        self.max_vel = [500, 500, 0]
        self.card = None
        self.base = ShowBase()
        self.base.disableMouse()
        # assume we are showing windows unless proven otherwise
        if self.config.get('win', True):
            # only need inputs if we have a window
            self.inputs = Inputs(self.base)
            props = WindowProperties()
            props.setCursorHidden(True)
            props.setForeground(True)
            print self.config.get('resolution')
            if self.config.get('resolution'):
                props.set_size(int(self.config['resolution'][0]), int(self.config['resolution'][1]))
                props.set_origin(0, 0)
            else:
                props.set_size(600, 600)
                props.set_origin(400, 50)
            self.base.win.requestProperties(props)
            # print self.base.win.get_size()
            # setup color map on second window
            sq_node = square.setup_square(self.config)
            self.setup_display2(sq_node)
        # print 'background color', self.base.getBackgroundColor()
        # create the avatar
        self.avatar = NodePath(ActorNode("avatar"))
        self.avatar.reparentTo(self.base.render)
        self.avatar.setH(self.base.camera.getH())
        self.base.camera.reparentTo(self.avatar)
        self.base.camera.setPos(0, 0, 0)
        # initialize task variables
        self.frame_task = None
        self.started_game = None
        self.showed_match = None
        self.gave_reward = None
        # initialize and start the game
        self.set_next_trial()
        # print 'end init'
    def start_loop(self):
        """Schedule the match-sample display, then gameplay 5 s later."""
        # need to get new match
        print 'start loop'
        self.started_game = self.base.taskMgr.doMethodLater(5, self.start_play, 'start_play')
        self.showed_match = self.base.taskMgr.add(self.show_match_sample, 'match_image')
    # Task methods
    def show_match_sample(self, task):
        """Show a card filled with the target match color (one-shot task)."""
        print 'show match sample'
        print self.color_match[:]
        # match_image.fill(*self.color_match[:])
        card = CardMaker('card')
        color_match = self.color_match[:]
        # add alpha channel
        color_match.append(1)
        print color_match
        card.set_color(*color_match[:])
        card.set_frame(-12, -8, 0, 4)
        # log this
        self.card = self.base.render.attach_new_node(card.generate())
        return task.done
    def start_play(self, task):
        """Remove the sample card and begin the main game loop (one-shot)."""
        print 'start play'
        # log this
        self.base.taskMgr.remove('match_image')
        self.card.removeNode()
        # print self.base.render.ls()
        self.frame_task = self.base.taskMgr.add(self.game_loop, "game_loop")
        self.frame_task.last = 0  # initiate task time of the last frame
        # log this
        self.base.setBackgroundColor(self.color_list[:])
        return task.done
    def game_loop(self, task):
        """Per-frame task: poll input, move avatar, check for a color match."""
        dt = task.time - task.last
        task.last = task.time
        self.velocity = self.inputs.poll_inputs(self.velocity)
        move = self.move_avatar(dt)
        stop = self.change_background(move)
        self.move_map_avatar(move, stop)
        match = self.check_color_match()
        if match:
            self.give_reward()
            return task.done
        return task.cont
    def reward_loop(self, task):
        """Repeating task: pump one reward per tick up to num_beeps, then end trial."""
        self.reward_count += 1
        if self.reward_count <= self.config['num_beeps']:
            if self.reward:
                # log this
                print 'give a bloody reward already'
                self.reward.pumpOut()
            print 'give reward'
            return task.again
        else:
            self.end_loop()
            return task.done
    def move_avatar(self, dt):
        """Move the avatar by the current velocity; return the frame's move vector."""
        # print 'velocity', self.velocity
        # this makes for smooth (correct speed) diagonal movement
        # print 'velocity', self.velocity
        magnitude = max(abs(self.velocity[0]), abs(self.velocity[1]))
        move = None
        if self.velocity.normalize():
            # go left in increasing amount
            # print 'dt', dt
            # print 'normalized'
            # print 'velocity', self.velocity
            # print 'magnitude', magnitude
            self.velocity *= magnitude
            # print 'velocity', self.velocity
            # this makes for smooth movement
            move = self.velocity * self.vel_base * dt
            # print move
            self.avatar.setFluidPos(self.avatar, move)
        return move
    def change_background(self, move):
        """Shift the background color along the mapped axes; return which axes clamped."""
        stop = [True, True, True]
        if move:
            # print move
            move *= self.speed
            for i in range(3):
                value = self.color_dict[i]
                if value is not None:
                    stop[i] = False
                    # keys correspond to x,y,z
                    # values correspond to r,g,b
                    if i == 2:
                        # z axis is treated differently
                        # need to work on this. z should
                        # be at min when both x and y are at max
                        # taking the average is not quite right...
                        z_move = (move[0] + move[1])/2
                        # print z_move
                        self.color_list[value] -= z_move
                    else:
                        self.color_list[value] += move[i]
                    if self.color_list[value] < self.c_range[0]:
                        self.color_list[value] = self.c_range[0]
                        stop[i] = True
                    elif self.color_list[value] > self.c_range[1]:
                        self.color_list[value] = self.c_range[1]
                        stop[i] = True
            # log this
            self.base.setBackgroundColor(self.color_list[:])
            # print self.base.getBackgroundColor()
        return stop
    def move_map_avatar(self, move, stop):
        """Draw the avatar's trace segment on the 2D map window."""
        # print move
        # avatar is mapped assuming c_range of 0.5. What do I need to
        # change to use a different c_range? c_range of one is twice
        # the
        if move:
            avt = LineSegs()
            avt.setThickness(1)
            avt.setColor(1, 1, 1)
            # print 'last', self.last_avt
            avt.move_to(self.last_avt[0], -5, self.last_avt[1])
            # print 'move', move
            new_move = [i + (j * self.avt_factor) for i, j in zip(self.last_avt, move)]
            # new_move = [i + j for i, j in zip(self.last_avt, move)]
            # would it be better to have a local stop condition?
            if stop[0]:
                new_move[0] = self.last_avt[0]
                # print 'stop x', self.last_avt[0]
            if stop[1]:
                new_move[1] = self.last_avt[1]
                # print 'stop y', self.last_avt[1]
            # print 'new', new_move
            self.last_avt = [new_move[0], new_move[1]]
            avt.draw_to(new_move[0], -5, new_move[1])
            self.map_avt_node.append(self.render2d.attach_new_node(avt.create()))
            # print self.map_avt_node[-1]
            # can't let too many nodes pile up
            if len(self.map_avt_node) > 299:
                # removing the node does not remove the object from the list
                for i, j in enumerate(self.map_avt_node):
                    j.removeNode()
                    if i > 49:
                        break
                del self.map_avt_node[0:50]
    def check_color_match(self):
        """Return True when every background channel is inside its tolerance band."""
        # print 'match this', self.color_tolerance
        # print self.color_list
        check_color = [j[0] < self.color_list[i] < j[1] for i, j in enumerate(self.color_tolerance)]
        # print check_color
        if all(check_color):
            return True
        else:
            return False
    def give_reward(self):
        """Clear the background, pump the first reward, and start the reward loop."""
        # clear the background
        self.base.setBackgroundColor(0.41, 0.41, 0.41)
        print 'give first reward'
        self.reward_count = 1
        if self.reward:
            # log this
            self.reward.pumpOut()
        self.gave_reward = self.base.taskMgr.doMethodLater(self.config['pump_delay'], self.reward_loop, 'reward_loop')
    def end_loop(self):
        """Finish the trial: clear the map trace and set up the next trial."""
        print 'end loop'
        # clear avatar map
        self.clear_avatar_map()
        # if there is a match set, return to center of color gradient,
        # set new match, if applicable
        self.set_next_trial()
    def clear_avatar_map(self):
        """Remove every trace segment node from the map window."""
        for i, j in enumerate(self.map_avt_node):
            j.removeNode()
        self.map_avt_node = []
    def plot_match_square(self, corners):
        """Outline the target square on the map; corners is ((x0,x1),(y0,y1))."""
        print 'plot match square'
        print corners
        match = LineSegs()
        match.setThickness(1.5)
        match.setColor(0, 0, 0)
        match.move_to(corners[0][0], -5, corners[1][0])
        match.draw_to(corners[0][1], -5, corners[1][0])
        match.draw_to(corners[0][1], -5, corners[1][1])
        match.draw_to(corners[0][0], -5, corners[1][1])
        match.draw_to(corners[0][0], -5, corners[1][0])
        # print self.render2d
        self.match_square = self.render2d.attach_new_node(match.create())
    def create_avatar_map_match_square(self, config=None):
        """Translate the match color into map coordinates and draw its square."""
        print 'make new square for map'
        if config is not None:
            config_dict = config
        else:
            config_dict = self.config
        # create square on avatar map for new color match
        map_color_match, factor = square.translate_color_map(config_dict, self.color_dict, self.color_match)
        tolerance = config_dict['tolerance'] * factor
        map_color_tolerance = [(i - tolerance, i + tolerance) for i in map_color_match]
        print map_color_tolerance
        if self.render2d:
            if self.match_square:
                self.match_square.removeNode()
            self.plot_match_square(map_color_tolerance)
    def set_next_trial(self):
        """Reset position/colors, pick the next match color, and start the loop."""
        print 'set next trial'
        # move avatar back to beginning position, only matters for
        # showing card for next color match
        self.avatar.set_pos(-10, -10, 2)
        # set color_list with starting color
        # if random, won't use this again, but for manual, will
        # return to center
        # need to update self.config to new direction, if there is one
        if self.config.get('match_direction'):
            self.check_key_map()
            # return to center, otherwise random will start where you left off
            self.color_list = square.set_start_position_colors(self.config)
            # starting position for map avatar, just translate new color_list
            self.last_avt, self.avt_factor = square.translate_color_map(self.config, self.color_dict, self.color_list)
        print 'start color', self.color_list
        print self.color_dict
        # again need to update self.config for match if using keys
        self.color_match = square.set_match_colors(self.config, self.color_dict)
        # sets the tolerance for how close to a color for reward
        self.color_tolerance = [(i - self.config['tolerance'], i + self.config['tolerance']) for i in self.color_match]
        print 'color match', self.color_match
        print 'color tolerance', self.color_tolerance
        self.create_avatar_map_match_square(self.config)
        # start the game
        self.start_loop()
    def check_key_map(self):
        """Update config's match_direction from the currently pressed direction keys."""
        if self.config['colors'][0]:
            if self.inputs.key_map['r']:
                self.config['match_direction'] = ['right']
            elif self.inputs.key_map['r'] is not None:
                self.config['match_direction'] = ['left']
        elif self.config['colors'][1]:
            if self.inputs.key_map['f']:
                self.config['match_direction'] = ['front']
            elif self.inputs.key_map['f'] is not None:
                self.config['match_direction'] = ['back']
    def setup_display2(self, display_node):
        """Open the secondary (map) window with an orthographic camera."""
        print 'setup display2'
        props = WindowProperties()
        props.set_cursor_hidden(True)
        props.set_foreground(False)
        if self.config.get('resolution'):
            props.setSize(700, 700)
            props.setOrigin(-int(self.config['resolution'][0] - 5), 5)
        else:
            props.setSize(300, 300)
            props.setOrigin(10, 10)
        window2 = self.base.openWindow(props=props, aspectRatio=1)
        lens = OrthographicLens()
        lens.set_film_size(2, 2)
        lens.setNearFar(-100, 100)
        self.render2d = NodePath('render2d')
        self.render2d.attach_new_node(display_node)
        camera2d = self.base.makeCamera(window2)
        camera2d.setPos(0, -10, 0)
        camera2d.node().setLens(lens)
        camera2d.reparentTo(self.render2d)
if __name__ == "__main__":
CW = ColorWorld()
CW.base.run()
| codedragon/color_world | color_world.py | Python | bsd-3-clause | 15,178 |
#!/usr/bin/env python
'''
#from sc2casts_parser import *
from sc2casts_client import *
import json
from pprint import *
parser = SC2CastsParser()
client = SC2CastsClient()
TEST_DATA_DIR = 'data'
# test cases:
def test_titles():
pass
# test cases:
def test_casts():
with open(TEST_DATA_DIR + '/all', 'r') as f:
test_data = f.read()
#print test_data
actual = parser.casts(test_data)
pprint(actual)
# TODO check each cast
# test cases:
# bo3 in 1 game
# 1 game
# 3 games
# 5 games
def test_games_bo3_in_1_game():
with open(TEST_DATA_DIR + '/cast14719-Soulkey-vs-Cure-Best-of-3-All-in-1-video-IEM-Cologne-2014-Korean-Qualifier', 'r') as f:
test_data = f.read()
#print test_data
actual = parser.games(test_data)
assert len(actual) == 1
assert actual[0]['game_id'] == 'Gt4E3rIUhoA'
assert actual[0]['game_title'] == 'Game 1'
# games 4 and 5 not played
def test_games_5_games():
with open(TEST_DATA_DIR + '/cast14705-KT-Rolster-vs-Prime-Best-of-5-2014-Proleague-Round-1', 'r') as f:
test_data = f.read()
#print test_data
actual = parser.games(test_data)
print actual
assert len(actual) == 5
assert actual[0]['game_id'] == 'QqSRtBVEXDs'
assert actual[0]['game_title'] == 'Game 1'
assert actual[1]['game_id'] == '5lFLuOKYTa8'
assert actual[1]['game_title'] == 'Game 2'
assert actual[2]['game_id'] == 'wNhcT-NenNs'
assert actual[2]['game_title'] == 'Game 3'
assert actual[3]['game_id'] == ''
assert actual[3]['game_title'] == 'Game 4'
assert actual[4]['game_id'] == ''
assert actual[4]['game_title'] == 'Game 5'
# test cases:
def test_events():
with open(TEST_DATA_DIR + '/browse', 'r') as f:
test_data = f.read()
actual = parser.events(test_data)
pprint(actual)
# test cases:
def test_casters():
with open(TEST_DATA_DIR + '/browse', 'r') as f:
test_data = f.read()
actual = parser.casters(test_data)
pprint(actual)
# test cases:
def test_matchups():
with open(TEST_DATA_DIR + '/browse', 'r') as f:
test_data = f.read()
actual = parser.matchups(test_data)
assert len(actual) == 6
# TODO test that the actual URLs are still valid
# client tests
def test_client_matchups():
actual = client.matchups()
assert len(actual) == 6
'''
| thmttch/sc2castsclient | tests/tests.py | Python | bsd-3-clause | 2,367 |
#!/bin/env python
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/lib/formatters.py
__all__=('Formatter','DecimalFormatter')
__version__=''' $Id: formatters.py 3959 2012-09-27 14:39:39Z robin $ '''
__doc__="""
These help format numbers and dates in a user friendly way.
Used by the graphics framework.
"""
import string, sys, os, re
class Formatter:
    """Base formatter - renders values through a plain %-style pattern."""
    def __init__(self, pattern):
        # The %-format string applied to every value.
        self.pattern = pattern

    def format(self, obj):
        """Return *obj* rendered through the stored pattern."""
        return self.pattern % obj

    def __repr__(self):
        return "{}('{}')".format(self.__class__.__name__, self.pattern)

    def __call__(self, x):
        # Make the formatter usable as a plain callable.
        return self.format(x)
# Leading digits up to and including the decimal point (stripped before
# counting fractional digits in DecimalFormatter._calcPlaces).
_ld_re=re.compile(r'^\d*\.')
# Trailing zeros (also stripped in _calcPlaces).
_tz_re=re.compile('0+$')
class DecimalFormatter(Formatter):
    """lets you specify how to build a decimal.
    A future NumberFormatter class will take Microsoft-style patterns
    instead - "$#,##0.00" is WAY easier than this."""
    def __init__(self, places=2, decimalSep='.', thousandSep=None, prefix=None, suffix=None):
        # places may be an int (negative means "round then strip trailing
        # zeros"), or the string 'auto' to derive places from the data via
        # _calcPlaces. NOTE(review): with 'auto', self.places is unset until
        # calcPlaces is invoked, so format()/__repr__ before that would fail.
        if places=='auto':
            self.calcPlaces = self._calcPlaces
        else:
            self.places = places
        self.dot = decimalSep
        self.comma = thousandSep
        self.prefix = prefix
        self.suffix = suffix
    def _calcPlaces(self,V):
        '''called with the full set of values to be formatted so we can calculate places'''
        # Longest run of significant fractional digits across all values.
        self.places = max([len(_tz_re.sub('',_ld_re.sub('',str(v)))) for v in V])
    def format(self, num):
        # positivize the numbers
        sign=num<0
        if sign:
            num = -num
        places, sep = self.places, self.dot
        # Negative places: round to abs(places) then strip trailing zeros.
        strip = places<=0
        if places and strip: places = -places
        strInt = ('%.' + str(places) + 'f') % num
        if places:
            strInt, strFrac = strInt.split('.')
            strFrac = sep + strFrac
            if strip:
                # Drop trailing zeros and, ultimately, a bare separator.
                while strFrac and strFrac[-1] in ['0',sep]: strFrac = strFrac[:-1]
        else:
            strFrac = ''
        if self.comma is not None:
            # Insert the thousands separator every three digits from the right.
            strNew = ''
            while strInt:
                left, right = strInt[0:-3], strInt[-3:]
                if left == '':
                    #strNew = self.comma + right + strNew
                    strNew = right + strNew
                else:
                    strNew = self.comma + right + strNew
                strInt = left
            strInt = strNew
        strBody = strInt + strFrac
        if sign: strBody = '-' + strBody
        if self.prefix:
            strBody = self.prefix + strBody
        if self.suffix:
            strBody = strBody + self.suffix
        return strBody
    def __repr__(self):
        return "%s(places=%d, decimalSep=%s, thousandSep=%s, prefix=%s, suffix=%s)" % (
            self.__class__.__name__,
            self.places,
            repr(self.dot),
            repr(self.comma),
            repr(self.prefix),
            repr(self.suffix)
            )
if __name__=='__main__':
    # Quick self-test: t(value, expected, ...) formats value and prints
    # OK/BAD against the expected string.
    def t(n, s, places=2, decimalSep='.', thousandSep=None, prefix=None, suffix=None):
        f=DecimalFormatter(places,decimalSep,thousandSep,prefix,suffix)
        r = f(n)
        print("places=%2d dot=%-4s comma=%-4s prefix=%-4s suffix=%-4s result=%10s %s" %(f.places, f.dot, f.comma, f.prefix, f.suffix,r, r==s and 'OK' or 'BAD'))
    t(1000.9,'1,000.9',1,thousandSep=',')
    t(1000.95,'1,001.0',1,thousandSep=',')
    t(1000.95,'1,001',-1,thousandSep=',')
    t(1000.9,'1,001',0,thousandSep=',')
    t(1000.9,'1000.9',1)
    t(1000.95,'1001.0',1)
    t(1000.95,'1001',-1)
    t(1000.9,'1001',0)
    t(1000.1,'1000.1',1)
    t(1000.55,'1000.6',1)
    t(1000.449,'1000.4',-1)
    t(1000.45,'1000',0)
| nakagami/reportlab | src/reportlab/lib/formatters.py | Python | bsd-3-clause | 3,887 |
from datetime import datetime, timedelta, tzinfo
import unittest
import pytz
import re
#noinspection PyUnresolvedReferences
from nose.tools import assert_equal, assert_raises # you need it for tests in form of continuations
import six
from flask_restful import inputs
def test_reverse_rfc822_datetime():
    # Each pair: RFC 822 string -> expected timezone-aware UTC datetime
    # (the -0200 case checks offset normalization to UTC).
    dates = [
        ("Sat, 01 Jan 2011 00:00:00 -0000", datetime(2011, 1, 1, tzinfo=pytz.utc)),
        ("Sat, 01 Jan 2011 23:59:59 -0000", datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc)),
        ("Sat, 01 Jan 2011 21:59:59 -0200", datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc)),
    ]
    for date_string, expected in dates:
        yield assert_equal, inputs.datetime_from_rfc822(date_string), expected
def test_reverse_iso8601_datetime():
    # Each pair: ISO 8601 string -> expected timezone-aware UTC datetime
    # (covers microseconds and non-zero offsets).
    dates = [
        ("2011-01-01T00:00:00+00:00", datetime(2011, 1, 1, tzinfo=pytz.utc)),
        ("2011-01-01T23:59:59+00:00", datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc)),
        ("2011-01-01T23:59:59.001000+00:00", datetime(2011, 1, 1, 23, 59, 59, 1000, tzinfo=pytz.utc)),
        ("2011-01-01T23:59:59+02:00", datetime(2011, 1, 1, 21, 59, 59, tzinfo=pytz.utc))
    ]
    for date_string, expected in dates:
        yield assert_equal, inputs.datetime_from_iso8601(date_string), expected
def test_urls():
    # Valid URLs must be returned unchanged by inputs.url.
    urls = [
        'http://www.djangoproject.com/',
        'http://localhost/',
        'http://example.com/',
        'http://www.example.com/',
        'http://www.example.com:8000/test',
        'http://valid-with-hyphens.com/',
        'http://subdomain.example.com/',
        'http://200.8.9.10/',
        'http://200.8.9.10:8000/test',
        'http://valid-----hyphens.com/',
        'http://example.com?something=value',
        'http://example.com/index.php?something=value&another=value2',
        'http://foo:[email protected]',
        'http://foo:@example.com',
        'http://foo:@2001:db8:85a3::8a2e:370:7334',
        'http://foo2:qd1%[email protected]',
    ]
    for value in urls:
        yield assert_equal, inputs.url(value), value
def check_bad_url_raises(value):
    # Helper: inputs.url(value) must raise ValueError with the plain
    # "not a valid URL" message (no suggestion suffix).
    try:
        inputs.url(value)
        assert False, "shouldn't get here"
    except ValueError as e:
        assert_equal(six.text_type(e), u"{0} is not a valid URL".format(value))
def test_bad_urls():
    # Structurally invalid URLs (bad hosts, bad credentials, whitespace).
    values = [
        'foo',
        'http://',
        'http://example',
        'http://example.',
        'http://.com',
        'http://invalid-.com',
        'http://-invalid.com',
        'http://inv-.alid-.com',
        'http://inv-.-alid.com',
        'foo bar baz',
        u'foo \u2713',
        'http://@foo:[email protected]',
        'http://:[email protected]',
        'http://bar:bar:[email protected]',
    ]
    for value in values:
        yield check_bad_url_raises, value
def test_bad_url_error_message():
    # URLs missing a scheme should get the "Did you mean: http://..." hint.
    values = [
        'google.com',
        'domain.google.com',
        'kevin:[email protected]/path?query',
        u'google.com/path?\u2713',
    ]
    for value in values:
        yield check_url_error_message, value
def check_url_error_message(value):
    # Helper: asserts the suggestion-style ValueError message.
    try:
        inputs.url(value)
        assert False, u"inputs.url({0}) should raise an exception".format(value)
    except ValueError as e:
        assert_equal(six.text_type(e),
                     (u"{0} is not a valid URL. Did you mean: http://{0}".format(value)))
def test_regex_bad_input():
    """Non-numeric inputs must raise ValueError from a digits-only regex parser."""
    cases = (
        'abc',
        '123abc',
        'abc123',
        '',
    )
    num_only = inputs.regex(r'^[0-9]+$')
    for value in cases:
        # Fix: bind the loop variable as a default argument. The original
        # ``lambda: num_only(value)`` closed over ``value`` late, so any
        # runner that collects the yielded tests before executing them
        # would check only the last case.
        yield assert_raises, ValueError, lambda value=value: num_only(value)
def test_regex_good_input():
    """Digit-only strings pass through the regex parser unchanged."""
    cases = (
        '123',
        '1234567890',
        '00000',
    )
    num_only = inputs.regex(r'^[0-9]+$')
    for value in cases:
        yield assert_equal, num_only(value), value
def test_regex_bad_pattern():
    """Regex error raised immediately when regex input parser is created."""
    assert_raises(re.error, inputs.regex, '[')
def test_regex_flags_good_input():
    """A case-insensitive pattern accepts mixed-case letters."""
    cases = (
        'abcd',
        'ABCabc',
        'ABC',
    )
    case_insensitive = inputs.regex(r'^[A-Z]+$', re.IGNORECASE)
    for value in cases:
        yield assert_equal, case_insensitive(value), value
def test_regex_flags_bad_input():
    """A case-sensitive pattern rejects strings containing lowercase."""
    cases = (
        'abcd',
        'ABCabc'
    )
    case_sensitive = inputs.regex(r'^[A-Z]+$')
    for value in cases:
        # Same late-binding fix as in test_regex_bad_input.
        yield assert_raises, ValueError, lambda value=value: case_sensitive(value)
class TypesTestCase(unittest.TestCase):
    """Unit tests for the scalar input parsers: boolean, date, natural,
    positive, and int_range."""

    def test_boolean_false(self):
        self.assertEqual(inputs.boolean("False"), False)

    def test_boolean_is_false_for_0(self):
        self.assertEqual(inputs.boolean("0"), False)

    def test_boolean_true(self):
        self.assertEqual(inputs.boolean("true"), True)

    def test_boolean_is_true_for_1(self):
        self.assertEqual(inputs.boolean("1"), True)

    def test_boolean_upper_case(self):
        self.assertEqual(inputs.boolean("FaLSE"), False)

    def test_boolean(self):
        self.assertEqual(inputs.boolean("FaLSE"), False)

    def test_boolean_with_python_bool(self):
        """Input that is already a native python `bool` should be passed through
        without extra processing."""
        self.assertEqual(inputs.boolean(True), True)
        self.assertEqual(inputs.boolean(False), False)

    def test_bad_boolean(self):
        with self.assertRaises(ValueError):
            inputs.boolean("blah")

    def test_date_later_than_1900(self):
        self.assertEqual(inputs.date("1900-01-01"), datetime(1900, 1, 1))

    def test_date_input_error(self):
        with self.assertRaises(ValueError):
            inputs.date("2008-13-13")

    def test_date_input(self):
        self.assertEqual(inputs.date("2008-08-01"), datetime(2008, 8, 1))

    def test_natual_negative(self):
        with self.assertRaises(ValueError):
            inputs.natural(-1)

    def test_natural(self):
        self.assertEqual(3, inputs.natural(3))

    def test_natual_string(self):
        with self.assertRaises(ValueError):
            inputs.natural('foo')

    def test_positive(self):
        self.assertEqual(1, inputs.positive(1))
        self.assertEqual(10000, inputs.positive(10000))

    def test_positive_zero(self):
        with self.assertRaises(ValueError):
            inputs.positive(0)

    def test_positive_negative_input(self):
        with self.assertRaises(ValueError):
            inputs.positive(-1)

    def test_int_range_good(self):
        bounded = inputs.int_range(1, 5)
        self.assertEqual(3, bounded(3))

    def test_int_range_inclusive(self):
        bounded = inputs.int_range(1, 5)
        self.assertEqual(5, bounded(5))

    def test_int_range_low(self):
        bounded = inputs.int_range(0, 5)
        with self.assertRaises(ValueError):
            bounded(-1)

    def test_int_range_high(self):
        bounded = inputs.int_range(0, 5)
        with self.assertRaises(ValueError):
            bounded(6)
def test_isointerval():
    """Table-driven test: inputs.iso8601interval() parses ISO 8601 interval
    strings into (start, end) datetime pairs.

    Each table entry is (input string, expected (start, end)).  Covers
    explicit durations, start/end pairs, timezone conversion to UTC, and the
    expansion of a single date(time) to its enclosing period at the given
    resolution (second, minute, hour, day).
    """
    intervals = [
        (
            # Full precision with explicit UTC.
            "2013-01-01T12:30:00Z/P1Y2M3DT4H5M6S",
            (
                datetime(2013, 1, 1, 12, 30, 0, tzinfo=pytz.utc),
                datetime(2014, 3, 5, 16, 35, 6, tzinfo=pytz.utc),
            ),
        ),
        (
            # Full precision with alternate UTC indication
            "2013-01-01T12:30+00:00/P2D",
            (
                datetime(2013, 1, 1, 12, 30, 0, tzinfo=pytz.utc),
                datetime(2013, 1, 3, 12, 30, 0, tzinfo=pytz.utc),
            ),
        ),
        (
            # Implicit UTC with time
            "2013-01-01T15:00/P1M",
            (
                datetime(2013, 1, 1, 15, 0, 0, tzinfo=pytz.utc),
                datetime(2013, 1, 31, 15, 0, 0, tzinfo=pytz.utc),
            ),
        ),
        (
            # TZ conversion
            "2013-01-01T17:00-05:00/P2W",
            (
                datetime(2013, 1, 1, 22, 0, 0, tzinfo=pytz.utc),
                datetime(2013, 1, 15, 22, 0, 0, tzinfo=pytz.utc),
            ),
        ),
        (
            # Date upgrade to midnight-midnight period
            "2013-01-01/P3D",
            (
                datetime(2013, 1, 1, 0, 0, 0, tzinfo=pytz.utc),
                datetime(2013, 1, 4, 0, 0, 0, 0, tzinfo=pytz.utc),
            ),
        ),
        (
            # Start/end with UTC
            "2013-01-01T12:00:00Z/2013-02-01T12:00:00Z",
            (
                datetime(2013, 1, 1, 12, 0, 0, tzinfo=pytz.utc),
                datetime(2013, 2, 1, 12, 0, 0, tzinfo=pytz.utc),
            ),
        ),
        (
            # Start/end with time upgrade
            "2013-01-01/2013-06-30",
            (
                datetime(2013, 1, 1, tzinfo=pytz.utc),
                datetime(2013, 6, 30, tzinfo=pytz.utc),
            ),
        ),
        (
            # Start/end with TZ conversion
            "2013-02-17T12:00:00-07:00/2013-02-28T15:00:00-07:00",
            (
                datetime(2013, 2, 17, 19, 0, 0, tzinfo=pytz.utc),
                datetime(2013, 2, 28, 22, 0, 0, tzinfo=pytz.utc),
            ),
        ),
        # Resolution expansion for single date(time)
        (
            # Second with UTC
            "2013-01-01T12:30:45Z",
            (
                datetime(2013, 1, 1, 12, 30, 45, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 12, 30, 46, tzinfo=pytz.utc),
            ),
        ),
        (
            # Second with tz conversion
            "2013-01-01T12:30:45+02:00",
            (
                datetime(2013, 1, 1, 10, 30, 45, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 10, 30, 46, tzinfo=pytz.utc),
            ),
        ),
        (
            # Second with implicit UTC
            "2013-01-01T12:30:45",
            (
                datetime(2013, 1, 1, 12, 30, 45, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 12, 30, 46, tzinfo=pytz.utc),
            ),
        ),
        (
            # Minute with UTC
            "2013-01-01T12:30+00:00",
            (
                datetime(2013, 1, 1, 12, 30, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 12, 31, tzinfo=pytz.utc),
            ),
        ),
        (
            # Minute with conversion
            "2013-01-01T12:30+04:00",
            (
                datetime(2013, 1, 1, 8, 30, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 8, 31, tzinfo=pytz.utc),
            ),
        ),
        (
            # Minute with implicit UTC
            "2013-01-01T12:30",
            (
                datetime(2013, 1, 1, 12, 30, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 12, 31, tzinfo=pytz.utc),
            ),
        ),
        (
            # Hour, explicit UTC
            "2013-01-01T12Z",
            (
                datetime(2013, 1, 1, 12, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 13, tzinfo=pytz.utc),
            ),
        ),
        (
            # Hour with offset
            "2013-01-01T12-07:00",
            (
                datetime(2013, 1, 1, 19, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 20, tzinfo=pytz.utc),
            ),
        ),
        (
            # Hour with implicit UTC
            "2013-01-01T12",
            (
                datetime(2013, 1, 1, 12, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 13, tzinfo=pytz.utc),
            ),
        ),
        (
            # Interval with trailing zero fractional seconds should
            # be accepted.
            "2013-01-01T12:00:00.0/2013-01-01T12:30:00.000000",
            (
                datetime(2013, 1, 1, 12, tzinfo=pytz.utc),
                datetime(2013, 1, 1, 12, 30, tzinfo=pytz.utc),
            ),
        ),
    ]
    for value, expected in intervals:
        yield assert_equal, inputs.iso8601interval(value), expected
def test_invalid_isointerval_error():
    """A malformed interval raises ValueError with the documented message."""
    try:
        inputs.iso8601interval('2013-01-01/blah')
    except ValueError as error:
        assert_equal(
            str(error),
            "Invalid argument: 2013-01-01/blah. argument must be a valid ISO8601 "
            "date/time interval.",
        )
    else:
        # Falling through the try without an exception is a failure.
        assert False, 'Should raise a ValueError'
def test_bad_isointervals():
    """Clearly invalid interval strings must be rejected."""
    for malformed in ('2013-01T14:', '', 'asdf', '01/01/2013'):
        yield (
            assert_raises,
            Exception,
            inputs.iso8601interval,
            malformed,
        )
if __name__ == '__main__':
    # Allow running this test module directly as a script.
    unittest.main()
| flask-restful/flask-restful | tests/test_inputs.py | Python | bsd-3-clause | 12,433 |
# Django settings for example_project project.
import os
import sys
# Make the package root (one directory up) importable when run in place.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import django
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    ('Justin Quick', '[email protected]'),
)
# Database backend is chosen via the DATABASE_ENGINE env var; default SQLite.
ENGINE = os.environ.get('DATABASE_ENGINE', 'django.db.backends.sqlite3')
DATABASES = {
    'default': {
        'ENGINE': ENGINE,
        'NAME': 'test',
        'OPTIONS': {
        }
    }
}
# Postgres/MySQL need credentials; Travis CI provides its own service accounts.
if 'postgres' in ENGINE or 'mysql' in ENGINE:
    USER, PASSWORD = 'test', 'test'
    if os.environ.get('TRAVIS', False):
        if 'mysql' in ENGINE:
            USER, PASSWORD = 'travis', ''
        else:
            USER, PASSWORD = 'postgres', ''
    DATABASES['default'].update(
        USER=os.environ.get('DATABASE_USER', USER),
        PASSWORD=os.environ.get('DATABASE_PASSWORD', PASSWORD),
        HOST=os.environ.get('DATABASE_HOST', 'localhost')
    )
# NOTE(review): import-time debug print of the selected backend; consider
# removing or routing through logging.
print(ENGINE)
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = 'media'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'wzf0h@r2u%m^_zgj^39-y(kd%+n+j0r7=du(q0^s@q1asdfasdfasdft%^2!p'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'actstream.runtests.urls'
TEMPLATE_DIRS = (
    'templates',
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.admin',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.admindocs',
    'django.contrib.sites',
    'django.contrib.comments',
    'actstream.runtests.testapp',
    'actstream.runtests.testapp_nested',
    'actstream',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.request',
)
# Settings consumed by django-activity-stream itself.
ACTSTREAM_SETTINGS = {
    'MANAGER': 'actstream.runtests.testapp.streams.MyActionManager',
    'FETCH_RELATIONS': True,
    'USE_PREFETCH': True,
    'USE_JSONFIELD': True,
    'GFK_FETCH_DEPTH': 0,
}
# Custom user models are only supported on Django 1.5+.
if django.VERSION[:2] >= (1, 5):
    AUTH_USER_MODEL = 'testapp.MyUser'
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
# Opt-in coverage measurement: set COVERAGE in the environment to enable.
if 'COVERAGE' in os.environ:
    INSTALLED_APPS += ('django_coverage',)
    TEST_RUNNER = 'django_coverage.coverage_runner.CoverageRunner'
    COVERAGE_REPORT_HTML_OUTPUT_DIR = 'coverage'
    COVERAGE_REPORT_DATA_FILE = '.coverage'
| hiepthai/django-activity-stream | actstream/runtests/settings.py | Python | bsd-3-clause | 3,999 |
# Generated test driver: exercises SQL code generation for a
# DecisionTreeClassifier on the BinaryClass_10 dataset targeting DB2.
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("DecisionTreeClassifier" , "BinaryClass_10" , "db2")
| antoinecarme/sklearn2sql_heroku | tests/classification/BinaryClass_10/ws_BinaryClass_10_DecisionTreeClassifier_db2_code_gen.py | Python | bsd-3-clause | 149 |
# orm/dependency.py
# Copyright (C) 2005, 2006, 2007, 2008 Michael Bayer [email protected]
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Bridge the ``PropertyLoader`` (i.e. a ``relation()``) and the
``UOWTransaction`` together to allow processing of relation()-based
dependencies at flush time.
"""
from sqlalchemy.orm import sync
from sqlalchemy import sql, util, exceptions
from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
def create_dependency_processor(prop):
    """Return the DependencyProcessor instance appropriate for *prop*.

    Association-wrapped relations get an ``AssociationDP``; otherwise the
    processor class is chosen by the relation's direction.
    """
    if prop.association is not None:
        return AssociationDP(prop)
    dispatch = {
        ONETOMANY: OneToManyDP,
        MANYTOONE: ManyToOneDP,
        MANYTOMANY: ManyToManyDP,
    }
    return dispatch[prop.direction](prop)
class DependencyProcessor(object):
    """Base class for flush-time processors that synchronize key values
    between the parent and child sides of a single ``relation()``.

    Subclasses implement the direction-specific logic (one-to-many,
    many-to-one, many-to-many).
    """
    # Subclasses that register no inter-mapper dependencies
    # (see DetectKeySwitch) set this to True.
    no_dependencies = False
    def __init__(self, prop):
        # Mirror the relation()'s configuration as instance attributes so
        # flush-time code paths avoid repeated property lookups.
        self.prop = prop
        self.cascade = prop.cascade
        self.mapper = prop.mapper
        self.parent = prop.parent
        self.secondary = prop.secondary
        self.direction = prop.direction
        self.is_backref = prop.is_backref
        self.post_update = prop.post_update
        self.foreign_keys = prop.foreign_keys
        self.passive_deletes = prop.passive_deletes
        self.passive_updates = prop.passive_updates
        self.enable_typechecks = prop.enable_typechecks
        self.key = prop.key
        # Without synchronize_pairs there is nothing to copy between rows,
        # so the relation cannot be processed at flush time.
        if not self.prop.synchronize_pairs:
            raise exceptions.ArgumentError("Can't build a DependencyProcessor for relation %s. No target attributes to populate between parent and child are present" % self.prop)
    def _get_instrumented_attribute(self):
        """Return the ``InstrumentedAttribute`` handled by this
        ``DependencyProcessor``.
        """
        return getattr(self.parent.class_, self.key)
    def hasparent(self, state):
        """return True if the given object instance has a parent,
        according to the ``InstrumentedAttribute`` handled by this ``DependencyProcessor``."""
        # TODO: use correct API for this
        return self._get_instrumented_attribute().impl.hasparent(state)
    def register_dependencies(self, uowcommit):
        """Tell a ``UOWTransaction`` what mappers are dependent on
        which, with regards to the two or three mappers handled by
        this ``PropertyLoader``.
        Also register itself as a *processor* for one of its mappers,
        which will be executed after that mapper's objects have been
        saved or before they've been deleted. The process operation
        manages attributes and dependent operations upon the objects
        of one of the involved mappers.
        """
        raise NotImplementedError()
    def whose_dependent_on_who(self, state1, state2):
        """Given an object pair assuming `obj2` is a child of `obj1`,
        return a tuple with the dependent object second, or None if
        there is no dependency.
        """
        if state1 is state2:
            return None
        elif self.direction == ONETOMANY:
            return (state1, state2)
        else:
            return (state2, state1)
    def process_dependencies(self, task, deplist, uowcommit, delete = False):
        """This method is called during a flush operation to
        synchronize data between a parent and child object.
        It is called within the context of the various mappers and
        sometimes individual objects sorted according to their
        insert/update/delete order (topological sort).
        """
        raise NotImplementedError()
    def preprocess_dependencies(self, task, deplist, uowcommit, delete = False):
        """Used before the flushes' topological sort to traverse
        through related objects and ensure every instance which will
        require save/update/delete is properly added to the
        UOWTransaction.
        """
        raise NotImplementedError()
    def _verify_canload(self, state):
        # Guard against flushing an instance into a collection whose mapper
        # cannot load instances of that type; disabled via
        # relation(enable_typechecks=False).
        if not self.enable_typechecks:
            return
        if state is not None and not self.mapper._canload(state):
            raise exceptions.FlushError("Attempting to flush an item of type %s on collection '%s', which is handled by mapper '%s' and does not load items of that type. Did you mean to use a polymorphic mapper for this relationship ? Set 'enable_typechecks=False' on the relation() to disable this exception. Mismatched typeloading may cause bi-directional relationships (backrefs) to not function properly." % (state.class_, self.prop, self.mapper))
    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
        """Called during a flush to synchronize primary key identifier
        values between a parent/child object, as well as to an
        associationrow in the case of many-to-many.
        """
        raise NotImplementedError()
    def _conditional_post_update(self, state, uowcommit, related):
        """Execute a post_update call.
        For relations that contain the post_update flag, an additional
        ``UPDATE`` statement may be associated after an ``INSERT`` or
        before a ``DELETE`` in order to resolve circular row
        dependencies.
        This method will check for the post_update flag being set on a
        particular relationship, and given a target object and list of
        one or more related objects, and execute the ``UPDATE`` if the
        given related object list contains ``INSERT``s or ``DELETE``s.
        """
        if state is not None and self.post_update:
            for x in related:
                if x is not None:
                    # One related row is enough to require the extra UPDATE.
                    uowcommit.register_object(state, postupdate=True, post_update_cols=[r for l, r in self.prop.synchronize_pairs])
                    break
    def _pks_changed(self, uowcommit, state):
        # Subclasses answer: did this state's primary key values change?
        raise NotImplementedError()
    def __str__(self):
        return "%s(%s)" % (self.__class__.__name__, str(self.prop))
class OneToManyDP(DependencyProcessor):
    """Dependency processor for one-to-many relations: parent rows are
    saved first, and child rows receive the parent's key values in their
    foreign key columns."""
    def register_dependencies(self, uowcommit):
        # With post_update, a MapperStub breaks the circular dependency by
        # scheduling a separate UPDATE; otherwise the parent mapper simply
        # precedes the child mapper.
        if self.post_update:
            if not self.is_backref:
                stub = MapperStub(self.parent, self.mapper, self.key)
                uowcommit.register_dependency(self.mapper, stub)
                uowcommit.register_dependency(self.parent, stub)
                uowcommit.register_processor(stub, self, self.parent)
        else:
            uowcommit.register_dependency(self.parent, self.mapper)
            uowcommit.register_processor(self.parent, self, self.parent)
    def process_dependencies(self, task, deplist, uowcommit, delete = False):
        """Copy (or clear) parent key values into child foreign keys for each
        parent state in *deplist*."""
        #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " process_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
        if delete:
            # head object is being deleted, and we manage its list of child objects
            # the child objects have to have their foreign key to the parent set to NULL
            # this phase can be called safely for any cascade but is unnecessary if delete cascade
            # is on.
            if self.post_update or not self.passive_deletes=='all':
                for state in deplist:
                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
                    if unchanged or deleted:
                        for child in deleted:
                            if child is not None and self.hasparent(child) is False:
                                self._synchronize(state, child, None, True, uowcommit)
                                self._conditional_post_update(child, uowcommit, [state])
                        if self.post_update or not self.cascade.delete:
                            for child in unchanged:
                                if child is not None:
                                    self._synchronize(state, child, None, True, uowcommit)
                                    self._conditional_post_update(child, uowcommit, [state])
        else:
            for state in deplist:
                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=True)
                if added or deleted:
                    for child in added:
                        self._synchronize(state, child, None, False, uowcommit)
                        if child is not None:
                            self._conditional_post_update(child, uowcommit, [state])
                    for child in deleted:
                        # Children removed from the collection (but not
                        # orphan-deleted) get their FK nulled out.
                        if not self.cascade.delete_orphan and not self.hasparent(child):
                            self._synchronize(state, child, None, True, uowcommit)
                if self._pks_changed(uowcommit, state):
                    # Parent PK changed: re-propagate keys to all children.
                    if unchanged:
                        for child in unchanged:
                            self._synchronize(state, child, None, False, uowcommit)
    def preprocess_dependencies(self, task, deplist, uowcommit, delete = False):
        """Register child objects with the unit of work before topological
        sort, so save/delete operations are scheduled for them."""
        #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " preprocess_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
        if delete:
            # head object is being deleted, and we manage its list of child objects
            # the child objects have to have their foreign key to the parent set to NULL
            if not self.post_update:
                should_null_fks = not self.cascade.delete and not self.passive_deletes=='all'
                for state in deplist:
                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
                    if unchanged or deleted:
                        for child in deleted:
                            if child is not None and self.hasparent(child) is False:
                                if self.cascade.delete_orphan:
                                    uowcommit.register_object(child, isdelete=True)
                                else:
                                    uowcommit.register_object(child)
                        if should_null_fks:
                            for child in unchanged:
                                if child is not None:
                                    uowcommit.register_object(child)
        else:
            for state in deplist:
                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=True)
                if added or deleted:
                    for child in added:
                        if child is not None:
                            uowcommit.register_object(child)
                    for child in deleted:
                        if not self.cascade.delete_orphan:
                            uowcommit.register_object(child, isdelete=False)
                        elif self.hasparent(child) is False:
                            # Orphaned child: cascade the delete downward.
                            uowcommit.register_object(child, isdelete=True)
                            for c, m in self.mapper.cascade_iterator('delete', child):
                                uowcommit.register_object(c._state, isdelete=True)
                if not self.passive_updates and self._pks_changed(uowcommit, state):
                    # PK changed and updates aren't passive: load the full
                    # collection so every child row can be updated.
                    if not unchanged:
                        (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key, passive=False)
                    if unchanged:
                        for child in unchanged:
                            uowcommit.register_object(child)
    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
        # Copy parent key values into the child's FK columns, or clear them
        # when clearkeys is set (detach/delete).
        source = state
        dest = child
        if dest is None or (not self.post_update and uowcommit.is_deleted(dest)):
            return
        self._verify_canload(child)
        if clearkeys:
            sync.clear(dest, self.mapper, self.prop.synchronize_pairs)
        else:
            sync.populate(source, self.parent, dest, self.mapper, self.prop.synchronize_pairs)
    def _pks_changed(self, uowcommit, state):
        return sync.source_changes(uowcommit, state, self.parent, self.prop.synchronize_pairs)
class DetectKeySwitch(DependencyProcessor):
    """a special DP that works for many-to-one relations, fires off for
    child items who have changed their referenced key."""
    # This processor only reacts to parent PK changes; it registers no
    # inter-mapper save/delete ordering of its own.
    no_dependencies = True
    def register_dependencies(self, uowcommit):
        uowcommit.register_processor(self.parent, self, self.mapper)
    def preprocess_dependencies(self, task, deplist, uowcommit, delete=False):
        # for non-passive updates, register in the preprocess stage
        # so that mapper save_obj() gets a hold of changes
        if not delete and not self.passive_updates:
            self._process_key_switches(deplist, uowcommit)
    def process_dependencies(self, task, deplist, uowcommit, delete=False):
        # for passive updates, register objects in the process stage
        # so that we avoid ManyToOneDP's registering the object without
        # the listonly flag in its own preprocess stage (results in UPDATE)
        # statements being emitted
        if not delete and self.passive_updates:
            self._process_key_switches(deplist, uowcommit)
    def _process_key_switches(self, deplist, uowcommit):
        """Find live referencing objects whose target's primary key changed,
        and re-populate their foreign key values."""
        switchers = util.Set([s for s in deplist if self._pks_changed(uowcommit, s)])
        if switchers:
            # yes, we're doing a linear search right now through the UOW.  only
            # takes effect when primary key values have actually changed.
            # a possible optimization might be to enhance the "hasparents" capability of
            # attributes to actually store all parent references, but this introduces
            # more complicated attribute accounting.
            for s in [elem for elem in uowcommit.session.identity_map.all_states()
                if issubclass(elem.class_, self.parent.class_) and
                    self.key in elem.dict and
                    elem.dict[self.key]._state in switchers
                ]:
                uowcommit.register_object(s, listonly=self.passive_updates)
                sync.populate(s.dict[self.key]._state, self.mapper, s, self.parent, self.prop.synchronize_pairs)
                #self.syncrules.execute(s.dict[self.key]._state, s, None, None, False)
    def _pks_changed(self, uowcommit, state):
        return sync.source_changes(uowcommit, state, self.mapper, self.prop.synchronize_pairs)
class ManyToOneDP(DependencyProcessor):
    """Dependency processor for many-to-one relations: child rows are saved
    first, and the parent's foreign key columns receive the child's key
    values."""
    def __init__(self, prop):
        DependencyProcessor.__init__(self, prop)
        # Also watch for PK changes on the referenced mapper's rows.
        self.mapper._dependency_processors.append(DetectKeySwitch(prop))
    def register_dependencies(self, uowcommit):
        # Mirror image of OneToManyDP.register_dependencies: the referenced
        # (child) mapper saves before the referencing (parent) mapper.
        if self.post_update:
            if not self.is_backref:
                stub = MapperStub(self.parent, self.mapper, self.key)
                uowcommit.register_dependency(self.mapper, stub)
                uowcommit.register_dependency(self.parent, stub)
                uowcommit.register_processor(stub, self, self.parent)
        else:
            uowcommit.register_dependency(self.mapper, self.parent)
            uowcommit.register_processor(self.mapper, self, self.parent)
    def process_dependencies(self, task, deplist, uowcommit, delete = False):
        """Copy (or clear) referenced key values into the FK columns of each
        referencing state in *deplist*."""
        #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " process_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
        if delete:
            if self.post_update and not self.cascade.delete_orphan and not self.passive_deletes=='all':
                # post_update means we have to update our row to not reference the child object
                # before we can DELETE the row
                for state in deplist:
                    self._synchronize(state, None, None, True, uowcommit)
                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
                    if added or unchanged or deleted:
                        self._conditional_post_update(state, uowcommit, deleted + unchanged + added)
        else:
            for state in deplist:
                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=True)
                if added or deleted or unchanged:
                    for child in added:
                        self._synchronize(state, child, None, False, uowcommit)
                    self._conditional_post_update(state, uowcommit, deleted + unchanged + added)
    def preprocess_dependencies(self, task, deplist, uowcommit, delete = False):
        """Register referenced objects for save/delete ahead of the
        topological sort; post_update relations skip this entirely."""
        #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " PRE process_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
        if self.post_update:
            return
        if delete:
            if self.cascade.delete or self.cascade.delete_orphan:
                for state in deplist:
                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
                    if self.cascade.delete_orphan:
                        todelete = added + unchanged + deleted
                    else:
                        todelete = added + unchanged
                    for child in todelete:
                        if child is None:
                            continue
                        uowcommit.register_object(child, isdelete=True)
                        for c, m in self.mapper.cascade_iterator('delete', child):
                            uowcommit.register_object(c._state, isdelete=True)
        else:
            for state in deplist:
                uowcommit.register_object(state)
                if self.cascade.delete_orphan:
                    (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
                    if deleted:
                        for child in deleted:
                            if self.hasparent(child) is False:
                                # De-referenced orphan: cascade the delete.
                                uowcommit.register_object(child, isdelete=True)
                                for c, m in self.mapper.cascade_iterator('delete', child):
                                    uowcommit.register_object(c._state, isdelete=True)
    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
        # Copy the referenced row's key values into this state's FK columns,
        # or clear them when clearkeys is set or no child is present.
        if state is None or (not self.post_update and uowcommit.is_deleted(state)):
            return
        if clearkeys or child is None:
            sync.clear(state, self.parent, self.prop.synchronize_pairs)
        else:
            self._verify_canload(child)
            sync.populate(child, self.mapper, state, self.parent, self.prop.synchronize_pairs)
class ManyToManyDP(DependencyProcessor):
    """Dependency processor for many-to-many relations: both sides save
    before the association table rows are inserted/updated/deleted."""
    def register_dependencies(self, uowcommit):
        # many-to-many.  create a "Stub" mapper to represent the
        # "middle table" in the relationship.  This stub mapper doesnt save
        # or delete any objects, but just marks a dependency on the two
        # related mappers.  its dependency processor then populates the
        # association table.
        stub = MapperStub(self.parent, self.mapper, self.key)
        uowcommit.register_dependency(self.parent, stub)
        uowcommit.register_dependency(self.mapper, stub)
        uowcommit.register_processor(stub, self, self.parent)
    def process_dependencies(self, task, deplist, uowcommit, delete = False):
        """Accumulate association-table rows to insert/update/delete from the
        collection history of each state, then execute them in bulk."""
        #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " process_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
        connection = uowcommit.transaction.connection(self.mapper)
        secondary_delete = []
        secondary_insert = []
        secondary_update = []
        if self.prop._reverse_property:
            reverse_dep = getattr(self.prop._reverse_property, '_dependency_processor', None)
        else:
            reverse_dep = None
        if delete:
            for state in deplist:
                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=self.passive_deletes)
                if deleted or unchanged:
                    for child in deleted + unchanged:
                        # Skip rows the reverse-direction processor already
                        # handled (tracked via uowcommit.attributes).
                        if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes):
                            continue
                        associationrow = {}
                        self._synchronize(state, child, associationrow, False, uowcommit)
                        secondary_delete.append(associationrow)
                        uowcommit.attributes[(self, "manytomany", state, child)] = True
        else:
            for state in deplist:
                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key)
                if added or deleted:
                    for child in added:
                        if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes):
                            continue
                        associationrow = {}
                        self._synchronize(state, child, associationrow, False, uowcommit)
                        uowcommit.attributes[(self, "manytomany", state, child)] = True
                        secondary_insert.append(associationrow)
                    for child in deleted:
                        if child is None or (reverse_dep and (reverse_dep, "manytomany", child, state) in uowcommit.attributes):
                            continue
                        associationrow = {}
                        self._synchronize(state, child, associationrow, False, uowcommit)
                        uowcommit.attributes[(self, "manytomany", state, child)] = True
                        secondary_delete.append(associationrow)
                if not self.passive_updates and unchanged and self._pks_changed(uowcommit, state):
                    # Parent PK changed: rewrite existing association rows,
                    # matching on the old key values via "old_"-prefixed binds.
                    for child in unchanged:
                        associationrow = {}
                        sync.update(state, self.parent, associationrow, "old_", self.prop.synchronize_pairs)
                        sync.update(child, self.mapper, associationrow, "old_", self.prop.secondary_synchronize_pairs)
                        #self.syncrules.update(associationrow, state, child, "old_")
                        secondary_update.append(associationrow)
        if secondary_delete:
            secondary_delete.sort()
            # TODO: precompile the delete/insert queries?
            # NOTE(review): `associationrow` here is the variable left over
            # from the last loop iteration, used only for its keys (the same
            # for every accumulated row) — confirm before refactoring.
            statement = self.secondary.delete(sql.and_(*[c == sql.bindparam(c.key, type_=c.type) for c in self.secondary.c if c.key in associationrow]))
            result = connection.execute(statement, secondary_delete)
            if result.supports_sane_multi_rowcount() and result.rowcount != len(secondary_delete):
                raise exceptions.ConcurrentModificationError("Deleted rowcount %d does not match number of secondary table rows deleted from table '%s': %d" % (result.rowcount, self.secondary.description, len(secondary_delete)))
        if secondary_update:
            statement = self.secondary.update(sql.and_(*[c == sql.bindparam("old_" + c.key, type_=c.type) for c in self.secondary.c if c.key in associationrow]))
            result = connection.execute(statement, secondary_update)
            if result.supports_sane_multi_rowcount() and result.rowcount != len(secondary_update):
                raise exceptions.ConcurrentModificationError("Updated rowcount %d does not match number of secondary table rows updated from table '%s': %d" % (result.rowcount, self.secondary.description, len(secondary_update)))
        if secondary_insert:
            statement = self.secondary.insert()
            connection.execute(statement, secondary_insert)
    def preprocess_dependencies(self, task, deplist, uowcommit, delete = False):
        """Schedule delete-orphan cascades for children removed from the
        collection; association rows themselves need no preprocessing."""
        #print self.mapper.mapped_table.name + " " + self.key + " " + repr(len(deplist)) + " preprocess_dep isdelete " + repr(delete) + " direction " + repr(self.direction)
        if not delete:
            for state in deplist:
                (added, unchanged, deleted) = uowcommit.get_attribute_history(state, self.key,passive=True)
                if deleted:
                    for child in deleted:
                        if self.cascade.delete_orphan and self.hasparent(child) is False:
                            uowcommit.register_object(child, isdelete=True)
                            for c, m in self.mapper.cascade_iterator('delete', child):
                                uowcommit.register_object(c._state, isdelete=True)
    def _synchronize(self, state, child, associationrow, clearkeys, uowcommit):
        # Fill *associationrow* with key values from both sides; the row dict
        # is later executed against the secondary (association) table.
        if associationrow is None:
            return
        self._verify_canload(child)
        sync.populate_dict(state, self.parent, associationrow, self.prop.synchronize_pairs)
        sync.populate_dict(child, self.mapper, associationrow, self.prop.secondary_synchronize_pairs)
    def _pks_changed(self, uowcommit, state):
        return sync.source_changes(uowcommit, state, self.parent, self.prop.synchronize_pairs)
class AssociationDP(OneToManyDP):
    """One-to-many processor for association-object relations; association
    rows always live and die with their parent, so delete cascades are
    forced on."""
    def __init__(self, *args, **kwargs):
        OneToManyDP.__init__(self, *args, **kwargs)
        self.cascade.delete = self.cascade.delete_orphan = True
class MapperStub(object):
    """Pose as a Mapper representing the association table in a
    many-to-many join, when performing a ``flush()``.
    The ``Task`` objects in the objectstore module treat it just like
    any other ``Mapper``, but in fact it only serves as a dependency
    placeholder for the many-to-many update task.
    """
    __metaclass__ = util.ArgSingleton
    def __init__(self, parent, mapper, key):
        # `parent` and `key` are not stored; presumably they only serve to
        # distinguish ArgSingleton instances — verify against util.ArgSingleton.
        self.mapper = mapper
        self.base_mapper = self
        self.class_ = mapper.class_
        self._inheriting_mappers = []
    def polymorphic_iterator(self):
        # A stub has no inheritance hierarchy: iterate over itself only.
        return iter([self])
    def _register_dependencies(self, uowcommit):
        # No-op: dependencies are registered by the owning DependencyProcessor.
        pass
    def _save_obj(self, *args, **kwargs):
        # No-op: the stub itself persists nothing.
        pass
    def _delete_obj(self, *args, **kwargs):
        # No-op: the stub itself deletes nothing.
        pass
    def primary_mapper(self):
        return self
| santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/SQLAlchemy-0.4.5-py2.5.egg/sqlalchemy/orm/dependency.py | Python | bsd-3-clause | 26,274 |
import numpy as np
import pandas as pd
import gen_fns
import math
import re
import csv
import correlation_matrix as co
#
# reformat_raw_data
#
# Takes compiled data on historical proliferation contributers from xlsx
# spreadsheet (final_hist_data.xlsx) and reformats. xlsx file has a single
# row for each country with separate data for a 'pursue date' and an 'acquire
# 'date'. These are split out so that each state has a unique row entry for
# every year for which we have data. Nuclear Weapon states have an entry when
# they began pursuing and another when they acquired. States that explored or
# pursued have an entry for that year as well as an entry for 2015. States
# that never pursued have only an entry for 2015.
# A 'Status' column in output codes the action of the state in the given year
# (Never/Explore/Pursue/Acquire). Coding details available inline.
#
# Input
# File: tab-separated-file dropped straight from xlsx spreadsheet
# (n_header): number of extra header lines to be skipped from spreadsheet
# (outfile): name of desired output file, written to directory you're
# running python from
# Output
# States: Array of states for each entry of the file database
# Data: structured array of historical data for each state at time
# indicated by 'Date'
#
def reformat_raw_data(file, n_header=1, outfile=None):
    """Split each state's pursue/acquire record into one row per event year.

    Args:
        file: tab-separated dump of the xlsx spreadsheet (final_hist_data).
        n_header: number of extra header lines to skip.
        outfile: optional name of a tab-separated output file.

    Returns:
        (final_states, final_data): array of state names plus the matching
        structured array of historical data; 'Status' codes the state's
        action in the year given by 'Date' (coding table inline below).
    """
    from gen_fns import get_data
    from numpy.lib.recfunctions import append_fields
    countries, columns, raw_data = get_data(file, n_header, named_struct=True)
    pursue_names = []
    acquire_names = []
    clean_names = []
    status = np.full(len(countries), 0)
    raw_data = append_fields(raw_data, 'Status', status)
    cols = raw_data.dtype.names
    # Move the appended 'Status' column into third position.
    # list() is required: Python 3 ranges do not support insert().
    order = list(range(0, len(cols) - 1))
    order.insert(2, len(cols) - 1)
    data = raw_data[[cols[i] for i in order]]
    cols = data.dtype.names
    # Partition columns into pursue-specific, acquire-specific, and shared;
    # clean_names holds the prefix-free names used for the final output.
    for c in range(len(cols)):
        if ('Pursuit_' in cols[c]):
            pursue_names.append(cols[c])
            new_str = re.sub('Pursuit_', '', cols[c])
            clean_names.append(new_str)
        elif ('Acquire_' in cols[c]):
            acquire_names.append(cols[c])
        else:
            pursue_names.append(cols[c])
            acquire_names.append(cols[c])
            clean_names.append(cols[c])
    # all countries, data split into pursue-relevant or acquire-relevant
    pursue_array = data[pursue_names]
    acquire_array = data[acquire_names]
    # Negative pursuit dates encode 'explored only'.
    no_acquire_mask = np.isnan(acquire_array['Acquire_Date'])
    conven_mask = np.isnan(pursue_array['Pursuit_Date'])
    explore_mask = (pursue_array['Pursuit_Date'] < 0)
    pursue_only_mask = (np.isnan(acquire_array['Acquire_Date'])) & (~(np.isnan(pursue_array['Pursuit_Date'])) & (pursue_array['Pursuit_Date'] > 0))
    # states that pursued (_init_) and successfully acquired
    acquire_states = countries[~no_acquire_mask]
    acquire_final_array = acquire_array[~no_acquire_mask]
    acquire_init_array = pursue_array[~no_acquire_mask]
    # states that never pursued and have only conventional weapons
    conven_array = pursue_array[conven_mask]
    conven_states = countries[conven_mask]
    # states that explored NW but ultimately did not pursue
    explore_array = pursue_array[explore_mask]
    explore_present_array = acquire_array[explore_mask]
    explore_states = countries[explore_mask]
    # states that pursued but did not succeed in acquiring
    pursue_only_array = pursue_array[pursue_only_mask]
    pursue_present_array = acquire_array[pursue_only_mask]
    pursue_states = countries[pursue_only_mask]
    # Status
    # -1 present data for a state that unsuccessfully pursued
    # 0 present data for never-pursued (has only 'conventional weapons')
    # 1 historical data for explored
    # 2 historical data for pursued
    # 3 historical data for acquired
    acquire_init_array['Status'] = 2
    acquire_final_array['Status'] = 3
    conven_array['Status'] = 0
    pursue_only_array['Status'] = 2
    pursue_present_array['Status'] = -1
    explore_array['Status'] = 1
    explore_present_array['Status'] = 0
    # Non-events are pinned to the present-day snapshot year (2015).
    conven_array['Pursuit_Date'] = 2015
    explore_array['Pursuit_Date'] = abs(explore_array['Pursuit_Date'])
    explore_present_array['Acquire_Date'] = 2015
    pursue_present_array['Acquire_Date'] = 2015
    # Rename every sub-array (and its mask) to the shared clean names so
    # they can be stacked into a single structured array.
    acquire_final_array.dtype.names = clean_names
    acquire_final_array.mask.dtype.names = clean_names
    acquire_init_array.dtype.names = clean_names
    acquire_init_array.mask.dtype.names = clean_names
    conven_array.dtype.names = clean_names
    conven_array.mask.dtype.names = clean_names
    pursue_only_array.dtype.names = clean_names
    pursue_only_array.mask.dtype.names = clean_names
    pursue_present_array.dtype.names = clean_names
    pursue_present_array.mask.dtype.names = clean_names
    explore_array.dtype.names = clean_names
    explore_array.mask.dtype.names = clean_names
    explore_present_array.dtype.names = clean_names
    explore_present_array.mask.dtype.names = clean_names
    # States and data rows must be stacked in the same order.
    final_states = np.hstack((conven_states,
                              explore_states,
                              explore_states,
                              pursue_states,
                              acquire_states,
                              acquire_states,
                              pursue_states))
    final_data = np.hstack((conven_array,
                            explore_present_array,
                            explore_array,
                            pursue_only_array,
                            acquire_init_array,
                            acquire_final_array,
                            pursue_present_array))
    header ='Country' + '\t' + ('\t'.join(map(str,final_data.dtype.names)))
    if outfile is not None:
        # 'wb' is the Python 2 csv convention; on Python 3 use 'w' with
        # newline='' instead.
        with open(outfile, 'wb') as f:
            writer = csv.writer(f)
            writer.writerow([header])
            i = 0
            for comp in final_data.compressed():
                cur_line = final_states[i]
                for c in range(len(comp)):
                    val = comp[c]
                    # NOTE(review): first two values per row appear to be
                    # Date and Status, printed as ints -- confirm column order.
                    if (c <= 1):
                        val = int(val)
                    cur_line = cur_line + '\t' + str(val)
                writer.writerow([cur_line])
                i+=1
    return final_states,final_data
#
# calc_weights
#
# Given a tab separated input file with the following columns, use partial
# component analysis to determine the relative importance of each factor
# In:
# filename: tab-separated input file with factor scores for each state
# columns: Country, Date, Status, Factor1, Factor2, ....
# mn_status: min value of nuclear weapons status for state to be considered
# in analysis (-1 = gave up weapons program, 0 = never pursued,
# 1 = explored, 2 = pursued, 3 = acquired)
# mx_status: max value of weapons status to be considered
# correl_min: weights below this number are rescaled to zero as they are
# not significant (including negative correlations)
#
# Out:
# weights: relative importance of each factor from the input file. Weights
# sum to 1 and cannot be negative.
#
def calc_weights(filename, mn_status=0, mx_status=2, correl_min = 1e-6):
    """Derive normalized factor weights via correlation analysis.

    Args:
        filename: tab-separated file (Country, Date, Status, Factor1, ...).
        mn_status: minimum weapon-status code to include (-1 gave up,
            0 never pursued, 1 explored, 2 pursued, 3 acquired).
        mx_status: maximum weapon-status code to include.
        correl_min: correlations below this (including all negative ones)
            are zeroed as insignificant.

    Returns:
        weights: numpy array of non-negative factor weights summing to 1.
    """
    # Context manager ensures the file handle is closed promptly.
    with open(filename, 'r') as data_file:
        full_matrix = np.loadtxt(data_file, skiprows=1,
                                 usecols=(2,3,4,5,6,7,8,9,10))
    relevant_mask = ((full_matrix[:,0] >= mn_status) &
                     (full_matrix[:,0] <= mx_status))
    matrix = full_matrix[relevant_mask]
    # First row of the correlation matrix: Status vs. each factor.
    cor = co.Cor_matrix(matrix)
    factor_vals = np.array(cor[0,1:])[0]
    factor_vals[factor_vals < correl_min] = 0
    f_tot = factor_vals.sum()
    weights = factor_vals/f_tot # normalize weights to sum to one
    return weights
#
# calc_pursuit
#
# Calculates 0-10 scale pursuit scores for historical data using factor
# weighting from Baptiste's correlation analysis.
#
# In:
# raw_data: np.array of value for each factor on a 0-10 scale
# weights: np.array of factor weights (from correlation analysis)
# Out:
# final_vals: List of pursuit equation values in same order as input data
# (order: Auth, Mil_Iso, Reactor, En_Repr, Sci_Net, Mil_Sp,
# Conflict, U_Res)
#
def calc_pursuit(raw_data, weights):
    """Compute 0-10 pursuit-equation scores for each row of factor data.

    Args:
        raw_data: np.array of 0-10 factor values, one row per state.
        weights: np.array of factor weights (from correlation analysis).

    Returns:
        List of pursuit scores (rounded to 4 decimals), one per input row.
    """
    weighted = weights * raw_data
    return [round(weighted[row].sum(), 4) for row in range(raw_data.shape[0])]
# Map of nuclear weapon states and their acquire date
def get_nws():
    """Return {nuclear weapon state: year of acquisition}."""
    # Way's dataset coded India at 1988, but 1974 was the first detonation.
    return {
        "China": 1964,
        "France": 1960,
        "India": 1974,
        "Israel": 1969,
        "N-Korea": 2006,
        "Pakist": 1987,
        "S-Afric": 1979,
        "UK": 1952,
        "US": 1945,
        "USSR": 1949,
    }
#
# time_to_acquire
#
# Returns a dictionary of countries that pursued and how long it took them to
# succeed. Countries that never acquired are listed with a negative number that
# indicates the time elapsed from beginning of pursuit to 2015
#
# In: (none)
# Out:
# t2acq: Dictionary of how long it took state to acquire nuclear weapon
#
def time_to_acquire():
    """Return {state: years from start of pursuit to acquisition}.

    Negative values mark states that never acquired; the magnitude is the
    time elapsed from the beginning of pursuit to 2015.
    """
    return {
        "Argent": -37, "Austral": -54, "Brazil": -37, "China": 9,
        "Egypt": -50, "France": 6, "India": 10, "Iran": -30,
        "Iraq": -32, "Israel": 9, "Libya": -45, "N-Korea": 26,
        "S-Korea": -45, "Pakist": 15, "S-Afric": 5, "Syria": -15,
        "UK": 5, "US": 3, "USSR": 4,
    }
# States that pursued and their pursuit date
# (from google doc: Main Prolif Spreadsheet, Dec-5-2016)
def get_pursue():
    """Return {state: year it began pursuing nuclear weapons}.

    Source: Main Prolif Spreadsheet google doc, Dec-5-2016.
    """
    return {
        "Argent": 1978, "Austral": 1961, "Brazil": 1978, "China": 1955,
        "Egypt": 1965, "France": 1954, "India": 1964, "Iran": 1985,
        "Iraq": 1983, "Israel": 1960, "Libya": 1970, "N-Korea": 1980,
        "S-Korea": 1970, "Pakist": 1972, "S-Afric": 1974, "Syria": 2000,
        "UK": 1947, "US": 1939, "USSR": 1945,
    }
# From a matched pair of numpy arrays containing countries and their pursuit
# scores, make a new array of the pursuit scores for countries that succeeded
def get_prolif_pe(countries, pes):
    """Filter (countries, pes) down to states that acquired nuclear weapons.

    Args:
        countries: array of state names.
        pes: matching array of pursuit scores.

    Returns:
        (prolif_st, prolif_pes): matched lists restricted to proliferants.
    """
    prolif_pes = []
    prolif_st = []
    # Fix: the original called get_prolif(), which is not defined anywhere
    # in this module (NameError at runtime). The set of successful
    # proliferants is provided by get_nws(), consistent with the
    # "Prolif" branch of get_pes().
    proliferants = get_nws()
    for i in range(len(countries)):
        curr_state = countries[i]
        if curr_state in proliferants:
            prolif_pes.append(pes[i])
            prolif_st.append(curr_state)
    return(prolif_st, prolif_pes)
# From a matched pair of numpy arrays containing countries and their pursuit
# scores, make a new array of the scores for the subset of countries that
# actually chose to pursue and/or succeeded
def get_pes(all_countries, pes, status):
    """Restrict matched (state, score) arrays to pursuers or proliferants.

    Args:
        all_countries: array of state names.
        pes: matching array of pursuit scores.
        status: "Pursue" for states that pursued, "Prolif" for states
            that acquired.

    Returns:
        (states, scores) lists for the selected subset, or an error string
        for an unrecognized status.
    """
    if status == "Pursue":
        reference = get_pursue()
    elif status == "Prolif":
        reference = get_nws()
    else:
        return "DO YOU WANT PURSUIT OR PROLIFERANTS?"
    selected_scores = []
    selected_states = []
    for idx in range(len(all_countries)):
        state = all_countries[idx]
        if state in reference:
            selected_scores.append(pes[idx])
            selected_states.append(state)
    return(selected_states, selected_scores)
#
# raw_to_factor_scores
#
# Read in the historical data for all states, output a tsv with the data
# converted into 0-10 scores for each of the 8 factors
#
# In:
# infile: tsv for historical data in clean format
# (from reformat_raw_data)
# (n_head): how many header lines to skip
# (outfile): name of desired tab separated file for output, with row and
# column headers
# Out: (all structures preserve order across columns)
# countries: array of state names that pairs with output data
# dates: array of date for the row of factor scores
# status: array with status for the row (never/explore/pursue/acquire)
# (status coding described in README.md)
# columns: list of column names for data
# all_scores: np.array of 0-10 score for each factor
#
def raw_to_factor_scores(infile, n_head=1, outfile=None):
    # Convert raw historical measurements into 0-10 scores for each of the
    # 8 factors by dispatching each factor's raw columns to its scoring
    # function (full input/output contract in the header comment above).
    from gen_fns import get_data
    countries, col_names, raw_data = get_data(infile, n_header = n_head,
                                               named_struct=True)
    # Map factor name -> [scoring function, raw input column(s)].
    # The Upsala conflict coding is kept for reference but disabled in
    # favor of the pairwise-relations coding.
    factors = {
        "Reactor": [react2score, [raw_data['Reactors'],
                                  raw_data['Research_Reactors']]],
        "Mil_Iso": [alliance2iso_score, [raw_data['NonProlif_Alliance'],
                                         raw_data['Prolif_Alliance']]],
        "En_Repr": [enrich2score, raw_data['Enrichment']],
        "U_Res": [ures2score, raw_data['UReserves']],
        "Sci_Net": [network2score, raw_data['Scientific_Network']],
#        "Conflict": [upsala2conflict_score, [raw_data['Unique_Conflicts'],
#                                            raw_data['Conflict_Intensity']]],
        "Conflict": [relations2conflict_score,
                     [raw_data['Weapon_Status_1'],
                      raw_data['Conflict_Relation_1'],
                      raw_data['Weapon_Status_2'],
                      raw_data['Conflict_Relation_2'],
                      raw_data['Weapon_Status_3'],
                      raw_data['Conflict_Relation_3'],
                      raw_data['Status']]],
        "Auth": [polity2auth_score, raw_data['Polity_Index']],
        "Mil_Sp": [mil_sp2score, raw_data['Military_GDP']]
    }
    score_columns = []
    i = 0
    # NOTE(review): plain-dict iteration determines the column order of the
    # output. It is arbitrary before Python 3.7, but header and data stay
    # consistent because score_columns is built in the same pass.
    for key in factors:
        score_columns.append(key)
        fn, inputs = factors[key]
        scores = fn(inputs)
        # First factor starts the matrix; later ones are appended as columns.
        if (i == 0):
            all_scores = scores
        else:
            all_scores = np.column_stack((all_scores, scores))
        i+=1
    header ='Country' + '\t' + 'Date'+ '\t' + 'Status' + '\t'+ (
        '\t'.join(map(str,score_columns)))
    if (outfile != None):
        # 'wb' is the Python 2 csv convention ('w', newline='' on Python 3).
        with open(outfile, 'wb') as f:
            writer = csv.writer(f)
            writer.writerow([header])
            for row in range(len(all_scores)):
                cur_line = countries[row]+ '\t'+ (
                    str(int(raw_data['Date'][row]))+ '\t') + (
                    str(raw_data['Status'][row]) + '\t') + (
                    ('\t'.join(map(str, all_scores[row]))))
                writer.writerow([cur_line])
    return countries, raw_data['Date'], raw_data['Status'], score_columns, all_scores
# Bilateral agreements converted to score
# If only with non-nuclear states (npt), score is lower
# If with nuclear states (nuclear umbrella), then only count these
#
# NOT CURRENTLY USED
#
def bilateral2score(npt, ws=None):
    """Score bilateral agreements on a 0-10 scale (NOT CURRENTLY USED).

    Agreements with only non-nuclear states (npt counts) score low (1-3).
    If agreements with nuclear states exist (ws counts, i.e. a nuclear
    umbrella), only those are counted and score high (6-10).

    Args:
        npt: np.array of agreement counts with non-nuclear states.
        ws: optional np.array of agreement counts with nuclear states.

    Returns:
        np.array of scores (NaN propagates when npt is NaN and ws is absent).
    """
    THRESH_LOW = 2
    THRESH_MID = 4
    THRESH_HIGH = 7
    all_scores = np.ndarray(npt.size)
    for i in range(npt.size):
        nuclear_allies = None if ws is None else ws[i]
        # No (usable) nuclear-state agreements: score from npt counts.
        if nuclear_allies is None or nuclear_allies == 0 \
                or math.isnan(nuclear_allies):
            if math.isnan(npt[i]):
                score = np.nan
            elif npt[i] <= THRESH_LOW:
                score = 1
            elif npt[i] <= THRESH_MID:
                score = 2
            else:
                score = 3
        else:
            if nuclear_allies <= THRESH_LOW:
                score = 6
            elif nuclear_allies <= THRESH_MID:
                score = 7
            elif nuclear_allies <= THRESH_HIGH:
                score = 8
            else:
                score = 10
        all_scores[i] = score
    return all_scores
#
# Global Peace Index
# (includes both domestic and external stability)
# Institute for Economics & Peace Global Peace Index
# http://economicsandpeace.org/wp-content/uploads/2015/06/Global-Peace-Index-Report-2015_0.pdf
#
# NOT USED BECAUSE NO HISTORICAL DATA
#
def gpi2conflict_score(gpi_array):
    """Map Global Peace Index values to 2-10 conflict scores.

    NOT USED BECAUSE NO HISTORICAL DATA. Source: Institute for Economics &
    Peace Global Peace Index 2015 report.

    Args:
        gpi_array: np.array of GPI values (higher = less peaceful).

    Returns:
        np.array of scores; NaN propagates.
    """
    # (upper cutoff, score) bands; anything >= 3.5 scores 10.
    bands = ((1.5, 2), (2.0, 4), (2.5, 6), (3.5, 8))
    all_scores = np.ndarray(gpi_array.size)
    for i in range(gpi_array.size):
        gpi_val = gpi_array[i]
        if math.isnan(gpi_val):
            all_scores[i] = np.nan
            continue
        score = 10
        for cutoff, banded in bands:
            if gpi_val < cutoff:
                score = banded
                break
        all_scores[i] = score
    return all_scores
#
# Fraction of GDP spent on military
# World Bank http://data.worldbank.org/indicator/MS.MIL.XPND.GD.ZS
#
def mil_sp2score(mil_gdp):
    """Score military spending as a fraction of GDP on a 1-10 scale.

    Source: World Bank MS.MIL.XPND.GD.ZS indicator.

    Args:
        mil_gdp: np.array of military spending percentages of GDP.

    Returns:
        np.array of scores; NaN propagates.
    """
    # (upper cutoff, score) bands; >= 5% of GDP scores 10.
    bands = ((1, 1), (2, 2), (3, 4), (5, 7))
    all_scores = np.ndarray(mil_gdp.size)
    for i in range(mil_gdp.size):
        spend = mil_gdp[i]
        if math.isnan(spend):
            all_scores[i] = np.nan
            continue
        score = 10
        for cutoff, banded in bands:
            if spend < cutoff:
                score = banded
                break
        all_scores[i] = score
    return all_scores
# Convert number of reactors to a score
# 'Reactors' is commercial, 'Research_Reactors' are in a separate column,
# with negative number indicating they have only planned reactors.
# Built reactors take precedence over planned
#
# REFERENCE???
# ** Correlation analysis demonstrated that reactors are protective against
# proliferation (hypothesis that countries stable enough to run reactors
# have other mitigating factors that reduce proliferation risk)
# Therefore reactor score has been inverted so that the more reactors a country
# has the lower their pursuit score.
def react2score(all_reactors):
    """Score reactor counts, INVERTED so more reactors -> lower score.

    Correlation analysis showed reactors are protective against
    proliferation, hence the final 10 - score inversion.

    Args:
        all_reactors: [commercial counts, research counts] as np.arrays.
            Negative counts mean the reactors are only planned; built
            reactors take precedence over planned ones.

    Returns:
        np.array of inverted scores; NaN propagates.
    """
    NONE_BUILT = 0.0
    PLANNED_MANY = -4.0   # planned-only totals are negative
    PLANNED_FEW = -1.0
    FEW_BUILT = 3.0
    SOME_BUILT = 7.0
    commercial = all_reactors[0]
    research = all_reactors[1]
    all_scores = np.ndarray(commercial.size)
    for i in range(commercial.size):
        # Opposite signs mean one count is planned (negative) and the other
        # built (positive): the built (larger) figure wins.
        if (commercial[i] * research[i]) < 0:
            total = max(research[i], commercial[i])
        else:
            total = research[i] + commercial[i]
        if math.isnan(total):
            raw = np.nan
        elif total == NONE_BUILT:
            raw = 0
        elif total <= PLANNED_MANY:
            raw = 2
        elif total <= PLANNED_FEW:
            raw = 1
        elif total <= FEW_BUILT:
            raw = 4
        elif total <= SOME_BUILT:
            raw = 7
        else:
            raw = 10
        all_scores[i] = 10 - abs(raw)
    return all_scores
#
# upsala2conflict_score
#
# Uses number of unique conflicts * conflict intensity to determine total
# conflict score. We have re-coded Iraq, Afghanistan, and Mali wars (2000s)
# as coalition-wars, such that for individual countries the number of deaths
# is the lower intensity coded as 1.
# Max # of unique conflicts in historical data is 3
# Conflict = 5 for neutral relationships, increases with additional conflict.
#
# From Upsala
#
def upsala2conflict_score(all_conflict):
    """Score conflict as (unique conflicts * intensity) above a neutral base.

    Based on the Uppsala data. Coalition wars (Iraq, Afghanistan, Mali in
    the 2000s) are re-coded with negative intensity so individual members
    are downgraded; a negative conflict count marks an added non-armed but
    tense conflict (e.g. the Korean Armistice). In either case only the
    absolute number of conflicts is used.

    Args:
        all_conflict: [n_conflicts, intensity] as np.arrays.

    Returns:
        np.array of scores: 5 (neutral) + total, capped at 10; NaN propagates.
    """
    NEUTRAL_BASE = 5
    MAX_EXTRA = 4
    n_conflicts = all_conflict[0]
    intensity = all_conflict[1]
    all_scores = np.ndarray(n_conflicts.size)
    for i in range(n_conflicts.size):
        if (intensity[i] < 0) or (n_conflicts[i] < 0):
            total = abs(n_conflicts[i])
        else:
            total = n_conflicts[i] * intensity[i]
        if math.isnan(total):
            all_scores[i] = np.nan
        elif total <= MAX_EXTRA:
            all_scores[i] = NEUTRAL_BASE + total
        else:
            all_scores[i] = 10
    return all_scores
#
# relations2conflict_score
#
# Averages individual conflict scores with up to 3 pairs of countries that
# have significant relationships (as determined by Hoffman and Buys).
# Each country has a weapons status (0,2,3) and a defined
# relationship with partner country (+1 = allies, 0 = neutral, -1 = enemies).
# Scores are determined based on those 3 values using the score_matrix
# dictionary in lookup_conflict_val (these dictionary values are also used
# in mbmore::StateInst.cc (archetype for use with Cyclus).
#
# In: np.array of arrays - for each target country, weapons status and
# relationship of each pair country,
# and weapon status of target country
# Out: np.array - for each target country, final conflict score (0-10 scale)
#
def relations2conflict_score(all_conflict):
    """Average pairwise conflict scores over up to three partner states.

    Each partner pair contributes a 0-10 value from lookup_conflict_val()
    based on the host's weapon status, the partner's weapon status, and
    their relationship (+1 ally, 0 neutral, -1 enemy). Pairs whose weapon
    status is NaN are skipped; with no valid pairs the result is NaN.

    Args:
        all_conflict: np.array of arrays: (status1, relation1, status2,
            relation2, status3, relation3, host status).

    Returns:
        np.array of averaged conflict scores (0-10), NaN where undefined.
    """
    host_status = all_conflict[6]
    # (weapon status, relation) column pairs for the three partner slots.
    pairs = ((all_conflict[0], all_conflict[1]),
             (all_conflict[2], all_conflict[3]),
             (all_conflict[4], all_conflict[5]))
    n_states = all_conflict[0].size
    all_scores = np.ndarray(n_states)
    for i in range(n_states):
        total = 0
        count = 0
        for w_stat, relation in pairs:
            if np.isfinite(w_stat[i]):
                count += 1
                total += lookup_conflict_val(host_status[i],
                                             w_stat[i],
                                             relation[i])
        if math.isnan(total) or (count == 0):
            all_scores[i] = np.nan
        else:
            all_scores[i] = total/count
    return all_scores
#
# lookup_conflict_val
#
# Given the relationship between 2 countries (enemy, ally, neutral), and their
# respective weapon status, look up the 0-10 score for that conflict level
# 0-10 scores based on discussion with Andy Kydd and documented here:
# https://docs.google.com/document/d/1c9YeFngXm3RCbuyFCEDWJjUK9Ovn072SpmlZU6j1qhg/edit?usp=sharing
# Same defns are used in mbmore::StateInst.cc (archetype for use with Cyclus).
# Historical countries with scores of -1 (gave up a weapons program) or
# +1 (explored but did not pursue) have been reassigned a value of 0 (never
# pursued). These gradations could be refined in future work.
#
# In: statusA - weapons status of country A (0, 2, 3)
# statusB - weapons status of country B (0, 2, 3)
# relation - relationship between country A and B (-1, 0, +1)
#
# Out: Conflict factor score for that pair (0-10)
#
def lookup_conflict_val(statusA, statusB, relation):
    """Look up the 0-10 conflict score for a pair of states.

    Scores follow the matrix agreed with Andy Kydd (see the linked google
    doc above); the same definitions are used in mbmore::StateInst.cc.
    Statuses -1 (gave up program) and +1 (explored only) are collapsed
    onto 0 (never pursued).

    Args:
        statusA: weapon status of country A (0, 2, 3).
        statusB: weapon status of country B (0, 2, 3).
        relation: +1 allies, 0 neutral, -1 enemies.

    Returns:
        int: conflict factor score for the pair (0-10).
    """
    score_matrix = {
        "ally_0_0": 2,
        "neut_0_0": 2,
        "enemy_0_0": 6,
        "ally_0_2": 3,
        "neut_0_2": 4,
        "enemy_0_2": 8,
        "ally_0_3": 1,
        "neut_0_3": 4,
        "enemy_0_3": 6,
        "ally_2_2": 3,
        "neut_2_2": 4,
        "enemy_2_2": 9,
        "ally_2_3": 3,
        "neut_2_3": 5,
        "enemy_2_3": 10,
        "ally_3_3": 1,
        "neut_3_3": 3,
        "enemy_3_3": 5
    }
    if (relation == 1):
        prefix = "ally_"
    elif (relation == 0):
        prefix = "neut_"
    else:
        # Anything else (normally -1) is treated as enemies.
        prefix = "enemy_"
    # Convention: the smaller status is listed first in the key.
    lo, hi = sorted((statusA, statusB))
    # Collapse 'explored' (1) and 'gave up' (-1) onto 'never pursued' (0).
    if lo in (1, -1):
        lo = 0
    if hi in (1, -1):
        hi = 0
    return score_matrix[prefix + str(int(lo)) + "_" + str(int(hi))]
# convert network (defined by intuition as small=1, medium=2, large=3) into
# a factor score on 1-10 scale.
#
# Technology Achievement Index
#
# REFERENCE URL??
#
def network2score(sci_val):
    """Score scientific-network size (1 small, 2 medium, 3 large) on 1-10.

    Based on the Technology Achievement Index.

    NOTE(review): unlike the other factors, NaN here maps to the minimum
    score 1 rather than propagating -- presumably 'no data = negligible
    network'; confirm this is intentional.

    Args:
        sci_val: np.array of network-size codes.

    Returns:
        np.array of scores in {1, 2, 4, 7, 10}.
    """
    all_scores = np.ndarray(sci_val.size)
    for i in range(sci_val.size):
        val = sci_val[i]
        if math.isnan(val):
            all_scores[i] = 1
        elif val < 1:
            all_scores[i] = 2
        elif val < 2:
            all_scores[i] = 4
        elif val < 3:
            all_scores[i] = 7
        else:
            all_scores[i] = 10
    return all_scores
#
# First assign a score of 0-3 based on number of alliances with non-nuclear
# states.
# Then add to that score a 5, 6, or 7 based on the number of alliances with
# nuclear states (if none then add 0).
#
# Rice University Database http://atop.rice.edu/search
#
def alliance2iso_score(all_alliances):
    """Score military isolation as the inverse of alliance strength.

    A base of 1-3 comes from the number of alliances with non-nuclear
    states; 5, 6 or 7 is added for 1, 2 or 3+ alliances with nuclear
    states. The result is inverted (10 - score) because isolation is the
    opposite of being allied. Source: Rice University ATOP database.

    Args:
        all_alliances: [non-nuclear counts, nuclear counts] as np.arrays.

    Returns:
        np.array of isolation scores; NaN when both counts are NaN.
    """
    NP_FEW = 2
    NP_SOME = 4
    non_prolif = all_alliances[0]
    prolif = all_alliances[1]
    all_scores = np.ndarray(non_prolif.size)
    for i in range(non_prolif.size):
        if math.isnan(prolif[i]) and math.isnan(non_prolif[i]):
            base = np.nan
        elif non_prolif[i] <= NP_FEW:
            base = 1
        elif non_prolif[i] <= NP_SOME:
            base = 2
        else:
            base = 3
        if not math.isnan(prolif[i]):
            if prolif[i] == 1:
                base = base + 5
            elif prolif[i] == 2:
                base = base + 6
            elif prolif[i] >= 3:
                base = base + 7
        # Isolation is the inverse of the amount of alliances.
        all_scores[i] = 10 - base
    return all_scores
#
# Center for Systemic Peace
# Polity IV Series http://www.systemicpeace.org/inscrdata.html
#
def polity2auth_score(polity):
    """Return the Polity IV index unchanged.

    The Center for Systemic Peace Polity IV value is already used directly
    as the authoritarianism factor score.
    """
    return polity
#
# Fuhrmman http://www.matthewfuhrmann.com/datasets.html
# If any enrichment or reprocessing capability then 10, otherwise 0
#
def enrich2score(enrich):
    """Binary factor: any enrichment/reprocessing capability -> 10, else 0.

    Source: Fuhrmann dataset (matthewfuhrmann.com/datasets.html).
    """
    return enrich * 10.0
#
# If any U reserves than 10, otherwise 0
# OECD U report
# https://www.oecd-nea.org/ndd/pubs/2014/7209-uranium-2014.pdf
#
def ures2score(ures):
    """Binary factor: any uranium reserves -> 10, else 0.

    Source: OECD-NEA uranium 2014 report.
    """
    return ures * 10.0
| mbmcgarry/historical_prolif | hist_bench.py | Python | bsd-3-clause | 27,370 |
from __future__ import division
import abc
import numpy as n
import scipy.linalg as linalg
import scipy.optimize as opt
import scipy.spatial.distance as dist
class Feature(object):
    '''
    Abstract class that represents a feature to be used
    with :py:class:`pyransac.ransac.RansacFeature`
    '''
    # Python 2 style ABC registration; under Python 3 this attribute has
    # no effect and `class Feature(metaclass=abc.ABCMeta)` would be needed.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def __init__(self):
        pass

    @abc.abstractproperty
    def min_points(self):
        '''int: Minimum number of points needed to define the feature.'''
        pass

    @abc.abstractmethod
    def points_distance(self,points):
        '''
        This function implements a method to compute the distance
        of points from the feature.

        Args:
            points (numpy.ndarray): a numpy array of points the distance must be
                                    computed of.

        Returns:
            distances (numpy.ndarray): the computed distances of the points from the feature.
        '''
        pass

    @abc.abstractmethod
    def print_feature(self,num_points):
        '''
        This method returns an array of x,y coordinates for
        points that are in the feature.

        Args:
            num_points (numpy.ndarray): the number of points to be returned

        Returns:
            coords (numpy.ndarray): a num_points x 2 numpy array that contains
                                    the points coordinates
        '''
class Circle(Feature):
    '''
    Feature class for a Circle :math:`(x-x_c)^2 + (y-y_c)^2 - r = 0`
    '''

    min_points = 3
    '''int: Minimum number of points needed to define the circle (3).'''

    def __init__(self, points):
        self.radius, self.xc, self.yc = self.__fit(points)

    def __fit(self, pts):
        '''
        Compute the radius and the center coordinates of a circumference
        through three points.

        The circle x^2 + y^2 + D*x + E*y + F = 0 is linear in (D, E, F),
        so each point contributes one equation
        D*xi + E*yi + F = -(xi**2 + yi**2).

        Args:
            pts (numpy.ndarray): a (3,2) numpy array, each row is a 2D Point.

        Returns:
            (tuple): (radius, xc, yc) of the fitted circumference.

        Raises:
            RuntimeError: if the linear solve fails (e.g. collinear points).
        '''
        lhs = n.array([(px, py, 1) for px, py in pts])
        rhs = n.array([-(px**2 + py**2) for px, py in pts])
        try:
            D, E, F = linalg.lstsq(lhs, rhs)[0]
        except linalg.LinAlgError:
            raise RuntimeError('Circle calculation not successful. Please\
                            check the input data, probable collinear points')
        center_x = -D/2
        center_y = -E/2
        r = n.sqrt(center_x**2 + center_y**2 - F)
        return (r, center_x, center_y)

    def points_distance(self, points):
        r'''
        Compute the distance of each point from the circle:
        :math:`d = \left| \sqrt{(x_i - x_c)^2 + (y_i-y_c)^2} - r \right|`

        Args:
            points (numpy.ndarray): an (N,2) numpy array, each row is a 2D Point.

        Returns:
            d (numpy.ndarray): the computed distances of the points from the circle.
        '''
        center = n.array([self.xc, self.yc]).reshape((1, 2))
        return n.abs(dist.cdist(points, center) - self.radius)

    def print_feature(self, num_points):
        '''
        Sample num_points points along the circle.

        Args:
            num_points (numpy.ndarray): the number of points to be returned

        Returns:
            coords (numpy.ndarray): a 2 x num_points array (x row, y row).
        '''
        angles = n.linspace(0, 2*n.pi, num_points)
        xs = self.xc + self.radius*n.cos(angles)
        ys = self.yc + self.radius*n.sin(angles)
        return n.vstack((xs, ys))
class Exponential (Feature):
    '''
    Feature Class for an exponential curve :math:`y=ax^{k} + b`
    '''

    # Three points determine the three parameters (a, k, b).
    min_points = 3

    def __init__(self,points):
        self.a,self.k,self.b = self.__gen(points)

    def __gen(self,points):
        '''
        Compute the three parameters that univocally determine the
        exponential curve

        Args:
            points(numpy.ndarray): a (3,2) numpy array, each row is a 2D Point.

        Returns:
            exp(numpy.ndarray): A (3,) numpy array that contains the a,n,b parameters
            [a,k,b]

        Raises:
            RuntimeError: If the circle computation does not succeed
                a RuntimeError is raised.
        '''
        def exponential(x,points):
            ''' Non linear system function to use
            with :py:func:`scypy.optimize.root`
            '''
            aa = x[0]
            nn = x[1]
            bb = x[2]
            f = n.zeros((3,))
            # Residuals |a|*x^k + b - y for each of the three points.
            # NOTE(review): the fit constrains |a| while points_distance
            # uses self.a unmodified -- confirm handling of negative a.
            f[0] = n.abs(aa)*n.power(points[0,0],nn)+bb - points[0,1]
            f[1] = n.abs(aa)*n.power(points[1,0],nn)+bb - points[1,1]
            f[2] = n.abs(aa)*n.power(points[2,0],nn)+bb - points[2,1]
            return f
        # Levenberg-Marquardt root find from the fixed guess (1,1,1);
        # the solver's convergence flag is not checked.
        exp = opt.root(exponential,[1,1,1],points,method='lm')['x']
        return exp

    def points_distance(self,points):
        r'''
        Compute the distance of the points from the feature

        For each input point (x_i, y_i) the distance is measured to the
        curve point with the same x-coordinate, i.e.
        :math:`d_i = |a x_i^k + b - y_i|` along the pairing given by the
        diagonal of the cross-distance matrix.

        Args:
            points (numpy.ndarray): a (3,2) numpy array, each row is a 2D Point.

        Returns:
            d (numpy.ndarray): the computed distances of the points from the feature.
        '''
        x = points[:,0]
        xa = n.array([x,self.a*n.power(x,self.k)+self.b])
        xa = xa.T
        d = dist.cdist(points,xa)
        # Only the pairwise distance between each point and its same-x
        # curve point is wanted, i.e. the diagonal of the full matrix.
        return n.diag(d)

    def print_feature(self, num_points, a,b):
        '''
        This method returns an array of x,y coordinates for
        points that are in the feature in the interval [a,b].

        Args:
            num_points (numpy.ndarray): the number of points to be returned
            a (float): left end of the interval
            b (float): right end of the interval

        Returns:
            coords (numpy.ndarray): a num_points x 2 numpy array that contains
                                    the points coordinates
        '''
        x = n.linspace(a,b,num_points)
        y = self.a*x**self.k + self.b
        return n.vstack((x,y))
| rubendibattista/python-ransac-library | pyransac/features.py | Python | bsd-3-clause | 6,919 |
"""Klamp't visualization routines. See Python/demos/vistemplate.py for an
example of how to run this module.
The visualization module lets you draw most Klamp't objects in a 3D world
using a simple interface. It also lets you customize the GUI using Qt
widgets, OpenGL drawing, and keyboard/mouse intercept routines.
Main features include:
- Simple interface to modify the visualization
- Simple interface to animate and render trajectories
- Simple interface to edit certain Klamp't objects (configurations, points,
transforms)
- Simple interface to drawing text and text labels, and drawing plots
- Multi-window, multi-viewport support
- Unified interface to PyQt and GLUT (with loss of resource editing functionality
under GLUT)
- Automatic camera setup
The resource editing functionality in the klampt.io.resource module (based on
klampt.vis.editors) use this module as well.
Due to weird OpenGL and Qt behavior in multi-threaded programs, you should
only run visualizations using the methods in this module.
There are two primary ways of setting up a visualization:
- The first is by adding items to the visualization world and customizing them
using the vis.X routines that mirror the methods in VisualizationPlugin (like
add, setColor, animate, etc). See Python/demos/vistemplate.py for more information.
- The second is by creating a subclass of GLPluginInterface and doing
all the necessary drawing / interaction yourself inside its hooks. In the
latter case, you will call vis.setPlugin(plugin) to override the default
visualization behavior before creating your window. See Python/demos/visplugin.py
for more information.
A third way of setting up a visualization is a hybrid of the two, where you can
add functionality on top of default the visualization world. You can either use
vis.pushPlugin(plugin) in which case your plugin adds additional functionality,
or you can subclass the vis.VisualizationPlugin class, and selectively augment /
override the default functionality.
Instructions:
- To add things to the default visualization:
Call the VisualizationPlugin aliases (add, animate, setColor, etc)
- To show the visualization and quit when the user closes the window:
vis.run()
- To show the visualization and return when the user closes the window:
vis.dialog()
... do stuff afterwards ...
vis.kill()
- To show the visualization and be able to run a script alongside it
until the user closes the window:
vis.show()
while vis.shown():
vis.lock()
... do stuff ...
[to exit the loop call show(False)]
vis.unlock()
time.sleep(dt)
... do stuff afterwards ...
vis.kill()
- To run a window with a custom plugin (GLPluginInterface) and terminate on
closure:
vis.run(plugin)
- To show a dialog or parallel window
vis.setPlugin(plugin)
... then call
vis.dialog()
... or
vis.show()
... do stuff afterwards ...
vis.kill()
- To add a GLPluginInterface that just customizes a few things on top of
the default visualization:
vis.pushPlugin(plugin)
vis.dialog()
vis.popPlugin()
- To run plugins side-by-side in the same window:
vis.setPlugin(plugin1)
vis.addPlugin(plugin2) #this creates a new split-screen
vis.dialog()
... or
vis.show()
... do stuff afterwards ...
vis.kill()
- To run a custom dialog in a QtWindow
vis.setPlugin([desired plugin or None for visualization])
vis.setParent(qt_window)
vis.dialog()
... or
vis.show()
... do stuff afterwards ...
vis.kill()
- To launch a second window after the first is closed: just call whatever you
want again. Note: if show was previously called with a plugin and you wish to
revert to the default visualization, you should call setPlugin(None) first to
restore the default.
- To create a separate window with a given plugin:
w1 = vis.createWindow() #w1=0
show()
w2 = vis.createWindow() #w2=1
vis.setPlugin(plugin)
vis.dialog()
#to restore commands to the original window
vis.setWindow(w1)
while vis.shown():
...
vis.kill()
Note: when changing the data shown by the window (e.g., modifying the
configurations of robots in a WorldModel) you must call vis.lock() before
accessing the data and then call vis.unlock() afterwards.
The main interface is as follows:
def createWindow(title=None): creates a new visualization window and returns an
integer identifier.
def setWindow(id): sets the active window for all subsequent calls. ID 0 is
the default visualization window.
def getWindow(): gets the active window ID.
def setWindowTitle(title): sets the title of the visualization window.
def getWindowTitle(): returns the title of the visualization window
def setPlugin(plugin=None): sets the current plugin (a GLPluginInterface instance).
This plugin will now capture input from the visualization and can override
any of the default behavior of the visualizer. Set plugin=None if you want to return
to the default visualization.
def addPlugin(plugin): adds a second OpenGL viewport governed by the given plugin (a
GLPluginInterface instance).
def run([plugin]): pops up a dialog and then kills the program afterwards.
def kill(): kills all previously launched visualizations. Afterwards, you may not
be able to start new windows. Call this to cleanly quit.
def dialog(): pops up a dialog box (does not return to calling
thread until closed).
def show(hidden=False): shows/hides a visualization window run in parallel with the calling script.
def spin(duration): shows the visualization window for the desired amount
of time before returning, or until the user closes the window.
def shown(): returns true if the window is shown.
def lock(): locks the visualization world for editing. The visualization will
be paused until unlock() is called.
def unlock(): unlocks the visualization world. Must only be called once
after every lock().
def customUI(make_func): launches a user-defined UI window by calling make_func(gl_backend)
in the visualization thread. This can be used to build custom editors and windows that
are compatible with other visualization functionality. Here gl_backend is an instance of
_GLBackend instantiated for the current plugin.
def getViewport(): Returns the currently active viewport.
The following VisualizationPlugin methods are also added to the klampt.vis namespace
and operate on the default plugin. If you are calling these methods from an external
loop (as opposed to inside a plugin) be sure to lock/unlock the visualization before/after
calling these methods.
def add(name,item,keepAppearance=False): adds an item to the visualization.
name is a unique identifier. If an item with the same name already exists,
it will no longer be shown. If keepAppearance=True, then the prior item's
appearance will be kept, if a prior item exists.
def clear(): clears the visualization world.
def listItems(): prints out all names of visualization objects
def listItems(name): prints out all names of visualization objects under the given name
def dirty(item_name='all'): marks the given item as dirty and recreates the
OpenGL display lists. You may need to call this if you modify an item's geometry,
for example.
def remove(name): removes an item from the visualization.
def setItemConfig(name,vector): sets the configuration of a named item.
def getItemConfig(name): returns the configuration of a named item.
def hide(name,hidden=True): hides/unhides an item. The item is not removed,
it just becomes invisible.
def edit(name,doedit=True): turns on/off visual editing of some item. Only points,
transforms, coordinate.Point's, coordinate.Transform's, coordinate.Frame's,
robots, and objects are currently accepted.
def hideLabel(name,hidden=True): hides/unhides an item's text label.
def setAppearance(name,appearance): changes the Appearance of an item.
def revertAppearance(name): restores the Appearance of an item
def setAttribute(name,attribute,value): sets an attribute of the appearance
of an item. Typical attributes are 'color', 'size', 'length', 'width'...
TODO: document all accepted attributes.
def setColor(name,r,g,b,a=1.0): changes the color of an item.
def setDrawFunc(name,func): sets a custom OpenGL drawing function for an item.
func is a one-argument function that takes the item data as input. Set
func to None to revert to default drawing.
def animate(name,animation,speed=1.0,endBehavior='loop'): Sends an animation to the
object. May be a Trajectory or a list of configurations. Works with points,
so3 elements, se3 elements, rigid objects, or robots.
- speed: a modulator on the animation speed. If the animation is a list of
milestones, it is by default run at 1 milestone per second.
- endBehavior: either 'loop' (animation repeats forever) or 'halt' (plays once).
def pauseAnimation(paused=True): Turns on/off animation.
def stepAnimation(amount): Moves forward the animation time by the given amount
in seconds
def animationTime(newtime=None): Gets/sets the current animation time
If newtime == None (default), this gets the animation time.
If newtime != None, this sets a new animation time.
def addText(name,text,position=None): adds text. You need to give an
identifier to all pieces of text, which will be used to access the text as any other
vis object. If position is None, this is added as an on-screen display. If position
is of length 2, it is the (x,y) position of the upper left corner of the text on the
screen. Negative units anchor the text to the right or bottom of the window.
If position is of length 3, the text is drawn in the world coordinates. You can
then set the color, 'size' attribute, and 'position' attribute of the text using the
identifier given in 'name'.
def clearText(): clears all previously added text.
def addPlot(name): creates a new empty plot.
def addPlotItem(name,itemname): adds a visualization item to a plot.
def logPlot(name,itemname,value): logs a custom visualization item to a plot
def logPlotEvent(name,eventname,color=None): logs an event on the plot.
def hidePlotItem(name,itemname,hidden=True): hides an item in the plot. To hide a
particular channel of a given item pass a pair (itemname,channelindex). For example,
to hide configurations 0-5 of 'robot', call hidePlotItem('plot',('robot',0)), ...,
hidePlotItem('plot',('robot',5)).
def setPlotDuration(name,time): sets the plot duration.
def setPlotRange(name,vmin,vmax): sets the y range of a plot.
def setPlotPosition(name,x,y): sets the upper left position of the plot on the screen.
def setPlotSize(name,w,h): sets the width and height of the plot.
def savePlot(name,fn): saves a plot to a CSV (extension .csv) or Trajectory (extension
.traj) file.
def autoFitCamera(scale=1.0): Automatically fits the camera to all objects in the
visualization. A scale > 1 magnifies the camera zoom.
Utility function:
def autoFitViewport(viewport,objects): Automatically fits the viewport's camera to
see all the given objects.
NAMING CONVENTION:
The world, if one exists, should be given the name 'world'. Configurations and paths are drawn
with reference to the first robot in the world.
All items that refer to a name (except add) can either be given a top level item name
(a string) or a sub-item (a sequence of strings, given a path from the root to the leaf).
For example, if you've added a RobotWorld under the name 'world' containing a robot called
'myRobot', then setColor(('world','myRobot'),0,1,0) will turn the robot green. If 'link5'
is the robot's 5th link, then setColor(('world','myRobot','link5'),0,0,1) will turn the 5th
link blue.
"""
from OpenGL.GL import *
from threading import Thread,RLock
from ..robotsim import *
from ..math import vectorops,so3,se3
import gldraw
from glinit import *
from glinit import _GLBackend,_PyQtAvailable,_GLUTAvailable
from glinterface import GLPluginInterface
from glprogram import GLPluginProgram
import glcommon
import time
import signal
import weakref
from ..model import types
from ..model import config
from ..model import coordinates
from ..model.subrobot import SubRobotModel
from ..model.trajectory import *
from ..model.contact import ContactPoint,Hold
class WindowInfo:
    """Bookkeeping record for one visualization window.

    Mode can be hidden, shown, or dialog."""
    def __init__(self,name,frontend,vis,glwindow=None):
        self.name = name                 #window title
        self.frontend = frontend         #the GLPluginProgram serving this window
        self.vis = vis                   #the VisualizationPlugin for this window
        self.glwindow = glwindow         #the backend GL window, if created yet
        self.mode = 'hidden'             #one of 'hidden', 'shown', 'dialog'
        self.guidata = None              #backend GUI handle; non-None presumably means a GUI was built -- see shown()
        self.custom_ui = None            #optional user UI factory set via customUI()
        self.doRefresh = False           #flag: a redraw was requested (see unlock())
        self.doReload = False            #flag: a plugin reload was requested
        self.worlds = []                 #WorldModels associated with this window
        self.active_worlds = []          #worlds whose display lists are currently active here
#reentrant lock guarding all of the module-level visualization state below
_globalLock = RLock()
#the VisualizationPlugin instance of the currently active window
_vis = None
#the GLPluginProgram of the currently active window. Accepts _vis as plugin or other user-defined plugins as well
_frontend = GLPluginProgram()
#the window title for the next created window
_window_title = "Klamp't visualizer"
#a list of WorldModel's in the current window. A world cannot be used in multiple simultaneous
#windows in GLUT. If a world is reused with a different window, its display lists will be refreshed.
#Note: must be proxies to allow for deletion
_current_worlds = []
#list of WindowInfo's
_windows = []
#the index of the current window
_current_window = None
def createWindow(name):
    """Creates a new window with the given title (and sets it active).
    Returns the integer ID of the new window, usable with setWindow()."""
    global _globalLock,_frontend,_vis,_window_title,_current_worlds,_windows,_current_window
    _globalLock.acquire()
    if len(_windows) == 0:
        #save the defaults in window 0
        _windows.append(WindowInfo(_window_title,_frontend,_vis))
        _windows[-1].worlds = _current_worlds
        _windows[-1].active_worlds = _current_worlds[:]
    #make a new window
    _window_title = name
    _frontend = GLPluginProgram()
    _vis = VisualizationPlugin()
    _frontend.setPlugin(_vis)
    _windows.append(WindowInfo(_window_title,_frontend,_vis))
    _current_worlds = []
    id = len(_windows)-1
    _current_window = id
    _globalLock.release()
    return id
def setWindow(id):
    """Sets currently active window.  All subsequent vis calls (setPlugin,
    add, show, ...) operate on the selected window.  ID 0 is the default
    window."""
    global _globalLock,_frontend,_vis,_window_title,_windows,_current_window,_current_worlds
    if id == _current_window:
        return
    _globalLock.acquire()
    if len(_windows) == 0:
        #save the defaults in window 0
        _windows.append(WindowInfo(_window_title,_frontend,_vis))
        _windows[-1].worlds = _current_worlds
        _windows[-1].active_worlds = _current_worlds[:]
    assert id >= 0 and id < len(_windows),"Invalid window id"
    #swap the selected window's state into the module-level "current" state
    _window_title,_frontend,_vis,_current_worlds = _windows[id].name,_windows[id].frontend,_windows[id].vis,_windows[id].worlds
    #print "vis.setWindow(",id,") the window has status",_windows[id].mode
    if not _PyQtAvailable:
        #PyQt interface allows sharing display lists but GLUT does not.
        #refresh all worlds' display lists that were once active.
        #NOTE(review): this indexes _windows[_current_window]; if no window was
        #ever made current (_current_window is None) this would raise -- confirm
        #that setWindow is only reached after a window exists.
        for w in _current_worlds:
            if w in _windows[_current_window].active_worlds:
                print "klampt.vis.setWindow(): world",w().index,"becoming active in the new window",id
                _refreshDisplayLists(w())
                _windows[_current_window].active_worlds.remove(w)
    _windows[id].active_worlds = _current_worlds[:]
    _current_window = id
    _globalLock.release()
def getWindow():
    """Retrieves the ID of the currently active window.  Returns 0 (the
    default window) if no window has been explicitly made active yet."""
    global _current_window
    if _current_window == None: return 0
    return _current_window
def setPlugin(plugin):
    """Lets the user capture input via a glinterface.GLPluginInterface class.
    Set plugin to None to disable plugins and return to the standard visualization"""
    global _globalLock,_frontend,_windows,_current_window
    _globalLock.acquire()
    if not isinstance(_frontend,GLPluginProgram):
        #a multi-viewport program was installed by addPlugin(); start over
        _frontend = GLPluginProgram()
        if _current_window != None:
            if _windows[_current_window].glwindow != None:
                #reuse the already-created GL window for the new frontend
                _frontend.window = _windows[_current_window].glwindow
    if plugin == None:
        #revert to the default visualization plugin
        global _vis
        if _vis==None:
            raise RuntimeError("Visualization disabled")
        _frontend.setPlugin(_vis)
    else:
        _frontend.setPlugin(plugin)
        if hasattr(plugin,'world'):
            #claim the plugin's world for the current window
            _checkWindowCurrent(plugin.world)
    _onFrontendChange()
    _globalLock.release()
def pushPlugin(plugin):
    """Adds a new glinterface.GLPluginInterface plugin on top of the old one."""
    global _globalLock,_frontend
    _globalLock.acquire()
    assert isinstance(_frontend,GLPluginProgram),"Can't push a plugin after addPlugin"
    if len(_frontend.plugins) == 0:
        #nothing installed yet: install the default visualization first
        global _vis
        if _vis==None:
            raise RuntimeError("Visualization disabled")
        _frontend.setPlugin(_vis)
    _frontend.pushPlugin(plugin)
    _onFrontendChange()
    _globalLock.release()
def popPlugin():
    """Reverses a prior pushPlugin() call"""
    global _frontend
    _globalLock.acquire()
    _frontend.popPlugin()
    _onFrontendChange()
    _globalLock.release()
def addPlugin(plugin):
    """Adds a second OpenGL viewport in the same window, governed by the given plugin (a
    glinterface.GLPluginInterface instance)."""
    global _frontend
    _globalLock.acquire()
    #create a multi-view widget
    if isinstance(_frontend,glcommon.GLMultiViewportProgram):
        #already multi-view: just add another viewport
        _frontend.addView(plugin)
    else:
        if len(_frontend.plugins) == 0:
            #make sure the default plugin occupies the first viewport
            setPlugin(None)
        multiProgram = glcommon.GLMultiViewportProgram()
        multiProgram.window = None
        if _current_window != None:
            if _windows[_current_window].glwindow != None:
                multiProgram.window = _windows[_current_window].glwindow
        multiProgram.addView(_frontend)
        multiProgram.addView(plugin)
        multiProgram.name = _window_title
        _frontend = multiProgram
    _onFrontendChange()
    _globalLock.release()
def run(plugin=None):
    """A blocking call that pops up a single window and kills the visualization
    when it is closed.  If plugin is None the default visualization is shown;
    otherwise plugin (a glinterface.GLPluginInterface) takes over the window."""
    setPlugin(plugin)
    show()
    #poll until the user closes the window
    while shown():
        time.sleep(0.1)
    setPlugin(None)
    kill()
def dialog():
    """A blocking call to start a single dialog window with the current plugin. It is
    closed by pressing OK or closing the window."""
    #delegates to the internal implementation defined elsewhere in this module
    _dialog()
def setWindowTitle(title):
    """Sets the title of the visualization window."""
    global _window_title
    _window_title = title
    _onFrontendChange()
def getWindowTitle():
    """Returns the title of the visualization window."""
    global _window_title
    return _window_title
def kill():
    """Cleanly terminates the visualization thread.  Should be called at the
    end of the calling program; new windows may not be startable afterwards."""
    global _vis,_globalLock
    if _vis is None:
        print("vis.kill() Visualization disabled")
        return
    _kill()
def show(display=True):
    """Shows (display=True) or hides (display=False) the current window."""
    _globalLock.acquire()
    #dispatch to the appropriate internal routine
    (_show if display else _hide)()
    _globalLock.release()
def spin(duration):
    """Shows the window and blocks until either `duration` seconds have
    elapsed or the user closes the window, then hides it again."""
    show()
    elapsed = 0
    while elapsed < duration:
        if not shown():
            break
        #sleep in ~40ms slices so a window close is noticed promptly
        time.sleep(min(0.04,duration-elapsed))
        elapsed += 0.04
    show(False)
    return
def lock():
    """Begins a locked section. Needs to be called any time you modify a visualization item outside
    of the visualization thread. unlock() must be called to let the visualization thread proceed."""
    global _globalLock
    #_globalLock is an RLock (see module globals), so nested lock() calls from
    #the same thread are safe as long as each is paired with an unlock()
    _globalLock.acquire()
def unlock():
    """Ends a locked section acquired by lock(), asking every live window to
    redraw so that any edits made under the lock become visible."""
    global _globalLock,_windows
    for winfo in _windows:
        if winfo.glwindow:
            winfo.doRefresh = True
    _globalLock.release()
def shown():
    """Returns true if a visualization window is currently shown (or running
    as a dialog / custom GUI)."""
    global _globalLock,_thread_running,_current_window
    _globalLock.acquire()
    #bug fix: the original expression mixed `and`/`or` without parentheses, so
    #when the thread was not running (or no window was current) Python fell
    #through to the `or` clause and evaluated _windows[_current_window].guidata,
    #crashing on _windows[None].  Group the window-state test explicitly.
    res = (_thread_running and _current_window is not None and
           (_windows[_current_window].mode in ['shown','dialog'] or
            _windows[_current_window].guidata is not None))
    _globalLock.release()
    return res
def customUI(func):
    """Tells the next created window/dialog to use a custom UI function. func is a 1-argument function that
    takes a QtWindow or GLUTWindow as its argument."""
    global _globalLock
    _globalLock.acquire()
    #stored via the internal setter; presumably consumed when the window is
    #next created or shown -- see _set_custom_ui
    _set_custom_ui(func)
    _globalLock.release()
def getViewport():
    """Returns the GLViewport of the current window (see klampt.vis.glprogram.GLViewport)"""
    #the active frontend owns the viewport
    return _frontend.get_view()
def setViewport(viewport):
    """Sets the current window to use a given GLViewport (see klampt.vis.glprogram.GLViewport)"""
    _frontend.set_view(viewport)
######### CONVENIENCE ALIASES FOR VisualizationPlugin methods ###########
def clear():
    """Clears the visualization world.  No-op when visualization is disabled."""
    global _vis
    if _vis is not None:
        _vis.clear()
def add(name,item,keepAppearance=False):
    """Adds an item to the visualization. name is a unique identifier. If an item with
    the same name already exists, it will no longer be shown. If keepAppearance=True, then
    the prior item's appearance will be kept, if a prior item exists."""
    global _vis
    if _vis is None:
        print("Visualization disabled")
        return
    #claim the item's world (if any) for the current window before adding
    _globalLock.acquire()
    _checkWindowCurrent(item)
    _globalLock.release()
    _vis.add(name,item,keepAppearance)
def listItems(name=None,indent=0):
    """Prints the names of all visualization items, or of those under the
    given name."""
    global _vis
    if _vis is None:
        print("Visualization disabled")
        return
    _vis.listItems(name,indent)
def dirty(item_name='all'):
    """Marks the given item as dirty and recreates the OpenGL display lists. You may need
    to call this if you modify an item's geometry, for example. If things start disappearing
    from your world when you create a new window, you may need to call this too."""
    global _vis
    if _vis is None:
        print("Visualization disabled")
        return
    _vis.dirty(item_name)
def animate(name,animation,speed=1.0,endBehavior='loop'):
    """Sends an animation to the named object.
    Works with points, so3 elements, se3 elements, rigid objects, or robots, and may work
    with other objects as well.
    Parameters:
    - animation: may be a Trajectory or a list of configurations.
    - speed: a modulator on the animation speed. If the animation is a list of
      milestones, it is by default run at 1 milestone per second.
    - endBehavior: either 'loop' (animation repeats forever) or 'halt' (plays once).
    """
    global _vis
    if _vis is None:
        print("Visualization disabled")
        return
    _vis.animate(name,animation,speed,endBehavior)
def pauseAnimation(paused=True):
    """Pauses (paused=True) or resumes (paused=False) the animation."""
    global _vis
    if _vis is None:
        print("Visualization disabled")
        return
    _vis.pauseAnimation(paused)
def stepAnimation(amount):
    """Advances the animation time by the given amount, in seconds."""
    global _vis
    if _vis is None:
        print("Visualization disabled")
        return
    _vis.stepAnimation(amount)
def animationTime(newtime=None):
    """Gets/sets the current animation time.
    If newtime == None (default), this gets the animation time.
    If newtime != None, this sets a new animation time.
    """
    global _vis
    if _vis is None:
        print("Visualization disabled")
        return 0
    return _vis.animationTime(newtime)
def remove(name):
    """Removes a previously added item from the visualization."""
    global _vis
    if _vis is not None:
        return _vis.remove(name)
def getItemConfig(name):
    """Returns the configuration of the named item (None when disabled)."""
    global _vis
    if _vis is not None:
        return _vis.getItemConfig(name)
    return None
def setItemConfig(name,value):
    """Sets the configuration of the named item."""
    global _vis
    if _vis is not None:
        return _vis.setItemConfig(name,value)
def hideLabel(name,hidden=True):
    """Hides/unhides the text label of the named item."""
    global _vis
    if _vis is not None:
        return _vis.hideLabel(name,hidden)
def hide(name,hidden=True):
    """Hides/unhides the named item.  The item is not removed; it just
    becomes invisible."""
    global _vis
    if _vis is not None:
        _vis.hide(name,hidden)
def edit(name,doedit=True):
    """Turns on/off visual editing of some item. Only points, transforms,
    coordinate.Point's, coordinate.Transform's, coordinate.Frame's, robots,
    and objects are currently accepted."""
    global _vis
    if _vis is not None:
        _vis.edit(name,doedit)
def setAppearance(name,appearance):
    """Changes the Appearance of the named item."""
    global _vis
    if _vis is not None:
        _vis.setAppearance(name,appearance)
def setAttribute(name,attr,value):
    """Sets an attribute of the named item's appearance (e.g. 'color',
    'size', 'length', 'width')."""
    global _vis
    if _vis is not None:
        _vis.setAttribute(name,attr,value)
def revertAppearance(name):
    """Restores the named item's original Appearance."""
    global _vis
    if _vis is not None:
        _vis.revertAppearance(name)
def setColor(name,r,g,b,a=1.0):
    """Changes the color of the named item to (r,g,b,a)."""
    global _vis
    if _vis is not None:
        _vis.setColor(name,r,g,b,a)
def setDrawFunc(name,func):
    """Sets a custom OpenGL drawing function for the named item.  func takes
    the item data as its single argument; pass None to revert to default
    drawing."""
    global _vis
    if _vis is not None:
        _vis.setDrawFunc(name,func)
def _getOffsets(object):
    """Returns a list of representative 3D points for the given object,
    used by autoFitViewport to fit a viewing plane.  Always returns a list
    (possibly empty) of 3-vectors."""
    if isinstance(object,WorldModel):
        res = []
        for i in range(object.numRobots()):
            res += _getOffsets(object.robot(i))
        for i in range(object.numRigidObjects()):
            res += _getOffsets(object.rigidObject(i))
        return res
    elif isinstance(object,RobotModel):
        #offsets of each link's origin relative to the zero configuration
        q = object.getConfig()
        object.setConfig([0.0]*len(q))
        worig = [object.link(i).getTransform()[1] for i in range(object.numLinks())]
        object.setConfig(q)
        wnew = [object.link(i).getTransform()[1] for i in range(object.numLinks())]
        return [vectorops.sub(b,a) for a,b in zip(worig,wnew)]
    elif isinstance(object,RigidObjectModel):
        return [object.getTransform()[1]]
    elif isinstance(object,Geometry3D):
        #bug fix: wrap the translation in a list, consistent with every other
        #branch -- callers concatenate these results into a flat point list
        return [object.getCurrentTransform()[1]]
    elif isinstance(object,VisAppearance):
        res = _getOffsets(object.item)
        if len(res) != 0: return res
        if len(object.subAppearances) == 0:
            #leaf item: use the center of its bounding box, if non-empty
            bb = object.getBounds()
            if bb != None and not aabb_empty(bb):
                return [vectorops.mul(vectorops.add(bb[0],bb[1]),0.5)]
        else:
            res = []
            #.values() rather than py2-only .itervalues(); works identically here
            for a in object.subAppearances.values():
                res += _getOffsets(a)
            return res
    return []
def _getBounds(object):
    """Returns a flat list of bounding-box corner points for the given object,
    used by autoFitViewport."""
    if isinstance(object,WorldModel):
        res = []
        for i in range(object.numRobots()):
            #bug fix: the accessor is robot(i); robots(i) does not exist
            #(compare _getOffsets above)
            res += _getBounds(object.robot(i))
        for i in range(object.numRigidObjects()):
            res += _getBounds(object.rigidObject(i))
        return res
    elif isinstance(object,RobotModel):
        return sum([object.link(i).geometry().getBB() for i in range(object.numLinks())],[])
    elif isinstance(object,RigidObjectModel):
        return object.geometry().getAABB()
    elif isinstance(object,Geometry3D):
        return object.getAABB()
    elif isinstance(object,VisAppearance):
        if len(object.subAppearances) == 0:
            #terrains are typically huge; excluding them keeps the fit tight
            if isinstance(object.item,TerrainModel):
                return []
            bb = object.getBounds()
            if bb != None and not aabb_empty(bb):
                return list(bb)
        else:
            res = []
            for a in object.subAppearances.values():
                res += _getBounds(a)
            return res
    return []
def _fitPlane(pts):
    """Fits a plane to a 3D point set via SVD.  Returns (centroid,normal).
    Raises ValueError if the points are degenerate (fewer than 3 points, or
    rank-deficient in more than one direction)."""
    import numpy as np
    if len(pts) < 3:
        raise ValueError("Point set is degenerate")
    centroid = vectorops.div(vectorops.add(*pts),len(pts))
    A = np.array([vectorops.sub(pt,centroid) for pt in pts])
    U,S,V = np.linalg.svd(A,full_matrices=False)
    imin = 0
    smin = S[0]
    zeros = []
    for i in xrange(len(S)):
        if abs(S[i]) < 1e-6:
            zeros.append(i)
        if abs(S[i]) < smin:
            smin = S[i]
            imin = i
    if len(zeros) > 1:
        #two near-zero singular values: the points are (nearly) collinear
        raise ValueError("Point set is degenerate")
    assert V.shape == (3,3)
    #the plane normal is the right singular vector (row of V) corresponding to
    #the smallest singular value -- note: V, not U as a previous comment said
    normal = V[imin,:]
    return centroid,normal.tolist()
def autoFitViewport(viewport,objects):
    """Automatically fits the viewport's camera so that all of the given
    objects are visible.  Resets the camera first; if the objects yield no
    representative points the reset view is kept."""
    ofs = sum([_getOffsets(o) for o in objects],[])
    pts = sum([_getBounds(o) for o in objects],[])
    #print "Bounding box",bb,"center",center
    #raw_input()
    #reset
    viewport.camera.rot = [0.,0.,0.]
    viewport.camera.tgt = [0.,0.,0.]
    viewport.camera.dist = 6.0
    viewport.clippingplanes = (0.2,20)
    if len(ofs) == 0:
        return
    #aim at the center of the overall bounding box, far enough back that a
    #sphere of the box's radius fills the vertical field of view
    bb = aabb_create(*pts)
    center = vectorops.mul(vectorops.add(bb[0],bb[1]),0.5)
    viewport.camera.tgt = center
    radius = max(vectorops.distance(bb[0],center),0.1)
    viewport.camera.dist = 1.2*radius / math.tan(math.radians(viewport.fov*0.5))
    #default: oblique view
    viewport.camera.rot = [0,math.radians(30),math.radians(45)]
    #fit a plane to these points
    try:
        centroid,normal = _fitPlane(ofs)
    except Exception as e:
        try:
            centroid,normal = _fitPlane(pts)
        except Exception as e:
            print "Exception occurred during fitting to points"
            print ofs
            print pts
            raise
        #NOTE(review): when the offset fit fails but the bounds fit succeeds,
        #we return here with the default oblique view and the fallback
        #centroid/normal unused -- confirm this early return is intentional
        return
    if normal[2] > 0:
        #flip so the viewing direction looks downward
        normal = vectorops.mul(normal,-1)
    z,x,y = so3.matrix(so3.inv(so3.canonical(normal)))
    #print z,x,y
    #raw_input()
    #scene extent along the view-plane axes (y scaled by aspect ratio)
    radius = max([abs(vectorops.dot(x,vectorops.sub(center,pt))) for pt in pts] + [abs(vectorops.dot(y,vectorops.sub(center,pt)))*viewport.w/viewport.h for pt in pts])
    zmin = min([vectorops.dot(z,vectorops.sub(center,pt)) for pt in pts])
    zmax = max([vectorops.dot(z,vectorops.sub(center,pt)) for pt in pts])
    #print "Viewing direction",normal,"at point",center,"with scene size",radius
    #orient camera to point along normal direction
    viewport.camera.tgt = center
    viewport.camera.dist = 1.2*radius / math.tan(math.radians(viewport.fov*0.5))
    #widen the clipping planes if the scene would be clipped
    near,far = viewport.clippingplanes
    if viewport.camera.dist + zmin < near:
        near = max((viewport.camera.dist + zmin)*0.5, radius*0.1)
    if viewport.camera.dist + zmax > far:
        far = max((viewport.camera.dist + zmax)*1.5, radius*3)
    viewport.clippingplanes = (near,far)
    roll = 0
    yaw = math.atan2(normal[0],normal[1])
    pitch = math.atan2(-normal[2],vectorops.norm(normal[0:2]))
    #print "Roll pitch and yaw",roll,pitch,yaw
    #print "Distance",viewport.camera.dist
    viewport.camera.rot = [roll,pitch,yaw]
def addText(name,text,pos=None):
    """Adds text to the visualizer. You must give an identifier to all pieces of
    text, which will be used to access the text as any other vis object.
    Parameters:
    - name: the text's unique identifier.
    - text: the string to be drawn
    - pos: the position of the string. If pos=None, this is added to the on-screen "console" display.
      If pos has length 2, it is the (x,y) position of the upper left corner of the text on the
      screen. Negative units anchor the text to the right or bottom of the window.
      If pos has length 3, the text is drawn in the world coordinates.
    To customize the text appearance, you can set the color, 'size' attribute, and 'position'
    attribute of the text using the identifier given in 'name'.
    """
    global _vis
    #consistency/bug fix: every other wrapper tolerates a disabled
    #visualization; without this guard _vis.add raised AttributeError on None
    if _vis is None:
        print("Visualization disabled")
        return
    _vis.add(name,text,True)
    if pos is not None:
        _vis.setAttribute(name,'position',pos)
def clearText():
    """Clears all text in the visualization."""
    global _vis
    if _vis is not None:
        _vis.clearText()
def addPlot(name):
    """Creates a new empty plot with the given unique name."""
    add(name,VisPlot())
def addPlotItem(name,itemname):
    """Adds the named visualization item to the named plot."""
    global _vis
    if _vis is not None:
        _vis.addPlotItem(name,itemname)
def logPlot(name,itemname,value):
    """Logs a custom visualization item to a plot"""
    global _vis
    if _vis is not None:
        _vis.logPlot(name,itemname,value)
def logPlotEvent(name,eventname,color=None):
    """Logs an event on the plot."""
    global _vis
    if _vis is not None:
        _vis.logPlotEvent(name,eventname,color)
def hidePlotItem(name,itemname,hidden=True):
    """Hides an item in the plot.  To hide one channel of an item, pass a
    pair (itemname,channelindex), e.g. hidePlotItem('plot',('robot',0))."""
    global _vis
    if _vis is not None:
        _vis.hidePlotItem(name,itemname,hidden)
def setPlotDuration(name,time):
    """Sets the named plot's displayed time window."""
    setAttribute(name,'duration',time)
def setPlotRange(name,vmin,vmax):
    """Sets the y range of the named plot."""
    setAttribute(name,'range',(vmin,vmax))
def setPlotPosition(name,x,y):
    """Sets the upper left position of the named plot on the screen."""
    setAttribute(name,'position',(x,y))
def setPlotSize(name,w,h):
    """Sets the width and height of the named plot."""
    setAttribute(name,'size',(w,h))
def savePlot(name,fn):
    """Saves the named plot to a CSV (.csv) or Trajectory (.traj) file."""
    global _vis
    if _vis is not None:
        _vis.savePlot(name,fn)
def autoFitCamera(scale=1):
    """Automatically fits the camera to all objects in the visualization.
    A scale > 1 magnifies the camera zoom."""
    global _vis
    if _vis is None:
        return
    print("klampt.vis: auto-fitting camera to scene.")
    _vis.autoFitCamera(scale)
def objectToVisType(item,world):
    """Resolves the drawable type of `item`.  Returns a single type string,
    or None (after printing a message) if the type is ambiguous or cannot be
    drawn."""
    itypes = types.objectToTypes(item,world)
    if not isinstance(itypes,(list,tuple)):
        #unambiguous
        return itypes
    #ambiguous, still need to figure out what to draw
    validtypes = []
    for t in itypes:
        if t == 'Config':
            #only draw a Config if it matches the first robot's DOF count
            if world != None and len(item) == world.robot(0).numLinks():
                validtypes.append(t)
        elif t in ('Vector3','RigidTransform'):
            validtypes.append(t)
    if len(validtypes) > 1:
        print("Unable to draw item of ambiguous types "+str(validtypes))
        return
    if len(validtypes) == 0:
        print("Unable to draw any of types "+str(itypes))
        return
    return validtypes[0]
def aabb_create(*ptlist):
    """Returns the tight axis-aligned bounding box (bmin,bmax) of the given
    3D points.  With no points, returns the "empty" box (+inf mins, -inf
    maxes), which acts as the identity for aabb_expand and tests True under
    aabb_empty."""
    if len(ptlist) == 0:
        return [float('inf')]*3,[float('-inf')]*3
    bmin,bmax = list(ptlist[0]),list(ptlist[0])
    #iterate points directly instead of the original py2-only xrange index loop
    for x in ptlist[1:]:
        bmin = [min(a,b) for (a,b) in zip(bmin,x)]
        bmax = [max(a,b) for (a,b) in zip(bmax,x)]
    return bmin,bmax
def aabb_expand(bb,bb2):
    """Returns the axis-aligned box containing both bb and bb2 (each a
    (bmin,bmax) pair)."""
    bmin = list(map(min, bb[0], bb2[0]))
    bmax = list(map(max, bb[1], bb2[1]))
    return (bmin,bmax)
def aabb_empty(bb):
    """True if the box is empty, i.e. some min coordinate exceeds its max."""
    bmin,bmax = bb
    return any(lo > hi for lo,hi in zip(bmin,bmax))
#slope-change tolerance below which consecutive plot samples are merged
_defaultCompressThreshold = 1e-2
class VisPlotItem:
    """One logged item (one or more scalar channels) inside a VisPlot.

    Attributes:
    - name: the item's display name
    - itemnames: per-channel names
    - linkitem: optional VisAppearance whose configuration is sampled by update()
    - traces: per-channel deques of (time,value) pairs
    - hidden: per-channel visibility flags
    - traceRanges: per-channel (min,max) values seen so far
    - luminosity: per-channel brightness modulation used when rendering
    - compressThreshold: slope tolerance for merging near-linear samples
    """
    def __init__(self,itemname,linkitem):
        self.name = itemname
        self.itemnames = []
        self.linkitem = linkitem
        self.traces = []
        self.hidden = []
        self.traceRanges = []
        self.luminosity = []
        self.compressThreshold = _defaultCompressThreshold
        if linkitem is not None:
            q = config.getConfig(linkitem.item)
            assert q is not None
            from collections import deque
            self.traces = [deque() for i in range(len(q))]
            self.itemnames = config.getConfigNames(linkitem.item)
    def customUpdate(self,item,t,v):
        """Logs value v at time t on the channel named `item`, creating the
        channel on first use."""
        for i,itemname in enumerate(self.itemnames):
            if item == itemname:
                self.updateTrace(i,t,v)
                self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))
                return
        #channel not seen before: start a new trace for it
        from collections import deque
        self.itemnames.append(item)
        self.traces.append(deque())
        i = len(self.itemnames)-1
        self.updateTrace(i,t,v)
        self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))
    def update(self,t):
        """Samples the linked item's configuration at time t (no-op when the
        plot has no linked item)."""
        if self.linkitem is None:
            return
        q = config.getConfig(self.linkitem.item)
        assert len(self.traces) == len(q)
        for i,v in enumerate(q):
            self.updateTrace(i,t,v)
            self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))
    def discard(self,tstart):
        """Drops samples older than tstart, keeping at least one sample
        before tstart so the plot line can be interpolated at its left edge."""
        for t in self.traces:
            if len(t)<=1: return
            while len(t) >= 2:
                if t[1][0] < tstart:
                    t.popleft()
                else:
                    break
    def updateTrace(self,i,t,v):
        """Appends (t,v) to channel i, growing the bookkeeping lists as
        needed and merging near-linear runs of samples when compression is
        enabled."""
        import random
        assert i < len(self.traces)
        assert i <= len(self.hidden)
        assert i <= len(self.luminosity)
        while i >= len(self.hidden):
            self.hidden.append(False)
        while i >= len(self.traceRanges):
            self.traceRanges.append((v,v))
        if i >= len(self.luminosity):
            #first few channels get fixed, distinguishable brightnesses
            initialLuminosity = [0.5,0.25,0.75,1.0]
            while i >= len(self.luminosity):
                if len(self.luminosity)<len(initialLuminosity):
                    self.luminosity.append(initialLuminosity[len(self.luminosity)])
                else:
                    self.luminosity.append(random.uniform(0,1))
        trace = self.traces[i]
        if len(trace) > 0 and trace[-1][0] == t:
            #same timestamp: overwrite the last sample
            trace[-1] = (t,v)
            return
        if self.compressThreshold is None:
            trace.append((t,v))
            return
        if len(trace) < 2:
            trace.append((t,v))
            return
        pprev = trace[-2]
        prev = trace[-1]
        assert prev[0] > pprev[0],"Added two items with the same time?"
        assert t > prev[0]
        slope_old = (prev[1]-pprev[1])/(prev[0]-pprev[0])
        slope_new = (v-prev[1])/(t-prev[0])
        #bug fix: the original wrote `slope_old > 0 != slope_new > 0`, which
        #Python chains as `slope_old>0 and 0!=slope_new and slope_new>0` and is
        #True for any two positive slopes, so compression never happened.
        #The intent is "sign changed or slope changed too much".
        if (slope_old > 0) != (slope_new > 0) or abs(slope_old-slope_new) > self.compressThreshold:
            trace.append((t,v))
        else:
            #near-linear: slide the last sample along the straight line
            trace[-1] = (t,v)
class VisPlot:
def __init__(self):
self.items = []
self.colors = []
self.events = dict()
self.eventColors = dict()
self.outfile = None
self.outformat = None
def __del__(self):
self.endSave()
def update(self,t,duration,compressThreshold):
for i in self.items:
i.compressThreshold = compressThreshold
i.update(t)
if self.outfile:
self.dumpCurrent()
self.discard(t-duration)
else:
self.discard(t-60.0)
def discard(self,tmin):
for i in self.items:
i.discard(tmin)
delevents = []
for e,times in self.events.iteritems():
while len(times) > 0 and times[0] < tmin:
times.popleft()
if len(times)==0:
delevents.append(e)
for e in delevents:
del self.events[e]
def addEvent(self,name,t,color=None):
if name in self.events:
self.events[name].append(t)
else:
from collections import deque
self.events[name] = deque([t])
if color == None:
import random
color = (random.uniform(0.01,1),random.uniform(0.01,1),random.uniform(0.01,1))
color = vectorops.mul(color,1.0/max(color))
if color != None:
self.eventColors[name] = color
if len(color)==3:
self.eventColors[name] += [1.0]
def autoRange(self):
vmin = float('inf')
vmax = -float('inf')
for i in self.items:
for j in xrange(len(i.traceRanges)):
if not i.hidden[j]:
vmin = min(vmin,i.traceRanges[j][0])
vmax = max(vmax,i.traceRanges[j][1])
if math.isinf(vmin):
return (0.,1.)
if vmax == vmin:
vmax += 1.0
return (float(vmin),float(vmax))
def render(self,window,x,y,w,h,duration,vmin=None,vmax=None):
if vmin == None:
vmin,vmax = self.autoRange()
import random
while len(self.colors) < len(self.items):
c = (random.uniform(0.01,1),random.uniform(0.01,1),random.uniform(0.01,1))
c = vectorops.mul(c,1.0/max(c))
self.colors.append(c)
glColor3f(0,0,0)
glBegin(GL_LINE_LOOP)
glVertex2f(x,y)
glVertex2f(x+w,y)
glVertex2f(x+w,y+h)
glVertex2f(x,y+h)
glEnd()
window.draw_text((x-18,y+4),'%.2f'%(vmax,),9)
window.draw_text((x-18,y+h+4),'%.2f'%(vmin,),9)
tmax = 0
for i in self.items:
for trace in i.traces:
if len(trace)==0: continue
tmax = max(tmax,trace[-1][0])
for i,item in enumerate(self.items):
for j,trace in enumerate(item.traces):
if len(trace)==0: continue
labelheight = trace[-1][1]
if len(item.name)==0:
label = item.itemnames[j]
else:
label = str(item.name) + '.' + item.itemnames[j]
labelheight = (labelheight - vmin)/(vmax-vmin)
labelheight = y + h - h*labelheight
glColor3fv(vectorops.mul(self.colors[i],item.luminosity[j]))
window.draw_text((x+w+3,labelheight+4),label,9)
glBegin(GL_LINE_STRIP)
for k in xrange(len(trace)-1):
if trace[k+1][0] > tmax-duration:
u,v = trace[k]
if trace[k][0] < tmax-duration:
#interpolate so x is at tmax-duration
u2,v2 = trace[k+1]
#u + s(u2-u) = tmax-duration
s = (tmax-duration-u)/(u2-u)
v = v + s*(v2-v)
u = (tmax-duration)
u = (u-(tmax-duration))/duration
v = (v-vmin)/(vmax-vmin)
glVertex2f(x+w*u,y+(1-v)*h)
u,v = trace[-1]
u = (u-(tmax-duration))/duration
v = (v-vmin)/(vmax-vmin)
glVertex2f(x+w*u,y+(1-v)*h)
glEnd()
if len(self.events) > 0:
for e,times in self.events.iteritems():
for t in times:
if t < tmax-duration: continue
labelx = (t - (tmax-duration))/duration
labelx = x + w*labelx
c = self.eventColors[e]
glColor4f(c[0]*0.5,c[1]*0.5,c[2]*0.5,c[3])
window.draw_text((labelx,y+h+12),e,9)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA)
glBegin(GL_LINES)
for e,times in self.events.iteritems():
for t in times:
if t < tmax-duration: continue
labelx = (t - (tmax-duration))/duration
labelx = x + w*labelx
glColor4f(c[0],c[1],c[2],c[3]*0.5)
glVertex2f(labelx,y)
glVertex2f(labelx,y+h)
glEnd()
glDisable(GL_BLEND)
def beginSave(self,fn):
import os
ext = os.path.splitext(fn)[1]
if ext == '.csv' or ext == '.traj':
self.outformat = ext
else:
raise ValueError("Invalid extension for visualization plot, can only accept .csv or .traj")
self.outfile = open(fn,'w')
if self.outformat == '.csv':
#output a header
self.outfile.write("time")
for i in self.items:
self.outfile.write(",")
fullitemnames = []
if len(i.name) != 0:
name = None
if isinstance(i.name,(list,tuple)):
name = '.'.join(v for v in i.name)
else:
name = i.name
fullitemnames = [name+'.'+itemname for itemname in i.itemnames]
else:
fullitemnames = i.itemnames
self.outfile.write(",".join(fullitemnames))
self.outfile.write("\n")
self.dumpAll()
def endSave(self):
if self.outfile is not None:
self.outfile.close()
def dumpAll(self):
assert self.outfile is not None
if len(self.items) == 0: return
cols = []
mindt = float('inf')
mint = float('inf')
maxt = -float('inf')
for i in self.items:
if len(i.traces) == 0:
continue
for j,trace in enumerate(i.traces):
times,vals = zip(*trace)
if isinstance(vals[0],(int,float)):
vals = [[v] for v in vals]
traj = Trajectory(times,vals)
cols.append(traj)
mint = min(mint,traj.times[0])
maxt = max(maxt,traj.times[-1])
for k in xrange(len(traj.times)-1):
mindt = min(mindt,traj.times[k+1] - traj.times[k])
assert mindt > 0, "For some reason, there is a duplicate time?"
N = int((maxt - mint)/mindt)
dt = (maxt - mint)/N
times = [mint + i*(maxt-mint)/N for i in range(N+1)]
for i in xrange(N+1):
vals = [col.eval(times[i]) for col in cols]
if self.outformat == '.csv':
self.outfile.write(str(times[i])+',')
self.outfile.write(','.join([str(v[0]) for v in vals]))
self.outfile.write('\n')
else:
self.outfile.write(str(times[i])+'\t')
self.outfile.write(str(len(vals))+' ')
self.outfile.write(' '.join([str(v[0]) for v in vals]))
self.outfile.write('\n')
    def dumpCurrent(self):
        """Writes only the most recent reading of every channel to the save file."""
        if len(self.items) == 0: return
        assert len(self.items[0].trace) > 0, "Item has no channels?"
        assert len(self.items[0].trace[0]) > 0, "Item has no readings yet?"
        #NOTE(review): elsewhere (dumpAll) items expose `traces` of
        #(time,value) pairs; here `trace` is used and t is taken as
        #trace[0][-1], which would be the *value* of the first pair --
        #confirm whether the timestamp element should be extracted instead.
        t = self.items[0].trace[0][-1]
        vals = []
        for i in self.items:
            if len(i.trace) == 0:
                continue
            for j,trace in enumerate(i.trace):
                #take the value component of the last (time,value) sample
                vals.append(trace[-1][1])
        if self.outformat == '.csv':
            self.outfile.write(str(t)+',')
            self.outfile.write(','.join([str(v) for v in vals]))
            self.outfile.write('\n')
        else:
            self.outfile.write(str(t)+'\t')
            self.outfile.write(str(len(vals))+' ')
            self.outfile.write(' '.join([str(v) for v in vals]))
            self.outfile.write('\n')
class VisAppearance:
    def __init__(self,item,name = None):
        """Wraps a drawable item with appearance/animation/editing state.

        item: the object to visualize (WorldModel, RobotModel,
        coordinates.* object, Trajectory, plain config, etc.)
        name: optional display name used for labels and sub-item paths.
        """
        self.name = name
        self.hidden = False
        self.useDefaultAppearance = True
        self.customAppearance = None
        self.customDrawFunc = None
        #For group items, this allows you to customize appearance of sub-items
        self.subAppearances = {}
        #animation state, configured by VisualizationPlugin.animate()
        self.animation = None
        self.animationStartTime = 0
        self.animationSpeed = 1.0
        self.attributes = {}
        #used for Qt text rendering
        self.widget = None
        #used for visual editing of certain items
        self.editor = None
        #cached drawing
        self.displayCache = [glcommon.CachedGLObject()]
        self.displayCache[0].name = name
        #temporary configuration of the item
        self.drawConfig = None
        self.setItem(item)
    def setItem(self,item):
        """Sets the wrapped item, rebuilding the sub-appearance table for
        composite items (worlds, robots, coordinate groups, holds)."""
        self.item = item
        self.subAppearances = {}
        #Parse out sub-items which can have their own appearance changed
        if isinstance(item,WorldModel):
            for i in xrange(item.numRobots()):
                self.subAppearances[("Robot",i)] = VisAppearance(item.robot(i),item.robot(i).getName())
            for i in xrange(item.numRigidObjects()):
                self.subAppearances[("RigidObject",i)] = VisAppearance(item.rigidObject(i),item.rigidObject(i).getName())
            for i in xrange(item.numTerrains()):
                self.subAppearances[("Terrain",i)] = VisAppearance(item.terrain(i),item.terrain(i).getName())
        elif isinstance(item,RobotModel):
            for i in xrange(item.numLinks()):
                self.subAppearances[("Link",i)] = VisAppearance(item.link(i),item.link(i).getName())
        elif isinstance(item,coordinates.Group):
            for n,f in item.frames.iteritems():
                self.subAppearances[("Frame",n)] = VisAppearance(f,n)
            for n,p in item.points.iteritems():
                self.subAppearances[("Point",n)] = VisAppearance(p,n)
            for n,d in item.directions.iteritems():
                self.subAppearances[("Direction",n)] = VisAppearance(d,n)
            for n,g in item.subgroups.iteritems():
                self.subAppearances[("Subgroup",n)] = VisAppearance(g,n)
        elif isinstance(item,Hold):
            if item.ikConstraint is not None:
                self.subAppearances["ikConstraint"] = VisAppearance(item.ikConstraint,"ik")
            for n,c in enumerate(item.contacts):
                self.subAppearances[("contact",n)] = VisAppearance(c,n)
        #sub-items share this item's attribute dict (same object, not a copy)
        for (k,a) in self.subAppearances.iteritems():
            a.attributes = self.attributes
def markChanged(self):
for c in self.displayCache:
c.markChanged()
for (k,a) in self.subAppearances.iteritems():
a.markChanged()
self.update_editor(True)
self.doRefresh = True
def destroy(self):
for c in self.displayCache:
c.destroy()
for (k,a) in self.subAppearances.iteritems():
a.destroy()
self.subAppearances = {}
def drawText(self,text,point):
"""Draws the given text at the given point"""
if self.attributes.get("text_hidden",False): return
self.widget.addLabel(text,point[:],[0,0,0])
    def updateAnimation(self,t):
        """Updates the configuration, if it's being animated.

        t: the global animation clock.  Recurses into sub-appearances.
        """
        if not self.animation:
            self.drawConfig = None
        else:
            #map global time onto the trajectory's local time
            u = self.animationSpeed*(t-self.animationStartTime)
            #animationEndBehavior is assigned by VisualizationPlugin.animate()
            q = self.animation.eval(u,self.animationEndBehavior)
            self.drawConfig = q
        for n,app in self.subAppearances.iteritems():
            app.updateAnimation(t)
    def updateTime(self,t):
        """Updates in real time.

        For VisPlot items, logs new samples at time t; linked items are
        temporarily swapped to their drawn (animated) configuration so the
        plot records what is displayed, then swapped back.
        """
        if isinstance(self.item,VisPlot):
            compressThreshold = self.attributes.get('compress',_defaultCompressThreshold)
            duration = self.attributes.get('duration',5.)
            for items in self.item.items:
                if items.linkitem:
                    items.linkitem.swapDrawConfig()
            self.item.update(t,duration,compressThreshold)
            for items in self.item.items:
                if items.linkitem:
                    items.linkitem.swapDrawConfig()
    def swapDrawConfig(self):
        """Given self.drawConfig!=None, swaps out the item's current
        configuration with self.drawConfig. Used for animations.

        Calling twice restores the original state.  Recurses into
        sub-appearances.
        """
        if self.drawConfig:
            try:
                #save the live configuration, install the animated one
                newDrawConfig = config.getConfig(self.item)
                #self.item =
                config.setConfig(self.item,self.drawConfig)
                self.drawConfig = newDrawConfig
            except Exception as e:
                #best-effort: a mismatched configuration length is reported
                #but does not abort drawing
                print "Warning, exception thrown during animation update. Probably have incorrect length of configuration"
                import traceback
                traceback.print_exc()
                pass
        for n,app in self.subAppearances.iteritems():
            app.swapDrawConfig()
    def clearDisplayLists(self):
        """Refreshes the OpenGL appearance of the item (and all sub-items),
        forcing display lists to be rebuilt on the next draw."""
        if isinstance(self.item,WorldModel):
            for r in range(self.item.numRobots()):
                for link in range(self.item.robot(r).numLinks()):
                    self.item.robot(r).link(link).appearance().refresh()
            for i in range(self.item.numRigidObjects()):
                self.item.rigidObject(i).appearance().refresh()
            for i in range(self.item.numTerrains()):
                self.item.terrain(i).appearance().refresh()
        elif hasattr(self.item,'appearance'):
            self.item.appearance().refresh()
        elif isinstance(self.item,RobotModel):
            for link in range(self.item.numLinks()):
                self.item.link(link).appearance().refresh()
        for n,o in self.subAppearances.iteritems():
            o.clearDisplayLists()
        self.markChanged()
    def draw(self,world=None):
        """Draws the specified item in the specified world. If name
        is given and text_hidden != False, then the name of the item is
        shown.

        Dispatches on the item's type: composite items recurse into
        sub-appearances; Klampt objects draw themselves; coordinate
        objects, trajectories, and raw configs/transforms are drawn with
        immediate-mode OpenGL through the display cache.
        """
        if self.hidden: return
        if self.customDrawFunc is not None:
            #user-supplied draw function overrides everything else
            self.customDrawFunc(self.item)
            return
        item = self.item
        name = self.name
        #set appearance
        if not self.useDefaultAppearance and hasattr(item,'appearance'):
            if not hasattr(self,'oldAppearance'):
                self.oldAppearance = item.appearance().clone()
            if self.customAppearance != None:
                #print "Changing appearance of",name
                item.appearance().set(self.customAppearance)
            elif "color" in self.attributes:
                #print "Changing color of",name
                item.appearance().setColor(*self.attributes["color"])
        if len(self.subAppearances)!=0:
            #composite item: delegate drawing to the sub-appearances
            for n,app in self.subAppearances.iteritems():
                app.widget = self.widget
                app.draw(world)
        elif hasattr(item,'drawGL'):
            item.drawGL()
        elif hasattr(item,'drawWorldGL'):
            item.drawWorldGL()
        elif isinstance(item,str):
            #a text item anchored at a 3D position
            pos = self.attributes.get("position",None)
            if pos is not None and len(pos)==3:
                col = self.attributes.get("color",(0,0,0))
                self.widget.addLabel(self.item,pos,col)
        elif isinstance(item,VisPlot):
            #plots are drawn in display_screen, not in the 3D view
            pass
        elif isinstance(item,Trajectory):
            doDraw = False
            centroid = None
            #determine whether and where to draw, depending on trajectory type
            if isinstance(item,RobotTrajectory):
                ees = self.attributes.get("endeffectors",[-1])
                if world:
                    doDraw = (len(ees) > 0)
                    robot = world.robot(0)
                    for i,ee in enumerate(ees):
                        if ee < 0: ees[i] = robot.numLinks()-1
                    if doDraw:
                        robot.setConfig(item.milestones[0])
                        centroid = vectorops.div(vectorops.add(*[robot.link(ee).getTransform()[1] for ee in ees]),len(ees))
            elif isinstance(item,SE3Trajectory):
                doDraw = True
                #milestone layout: 9 rotation entries followed by translation
                centroid = item.milestones[0][9:]
            else:
                if len(item.milestones[0]) == 3:
                    #R3 trajectory
                    doDraw = True
                    centroid = item.milestones[0]
                elif len(item.milestones[0]) == 2:
                    #R2 trajectory
                    doDraw = True
                    centroid = item.milestones[0]+[0.0]
            if doDraw:
                def drawRaw():
                    #convert the trajectory to one or more 3D point paths
                    pointTrajectories = []
                    if isinstance(item,RobotTrajectory):
                        robot = world.robot(0)
                        ees = self.attributes.get("endeffectors",[-1])
                        for i,ee in enumerate(ees):
                            if ee < 0: ees[i] = robot.numLinks()-1
                        if world:
                            for ee in ees:
                                pointTrajectories.append([])
                            for m in item.milestones:
                                robot.setConfig(m)
                                for ee,eetraj in zip(ees,pointTrajectories):
                                    eetraj.append(robot.link(ee).getTransform()[1])
                    elif isinstance(item,SE3Trajectory):
                        pointTrajectories.append([])
                        for m in item.milestones:
                            pointTrajectories[-1].append(m[9:])
                    else:
                        if len(item.milestones[0]) == 3:
                            #R3 trajectory
                            pointTrajectories.append(item.milestones)
                        elif len(item.milestones[0]) == 2:
                            #R2 trajectory
                            pointTrajectories.append([v + [0.0] for v in item.milestones])
                    glDisable(GL_LIGHTING)
                    glLineWidth(self.attributes.get("width",3))
                    glColor4f(*self.attributes.get("color",[1,0.5,0,1]))
                    for traj in pointTrajectories:
                        if len(traj) == 1:
                            glBegin(GL_POINTS)
                            glVertex3f(*traj[0])
                            glEnd()
                        if len(traj) >= 2:
                            glBegin(GL_LINE_STRIP)
                            for p in traj:
                                glVertex3f(*p)
                            glEnd()
                    glLineWidth(1.0)
                self.displayCache[0].draw(drawRaw,se3.identity())
                if name != None:
                    self.drawText(name,centroid)
        elif isinstance(item,coordinates.Point):
            def drawRaw():
                glDisable(GL_LIGHTING)
                glEnable(GL_POINT_SMOOTH)
                glPointSize(self.attributes.get("size",5.0))
                glColor4f(*self.attributes.get("color",[0,0,0,1]))
                glBegin(GL_POINTS)
                glVertex3f(0,0,0)
                glEnd()
                #write name
            glDisable(GL_DEPTH_TEST)
            self.displayCache[0].draw(drawRaw,[so3.identity(),item.worldCoordinates()])
            glEnable(GL_DEPTH_TEST)
            if name != None:
                self.drawText(name,item.worldCoordinates())
        elif isinstance(item,coordinates.Direction):
            def drawRaw():
                glDisable(GL_LIGHTING)
                glDisable(GL_DEPTH_TEST)
                L = self.attributes.get("length",0.15)
                source = [0,0,0]
                glColor4f(*self.attributes.get("color",[0,1,1,1]))
                glBegin(GL_LINES)
                glVertex3f(*source)
                glVertex3f(*vectorops.mul(item.localCoordinates(),L))
                glEnd()
                glEnable(GL_DEPTH_TEST)
                #write name
            self.displayCache[0].draw(drawRaw,item.frame().worldCoordinates(),parameters = item.localCoordinates())
            if name != None:
                self.drawText(name,vectorops.add(item.frame().worldCoordinates()[1],item.worldCoordinates()))
        elif isinstance(item,coordinates.Frame):
            t = item.worldCoordinates()
            if item.parent() != None:
                tp = item.parent().worldCoordinates()
            else:
                tp = se3.identity()
            tlocal = item.relativeCoordinates()
            def drawRaw():
                glDisable(GL_DEPTH_TEST)
                glDisable(GL_LIGHTING)
                glLineWidth(2.0)
                gldraw.xform_widget(tlocal,self.attributes.get("length",0.1),self.attributes.get("width",0.01))
                glLineWidth(1.0)
                #draw curve between frame and parent
                if item.parent() != None:
                    d = vectorops.norm(tlocal[1])
                    vlen = d*0.5
                    v1 = so3.apply(tlocal[0],[-vlen]*3)
                    v2 = [vlen]*3
                    #glEnable(GL_BLEND)
                    #glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA)
                    #glColor4f(1,1,0,0.5)
                    glColor3f(1,1,0)
                    gldraw.hermite_curve(tlocal[1],v1,[0,0,0],v2,0.03*max(0.1,vectorops.norm(tlocal[1])))
                    #glDisable(GL_BLEND)
                glEnable(GL_DEPTH_TEST)
            #For some reason, cached drawing is causing OpenGL problems
            #when the frame is rapidly changing
            self.displayCache[0].draw(drawRaw,transform=tp, parameters = tlocal)
            #glPushMatrix()
            #glMultMatrixf(sum(zip(*se3.homogeneous(tp)),()))
            #drawRaw()
            #glPopMatrix()
            #write name
            if name != None:
                self.drawText(name,t[1])
        elif isinstance(item,coordinates.Transform):
            #draw curve between frames
            t1 = item.source().worldCoordinates()
            if item.destination() != None:
                t2 = item.destination().worldCoordinates()
            else:
                t2 = se3.identity()
            d = vectorops.distance(t1[1],t2[1])
            vlen = d*0.5
            v1 = so3.apply(t1[0],[-vlen]*3)
            v2 = so3.apply(t2[0],[vlen]*3)
            def drawRaw():
                glDisable(GL_DEPTH_TEST)
                glDisable(GL_LIGHTING)
                glColor3f(1,1,1)
                gldraw.hermite_curve(t1[1],v1,t2[1],v2,0.03)
                glEnable(GL_DEPTH_TEST)
                #write name at curve
            self.displayCache[0].draw(drawRaw,transform=None,parameters = (t1,t2))
            if name != None:
                self.drawText(name,spline.hermite_eval(t1[1],v1,t2[1],v2,0.5))
        elif isinstance(item,coordinates.Group):
            #groups draw only through their sub-appearances
            pass
        elif isinstance(item,ContactPoint):
            def drawRaw():
                glDisable(GL_LIGHTING)
                glEnable(GL_POINT_SMOOTH)
                glPointSize(self.attributes.get("size",5.0))
                l = self.attributes.get("length",0.05)
                glColor4f(*self.attributes.get("color",[1,0.5,0,1]))
                #point at the contact, line along the normal
                glBegin(GL_POINTS)
                glVertex3f(0,0,0)
                glEnd()
                glBegin(GL_LINES)
                glVertex3f(0,0,0)
                glVertex3f(l,0,0)
                glEnd()
            self.displayCache[0].draw(drawRaw,[so3.canonical(item.n),item.x])
        elif isinstance(item,Hold):
            #drawn via the ik/contact sub-appearances
            pass
        else:
            #fall back to a type-string based dispatch
            try:
                itypes = objectToVisType(item,world)
            except:
                print "visualization.py: Unsupported object type",item,"of type:",item.__class__.__name__
                return
            if itypes == None:
                print "Unable to convert item",item,"to drawable"
                return
            elif itypes == 'Config':
                if world:
                    #draw the world's robot posed at this configuration
                    robot = world.robot(0)
                    if not self.useDefaultAppearance:
                        oldAppearance = [robot.link(i).appearance().clone() for i in xrange(robot.numLinks())]
                        for i in xrange(robot.numLinks()):
                            if self.customAppearance is not None:
                                robot.link(i).appearance().set(self.customAppearance)
                            elif "color" in self.attributes:
                                robot.link(i).appearance().setColor(*self.attributes["color"])
                    oldconfig = robot.getConfig()
                    robot.setConfig(item)
                    robot.drawGL()
                    robot.setConfig(oldconfig)
                    if not self.useDefaultAppearance:
                        for (i,app) in enumerate(oldAppearance):
                            robot.link(i).appearance().set(app)
                else:
                    #NOTE(review): "tiems" is a typo for "items" in this message
                    print "Unable to draw Config tiems without a world"
            elif itypes == 'Configs':
                if world:
                    #draw up to maxConfigs evenly spaced configurations
                    maxConfigs = self.attributes.get("maxConfigs",min(10,len(item)))
                    robot = world.robot(0)
                    if not self.useDefaultAppearance:
                        oldAppearance = [robot.link(i).appearance().clone() for i in xrange(robot.numLinks())]
                        for i in xrange(robot.numLinks()):
                            if self.customAppearance is not None:
                                robot.link(i).appearance().set(self.customAppearance)
                            elif "color" in self.attributes:
                                robot.link(i).appearance().setColor(*self.attributes["color"])
                    oldconfig = robot.getConfig()
                    for i in xrange(maxConfigs):
                        idx = int(i*len(item))/maxConfigs
                        robot.setConfig(item[idx])
                        robot.drawGL()
                    robot.setConfig(oldconfig)
                    if not self.useDefaultAppearance:
                        for (i,app) in enumerate(oldAppearance):
                            robot.link(i).appearance().set(app)
                else:
                    print "Unable to draw Configs items without a world"
            elif itypes == 'Vector3':
                def drawRaw():
                    glDisable(GL_LIGHTING)
                    glEnable(GL_POINT_SMOOTH)
                    glPointSize(self.attributes.get("size",5.0))
                    glColor4f(*self.attributes.get("color",[0,0,0,1]))
                    glBegin(GL_POINTS)
                    glVertex3f(0,0,0)
                    glEnd()
                self.displayCache[0].draw(drawRaw,[so3.identity(),item])
                if name != None:
                    self.drawText(name,item)
            elif itypes == 'RigidTransform':
                def drawRaw():
                    fancy = self.attributes.get("fancy",False)
                    if fancy: glEnable(GL_LIGHTING)
                    else: glDisable(GL_LIGHTING)
                    gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01),fancy=fancy)
                self.displayCache[0].draw(drawRaw,transform=item)
                if name != None:
                    self.drawText(name,item[1])
            elif itypes == 'IKGoal':
                if hasattr(item,'robot'):
                    #need this to be built with a robot element.
                    #Otherwise, can't determine the correct transforms
                    robot = item.robot
                elif world:
                    if world.numRobots() >= 1:
                        robot = world.robot(0)
                    else:
                        robot = None
                else:
                    robot = None
                if robot != None:
                    link = robot.link(item.link())
                    dest = robot.link(item.destLink()) if item.destLink()>=0 else None
                    #caches 1 and 2 hold the target pose and the connecting curve
                    while len(self.displayCache) < 3:
                        self.displayCache.append(glcommon.CachedGLObject())
                    self.displayCache[1].name = self.name+" target position"
                    self.displayCache[2].name = self.name+" curve"
                    if item.numPosDims() != 0:
                        lp,wp = item.getPosition()
                        #set up parameters of connector
                        p1 = se3.apply(link.getTransform(),lp)
                        if dest != None:
                            p2 = se3.apply(dest.getTransform(),wp)
                        else:
                            p2 = wp
                        d = vectorops.distance(p1,p2)
                        v1 = [0.0]*3
                        v2 = [0.0]*3
                        if item.numRotDims()==3: #full constraint
                            R = item.getRotation()
                            def drawRaw():
                                gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01))
                            t1 = se3.mul(link.getTransform(),(so3.identity(),lp))
                            t2 = (R,wp) if dest==None else se3.mul(dest.getTransform(),(R,wp))
                            self.displayCache[0].draw(drawRaw,transform=t1)
                            self.displayCache[1].draw(drawRaw,transform=t2)
                            vlen = d*0.1
                            v1 = so3.apply(t1[0],[-vlen]*3)
                            v2 = so3.apply(t2[0],[vlen]*3)
                        elif item.numRotDims()==0: #point constraint
                            def drawRaw():
                                glDisable(GL_LIGHTING)
                                glEnable(GL_POINT_SMOOTH)
                                glPointSize(self.attributes.get("size",5.0))
                                glColor4f(*self.attributes.get("color",[0,0,0,1]))
                                glBegin(GL_POINTS)
                                glVertex3f(0,0,0)
                                glEnd()
                            self.displayCache[0].draw(drawRaw,transform=(so3.identity(),p1))
                            self.displayCache[1].draw(drawRaw,transform=(so3.identity(),p2))
                            #set up the connecting curve
                            vlen = d*0.5
                            d = vectorops.sub(p2,p1)
                            v1 = vectorops.mul(d,0.5)
                            #curve in the destination
                            v2 = vectorops.cross((0,0,0.5),d)
                        else: #hinge constraint
                            p = [0,0,0]
                            d = [0,0,0]
                            def drawRawLine():
                                glDisable(GL_LIGHTING)
                                glEnable(GL_POINT_SMOOTH)
                                glPointSize(self.attributes.get("size",5.0))
                                glColor4f(*self.attributes.get("color",[0,0,0,1]))
                                glBegin(GL_POINTS)
                                glVertex3f(*p)
                                glEnd()
                                glColor4f(*self.attributes.get("color",[0.5,0,0.5,1]))
                                glLineWidth(self.attributes.get("width",3.0))
                                glBegin(GL_LINES)
                                glVertex3f(*p)
                                glVertex3f(*vectorops.madd(p,d,self.attributes.get("length",0.1)))
                                glEnd()
                                glLineWidth(1.0)
                            ld,wd = item.getRotationAxis()
                            #draw once in the link's frame, once in the destination's
                            p = lp
                            d = ld
                            self.displayCache[0].draw(drawRawLine,transform=link.getTransform(),parameters=(p,d))
                            p = wp
                            d = wd
                            self.displayCache[1].draw(drawRawLine,transform=dest.getTransform() if dest else se3.identity(),parameters=(p,d))
                            #set up the connecting curve
                            d = vectorops.sub(p2,p1)
                            v1 = vectorops.mul(d,0.5)
                            #curve in the destination
                            v2 = vectorops.cross((0,0,0.5),d)
                        def drawConnection():
                            glDisable(GL_LIGHTING)
                            glDisable(GL_DEPTH_TEST)
                            glColor3f(1,0.5,0)
                            gldraw.hermite_curve(p1,v1,p2,v2,0.03*max(0.1,vectorops.distance(p1,p2)))
                            #glBegin(GL_LINES)
                            #glVertex3f(*p1)
                            #glVertex3f(*p2)
                            #glEnd()
                            glEnable(GL_DEPTH_TEST)
                        #TEMP for some reason the cached version sometimes gives a GL error
                        self.displayCache[2].draw(drawConnection,transform=None,parameters = (p1,v1,p2,v2))
                        #drawConnection()
                        if name != None:
                            self.drawText(name,wp)
                    else:
                        #rotation-only constraint
                        wp = link.getTransform()[1]
                        if item.numRotDims()==3: #full constraint
                            R = item.getRotation()
                            def drawRaw():
                                gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01))
                            self.displayCache[0].draw(drawRaw,transform=link.getTransform())
                            self.displayCache[1].draw(drawRaw,transform=se3.mul(link.getTransform(),(R,[0,0,0])))
                        elif item.numRotDims() > 0:
                            #axis constraint
                            d = [0,0,0]
                            def drawRawLine():
                                glDisable(GL_LIGHTING)
                                glColor4f(*self.attributes.get("color",[0.5,0,0.5,1]))
                                glLineWidth(self.attributes.get("width",3.0))
                                glBegin(GL_LINES)
                                glVertex3f(0,0,0)
                                glVertex3f(*vectorops.mul(d,self.attributes.get("length",0.1)))
                                glEnd()
                                glLineWidth(1.0)
                            ld,wd = item.getRotationAxis()
                            d = ld
                            self.displayCache[0].draw(drawRawLine,transform=link.getTransform(),parameters=d)
                            d = wd
                            self.displayCache[1].draw(drawRawLine,transform=(dest.getTransform()[0] if dest else so3.identity(),wp),parameters=d)
                        else:
                            #no drawing
                            pass
                        if name != None:
                            self.drawText(name,wp)
            else:
                print "Unable to draw item of type",itypes
        #revert appearance
        if not self.useDefaultAppearance and hasattr(item,'appearance'):
            item.appearance().set(self.oldAppearance)
    def getBounds(self):
        """Returns a bounding box (bmin,bmax) or None if it can't be found"""
        if len(self.subAppearances)!=0:
            #composite item: union of all sub-item bounds
            bb = aabb_create()
            for n,app in self.subAppearances.iteritems():
                bb = aabb_expand(bb,app.getBounds())
            return bb
        item = self.item
        if isinstance(item,coordinates.Point):
            return [item.worldCoordinates(),item.worldCoordinates()]
        elif isinstance(item,coordinates.Direction):
            T = item.frame().worldCoordinates()
            d = item.localCoordinates()
            L = self.attributes.get("length",0.1)
            return aabb_create(T[1],se3.apply(T,vectorops.mul(d,L)))
        elif isinstance(item,coordinates.Frame):
            T = item.worldCoordinates()
            L = self.attributes.get("length",0.1)
            #box containing the origin plus the three drawn axis tips
            return aabb_create(T[1],se3.apply(T,(L,0,0)),se3.apply(T,(0,L,0)),se3.apply(T,(0,0,L)))
        elif isinstance(item,ContactPoint):
            L = self.attributes.get("length",0.05)
            return aabb_create(item.x,vectorops.madd(item.x,item.n,L))
        elif isinstance(item,WorldModel):
            pass
        elif hasattr(item,'geometry'):
            return item.geometry().getBB()
        elif isinstance(item,(str,VisPlot)):
            pass
        else:
            try:
                vtype = objectToVisType(item,None)
                if 'Vector3' == vtype:
                    #assumed to be a point
                    return (item,item)
                elif 'RigidTransform' == vtype:
                    #assumed to be a rigid transform
                    return (item[1],item[1])
            except Exception:
                pass
        #fall-through: no bound could be determined
        print "Empty bound for object",self.name,"type",self.item.__class__.__name__
        return aabb_create()
    def getSubItem(self,path):
        """Resolves a sub-item by a path of names; returns self for an
        empty path.  Raises ValueError if any path element is invalid."""
        if len(path) == 0: return self
        for k,v in self.subAppearances.iteritems():
            if v.name == path[0]:
                try:
                    return v.getSubItem(path[1:])
                except ValueError,e:
                    #rewrap to report the full failed path
                    raise ValueError("Invalid sub-path specified "+str(path)+" at "+str(e))
        raise ValueError("Invalid sub-item specified "+path[0])
    def make_editor(self):
        """Creates an interactive poser widget for this item, if its type
        supports editing; stores it in self.editor.  No-op if an editor
        already exists.  Robot-like items are hidden while being posed."""
        if self.editor != None:
            return
        item = self.item
        if isinstance(item,coordinates.Point):
            res = PointPoser()
            res.set(self.item.worldCoordinates())
            res.setAxes(self.item.frame().worldCoordinates()[0])
        elif isinstance(item,coordinates.Direction):
            res = PointPoser()
            res.set(self.item.worldCoordinates())
            res.setAxes(self.item.frame().worldCoordinates()[0])
        elif isinstance(item,coordinates.Frame):
            res = TransformPoser()
            res.set(*self.item.worldCoordinates())
        elif isinstance(self.item,RobotModel):
            res = RobotPoser(self.item)
            #hide the item itself; the poser draws the robot
            self.hidden = True
        elif isinstance(self.item,SubRobotModel):
            res = RobotPoser(self.item._robot)
            res.setActiveDofs(self.item.links);
            self.hidden = True
        elif isinstance(self.item,RigidObjectModel):
            res = ObjectPoser(self.item)
        elif isinstance(self.item,(list,tuple)):
            #determine if it's a rotation, transform, or point
            itype = objectToVisType(self.item,None)
            if itype == 'Vector3':
                res = PointPoser()
                res.set(self.item)
            elif itype == 'Matrix3':
                res = TransformPoser()
                res.enableRotation(True)
                res.enableTranslation(False)
                res.set(self.item)
            elif itype == 'RigidTransform':
                res = TransformPoser()
                res.enableRotation(True)
                res.enableTranslation(True)
                res.set(*self.item)
            else:
                print "VisAppearance.make_editor(): Warning, editor for object of type",itype,"not defined"
                return
        else:
            print "VisAppearance.make_editor(): Warning, editor for object of type",self.item.__class__.__name__,"not defined"
            return
        self.editor = res
    def update_editor(self,item_to_editor=False):
        """Synchronizes the item and its editor widget.

        item_to_editor=True pushes the item's current state into the
        editor; False pulls the editor's state back into the item (only
        when the editor has focus).  Recurses into sub-appearances.
        """
        for (name,item) in self.subAppearances.iteritems():
            item.update_editor(item_to_editor)
        if self.editor == None:
            return
        item = self.item
        if item_to_editor:
            if isinstance(item,coordinates.Point):
                self.editor.set(self.item.worldCoordinates())
            elif isinstance(item,coordinates.Direction):
                self.editor.set(self.item.worldCoordinates())
            elif isinstance(item,coordinates.Frame):
                self.editor.set(*self.item.worldCoordinates())
            elif isinstance(self.item,RobotModel):
                self.editor.set(self.item.getConfig())
            elif isinstance(self.item,SubRobotModel):
                self.editor.set(self.item.tofull(self.item.getConfig()))
            elif isinstance(self.item,RigidObjectModel):
                self.editor.set(*self.item.getTransform())
            elif isinstance(self.item,(list,tuple)):
                itype = objectToVisType(self.item,None)
                if itype in ('Vector3','Matrix3'):
                    self.editor.set(self.item)
                elif itype == 'RigidTransform':
                    self.editor.set(*self.item)
            else:
                raise RuntimeError("Uh... unsupported type with an editor?")
        else:
            if not self.editor.hasFocus():
                return
            if isinstance(item,coordinates.Point):
                #convert the edited world position back to frame-local coordinates
                self.item._localCoordinates = se3.apply(se3.inv(self.item._frame.worldCoordinates()),self.editor.get())
            elif isinstance(item,coordinates.Direction):
                self.item._localCoordinates = se3.apply(se3.inv(self.item._frame.worldCoordinates()),self.editor.get())
            elif isinstance(item,coordinates.Frame):
                self.item._worldCoordinates = self.editor.get()
                self.item._relativeCoordinates = se3.mul(se3.inv(self.item.parent().worldCoordinates()),self.editor.get())
                #TODO: updating downstream frames?
            elif isinstance(self.item,RobotModel):
                self.item.setConfig(self.editor.getConditioned(self.item.getConfig()))
            elif isinstance(self.item,SubRobotModel):
                self.item.setConfig(self.item.fromfull(self.editor.get()))
            elif isinstance(self.item,RigidObjectModel):
                self.item.setTransform(*self.editor.get())
            elif isinstance(self.item,(tuple,list)):
                #in-place element-wise copy where structure matches; replace otherwise
                def setList(a,b):
                    if isinstance(a,(list,tuple)) and isinstance(b,(list,tuple)):
                        if len(a) == len(b):
                            for i in xrange(len(a)):
                                if not setList(a[i],b[i]):
                                    if isinstance(a,list):
                                        a[i] = b[i]
                                    else:
                                        return False
                            return True
                    return False
                v = self.editor.get()
                if not setList(self.item,v):
                    self.item = v
            elif isinstance(self.item,tuple):
                #NOTE(review): unreachable -- tuples are caught by the
                #(tuple,list) branch above
                print "Edited a tuple... maybe a point or an xform? can't actually edit"
                self.item = self.editor.get()
            else:
                raise RuntimeError("Uh... unsupported type with an editor?")
def remove_editor(self):
self.editor = None
self.hidden = False
class VisualizationPlugin(glcommon.GLWidgetPlugin):
def __init__(self):
glcommon.GLWidgetPlugin.__init__(self)
self.items = {}
self.labels = []
self.t = time.time()
self.startTime = self.t
self.animating = True
self.currentAnimationTime = 0
self.doRefresh = False
def initialize(self):
#keep or refresh display lists?
#self._clearDisplayLists()
return glcommon.GLWidgetPlugin.initialize(self)
def addLabel(self,text,point,color):
self.labels.append((text,point,color))
    def display(self):
        """Draws every item in the scene, then renders the text labels the
        items queued, clustering labels that project to nearby points so
        they are drawn together.  Runs under the global visualization lock.
        """
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.display(self)
        self.labels = []
        world = self.items.get('world',None)
        if world != None: world=world.item
        for (k,v) in self.items.iteritems():
            v.widget = self
            #swap in the animated configuration, draw, swap back
            v.swapDrawConfig()
            v.draw(world)
            v.swapDrawConfig()
            v.widget = None #allows garbage collector to delete these objects
        #cluster label points
        pointTolerance = self.view.camera.dist*0.03
        pointHash = {}
        for (text,point,color) in self.labels:
            #bucket labels by quantized position
            index = tuple([int(x/pointTolerance) for x in point])
            try:
                pointHash[index][1].append((text,color))
            except KeyError:
                pointHash[index] = [point,[(text,color)]]
        for (p,items) in pointHash.itervalues():
            self._drawLabelRaw(p,*zip(*items))
        _globalLock.release()
    def display_screen(self):
        """Draws 2D overlay content: VisPlot items and screen-anchored text.

        Items without an explicit 'position' attribute are stacked down the
        left margin starting at (20,20); negative coordinates are measured
        from the right/bottom edge.  Runs under the global lock.
        """
        global _globalLock
        _globalLock.acquire()
        glcommon.GLWidgetPlugin.display_screen(self)
        #console cursor for auto-positioned items
        cx = 20
        cy = 20
        glDisable(GL_LIGHTING)
        glDisable(GL_DEPTH_TEST)
        for (k,v) in self.items.iteritems():
            if isinstance(v.item,VisPlot):
                pos = v.attributes.get('position',None)
                duration = v.attributes.get('duration',5.)
                vrange = v.attributes.get('range',(None,None))
                w,h = v.attributes.get('size',(200,150))
                if pos is None:
                    v.item.render(self.window,cx,cy,w,h,duration,vrange[0],vrange[1])
                    cy += h+18
                else:
                    x = pos[0]
                    y = pos[1]
                    #negative coordinates anchor to the opposite edge
                    if x < 0:
                        x = self.view.w + x
                    if y < 0:
                        y = self.view.h + y
                    v.item.render(self.window,x,y,w,h,duration,vrange[0],vrange[1])
        for (k,v) in self.items.iteritems():
            if isinstance(v.item,str):
                pos = v.attributes.get('position',None)
                col = v.attributes.get('color',(0,0,0))
                size = v.attributes.get('size',12)
                if pos is None:
                    #draw at console
                    self.window.draw_text((cx,cy+size),v.item,size,col)
                    cy += (size*15)/10
                elif len(pos)==2:
                    x = pos[0]
                    y = pos[1]
                    if x < 0:
                        x = self.view.w + x
                    if y < 0:
                        y = self.view.h + y
                    self.window.draw_text((x,y+size),v.item,size,col)
        glEnable(GL_DEPTH_TEST)
        _globalLock.release()
def reshapefunc(self,w,h):
global _globalLock
_globalLock.acquire()
glcommon.GLWidgetPlugin.reshapefunc(self,w,h)
_globalLock.release()
def keyboardfunc(self,c,x,y):
global _globalLock
_globalLock.acquire()
glcommon.GLWidgetPlugin.keyboardfunc(self,c,x,y)
_globalLock.release()
def keyboardupfunc(self,c,x,y):
global _globalLock
_globalLock.acquire()
glcommon.GLWidgetPlugin.keyboardupfunc(self,c,x,y)
_globalLock.release()
def mousefunc(self,button,state,x,y):
global _globalLock
_globalLock.acquire()
glcommon.GLWidgetPlugin.mousefunc(self,button,state,x,y)
_globalLock.release()
def motionfunc(self,x,y,dx,dy):
global _globalLock
_globalLock.acquire()
glcommon.GLWidgetPlugin.motionfunc(self,x,y,dx,dy)
_globalLock.release()
def eventfunc(self,type,args=""):
global _globalLock
_globalLock.acquire()
glcommon.GLWidgetPlugin.eventfunc(self,type,args)
_globalLock.release()
def closefunc(self):
global _globalLock
_globalLock.acquire()
glcommon.GLWidgetPlugin.closefunc(self)
_globalLock.release()
    def _drawLabelRaw(self,point,textList,colorList):
        """Draws a clustered stack of text labels at a 3D point, one line
        per (text,color) pair, offsetting each successive line downward in
        screen space."""
        #assert not self.makingDisplayList,"drawText must be called outside of display list"
        assert self.window != None
        for i,(text,c) in enumerate(zip(textList,colorList)):
            if i+1 < len(textList): text = text+","
            projpt = self.view.project(point,clip=False)
            if projpt[2] > self.view.clippingplanes[0]:
                #shift the anchor down by roughly one 12px text line,
                #scaled by depth, in the camera's frame
                d = float(12)/float(self.view.w)*projpt[2]*0.7
                point = vectorops.add(point,so3.apply(so3.inv(self.view.camera.matrix()[0]),(0,-d,0)))
            glDisable(GL_LIGHTING)
            glDisable(GL_DEPTH_TEST)
            glColor3f(*c)
            self.draw_text(point,text,size=12)
        glEnable(GL_DEPTH_TEST)
def _clearDisplayLists(self):
for i in self.items.itervalues():
i.clearDisplayLists()
    def idle(self):
        """Per-frame update: advances the animation clock by the elapsed
        wall-clock time (when not paused) and pushes time updates to all
        items.  Runs under the global lock; returns False.
        """
        global _globalLock
        _globalLock.acquire()
        oldt = self.t
        self.t = time.time()
        if self.animating:
            self.currentAnimationTime += (self.t - oldt)
            for (k,v) in self.items.iteritems():
                #do animation updates
                v.updateAnimation(self.currentAnimationTime)
        for (k,v) in self.items.iteritems():
            #do other updates
            v.updateTime(self.t-self.startTime)
        _globalLock.release()
        return False
def getItem(self,item_name):
"""Returns an VisAppearance according to the given name or path"""
if isinstance(item_name,(list,tuple)):
components = item_name
if len(components)==1:
return self.getItem(components[0])
if components[0] not in self.items:
raise ValueError("Invalid top-level item specified: "+item_name)
return self.items[components[0]].getSubItem(components[1:])
if item_name in self.items:
return self.items[item_name]
def dirty(self,item_name='all'):
"""Marks an item or everything as dirty, forcing a deep redraw."""
global _globalLock
_globalLock.acquire()
if item_name == 'all':
if (name,itemvis) in self.items.iteritems():
itemvis.markChanged()
else:
self.getItem(item_name).markChanged()
_globalLock.release()
def clear(self):
"""Clears the visualization world"""
global _globalLock
_globalLock.acquire()
for (name,itemvis) in self.items.iteritems():
itemvis.destroy()
self.items = {}
_globalLock.release()
def clearText(self):
"""Clears all text in the visualization."""
global _globalLock
_globalLock.acquire()
del_items = []
for (name,itemvis) in self.items.iteritems():
if isinstance(itemvis.item,str):
itemvis.destroy()
del_items.append(name)
for n in del_items:
del self.items[n]
_globalLock.release()
    def listItems(self,root=None,indent=0):
        """Prints out all items in the visualization world.

        root: item (VisAppearance or name) to start from; None prints all
        top-level items.  indent: current indentation level for the tree
        printout.
        """
        if root == None:
            for name,value in self.items.iteritems():
                self.listItems(value,indent)
        else:
            if isinstance(root,str):
                root = self.getItem(root)
            if indent > 0:
                #trailing comma: keep the name on the same line as the indent
                print " "*(indent-1),
            print root.name
            for n,v in root.subAppearances.iteritems():
                self.listItems(v,indent+2)
    def add(self,name,item,keepAppearance=False):
        """Adds a named item to the visualization world. If the item already
        exists, the appearance information will be reinitialized if keepAppearance=False
        (default) or be kept if keepAppearance=True."""
        global _globalLock
        assert not isinstance(name,(list,tuple)),"Cannot add sub-path items"
        _globalLock.acquire()
        if keepAppearance and name in self.items:
            #replace the underlying item, keep attributes/appearance
            self.items[name].setItem(item)
        else:
            #need to erase prior item visualizer
            if name in self.items:
                self.items[name].destroy()
            app = VisAppearance(item,name)
            self.items[name] = app
        _globalLock.release()
        #self.refresh()
    def animate(self,name,animation,speed=1.0,endBehavior='loop'):
        """Attaches an animation to the named item.

        animation: a Trajectory, a HermiteTrajectory (converted to a config
        trajectory), or a list of milestones (spaced 1s apart).
        speed: playback rate multiplier.
        endBehavior: passed to Trajectory.eval when past the end (e.g. 'loop').
        """
        global _globalLock
        _globalLock.acquire()
        if hasattr(animation,'__iter__'):
            #a list of milestones -- loop through them with 1s delay
            print "visualization.animate(): Making a Trajectory with unit durations between",len(animation),"milestones"
            animation = Trajectory(range(len(animation)),animation)
        if isinstance(animation,HermiteTrajectory):
            animation = animation.configTrajectory()
        item = self.getItem(name)
        item.animation = animation
        #start the animation at the current global animation clock
        item.animationStartTime = self.currentAnimationTime
        item.animationSpeed = speed
        item.animationEndBehavior = endBehavior
        item.markChanged()
        _globalLock.release()
def pauseAnimation(self,paused=True):
global _globalLock
_globalLock.acquire()
self.animating = not paused
_globalLock.release()
def stepAnimation(self,amount):
global _globalLock
_globalLock.acquire()
self.currentAnimationTime += amount
self.doRefresh = True
_globalLock.release()
def animationTime(self,newtime=None):
global _globalLock
if self==None:
print "Visualization disabled"
return 0
if newtime != None:
_globalLock.acquire()
self.currentAnimationTime = newtime
_globalLock.release()
return self.currentAnimationTime
def remove(self,name):
global _globalLock
_globalLock.acquire()
assert name in self.items,"Can only remove top level objects from visualization, try hide() instead"
item = self.getItem(name)
item.destroy()
del self.items[name]
self.doRefresh = True
_globalLock.release()
def getItemConfig(self,name):
global _globalLock
_globalLock.acquire()
res = config.getConfig(self.getItem(name).item)
_globalLock.release()
return res
def setItemConfig(self,name,value):
    """Sets the configuration of the named item from a flat list of values."""
    global _globalLock
    _globalLock.acquire()
    target = self.getItem(name)
    if isinstance(target.item,(list,tuple,str)):
        #raw data items are replaced outright
        target.item = value
    else:
        config.setConfig(target.item,value)
    if target.editor:
        #keep any open editor widget in sync with the new value
        target.update_editor(item_to_editor = True)
    self.doRefresh = True
    _globalLock.release()
def hideLabel(self,name,hidden=True):
    """Hides or shows the text label of the named item."""
    global _globalLock
    _globalLock.acquire()
    entry = self.getItem(name)
    entry.attributes["text_hidden"] = hidden
    entry.markChanged()
    self.doRefresh = True
    _globalLock.release()
def edit(self,name,doedit=True):
    """Turns interactive editing of the named item on (doedit=True) or off.

    Raises:
        ValueError: if no item of that name exists.
    """
    global _globalLock
    _globalLock.acquire()
    target = self.getItem(name)
    if target == None:
        #release before raising so the lock is not held forever
        _globalLock.release()
        raise ValueError("Object "+name+" does not exist in visualization")
    if not doedit:
        #tear down any existing editor widget
        if target.editor:
            self.klamptwidgetmaster.remove(target.editor)
            target.remove_editor()
    else:
        target.make_editor()
        if target.editor:
            self.klamptwidgetmaster.add(target.editor)
    self.doRefresh = True
    _globalLock.release()
def widgetchangefunc(self,edit):
    """Called by GLWidgetPlugin on any widget change"""
    #push the new widget state into every item's editor
    for entry in self.items.values():
        entry.update_editor()
def hide(self,name,hidden=True):
    """Hides or shows the named item (the item is kept, just not drawn)."""
    global _globalLock
    _globalLock.acquire()
    entry = self.getItem(name)
    entry.hidden = hidden
    self.doRefresh = True
    _globalLock.release()
def addPlotItem(self,plotname,itemname):
    """Adds the named visualization item as a trace on the named plot."""
    global _globalLock
    _globalLock.acquire()
    plotitem = self.getItem(plotname)
    assert plotitem != None and isinstance(plotitem.item,VisPlot),(plotname+" is not a valid plot")
    plotobj = plotitem.item
    #each item may only appear once on a given plot
    for trace in plotobj.items:
        assert trace.name != itemname,(str(itemname)+" is already in the plot "+plotname)
    source = self.getItem(itemname)
    assert source != None,(str(itemname)+" is not a valid item")
    plotobj.items.append(VisPlotItem(itemname,source))
    _globalLock.release()
def logPlot(self,plotname,itemname,value):
    """Logs a custom named value onto the named plot at the current time."""
    global _globalLock
    _globalLock.acquire()
    plotitem = self.getItem(plotname)
    assert plotitem != None and isinstance(plotitem.item,VisPlot),(plotname+" is not a valid plot")
    compress = plotitem.attributes.get('compress',_defaultCompressThreshold)
    plotobj = plotitem.item
    #find the (last) anonymous trace used for custom logged values
    customIndex = -1
    for idx,trace in enumerate(plotobj.items):
        if len(trace.name)==0:
            customIndex = idx
    if customIndex < 0:
        #no anonymous trace yet; create one
        customIndex = len(plotobj.items)
        plotobj.items.append(VisPlotItem('',None))
    plotobj.items[customIndex].compressThreshold = compress
    plotobj.items[customIndex].customUpdate(itemname,self.t - self.startTime,value)
    _globalLock.release()
def logPlotEvent(self,plotname,eventname,color):
    """Logs a discrete event onto the named plot's timeline."""
    global _globalLock
    _globalLock.acquire()
    plotitem = self.getItem(plotname)
    assert plotitem != None and isinstance(plotitem.item,VisPlot),(plotname+" is not a valid plot")
    plotitem.item.addEvent(eventname,self.t-self.startTime,color)
    _globalLock.release()
def hidePlotItem(self,plotname,itemname,hidden=True):
    """Hides or shows an item (or a single component of an item) of a plot.

    Args:
        plotname: name of a previously added plot.
        itemname: either an item name, or a (name,component_index) pair to
            hide just one channel of a multi-channel item.
        hidden (bool): True to hide, False to show.
    """
    global _globalLock
    _globalLock.acquire()
    plot = self.getItem(plotname)
    assert plot != None and isinstance(plot.item,VisPlot),plotname+" is not a valid plot"
    plot = plot.item
    identified = False
    if isinstance(itemname,(tuple,list)):
        name,index = itemname
        for i in plot.items:
            if i.name == name:
                assert index < len(i.hidden),("Invalid component index of item "+str(name))
                identified = True
                #fix: index by the component index, not the whole
                #(name,index) pair, which raised TypeError on lists
                i.hidden[index] = hidden
    else:
        for i in plot.items:
            if i.name == itemname:
                #fix: mark the item as found so the assert below does not
                #fire on a successful lookup
                identified = True
                for j in xrange(len(i.hidden)):
                    i.hidden[j] = hidden
    assert identified,("Invalid item "+str(itemname)+" specified in plot "+plotname)
    self.doRefresh = True
    _globalLock.release()
def savePlot(self,plotname,fn):
    """Starts saving the named plot to file fn, or stops saving if fn is None."""
    global _globalLock
    _globalLock.acquire()
    plotitem = self.getItem(plotname)
    assert plotitem != None and isinstance(plotitem.item,VisPlot),plotname+" is not a valid plot"
    plotobj = plotitem.item
    if fn == None:
        plotobj.endSave(fn)
    else:
        plotobj.beginSave(fn)
    _globalLock.release()
def setAppearance(self,name,appearance):
    """Assigns a custom Appearance to the named item."""
    global _globalLock
    _globalLock.acquire()
    entry = self.getItem(name)
    entry.customAppearance = appearance
    #disable the default appearance so the custom one is used
    entry.useDefaultAppearance = False
    entry.markChanged()
    self.doRefresh = True
    _globalLock.release()
def setAttribute(self,name,attr,value):
    """Sets a rendering attribute of the named item; value None removes it."""
    global _globalLock
    _globalLock.acquire()
    entry = self.getItem(name)
    if value == None:
        #a None value deletes the attribute entirely
        entry.attributes.pop(attr,None)
    else:
        entry.attributes[attr] = value
    entry.markChanged()
    self.doRefresh = True
    _globalLock.release()
def revertAppearance(self,name):
    """Restores the default appearance of the named item."""
    global _globalLock
    _globalLock.acquire()
    item = self.getItem(name)
    #fix: was misspelled 'useDefaultApperance', which only set a dead
    #attribute and never actually reverted the appearance (cf. the
    #'useDefaultAppearance' flag used by setAppearance and setColor)
    item.useDefaultAppearance = True
    item.markChanged()
    self.doRefresh = True
    _globalLock.release()
def setColor(self,name,r,g,b,a=1.0):
    """Sets the RGBA color of the named item, overriding its default appearance."""
    global _globalLock
    _globalLock.acquire()
    entry = self.getItem(name)
    entry.useDefaultAppearance = False
    entry.attributes["color"] = [r,g,b,a]
    entry.markChanged()
    self.doRefresh = True
    _globalLock.release()
def setDrawFunc(self,name,func):
    """Assigns a custom draw callback to the named item."""
    global _globalLock
    _globalLock.acquire()
    self.getItem(name).customDrawFunc = func
    self.doRefresh = True
    _globalLock.release()
def autoFitCamera(self,scale=1.0):
    """Automatically fits the viewport's camera to view all current items.

    Args:
        scale (float): values > 1 zoom the camera in from the auto-fitted
            distance (the orbit distance is divided by scale).
    """
    vp = None
    if self.window == None:
        #no window assigned; use the module-level frontend's viewport
        global _frontend
        vp = _frontend.get_view()
    else:
        vp = self.window.get_view()
    try:
        autoFitViewport(vp,self.items.values())
        vp.camera.dist /= scale
    except Exception as e:
        #best-effort: fitting can fail on empty/degenerate scenes
        print "Unable to auto-fit camera"
        print e
#the singleton plugin instance backing the module-level vis API
_vis = VisualizationPlugin()
_frontend.setPlugin(_vis)
#signals to visualization thread
_quit = False
_thread_running = False
if _PyQtAvailable:
    from PyQt4 import QtGui
    #Qt specific startup
    #need to set up a QDialog and an QApplication
    class _MyDialog(QDialog):
        """A QDialog wrapping a klampt GL widget, used for dialog()-mode windows.

        The GL window is hidden (not destroyed) on accept/reject so it can be
        re-parented and reused later.
        """
        def __init__(self,windowinfo):
            QDialog.__init__(self)
            self.windowinfo = windowinfo
            glwidget = windowinfo.glwindow
            glwidget.setMinimumSize(640,480)
            glwidget.setMaximumSize(4000,4000)
            glwidget.setSizePolicy(QSizePolicy(QSizePolicy.Maximum,QSizePolicy.Maximum))
            self.description = QLabel("Press OK to continue")
            self.description.setSizePolicy(QSizePolicy(QSizePolicy.Preferred,QSizePolicy.Fixed))
            self.layout = QVBoxLayout(self)
            self.layout.addWidget(glwidget)
            self.layout.addWidget(self.description)
            self.buttons = QDialogButtonBox(QDialogButtonBox.Ok,Qt.Horizontal, self)
            self.buttons.accepted.connect(self.accept)
            self.layout.addWidget(self.buttons)
            self.setWindowTitle(windowinfo.name)
            glwidget.name = windowinfo.name
        def accept(self):
            """Hides the GL window and accepts the dialog."""
            global _globalLock
            _globalLock.acquire()
            self.windowinfo.glwindow.hide()
            _globalLock.release()
            print "#########################################"
            print "klampt.vis: Dialog accept"
            print "#########################################"
            return QDialog.accept(self)
        def reject(self):
            """Hides the GL window and rejects the dialog."""
            global _globalLock
            _globalLock.acquire()
            self.windowinfo.glwindow.hide()
            print "#########################################"
            print "klampt.vis: Dialog reject"
            print "#########################################"
            _globalLock.release()
            return QDialog.reject(self)
    class _MyWindow(QMainWindow):
        """A QMainWindow wrapping a klampt GL widget.

        Adds Actions/Visualization menus and supports saving movie frames
        and HTML animations of the current world/simulation.
        """
        def __init__(self,windowinfo):
            QMainWindow.__init__(self)
            self.windowinfo = windowinfo
            self.glwidget = windowinfo.glwindow
            self.glwidget.setMinimumSize(self.glwidget.width,self.glwidget.height)
            self.glwidget.setMaximumSize(4000,4000)
            self.glwidget.setSizePolicy(QSizePolicy(QSizePolicy.Maximum,QSizePolicy.Maximum))
            self.setCentralWidget(self.glwidget)
            self.setWindowTitle(windowinfo.name)
            self.glwidget.name = windowinfo.name
            #movie-saving state, driven by movie_timer
            self.saving_movie = False
            self.movie_timer = QTimer(self)
            self.movie_timer.timeout.connect(self.movie_update)
            self.movie_frame = 0
            self.movie_time_last = 0
            #HTML-animation-saving state, driven by html_timer
            self.saving_html = False
            self.html_saver = None
            self.html_start_time = 0
            self.html_timer = QTimer(self)
            self.html_timer.timeout.connect(self.html_update)
            #TODO: for action-free programs, don't add this... but this has to be detected after initializeGL()?
            mainMenu = self.menuBar()
            fileMenu = mainMenu.addMenu('&Actions')
            self.glwidget.actionMenu = fileMenu
            visMenu = mainMenu.addMenu('&Visualization')
            a = QtGui.QAction('Save world...', self)
            a.setStatusTip('Saves world to xml file')
            a.triggered.connect(self.save_world)
            visMenu.addAction(a)
            a = QtGui.QAction('Add to world...', self)
            a.setStatusTip('Adds an item to the world')
            a.triggered.connect(self.add_to_world)
            visMenu.addAction(a)
            a = QtGui.QAction('Save camera...', self)
            a.setStatusTip('Saves camera settings')
            a.triggered.connect(self.save_camera)
            visMenu.addAction(a)
            a = QtGui.QAction('Load camera...', self)
            a.setStatusTip('Loads camera settings')
            a.triggered.connect(self.load_camera)
            visMenu.addAction(a)
            a = QtGui.QAction('Start/stop movie output', self)
            a.setShortcut('Ctrl+M')
            a.setStatusTip('Starts / stops saving movie frames')
            a.triggered.connect(self.toggle_movie_mode)
            visMenu.addAction(a)
            a = QtGui.QAction('Start/stop html output', self)
            a.setShortcut('Ctrl+H')
            a.setStatusTip('Starts / stops saving animation to HTML file')
            a.triggered.connect(self.toggle_html_mode)
            visMenu.addAction(a)
        def getWorld(self):
            """Returns the world used by any of the GL program's plugins, or None."""
            if not hasattr(self.glwidget.program,'plugins'):
                return None
            for p in self.glwidget.program.plugins:
                if hasattr(p,'world'):
                    return p.world
                elif isinstance(p,VisualizationPlugin):
                    world = p.items.get('world',None)
                    if world != None: return world.item
            return None
        def getSimulator(self):
            """Returns the simulator used by any of the GL program's plugins, or None."""
            if not hasattr(self.glwidget.program,'plugins'):
                return None
            for p in self.glwidget.program.plugins:
                if hasattr(p,'sim'):
                    return p.sim
            return None
        def save_camera(self):
            """Saves the current viewport settings to a user-selected .txt file."""
            if not hasattr(self.glwidget.program,'get_view'):
                print "Program does not appear to have a camera"
                return
            v = self.glwidget.program.get_view()
            fn = QFileDialog.getSaveFileName(caption="Viewport file (*.txt)",filter="Viewport file (*.txt);;All files (*.*)")
            if fn is None:
                return
            f = open(str(fn),'w')
            f.write("VIEWPORT\n")
            f.write("FRAME %d %d %d %d\n"%(v.x,v.y,v.w,v.h))
            f.write("PERSPECTIVE 1\n")
            aspect = float(v.w)/float(v.h)
            rfov = v.fov*math.pi/180.0
            scale = 1.0/(2.0*math.tan(rfov*0.5/aspect)*aspect)
            f.write("SCALE %f\n"%(scale,))
            f.write("NEARPLANE %f\n"%(v.clippingplanes[0],))
            #NOTE(review): FARPLANE is written from clippingplanes[0], same as
            #NEARPLANE; this looks like it should be clippingplanes[1] -- verify
            f.write("FARPLANE %f\n"%(v.clippingplanes[0],))
            f.write("CAMTRANSFORM ")
            mat = se3.homogeneous(v.camera.matrix())
            f.write(' '.join(str(v) for v in sum(mat,[])))
            f.write('\n')
            f.write("ORBITDIST %f\n"%(v.camera.dist,))
            f.close()
        def load_camera(self):
            """Not implemented yet."""
            print "TODO"
        def save_world(self):
            """Saves the plugin's world to a user-selected XML file."""
            w = self.getWorld()
            if w is None:
                #NOTE(review): no 'return' here; w.saveFile below would raise
                #AttributeError when there is no world -- verify intent
                print "Program does not appear to have a world"
            fn = QFileDialog.getSaveFileName(caption="World file (elements will be saved to folder)",filter="World file (*.xml);;All files (*.*)")
            if fn != None:
                w.saveFile(str(fn))
                print "Saved to",fn,"and elements were saved to a directory of the same name."
        def add_to_world(self):
            """Loads a user-selected element file (robot/object/terrain) into the world."""
            w = self.getWorld()
            if w is None:
                #NOTE(review): no 'return' here either -- verify intent
                print "Program does not appear to have a world"
            fn = QFileDialog.getOpenFileName(caption="World element",filter="Robot file (*.rob *.urdf);;Object file (*.obj);;Terrain file (*.env *.off *.obj *.stl *.wrl);;All files (*.*)")
            if fn != None:
                w.loadElement(str(fn))
                #notify the visualization plugins of the changed world
                for p in self.glwidget.program.plugins:
                    if isinstance(p,VisualizationPlugin):
                        p.getItem('world').setItem(w)
        def toggle_movie_mode(self):
            """Starts/stops saving movie frames; on stop, offers an ffmpeg command."""
            self.saving_movie = not self.saving_movie
            if self.saving_movie:
                self.movie_timer.start(33)
                sim = self.getSimulator()
                if sim != None:
                    self.movie_time_last = sim.getTime()
            else:
                self.movie_timer.stop()
                dlg = QtGui.QInputDialog(self)
                dlg.setInputMode( QtGui.QInputDialog.TextInput)
                dlg.setLabelText("Command")
                dlg.setTextValue('ffmpeg -y -f image2 -i image%04d.png klampt_record.mp4')
                dlg.resize(500,100)
                ok = dlg.exec_()
                cmd = dlg.textValue()
                #(cmd,ok) = QtGui.QInputDialog.getText(self,"Process with ffmpeg?","Command", text='ffmpeg -y -f image2 -i image%04d.png klampt_record.mp4')
                if ok:
                    import os,glob
                    os.system(str(cmd))
                    print "Removing temporary files"
                    for fn in glob.glob('image*.png'):
                        os.remove(fn)
        def movie_update(self):
            """Timer callback: saves screenshots at 30fps of sim (or wall) time."""
            sim = self.getSimulator()
            if sim != None:
                #catch up to the simulation clock, one frame per 1/30s
                while sim.getTime() >= self.movie_time_last + 1.0/30.0:
                    self.glwidget.program.save_screen('image%04d.png'%(self.movie_frame))
                    self.movie_frame += 1
                    self.movie_time_last += 1.0/30.0
            else:
                self.glwidget.program.save_screen('image%04d.png'%(self.movie_frame))
                self.movie_frame += 1
        def toggle_html_mode(self):
            """Starts/stops saving the animation to a user-selected HTML file."""
            self.saving_html = not self.saving_html
            if self.saving_html:
                #prefer recording the simulation; fall back to the world
                world = self.getSimulator()
                if world is None:
                    world = self.getWorld()
                if world is None:
                    print "There is no world in the current plugin, can't save"
                    self.saving_html = False
                    return
                fn = QFileDialog.getSaveFileName(caption="Save path HTML file to...",filter="HTML file (*.html);;All files (*.*)")
                if fn is None:
                    self.saving_html = False
                    return
                from ..io import html
                self.html_start_time = time.time()
                self.html_saver = html.HTMLSharePath(fn)
                self.html_saver.dt = 0.033;
                self.html_saver.start(world)
                self.html_timer.start(33)
            else:
                self.html_saver.end()
                self.html_timer.stop()
        def html_update(self):
            """Timer callback: appends the next frame to the HTML animation."""
            t = None
            if self.html_saver.sim == None:
                #t = time.time()-self.html_start_time
                t = self.html_saver.last_t + 0.034
            #NOTE(review): when a simulator is present, t stays None here;
            #presumably the saver uses the sim clock in that case -- verify
            self.html_saver.animate(t)
        def closeEvent(self,event):
            """Hides (rather than destroys) the GL window and stops any capture."""
            global _globalLock
            _globalLock.acquire()
            self.windowinfo.glwindow.hide()
            self.windowinfo.mode = 'hidden'
            self.windowinfo.glwindow.idlesleep()
            self.windowinfo.glwindow.setParent(None)
            if self.saving_movie:
                self.toggle_movie_mode()
            if self.saving_html:
                self.toggle_html_mode()
            print "#########################################"
            print "klampt.vis: Window close"
            print "#########################################"
            _globalLock.release()
    def _run_app_thread():
        """Main loop of the Qt visualization thread.

        Polls the WindowInfo records, creating/showing/hiding GL windows and
        dialogs as their 'mode' fields change, until _quit is set.  Returns
        the result of the last dialog, if any.
        """
        global _thread_running,_vis,_widget,_window,_quit,_showdialog,_showwindow,_globalLock
        _thread_running = True
        _GLBackend.initialize("Klamp't visualization")
        res = None
        while not _quit:
            _globalLock.acquire()
            for i,w in enumerate(_windows):
                if w.glwindow == None and w.mode != 'hidden':
                    print "vis: creating GL window"
                    w.glwindow = _GLBackend.createWindow(w.name)
                    w.glwindow.setProgram(w.frontend)
                    w.glwindow.setParent(None)
                    w.glwindow.refresh()
                if w.doRefresh:
                    if w.mode != 'hidden':
                        w.glwindow.updateGL()
                    w.doRefresh = False
                if w.doReload and w.glwindow != None:
                    #the frontend was swapped out; rebind it to the GL window
                    w.glwindow.setProgram(w.frontend)
                    if w.guidata:
                        w.guidata.setWindowTitle(w.name)
                        w.guidata.glwidget = w.glwindow
                        w.guidata.setCentralWidget(w.glwindow)
                    w.doReload = False
                if w.mode == 'dialog':
                    print "#########################################"
                    print "klampt.vis: Dialog on window",i
                    print "#########################################"
                    if w.custom_ui == None:
                        dlg = _MyDialog(w)
                    else:
                        dlg = w.custom_ui(w.glwindow)
                    #need to cache the bastards to avoid deleting the GL object. Not sure why it's being kept around.
                    #alldlgs.append(dlg)
                    #here's the crash -- above line deleted the old dialog, which for some reason kills the widget
                    if dlg != None:
                        w.glwindow.show()
                        w.glwindow.idlesleep(0)
                        w.glwindow.refresh()
                        w.glwindow.refresh()
                        #release the lock while the modal dialog blocks
                        _globalLock.release()
                        res = dlg.exec_()
                        _globalLock.acquire()
                    print "#########################################"
                    print "klampt.vis: Dialog done on window",i
                    print "#########################################"
                    w.glwindow.hide()
                    w.glwindow.setParent(None)
                    w.glwindow.idlesleep()
                    w.mode = 'hidden'
                if w.mode == 'shown' and w.guidata == None:
                    print "#########################################"
                    print "klampt.vis: Making window",i
                    print "#########################################"
                    if w.custom_ui == None:
                        w.guidata = _MyWindow(w)
                    else:
                        w.guidata = w.custom_ui(w.glwindow)
                    w.glwindow.show()
                    w.glwindow.idlesleep(0)
                if w.mode == 'shown' and not w.guidata.isVisible():
                    print "#########################################"
                    print "klampt.vis: Showing window",i
                    print "#########################################"
                    w.glwindow.show()
                    w.glwindow.setParent(w.guidata)
                    w.glwindow.idlesleep(0)
                    w.guidata.show()
                if w.mode == 'hidden' and w.guidata != None:
                    if w.guidata.isVisible():
                        print "#########################################"
                        print "klampt.vis: Hiding window",i
                        print "#########################################"
                        w.glwindow.setParent(None)
                        w.glwindow.idlesleep()
                        w.glwindow.hide()
                        w.guidata.hide()
                    #prevent deleting the GL window
                    w.glwindow.setParent(None)
                    w.guidata = None
            _globalLock.release()
            _GLBackend.app.processEvents()
            time.sleep(0.001)
        print "Visualization thread closing..."
        for w in _windows:
            w.vis.clear()
            if w.glwindow:
                w.glwindow.close()
        _thread_running = False
        return res
elif _GLUTAvailable:
    #warn that the GLUT fallback is much more limited than the Qt interface
    print "klampt.visualization: QT is not available, falling back to poorer"
    print "GLUT interface. Returning to another GLUT thread will not work"
    print "properly."
    print ""
    class GLUTHijacker(GLPluginProgram):
        """Takes over the single GLUT window, forwarding calls to a frontend.

        GLUT cannot create and destroy windows on the fly, so this program
        wraps the one window and emulates the shown/hidden/dialog modes.
        """
        def __init__(self,windowinfo):
            GLPluginProgram.__init__(self)
            self.windowinfo = windowinfo
            self.name = windowinfo.name
            self.view = windowinfo.frontend.view
            self.clearColor = windowinfo.frontend.clearColor
            self.actions = windowinfo.frontend.actions
            self.frontend = windowinfo.frontend
            self.inDialog = False
            self.hidden = False
        def initialize(self):
            """Initializes the wrapped frontend against this program's window."""
            self.frontend.window = self.window
            if not self.frontend.initialize(): return False
            GLPluginProgram.initialize(self)
            return True
        def display(self):
            """Delegates drawing to the frontend under the global lock."""
            global _globalLock
            _globalLock.acquire()
            self.frontend.display()
            _globalLock.release()
            return True
        def display_screen(self):
            """Draws the frontend's screen overlay plus mode instructions."""
            global _globalLock
            _globalLock.acquire()
            self.frontend.display_screen()
            glColor3f(1,1,1)
            glRasterPos(20,50)
            gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"(Do not close this window except to quit)")
            if self.inDialog:
                glColor3f(1,1,0)
                glRasterPos(20,80)
                gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"In Dialog mode. Press 'Esc' to return to normal mode")
            else:
                glColor3f(1,1,0)
                glRasterPos(20,80)
                gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"In Window mode. Press 'Esc' to hide window")
            _globalLock.release()
        def keyboardfunc(self,c,x,y):
            """Esc hides the window / leaves dialog mode; other keys pass through."""
            if ord(c)==27:
                if self.inDialog:
                    print "Esc pressed, hiding dialog"
                    self.inDialog = False
                else:
                    print "Esc pressed, hiding window"
                global _globalLock
                _globalLock.acquire()
                self.windowinfo.mode = 'hidden'
                self.hidden = True
                glutHideWindow()
                _globalLock.release()
                return True
            else:
                return self.frontend.keyboardfunc(c,x,y)
        def keyboardupfunc(self,c,x,y):
            return self.frontend.keyboardupfunc(c,x,y)
        def motionfunc(self,x,y,dx,dy):
            return self.frontend.motionfunc(x,y,dx,dy)
        def mousefunc(self,button,state,x,y):
            return self.frontend.mousefunc(button,state,x,y)
        def idlefunc(self):
            """Handles quit requests and re-shows the window when its mode changes."""
            global _quit,_showdialog
            global _globalLock
            _globalLock.acquire()
            if _quit:
                if bool(glutLeaveMainLoop):
                    glutLeaveMainLoop()
                else:
                    print "Not compiled with freeglut, can't exit main loop safely. Press Ctrl+C instead"
                    raw_input()
            if self.hidden:
                print "hidden, waiting...",self.windowinfo.mode
                if self.windowinfo.mode == 'shown':
                    print "Showing window"
                    glutSetWindow(self.window.glutWindowID)
                    glutShowWindow()
                    self.hidden = False
                elif self.windowinfo.mode == 'dialog':
                    print "Showing window in dialog mode"
                    self.inDialog = True
                    glutSetWindow(self.window.glutWindowID)
                    glutShowWindow()
                    self.hidden = False
            _globalLock.release()
            return self.frontend.idlefunc()
    def _run_app_thread():
        """Main loop of the GLUT visualization thread.

        Hijacks window 0 with a GLUTHijacker and runs the GLUT main loop
        until it exits.
        """
        global _thread_running,_vis,_old_glut_window,_quit,_windows
        import weakref
        _thread_running = True
        _GLBackend.initialize("Klamp't visualization")
        w = _GLBackend.createWindow("Klamp't visualization")
        hijacker = GLUTHijacker(_windows[0])
        #store only a weak proxy in guidata
        _windows[0].guidata = weakref.proxy(hijacker)
        w.setProgram(hijacker)
        _GLBackend.run()
        print "Visualization thread closing..."
        for w in _windows:
            w.vis.clear()
        _thread_running = False
        return
def _kill():
    """Signals the visualization thread to quit and blocks until it exits."""
    global _quit
    _quit = True
    while _thread_running:
        time.sleep(0.01)
    #reset so the thread can be restarted later
    _quit = False
if _PyQtAvailable:
    from PyQt4 import QtCore
    class MyQThread(QtCore.QThread):
        """A QThread that runs an arbitrary function with the given arguments."""
        def __init__(self,func,*args):
            self.func = func
            self.args = args
            QtCore.QThread.__init__(self)
        def run(self):
            self.func(*self.args)
def _show():
    """Shows the current window, creating the window record and starting the
    visualization thread if necessary."""
    global _windows,_current_window,_thread_running
    if len(_windows)==0:
        _windows.append(WindowInfo(_window_title,_frontend,_vis))
        _current_window = 0
    _windows[_current_window].mode = 'shown'
    _windows[_current_window].worlds = _current_worlds
    _windows[_current_window].active_worlds = _current_worlds[:]
    if not _thread_running:
        #restore default Ctrl+C handling so the thread can be interrupted
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        #NOTE(review): the QThread path is deliberately disabled by 'and False'
        if _PyQtAvailable and False:
            #for some reason, QThread doesn't allow for mouse events to be posted?
            thread = MyQThread(_run_app_thread)
            thread.start()
        else:
            thread = Thread(target=_run_app_thread)
            thread.setDaemon(True)
            thread.start()
        #give the thread a moment to start up
        time.sleep(0.1)
def _hide():
    """Hides the current window, if one exists."""
    global _windows,_current_window,_thread_running
    if _current_window is None:
        return
    _windows[_current_window].mode = 'hidden'
def _dialog():
    """Shows the current window in blocking dialog mode.

    Starts the visualization thread if needed, then blocks until the user
    dismisses the dialog (the window's mode leaves 'dialog').
    """
    #fix: was 'global __windows' (double underscore), a typo for _windows
    global _windows,_current_window,_thread_running
    if len(_windows)==0:
        _windows.append(WindowInfo(_window_title,_frontend,_vis,None))
        _current_window = 0
    if not _thread_running:
        #restore default Ctrl+C handling so the thread can be interrupted
        signal.signal(signal.SIGINT, signal.SIG_DFL)
        thread = Thread(target=_run_app_thread)
        thread.setDaemon(True)
        thread.start()
        #time.sleep(0.1)
    _globalLock.acquire()
    assert _windows[_current_window].mode == 'hidden',"dialog() called inside dialog?"
    _windows[_current_window].mode = 'dialog'
    _windows[_current_window].worlds = _current_worlds
    _windows[_current_window].active_worlds = _current_worlds[:]
    _globalLock.release()
    #busy-wait until the visualization thread flips the mode back
    while _windows[_current_window].mode == 'dialog':
        time.sleep(0.1)
    return
def _set_custom_ui(func):
    """Sets a custom UI constructor for the current window, creating the
    window record first if none exists."""
    global _windows,_current_window,_thread_running
    if not _windows:
        _windows.append(WindowInfo(_window_title,_frontend,_vis,None))
        _current_window = 0
    _windows[_current_window].custom_ui = func
def _onFrontendChange():
    """Propagates frontend/title changes into the active window record."""
    global _windows,_frontend,_window_title,_current_window,_thread_running
    if _current_window is None:
        return
    winrec = _windows[_current_window]
    winrec.doReload = True
    winrec.name = _window_title
    winrec.frontend = _frontend
    if winrec.glwindow:
        winrec.glwindow.reshape(_frontend.view.w,_frontend.view.h)
    if winrec.guidata and not _PyQtAvailable:
        #the GLUT hijacker holds a direct reference to the frontend
        winrec.guidata.frontend = _frontend
        _frontend.window = winrec.guidata.window
def _refreshDisplayLists(item):
    """Recursively marks the appearance of item and all its sub-objects for refresh."""
    if isinstance(item,WorldModel):
        for idx in xrange(item.numRobots()):
            _refreshDisplayLists(item.robot(idx))
        for idx in xrange(item.numRigidObjects()):
            _refreshDisplayLists(item.rigidObject(idx))
        for idx in xrange(item.numTerrains()):
            _refreshDisplayLists(item.terrain(idx))
    elif isinstance(item,RobotModel):
        for idx in xrange(item.numLinks()):
            _refreshDisplayLists(item.link(idx))
    elif hasattr(item,'appearance'):
        item.appearance().refresh(False)
def _checkWindowCurrent(item):
    """Ensures the given world (or object carrying a .world) is registered
    with the current window.

    If the world was previously shown in another window and PyQt is not
    available, its display lists are refreshed, since GLUT cannot share
    display lists across windows.
    """
    global _windows,_current_window,_world_to_window,_current_worlds
    if isinstance(item,int):
        if not all(w.index != item for w in _current_worlds):
            print "klampt.vis: item appears to be in a new world, but doesn't have a full WorldModel instance"
    if isinstance(item,WorldModel):
        #print "Worlds active in current window",_current_window,":",[w().index for w in _current_worlds]
        if all(item != w() for w in _current_worlds):
            #PyQt interface allows sharing display lists but GLUT does not.
            #refresh all worlds' display lists that will be shifted to the current window.
            for i,win in enumerate(_windows):
                #print "Window",i,"active worlds",[w().index for w in win.active_worlds]
                if any(item == w() for w in win.active_worlds):
                    if not _PyQtAvailable:
                        print "klampt.vis: world",item.index,"was shown in a different window, now refreshing display lists"
                        _refreshDisplayLists(item)
                    win.active_worlds.remove(weakref.ref(item))
            _current_worlds.append(weakref.ref(item))
            #print "klampt.vis: world added to the visualization's world (items:",[w().index for w in _current_worlds],")"
        #else:
        #    print "klampt.vis: world",item,"is already in the current window's world"
    elif hasattr(item,'world'):
        #recurse into the object's owning world
        _checkWindowCurrent(item.world)
| hpbader42/Klampt | Python/klampt/vis/visualization.py | Python | bsd-3-clause | 123,948 |
# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''visualize morphologies'''
from matplotlib.collections import LineCollection, PolyCollection
from matplotlib.patches import Circle
from mpl_toolkits.mplot3d.art3d import \
Line3DCollection # pylint: disable=relative-import
import numpy as np
from neurom import NeuriteType, geom
from neurom._compat import zip
from neurom.core import iter_neurites, iter_segments
from neurom.core._soma import SomaCylinders
from neurom.core.dataformat import COLS
from neurom.core.types import tree_type_checker
from neurom.morphmath import segment_radius
from neurom.view._dendrogram import Dendrogram
from . import common
#default drawing parameters
_LINEWIDTH = 1.2
_ALPHA = 0.8
_DIAMETER_SCALE = 1.0

#default plot color per neurite type
TREE_COLOR = {NeuriteType.basal_dendrite: 'red',
              NeuriteType.apical_dendrite: 'purple',
              NeuriteType.axon: 'blue',
              NeuriteType.soma: 'black',
              NeuriteType.undefined: 'green'}
def _plane2col(plane):
'''take a string like 'xy', and return the indices from COLS.*'''
planes = ('xy', 'yx', 'xz', 'zx', 'yz', 'zy')
assert plane in planes, 'No such plane found! Please select one of: ' + str(planes)
return (getattr(COLS, plane[0].capitalize()),
getattr(COLS, plane[1].capitalize()), )
def _get_linewidth(tree, linewidth, diameter_scale):
'''calculate the desired linewidth based on tree contents
If diameter_scale exists, it is used to scale the diameter of each of the segments
in the tree
If diameter_scale is None, the linewidth is used.
'''
if diameter_scale is not None and tree:
linewidth = [2 * segment_radius(s) * diameter_scale
for s in iter_segments(tree)]
return linewidth
def _get_color(treecolor, tree_type):
"""if treecolor set, it's returned, otherwise tree_type is used to return set colors"""
if treecolor is not None:
return treecolor
return TREE_COLOR.get(tree_type, 'green')
def plot_tree(ax, tree, plane='xy',
              diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
              color=None, alpha=_ALPHA):
    '''Draws the tree's segments as a 2d LineCollection on ax.

    Args:
        ax(matplotlib axes): on what to plot
        tree(neurom.core.Tree or neurom.core.Neurite): plotted tree
        plane(str): Any pair of 'xyz'
        diameter_scale(float): Scale factor multiplied with segment diameters before plotting
        linewidth(float): all segments are plotted with this width, but only if diameter_scale=None
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values

    Note:
        A tree consisting of a single point yields no segments, so the plot
        will be empty.
    '''
    ind0, ind1 = _plane2col(plane)
    segments = [((seg[0][ind0], seg[0][ind1]),
                 (seg[1][ind0], seg[1][ind1]))
                for seg in iter_segments(tree)]
    widths = _get_linewidth(tree, diameter_scale=diameter_scale, linewidth=linewidth)
    ax.add_collection(LineCollection(segments,
                                     color=_get_color(color, tree.type),
                                     linewidth=widths,
                                     alpha=alpha))
def plot_soma(ax, soma, plane='xy',
              soma_outline=True,
              linewidth=_LINEWIDTH,
              color=None, alpha=_ALPHA):
    '''Generates a 2d figure of the soma.

    Args:
        ax(matplotlib axes): on what to plot
        soma(neurom.core.Soma): plotted soma
        plane(str): Any pair of 'xyz'
        soma_outline(bool): should the soma be drawn as an outline
        linewidth(float): width used when drawing the soma point outline
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    plane0, plane1 = _plane2col(plane)
    color = _get_color(color, tree_type=NeuriteType.soma)
    if isinstance(soma, SomaCylinders):
        plane0, plane1 = _plane2col(plane)
        #draw each consecutive pair of soma points as a projected cylinder
        for start, end in zip(soma.points, soma.points[1:]):
            common.project_cylinder_onto_2d(ax, (plane0, plane1),
                                            start=start[COLS.XYZ], end=end[COLS.XYZ],
                                            start_radius=start[COLS.R], end_radius=end[COLS.R],
                                            color=color, alpha=alpha)
    else:
        if soma_outline:
            ax.add_artist(Circle(soma.center, soma.radius, color=color, alpha=alpha))
        else:
            plane0, plane1 = _plane2col(plane)
            points = [(p[plane0], p[plane1]) for p in soma.iter()]
            if points:
                points.append(points[0]) # close the loop
                #NOTE(review): ax.plot is given the (x,y) pair list as one
                #positional argument; matplotlib interprets a single 2-D
                #array column-wise, so this may not trace the outline as
                #intended -- verify
                ax.plot(points, color=color, alpha=alpha, linewidth=linewidth)

    ax.set_xlabel(plane[0])
    ax.set_ylabel(plane[1])
    #collections/artists don't update dataLim; push the soma bounds manually
    bounding_box = geom.bounding_box(soma)
    ax.dataLim.update_from_data_xy(np.vstack(([bounding_box[0][plane0], bounding_box[0][plane1]],
                                              [bounding_box[1][plane0], bounding_box[1][plane1]])),
                                   ignore=False)
# pylint: disable=too-many-arguments
def plot_neuron(ax, nrn,
                neurite_type=NeuriteType.all,
                plane='xy',
                soma_outline=True,
                diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
                color=None, alpha=_ALPHA):
    '''Draws a 2D view of a neuron: its soma plus every selected neurite.

    Args:
        ax(matplotlib axes): on what to plot
        nrn(neuron): neuron to be plotted
        neurite_type(NeuriteType): an optional filter on the neurite type
        plane(str): Any pair of 'xyz'
        soma_outline(bool): should the soma be drawn as an outline
        diameter_scale(float): Scale factor multiplied with segment diameters before plotting
        linewidth(float): all segments are plotted with this width, but only if diameter_scale=None
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    plot_soma(ax, nrn.soma, plane=plane, soma_outline=soma_outline, linewidth=linewidth,
              color=color, alpha=alpha)
    selector = tree_type_checker(neurite_type)
    for neurite in iter_neurites(nrn, filt=selector):
        plot_tree(ax, neurite, plane=plane,
                  diameter_scale=diameter_scale, linewidth=linewidth,
                  color=color, alpha=alpha)
    ax.set_title(nrn.name)
    ax.set_xlabel(plane[0])
    ax.set_ylabel(plane[1])
def _update_3d_datalim(ax, obj):
    '''unlike w/ 2d Axes, the dataLim isn't set by collections, so it has to be updated manually'''
    bb_min, bb_max = geom.bounding_box(obj)
    #x/y extents come straight from the bounding box corners
    ax.xy_dataLim.update_from_data_xy(
        np.vstack((bb_min[:COLS.Z], bb_max[:COLS.Z])), ignore=False)
    #z extent is stored as a degenerate 2-column array
    z_extent = np.vstack(((bb_min[COLS.Z], bb_min[COLS.Z]),
                          (bb_max[COLS.Z], bb_max[COLS.Z])))
    ax.zz_dataLim.update_from_data_xy(z_extent, ignore=False)
def plot_tree3d(ax, tree,
                diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
                color=None, alpha=_ALPHA):
    '''Draws the tree's segments as a Line3DCollection on a 3d axes.

    Args:
        ax(matplotlib axes): on what to plot
        tree(neurom.core.Tree or neurom.core.Neurite): plotted tree
        diameter_scale(float): Scale factor multiplied with segment diameters before plotting
        linewidth(float): all segments are plotted with this width, but only if diameter_scale=None
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values

    Note:
        A tree consisting of a single point yields no segments, so the plot
        will be empty.
    '''
    segments = [(seg[0][COLS.XYZ], seg[1][COLS.XYZ]) for seg in iter_segments(tree)]
    widths = _get_linewidth(tree, diameter_scale=diameter_scale, linewidth=linewidth)
    ax.add_collection3d(Line3DCollection(segments,
                                         color=_get_color(color, tree.type),
                                         linewidth=widths,
                                         alpha=alpha))
    #3d collections don't update the data limits; do it by hand
    _update_3d_datalim(ax, tree)
def plot_soma3d(ax, soma, color=None, alpha=_ALPHA):
    '''Draw the soma on a 3d axis.

    Args:
        ax(matplotlib axes): on what to plot
        soma(neurom.core.Soma): plotted soma
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    soma_color = _get_color(color, tree_type=NeuriteType.soma)
    if not isinstance(soma, SomaCylinders):
        common.plot_sphere(ax, center=soma.center[COLS.XYZ], radius=soma.radius,
                           color=soma_color, alpha=alpha)
    else:
        # a cylindrical soma is rendered as one cylinder per consecutive
        # pair of its points
        for begin, finish in zip(soma.points, soma.points[1:]):
            common.plot_cylinder(ax,
                                 start=begin[COLS.XYZ], end=finish[COLS.XYZ],
                                 start_radius=begin[COLS.R],
                                 end_radius=finish[COLS.R],
                                 color=soma_color, alpha=alpha)
    # unlike w/ 2d Axes, the dataLim isn't set by collections, so it has to be
    # updated manually
    _update_3d_datalim(ax, soma)
def plot_neuron3d(ax, nrn, neurite_type=NeuriteType.all,
                  diameter_scale=_DIAMETER_SCALE, linewidth=_LINEWIDTH,
                  color=None, alpha=_ALPHA):
    '''Draw a whole neuron -- its soma and all (optionally filtered) neurites
    -- on a 3d axis.

    Args:
        ax(matplotlib axes): on what to plot
        nrn(neuron): neuron to be plotted
        neurite_type(NeuriteType): an optional filter on the neurite type
        diameter_scale(float): Scale factor multiplied with segment diameters before plotting
        linewidth(float): all segments are plotted with this width, but only if diameter_scale=None
        color(str or None): Color of plotted values, None corresponds to default choice
        alpha(float): Transparency of plotted values
    '''
    plot_soma3d(ax, nrn.soma, color=color, alpha=alpha)
    selected_neurites = iter_neurites(nrn, filt=tree_type_checker(neurite_type))
    for subtree in selected_neurites:
        plot_tree3d(ax, subtree,
                    diameter_scale=diameter_scale, linewidth=linewidth,
                    color=color, alpha=alpha)
    ax.set_title(nrn.name)
def _generate_collection(group, ax, ctype, colors):
    '''Add a polygon collection for one neurite type and register its
    legend entry (at most once per color).'''
    face_color = TREE_COLOR[ctype]
    # generate and attach the segment collection
    ax.add_collection(PolyCollection(group, closed=False, antialiaseds=True,
                                     edgecolors='face',
                                     facecolors=face_color))
    if face_color not in colors:
        # dummy zero-length line so the legend shows this neurite type
        colors.add(face_color)
        pretty_name = str(ctype).replace('NeuriteType.', '')
        pretty_name = pretty_name.replace('_', ' ').capitalize()
        ax.plot((0., 0.), (0., 0.), c=face_color, label=pretty_name)
def _render_dendrogram(dnd, ax, displacement):
    '''Renders dendrogram

    Draws each neurite of the dendrogram `dnd` on `ax`, shifting successive
    neurites horizontally so they do not overlap, then draws the soma (if
    any) centered under the neurites.

    Returns:
        The final horizontal displacement (total width used), so callers
        can position further drawing.
    '''
    # set of unique colors that reflect the set of types of the neurites
    colors = set()
    for n, (indices, ctype) in enumerate(zip(dnd.groups, dnd.types)):
        # slice rectangles array for the current neurite
        group = dnd.data[indices[0]:indices[1]]
        if n > 0:
            # displace the neurites by half of their maximum x dimension
            # plus half of the previous neurite's maximum x dimension
            displacement += 0.5 * (dnd.dims[n - 1][0] + dnd.dims[n][0])
        # arrange the trees without overlapping with each other
        group += (displacement, 0.)
        # create the polygonal collection of the dendrogram
        # segments
        _generate_collection(group, ax, ctype, colors)
    soma_square = dnd.soma
    if soma_square is not None:
        # center the soma under the rendered neurites and draw a horizontal
        # baseline spanning the full displacement
        _generate_collection((soma_square + (displacement / 2., 0.),), ax, NeuriteType.soma, colors)
        ax.plot((displacement / 2., displacement), (0., 0.), color='k')
        ax.plot((0., displacement / 2.), (0., 0.), color='k')
    return displacement
def plot_dendrogram(ax, obj, show_diameters=True):
    '''Draw the dendrogram of `obj` on `ax`.

    Args:
        obj: Neuron or tree \
        neurom.Neuron, neurom.Tree
        show_diameters : boolean \
        Determines if node diameters will \
        be show or not.
    '''
    # build the dendrogram's rectangle collection
    dendro = Dendrogram(obj, show_diameters=show_diameters)
    dendro.generate()
    # Render with an initial displacement of zero; _render_dendrogram shifts
    # each neurite so they do not overlap and the figure limits stay correct.
    _render_dendrogram(dendro, ax, 0.)
    ax.set_title('Morphology Dendrogram')
    ax.set_xlabel('micrometers (um)')
    ax.set_ylabel('micrometers (um)')
    ax.set_aspect('auto')
    ax.legend()
| lidakanari/NeuroM | neurom/view/view.py | Python | bsd-3-clause | 14,676 |
# Copyright 2017 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
def CheckChangeOnCommit(input_api, output_api):
    """Presubmit hook: run the release-script unit tests on commit."""
    unit_tests = input_api.canned_checks.GetUnitTestsInDirectory(
        input_api, output_api, '.', files_to_check=['test_scripts.py$'])
    return input_api.RunTests(unit_tests)
| youtube/cobalt | third_party/v8/tools/release/PRESUBMIT.py | Python | bsd-3-clause | 378 |
##-*-coding: utf-8 -*-
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Usage(models.Model):
    """API request log entry: who called what, with which parameters."""
    ip = models.CharField(max_length=50)  # client IP address
    method = models.CharField(max_length=3)  # HTTP verb, e.g. GET
    path = models.CharField(max_length=100)  # request path
    params = models.CharField(max_length=255)  # serialized request parameters
    def __str__(self):
        return self.ip
@python_2_unicode_compatible
class Element(models.Model):
    """Element of traditional Thai medicine (Thai verbose names below)."""
    name = models.CharField(max_length=10)
    code = models.CharField(max_length=10)  # short lookup code
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "ธาตุ"
        verbose_name_plural = "ธาตุต่างๆ"
        db_table = 'element'
@python_2_unicode_compatible
class Disease(models.Model):
    """A disease/condition that ingredients may heal or aggravate."""
    name = models.CharField(max_length=100, unique=True)
    description = models.CharField(max_length=255, null=True)
    is_congenital = models.BooleanField(default=False)  # congenital vs acquired
    # audit fields
    created_by = models.CharField(max_length=50, null=True)
    created_date = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True, null=True)
    last_modified_by = models.CharField(max_length=30, null=True, blank=True)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "เชื้อโรค"
        verbose_name_plural = "กลุ่มเชื้อโรค"
        db_table = 'disease'
@python_2_unicode_compatible
class Nutrient(models.Model):
    """Nutritional composition record shared by Food and Ingredient.

    All quantities are fixed-point decimals; units are not recorded here
    (presumably grams/milligrams per serving -- TODO confirm against the
    data source).

    Note: the ``@python_2_unicode_compatible`` decorator was missing here,
    unlike every other ``__str__``-bearing model in this module; added for
    consistency (it is a no-op on Python 3).
    """
    water = models.DecimalField(max_digits=14, decimal_places=4)
    protein = models.DecimalField(max_digits=14, decimal_places=4)
    fat = models.DecimalField(max_digits=14, decimal_places=4)
    carbohydrate = models.DecimalField(max_digits=14, decimal_places=4)
    dietary_fiber = models.DecimalField(max_digits=14, decimal_places=4)
    ash = models.DecimalField(max_digits=14, decimal_places=4)
    calcium = models.DecimalField(max_digits=14, decimal_places=4)
    phosphorus = models.DecimalField(max_digits=14, decimal_places=4)
    iron = models.DecimalField(max_digits=14, decimal_places=4)
    retinol = models.DecimalField(max_digits=14, decimal_places=4)
    beta_carotene = models.DecimalField(max_digits=14, decimal_places=4)
    vitamin_a = models.DecimalField(max_digits=14, decimal_places=4)
    vitamin_e = models.DecimalField(max_digits=14, decimal_places=4)
    thiamin = models.DecimalField(max_digits=14, decimal_places=4)
    riboflavin = models.DecimalField(max_digits=14, decimal_places=4)
    niacin = models.DecimalField(max_digits=14, decimal_places=4)
    vitamin_c = models.DecimalField(max_digits=14, decimal_places=4)
    def __str__(self):
        return 'id: ' + str(self._get_pk_val())
    class Meta:
        verbose_name = "สารอาหาร"
        verbose_name_plural = "กลุ่มสารอาหาร"
        db_table = 'nutrient'
@python_2_unicode_compatible
class IngredientCategory(models.Model):
    """Category label for ingredients (e.g. grouping by kind)."""
    name = models.CharField(max_length=50, unique=True)
    # audit fields
    created_by = models.CharField(max_length=50)
    created_date = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True, null=True)
    last_modified_by = models.CharField(max_length=30, null=True, blank=True)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "หมวดหมู่วัตถุดิบ"
        verbose_name_plural = "กลุ่มหมวดหมู่วัตถุดิบ"
        db_table = 'ingredient_type'
@python_2_unicode_compatible
class FoodCategory(models.Model):
    """Category label for foods (dishes)."""
    name = models.CharField(max_length=50, unique=True)
    # audit fields
    created_by = models.CharField(max_length=50)
    created_date = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True, null=True)
    last_modified_by = models.CharField(max_length=30, null=True, blank=True)
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "หมวดหมู่อาหาร"
        verbose_name_plural = "กลุ่มหมวดหมู่อาหาร"
        db_table = 'food_type'
@python_2_unicode_compatible
class Ingredient(models.Model):
    """A raw ingredient, with nutrition, element and disease relations."""
    name = models.CharField(max_length=100, unique=True)
    description = models.CharField(max_length=255, blank=True, null=True)
    calories = models.IntegerField(default=0)
    # nutrition/element records are optional and survive ingredient deletion
    # of the referenced row (SET_NULL)
    nutrient = models.ForeignKey(Nutrient,
                                 on_delete=models.SET_NULL,
                                 blank=True,
                                 null=True)
    element = models.ForeignKey(Element,
                                on_delete=models.SET_NULL,
                                blank=True,
                                null=True)
    category = models.ManyToManyField(IngredientCategory, blank=True)
    # diseases this ingredient is believed to heal vs. aggravate
    healing = models.ManyToManyField(Disease, related_name="healing", blank=True)
    affect = models.ManyToManyField(Disease, related_name="affect", blank=True)
    code = models.IntegerField(default=0)  # external reference code
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "วัตถุดิบ"
        verbose_name_plural = "กลุ่มวัตถุดิบ"
        db_table = 'ingredient'
@python_2_unicode_compatible
class Food(models.Model):
    """A dish, composed of ingredients through the Menu join table."""
    name = models.CharField(max_length=100, unique=True)
    description = models.CharField(max_length=255, blank=True, null=True, default="")
    calories = models.IntegerField(default=0)
    # aggregate nutrition for the whole dish; survives Nutrient deletion
    nutrient = models.ForeignKey(Nutrient,
                                 on_delete=models.SET_NULL,
                                 blank=True,
                                 null=True)
    # ingredient list with per-ingredient weight, via the Menu through-model
    ingredients = models.ManyToManyField(Ingredient, through='Menu')
    category = models.ManyToManyField(FoodCategory)
    # audit fields
    created_by = models.CharField(max_length=50, default="")
    created_date = models.DateTimeField(auto_now_add=True)
    last_modified = models.DateTimeField(auto_now=True, null=True)
    last_modified_by = models.CharField(max_length=30, null=True, blank=True)
    code = models.IntegerField(default=0)  # external reference code
    def __str__(self):
        return self.name
    class Meta:
        verbose_name = "อาหาร"
        verbose_name_plural = "กลุ่มอาหาร"
        db_table = 'food'
class Menu(models.Model):
    """Through-model linking a Food to an Ingredient with a weight."""
    food = models.ForeignKey(Food, on_delete=models.CASCADE)
    ingredient = models.ForeignKey(Ingredient, on_delete=models.CASCADE)
    weight = models.DecimalField(max_digits=14, decimal_places=4)  # amount used
    name = models.CharField(max_length=100, blank=True, default="")
    class Meta:
        db_table = 'menu'
| ohmini/thaifoodapi | thaifood/models.py | Python | bsd-3-clause | 6,666 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art

# Generate and process one synthetic daily series: linear trend, 12-step
# cycle, no transform, zero noise, 20 exogenous variables, no AR terms.
art.process_dataset(N=128, FREQ='D', seed=0, trendtype="LinearTrend",
                    cycle_length=12, transform="None", sigma=0.0,
                    exog_count=20, ar_order=0)
"""A generic class to build line-oriented command interpreters.
Interpreters constructed with this class obey the following conventions:
1. End of file on input is processed as the command 'EOF'.
2. A command is parsed out of each line by collecting the prefix composed
of characters in the identchars member.
3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method
is passed a single argument consisting of the remainder of the line.
4. Typing an empty line repeats the last command. (Actually, it calls the
method `emptyline', which may be overridden in a subclass.)
5. There is a predefined `help' method. Given an argument `topic', it
calls the command `help_topic'. With no arguments, it lists all topics
with defined help_ functions, broken into up to three topics; documented
commands, miscellaneous help topics, and undocumented commands.
6. The command '?' is a synonym for `help'. The command '!' is a synonym
for `shell', if a do_shell method exists.
7. If completion is enabled, completing commands will be done automatically,
and completing of commands args is done by calling complete_foo() with
arguments text, line, begidx, endidx. text is string we are matching
against, all returned matches must begin with it. line is the current
input line (lstripped), begidx and endidx are the beginning and end
indexes of the text being matched, which could be used to provide
different completion depending upon which position the argument is in.
The `default' method may be overridden to intercept commands for which there
is no do_ method.
The `completedefault' method may be overridden to intercept completions for
commands that have no complete_ method.
The data member `self.ruler' sets the character used to draw separator lines
in the help messages. If empty, no ruler line is drawn. It defaults to "=".
If the value of `self.intro' is nonempty when the cmdloop method is called,
it is printed out on interpreter startup. This value may be overridden
via an optional argument to the cmdloop() method.
The data members `self.doc_header', `self.misc_header', and
`self.undoc_header' set the headers used for the help function's
listings of documented functions, miscellaneous topics, and undocumented
functions respectively.
"""
import string, sys
__all__ = ["Cmd"]
PROMPT = '(Cmd) '
IDENTCHARS = string.ascii_letters + string.digits + '_'
class ElCmd:
    """A simple framework for writing line-oriented command interpreters.
    These are often useful for test harnesses, administrative tools, and
    prototypes that will later be wrapped in a more sophisticated interface.
    A Cmd instance or subclass instance is a line-oriented interpreter
    framework. There is no good reason to instantiate Cmd itself; rather,
    it's useful as a superclass of an interpreter class you define yourself
    in order to inherit Cmd's methods and encapsulate action methods.

    NOTE(review): this is an adaptation of the stdlib ``cmd.Cmd`` that reads
    input through the ``editline`` module instead of ``readline``.
    """
    prompt = PROMPT
    identchars = IDENTCHARS
    ruler = '='
    lastcmd = ''
    intro = None
    doc_leader = ""
    doc_header = "Documented commands (type help <topic>):"
    misc_header = "Miscellaneous help topics:"
    undoc_header = "Undocumented commands:"
    nohelp = "*** No help on %s"
    # Unlike stdlib cmd.Cmd (which defaults to True), default to the
    # editline-driven input path set up in __init__.
    use_rawinput = False
    def __init__(self, completekey='tab', stdin=None, stdout=None):
        """Instantiate a line-oriented interpreter framework.
        The optional argument 'completekey' is the readline name of a
        completion key; it defaults to the Tab key. If completekey is
        not None and the readline module is available, command completion
        is done automatically. The optional arguments stdin and stdout
        specify alternate input and output file objects; if not specified,
        sys.stdin and sys.stdout are used.
        """
        if stdin is not None:
            self.stdin = stdin
        else:
            self.stdin = sys.stdin
        if stdout is not None:
            self.stdout = stdout
        else:
            self.stdout = sys.stdout
        self.cmdqueue = []
        self.completekey = completekey
        # Wire up editline-based completion; on import failure the instance
        # has no self.editline, so cmdloop() would fail unless
        # use_rawinput is set to True by the subclass/caller.
        if not self.use_rawinput and self.completekey:
            try:
                import editline
                self.editline = editline.editline("CMD",
                                                  self.stdin, self.stdout, sys.stderr)
                self.editline.rl_completer = self.complete
            except ImportError:
                print("Failed to import editline")
                pass
    def cmdloop(self, intro=None):
        """Repeatedly issue a prompt, accept input, parse an initial prefix
        off the received input, and dispatch to action methods, passing them
        the remainder of the line as argument.
        """
        self.preloop()
        try:
            if intro is not None:
                self.intro = intro
            if self.intro:
                self.stdout.write(str(self.intro)+"\n")
            stop = None
            while not stop:
                if self.cmdqueue:
                    line = self.cmdqueue.pop(0)
                else:
                    if self.use_rawinput:
                        try:
                            line = input(self.prompt)
                        except EOFError:
                            line = 'EOF'
                    else:
                        self.editline.prompt = self.prompt
                        # editline returns an empty string on end-of-file;
                        # map that to the synthetic 'EOF' command
                        line = self.editline.readline()
                        if not len(line):
                            line = 'EOF'
                        else:
                            line = line.rstrip('\r\n')
                line = self.precmd(line)
                stop = self.onecmd(line)
                stop = self.postcmd(stop, line)
            self.postloop()
        finally:
            pass
    def precmd(self, line):
        """Hook method executed just before the command line is
        interpreted, but after the input prompt is generated and issued.
        """
        return line
    def postcmd(self, stop, line):
        """Hook method executed just after a command dispatch is finished."""
        return stop
    def preloop(self):
        """Hook method executed once when the cmdloop() method is called."""
        pass
    def postloop(self):
        """Hook method executed once when the cmdloop() method is about to
        return.
        """
        pass
    def parseline(self, line):
        """Parse the line into a command name and a string containing
        the arguments. Returns a tuple containing (command, args, line).
        'command' and 'args' may be None if the line couldn't be parsed.
        """
        line = line.strip()
        if not line:
            return None, None, line
        elif line[0] == '?':
            # '?' is a synonym for 'help'
            line = 'help ' + line[1:]
        elif line[0] == '!':
            # '!' is a synonym for 'shell', if the subclass provides it
            if hasattr(self, 'do_shell'):
                line = 'shell ' + line[1:]
            else:
                return None, None, line
        i, n = 0, len(line)
        while i < n and line[i] in self.identchars: i = i+1
        cmd, arg = line[:i], line[i:].strip()
        return cmd, arg, line
    def onecmd(self, line):
        """Interpret the argument as though it had been typed in response
        to the prompt.
        This may be overridden, but should not normally need to be;
        see the precmd() and postcmd() methods for useful execution hooks.
        The return value is a flag indicating whether interpretation of
        commands by the interpreter should stop.
        """
        cmd, arg, line = self.parseline(line)
        if not line:
            return self.emptyline()
        if cmd is None:
            return self.default(line)
        self.lastcmd = line
        # NOTE(review): unlike stdlib cmd.Cmd, EOF terminates the whole
        # process here rather than returning a stop flag.
        if line == 'EOF' :
            print("")
            print("Bye")
            sys.exit(0)
        if cmd == '':
            return self.default(line)
        else:
            try:
                func = getattr(self, 'do_' + cmd)
            except AttributeError:
                return self.default(line)
            return func(arg)
    def emptyline(self):
        """Called when an empty line is entered in response to the prompt.
        If this method is not overridden, it repeats the last nonempty
        command entered.
        """
        if self.lastcmd:
            return self.onecmd(self.lastcmd)
    def default(self, line):
        """Called on an input line when the command prefix is not recognized.
        If this method is not overridden, it prints an error message and
        returns.
        """
        self.stdout.write('*** Unknown syntax: %s (%d)\n' % (line,len(line)))
    def completedefault(self, *ignored):
        """Method called to complete an input line when no command-specific
        complete_*() method is available.
        By default, it returns an empty list.
        """
        return []
    def completenames(self, text, *ignored):
        """Return command names (without the 'do_' prefix) starting with text."""
        dotext = 'do_'+text
        return [a[3:] for a in self.get_names() if a.startswith(dotext)]
    def complete(self, text, state):
        """Return the next possible completion for 'text'.
        If a command has not been entered, then complete against command list.
        Otherwise try to call complete_<command> to get list of completions.
        """
        # state == 0 means a fresh completion: compute and cache the match
        # list; subsequent states just index into the cached matches.
        if state == 0:
            origline = self.editline.get_line_buffer()
            line = origline.lstrip()
            stripped = len(origline) - len(line)
            begidx = self.editline.get_begidx() - stripped
            endidx = self.editline.get_endidx() - stripped
            if begidx>0:
                # completing an argument: delegate to complete_<cmd> if any
                cmd, args, foo = self.parseline(line)
                if cmd == '':
                    compfunc = self.completedefault
                else:
                    try:
                        compfunc = getattr(self, 'complete_' + cmd)
                    except AttributeError:
                        compfunc = self.completedefault
            else:
                # completing the command word itself
                compfunc = self.completenames
            self.completion_matches = compfunc(text, line, begidx, endidx)
        try:
            return self.completion_matches[state]
        except IndexError:
            return None
    def get_names(self):
        # This method used to pull in base class attributes
        # at a time dir() didn't do it yet.
        return dir(self.__class__)
    def complete_help(self, *args):
        """Complete 'help' arguments with command names and help_ topics."""
        commands = set(self.completenames(*args))
        topics = set(a[5:] for a in self.get_names()
                     if a.startswith('help_' + args[0]))
        return list(commands | topics)
    def do_help(self, arg):
        'List available commands with "help" or detailed help with "help cmd".'
        if arg:
            # XXX check arg syntax
            try:
                func = getattr(self, 'help_' + arg)
            except AttributeError:
                # no help_<arg> method: fall back to the do_<arg> docstring
                try:
                    doc=getattr(self, 'do_' + arg).__doc__
                    if doc:
                        self.stdout.write("%s\n"%str(doc))
                        return
                except AttributeError:
                    pass
                self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
                return
            func()
        else:
            # no argument: list all commands, split into documented,
            # miscellaneous help topics, and undocumented
            names = self.get_names()
            cmds_doc = []
            cmds_undoc = []
            help = {}
            for name in names:
                if name[:5] == 'help_':
                    help[name[5:]]=1
            names.sort()
            # There can be duplicates if routines overridden
            prevname = ''
            for name in names:
                if name[:3] == 'do_':
                    if name == prevname:
                        continue
                    prevname = name
                    cmd=name[3:]
                    if cmd in help:
                        cmds_doc.append(cmd)
                        del help[cmd]
                    elif getattr(self, name).__doc__:
                        cmds_doc.append(cmd)
                    else:
                        cmds_undoc.append(cmd)
            self.stdout.write("%s\n"%str(self.doc_leader))
            self.print_topics(self.doc_header,   cmds_doc,   15,80)
            self.print_topics(self.misc_header,  list(help.keys()),15,80)
            self.print_topics(self.undoc_header, cmds_undoc, 15,80)
    def print_topics(self, header, cmds, cmdlen, maxcol):
        """Write one help section: a header, an optional ruler, and the
        commands laid out in columns."""
        if cmds:
            self.stdout.write("%s\n"%str(header))
            if self.ruler:
                self.stdout.write("%s\n"%str(self.ruler * len(header)))
            self.columnize(cmds, maxcol-1)
            self.stdout.write("\n")
    def columnize(self, list, displaywidth=80):
        """Display a list of strings as a compact set of columns.
        Each column is only as wide as necessary.
        Columns are separated by two spaces (one was not legible enough).
        """
        if not list:
            self.stdout.write("<empty>\n")
            return
        nonstrings = [i for i in range(len(list))
                      if not isinstance(list[i], str)]
        if nonstrings:
            raise TypeError("list[i] not a string for i in %s"
                            % ", ".join(map(str, nonstrings)))
        size = len(list)
        if size == 1:
            self.stdout.write('%s\n'%str(list[0]))
            return
        # Try every row count from 1 upwards
        for nrows in range(1, len(list)):
            ncols = (size+nrows-1) // nrows
            colwidths = []
            totwidth = -2
            for col in range(ncols):
                colwidth = 0
                for row in range(nrows):
                    i = row + nrows*col
                    if i >= size:
                        break
                    x = list[i]
                    colwidth = max(colwidth, len(x))
                colwidths.append(colwidth)
                totwidth += colwidth + 2
                if totwidth > displaywidth:
                    break
            if totwidth <= displaywidth:
                break
        else:
            # nothing fit: fall back to one entry per row
            nrows = len(list)
            ncols = 1
            colwidths = [0]
        for row in range(nrows):
            texts = []
            for col in range(ncols):
                i = row + nrows*col
                if i >= size:
                    x = ""
                else:
                    x = list[i]
                texts.append(x)
            while texts and not texts[-1]:
                del texts[-1]
            for col in range(len(texts)):
                texts[col] = texts[col].ljust(colwidths[col])
            self.stdout.write("%s\n"%str("  ".join(texts)))
class MyCmd(ElCmd, object):
    # Small demo interpreter exercising the ElCmd framework.
    # Commands deliberately carry no docstrings so they show up in the
    # "undocumented commands" section of help output.

    def do_bleep(self, arg):
        print("bleep!")

    def do_blob(self, arg):
        print("blob!")

    def do_bob(self, arg):
        print("bob!")

    def do_mods(self, arg):
        print(sys.modules.keys())
if __name__ == '__main__':
    # Demo entry point: run an interactive loop with the sample commands.
    mc = MyCmd()
    mc.cmdloop()
| mark-nicholson/python-editline | examples/elCmd.py | Python | bsd-3-clause | 15,015 |
from pymacy.db import get_db
from bson.json_util import dumps
# Pull a bounded sample of "Ni" benchmark documents and dump them to JSON.
db = get_db()

MAX_DOCS = 100  # cap on the number of documents fetched
results = []
for doc in db.benchmark.find({"element": "Ni"}):
    if len(results) >= MAX_DOCS:
        break
    results.append(doc)

# Sanity check: show one document (guarded -- the query may match nothing,
# which previously raised IndexError here).
if results:
    print(results[0])

# bson's dumps handles ObjectId and other BSON types that plain json cannot.
# (Previously the serialized text was bound to a variable named `file`,
# shadowing a builtin name; written directly now.)
with open("Ni.json", 'w') as f:
    f.write(dumps(results))
"""
Generic, configurable scatterplot
"""
import collections
import warnings
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
class PlottingAttribute(object):
    """A visual cue (color, marker, size, ...) assigned to groups of samples.

    Supports ``attribute[group_label]`` lookup of the value to plot for a
    given group.
    """

    __slots__ = 'groupby', 'title', 'palette', 'group_to_attribute'

    def __init__(self, groupby, title, palette, order):
        """Bind a palette of visual values to an ordered set of groups.

        Parameters
        ----------
        groupby : mappable
            A series or dict or list to groupby on the rows of the data
        title : str
            Title of this part of the legend
        palette : list-like
            What to plot for each group
        order : list-like or None
            Group labels aligned with ``palette``; ``None`` means a single
            group, keyed under ``None`` with the first palette entry.
        """
        self.groupby = groupby
        self.title = title
        self.palette = palette
        if order is None:
            # There's only one attribute
            self.group_to_attribute = {None: palette[0]}
        else:
            # there's more than one attribute
            self.group_to_attribute = dict(zip(order, palette))

    def __getitem__(self, item):
        return self.group_to_attribute[item]
class PlotterMixin(object):
    """
    Shared machinery mapping data samples to visual attributes.

    Must be mixed with something that creates the ``self.plot_data``
    attribute (and ``self.data``; NOTE(review): ``self.samples`` is read
    throughout but does not appear to be set by ScatterPlotter's
    establish_data -- verify against the full class).

    Attributes
    ----------
    color : PlottingAttribute
        Per-sample colors, set by ``establish_colors``.
    """
    # Markers that can be filled, in a reasonable order so things that can be
    # confused with each other (e.g. triangles pointing to the left or right)
    # are not next to each other
    filled_markers = (u'o', u'v', u's', u'*', u'h', u'<', u'H', u'x', u'8',
                      u'>', u'D', u'd', u'^')
    linewidth_min, linewidth_max = 0.1, 5
    alpha_min, alpha_max = 0.1, 1
    size_min, size_max = 3, 30
    # grouping order used both for the legend table and draw_symbols
    legend_order = 'color', 'symbol', 'linewidth', 'edgecolor', 'alpha', 'size'
    def establish_colors(self, color, hue, hue_order, palette):
        """Get a list of colors for the main component of the plots."""
        n_colors = None
        current_palette = sns.utils.get_color_cycle()
        color_labels = None
        color_title = None
        if color is not None and palette is not None:
            error = 'Cannot interpret colors to plot when both "color" and ' \
                    '"palette" are specified'
            raise ValueError(error)
        # Force "hue" to be a mappable
        if hue is not None:
            try:
                # Check if "hue" is a column in the data
                color_title = str(hue)
                hue = self.data[hue]
            except (ValueError, KeyError):
                # Hue is already a mappable
                if isinstance(hue, pd.Series):
                    color_title = hue.name
                else:
                    color_title = None
            # This will give the proper number of categories even if there are
            # more categories in "hue_order" than represented in "hue"
            hue_order = sns.utils.categorical_order(hue, hue_order)
            color_labels = hue_order
            hue = pd.Categorical(hue, hue_order)
            n_colors = len(self.plot_data.groupby(hue))
        else:
            if hue_order is not None:
                # Check if "hue_order" specifies rows in the data
                samples_to_plot = self.plot_data.index.intersection(hue_order)
                n_colors = len(samples_to_plot)
                if n_colors > 0:
                    # Different color for every sample (row name)
                    hue = pd.Series(self.plot_data.index,
                                    index=self.plot_data.index)
                else:
                    error = "When 'hue=None' and 'hue_order' is specified, " \
                            "'hue_order' must overlap with the data row " \
                            "names (index)"
                    raise ValueError(error)
            else:
                # Same color for everything
                hue = pd.Series('hue', index=self.plot_data.index)
                n_colors = 1
        if palette is not None:
            colors = sns.color_palette(palette, n_colors=n_colors)
        elif color is not None:
            colors = sns.light_palette(color, n_colors=n_colors)
        else:
            colors = sns.light_palette(current_palette[0],
                                       n_colors=n_colors)
        self.color = PlottingAttribute(hue, color_title, colors, hue_order)
    def _maybe_make_grouper(self, attribute, palette_maker, order=None,
                            func=None, default=None):
        """Create a Series from a single attribute, else make categorical
        Checks if the attribute is in the data provided, or is an external
        mapper
        Parameters
        ----------
        attribute : object
            Either a single item to create into a series, or a series mapping
            each sample to an attribute (e.g. the plotting symbol 'o' or
            linewidth 1)
        palette_maker : function
            Function which takes an integer and creates the appropriate
            palette for the attribute, e.g. shades of grey for edgecolor or
            linearly spaced sizes
        order : list
            The order to create the attributes into
        func : function
            A function which returns true if the attribute is a single valid
            instance, e.g. "black" for color or 0.1 for linewidth. Otherwise,
            we assume that "attribute" is a mappable
        Returns
        -------
        grouper : pandas.Series
            A mapping of the high dimensional data samples to the attribute
        """
        title = None
        if func is None or func(attribute):
            # Use this single attribute for everything
            return PlottingAttribute(pd.Series(None, index=self.samples),
                                     title, (attribute,), order)
        else:
            try:
                # Check if this is a column in the data
                attribute = self.data[attribute]
            except (ValueError, KeyError):
                pass
            if isinstance(attribute, pd.Series):
                title = attribute.name
            order = sns.utils.categorical_order(attribute, order)
            palette = palette_maker(len(order))
            attribute = pd.Categorical(attribute, categories=order,
                                       ordered=True)
            return PlottingAttribute(pd.Series(attribute, index=self.samples),
                                     title, palette, order)
    def establish_symbols(self, marker, marker_order, text, text_order):
        """Figure out what symbol put on the axes for each data point"""
        symbol_title = None
        if isinstance(text, bool):
            # Option 1: Text is a boolean
            if text:
                # 1a: text=True, so use the sample names of data as the
                # plotting symbol
                symbol_title = 'Samples'
                symbols = [str(x) for x in self.samples]
                symbol = pd.Series(self.samples, index=self.samples)
            else:
                # 1b: text=False, so use the specified marker for each sample
                # NOTE(review): argument order looks wrong here --
                # _maybe_make_grouper expects (attribute, palette_maker,
                # order, func), so marker_order lands in palette_maker and
                # str in order. Harmless today because the result is
                # overwritten below whenever marker is not None, but the
                # intent should be confirmed.
                symbol = self._maybe_make_grouper(marker, marker_order, str)
                if marker is not None:
                    try:
                        symbol_title = marker
                        symbol = self.data[marker]
                        # BUGFIX: was sns.categorical_order, which does not
                        # exist at the top level (AttributeError); use
                        # sns.utils.categorical_order as everywhere else.
                        symbols = sns.utils.categorical_order(symbol,
                                                              marker_order)
                    except (ValueError, KeyError):
                        # Marker is a single marker, or already a groupable
                        if marker in self.filled_markers:
                            # Single marker so make a tuple so it's indexable
                            symbols = (marker,)
                        else:
                            # already a groupable object
                            if isinstance(marker, pd.Series):
                                symbol_title = marker.name
                            n_symbols = len(self.plot_data.groupby(symbol))
                            if n_symbols > len(self.filled_markers):
                                # If there's too many categories, then
                                # auto-expand the existing list of filled
                                # markers
                                # BUGFIX: np.ceil returns a float, and
                                # list * float raises TypeError; cast to int.
                                multiplier = int(np.ceil(
                                    n_symbols/float(len(self.filled_markers))))
                                filled_markers = list(self.filled_markers) \
                                    * multiplier
                                symbols = filled_markers[:n_symbols]
                            else:
                                symbols = self.filled_markers[:n_symbols]
                    symbol = PlottingAttribute(symbol, symbol_title, symbols,
                                               marker_order)
        else:
            # Assume "text" is a mapping from row names (sample ids) of the
            # data to text labels
            text_order = sns.utils.categorical_order(text, text_order)
            symbols = text_order
            symbol = pd.Series(pd.Categorical(text, categories=text_order,
                                              ordered=True),
                               index=self.samples)
            symbol = PlottingAttribute(symbol, symbol_title, symbols,
                                       text_order)
            if marker is not None:
                warnings.warn('Overriding plotting symbol from "marker" with '
                              'values in "text"')
            # Turn text into a boolean
            text = True
        self.symbol = symbol
        self.text = text
    def establish_symbol_attributes(self, linewidth, linewidth_order,
                                    edgecolor, edgecolor_order, alpha,
                                    alpha_order, size, size_order):
        """Build per-sample edgecolor, linewidth, alpha and size attributes."""
        self.edgecolor = self._maybe_make_grouper(
            edgecolor, self._edgecolor_palette, edgecolor_order,
            mpl.colors.is_color_like)
        self.linewidth = self._maybe_make_grouper(
            linewidth, self._linewidth_palette, linewidth_order, np.isfinite)
        self.alpha = self._maybe_make_grouper(
            alpha, self._alpha_palette, alpha_order, np.isfinite)
        self.size = self._maybe_make_grouper(
            size, self._size_palette, size_order, np.isfinite)
    def _edgecolor_palette(self, n_groups):
        """Grey shades for edgecolors, one per group.

        BUGFIX: this was decorated ``@staticmethod`` while still taking
        ``self``, so any call through ``self._edgecolor_palette(n)`` raised
        TypeError (n bound to ``self``, ``n_groups`` missing). It is now a
        plain instance method, consistent with its three siblings below.
        """
        return sns.color_palette('Greys', n_colors=n_groups)
    def _linewidth_palette(self, n_groups):
        """Evenly spaced linewidths between linewidth_min and linewidth_max."""
        return np.linspace(self.linewidth_min, self.linewidth_max, n_groups)
    def _alpha_palette(self, n_groups):
        """Evenly spaced opacities between alpha_min and alpha_max."""
        return np.linspace(self.alpha_min, self.alpha_max, n_groups)
    def _size_palette(self, n_groups):
        """Evenly spaced marker sizes between size_min and size_max."""
        return np.linspace(self.size_min, self.size_max, n_groups)
    def symbolplotter(self, xs, ys, ax, symbol, linewidth, edgecolor, **kwargs):
        """Plots either a matplotlib marker or a string at each data position
        Wraps plt.text and plt.plot
        Parameters
        ----------
        xs : array-like
            List of x positions for data
        ys : array-like
            List of y-positions for data
        ax : matplotlib Axes
            Axes to draw on
        symbol : str
            What to plot at each (x, y) data position. When ``self.text`` is
            True it is treated as a text label and placed with plt.text;
            otherwise it is a matplotlib marker drawn with plt.plot.
        linewidth, edgecolor :
            Marker edge width and color (ignored in text mode)
        kwargs
            Any other keyword arguments to plt.text or plt.plot
        """
        # If both the x- and y- positions don't have data, don't do anything
        if xs.empty and ys.empty:
            return
        if self.text:
            # Add dummy plot to make the axes in the right window
            ax.plot(xs, ys, color=None)
            # Plot each (x, y) position as text
            for x, y in zip(xs, ys):
                ax.text(x, y, symbol, **kwargs)
        else:
            # use plt.plot instead of plt.scatter for speed, since plotting all
            # the same marker shape and color and linestyle
            ax.plot(xs, ys, 'o', marker=symbol, markeredgewidth=linewidth,
                    markeredgecolor=edgecolor, **kwargs)
    def annotate_axes(self, ax):
        """Add descriptive labels to an Axes object."""
        if self.xlabel is not None:
            ax.set_xlabel(self.xlabel)
        if self.ylabel is not None:
            ax.set_ylabel(self.ylabel)
    def establish_legend_data(self):
        """Assemble one row per sample with its group label for each visual
        attribute, ordered by ``legend_order``."""
        self.legend_data = pd.DataFrame(dict(color=self.color.groupby,
                                             symbol=self.symbol.groupby,
                                             size=self.size.groupby,
                                             linewidth=self.linewidth.groupby,
                                             edgecolor=self.edgecolor.groupby,
                                             alpha=self.alpha.groupby),
                                        index=self.samples)
        self.legend_data = self.legend_data.reindex(columns=self.legend_order)
    def draw_symbols(self, ax, plot_kws):
        """Plot each sample in the data"""
        plot_kws = {} if plot_kws is None else plot_kws
        for labels, df in self.legend_data.groupby(self.legend_order):
            # Get the attributes in order, using the group label to get the
            # attribute
            for name, label in zip(self.legend_order, labels):
                plot_kws[name] = getattr(self, name)[label]
            # BUGFIX: 'ax' was never forwarded although symbolplotter
            # requires it; pass it explicitly.
            self.symbolplotter(df.iloc[:, 0], df.iloc[:, 1], ax=ax, **plot_kws)
class ScatterPlotter(PlotterMixin):
    """Scatter plot of two columns of ``data`` with per-sample aesthetics.

    The ``establish_*``/drawing helpers are inherited from ``PlotterMixin``
    (defined elsewhere in this module); this class only wires the
    constructor arguments through them.
    """

    def __init__(self, data, x, y, color, hue, hue_order, palette, marker,
                 marker_order, text, text_order, linewidth, linewidth_order,
                 edgecolor, edgecolor_order, alpha, alpha_order, size,
                 size_order):
        # Symbols and colors are established before establish_legend_data(),
        # which reads their .groupby attributes.
        self.establish_data(data, x, y)
        self.establish_symbols(marker, marker_order, text, text_order)
        self.establish_symbol_attributes(linewidth, linewidth_order, edgecolor,
                                         edgecolor_order, alpha, alpha_order,
                                         size, size_order)
        self.establish_colors(color, hue, hue_order, palette)
        self.establish_legend_data()

    def establish_data(self, data, x, y):
        # ``x`` and ``y`` are positional column indices into ``data``.
        # Non-DataFrame input is coerced to a DataFrame and gets no
        # inferred axis labels.
        if isinstance(data, pd.DataFrame):
            xlabel = data.columns[x]
            ylabel = data.columns[y]
        else:
            data = pd.DataFrame(data)
            xlabel = None
            ylabel = None
        self.data = data
        self.plot_data = self.data.iloc[:, [x, y]]
        self.xlabel = xlabel
        self.ylabel = ylabel
        self.samples = self.plot_data.index
        self.features = self.plot_data.columns
        self.n_samples = len(self.samples)
        self.n_features = len(self.features)

    def plot(self, ax, kwargs):
        # Draw the data, then apply axis labels.
        self.draw_symbols(ax, kwargs)
        self.annotate_axes(ax)
def scatterplot(data, x=0, y=1, color=None, hue=None, hue_order=None,
                palette=None, marker='o', marker_order=None, text=False,
                text_order=None, linewidth=1, linewidth_order=None,
                edgecolor='k', edgecolor_order=None, alpha=1, alpha_order=None,
                size=7, size_order=None, ax=None, **kwargs):
    """Draw a scatter plot of columns ``x`` vs ``y`` of ``data``.

    Thin functional wrapper around :class:`ScatterPlotter`.  All extra
    keyword arguments are forwarded to the underlying plotting calls.

    Returns
    -------
    ax : matplotlib.axes.Axes
        The axes drawn on (``plt.gca()`` when *ax* is None).
    """
    plotter = ScatterPlotter(data, x, y, color, hue, hue_order, palette,
                             marker, marker_order, text, text_order, linewidth,
                             linewidth_order, edgecolor, edgecolor_order,
                             alpha, alpha_order, size, size_order)
    if ax is None:
        ax = plt.gca()
    plotter.plot(ax, kwargs)
    return ax
| olgabot/cupcake | cupcake/scatter.py | Python | bsd-3-clause | 16,979 |
import os
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from cacheops.simple import file_cache, FILE_CACHE_DIR
class Command(BaseCommand):
    """Management command that removes stale files from cacheops' file cache."""
    help = 'Clean filebased cache'

    def handle(self, **options):
        """Delete cache files older than one minute under FILE_CACHE_DIR.

        Replaces the previous ``os.system('find %s ... -delete' % ...)``
        shell-out: a pure-Python walk is portable (no dependency on a shell
        or GNU find) and immune to shell-quoting/injection problems in the
        cache path.
        """
        import time
        # Mirror find's `-mmin +0`: skip files modified within the last minute.
        cutoff = time.time() - 60
        for dirpath, _dirnames, filenames in os.walk(FILE_CACHE_DIR):
            for name in filenames:
                path = os.path.join(dirpath, name)
                try:
                    if os.path.getmtime(path) <= cutoff:
                        os.remove(path)
                except OSError:
                    # File vanished or is unreadable; cleaning is best-effort.
                    pass
| dpetzold/django-cacheops | cacheops/management/commands/cleanfilecache.py | Python | bsd-3-clause | 407 |
#!/usr/bin/python2.4
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model tests
Unit tests for model utility methods.
"""
__author__ = '[email protected] (Joe Gregorio)'
import httplib2
import unittest
from apiclient.model import makepatch
# Table-driven cases for makepatch().  Each entry is
# (message, original, modified, expected_patch) where expected_patch is the
# minimal dict transforming `original` into `modified`; a value of None
# marks a key for deletion.
TEST_CASES = [
    # (message, original, modified, expected)
    ("Remove an item from an object",
     {'a': 1, 'b': 2}, {'a': 1}, {'b': None}),
    ("Add an item to an object",
     {'a': 1}, {'a': 1, 'b': 2}, {'b': 2}),
    ("No changes",
     {'a': 1, 'b': 2}, {'a': 1, 'b': 2}, {}),
    ("Empty objects",
     {}, {}, {}),
    ("Modify an item in an object",
     {'a': 1, 'b': 2}, {'a': 1, 'b': 3}, {'b': 3}),
    ("Change an array",
     {'a': 1, 'b': [2, 3]}, {'a': 1, 'b': [2]}, {'b': [2]}),
    ("Modify a nested item",
     {'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
     {'a': 1, 'b': {'foo':'bar', 'baz': 'qaax'}},
     {'b': {'baz': 'qaax'}}),
    ("Modify a nested array",
     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qaax'}]},
     {'b': [{'foo':'bar', 'baz': 'qaax'}]}),
    ("Remove item from a nested array",
     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
     {'a': 1, 'b': [{'foo':'bar'}]},
     {'b': [{'foo':'bar'}]}),
    ("Remove a nested item",
     {'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
     {'a': 1, 'b': {'foo':'bar'}},
     {'b': {'baz': None}})
]
class TestPatch(unittest.TestCase):

    def test_patch(self):
        """makepatch(original, modified) must yield the expected patch."""
        for msg, original, modified, expected in TEST_CASES:
            patch = makepatch(original, modified)
            self.assertEqual(expected, patch, msg=msg)


if __name__ == '__main__':
    unittest.main()
| MapofLife/MOL | earthengine/google-api-python-client/tests/test_model.py | Python | bsd-3-clause | 2,180 |
# -*- coding: utf-8 -*-
"""
jinja2.filters
~~~~~~~~~~~~~~
Bundled jinja filters.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
import math
from random import choice
from operator import itemgetter
from itertools import groupby
from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode, \
unicode_urlencode
from jinja2.runtime import Undefined
from jinja2.exceptions import FilterArgumentError
from jinja2._compat import imap, string_types, text_type, iteritems
_word_re = re.compile(r'\w+(?u)')
def contextfilter(f):
    """Decorator for marking context dependent filters. The current
    :class:`Context` will be passed as first argument.
    """
    # The environment inspects this attribute when invoking the filter.
    f.contextfilter = True
    return f
def evalcontextfilter(f):
    """Decorator for marking eval-context dependent filters. An eval
    context object is passed as first argument. For more information
    about the eval context, see :ref:`eval-context`.

    .. versionadded:: 2.4
    """
    # The environment inspects this attribute when invoking the filter.
    f.evalcontextfilter = True
    return f
def environmentfilter(f):
    """Decorator for marking environment dependent filters. The current
    :class:`Environment` is passed to the filter as first argument.
    """
    # The environment inspects this attribute when invoking the filter.
    f.environmentfilter = True
    return f
def make_attrgetter(environment, attribute):
    """Return a callable that looks up *attribute* on a passed object using
    the environment's subscription rules.  Dotted names drill into nested
    attributes; purely numeric path segments are looked up as integers.
    """
    is_plain = (not isinstance(attribute, string_types)
                or ('.' not in attribute and not attribute.isdigit()))
    if is_plain:
        return lambda x: environment.getitem(x, attribute)

    parts = [int(p) if p.isdigit() else p for p in attribute.split('.')]

    def attrgetter(item):
        for part in parts:
            item = environment.getitem(item, part)
        return item
    return attrgetter
def do_forceescape(value):
"""Enforce HTML escaping. This will probably double escape variables."""
if hasattr(value, '__html__'):
value = value.__html__()
return escape(text_type(value))
def do_urlencode(value):
"""Escape strings for use in URLs (uses UTF-8 encoding). It accepts both
dictionaries and regular strings as well as pairwise iterables.
.. versionadded:: 2.7
"""
itemiter = None
if isinstance(value, dict):
itemiter = iteritems(value)
elif not isinstance(value, string_types):
try:
itemiter = iter(value)
except TypeError:
pass
if itemiter is None:
return unicode_urlencode(value)
return u'&'.join(unicode_urlencode(k) + '=' +
unicode_urlencode(v) for k, v in itemiter)
@evalcontextfilter
def do_replace(eval_ctx, s, old, new, count=None):
    """Return a copy of the value with all occurrences of a substring
    replaced with a new one. The first argument is the substring
    that should be replaced, the second is the replacement string.
    If the optional third argument ``count`` is given, only the first
    ``count`` occurrences are replaced:

    .. sourcecode:: jinja

        {{ "Hello World"|replace("Hello", "Goodbye") }}
            -> Goodbye World

        {{ "aaaaargh"|replace("a", "d'oh, ", 2) }}
            -> d'oh, d'oh, aaargh
    """
    if count is None:
        count = -1

    if not eval_ctx.autoescape:
        return text_type(s).replace(text_type(old), text_type(new), count)

    # BUG FIX: the condition previously read
    #     hasattr(old, '__html__') or hasattr(new, '__html__') and not ...
    # which Python groups as ``A or (B and C)``.  The intent is to escape
    # ``s`` whenever *either* replacement operand is markup while ``s``
    # itself is not.
    if (hasattr(old, '__html__') or hasattr(new, '__html__')) and \
            not hasattr(s, '__html__'):
        s = escape(s)
    else:
        s = soft_unicode(s)

    return s.replace(soft_unicode(old), soft_unicode(new), count)
def do_upper(s):
"""Convert a value to uppercase."""
return soft_unicode(s).upper()
def do_lower(s):
"""Convert a value to lowercase."""
return soft_unicode(s).lower()
@evalcontextfilter
def do_xmlattr(_eval_ctx, d, autospace=True):
"""Create an SGML/XML attribute string based on the items in a dict.
All values that are neither `none` nor `undefined` are automatically
escaped:
.. sourcecode:: html+jinja
<ul{{ {'class': 'my_list', 'missing': none,
'id': 'list-%d'|format(variable)}|xmlattr }}>
...
</ul>
Results in something like this:
.. sourcecode:: html
<ul class="my_list" id="list-42">
...
</ul>
As you can see it automatically prepends a space in front of the item
if the filter returned something unless the second parameter is false.
"""
rv = u' '.join(
u'%s="%s"' % (escape(key), escape(value))
for key, value in iteritems(d)
if value is not None and not isinstance(value, Undefined)
)
if autospace and rv:
rv = u' ' + rv
if _eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_capitalize(s):
"""Capitalize a value. The first character will be uppercase, all others
lowercase.
"""
return soft_unicode(s).capitalize()
def do_title(s):
"""Return a titlecased version of the value. I.e. words will start with
uppercase letters, all remaining characters are lowercase.
"""
rv = []
for item in re.compile(r'([-\s]+)(?u)').split(soft_unicode(s)):
if not item:
continue
rv.append(item[0].upper() + item[1:].lower())
return ''.join(rv)
def do_dictsort(value, case_sensitive=False, by='key'):
"""Sort a dict and yield (key, value) pairs. Because python dicts are
unsorted you may want to use this function to order them by either
key or value:
.. sourcecode:: jinja
{% for item in mydict|dictsort %}
sort the dict by key, case insensitive
{% for item in mydict|dictsort(true) %}
sort the dict by key, case sensitive
{% for item in mydict|dictsort(false, 'value') %}
sort the dict by value, case insensitive
"""
if by == 'key':
pos = 0
elif by == 'value':
pos = 1
else:
raise FilterArgumentError('You can only sort by either '
'"key" or "value"')
def sort_func(item):
value = item[pos]
if isinstance(value, string_types) and not case_sensitive:
value = value.lower()
return value
return sorted(value.items(), key=sort_func)
@environmentfilter
def do_sort(environment, value, reverse=False, case_sensitive=False,
attribute=None):
"""Sort an iterable. Per default it sorts ascending, if you pass it
true as first argument it will reverse the sorting.
If the iterable is made of strings the third parameter can be used to
control the case sensitiveness of the comparison which is disabled by
default.
.. sourcecode:: jinja
{% for item in iterable|sort %}
...
{% endfor %}
It is also possible to sort by an attribute (for example to sort
by the date of an object) by specifying the `attribute` parameter:
.. sourcecode:: jinja
{% for item in iterable|sort(attribute='date') %}
...
{% endfor %}
.. versionchanged:: 2.6
The `attribute` parameter was added.
"""
if not case_sensitive:
def sort_func(item):
if isinstance(item, string_types):
item = item.lower()
return item
else:
sort_func = None
if attribute is not None:
getter = make_attrgetter(environment, attribute)
def sort_func(item, processor=sort_func or (lambda x: x)):
return processor(getter(item))
return sorted(value, key=sort_func, reverse=reverse)
def do_default(value, default_value=u'', boolean=False):
"""If the value is undefined it will return the passed default value,
otherwise the value of the variable:
.. sourcecode:: jinja
{{ my_variable|default('my_variable is not defined') }}
This will output the value of ``my_variable`` if the variable was
defined, otherwise ``'my_variable is not defined'``. If you want
to use default with variables that evaluate to false you have to
set the second parameter to `true`:
.. sourcecode:: jinja
{{ ''|default('the string was empty', true) }}
"""
if isinstance(value, Undefined) or (boolean and not value):
return default_value
return value
@evalcontextfilter
def do_join(eval_ctx, value, d=u'', attribute=None):
"""Return a string which is the concatenation of the strings in the
sequence. The separator between elements is an empty string per
default, you can define it with the optional parameter:
.. sourcecode:: jinja
{{ [1, 2, 3]|join('|') }}
-> 1|2|3
{{ [1, 2, 3]|join }}
-> 123
It is also possible to join certain attributes of an object:
.. sourcecode:: jinja
{{ users|join(', ', attribute='username') }}
.. versionadded:: 2.6
The `attribute` parameter was added.
"""
if attribute is not None:
value = imap(make_attrgetter(eval_ctx.environment, attribute), value)
# no automatic escaping? joining is a lot eaiser then
if not eval_ctx.autoescape:
return text_type(d).join(imap(text_type, value))
# if the delimiter doesn't have an html representation we check
# if any of the items has. If yes we do a coercion to Markup
if not hasattr(d, '__html__'):
value = list(value)
do_escape = False
for idx, item in enumerate(value):
if hasattr(item, '__html__'):
do_escape = True
else:
value[idx] = text_type(item)
if do_escape:
d = escape(d)
else:
d = text_type(d)
return d.join(value)
# no html involved, to normal joining
return soft_unicode(d).join(imap(soft_unicode, value))
def do_center(value, width=80):
"""Centers the value in a field of a given width."""
return text_type(value).center(width)
@environmentfilter
def do_first(environment, seq):
"""Return the first item of a sequence."""
try:
return next(iter(seq))
except StopIteration:
return environment.undefined('No first item, sequence was empty.')
@environmentfilter
def do_last(environment, seq):
"""Return the last item of a sequence."""
try:
return next(iter(reversed(seq)))
except StopIteration:
return environment.undefined('No last item, sequence was empty.')
@environmentfilter
def do_random(environment, seq):
"""Return a random item from the sequence."""
try:
return choice(seq)
except IndexError:
return environment.undefined('No random item, sequence was empty.')
def do_filesizeformat(value, binary=False):
    """Format the value like a 'human-readable' file size (i.e. 13 kB,
    4.1 MB, 102 Bytes, etc).  Per default decimal prefixes are used (Mega,
    Giga, etc.); if the second parameter is set to `True` the binary
    prefixes are used (Mebi, Gibi).
    """
    size = float(value)
    base = 1024 if binary else 1000
    prefixes = (['KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']
                if binary else
                ['kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'])

    if size == 1:
        return '1 Byte'
    if size < base:
        return '%d Bytes' % size

    # Pick the largest prefix the size fits under; sizes beyond the table
    # fall through formatted with the last (Yotta/Yobi) prefix.
    for exponent, prefix in enumerate(prefixes, start=2):
        unit = base ** exponent
        if size < unit:
            break
    return '%.1f %s' % (base * size / unit, prefix)
def do_pprint(value, verbose=False):
"""Pretty print a variable. Useful for debugging.
With Jinja 1.2 onwards you can pass it a parameter. If this parameter
is truthy the output will be more verbose (this requires `pretty`)
"""
return pformat(value, verbose=verbose)
@evalcontextfilter
def do_urlize(eval_ctx, value, trim_url_limit=None, nofollow=False,
target=None):
"""Converts URLs in plain text into clickable links.
If you pass the filter an additional integer it will shorten the urls
to that number. Also a third argument exists that makes the urls
"nofollow":
.. sourcecode:: jinja
{{ mytext|urlize(40, true) }}
links are shortened to 40 chars and defined with rel="nofollow"
If *target* is specified, the ``target`` attribute will be added to the
``<a>`` tag:
.. sourcecode:: jinja
{{ mytext|urlize(40, target='_blank') }}
.. versionchanged:: 2.8+
The *target* parameter was added.
"""
rv = urlize(value, trim_url_limit, nofollow, target)
if eval_ctx.autoescape:
rv = Markup(rv)
return rv
def do_indent(s, width=4, indentfirst=False):
    """Return a copy of the string with each line indented by *width*
    spaces (default 4).  The first line is left untouched unless
    *indentfirst* is true:

    .. sourcecode:: jinja

        {{ mytext|indent(2, true) }}
            indent by two spaces and indent the first line too.
    """
    prefix = u' ' * width
    indented = (u'\n' + prefix).join(s.splitlines())
    return prefix + indented if indentfirst else indented
def do_truncate(s, length=255, killwords=False, end='...'):
    """Return a truncated copy of the string.  *length* (default ``255``)
    bounds the result.  When *killwords* is true the text is cut exactly
    at the limit; otherwise the last (possibly partial) word is dropped.
    If truncation happened, the ellipsis *end* (default ``"..."``) is
    appended.

    .. sourcecode:: jinja

        {{ "foo bar baz"|truncate(9) }}
            -> "foo ..."
        {{ "foo bar baz"|truncate(9, True) }}
            -> "foo ba..."
    """
    if len(s) <= length:
        return s
    cut = s[:length - len(end)]
    if killwords:
        return cut + end
    # Drop the trailing partial word; re-add the separating space when the
    # result still fits (matches the documented "foo ..." output).
    trimmed = cut.rsplit(' ', 1)[0]
    if len(trimmed) < length:
        trimmed += ' '
    return trimmed + end
@environmentfilter
def do_wordwrap(environment, s, width=79, break_long_words=True,
wrapstring=None):
"""
Return a copy of the string passed to the filter wrapped after
``79`` characters. You can override this default using the first
parameter. If you set the second parameter to `false` Jinja will not
split words apart if they are longer than `width`. By default, the newlines
will be the default newlines for the environment, but this can be changed
using the wrapstring keyword argument.
.. versionadded:: 2.7
Added support for the `wrapstring` parameter.
"""
if not wrapstring:
wrapstring = environment.newline_sequence
import textwrap
return wrapstring.join(textwrap.wrap(s, width=width, expand_tabs=False,
replace_whitespace=False,
break_long_words=break_long_words))
def do_wordcount(s):
"""Count the words in that string."""
return len(_word_re.findall(s))
def do_int(value, default=0):
    """Convert the value into an integer.  Falls back to *default*
    (``0`` unless overridden) when the conversion fails.
    """
    try:
        return int(value)
    except (TypeError, ValueError):
        pass
    # "42.23"|int should give 42, so retry through float before giving up.
    try:
        return int(float(value))
    except (TypeError, ValueError):
        return default
def do_float(value, default=0.0):
    """Convert the value into a floating point number, returning *default*
    (``0.0`` unless overridden) when the conversion fails.
    """
    try:
        result = float(value)
    except (TypeError, ValueError):
        result = default
    return result
def do_format(value, *args, **kwargs):
"""
Apply python string formatting on an object:
.. sourcecode:: jinja
{{ "%s - %s"|format("Hello?", "Foo!") }}
-> Hello? - Foo!
"""
if args and kwargs:
raise FilterArgumentError('can\'t handle positional and keyword '
'arguments at the same time')
return soft_unicode(value) % (kwargs or args)
def do_trim(value):
"""Strip leading and trailing whitespace."""
return soft_unicode(value).strip()
def do_striptags(value):
"""Strip SGML/XML tags and replace adjacent whitespace by one space.
"""
if hasattr(value, '__html__'):
value = value.__html__()
return Markup(text_type(value)).striptags()
def do_slice(value, slices, fill_with=None):
    """Slice an iterator into *slices* lists, distributing the items as
    evenly as possible (earlier slices receive the extras).  Useful if you
    want to create a div containing three ul tags that represent columns:

    .. sourcecode:: html+jinja

        <div class="columwrapper">
          {%- for column in items|slice(3) %}
            <ul class="column-{{ loop.index }}">
            {%- for item in column %}
              <li>{{ item }}</li>
            {%- endfor %}
            </ul>
          {%- endfor %}
        </div>

    If you pass it a second argument it's used to fill missing
    values on the last iteration.
    """
    seq = list(value)
    per_slice, extras = divmod(len(seq), slices)
    offset = 0
    for index in range(slices):
        start = offset + index * per_slice
        # The first ``extras`` slices each absorb one extra item.
        if index < extras:
            offset += 1
        chunk = seq[start:offset + (index + 1) * per_slice]
        if fill_with is not None and index >= extras:
            chunk.append(fill_with)
        yield chunk
def do_batch(value, linecount, fill_with=None):
    """
    A filter that batches items: the inverse of `slice`.  It yields lists
    of *linecount* items; a trailing partial batch is padded with
    *fill_with* when that parameter is given.  See this example:

    .. sourcecode:: html+jinja

        <table>
        {%- for row in items|batch(3, '&nbsp;') %}
          <tr>
          {%- for column in row %}
            <td>{{ column }}</td>
          {%- endfor %}
          </tr>
        {%- endfor %}
        </table>
    """
    batch = []
    for item in value:
        batch.append(item)
        if len(batch) == linecount:
            yield batch
            batch = []
    if batch:
        # Only a partial batch can reach here; pad it if requested.
        if fill_with is not None:
            batch.extend([fill_with] * (linecount - len(batch)))
        yield batch
def do_round(value, precision=0, method='common'):
    """Round the number to a given precision. The first
    parameter specifies the precision (default is ``0``), the
    second the rounding method:

    - ``'common'`` rounds either up or down
    - ``'ceil'`` always rounds up
    - ``'floor'`` always rounds down

    .. sourcecode:: jinja

        {{ 42.55|round }}
            -> 43.0
        {{ 42.55|round(1, 'floor') }}
            -> 42.5

    Note that even if rounded to 0 precision, a float is returned; pipe
    through `int` if you need a real integer.
    """
    if method not in ('common', 'ceil', 'floor'):
        raise FilterArgumentError('method must be common, ceil or floor')
    if method == 'common':
        return round(value, precision)
    scale = 10 ** precision
    rounder = getattr(math, method)
    return rounder(value * scale) / scale
@environmentfilter
def do_groupby(environment, value, attribute):
"""Group a sequence of objects by a common attribute.
If you for example have a list of dicts or objects that represent persons
with `gender`, `first_name` and `last_name` attributes and you want to
group all users by genders you can do something like the following
snippet:
.. sourcecode:: html+jinja
<ul>
{% for group in persons|groupby('gender') %}
<li>{{ group.grouper }}<ul>
{% for person in group.list %}
<li>{{ person.first_name }} {{ person.last_name }}</li>
{% endfor %}</ul></li>
{% endfor %}
</ul>
Additionally it's possible to use tuple unpacking for the grouper and
list:
.. sourcecode:: html+jinja
<ul>
{% for grouper, list in persons|groupby('gender') %}
...
{% endfor %}
</ul>
As you can see the item we're grouping by is stored in the `grouper`
attribute and the `list` contains all the objects that have this grouper
in common.
.. versionchanged:: 2.6
It's now possible to use dotted notation to group by the child
attribute of another attribute.
"""
expr = make_attrgetter(environment, attribute)
return sorted(map(_GroupTuple, groupby(sorted(value, key=expr), expr)))
class _GroupTuple(tuple):
__slots__ = ()
grouper = property(itemgetter(0))
list = property(itemgetter(1))
def __new__(cls, xxx_todo_changeme):
(key, value) = xxx_todo_changeme
return tuple.__new__(cls, (key, list(value)))
@environmentfilter
def do_sum(environment, iterable, attribute=None, start=0):
"""Returns the sum of a sequence of numbers plus the value of parameter
'start' (which defaults to 0). When the sequence is empty it returns
start.
It is also possible to sum up only certain attributes:
.. sourcecode:: jinja
Total: {{ items|sum(attribute='price') }}
.. versionchanged:: 2.6
The `attribute` parameter was added to allow suming up over
attributes. Also the `start` parameter was moved on to the right.
"""
if attribute is not None:
iterable = imap(make_attrgetter(environment, attribute), iterable)
return sum(iterable, start)
def do_list(value):
"""Convert the value into a list. If it was a string the returned list
will be a list of characters.
"""
return list(value)
def do_mark_safe(value):
"""Mark the value as safe which means that in an environment with automatic
escaping enabled this variable will not be escaped.
"""
return Markup(value)
def do_mark_unsafe(value):
"""Mark a value as unsafe. This is the reverse operation for :func:`safe`."""
return text_type(value)
def do_reverse(value):
"""Reverse the object or return an iterator the iterates over it the other
way round.
"""
if isinstance(value, string_types):
return value[::-1]
try:
return reversed(value)
except TypeError:
try:
rv = list(value)
rv.reverse()
return rv
except TypeError:
raise FilterArgumentError('argument must be iterable')
@environmentfilter
def do_attr(environment, obj, name):
"""Get an attribute of an object. ``foo|attr("bar")`` works like
``foo.bar`` just that always an attribute is returned and items are not
looked up.
See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details.
"""
try:
name = str(name)
except UnicodeError:
pass
else:
try:
value = getattr(obj, name)
except AttributeError:
pass
else:
if environment.sandboxed and not \
environment.is_safe_attribute(obj, name, value):
return environment.unsafe_undefined(obj, name)
return value
return environment.undefined(obj=obj, name=name)
@contextfilter
def do_map(*args, **kwargs):
"""Applies a filter on a sequence of objects or looks up an attribute.
This is useful when dealing with lists of objects but you are really
only interested in a certain value of it.
The basic usage is mapping on an attribute. Imagine you have a list
of users but you are only interested in a list of usernames:
.. sourcecode:: jinja
Users on this page: {{ users|map(attribute='username')|join(', ') }}
Alternatively you can let it invoke a filter by passing the name of the
filter and the arguments afterwards. A good example would be applying a
text conversion filter on a sequence:
.. sourcecode:: jinja
Users on this page: {{ titles|map('lower')|join(', ') }}
.. versionadded:: 2.7
"""
context = args[0]
seq = args[1]
if len(args) == 2 and 'attribute' in kwargs:
attribute = kwargs.pop('attribute')
if kwargs:
raise FilterArgumentError('Unexpected keyword argument %r' %
next(iter(kwargs)))
func = make_attrgetter(context.environment, attribute)
else:
try:
name = args[2]
args = args[3:]
except LookupError:
raise FilterArgumentError('map requires a filter argument')
func = lambda item: context.environment.call_filter(
name, item, args, kwargs, context=context)
if seq:
for item in seq:
yield func(item)
@contextfilter
def do_select(*args, **kwargs):
"""Filters a sequence of objects by applying a test to the object and only
selecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ numbers|select("odd") }}
{{ numbers|select("odd") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: x, False)
@contextfilter
def do_reject(*args, **kwargs):
"""Filters a sequence of objects by applying a test to the object and
rejecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ numbers|reject("odd") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: not x, False)
@contextfilter
def do_selectattr(*args, **kwargs):
"""Filters a sequence of objects by applying a test to an attribute of an
object and only selecting the ones with the test succeeding.
Example usage:
.. sourcecode:: jinja
{{ users|selectattr("is_active") }}
{{ users|selectattr("email", "none") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: x, True)
@contextfilter
def do_rejectattr(*args, **kwargs):
"""Filters a sequence of objects by applying a test to an attribute of an
object or the attribute and rejecting the ones with the test succeeding.
.. sourcecode:: jinja
{{ users|rejectattr("is_active") }}
{{ users|rejectattr("email", "none") }}
.. versionadded:: 2.7
"""
return _select_or_reject(args, kwargs, lambda x: not x, True)
def _select_or_reject(args, kwargs, modfunc, lookup_attr):
context = args[0]
seq = args[1]
if lookup_attr:
try:
attr = args[2]
except LookupError:
raise FilterArgumentError('Missing parameter for attribute name')
transfunc = make_attrgetter(context.environment, attr)
off = 1
else:
off = 0
transfunc = lambda x: x
try:
name = args[2 + off]
args = args[3 + off:]
func = lambda item: context.environment.call_test(
name, item, args, kwargs)
except LookupError:
func = bool
if seq:
for item in seq:
if modfunc(func(transfunc(item))):
yield item
#: Mapping of filter names to implementations, as exposed by the
#: environment.  The previous literal listed 'title' and 'capitalize'
#: twice (with identical values); the redundant entries are removed.
FILTERS = {
    'attr': do_attr,
    'replace': do_replace,
    'upper': do_upper,
    'lower': do_lower,
    'escape': escape,
    'e': escape,
    'forceescape': do_forceescape,
    'capitalize': do_capitalize,
    'title': do_title,
    'default': do_default,
    'd': do_default,
    'join': do_join,
    'count': len,
    'dictsort': do_dictsort,
    'sort': do_sort,
    'length': len,
    'reverse': do_reverse,
    'center': do_center,
    'indent': do_indent,
    'first': do_first,
    'last': do_last,
    'map': do_map,
    'random': do_random,
    'reject': do_reject,
    'rejectattr': do_rejectattr,
    'filesizeformat': do_filesizeformat,
    'pprint': do_pprint,
    'truncate': do_truncate,
    'wordwrap': do_wordwrap,
    'wordcount': do_wordcount,
    'int': do_int,
    'float': do_float,
    'string': soft_unicode,
    'list': do_list,
    'urlize': do_urlize,
    'format': do_format,
    'trim': do_trim,
    'striptags': do_striptags,
    'select': do_select,
    'selectattr': do_selectattr,
    'slice': do_slice,
    'batch': do_batch,
    'sum': do_sum,
    'abs': abs,
    'round': do_round,
    'groupby': do_groupby,
    'safe': do_mark_safe,
    'xmlattr': do_xmlattr,
    'urlencode': do_urlencode,
}
| dstufft/jinja2 | jinja2/filters.py | Python | bsd-3-clause | 29,972 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration for ``formbuilder``.

    Replaces ChoiceAnswer's single ``option`` FK with a many-to-many
    ``choices`` relation, and adds a free-text ``other`` field plus a
    nullable ``question`` FK pointing at Choice.
    """

    dependencies = [
        ('formbuilder', '0005_auto_20150826_1600'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='choiceanswer',
            name='option',
        ),
        migrations.AddField(
            model_name='choiceanswer',
            name='choices',
            field=models.ManyToManyField(related_name='answers', to='formbuilder.Option'),
        ),
        migrations.AddField(
            model_name='choiceanswer',
            name='other',
            field=models.TextField(blank=True),
        ),
        migrations.AddField(
            model_name='choiceanswer',
            name='question',
            field=models.ForeignKey(related_name='answers', to='formbuilder.Choice', null=True),
        ),
    ]
| Kvoti/ditto | ditto/formbuilder/migrations/0006_auto_20150827_1019.py | Python | bsd-3-clause | 918 |
"""
extend TiddlyWiki serialization to optionally use beta or
externalized releases and add the UniversalBackstage.
activated via "twrelease=beta" URL parameter or ServerSettings,
see build_config_var
"""
import logging
from tiddlyweb.util import read_utf8_file
from tiddlywebwiki.serialization import Serialization as WikiSerialization
from tiddlywebplugins.tiddlyspace.web import (determine_host,
determine_space, determine_space_recipe)
LOGGER = logging.getLogger(__name__)
def build_config_var(beta=False, external=False):
    """Return the configuration key naming the base TiddlyWiki file.

    The key starts at ``base_tiddlywiki`` and gains an ``_external``
    and/or ``_beta`` suffix (in that order) for the requested variants.
    """
    parts = ['base_tiddlywiki']
    if external:
        parts.append('external')
    if beta:
        parts.append('beta')
    return '_'.join(parts)
class Serialization(WikiSerialization):
    """
    Subclass of the standard TiddlyWiki serialization to allow
    choosing beta or externalized versions of the base empty.html
    in which the tiddlers will be served.

    Also, if the TiddlyWiki is not being downloaded, add
    the UniversalBackstage by injecting a script tag.
    """

    def list_tiddlers(self, tiddlers):
        """
        Override tiddlers.link so the location in noscript is to
        /tiddlers.
        """
        http_host, _ = determine_host(self.environ)
        space_name = determine_space(self.environ, http_host)
        if space_name:
            recipe_name = determine_space_recipe(self.environ, space_name)
            # Only rewrite the link when it points at this space's recipe.
            if '/recipes/%s' % recipe_name in tiddlers.link:
                tiddlers.link = '/tiddlers'
        return WikiSerialization.list_tiddlers(self, tiddlers)

    def _get_wiki(self):
        # Query-string switches: ``twrelease=beta`` selects the beta build,
        # ``external=...`` the externalized build, and ``download=...``
        # marks a download, which disables both externalization and the
        # backstage script injection below.
        beta = external = False
        release = self.environ.get('tiddlyweb.query', {}).get(
            'twrelease', [False])[0]
        externalize = self.environ.get('tiddlyweb.query', {}).get(
            'external', [False])[0]
        download = self.environ.get('tiddlyweb.query', {}).get(
            'download', [False])[0]
        if release == 'beta':
            beta = True
        if externalize:
            external = True
        # If somebody is downloading, don't allow them to
        # externalize.
        if download:
            external = False
        wiki = None
        if beta or external:
            # Look up the variant-specific base file from server config;
            # fall back to the stock base wiki when unset or unreadable.
            config_var = build_config_var(beta, external)
            LOGGER.debug('looking for %s', config_var)
            base_wiki_file = self.environ.get('tiddlyweb.config',
                {}).get(config_var, '')
            if base_wiki_file:
                LOGGER.debug('using %s as base_tiddlywiki', base_wiki_file)
                wiki = read_utf8_file(base_wiki_file)
        if not wiki:
            wiki = WikiSerialization._get_wiki(self)
        tag = "<!--POST-SCRIPT-START-->"
        if not download:
            # Inject the UniversalBackstage script just before the marker.
            wiki = wiki.replace(tag, '<script type="text/javascript" '
                'src="/bags/common/tiddlers/backstage.js"></script> %s' % tag)
        return wiki
| TiddlySpace/tiddlyspace | tiddlywebplugins/tiddlyspace/betaserialization.py | Python | bsd-3-clause | 3,007 |
# Auto-generated pyaf test driver: build one model for the ozone dataset using
# a relative-difference transform, constant trend, no cycle and an LSTM model.
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['RelativeDifference'] , ['ConstantTrend'] , ['NoCycle'] , ['LSTM'] );
"""
Django settings for example_site project.
Generated by 'django-admin startproject' using Django 1.8.dev20150302062936.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import environ
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
env = environ.Env()
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "fbaa1unu0e8z5@9mm%k#+*d@iny*=-)ma2b#ymq)o9z^3%ijh)"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
"address",
"person",
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
)
MIDDLEWARE = (
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
)
ROOT_URLCONF = "example_site.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "example_site.wsgi.application"
# Specify your Google API key as environment variable GOOGLE_API_KEY
# You may also specify it here, though be sure not to commit it to a repository
GOOGLE_API_KEY = "" # Specify your Google API key here
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", GOOGLE_API_KEY)
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
"default": env.db(),
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_URL = "/static/"
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
| furious-luke/django-address | example_site/example_site/settings.py | Python | bsd-3-clause | 3,436 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Implements rotations, including spherical rotations as defined in WCS Paper II
[1]_
`RotateNative2Celestial` and `RotateCelestial2Native` follow the convention in
WCS Paper II to rotate to/from a native sphere and the celestial sphere.
The implementation uses `EulerAngleRotation`. The model parameters are
three angles: the longitude (``lon``) and latitude (``lat``) of the fiducial point
in the celestial system (``CRVAL`` keywords in FITS), and the longitude of the celestial
pole in the native system (``lon_pole``). The Euler angles are ``lon+90``, ``90-lat``
and ``-(lon_pole-90)``.
References
----------
.. [1] Calabretta, M.R., Greisen, E.W., 2002, A&A, 395, 1077 (Paper II)
"""
import math
import numpy as np
from .core import Model
from .parameters import Parameter
from astropy.coordinates.matrix_utilities import rotation_matrix, matrix_product
from astropy import units as u
from .utils import _to_radian, _to_orig_unit
__all__ = ['RotateCelestial2Native', 'RotateNative2Celestial', 'Rotation2D',
'EulerAngleRotation', 'RotationSequence3D', 'SphericalRotationSequence']
def _create_matrix(angles, axes_order):
    """
    Compose a single 3x3 rotation matrix from per-axis angles (in rad)
    applied in the order given by ``axes_order``.
    """
    per_axis = []
    for ang, axis in zip(angles, axes_order):
        if isinstance(ang, u.Quantity):
            ang = ang.value
        per_axis.append(rotation_matrix(ang.item(), axis, unit=u.rad))
    # matrix_product applies matrices right-to-left, hence the reversal.
    return matrix_product(*reversed(per_axis))
def spherical2cartesian(alpha, delta):
    """Convert spherical angles (deg) to a unit Cartesian vector [x, y, z]."""
    lon = np.deg2rad(alpha)
    lat = np.deg2rad(delta)
    cos_lat = np.cos(lat)
    return np.array([cos_lat * np.cos(lon),
                     cos_lat * np.sin(lon),
                     np.sin(lat)])
def cartesian2spherical(x, y, z):
    """Convert Cartesian coordinates to spherical angles (deg)."""
    # Longitude in the x-y plane; latitude from the distance to the z axis.
    lon = np.arctan2(y, x)
    lat = np.arctan2(z, np.hypot(x, y))
    return np.rad2deg(lon), np.rad2deg(lat)
class RotationSequence3D(Model):
    """
    Perform a series of rotations about different axis in 3D space.
    Positive angles represent a counter-clockwise rotation.
    Parameters
    ----------
    angles : array-like
        Angles of rotation in deg in the order of axes_order.
    axes_order : str
        A sequence of 'x', 'y', 'z' corresponding to axis of rotation.
    Examples
    --------
    >>> model = RotationSequence3D([1.1, 2.1, 3.1, 4.1], axes_order='xyzx')
    """
    standard_broadcasting = False
    _separable = False
    n_inputs = 3
    n_outputs = 3
    # Stored internally in radians (setter); exposed in original units (getter).
    angles = Parameter(default=[], getter=_to_orig_unit, setter=_to_radian)
    def __init__(self, angles, axes_order, name=None):
        self.axes = ['x', 'y', 'z']
        unrecognized = set(axes_order).difference(self.axes)
        if unrecognized:
            raise ValueError("Unrecognized axis label {0}; "
                             "should be one of {1} ".format(unrecognized,
                                                            self.axes))
        self.axes_order = axes_order
        # One angle per axis character, in matching order.
        if len(angles) != len(axes_order):
            raise ValueError("The number of angles {0} should match the number \
                              of axes {1}.".format(len(angles),
                                                   len(axes_order)))
        super().__init__(angles, name=name)
        self._inputs = ('x', 'y', 'z')
        self._outputs = ('x', 'y', 'z')
    @property
    def inverse(self):
        """Inverse rotation: negated angles applied in reverse axis order."""
        angles = self.angles.value[::-1] * -1
        return self.__class__(angles, axes_order=self.axes_order[::-1])
    def evaluate(self, x, y, z, angles):
        """
        Apply the rotation to a set of 3D Cartesian coordinates.
        """
        if x.shape != y.shape != z.shape:
            raise ValueError("Expected input arrays to have the same shape")
        # Note: If the original shape was () (an array scalar) convert to a
        # 1-element 1-D array on output for consistency with most other models
        orig_shape = x.shape or (1,)
        # Stack the flattened coordinates as rows so a single matrix-vector
        # product rotates every point at once.
        inarr = np.array([x.flatten(), y.flatten(), z.flatten()])
        # angles[0]: the model machinery passes parameters with a leading
        # broadcast axis — presumably always of length 1 here; TODO confirm.
        result = np.dot(_create_matrix(angles[0], self.axes_order), inarr)
        x, y, z = result[0], result[1], result[2]
        x.shape = y.shape = z.shape = orig_shape
        return x, y, z
class SphericalRotationSequence(RotationSequence3D):
    """
    Perform a sequence of rotations about arbitrary number of axes
    in spherical coordinates.
    Parameters
    ----------
    angles : list
        A sequence of angles (in deg).
    axes_order : str
        A sequence of characters ('x', 'y', or 'z') corresponding to the
        axis of rotation and matching the order in ``angles``.
    """
    def __init__(self, angles, axes_order, name=None, **kwargs):
        # Overrides the base class's 3 Cartesian inputs/outputs with
        # 2 spherical ones (lon, lat).
        self._n_inputs = 2
        self._n_outputs = 2
        super().__init__(angles, axes_order=axes_order, name=name, **kwargs)
        self._inputs = ("lon", "lat")
        self._outputs = ("lon", "lat")
    @property
    def n_inputs(self):
        # Shadow the base class attribute with the spherical input count.
        return self._n_inputs
    @property
    def n_outputs(self):
        return self._n_outputs
    def evaluate(self, lon, lat, angles):
        """Rotate spherical coordinates via the Cartesian representation."""
        x, y, z = spherical2cartesian(lon, lat)
        x1, y1, z1 = super().evaluate(x, y, z, angles)
        lon, lat = cartesian2spherical(x1, y1, z1)
        return lon, lat
class _EulerRotation:
    """
    Base class which does the actual computation.
    """
    _separable = False
    def evaluate(self, alpha, delta, phi, theta, psi, axes_order):
        """Rotate spherical (alpha, delta) by Euler angles (phi, theta, psi)."""
        shape = None
        if isinstance(alpha, np.ndarray) and alpha.ndim == 2:
            alpha = alpha.flatten()
            delta = delta.flatten()
            # NOTE(review): shape is captured *after* flattening, so the
            # reshape below restores the flattened (1-D) shape, not the
            # original 2-D one — looks suspicious; confirm intent.
            shape = alpha.shape
        inp = spherical2cartesian(alpha, delta)
        matrix = _create_matrix([phi, theta, psi], axes_order)
        result = np.dot(matrix, inp)
        a, b = cartesian2spherical(*result)
        if shape is not None:
            a.shape = shape
            b.shape = shape
        return a, b
    _input_units_strict = True
    _input_units_allow_dimensionless = True
    @property
    def input_units(self):
        """ Input units. """
        return {'alpha': u.deg, 'delta': u.deg}
    @property
    def return_units(self):
        """ Output units. """
        return {'alpha': u.deg, 'delta': u.deg}
class EulerAngleRotation(_EulerRotation, Model):
    """
    Implements Euler angle intrinsic rotations.
    Rotates one coordinate system into another (fixed) coordinate system.
    All coordinate systems are right-handed. The sign of the angles is
    determined by the right-hand rule.
    Parameters
    ----------
    phi, theta, psi : float or `~astropy.units.Quantity`
        "proper" Euler angles in deg.
        If floats, they should be in deg.
    axes_order : str
        A 3 character string, a combination of 'x', 'y' and 'z',
        where each character denotes an axis in 3D space.
    """
    n_inputs = 2
    n_outputs = 2
    # Angles are stored internally in radians (setter) and exposed in the
    # parameter's original unit (getter).
    phi = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    theta = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    psi = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    def __init__(self, phi, theta, psi, axes_order, **kwargs):
        self.axes = ['x', 'y', 'z']
        if len(axes_order) != 3:
            # BUG FIX: the concatenated message previously read "length 3,got".
            raise TypeError(
                "Expected axes_order to be a character sequence of length 3, "
                "got {}".format(axes_order))
        unrecognized = set(axes_order).difference(self.axes)
        if unrecognized:
            raise ValueError("Unrecognized axis label {}; "
                             "should be one of {} ".format(unrecognized, self.axes))
        self.axes_order = axes_order
        # Angles must be uniformly Quantity or uniformly plain floats.
        qs = [isinstance(par, u.Quantity) for par in [phi, theta, psi]]
        if any(qs) and not all(qs):
            raise TypeError("All parameters should be of the same type - float or Quantity.")
        super().__init__(phi=phi, theta=theta, psi=psi, **kwargs)
        self._inputs = ('alpha', 'delta')
        self._outputs = ('alpha', 'delta')
    @property
    def inverse(self):
        """Inverse rotation: reversed axes order with negated, swapped angles.

        BUG FIX: ``inverse`` is now a property, consistent with
        ``Rotation2D.inverse``, the sky-rotation models in this module and
        the ``Model.inverse`` property contract; previously it was a plain
        method and ``model.inverse`` returned a bound method object.
        """
        return self.__class__(phi=-self.psi,
                              theta=-self.theta,
                              psi=-self.phi,
                              axes_order=self.axes_order[::-1])
    def evaluate(self, alpha, delta, phi, theta, psi):
        """Rotate spherical coordinates (alpha, delta) by the Euler angles."""
        a, b = super().evaluate(alpha, delta, phi, theta, psi, self.axes_order)
        return a, b
class _SkyRotation(_EulerRotation, Model):
    """
    Base class for RotateNative2Celestial and RotateCelestial2Native.
    """
    # Angles stored internally in radians (setter); exposed in deg (getter).
    lon = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    lat = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    lon_pole = Parameter(default=0, getter=_to_orig_unit, setter=_to_radian)
    def __init__(self, lon, lat, lon_pole, **kwargs):
        # Parameters must be uniformly Quantity or uniformly plain floats.
        qs = [isinstance(par, u.Quantity) for par in [lon, lat, lon_pole]]
        if any(qs) and not all(qs):
            raise TypeError("All parameters should be of the same type - float or Quantity.")
        super().__init__(lon, lat, lon_pole, **kwargs)
        # Sky rotations are expressed as z-x-z Euler rotations (WCS Paper II).
        self.axes_order = 'zxz'
    def _evaluate(self, phi, theta, lon, lat, lon_pole):
        alpha, delta = super().evaluate(phi, theta, lon, lat, lon_pole,
                                        self.axes_order)
        # Wrap longitudes into [0, 360): arctan2 yields values in (-180, 180].
        mask = alpha < 0
        if isinstance(mask, np.ndarray):
            alpha[mask] += 360
        elif mask:
            # BUG FIX: for scalar inputs, 360 was previously added
            # unconditionally, shifting already-non-negative longitudes
            # out of the [0, 360) range.
            alpha += 360
        return alpha, delta
class RotateNative2Celestial(_SkyRotation):
    """
    Transform from Native to Celestial Spherical Coordinates.
    Parameters
    ----------
    lon : float or `~astropy.units.Quantity`
        Celestial longitude of the fiducial point.
    lat : float or `~astropy.units.Quantity`
        Celestial latitude of the fiducial point.
    lon_pole : float or `~astropy.units.Quantity`
        Longitude of the celestial pole in the native system.
    Notes
    -----
    If ``lon``, ``lat`` and ``lon_pole`` are numerical values they
    should be in units of deg. Inputs are angles on the native sphere.
    Outputs are angles on the celestial sphere.
    """
    n_inputs = 2
    n_outputs = 2
    @property
    def input_units(self):
        """ Input units. """
        return {'phi_N': u.deg, 'theta_N': u.deg}
    @property
    def return_units(self):
        """ Output units. """
        return {'alpha_C': u.deg, 'delta_C': u.deg}
    def __init__(self, lon, lat, lon_pole, **kwargs):
        super().__init__(lon, lat, lon_pole, **kwargs)
        self.inputs = ('phi_N', 'theta_N')
        self.outputs = ('alpha_C', 'delta_C')
    def evaluate(self, phi_N, theta_N, lon, lat, lon_pole):
        """
        Parameters
        ----------
        phi_N, theta_N : float (deg) or `~astropy.units.Quantity`
            Angles in the Native coordinate system.
        lon, lat, lon_pole : float (in deg) or `~astropy.units.Quantity`
            Parameter values when the model was initialized.
        Returns
        -------
        alpha_C, delta_C : float (deg) or `~astropy.units.Quantity`
            Angles on the Celestial sphere.
        """
        # The values are in radians since they have already been through the setter.
        if isinstance(lon, u.Quantity):
            lon = lon.value
            lat = lat.value
            lon_pole = lon_pole.value
        # Convert (lon, lat, lon_pole) to the z-x-z Euler angles
        # (lon_pole - 90, lat - 90, -(90 + lon)) per WCS Paper II.
        phi = lon_pole - np.pi / 2
        theta = - (np.pi / 2 - lat)
        psi = -(np.pi / 2 + lon)
        alpha_C, delta_C = super()._evaluate(phi_N, theta_N, phi, theta, psi)
        return alpha_C, delta_C
    @property
    def inverse(self):
        # convert to angles on the celestial sphere
        return RotateCelestial2Native(self.lon, self.lat, self.lon_pole)
class RotateCelestial2Native(_SkyRotation):
    """
    Transform from Celestial to Native Spherical Coordinates.
    Parameters
    ----------
    lon : float or `~astropy.units.Quantity`
        Celestial longitude of the fiducial point.
    lat : float or `~astropy.units.Quantity`
        Celestial latitude of the fiducial point.
    lon_pole : float or `~astropy.units.Quantity`
        Longitude of the celestial pole in the native system.
    Notes
    -----
    If ``lon``, ``lat`` and ``lon_pole`` are numerical values they should be
    in units of deg. Inputs are angles on the celestial sphere.
    Outputs are angles on the native sphere.
    """
    n_inputs = 2
    n_outputs = 2
    @property
    def input_units(self):
        """ Input units. """
        return {'alpha_C': u.deg, 'delta_C': u.deg}
    @property
    def return_units(self):
        """ Output units. """
        return {'phi_N': u.deg, 'theta_N': u.deg}
    def __init__(self, lon, lat, lon_pole, **kwargs):
        super().__init__(lon, lat, lon_pole, **kwargs)
        # Inputs are angles on the celestial sphere
        self.inputs = ('alpha_C', 'delta_C')
        # Outputs are angles on the native sphere
        self.outputs = ('phi_N', 'theta_N')
    def evaluate(self, alpha_C, delta_C, lon, lat, lon_pole):
        """
        Parameters
        ----------
        alpha_C, delta_C : float (deg) or `~astropy.units.Quantity`
            Angles in the Celestial coordinate frame.
        lon, lat, lon_pole : float (deg) or `~astropy.units.Quantity`
            Parameter values when the model was initialized.
        Returns
        -------
        phi_N, theta_N : float (deg) or `~astropy.units.Quantity`
            Angles on the Native sphere.
        """
        # The values are in radians: they have already been through the setter.
        if isinstance(lon, u.Quantity):
            lon = lon.value
            lat = lat.value
            lon_pole = lon_pole.value
        # Convert to z-x-z Euler angles: the inverse of the Native2Celestial
        # angle assignment (signs and phi/psi roles swapped).
        phi = (np.pi / 2 + lon)
        theta = (np.pi / 2 - lat)
        psi = -(lon_pole - np.pi / 2)
        phi_N, theta_N = super()._evaluate(alpha_C, delta_C, phi, theta, psi)
        return phi_N, theta_N
    @property
    def inverse(self):
        return RotateNative2Celestial(self.lon, self.lat, self.lon_pole)
class Rotation2D(Model):
    """
    Perform a 2D rotation given an angle.
    Positive angles represent a counter-clockwise rotation and vice-versa.
    Parameters
    ----------
    angle : float or `~astropy.units.Quantity`
        Angle of rotation (if float it should be in deg).
    """
    n_inputs = 2
    n_outputs = 2
    _separable = False
    # Stored internally in radians (setter); exposed in deg (getter).
    angle = Parameter(default=0.0, getter=_to_orig_unit, setter=_to_radian)
    def __init__(self, angle=angle, **kwargs):
        # The default comes from the Parameter descriptor declared above.
        super().__init__(angle=angle, **kwargs)
        self._inputs = ("x", "y")
        self._outputs = ("x", "y")
    @property
    def inverse(self):
        """Inverse rotation."""
        return self.__class__(angle=-self.angle)
    @classmethod
    def evaluate(cls, x, y, angle):
        """
        Rotate (x, y) about ``angle``.
        Parameters
        ----------
        x, y : ndarray-like
            Input quantities
        angle : float (deg) or `~astropy.units.Quantity`
            Angle of rotations.
        """
        if x.shape != y.shape:
            raise ValueError("Expected input arrays to have the same shape")
        # If one argument has units, enforce they both have units and they are compatible.
        x_unit = getattr(x, 'unit', None)
        y_unit = getattr(y, 'unit', None)
        has_units = x_unit is not None and y_unit is not None
        if x_unit != y_unit:
            if has_units and y_unit.is_equivalent(x_unit):
                # Work in x's unit; y is converted so the matrix product
                # operates on consistent values.
                y = y.to(x_unit)
                y_unit = x_unit
            else:
                # Also raised when only one of the two inputs carries a unit.
                raise u.UnitsError("x and y must have compatible units")
        # Note: If the original shape was () (an array scalar) convert to a
        # 1-element 1-D array on output for consistency with most other models
        orig_shape = x.shape or (1,)
        inarr = np.array([x.flatten(), y.flatten()])
        if isinstance(angle, u.Quantity):
            angle = angle.to_value(u.rad)
        result = np.dot(cls._compute_matrix(angle), inarr)
        x, y = result[0], result[1]
        x.shape = y.shape = orig_shape
        if has_units:
            # Re-attach the (now common) unit stripped by np.array above.
            return u.Quantity(x, unit=x_unit), u.Quantity(y, unit=y_unit)
        else:
            return x, y
    @staticmethod
    def _compute_matrix(angle):
        # Standard 2D counter-clockwise rotation matrix; ``angle`` in radians.
        return np.array([[math.cos(angle), -math.sin(angle)],
                         [math.sin(angle), math.cos(angle)]],
                        dtype=np.float64)
| MSeifert04/astropy | astropy/modeling/rotations.py | Python | bsd-3-clause | 16,505 |
# -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO Ematelot SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'liste' de la commande 'matelot'."""
from primaires.format.fonctions import supprimer_accents
from primaires.format.tableau import Tableau
from primaires.interpreteur.masque.parametre import Parametre
from secondaires.navigation.equipage.postes.hierarchie import ORDRE
class PrmListe(Parametre):
    """Parameter 'liste' of the 'matelot' command: list the crew members.
    """
    def __init__(self):
        """Parameter constructor."""
        Parametre.__init__(self, "liste", "list")
        self.tronquer = True
        self.aide_courte = "liste les matelots de l'équipage"
        self.aide_longue = \
            "Cette commande liste les matelots de votre équipage. " \
            "Elle permet d'obtenir rapidement des informations pratiques " \
            "sur le nom du matelot ainsi que l'endroit où il se trouve."
    def interpreter(self, personnage, dic_masques):
        """Interpret the parameter: display the crew roster as a table."""
        salle = personnage.salle
        # The character must be aboard a ship (room has a 'navire' attribute).
        if not hasattr(salle, "navire"):
            personnage << "|err|Vous n'êtes pas sur un navire.|ff|"
            return
        navire = salle.navire
        equipage = navire.equipage
        # Only characters with at least officer rights may list the crew.
        if not navire.a_le_droit(personnage, "officier"):
            personnage << "|err|Vous ne pouvez donner d'ordre sur ce " \
                    "navire.|ff|"
            return
        # Gather NPC sailors and player crew members as (member, post) pairs,
        # sorted by rank (highest first, per the ORDRE hierarchy).
        matelots = tuple((m, m.nom_poste) for m in \
                equipage.matelots.values())
        matelots += tuple(equipage.joueurs.items())
        matelots = sorted(matelots, \
                key=lambda couple: ORDRE.index(couple[1]), reverse=True)
        if len(matelots) == 0:
            personnage << "|err|Votre équipage ne comprend aucun matelot.|ff|"
            return
        tableau = Tableau()
        tableau.ajouter_colonne("Nom")
        tableau.ajouter_colonne("Poste")
        tableau.ajouter_colonne("Affectation")
        for matelot, nom_poste in matelots:
            nom = matelot.nom
            nom_poste = nom_poste.capitalize()
            titre = "Aucune"
            # NPC sailors wrap a 'personnage'; use its room as the assignment.
            if hasattr(matelot, "personnage"):
                titre = matelot.personnage.salle.titre_court.capitalize()
            tableau.ajouter_ligne(nom, nom_poste, titre)
        personnage << tableau.afficher()
| stormi/tsunami | src/secondaires/navigation/commandes/matelot/liste.py | Python | bsd-3-clause | 3,827 |
# Plotting script (Python 2): load pickled CV/CA measurement dicts for the
# 2012-9 FeCoNiTi 500C plate and render current-vs-potential and
# photocurrent-vs-time paper figures.
import time, copy
import os, os.path
import sys
import numpy
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from scipy import optimize
# NOTE(review): pylab used below is presumably brought in by one of these
# star imports — confirm.
from echem_plate_ui import *
from echem_plate_math import *
import pickle
# Pickled per-sample data dictionaries: two fast CP/CV runs and one
# illuminated chronoamperometry (CA) run.
p1='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9_FeCoNiTi_500C_fastCPCV_plate1_dlist_1066.dat'
p2='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9_FeCoNiTi_500C_fastCPCV_plate1_dlist_1662.dat'
pill='C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots/2012-9FeCoNiTi_500C_CAill_plate1_dlist_1164.dat'
os.chdir('C:/Users/Gregoire/Documents/CaltechWork/echemdrop/2012-9_FeCoNiTi/results/echemplots')
# Potential shift (V) to the H2O/O2 reference and current scale (A -> uA).
vshift=-.24
imult=1.e6
# Index window of the CA trace to plot.
cai0, cai1=(0, 6500)
f=open(p1, mode='r')
d1=pickle.load(f)
f.close()
f=open(p2, mode='r')
d2=pickle.load(f)
f.close()
f=open(pill, mode='r')
dill=pickle.load(f)
f.close()
# Sample 1: split into up/down sweep segments; drop the first 4 points of the
# up sweep and recover the linear-background component from the LinSub trace.
segd1up, segd1dn=d1['segprops_dlist']
i1up=d1['I(A)'][segd1up['inds']][4:]
lin1up=i1up-d1['I(A)_LinSub'][segd1up['inds']][4:]
v1up=d1['Ewe(V)'][segd1up['inds']][4:]+vshift
i1dn=d1['I(A)'][segd1dn['inds']]
v1dn=d1['Ewe(V)'][segd1dn['inds']]+vshift
i1up*=imult
i1dn*=imult
lin1up*=imult
# Sample 2: same processing as sample 1.
segd2up, segd2dn=d2['segprops_dlist']
i2up=d2['I(A)'][segd2up['inds']][4:]
lin2up=i2up-d2['I(A)_LinSub'][segd2up['inds']][4:]
v2up=d2['Ewe(V)'][segd2up['inds']][4:]+vshift
i2dn=d2['I(A)'][segd2dn['inds']]
v2dn=d2['Ewe(V)'][segd2dn['inds']]+vshift
i2up*=imult
i2dn*=imult
lin2up*=imult
# Illuminated CA: smoothed current, illumination-difference current, and the
# per-cycle photocurrent restricted to the plotted time window.
ica=dill['I(A)_SG'][cai0:cai1]*imult
icadiff=dill['Idiff_time'][cai0:cai1]*imult
tca=dill['t(s)'][cai0:cai1]
tca_cycs=dill['till_cycs']
cycinds=numpy.where((tca_cycs>=tca.min())&(tca_cycs<=tca.max()))[0]
tca_cycs=tca_cycs[cycinds]
iphoto_cycs=dill['Photocurrent_cycs(A)'][cycinds]*imult
pylab.rc('font', family='serif', serif='Times New Roman', size=11)
fig=pylab.figure(figsize=(3.5, 4.5))
#ax1=pylab.subplot(211)
#ax2=pylab.subplot(212)
# Top panel: CV curves; bottom panel: CA with a twin axis for photocurrent.
ax1=fig.add_axes((.2, .6, .74, .35))
ax2=fig.add_axes((.2, .11, .6, .35))
ax3=ax2.twinx()
# Solid: up sweep; dotted: linear background; dashed: down sweep.
ax1.plot(v1up, i1up, 'g-', linewidth=1.)
ax1.plot(v1up, lin1up, 'g:', linewidth=1.)
ax1.plot(v1dn, i1dn, 'g--', linewidth=1.)
ax1.plot(v2up, i2up, 'b-', linewidth=1.)
ax1.plot(v2up, lin2up, 'b:', linewidth=1.)
ax1.plot(v2dn, i2dn, 'b--', linewidth=1.)
ax1.set_xlim((-.1, .62))
ax1.set_ylim((-40, 130))
ax1.set_xlabel('Potential (V vs H$_2$O/O$_2$)', fontsize=12)
ax1.set_ylabel('Current ($\mu$A)', fontsize=12)
ax2.plot(tca, ica, 'k-')
ax2.plot(tca, icadiff, 'b--', linewidth=2)
ax2.set_xlim((0, 6.5))
ax2.set_ylim((0, 0.4))
ax3.plot(tca_cycs, iphoto_cycs, 'ro-')
ax3.set_ylim((0, 0.1))
ax2.set_xlabel('Elapsed time (s)', fontsize=12)
ax2.set_ylabel('Current ($\mu$A)', fontsize=12)
ax3.set_ylabel('Photocurrent ($\mu$A)', fontsize=12)
pylab.show()
# Print the composition strings of the three plotted samples.
print ''.join(['%s%.3f' %tup for tup in zip(dill['elements'], dill['compositions'])])
print ''.join(['%s%.3f' %tup for tup in zip(d1['elements'], d1['compositions'])])
print ''.join(['%s%.3f' %tup for tup in zip(d2['elements'], d2['compositions'])])
| johnmgregoire/JCAPdatavis | echem_paperplots.py | Python | bsd-3-clause | 3,052 |
""" Documentation package """
import neuroptikon
import wx, wx.html
import os.path, sys, urllib
_sharedFrame = None
def baseURL():
    """Return the file: URL (with trailing slash) of the documentation root."""
    if neuroptikon.runningFromSource:
        docParts = ('documentation', 'build', 'Documentation')
    else:
        docParts = ('documentation',)
    basePath = os.path.join(neuroptikon.rootDir, *docParts)
    return 'file:%s/' % urllib.pathname2url(basePath)
def showPage(page):
    """Display a documentation page relative to the documentation root.

    Prefers the embedded WebKit-based help browser; falls back to the
    user's default browser if the embedded one cannot be used.
    """
    pageURL = baseURL() + page
    # Try to open an embedded WebKit-based help browser.
    try:
        import documentation_frame
        documentation_frame.showPage(pageURL)
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt. The deliberate best-effort fallback to the
        # user's default browser outside of Neuroptikon is preserved.
        wx.LaunchDefaultBrowser(pageURL)
| JaneliaSciComp/Neuroptikon | Source/documentation/__init__.py | Python | bsd-3-clause | 747 |
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Simon Jagoe and Enthought Ltd.
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
import logging
import os
from haas.plugins.discoverer import match_path
from haas.plugins.i_discoverer_plugin import IDiscovererPlugin
from .yaml_test_loader import YamlTestLoader
logger = logging.getLogger(__name__)
class RestTestDiscoverer(IDiscovererPlugin):
    """A ``haas`` test discovery plugin to generate Web API test cases from
    YAML descriptions.
    Parameters
    ----------
    loader : haas.loader.Loader
        The ``haas`` test loader.
    """
    def __init__(self, loader, **kwargs):
        super(RestTestDiscoverer, self).__init__(**kwargs)
        self._loader = loader
        self._yaml_loader = YamlTestLoader(loader)
    @classmethod
    def from_args(cls, args, arg_prefix, loader):
        """Construct the discoverer from parsed command line arguments.
        Parameters
        ----------
        args : argparse.Namespace
            The ``argparse.Namespace`` containing parsed arguments.
        arg_prefix : str
            The prefix used for arguments beloning solely to this plugin.
        loader : haas.loader.Loader
            The test loader used to construct TestCase and TestSuite instances.
        """
        return cls(loader)
    @classmethod
    def add_parser_arguments(cls, parser, option_prefix, dest_prefix):
        """Add options for the plugin to the main argument parser.
        Parameters
        ----------
        parser : argparse.ArgumentParser
            The parser to extend
        option_prefix : str
            The prefix that option strings added by this plugin should use.
        dest_prefix : str
            The prefix that ``dest`` strings for options added by this
            plugin should use.
        """
        # This plugin adds no command-line options; intentionally a no-op.
    def discover(self, start, top_level_directory=None, pattern=None):
        """Discover YAML-formatted Web API tests.
        Parameters
        ----------
        start : str
            Directory from which to recursively discover test cases.
        top_level_directory : None
            Ignored; for API compatibility with haas.
        pattern : None
            Ignored; for API compatibility with haas.
        """
        if os.path.isdir(start):
            start_directory = start
            return self._discover_by_directory(start_directory)
        elif os.path.isfile(start):
            start_filepath = start
            return self._discover_by_file(start_filepath)
        # Neither a directory nor a file: return an empty suite.
        return self._loader.create_suite()
    def _discover_by_directory(self, start_directory):
        """Run test discovery in a directory.
        Parameters
        ----------
        start_directory : str
            The package directory in which to start test discovery.
        """
        start_directory = os.path.abspath(start_directory)
        tests = self._discover_tests(start_directory)
        return self._loader.create_suite(list(tests))
    def _discover_by_file(self, start_filepath):
        """Run test discovery on a single file.
        Parameters
        ----------
        start_filepath : str
            The module file in which to start test discovery.
        """
        start_filepath = os.path.abspath(start_filepath)
        logger.debug('Discovering tests in file: start_filepath=%r',
                     start_filepath)
        tests = self._load_from_file(start_filepath)
        return self._loader.create_suite(list(tests))
    def _load_from_file(self, filepath):
        """Load a suite of tests from one YAML file."""
        logger.debug('Loading tests from %r', filepath)
        tests = self._yaml_loader.load_tests_from_file(filepath)
        return self._loader.create_suite(tests)
    def _discover_tests(self, start_directory):
        """Yield a test suite per ``test*.yml`` file found under the tree."""
        pattern = 'test*.yml'
        for curdir, dirnames, filenames in os.walk(start_directory):
            logger.debug('Discovering tests in %r', curdir)
            for filename in filenames:
                filepath = os.path.join(curdir, filename)
                if not match_path(filename, filepath, pattern):
                    logger.debug('Skipping %r', filepath)
                    continue
                yield self._load_from_file(filepath)
| sjagoe/usagi | usagi/discoverer.py | Python | bsd-3-clause | 4,383 |
from __future__ import unicode_literals
from django.forms import ValidationError
from django.core.exceptions import NON_FIELD_ERRORS
from django.forms.formsets import TOTAL_FORM_COUNT
from django.forms.models import (
BaseModelFormSet, modelformset_factory,
ModelForm, _get_foreign_key, ModelFormMetaclass, ModelFormOptions
)
from django.db.models.fields.related import ForeignObjectRel
from modelcluster.models import get_all_child_relations
class BaseTransientModelFormSet(BaseModelFormSet):
    """ A ModelFormSet that doesn't assume that all its initial data instances exist in the db """
    def _construct_form(self, i, **kwargs):
        # Need to override _construct_form to avoid calling to_python on an empty string PK value
        if self.is_bound and i < self.initial_form_count():
            pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
            pk = self.data[pk_key]
            if pk == '':
                # Blank PK: the instance is transient (not in the db yet),
                # so build a fresh unsaved model instance.
                kwargs['instance'] = self.model()
            else:
                pk_field = self.model._meta.pk
                to_python = self._get_to_python(pk_field)
                pk = to_python(pk)
                kwargs['instance'] = self._existing_object(pk)
        if i < self.initial_form_count() and 'instance' not in kwargs:
            kwargs['instance'] = self.get_queryset()[i]
        if i >= self.initial_form_count() and self.initial_extra:
            # Set initial values for extra forms
            try:
                kwargs['initial'] = self.initial_extra[i - self.initial_form_count()]
            except IndexError:
                pass
        # bypass BaseModelFormSet's own _construct_form
        return super(BaseModelFormSet, self)._construct_form(i, **kwargs)
    def save_existing_objects(self, commit=True):
        # Need to override save_existing_objects so that it doesn't skip over initial forms whose instance
        # has a blank PK (which is taken as an indication that the form was constructed with an
        # instance not present in our queryset)
        self.changed_objects = []
        self.deleted_objects = []
        if not self.initial_forms:
            return []
        saved_instances = []
        forms_to_delete = self.deleted_forms
        for form in self.initial_forms:
            obj = form.instance
            if form in forms_to_delete:
                if obj.pk is None:
                    # no action to be taken to delete an object which isn't in the database
                    continue
                self.deleted_objects.append(obj)
                self.delete_existing(obj, commit=commit)
            elif form.has_changed():
                self.changed_objects.append((obj, form.changed_data))
                saved_instances.append(self.save_existing(form, obj, commit=commit))
                if not commit:
                    self.saved_forms.append(form)
        return saved_instances
def transientmodelformset_factory(model, formset=BaseTransientModelFormSet, **kwargs):
    """Return a model formset class whose default base is BaseTransientModelFormSet."""
    factory_kwargs = dict(kwargs, formset=formset)
    return modelformset_factory(model, **factory_kwargs)
class BaseChildFormSet(BaseTransientModelFormSet):
    """Formset for editing the child objects of a parent model instance.

    Saving manipulates the parent's relation manager in memory; database
    writes only happen when save(commit=True) calls manager.commit().
    """
    def __init__(self, data=None, files=None, instance=None, queryset=None, **kwargs):
        if instance is None:
            self.instance = self.fk.remote_field.model()
        else:
            self.instance = instance

        # Accessor name of the reverse relation from the parent to the child model.
        self.rel_name = ForeignObjectRel(self.fk, self.fk.remote_field.model, related_name=self.fk.remote_field.related_name).get_accessor_name()

        if queryset is None:
            queryset = getattr(self.instance, self.rel_name).all()

        super(BaseChildFormSet, self).__init__(data, files, queryset=queryset, **kwargs)

    def save(self, commit=True):
        # The base ModelFormSet's save(commit=False) will populate the lists
        # self.changed_objects, self.deleted_objects and self.new_objects;
        # use these to perform the appropriate updates on the relation's manager.
        saved_instances = super(BaseChildFormSet, self).save(commit=False)

        manager = getattr(self.instance, self.rel_name)

        # if model has a sort_order_field defined, assign order indexes to the attribute
        # named in it
        if self.can_order and hasattr(self.model, 'sort_order_field'):
            sort_order_field = getattr(self.model, 'sort_order_field')
            for i, form in enumerate(self.ordered_forms):
                setattr(form.instance, sort_order_field, i)

        # If the manager has existing instances with a blank ID, we have no way of knowing
        # whether these correspond to items in the submitted data. We'll assume that they do,
        # as that's the most common case (i.e. the formset contains the full set of child objects,
        # not just a selection of additions / updates) and so we delete all ID-less objects here
        # on the basis that they will be re-added by the formset saving mechanism.
        no_id_instances = [obj for obj in manager.all() if obj.pk is None]
        if no_id_instances:
            manager.remove(*no_id_instances)

        manager.add(*saved_instances)
        manager.remove(*self.deleted_objects)

        self.save_m2m()  # ensures any parental-m2m fields are saved.

        if commit:
            manager.commit()

        return saved_instances

    def clean(self, *args, **kwargs):
        # Run cross-form uniqueness checks in addition to standard cleaning.
        self.validate_unique()
        return super(BaseChildFormSet, self).clean(*args, **kwargs)

    def validate_unique(self):
        '''This clean method will check for unique_together condition'''
        # Collect unique_checks and to run from all the forms.
        all_unique_checks = set()
        all_date_checks = set()
        forms_to_delete = self.deleted_forms
        valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
        for form in valid_forms:
            unique_checks, date_checks = form.instance._get_unique_checks()
            all_unique_checks.update(unique_checks)
            all_date_checks.update(date_checks)

        errors = []
        # Do each of the unique checks (unique and unique_together)
        for uclass, unique_check in all_unique_checks:
            seen_data = set()
            for form in valid_forms:
                # Get the data for the set of fields that must be unique among the forms.
                row_data = (
                    field if field in self.unique_fields else form.cleaned_data[field]
                    for field in unique_check if field in form.cleaned_data
                )
                # Reduce Model instances to their primary key values
                row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
                                 for d in row_data)
                if row_data and None not in row_data:
                    # if we've already seen it then we have a uniqueness failure
                    if row_data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_unique_error_message(unique_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        # remove the data from the cleaned_data dict since it was invalid
                        for field in unique_check:
                            if field in form.cleaned_data:
                                del form.cleaned_data[field]
                    # mark the data as seen
                    seen_data.add(row_data)
        if errors:
            raise ValidationError(errors)
def childformset_factory(
        parent_model, model, form=ModelForm,
        formset=BaseChildFormSet, fk_name=None, fields=None, exclude=None,
        extra=3, can_order=False, can_delete=True, max_num=None, validate_max=False,
        formfield_callback=None, widgets=None, min_num=None, validate_min=False):
    """Build a BaseChildFormSet subclass for editing ``model`` children of
    ``parent_model``, with the parent FK excluded from the form fields."""
    fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
    # enforce a max_num=1 when the foreign key to the parent model is unique.
    if fk.unique:
        max_num = 1
        validate_max = True
    # Fix: copy the exclude list before extending it. The original code did
    # ``exclude += [fk.name]``, which mutates a caller-supplied list in place.
    exclude = [] if exclude is None else list(exclude)
    exclude.append(fk.name)
    kwargs = {
        'form': form,
        'formfield_callback': formfield_callback,
        'formset': formset,
        'extra': extra,
        'can_delete': can_delete,
        # if the model supplies a sort_order_field, enable ordering regardless of
        # the current setting of can_order
        'can_order': (can_order or hasattr(model, 'sort_order_field')),
        'fields': fields,
        'exclude': exclude,
        'max_num': max_num,
        'validate_max': validate_max,
        'widgets': widgets,
        'min_num': min_num,
        'validate_min': validate_min,
    }
    FormSet = transientmodelformset_factory(model, **kwargs)
    # Stash the FK so BaseChildFormSet.__init__ can find the relation.
    FormSet.fk = fk
    return FormSet
class ClusterFormOptions(ModelFormOptions):
    """ModelFormOptions extended with the 'formsets' / 'exclude_formsets'
    Meta options used to select which child relations get inline formsets."""
    def __init__(self, options=None):
        super(ClusterFormOptions, self).__init__(options=options)
        for option_name in ('formsets', 'exclude_formsets'):
            setattr(self, option_name, getattr(options, option_name, None))
class ClusterFormMetaclass(ModelFormMetaclass):
    """Metaclass that attaches a dict of child formset classes (one per child
    relation of the model) to each ClusterForm subclass as ``formsets``."""
    # Number of blank 'extra' forms each generated child formset gets.
    extra_form_count = 3

    @classmethod
    def child_form(cls):
        # Form class used for child objects; subclasses may override.
        return ClusterForm

    def __new__(cls, name, bases, attrs):
        try:
            parents = [b for b in bases if issubclass(b, ClusterForm)]
        except NameError:
            # We are defining ClusterForm itself.
            parents = None

        # grab any formfield_callback that happens to be defined in attrs -
        # so that we can pass it on to child formsets - before ModelFormMetaclass deletes it.
        # BAD METACLASS NO BISCUIT.
        formfield_callback = attrs.get('formfield_callback')

        new_class = super(ClusterFormMetaclass, cls).__new__(cls, name, bases, attrs)
        if not parents:
            return new_class

        # ModelFormMetaclass will have set up new_class._meta as a ModelFormOptions instance;
        # replace that with ClusterFormOptions so that we can access _meta.formsets
        opts = new_class._meta = ClusterFormOptions(getattr(new_class, 'Meta', None))
        if opts.model:
            formsets = {}
            for rel in get_all_child_relations(opts.model):
                # to build a childformset class from this relation, we need to specify:
                # - the base model (opts.model)
                # - the child model (rel.field.model)
                # - the fk_name from the child model to the base (rel.field.name)
                rel_name = rel.get_accessor_name()

                # apply 'formsets' and 'exclude_formsets' rules from meta
                if opts.formsets is not None and rel_name not in opts.formsets:
                    continue
                if opts.exclude_formsets and rel_name in opts.exclude_formsets:
                    continue

                try:
                    widgets = opts.widgets.get(rel_name)
                except AttributeError:  # thrown if opts.widgets is None
                    widgets = None

                kwargs = {
                    'extra': cls.extra_form_count,
                    'form': cls.child_form(),
                    'formfield_callback': formfield_callback,
                    'fk_name': rel.field.name,
                    'widgets': widgets
                }

                # see if opts.formsets looks like a dict; if so, allow the value
                # to override kwargs
                try:
                    kwargs.update(opts.formsets.get(rel_name))
                except AttributeError:
                    pass

                formset = childformset_factory(opts.model, rel.field.model, **kwargs)
                formsets[rel_name] = formset

            new_class.formsets = formsets

        # Remember whether the user explicitly chose the formset list; used by
        # ClusterForm.__init__ to decide whether omitted formsets are allowed.
        new_class._has_explicit_formsets = (opts.formsets is not None or opts.exclude_formsets is not None)
        return new_class
class ClusterForm(ModelForm, metaclass=ClusterFormMetaclass):
    """ModelForm that also instantiates, validates and saves the child
    formsets built by ClusterFormMetaclass."""
    def __init__(self, data=None, files=None, instance=None, prefix=None, **kwargs):
        super(ClusterForm, self).__init__(data, files, instance=instance, prefix=prefix, **kwargs)

        self.formsets = {}
        for rel_name, formset_class in self.__class__.formsets.items():
            if prefix:
                formset_prefix = "%s-%s" % (prefix, rel_name)
            else:
                formset_prefix = rel_name
            self.formsets[rel_name] = formset_class(data, files, instance=instance, prefix=formset_prefix)

        if self.is_bound and not self._has_explicit_formsets:
            # check which formsets have actually been provided as part of the form submission -
            # if no `formsets` or `exclude_formsets` was specified, we allow them to be omitted
            # (https://github.com/wagtail/wagtail/issues/5414#issuecomment-567468127).
            self._posted_formsets = [
                formset
                for formset in self.formsets.values()
                if '%s-%s' % (formset.prefix, TOTAL_FORM_COUNT) in self.data
            ]
        else:
            # expect all defined formsets to be part of the post
            self._posted_formsets = self.formsets.values()

    def as_p(self):
        """Render the form followed by all of its child formsets."""
        form_as_p = super(ClusterForm, self).as_p()
        return form_as_p + ''.join([formset.as_p() for formset in self.formsets.values()])

    def is_valid(self):
        # Valid only if the form itself AND every posted child formset validate.
        form_is_valid = super(ClusterForm, self).is_valid()
        formsets_are_valid = all(formset.is_valid() for formset in self._posted_formsets)
        return form_is_valid and formsets_are_valid

    def is_multipart(self):
        # Multipart if the form or any child formset needs file uploads.
        return (
            super(ClusterForm, self).is_multipart()
            or any(formset.is_multipart() for formset in self.formsets.values())
        )

    @property
    def media(self):
        # Combine the form's media with every child formset's media.
        media = super(ClusterForm, self).media
        for formset in self.formsets.values():
            media = media + formset.media
        return media

    def save(self, commit=True):
        # do we have any fields that expect us to call save_m2m immediately?
        save_m2m_now = False
        exclude = self._meta.exclude
        fields = self._meta.fields

        for f in self.instance._meta.get_fields():
            if fields and f.name not in fields:
                continue
            if exclude and f.name in exclude:
                continue
            if getattr(f, '_need_commit_after_assignment', False):
                save_m2m_now = True
                break

        instance = super(ClusterForm, self).save(commit=(commit and not save_m2m_now))

        # The M2M-like fields designed for use with ClusterForm (currently
        # ParentalManyToManyField and ClusterTaggableManager) will manage their own in-memory
        # relations, and not immediately write to the database when we assign to them.
        # For these fields (identified by the _need_commit_after_assignment
        # flag), save_m2m() is a safe operation that does not affect the database and is thus
        # valid for commit=False. In the commit=True case, committing to the database happens
        # in the subsequent instance.save (so this needs to happen after save_m2m to ensure
        # we have the updated relation data in place).

        # For annoying legacy reasons we sometimes need to accommodate 'classic' M2M fields
        # (particularly taggit.TaggableManager) within ClusterForm. These fields
        # generally do require our instance to exist in the database at the point we call
        # save_m2m() - for this reason, we only proceed with the customisation described above
        # (i.e. postpone the instance.save() operation until after save_m2m) if there's a
        # _need_commit_after_assignment field on the form that demands it.

        if save_m2m_now:
            self.save_m2m()

            if commit:
                instance.save()

        for formset in self._posted_formsets:
            formset.instance = instance
            formset.save(commit=commit)
        return instance

    def has_changed(self):
        """Return True if data differs from initial."""
        # Need to recurse over nested formsets so that the form is saved if there are changes
        # to child forms but not the parent
        if self.formsets:
            for formset in self._posted_formsets:
                for form in formset.forms:
                    if form.has_changed():
                        return True
        return bool(self.changed_data)
| torchbox/django-modelcluster | modelcluster/forms.py | Python | bsd-3-clause | 16,632 |
"""
See http://pbpython.com/advanced-excel-workbooks.html for details on this script
"""
from __future__ import print_function
import pandas as pd
from xlsxwriter.utility import xl_rowcol_to_cell
def format_excel(writer, df_size):
    """ Add Excel specific formatting to the workbook

    df_size is a tuple representing the size of the dataframe - typically called
    by df.shape -> (20,3)
    """
    # Get the workbook and the summary sheet so we can add the formatting
    workbook = writer.book
    worksheet = writer.sheets['summary']

    # Centered accounting-style number format for the money columns.
    money_fmt = workbook.add_format({'num_format': 42, 'align': 'center'})
    worksheet.set_column('A:A', 20)
    worksheet.set_column('B:C', 15, money_fmt)

    num_rows, num_cols = df_size
    # One extra row for the total line; one fewer column since the index
    # is not written out.
    last_cell = xl_rowcol_to_cell(num_rows + 1, num_cols - 1)
    # This assumes we start in the left hand corner
    table_range = 'A1:{}'.format(last_cell)

    worksheet.add_table(table_range, {
        'columns': [
            {'header': 'account', 'total_string': 'Total'},
            {'header': 'Total Sales', 'total_function': 'sum'},
            {'header': 'Average Sales', 'total_function': 'average'},
        ],
        'autofilter': False,
        'total_row': True,
        'style': 'Table Style Medium 20',
    })
if __name__ == "__main__":
    # Pull the sample sales data straight from GitHub (requires network access).
    sales_df = pd.read_excel('https://github.com/chris1610/pbpython/blob/master/data/sample-salesv3.xlsx?raw=true')
    # Per-customer totals and averages of the extended price.
    sales_summary = sales_df.groupby(['name'])['ext price'].agg(['sum', 'mean'])
    # Reset the index for consistency when saving in Excel
    sales_summary.reset_index(inplace=True)
    writer = pd.ExcelWriter('sales_summary.xlsx', engine='xlsxwriter')
    sales_summary.to_excel(writer, 'summary', index=False)
    format_excel(writer, sales_summary.shape)
    writer.save()
| chris1610/pbpython | code/advanced_excel.py | Python | bsd-3-clause | 2,204 |
# !/usr/bin/env python
"""Testing a sprite.
The ball should bounce off the sides of the window. You may resize the
window.
This test should just run without failing.
"""
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import os
import unittest
from pyglet.gl import glClear
import pyglet.window
import pyglet.window.event
from pyglet import clock
from scene2d import Sprite, Image2d, FlatView
from scene2d.image import TintEffect
from scene2d.camera import FlatCamera
ball_png = os.path.join(os.path.dirname(__file__), 'ball.png')
class BouncySprite(Sprite):
    """Sprite that moves by (dx, dy) each update and bounces off the edges
    of a 320x320 window."""

    def update(self):
        props = self.properties

        # Advance by the current velocity.
        self.x += props['dx']
        self.y += props['dy']

        # Horizontal bounce: clamp to the edge and reverse dx.
        if self.left < 0:
            self.left = 0
            props['dx'] = -props['dx']
        elif self.right > 320:
            self.right = 320
            props['dx'] = -props['dx']

        # Vertical bounce: clamp to the edge and reverse dy.
        if self.bottom < 0:
            self.bottom = 0
            props['dy'] = -props['dy']
        elif self.top > 320:
            self.top = 320
            props['dy'] = -props['dy']
class SpriteOverlapTest(unittest.TestCase):
    """Interactive test: two balls bounce around a window and a tint effect
    is applied to the second ball while they overlap. Runs until the window
    is closed."""

    def test_sprite(self):
        w = pyglet.window.Window(width=320, height=320)

        image = Image2d.load(ball_png)
        # Two balls starting at opposite sides, moving toward each other.
        ball1 = BouncySprite(0, 0, 64, 64, image, properties=dict(dx=10, dy=5))
        ball2 = BouncySprite(288, 0, 64, 64, image,
                             properties=dict(dx=-10, dy=5))
        view = FlatView(0, 0, 320, 320, sprites=[ball1, ball2])
        view.fx, view.fy = 160, 160

        clock.set_fps_limit(60)
        e = TintEffect((.5, 1, .5, 1))
        while not w.has_exit:
            clock.tick()
            w.dispatch_events()

            ball1.update()
            ball2.update()
            # Apply the tint while overlapping; remove it once separated.
            # The 'overlap' property doubles as the "effect attached" flag.
            if ball1.overlaps(ball2):
                if 'overlap' not in ball2.properties:
                    ball2.properties['overlap'] = e
                    ball2.add_effect(e)
            elif 'overlap' in ball2.properties:
                ball2.remove_effect(e)
                del ball2.properties['overlap']

            view.clear()
            view.draw()
            w.flip()
        w.close()
# Fix: only run the test suite when executed as a script; the unguarded
# unittest.main() call ran (and exited) on mere import of this module.
if __name__ == '__main__':
    unittest.main()
| bitcraft/pyglet | contrib/scene2d/tests/scene2d/SPRITE_OVERLAP.py | Python | bsd-3-clause | 2,162 |
import os
from subprocess import call, Popen, PIPE
import sys
from . import Command
from . import utils
class OpenSequenceInRV(Command):
    """%prog [options] [paths]

    Open the latest version for each given entity.
    """

    def run(self, sgfs, opts, args):
        # Parse them all. Each arg is either an existing file path (used as-is)
        # or an entity spec resolved to a Shot/Task via Shotgun.
        arg_to_movie = {}
        arg_to_entity = {}
        for arg in args:
            if os.path.exists(arg):
                arg_to_movie[arg] = arg
                continue
            print 'Parsing %r...' % arg
            data = utils.parse_spec(sgfs, arg.split(), ['Shot'])
            type_ = data.get('type')
            id_ = data.get('id')
            if not (type_ or id_):
                print 'no entities found for', repr(arg)
                return 1
            arg_to_entity.setdefault(type_, {})[arg] = sgfs.session.merge(dict(type=type_, id=id_))

        tasks = arg_to_entity.pop('Task', {})
        shots = arg_to_entity.pop('Shot', {})
        if arg_to_entity:
            print 'found entities that were not Task or Shot:', ', '.join(sorted(arg_to_entity))
            return 2

        # Tasks resolve to their parent Shot.
        if tasks:
            print 'Getting shots from tasks...'
            sgfs.session.fetch(tasks.values(), 'entity')
            for arg, task in tasks.iteritems():
                shots[arg] = task['entity']

        # Each shot contributes its latest version's movie (or frames) path.
        if shots:
            print 'Getting versions from shots...'
            sgfs.session.fetch(shots.values(), ('sg_latest_version.Version.sg_path_to_movie', 'sg_latest_version.Version.sg_path_to_frames'))
            for arg, shot in shots.iteritems():
                version = shot.get('sg_latest_version')
                if not version:
                    print 'no version for', shot
                    return 3
                path = version.get('sg_path_to_movie') or version.get('sg_path_to_frames')
                if not path:
                    print 'no movie or frames for', version
                    return 4
                arg_to_movie[arg] = path

        # Preserve the order the args were given on the command line.
        movies = [arg_to_movie[arg] for arg in args]

        print 'Opening:'
        print '\t' + '\n\t'.join(movies)

        # Ask RV to bake the movie list into an rvlink:// URL (last token of
        # its stderr output), then hand that URL to the OS opener.
        rvlink = Popen(['rv', '-bakeURL'] + movies, stderr=PIPE).communicate()[1].strip().split()[-1]
        self.open(rvlink)

    def open(self, x):
        # Platform-specific opener. NOTE(review): assumes `xdg-open` exists on
        # every non-Mac platform - confirm for Windows.
        if sys.platform.startswith('darwin'):
            call(['open', x])
        else:
            call(['xdg-open', x])
# NOTE(review): module-level instance presumably discovered by the sgfs
# command framework as the entry point - confirm against the CLI loader.
run = OpenSequenceInRV()
| westernx/sgfs | sgfs/commands/rv.py | Python | bsd-3-clause | 2,458 |
"""
External serialization for testing remote module loading.
"""
from tiddlyweb.serializations import SerializationInterface
class Serialization(SerializationInterface):
    """Debug serializer: each hook just prints what it was asked to
    serialize/deserialize instead of producing real output. Used to test
    remote serialization-module loading."""

    def list_recipes(self, recipes):
        print recipes

    def list_bags(self, bags):
        print bags

    def recipe_as(self, recipe):
        # Serialize a recipe ("recipe as" the target format).
        print "r_as: %s" % recipe

    def as_recipe(self, recipe, input):
        # Deserialize input into a recipe ("as recipe").
        print "as_r: %s" % input

    def bag_as(self, bag):
        print "b_as: %s" % bag

    def as_bag(self, bag, input):
        print "as_b: %s" % input

    def tiddler_as(self, tiddler):
        print "t_as: %s" % tiddler

    def as_tiddler(self, tiddler, input):
        print "as_t: %s" % input
| funkyeah/tiddlyweb | test/other/tiddlyweb/serializations/debug.py | Python | bsd-3-clause | 701 |
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import copy
import gyp.common
import os
import os.path
import re
import shlex
import subprocess
import sys
import tempfile
from gyp.common import GypError
class XcodeSettings(object):
    """A class that understands the gyp 'xcode_settings' object."""

    # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
    # at class-level for efficiency. (Class-level dicts are shared mutable
    # state across every instance.)
    _sdk_path_cache = {}
    _sdk_root_cache = {}

    # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
    # cached at class-level for efficiency.
    _plist_cache = {}

    # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
    # cached at class-level for efficiency.
    _codesigning_key_cache = {}

    # Populated lazily by _XcodeVersion. Shared by all XcodeSettings, so cached
    # at class-level for efficiency.
    _xcode_version_cache = ()
    def __init__(self, spec):
        """Capture the target spec and its per-config xcode_settings dicts."""
        self.spec = spec

        # Set True below if any config declares IPHONEOS_DEPLOYMENT_TARGET.
        self.isIOS = False

        # Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
        # This means self.xcode_settings[config] always contains all settings
        # for that config -- the per-target settings as well. Settings that are
        # the same for all configs are implicitly per-target settings.
        self.xcode_settings = {}
        configs = spec['configurations']
        for configname, config in configs.iteritems():
            self.xcode_settings[configname] = config.get('xcode_settings', {})
            self._ConvertConditionalKeys(configname)
            if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
                                                   None):
                self.isIOS = True

        # This is only non-None temporarily during the execution of some methods.
        self.configname = None

        # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
        self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
    def _ConvertConditionalKeys(self, configname):
        """Converts or warns on conditional keys. Xcode supports conditional keys,
        such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
        with some keys converted while the rest force a warning."""
        settings = self.xcode_settings[configname]
        conditional_keys = [key for key in settings if key.endswith(']')]
        for key in conditional_keys:
            # If you need more, speak up at http://crbug.com/122592
            if key.endswith("[sdk=iphoneos*]"):
                if configname.endswith("iphoneos"):
                    # Promote the conditional value to the unconditional key.
                    new_key = key.split("[")[0]
                    settings[new_key] = settings[key]
            else:
                print 'Warning: Conditional keys not implemented, ignoring:', \
                    ' '.join(conditional_keys)
            # The conditional key is always removed, converted or not.
            del settings[key]
    def _Settings(self):
        # Settings dict for the config currently selected via self.configname.
        assert self.configname
        return self.xcode_settings[self.configname]

    def _Test(self, test_key, cond_key, default):
        # True if setting |test_key| (falling back to |default|) == |cond_key|.
        return self._Settings().get(test_key, default) == cond_key

    def _Appendf(self, lst, test_key, format_str, default=None):
        # Append |format_str| %-formatted with the setting's value, or with
        # |default| when the key is absent and a default was given.
        if test_key in self._Settings():
            lst.append(format_str % str(self._Settings()[test_key]))
        elif default:
            lst.append(format_str % str(default))

    def _WarnUnimplemented(self, test_key):
        # Warn when a setting we do not emulate is actually present.
        if test_key in self._Settings():
            print 'Warning: Ignoring not yet implemented key "%s".' % test_key

    def _IsBundle(self):
        # mac_bundle comes through the spec as 0/1; nonzero means bundle.
        return int(self.spec.get('mac_bundle', 0)) != 0
    def GetFrameworkVersion(self):
        """Returns the framework version of the current target. Only valid for
        bundles."""
        assert self._IsBundle()
        return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')

    def GetWrapperExtension(self):
        """Returns the bundle extension (.app, .framework, .plugin, etc). Only
        valid for bundles."""
        assert self._IsBundle()
        if self.spec['type'] in ('loadable_module', 'shared_library'):
            default_wrapper_extension = {
                'loadable_module': 'bundle',
                'shared_library': 'framework',
            }[self.spec['type']]
            # product_extension overrides WRAPPER_EXTENSION, which overrides
            # the per-type default.
            wrapper_extension = self.GetPerTargetSetting(
                'WRAPPER_EXTENSION', default=default_wrapper_extension)
            return '.' + self.spec.get('product_extension', wrapper_extension)
        elif self.spec['type'] == 'executable':
            return '.' + self.spec.get('product_extension', 'app')
        else:
            assert False, "Don't know extension for '%s', target '%s'" % (
                self.spec['type'], self.spec['target_name'])

    def GetProductName(self):
        """Returns PRODUCT_NAME."""
        return self.spec.get('product_name', self.spec['target_name'])

    def GetFullProductName(self):
        """Returns FULL_PRODUCT_NAME."""
        if self._IsBundle():
            return self.GetWrapperName()
        else:
            return self._GetStandaloneBinaryPath()

    def GetWrapperName(self):
        """Returns the directory name of the bundle represented by this target.
        Only valid for bundles."""
        assert self._IsBundle()
        return self.GetProductName() + self.GetWrapperExtension()

    def GetBundleContentsFolderPath(self):
        """Returns the qualified path to the bundle's contents folder. E.g.
        Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
        if self.isIOS:
            # iOS bundles are flat: the wrapper directory IS the contents folder.
            return self.GetWrapperName()
        assert self._IsBundle()
        if self.spec['type'] == 'shared_library':
            return os.path.join(
                self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
        else:
            # loadable_modules have a 'Contents' folder like executables.
            return os.path.join(self.GetWrapperName(), 'Contents')

    def GetBundleResourceFolder(self):
        """Returns the qualified path to the bundle's resource folder. E.g.
        Chromium.app/Contents/Resources. Only valid for bundles."""
        assert self._IsBundle()
        if self.isIOS:
            return self.GetBundleContentsFolderPath()
        return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')

    def GetBundlePlistPath(self):
        """Returns the qualified path to the bundle's plist file. E.g.
        Chromium.app/Contents/Info.plist. Only valid for bundles."""
        assert self._IsBundle()
        if self.spec['type'] in ('executable', 'loadable_module'):
            return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
        else:
            # Frameworks keep Info.plist under Resources.
            return os.path.join(self.GetBundleContentsFolderPath(),
                                'Resources', 'Info.plist')
    def GetProductType(self):
        """Returns the PRODUCT_TYPE of this target."""
        # Raises KeyError for target types that have no Xcode product type
        # in the relevant table (e.g. a bundled static_library).
        if self._IsBundle():
            return {
                'executable': 'com.apple.product-type.application',
                'loadable_module': 'com.apple.product-type.bundle',
                'shared_library': 'com.apple.product-type.framework',
            }[self.spec['type']]
        else:
            return {
                'executable': 'com.apple.product-type.tool',
                'loadable_module': 'com.apple.product-type.library.dynamic',
                'shared_library': 'com.apple.product-type.library.dynamic',
                'static_library': 'com.apple.product-type.library.static',
            }[self.spec['type']]

    def GetMachOType(self):
        """Returns the MACH_O_TYPE of this target."""
        # Weird, but matches Xcode.
        if not self._IsBundle() and self.spec['type'] == 'executable':
            return ''
        return {
            'executable': 'mh_execute',
            'static_library': 'staticlib',
            'shared_library': 'mh_dylib',
            'loadable_module': 'mh_bundle',
        }[self.spec['type']]
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('shared_library') or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
return os.path.join(path, self.GetExecutableName())
    def _GetStandaloneExecutableSuffix(self):
        # An explicit product_extension always wins over the per-type default.
        if 'product_extension' in self.spec:
            return '.' + self.spec['product_extension']
        return {
            'executable': '',
            'static_library': '.a',
            'shared_library': '.dylib',
            'loadable_module': '.so',
        }[self.spec['type']]

    def _GetStandaloneExecutablePrefix(self):
        # product_prefix overrides; otherwise libraries get the 'lib' prefix.
        return self.spec.get('product_prefix', {
            'executable': '',
            'static_library': 'lib',
            'shared_library': 'lib',
            # Non-bundled loadable_modules are called foo.so for some reason
            # (that is, .so and no prefix) with the xcode build -- match that.
            'loadable_module': '',
        }[self.spec['type']])
def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec['type'] in (
'executable', 'shared_library', 'static_library', 'loadable_module'), (
'Unexpected type %s' % self.spec['type'])
target = self.spec['target_name']
if self.spec['type'] == 'static_library':
if target[:3] == 'lib':
target = target[3:]
elif self.spec['type'] in ('loadable_module', 'shared_library'):
if target[:3] == 'lib':
target = target[3:]
target_prefix = self._GetStandaloneExecutablePrefix()
target = self.spec.get('product_name', target)
target_ext = self._GetStandaloneExecutableSuffix()
return target_prefix + target + target_ext
    def GetExecutableName(self):
        """Returns the executable name of the bundle represented by this target.
        E.g. Chromium."""
        if self._IsBundle():
            return self.spec.get('product_name', self.spec['target_name'])
        else:
            return self._GetStandaloneBinaryPath()

    def GetExecutablePath(self):
        """Returns the directory name of the bundle represented by this target. E.g.
        Chromium.app/Contents/MacOS/Chromium."""
        if self._IsBundle():
            return self._GetBundleBinaryPath()
        else:
            return self._GetStandaloneBinaryPath()

    def GetActiveArchs(self, configname):
        """Returns the architectures this target should be built for."""
        # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
        # CURRENT_ARCH / NATIVE_ARCH env vars?
        return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
    def _GetStdout(self, cmdlist):
        # Run |cmdlist| and return its stdout, trailing newline stripped.
        # Raises GypError on a nonzero exit code.
        job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
        out = job.communicate()[0]
        if job.returncode != 0:
            sys.stderr.write(out + '\n')
            raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
        return out.rstrip('\n')

    def _GetSdkVersionInfoItem(self, sdk, infoitem):
        # Query xcodebuild for a single info item (e.g. 'Path') of |sdk|.
        return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])

    def _SdkRoot(self, configname):
        # SDKROOT setting for |configname|, or the currently selected config.
        if configname is None:
            configname = self.configname
        return self.GetPerConfigSetting('SDKROOT', configname, default='')

    def _SdkPath(self, configname=None):
        sdk_root = self._SdkRoot(configname)
        # An absolute SDKROOT is already a filesystem path; otherwise it is an
        # SDK name that must be resolved via xcodebuild.
        if sdk_root.startswith('/'):
            return sdk_root
        return self._XcodeSdkPath(sdk_root)

    def _XcodeSdkPath(self, sdk_root):
        # Resolve an SDK name (e.g. 'macosx10.8') to its path, memoized in the
        # class-level caches shared by all XcodeSettings instances.
        if sdk_root not in XcodeSettings._sdk_path_cache:
            sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
            XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
            if sdk_root:
                XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
        return XcodeSettings._sdk_path_cache[sdk_root]
    def _AppendPlatformVersionMinFlags(self, lst):
        # Append minimum-OS-version flags for macOS and (device or simulator) iOS.
        self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
        if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
            # TODO: Implement this better?
            # The simulator is detected from the SDK directory name.
            sdk_path_basename = os.path.basename(self._SdkPath())
            if sdk_path_basename.lower().startswith('iphonesimulator'):
                self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
                              '-mios-simulator-version-min=%s')
            else:
                self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
                              '-miphoneos-version-min=%s')
    def GetCflags(self, configname, arch=None):
        """Returns flags that need to be added to .c, .cc, .m, and .mm
        compilations."""
        # This functions (and the similar ones below) do not offer complete
        # emulation of all xcode_settings keys. They're implemented on demand.

        # Temporarily select |configname| so the _Settings/_Test helpers read
        # the right config; restored to None before returning.
        self.configname = configname
        cflags = []

        sdk_root = self._SdkPath()
        if 'SDKROOT' in self._Settings():
            cflags.append('-isysroot %s' % sdk_root)

        if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
            cflags.append('-Wconstant-conversion')

        if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
            cflags.append('-funsigned-char')

        if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
            cflags.append('-fasm-blocks')

        if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
            if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
                cflags.append('-mdynamic-no-pic')
        else:
            pass
            # TODO: In this case, it depends on the target. xcode passes
            # mdynamic-no-pic by default for executable and possibly static lib
            # according to mento

        if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
            cflags.append('-mpascal-strings')

        self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')

        if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
            dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
            if dbg_format == 'dwarf':
                cflags.append('-gdwarf-2')
            elif dbg_format == 'stabs':
                raise NotImplementedError('stabs debug format is not supported yet.')
            elif dbg_format == 'dwarf-with-dsym':
                # The dSYM generation itself is handled elsewhere; the compile
                # flag is the same as plain dwarf.
                cflags.append('-gdwarf-2')
            else:
                raise NotImplementedError('Unknown debug format %s' % dbg_format)

        if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
            cflags.append('-fstrict-aliasing')
        elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
            cflags.append('-fno-strict-aliasing')

        if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
            cflags.append('-fvisibility=hidden')

        if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
            cflags.append('-Werror')

        if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
            cflags.append('-Wnewline-eof')

        self._AppendPlatformVersionMinFlags(cflags)

        # TODO:
        if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
            self._WarnUnimplemented('COPY_PHASE_STRIP')
        self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
        self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')

        # TODO: This is exported correctly, but assigning to it is not supported.
        self._WarnUnimplemented('MACH_O_TYPE')
        self._WarnUnimplemented('PRODUCT_TYPE')

        if arch is not None:
            archs = [arch]
        else:
            archs = self._Settings().get('ARCHS', [self._DefaultArch()])
        if len(archs) != 1:
            # TODO: Supporting fat binaries will be annoying.
            self._WarnUnimplemented('ARCHS')
            archs = ['i386']
        cflags.append('-arch ' + archs[0])

        # SSE extension flags only make sense on Intel architectures.
        if archs[0] in ('i386', 'x86_64'):
            if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
                cflags.append('-msse3')
            if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
                          default='NO'):
                cflags.append('-mssse3')  # Note 3rd 's'.
            if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
                cflags.append('-msse4.1')
            if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
                cflags.append('-msse4.2')

        cflags += self._Settings().get('WARNING_CFLAGS', [])

        config = self.spec['configurations'][self.configname]
        framework_dirs = config.get('mac_framework_dirs', [])
        for directory in framework_dirs:
            cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))

        self.configname = None
        return cflags
def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
def GetCflagsCC(self, configname):
  """Returns flags that need to be added to .cc, and .mm compilations."""
  self.configname = configname
  cflags_cc = []
  clang_cxx_language_standard = self._Settings().get(
      'CLANG_CXX_LANGUAGE_STANDARD')
  # Note: Don't make c++0x to c++11 so that c++0x can be used with older
  # clangs that don't understand c++11 yet (like Xcode 4.2's).
  if clang_cxx_language_standard:
    cflags_cc.append('-std=%s' % clang_cxx_language_standard)
  cflags_cc.append('-std=%s' % clang_cxx_language_standard) if False else None  # (no-op guard removed)
  self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
  # The following settings default to Xcode's behavior: RTTI, exceptions and
  # threadsafe statics are on unless explicitly disabled.
  if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
    cflags_cc.append('-fno-rtti')
  if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
    cflags_cc.append('-fno-exceptions')
  if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
    cflags_cc.append('-fvisibility-inlines-hidden')
  if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
    cflags_cc.append('-fno-threadsafe-statics')
  # Note: This flag is a no-op for clang, it only has an effect for gcc.
  if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
    cflags_cc.append('-Wno-invalid-offsetof')
  other_ccflags = []
  for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
    # TODO: More general variable expansion. Missing in many other places too.
    # '$(inherited)' (in any of its spellings) expands to OTHER_CFLAGS here.
    if flag in ('$inherited', '$(inherited)', '${inherited}'):
      flag = '$OTHER_CFLAGS'
    if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
      other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
    else:
      other_ccflags.append(flag)
  cflags_cc += other_ccflags
  self.configname = None
  return cflags_cc
def _AddObjectiveCGarbageCollectionFlags(self, flags):
  """Appends the Objective-C GC cflag implied by GCC_ENABLE_OBJC_GC."""
  policy_to_flag = {
      'supported': '-fobjc-gc',
      'required': '-fobjc-gc-only',
  }
  gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
  if gc_policy in policy_to_flag:
    flags.append(policy_to_flag[gc_policy])
def _AddObjectiveCARCFlags(self, flags):
  """Appends -fobjc-arc when CLANG_ENABLE_OBJC_ARC is YES."""
  arc_enabled = self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO')
  if arc_enabled:
    flags.append('-fobjc-arc')
def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
  """Appends the missing-property-synthesis warning flag when enabled."""
  wanted = self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
                      'YES', default='NO')
  if wanted:
    flags.append('-Wobjc-missing-property-synthesis')
def GetCflagsObjC(self, configname):
  """Returns flags that need to be added to .m compilations."""
  self.configname = configname
  flags = []
  # All Objective-C specific flag groups apply to plain .m files.
  for add_flags in (self._AddObjectiveCGarbageCollectionFlags,
                    self._AddObjectiveCARCFlags,
                    self._AddObjectiveCMissingPropertySynthesisFlags):
    add_flags(flags)
  self.configname = None
  return flags
def GetCflagsObjCC(self, configname):
  """Returns flags that need to be added to .mm compilations."""
  self.configname = configname
  flags = []
  # Same Objective-C groups as .m files...
  for add_flags in (self._AddObjectiveCGarbageCollectionFlags,
                    self._AddObjectiveCARCFlags,
                    self._AddObjectiveCMissingPropertySynthesisFlags):
    add_flags(flags)
  # ...plus the C++ constructor/destructor interop flag.
  if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
    flags.append('-fobjc-call-cxx-cdtors')
  self.configname = None
  return flags
def GetInstallNameBase(self):
  """Return DYLIB_INSTALL_NAME_BASE for this target."""
  # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
  target_type = self.spec['type']
  applies = (target_type == 'shared_library' or
             (target_type == 'loadable_module' and not self._IsBundle()))
  if not applies:
    return None
  if self._IsBundle():
    default_base = '/Library/Frameworks'
  else:
    default_base = '/usr/local/lib'
  return self.GetPerTargetSetting('DYLIB_INSTALL_NAME_BASE',
                                  default=default_base)
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
def GetInstallName(self):
  """Return LD_DYLIB_INSTALL_NAME for this target."""
  # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
  if (self.spec['type'] != 'shared_library' and
      (self.spec['type'] != 'loadable_module' or self._IsBundle())):
    return None
  default_install_name = \
      '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
  install_name = self.GetPerTargetSetting(
      'LD_DYLIB_INSTALL_NAME', default=default_install_name)
  # Hardcode support for the variables used in chromium for now, to
  # unblock people using the make build.
  if '$' in install_name:
    # Only the two known spellings are supported; anything else with a
    # variable reference is rejected outright.
    assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
        '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
        'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
        'yet in target \'%s\' (got \'%s\')' %
            (self.spec['target_name'], install_name))
    install_name = install_name.replace(
        '$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
        self._StandardizePath(self.GetInstallNameBase()))
    if self._IsBundle():
      # These are only valid for bundles, hence the |if|.
      install_name = install_name.replace(
          '$(WRAPPER_NAME)', self.GetWrapperName())
      install_name = install_name.replace(
          '$(PRODUCT_NAME)', self.GetProductName())
    else:
      assert '$(WRAPPER_NAME)' not in install_name
      assert '$(PRODUCT_NAME)' not in install_name
    install_name = install_name.replace(
        '$(EXECUTABLE_PATH)', self.GetExecutablePath())
  return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
LINKER_FILE = '(\S+)'
WORD = '\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
  """Returns flags that need to be passed to the linker.

  Args:
      configname: The name of the configuration to get ld flags for.
      product_dir: The directory where products such static and dynamic
          libraries are placed. This is added to the library search path.
      gyp_to_build_path: A function that converts paths relative to the
          current gyp file to paths relative to the build direcotry.
      arch: If not None, overrides the ARCHS setting with this single
          architecture.
  """
  self.configname = configname
  ldflags = []
  # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
  # can contain entries that depend on this. Explicitly absolutify these.
  for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
    ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
  if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
    ldflags.append('-Wl,-dead_strip')
  if self._Test('PREBINDING', 'YES', default='NO'):
    ldflags.append('-Wl,-prebind')
  self._Appendf(
      ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
  self._Appendf(
      ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
  self._AppendPlatformVersionMinFlags(ldflags)
  if 'SDKROOT' in self._Settings():
    ldflags.append('-isysroot ' + self._SdkPath())
  for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
    ldflags.append('-L' + gyp_to_build_path(library_path))
  if 'ORDER_FILE' in self._Settings():
    # The order file is gyp-relative, so remap it too.
    ldflags.append('-Wl,-order_file ' +
                   '-Wl,' + gyp_to_build_path(
                              self._Settings()['ORDER_FILE']))
  if arch is not None:
    archs = [arch]
  else:
    archs = self._Settings().get('ARCHS', [self._DefaultArch()])
  if len(archs) != 1:
    # TODO: Supporting fat binaries will be annoying.
    self._WarnUnimplemented('ARCHS')
    archs = ['i386']
  ldflags.append('-arch ' + archs[0])
  # Xcode adds the product directory by default.
  ldflags.append('-L' + product_dir)
  install_name = self.GetInstallName()
  if install_name and self.spec['type'] != 'loadable_module':
    # Escape spaces so the shell keeps the install name as one argument.
    ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
  for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
    ldflags.append('-Wl,-rpath,' + rpath)
  config = self.spec['configurations'][self.configname]
  framework_dirs = config.get('mac_framework_dirs', [])
  for directory in framework_dirs:
    ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath()))
  self.configname = None
  return ldflags
def GetLibtoolflags(self, configname):
  """Returns flags that need to be passed to the static linker.

  Args:
    configname: The name of the configuration to get ld flags for.
  """
  self.configname = configname
  # OTHER_LDFLAGS is forwarded to libtool verbatim.
  libtoolflags = list(self._Settings().get('OTHER_LDFLAGS', []))
  # TODO(thakis): ARCHS?
  self.configname = None
  return libtoolflags
def GetPerTargetSettings(self):
  """Gets a list of all the per-target settings. This will only fetch keys
  whose values are the same across all configurations."""
  first_pass = True
  result = {}
  for configname in sorted(self.xcode_settings.keys()):
    if first_pass:
      # Seed with the first configuration's settings.
      result = dict(self.xcode_settings[configname])
      first_pass = False
    else:
      # Drop any key whose value differs in a later configuration.
      # Use items() instead of the Python-2-only iteritems() so the code
      # also runs under Python 3; behavior is identical.
      for key, value in self.xcode_settings[configname].items():
        if key not in result:
          continue
        elif result[key] != value:
          del result[key]
  return result
def GetPerConfigSetting(self, setting, configname, default=None):
  """Looks up |setting| for one configuration, falling back to the
  per-target value for unknown configuration names."""
  if configname not in self.xcode_settings:
    return self.GetPerTargetSetting(setting, default)
  return self.xcode_settings[configname].get(setting, default)
def GetPerTargetSetting(self, setting, default=None):
  """Tries to get xcode_settings.setting from spec. Assumes that the setting
  has the same value in all configurations and throws otherwise."""
  result = None
  seen_any = False
  for configname in sorted(self.xcode_settings.keys()):
    value = self.xcode_settings[configname].get(setting, None)
    if not seen_any:
      result = value
      seen_any = True
    else:
      # Every configuration must agree on the value.
      assert result == value, (
          "Expected per-target setting for '%s', got per-config setting "
          "(target %s)" % (setting, self.spec['target_name']))
  if result is None:
    return default
  return result
def _GetStripPostbuilds(self, configname, output_binary, quiet):
  """Returns a list of shell commands that contain the shell commands
  necessary to strip this target's binary. These should be run as postbuilds
  before the actual postbuilds run."""
  self.configname = configname
  commands = []
  should_strip = (
      self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
      self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO'))
  if should_strip:
    # Pick the default strip style for this target type.
    if self.spec['type'] == 'loadable_module' and self._IsBundle():
      default_strip_style = 'non-global'
    elif self.spec['type'] == 'executable':
      default_strip_style = 'all'
    else:
      default_strip_style = 'debugging'
    strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
    strip_flags = {
      'all': '',
      'non-global': '-x',
      'debugging': '-S',
    }[strip_style]
    explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
    if explicit_strip_flags:
      strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
    if not quiet:
      commands.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
    commands.append('strip %s %s' % (strip_flags, output_binary))
  self.configname = None
  return commands
def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
  """Returns a list of shell commands that contain the shell commands
  necessary to massage this target's debug information. These should be run
  as postbuilds before the actual postbuilds run."""
  self.configname = configname
  commands = []
  # For static libraries, no dSYMs are created.
  wants_dsym = (
      self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
      self._Test(
          'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
      self.spec['type'] != 'static_library')
  if wants_dsym:
    if not quiet:
      commands.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
    commands.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
  self.configname = None
  return commands
def _GetTargetPostbuilds(self, configname, output, output_binary,
                         quiet=False):
  """Returns a list of shell commands that contain the shell commands
  to run as postbuilds for this target, before the actual postbuilds."""
  # dSYMs need to build before stripping happens.
  dsym_commands = self._GetDebugInfoPostbuilds(
      configname, output, output_binary, quiet)
  strip_commands = self._GetStripPostbuilds(configname, output_binary, quiet)
  return dsym_commands + strip_commands
def _GetIOSPostbuilds(self, configname, output_binary):
  """Return a shell command to codesign the iOS output binary so it can
  be deployed to a device. This should be run as the very last step of the
  build."""
  # Only iOS executables are signed here; everything else gets no commands.
  if not (self.isIOS and self.spec['type'] == "executable"):
    return []
  settings = self.xcode_settings[configname]
  key = self._GetIOSCodeSignIdentityKey(settings)
  if not key:
    return []
  # Warn for any unimplemented signing xcode keys.
  unimpl = ['OTHER_CODE_SIGN_FLAGS']
  unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
  if unimpl:
    print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
        ', '.join(sorted(unimpl)))
  # Delegates the actual signing to the gyp-mac-tool helper; the helper is
  # presumably present in ${TARGET_BUILD_DIR} -- TODO confirm against the
  # generator that copies it there.
  return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
      os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
      settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
      settings.get('CODE_SIGN_ENTITLEMENTS', ''),
      settings.get('PROVISIONING_PROFILE', ''))
  ]
def _GetIOSCodeSignIdentityKey(self, settings):
  # Resolves CODE_SIGN_IDENTITY to a codesigning fingerprint by querying
  # the local keychain with `security find-identity`; results are cached on
  # the class so the subprocess runs at most once per identity.
  identity = settings.get('CODE_SIGN_IDENTITY')
  if not identity:
    return None
  if identity not in XcodeSettings._codesigning_key_cache:
    output = subprocess.check_output(
        ['security', 'find-identity', '-p', 'codesigning', '-v'])
    for line in output.splitlines():
      if identity in line:
        fingerprint = line.split()[1]
        cache = XcodeSettings._codesigning_key_cache
        # An identity matching two different fingerprints is ambiguous.
        assert identity not in cache or fingerprint == cache[identity], (
            "Multiple codesigning fingerprints for identity: %s" % identity)
        XcodeSettings._codesigning_key_cache[identity] = fingerprint
  # Falls back to '' when the identity was not found in the keychain.
  return XcodeSettings._codesigning_key_cache.get(identity, '')
def AddImplicitPostbuilds(self, configname, output, output_binary,
                          postbuilds=None, quiet=False):
  """Returns a list of shell commands that should run before and after
  |postbuilds|.

  Args:
    configname: The name of the configuration.
    output: The path the dSYM output is based on.
    output_binary: The binary to strip/sign.
    postbuilds: Optional list of explicit postbuild commands to sandwich
        between the implicit pre and post commands.
    quiet: If true, omit the echo progress commands.
  """
  # Use a None sentinel instead of a mutable [] default argument.
  if postbuilds is None:
    postbuilds = []
  assert output_binary is not None
  pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
  post = self._GetIOSPostbuilds(configname, output_binary)
  return pre + postbuilds + post
def _AdjustLibrary(self, library, config_name=None):
  """Converts one library entry to its linker-argument form."""
  if library.endswith('.framework'):
    framework = os.path.splitext(os.path.basename(library))[0]
    adjusted = '-framework ' + framework
  else:
    m = self.library_re.match(library)
    adjusted = ('-l' + m.group(1)) if m else library
  return adjusted.replace('$(SDKROOT)', self._SdkPath(config_name))
def AdjustLibraries(self, libraries, config_name=None):
  """Transforms entries like 'Cocoa.framework' in libraries into entries like
  '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
  """
  return [self._AdjustLibrary(lib, config_name) for lib in libraries]
def _BuildMachineOSBuild(self):
  # Returns the OS build number of the local machine by running
  # `sw_vers -buildVersion`.
  return self._GetStdout(['sw_vers', '-buildVersion'])
def _XcodeVersion(self):
  """Returns (version, build) of the installed Xcode, cached on the class."""
  # `xcodebuild -version` output looks like
  #    Xcode 4.6.3
  #    Build version 4H1503
  # or like
  #    Xcode 3.2.6
  #    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
  #    BuildVersion: 10M2518
  # Convert that to '0463', '4H1503'.
  if len(XcodeSettings._xcode_version_cache) == 0:
    version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
    # First line carries the version; the build id is always the last line.
    version = version_list[0]
    build = version_list[-1]
    # Be careful to convert "4.2" to "0420":
    version = version.split()[-1].replace('.', '')
    version = (version + '0' * (3 - len(version))).zfill(4)
    build = build.split()[-1]
    # Cache on the class so xcodebuild is invoked at most once per process.
    XcodeSettings._xcode_version_cache = (version, build)
  return XcodeSettings._xcode_version_cache
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
return [int(x) for x in family.split(',')]
def GetExtraPlistItems(self, configname=None):
  """Returns a dictionary with extra items to insert into Info.plist."""
  # The expensive lookups (sw_vers, xcodebuild, SDK queries) are cached on
  # the class, keyed by configuration name.
  if configname not in XcodeSettings._plist_cache:
    cache = {}
    cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
    xcode, xcode_build = self._XcodeVersion()
    cache['DTXcode'] = xcode
    cache['DTXcodeBuild'] = xcode_build
    sdk_root = self._SdkRoot(configname)
    if not sdk_root:
      sdk_root = self._DefaultSdkRoot()
    cache['DTSDKName'] = sdk_root
    # String comparison against the zero-padded version, e.g. '0430'.
    if xcode >= '0430':
      cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
          sdk_root, 'ProductBuildVersion')
    else:
      cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
    if self.isIOS:
      cache['DTPlatformName'] = cache['DTSDKName']
      if configname.endswith("iphoneos"):
        cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
            sdk_root, 'ProductVersion')
        cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
      else:
        cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
    XcodeSettings._plist_cache[configname] = cache
  # Include extra plist items that are per-target, not per global
  # XcodeSettings.
  items = dict(XcodeSettings._plist_cache[configname])
  if self.isIOS:
    items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
  return items
def _DefaultSdkRoot(self):
  """Returns the default SDKROOT to use.

  Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
  project, then the environment variable was empty. Starting with this
  version, Xcode uses the name of the newest SDK installed.
  """
  # Pre-5.0 Xcode: mimic the old empty-SDKROOT behavior.
  if self._XcodeVersion() < '0500':
    return ''
  default_sdk_path = self._XcodeSdkPath('')
  # Reuse a previously-resolved root for this path when available.
  default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
  if default_sdk_root:
    return default_sdk_root
  # Otherwise scan `xcodebuild -showsdks` for the sdk name whose path
  # matches the default sdk path.
  all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
  for line in all_sdks.splitlines():
    items = line.split()
    if len(items) >= 3 and items[-2] == '-sdk':
      sdk_root = items[-1]
      sdk_path = self._XcodeSdkPath(sdk_root)
      if sdk_path == default_sdk_path:
        return sdk_root
  return ''
def _DefaultArch(self):
  """Returns the default architecture when ARCHS is not set.

  For Mac projects, Xcode changed the default value used when ARCHS is not
  set from "i386" to "x86_64".

  For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
  building for a device, and the simulator binaries are always build for
  "i386".

  For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
  which correspond to "armv7 armv7s arm64", and when building the simulator
  the architecture is either "i386" or "x86_64" depending on the simulated
  device (respectively 32-bit or 64-bit device).

  Since the value returned by this function is only used when ARCHS is not
  set, then on iOS we return "i386", as the default xcode project generator
  does not set ARCHS if it is not set in the .gyp file.
  """
  if self.isIOS:
    return 'i386'
  version = self._XcodeVersion()[0]
  return 'x86_64' if version >= '0500' else 'i386'
class MacPrefixHeader(object):
  """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.

  This feature consists of several pieces:
  * If GCC_PREFIX_HEADER is present, all compilations in that project get an
    additional |-include path_to_prefix_header| cflag.
  * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
    instead compiled, and all other compilations in the project get an
    additional |-include path_to_compiled_header| instead.
    + Compiled prefix headers have the extension gch. There is one gch file for
      every language used in the project (c, cc, m, mm), since gch files for
      different languages aren't compatible.
    + gch files themselves are built with the target's normal cflags, but they
      obviously don't get the |-include| flag. Instead, they need a -x flag that
      describes their language.
    + All o files in the target need to depend on the gch file, to make sure
      it's built before any o file is built.

  This class helps with some of these tasks, but it needs help from the build
  system for writing dependencies to the gch files, for writing build commands
  for the gch files, and for figuring out the location of the gch files.
  """
  def __init__(self, xcode_settings,
               gyp_path_to_build_path, gyp_path_to_build_output):
    """If xcode_settings is None, all methods on this class are no-ops.

    Args:
        gyp_path_to_build_path: A function that takes a gyp-relative path,
            and returns a path relative to the build directory.
        gyp_path_to_build_output: A function that takes a gyp-relative path and
            a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
            to where the output of precompiling that path for that language
            should be placed (without the trailing '.gch').
    """
    # This doesn't support per-configuration prefix headers. Good enough
    # for now.
    # self.header: build-relative path of the prefix header, or None.
    self.header = None
    # self.compile_headers: whether the header is precompiled to .gch files.
    self.compile_headers = False
    if xcode_settings:
      self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
      self.compile_headers = xcode_settings.GetPerTargetSetting(
          'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
    # self.compiled_headers: language code -> output path (sans '.gch').
    self.compiled_headers = {}
    if self.header:
      if self.compile_headers:
        for lang in ['c', 'cc', 'm', 'mm']:
          self.compiled_headers[lang] = gyp_path_to_build_output(
              self.header, lang)
      self.header = gyp_path_to_build_path(self.header)

  def _CompiledHeader(self, lang, arch):
    # Per-arch compiled headers get the arch appended as a suffix.
    assert self.compile_headers
    h = self.compiled_headers[lang]
    if arch:
      h += '.' + arch
    return h

  def GetInclude(self, lang, arch=None):
    """Gets the cflags to include the prefix header for language |lang|."""
    if self.compile_headers and lang in self.compiled_headers:
      return '-include %s' % self._CompiledHeader(lang, arch)
    elif self.header:
      return '-include %s' % self.header
    else:
      return ''

  def _Gch(self, lang, arch):
    """Returns the actual file name of the prefix header for language |lang|."""
    assert self.compile_headers
    return self._CompiledHeader(lang, arch) + '.gch'

  def GetObjDependencies(self, sources, objs, arch=None):
    """Given a list of source files and the corresponding object files, returns
    a list of (source, object, gch) tuples, where |gch| is the build-directory
    relative path to the gch file each object file depends on.  |compilable[i]|
    has to be the source file belonging to |objs[i]|."""
    if not self.header or not self.compile_headers:
      return []

    result = []
    for source, obj in zip(sources, objs):
      ext = os.path.splitext(source)[1]
      # Map the source extension to a language code; unknown extensions
      # (headers, resources, ...) get no gch dependency.
      lang = {
        '.c': 'c',
        '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
        '.m': 'm',
        '.mm': 'mm',
      }.get(ext, None)
      if lang:
        result.append((source, obj, self._Gch(lang, arch)))
    return result

  def GetPchBuildCommands(self, arch=None):
    """Returns [(path_to_gch, language_flag, language, header)].
    |path_to_gch| and |header| are relative to the build directory.
    """
    if not self.header or not self.compile_headers:
      return []
    return [
      (self._Gch('c', arch), '-x c-header', 'c', self.header),
      (self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
      (self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
      (self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
    ]
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
  """Merges the global xcode_settings dictionary into each configuration of the
  target represented by spec. For keys that are both in the global and the local
  xcode_settings dict, the local key gets precendence.
  """
  # The xcode generator special-cases global xcode_settings and does something
  # that amounts to merging in the global xcode_settings into each local
  # xcode_settings dict.
  global_settings = global_dict.get('xcode_settings', {})
  for config in spec['configurations'].values():
    if 'xcode_settings' not in config:
      continue
    merged = dict(global_settings)
    merged.update(config['xcode_settings'])
    config['xcode_settings'] = merged
def IsMacBundle(flavor, spec):
  """Returns if |spec| should be treated as a bundle.

  Bundles are directories with a certain subdirectory structure, instead of
  just a single file. Bundle rules do not produce a binary but also package
  resources into that directory."""
  wants_bundle = int(spec.get('mac_bundle', 0)) != 0
  is_mac_bundle = wants_bundle and flavor == 'mac'
  if is_mac_bundle:
    # Bundles must produce something to package.
    assert spec['type'] != 'none', (
        'mac_bundle targets cannot have type none (target "%s")' %
        spec['target_name'])
  return is_mac_bundle
def GetMacBundleResources(product_dir, xcode_settings, resources):
  """Yields (output, resource) pairs for every resource in |resources|.
  Only call this for mac bundle targets.

  Args:
      product_dir: Path to the directory containing the output bundle,
          relative to the build directory.
      xcode_settings: The XcodeSettings of the current target.
      resources: A list of bundle resources, relative to the build directory.
  """
  dest = os.path.join(product_dir,
                      xcode_settings.GetBundleResourceFolder())
  for res in resources:
    # The make generator doesn't support it, so forbid it everywhere
    # to keep the generators more interchangable.
    assert ' ' not in res, (
      "Spaces in resource filenames not supported (%s)"  % res)
    # Split into (path,file), then the path into (prefix,maybe.lproj).
    res_dir, res_file = os.path.split(res)
    maybe_lproj = os.path.split(res_dir)[1]
    output = dest
    # If the resource lives in a .lproj bundle, add that to the destination.
    if maybe_lproj.endswith('.lproj'):
      output = os.path.join(output, maybe_lproj)
    output = os.path.join(output, res_file)
    # Compiled XIB files are referred to by .nib.
    if output.endswith('.xib'):
      output = os.path.splitext(output)[0] + '.nib'
    # Compiled storyboard files are referred to by .storyboardc.
    if output.endswith('.storyboard'):
      output = os.path.splitext(output)[0] + '.storyboardc'
    yield output, res
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
  """Returns (info_plist, dest_plist, defines, extra_env), where:
  * |info_plist| is the source plist path, relative to the
    build directory,
  * |dest_plist| is the destination plist path, relative to the
    build directory,
  * |defines| is a list of preprocessor defines (empty if the plist
    shouldn't be preprocessed,
  * |extra_env| is a dict of env variables that should be exported when
    invoking |mac_tool copy-info-plist|.

  Only call this for mac bundle targets.

  Args:
      product_dir: Path to the directory containing the output bundle,
          relative to the build directory.
      xcode_settings: The XcodeSettings of the current target.
      gyp_to_build_path: A function that converts paths relative to the
          current gyp file to paths relative to the build direcotry.
  """
  info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
  if not info_plist:
    return None, None, [], {}

  # The make generator doesn't support it, so forbid it everywhere
  # to keep the generators more interchangable.
  assert ' ' not in info_plist, (
    "Spaces in Info.plist filenames not supported (%s)"  % info_plist)

  info_plist = gyp_path_to_build_path(info_plist)

  # If explicitly set to preprocess the plist, invoke the C preprocessor and
  # specify any defines as -D flags.
  preprocess = xcode_settings.GetPerTargetSetting(
      'INFOPLIST_PREPROCESS', default='NO') == 'YES'
  defines = []
  if preprocess:
    # Create an intermediate file based on the path.
    defines = shlex.split(xcode_settings.GetPerTargetSetting(
        'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))

  dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
  extra_env = xcode_settings.GetPerTargetSettings()

  return info_plist, dest_plist, defines, extra_env
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
                 additional_settings=None):
  """Return the environment variables that Xcode would set. See
  http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
  for a full list.

  Args:
      xcode_settings: An XcodeSettings object. If this is None, this function
          returns an empty dict.
      built_products_dir: Absolute path to the built products dir.
      srcroot: Absolute path to the source root.
      configuration: The build configuration name.
      additional_settings: An optional dict with more values to add to the
          result.
  """
  if not xcode_settings: return {}

  # This function is considered a friend of XcodeSettings, so let it reach into
  # its implementation details.
  spec = xcode_settings.spec

  # These are filled in on a as-needed basis.
  env = {
    'BUILT_PRODUCTS_DIR' : built_products_dir,
    'CONFIGURATION' : configuration,
    'PRODUCT_NAME' : xcode_settings.GetProductName(),
    # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
    'SRCROOT' : srcroot,
    'SOURCE_ROOT': '${SRCROOT}',
    # This is not true for static libraries, but currently the env is only
    # written for bundles:
    'TARGET_BUILD_DIR' : built_products_dir,
    'TEMP_DIR' : '${TMPDIR}',
  }
  # SDKROOT is only resolved to a path when it is set for this configuration.
  if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
    env['SDKROOT'] = xcode_settings._SdkPath(configuration)
  else:
    env['SDKROOT'] = ''

  if spec['type'] in (
      'executable', 'static_library', 'shared_library', 'loadable_module'):
    env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
    env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
    env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
    mach_o_type = xcode_settings.GetMachOType()
    if mach_o_type:
      env['MACH_O_TYPE'] = mach_o_type
    env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
  if xcode_settings._IsBundle():
    # Bundle-only variables.
    env['CONTENTS_FOLDER_PATH'] = \
      xcode_settings.GetBundleContentsFolderPath()
    env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
        xcode_settings.GetBundleResourceFolder()
    env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
    env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()

  install_name = xcode_settings.GetInstallName()
  if install_name:
    env['LD_DYLIB_INSTALL_NAME'] = install_name
  install_name_base = xcode_settings.GetInstallNameBase()
  if install_name_base:
    env['DYLIB_INSTALL_NAME_BASE'] = install_name_base

  if not additional_settings:
    additional_settings = {}
  else:
    # Flatten lists to strings.
    for k in additional_settings:
      if not isinstance(additional_settings[k], str):
        additional_settings[k] = ' '.join(additional_settings[k])
  additional_settings.update(env)

  # Canonicalize every variable reference to the ${FOO} form.
  for k in additional_settings:
    additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])

  return additional_settings
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
or $FOO, and returns a string with all variable references in the form ${FOO}.
"""
# $FOO -> ${FOO}
str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
# $(FOO) -> ${FOO}
matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
for match in matches:
to_replace, variable = match
assert '$(' not in match, '$($(FOO)) variables not supported: ' + match
str = str.replace(to_replace, '${' + variable + '}')
return str
def ExpandEnvVars(string, expansions):
  """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
  expansions list. If the variable expands to something that references
  another variable, this variable is expanded as well if it's in env --
  until no variables present in env are left."""
  # Walk the (topologically sorted) list back to front so that later
  # variables may reference earlier ones.
  for key, value in reversed(expansions):
    for spelling in ('${' + key + '}', '$(' + key + ')', '$' + key):
      string = string.replace(spelling, value)
  return string
def _TopologicallySortedEnvVarKeys(env):
  """Takes a dict |env| whose values are strings that can refer to other keys,
  for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
  env such that key2 is after key1 in L if env[key2] refers to env[key1].

  Throws an Exception in case of dependency cycles.
  """
  # Since environment variables can refer to other variables, the evaluation
  # order is important. Below is the logic to compute the dependency graph
  # and sort it.
  regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
  def GetEdges(node):
    # Use a definition of edges such that user_of_variable -> used_varible.
    # This happens to be easier in this case, since a variable's
    # definition contains all variables it references in a single string.
    # We can then reverse the result of the topological sort at the end.
    # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
    matches = set([v for v in regex.findall(env[node]) if v in env])
    for dependee in matches:
      assert '${' not in dependee, 'Nested variables not supported: ' + dependee
    return matches

  try:
    # Topologically sort, and then reverse, because we used an edge definition
    # that's inverted from the expected result of this function (see comment
    # above).
    order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
    order.reverse()
    return order
  except gyp.common.CycleError, e:
    raise GypError(
        'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
                      configuration, additional_settings=None):
  """Returns the Xcode environment as a list of (key, value) pairs, ordered
  so that every variable appears after the variables its value refers to."""
  env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
                     additional_settings)
  order = _TopologicallySortedEnvVarKeys(env)
  return [(key, env[key]) for key in order]
def GetSpecPostbuildCommands(spec, quiet=False):
  """Returns the list of postbuilds explicitly defined on |spec|, in a form
  executable by a shell."""
  commands = []
  for postbuild in spec.get('postbuilds', []):
    if not quiet:
      # Announce each postbuild before its command runs.
      announce = 'echo POSTBUILD\\(%s\\) %s' % (
          spec['target_name'], postbuild['postbuild_name'])
      commands.append(announce)
    commands.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
  return commands
def _HasIOSTarget(targets):
"""Returns true if any target contains the iOS specific key
IPHONEOS_DEPLOYMENT_TARGET."""
for target_dict in targets.values():
for config in target_dict['configurations'].values():
if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
return True
return False
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
to build for iOS devices."""
for target_dict in targets.values():
for config_name in target_dict['configurations'].keys():
config = target_dict['configurations'][config_name]
new_config_name = config_name + '-iphoneos'
new_config_dict = copy.deepcopy(config)
if target_dict['toolset'] == 'target':
new_config_dict['xcode_settings']['ARCHS'] = ['armv7']
new_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
target_dict['configurations'][new_config_name] = new_config_dict
return targets
def CloneConfigurationForDeviceAndEmulator(target_dicts):
  """If |target_dicts| contains any iOS targets, automatically create -iphoneos
  targets for iOS device builds."""
  if not _HasIOSTarget(target_dicts):
    # Nothing iOS-specific: hand the input back unchanged.
    return target_dicts
  return _AddIOSDeviceConfigurations(target_dicts)
| mistydemeo/gyp | pylib/gyp/xcode_emulation.py | Python | bsd-3-clause | 55,087 |
# -*- coding: utf-8 -*-
from unittest import TestCase
from scrapy.settings import Settings
from scrapy_tracker.storage.memory import MemoryStorage
from scrapy_tracker.storage.redis import RedisStorage
from scrapy_tracker.storage.sqlalchemy import SqlAlchemyStorage
from tests import TEST_KEY, TEST_CHECKSUM, mock
class TestMemoryStorage(TestCase):
    """Exercise MemoryStorage.getset round-trip semantics."""

    def setUp(self):
        self.storage = MemoryStorage(None)

    def test_getset(self):
        # The first write for a key reports no previous checksum.
        previous = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
        self.assertIsNone(previous)
        # Each later write returns the value stored just before it.
        previous = self.storage.getset(TEST_KEY, 'new_checksum')
        self.assertEqual(TEST_CHECKSUM, previous)
        previous = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
        self.assertEqual('new_checksum', previous)
        # Keys are independent of each other.
        self.assertIsNone(self.storage.getset('new_key', TEST_CHECKSUM))
class TestSqlAlchemyStorage(TestCase):
    """Exercise SqlAlchemyStorage.getset against an in-memory SQLite engine."""

    def setUp(self):
        settings = Settings({
            'TRACKER_SQLALCHEMY_ENGINE': 'sqlite:///:memory:',
            'TRACKER_SQLALCHEMY_FLUSH_DB': True
        })
        self.storage = SqlAlchemyStorage(settings)

    def test_getset(self):
        # The first write for a key reports no previous checksum.
        self.assertIsNone(self.storage.getset(TEST_KEY, TEST_CHECKSUM))
        # Each later write returns the value stored just before it.
        self.assertEqual(TEST_CHECKSUM,
                         self.storage.getset(TEST_KEY, 'new_checksum'))
        self.assertEqual('new_checksum',
                         self.storage.getset(TEST_KEY, TEST_CHECKSUM))
        # Keys are independent of each other.
        self.assertIsNone(self.storage.getset('new_key', TEST_CHECKSUM))
class TestRedisStorage(TestCase):
    """Exercise RedisStorage.getset with StrictRedis replaced by a fake.

    A MagicMock stands in for the redis client; its getset side effect is
    backed by a plain dict so the tests never touch a real server.
    """

    def setUp(self):
        # Patch only for the duration of construction: RedisStorage grabs its
        # client in __init__, so the mock stays attached afterwards.
        with mock.patch("scrapy_tracker.storage.redis.StrictRedis") as mock_redis:
            data = {}

            # Dict-backed emulation of Redis GETSET: return old value, store new.
            def getset(key, val):
                old_val = data.get(key)
                data[key] = val
                return old_val

            mock_getset = mock.MagicMock()
            mock_getset.getset.side_effect = getset
            mock_redis.return_value = mock_getset
            # NOTE(review): 'TRACKER_RADIS_FLUSH_DB' looks like a typo for
            # 'TRACKER_REDIS_FLUSH_DB' -- confirm against the storage module
            # before changing, since the key must match what it reads.
            self.storage = RedisStorage(Settings({
                'TRACKER_RADIS_FLUSH_DB': True
            }))

    def test_getset(self):
        # First write for a key reports no previous checksum.
        result = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
        self.assertIsNone(result)
        # Each later write returns the value stored just before it.
        found = self.storage.getset(TEST_KEY, 'new_checksum')
        self.assertEqual(TEST_CHECKSUM, found)
        found = self.storage.getset(TEST_KEY, TEST_CHECKSUM)
        self.assertEqual('new_checksum', found)
        # Keys are independent of each other.
        result = self.storage.getset('new_key', TEST_CHECKSUM)
        self.assertIsNone(result)
| vkastyniuk/scrapy-tracker | tests/test_storage.py | Python | bsd-3-clause | 2,541 |
#! /usr/bin/python
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import unittest
import autoconfig
import pygccxml
from pygccxml.utils import *
from pygccxml.parser import *
from pygccxml import declarations
class tester_t( unittest.TestCase ):
    """Tests for declarations.call_invocation parsing helpers.

    Verifies split/split_recursive/join/find_args on call-invocation style
    strings, i.e. names followed by parenthesized argument lists.
    NOTE(review): self.failUnless is the deprecated py2-era alias of
    assertTrue; kept for compatibility with the rest of this suite.
    """

    def __init__(self, *args ):
        unittest.TestCase.__init__( self, *args )

    # Assert that split() yields exactly (name, args).
    def __test_split_impl(self, decl_string, name, args):
        self.failUnless( ( name, args ) == declarations.call_invocation.split( decl_string ) )

    # Assert that split_recursive() yields the full expected sequence.
    def __test_split_recursive_impl(self, decl_string, control_seq):
        self.failUnless( control_seq == declarations.call_invocation.split_recursive( decl_string ) )

    # Assert that the string is recognized as a call invocation at all.
    def __test_is_call_invocation_impl( self, decl_string ):
        self.failUnless( declarations.call_invocation.is_call_invocation( decl_string ) )

    def test_split_on_vector(self):
        self.__test_is_call_invocation_impl( "vector(int,std::allocator(int) )" )
        self.__test_split_impl( "vector(int,std::allocator(int) )"
                                , "vector"
                                , [ "int", "std::allocator(int)" ] )
        self.__test_split_recursive_impl( "vector(int,std::allocator(int) )"
                                          , [ ( "vector", [ "int", "std::allocator(int)" ] )
                                              , ( "std::allocator", ["int"] ) ] )

    def test_split_on_string(self):
        self.__test_is_call_invocation_impl( "basic_string(char,std::char_traits(char),std::allocator(char) )" )
        self.__test_split_impl( "basic_string(char,std::char_traits(char),std::allocator(char) )"
                                , "basic_string"
                                , [ "char", "std::char_traits(char)", "std::allocator(char)" ] )

    def test_split_on_map(self):
        self.__test_is_call_invocation_impl( "map(long int,std::vector(int, std::allocator(int) ),std::less(long int),std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) ) )" )
        self.__test_split_impl( "map(long int,std::vector(int, std::allocator(int) ),std::less(long int),std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) ) )"
                                , "map"
                                , [ "long int"
                                    , "std::vector(int, std::allocator(int) )"
                                    , "std::less(long int)"
                                    , "std::allocator(std::pair(const long int, std::vector(int, std::allocator(int) ) ) )" ] )

    def test_join_on_vector(self):
        self.failUnless( "vector( int, std::allocator(int) )"
                         == declarations.call_invocation.join("vector", ( "int", "std::allocator(int)" ) ) )

    def test_find_args(self):
        # find_args returns the (open, close) index pair of the next arg list.
        temp = 'x()()'
        found = declarations.call_invocation.find_args( temp )
        self.failUnless( (1,2) == found )
        found = declarations.call_invocation.find_args( temp, found[1]+1 )
        self.failUnless( (3, 4) == found )
        temp = 'x(int,int)(1,2)'
        found = declarations.call_invocation.find_args( temp )
        self.failUnless( (1,9) == found )
        found = declarations.call_invocation.find_args( temp, found[1]+1 )
        self.failUnless( (10, 14) == found )

    def test_bug_unmatched_brace( self ):
        # Regression test: parenthesized arguments containing quotes used to
        # confuse brace matching.
        src = 'AlternativeName((&string("")), (&string("")), (&string("")))'
        self.__test_split_impl( src
                                , 'AlternativeName'
                                , ['(&string(""))', '(&string(""))', '(&string(""))'] )
def create_suite():
    """Build and return the test suite for this module."""
    result = unittest.TestSuite()
    result.addTest( unittest.makeSuite(tester_t) )
    return result
def run_suite():
    """Run the module's suite with verbose output."""
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run( create_suite() )

if __name__ == "__main__":
    run_suite()
| avaitla/Haskell-to-C---Bridge | pygccxml-1.0.0/unittests/call_invocation_tester.py | Python | bsd-3-clause | 4,057 |
"""
Ludolph: Monitoring Jabber Bot
Copyright (C) 2012-2017 Erigones, s. r. o.
This file is part of Ludolph.
See the LICENSE file for copying permission.
"""
import os
import re
import sys
import signal
import logging
from collections import namedtuple
try:
# noinspection PyCompatibility,PyUnresolvedReferences
from configparser import RawConfigParser
except ImportError:
# noinspection PyCompatibility,PyUnresolvedReferences
from ConfigParser import RawConfigParser
try:
# noinspection PyCompatibility
from importlib import reload
except ImportError:
# noinspection PyUnresolvedReferences
from imp import reload
from ludolph.utils import parse_loglevel
from ludolph.bot import LudolphBot
from ludolph.plugins.plugin import LudolphPlugin
from ludolph import __version__
# Log line format passed to logging.basicConfig() in start().
LOGFORMAT = '%(asctime)s %(levelname)-8s %(name)s: %(message)s'

logger = logging.getLogger('ludolph.main')

# Record describing one loaded plugin: config section name, module path, class.
Plugin = namedtuple('Plugin', ('name', 'module', 'cls'))
def daemonize():
    """Detach the process from the controlling terminal (classic double fork).

    Returns 0 in the final daemon process; both intermediate parents exit
    via sys.exit(0).  On fork failure, writes to stderr and exits 1.

    http://code.activestate.com/recipes/278731-creating-a-daemon-the-python-way/
    http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/
    """
    try:
        pid = os.fork()  # Fork #1
        if pid > 0:
            sys.exit(0)  # Exit first parent
    except OSError as e:
        sys.stderr.write('Fork #1 failed: %d (%s)\n' % (e.errno, e.strerror))
        sys.exit(1)
    # The first child. Decouple from parent environment
    # Become session leader of this new session.
    # Also be guaranteed not to have a controlling terminal
    os.chdir('/')
    # noinspection PyArgumentList
    os.setsid()
    os.umask(0o022)
    try:
        pid = os.fork()  # Fork #2
        if pid > 0:
            sys.exit(0)  # Exit from second parent
    except OSError as e:
        sys.stderr.write('Fork #2 failed: %d (%s)\n' % (e.errno, e.strerror))
        sys.exit(1)
    # Close all open file descriptors inherited from the parent.
    import resource  # Resource usage information
    maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if maxfd == resource.RLIM_INFINITY:
        # Unlimited fd limit: fall back to a reasonable upper bound.
        maxfd = 1024
    # Iterate through and close all file descriptors
    for fd in range(0, maxfd):
        try:
            os.close(fd)
        except OSError:  # ERROR, fd wasn't open (ignored)
            pass
    # Redirect standard file descriptors to /dev/null
    sys.stdout.flush()
    sys.stderr.flush()
    si = open(os.devnull, 'r')
    so = open(os.devnull, 'a+')
    se = open(os.devnull, 'a+')
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())
    return 0
def start():
    """
    Start the daemon.

    Locates and parses ludolph.cfg, optionally daemonizes and writes a
    pidfile, configures logging, loads plugins, installs signal handlers
    (SIGINT/SIGTERM shutdown, SIGHUP reload) and runs the XMPP bot until
    shutdown.  Every path ends with sys.exit().
    """
    ret = 0
    cfg = 'ludolph.cfg'
    cfg_fp = None
    # Candidate config locations, in priority order.
    cfg_lo = ((os.path.expanduser('~'), '.' + cfg), (sys.prefix, 'etc', cfg), ('/etc', cfg))
    # Sections handled by the bot core; everything else names a plugin.
    config_base_sections = ('global', 'xmpp', 'webserver', 'cron', 'ludolph.bot')
    # Try to read config file from ~/.ludolph.cfg or /etc/ludolph.cfg
    for i in cfg_lo:
        try:
            cfg_fp = open(os.path.join(*i))
        except IOError:
            continue
        else:
            break
    if not cfg_fp:
        sys.stderr.write("""\nLudolph can't start!\n
You need to create a config file in one these locations: \n%s\n
You can rename ludolph.cfg.example and update the required options.
The example file is located in: %s\n\n""" % (
            '\n'.join([os.path.join(*i) for i in cfg_lo]),
            os.path.dirname(os.path.abspath(__file__))))
        sys.exit(1)

    # Read and parse configuration
    # noinspection PyShadowingNames
    def load_config(fp, reopen=False):
        config = RawConfigParser()
        if reopen:
            fp = open(fp.name)
        try:  # config.readfp() is Deprecated since python 3.2
            # noinspection PyDeprecation
            read_file = config.readfp
        except AttributeError:
            read_file = config.read_file
        read_file(fp)
        fp.close()
        return config

    config = load_config(cfg_fp)

    # Prepare logging configuration
    logconfig = {
        'level': parse_loglevel(config.get('global', 'loglevel')),
        'format': LOGFORMAT,
    }
    if config.has_option('global', 'logfile'):
        logfile = config.get('global', 'logfile').strip()
        if logfile:
            logconfig['filename'] = logfile

    # Daemonize
    if config.has_option('global', 'daemon'):
        if config.getboolean('global', 'daemon'):
            ret = daemonize()

    # Save pid file
    if config.has_option('global', 'pidfile'):
        try:
            with open(config.get('global', 'pidfile'), 'w') as fp:
                fp.write('%s' % os.getpid())
        except Exception as ex:
            # Setup logging just to show this error
            logging.basicConfig(**logconfig)
            logger.critical('Could not write to pidfile (%s)\n', ex)
            sys.exit(1)

    # Setup logging
    logging.basicConfig(**logconfig)

    # All exceptions will be logged without exit
    def log_except_hook(*exc_info):
        logger.critical('Unhandled exception!', exc_info=exc_info)
    sys.excepthook = log_except_hook

    # Default configuration
    use_tls = True
    use_ssl = False
    address = []

    # Starting
    logger.info('Starting Ludolph %s (%s %s)', __version__, sys.executable, sys.version.split()[0])
    logger.info('Loaded configuration from %s', cfg_fp.name)

    # Load plugins
    # noinspection PyShadowingNames
    def load_plugins(config, reinit=False):
        plugins = []
        for config_section in config.sections():
            config_section = config_section.strip()
            if config_section in config_base_sections:
                continue
            # Parse other possible imports
            parsed_plugin = config_section.split('.')
            if len(parsed_plugin) == 1:
                modname = 'ludolph.plugins.' + config_section
                plugin = config_section
            else:
                modname = config_section
                plugin = parsed_plugin[-1]
            logger.info('Loading plugin: %s', modname)
            try:
                # Translate super_ludolph_plugin into SuperLudolphPlugin
                clsname = plugin[0].upper() + re.sub(r'_+([a-zA-Z0-9])', lambda m: m.group(1).upper(), plugin[1:])
                module = __import__(modname, fromlist=[clsname])
                # On reload (SIGHUP), re-import modules we loaded before.
                if reinit and getattr(module, '_loaded_', False):
                    reload(module)
                module._loaded_ = True
                imported_class = getattr(module, clsname)
                if not issubclass(imported_class, LudolphPlugin):
                    raise TypeError('Plugin: %s is not LudolphPlugin instance' % modname)
                plugins.append(Plugin(config_section, modname, imported_class))
            except Exception as ex:
                # A broken plugin is logged and skipped; the bot still starts.
                logger.exception(ex)
                logger.critical('Could not load plugin: %s', modname)
        return plugins

    plugins = load_plugins(config)

    # XMPP connection settings
    if config.has_option('xmpp', 'host'):
        address = [config.get('xmpp', 'host'), '5222']
        if config.has_option('xmpp', 'port'):
            address[1] = config.get('xmpp', 'port')
        logger.info('Connecting to jabber server %s', ':'.join(address))
    else:
        logger.info('Using DNS SRV lookup to find jabber server')
    if config.has_option('xmpp', 'tls'):
        use_tls = config.getboolean('xmpp', 'tls')
    if config.has_option('xmpp', 'ssl'):
        use_ssl = config.getboolean('xmpp', 'ssl')

    # Here we go
    xmpp = LudolphBot(config, plugins=plugins)
    signal.signal(signal.SIGINT, xmpp.shutdown)
    signal.signal(signal.SIGTERM, xmpp.shutdown)
    if hasattr(signal, 'SIGHUP'):  # Windows does not support SIGHUP - bug #41
        # noinspection PyUnusedLocal,PyShadowingNames
        def sighup(signalnum, handler):
            if xmpp.reloading:
                logger.warning('Reload already in progress')
            else:
                xmpp.reloading = True
                try:
                    config = load_config(cfg_fp, reopen=True)
                    logger.info('Reloaded configuration from %s', cfg_fp.name)
                    xmpp.prereload()
                    plugins = load_plugins(config, reinit=True)
                    xmpp.reload(config, plugins=plugins)
                finally:
                    xmpp.reloading = False
        signal.signal(signal.SIGHUP, sighup)
        # signal.siginterrupt(signal.SIGHUP, false)  # http://stackoverflow.com/a/4302037

    if xmpp.client.connect(tuple(address), use_tls=use_tls, use_ssl=use_ssl):
        xmpp.client.process(block=True)
        sys.exit(ret)
    else:
        logger.error('Ludolph is unable to connect to jabber server')
        sys.exit(2)
# Script entry point: run the bot when executed directly.
if __name__ == '__main__':
    start()
| erigones/Ludolph | ludolph/main.py | Python | bsd-3-clause | 8,916 |
from django_nose.tools import assert_false, assert_true
from pontoon.base.tests import TestCase
from pontoon.base.utils import extension_in
class UtilsTests(TestCase):
    """Tests for pontoon.base.utils helpers."""

    def test_extension_in(self):
        # Matches when the file's extension is in the allowed list.
        assert_true(extension_in('filename.txt', ['bat', 'txt']))
        assert_true(extension_in('filename.biff', ['biff']))
        # Only the final suffix counts for multi-part extensions.
        assert_true(extension_in('filename.tar.gz', ['gz']))
        assert_false(extension_in('filename.txt', ['png', 'jpg']))
        # Dotfiles have no extension per os.path.splitext semantics.
        assert_false(extension_in('.dotfile', ['bat', 'txt']))
        # Unintuitive, but that's how splitext works.
        assert_false(extension_in('filename.tar.gz', ['tar.gz']))
| yfdyh000/pontoon | pontoon/base/tests/test_utils.py | Python | bsd-3-clause | 644 |
import os
import shutil
import tempfile

from pkg_resources import Requirement

from infi.unittest import parameters

from .test_cases import ForgeTest
from pydeploy.environment import Environment
from pydeploy.environment_utils import EnvironmentUtils
from pydeploy.checkout_cache import CheckoutCache
from pydeploy.installer import Installer
from pydeploy import sources
from pydeploy.scm import git
from pydeploy import command
from pydeploy import exceptions
class SourceTest(ForgeTest):
    """Common fixture: a mocked Environment with mocked installer/utils."""

    def setUp(self):
        super(SourceTest, self).setUp()
        self.env = self.forge.create_mock(Environment)
        self.env.installer = self.forge.create_mock(Installer)
        self.env.utils = self.forge.create_mock(EnvironmentUtils)
class SourceFromStringTest(ForgeTest):
    """Tests for sources.Source.from_anything dispatch by input value."""

    def setUp(self):
        super(SourceFromStringTest, self).setUp()
        self.S = sources.Source.from_anything

    def test__git(self):
        self.assertIsInstance(self.S("git://bla"), sources.Git)

    def test__path(self):
        filename = tempfile.mkdtemp()
        # Remove the temporary directory after the test instead of leaking it.
        self.addCleanup(shutil.rmtree, filename, True)
        self.assertIsInstance(self.S(filename), sources.Path)

    def test__easy_install(self):
        # A plain string that is not a URL or existing path falls through to
        # an EasyInstall source.
        self.assertIsInstance(self.S("blablabla"), sources.EasyInstall)

    def test__invalid_source(self):
        for invalid_value in [2, 2.5, True]:
            with self.assertRaises(ValueError):
                self.S(invalid_value)
class PathSourceTest(SourceTest):
    """Tests for sources.Path."""

    def setUp(self):
        super(PathSourceTest, self).setUp()
        self.path = tempfile.mkdtemp()
        # Remove the temporary directory after the test instead of leaking it.
        self.addCleanup(shutil.rmtree, self.path, True)
        self.source = sources.Path(self.path)

    def test__get_name(self):
        self.assertEquals(self.source.get_name(), self.path)

    def test__uses_expanduser(self):
        source = sources.Path("~/a/b/c")
        self.assertEquals(source._param, os.path.expanduser("~/a/b/c"))

    def test__get_signature(self):
        self.assertEquals(self.source.get_signature(), "Path({0})".format(self.path))

    def test__checkout(self):
        # A path source "checks out" to itself; an explicit target is refused.
        self.assertEquals(self.source.checkout(self.env), self.path)
        with self.assertRaises(NotImplementedError):
            self.source.checkout(self.env, '/another/path')

    @parameters.toggle('reinstall')
    def test__install(self, reinstall):
        self.env.installer.install_unpacked_package(self.path, self.path, reinstall=reinstall)
        self.forge.replay()
        self.source.install(self.env, reinstall=reinstall)
class DelegateToPathInstallTest(SourceTest):
    """Fixture for sources that install by delegating to a Path source.

    Replaces sources.Path with a class mock so subclasses can expect the
    delegation without touching the filesystem.
    """

    def setUp(self):
        super(DelegateToPathInstallTest, self).setUp()
        self.path_class = self.forge.create_class_mock(sources.Path)
        # Keep the real class around so instance mocks get the right spec.
        self.orig_path_class = sources.Path
        self.forge.replace_with(sources, "Path", self.path_class)

    def expect_delegation_to_path_install(self, path, name, reinstall):
        # Record: Path(path, name=name) is constructed and install()ed.
        path_mock = self.forge.create_mock(self.orig_path_class)
        self.path_class(path, name=name).and_return(path_mock)
        return path_mock.install(self.env, reinstall=reinstall)
class GitSourceTest(DelegateToPathInstallTest):
    """Tests for sources.Git: naming, checkout and install delegation.

    git.clone_to_or_update / git.reset_submodules are forge-mocked, so all
    expectations use record/replay -- statement order matters.
    """

    def setUp(self):
        super(GitSourceTest, self).setUp()
        self.repo_url = "some/repo/url"
        self.branch = 'some_branch'
        self.source = sources.Git(self.repo_url, self.branch)
        self.forge.replace_many(git, "clone_to_or_update", "reset_submodules")

    def test__master_is_default_branch(self):
        self.assertEquals(sources.Git('bla')._branch, 'master')

    def test__get_name(self):
        self.assertEquals(self.source.get_name(), self.repo_url + "@" + self.branch)

    def test__repr(self):
        self.assertEquals(repr(self.source), 'Git({})'.format(self.source.get_name()))

    def test__get_signature(self):
        self.assertEquals(self.source.get_signature(), repr(self.source))

    @parameters.toggle('reinstall')
    def test__git_source_install(self, reinstall):
        # install() should check out and then delegate to Path.install().
        self.forge.replace(self.source, "checkout")
        checkout_path = "some/checkout/path"
        self.source.checkout(self.env).and_return(checkout_path)
        self.expect_delegation_to_path_install(checkout_path, name=self.repo_url, reinstall=reinstall)
        with self.forge.verified_replay_context():
            self.source.install(self.env, reinstall=reinstall)

    def test__git_source_checkout_with_path_argument(self):
        checkout_path = "/some/path/to/checkout"
        git.clone_to_or_update(url=self.repo_url, path=checkout_path, branch=self.branch)
        git.reset_submodules(checkout_path)
        with self.forge.verified_replay_context():
            result = self.source.checkout(self.env, checkout_path)
        self.assertIs(result, checkout_path)

    def test__git_source_checkout_no_path_argument(self):
        # Without an explicit path, the environment's checkout cache decides.
        checkout_path = "/some/path/to/checkout"
        checkout_cache = self.forge.create_mock(CheckoutCache)
        self.env.get_checkout_cache().and_return(checkout_cache)
        checkout_cache.get_checkout_path(self.repo_url).and_return(checkout_path)
        git.clone_to_or_update(url=self.repo_url, branch=self.branch, path=checkout_path)
        git.reset_submodules(checkout_path)
        with self.forge.verified_replay_context():
            result = self.source.checkout(self.env)
        self.assertIs(result, checkout_path)

    def test__git_identifies_git_prefix(self):
        url = "git://bla"
        source = sources.Source.from_anything(url)
        self.assertIsInstance(source, sources.Git)
class GitContraintsTest(ForgeTest):
    """Tests for Git.resolve_constraints: version specs -> branch/tag choice.

    NOTE(review): class name has a typo ("Contraints"); left as-is since
    renaming could break external references to the test class.
    """

    def setUp(self):
        super(GitContraintsTest, self).setUp()
        # Remote lookups are mocked; each test stubs the remote refs dict.
        self.forge.replace(git, "get_remote_references_dict")
        self.url = "some_url"
        self.source = sources.Git(self.url)

    def test__more_than_one_constraint_not_supported(self):
        with self.assertRaises(NotImplementedError):
            self.source.resolve_constraints([('>=', '2.0.0'), ('<=', '3.0.0')])

    @parameters.iterate('tag', ['v2.0.0', '2.0.0'])
    def test__exact_version_matches_tag(self, tag):
        self._assert_chooses("x==2.0.0", {
            git.Tag(tag) : "some_hash"
        }, 'tags/{}'.format(tag))

    def test__exact_version_with_no_match_raises_exception(self):
        self._assert_no_match('x==2.0.0', {
            git.Tag('bla') : 'h1',
            git.Branch('bloop') : 'h2'
        })

    @parameters.iterate('branch_name', ['v2.0.0', '2.0.0'])
    def test__minimum_version_inclusive_selects_exact(self, branch_name):
        self._assert_chooses("x>=2.0.0", {
            git.Branch(branch_name) : "h1"
        }, branch_name)

    @parameters.toggle('inclusive')
    @parameters.iterate('branch_name', ['3.0.0', 'v3.0.0', '2.3.2', 'v2.3'])
    def test__minimum_version_with_matches(self, inclusive, branch_name):
        self._assert_chooses("x{0}2.0.0".format(">=" if inclusive else ">"), {
            git.Branch(branch_name)
        }, branch_name)

    @parameters.toggle('inclusive')
    @parameters.iterate('branch_name', ['2.0.0-a1', 'v2.0.0-b1', 'v1.9'])
    def test__minimum_version_without_matches(self, inclusive, branch_name):
        self._assert_no_match("x{0}2.0.0".format(">=" if inclusive else ">"), {
            git.Branch(branch_name)
        })

    @parameters.toggle('inclusive')
    def test__unbound_version_takes_from_master(self, inclusive):
        self._assert_chooses("x{0}2.0.0".format(">=" if inclusive else ">"), {
            git.Branch('master')
        }, 'master')

    def _assert_chooses(self, requirement, options, chosen):
        # Resolving against `options` must yield a Git source pinned to
        # branch/tag `chosen` on the same URL.
        requirement = Requirement.parse(requirement)
        git.get_remote_references_dict(self.url).and_return(options)
        self.forge.replay()
        new_source = self.source.resolve_constraints(requirement.specs)
        self.assertIsInstance(new_source, sources.Git)
        self.assertEquals(new_source._url, self.url)
        self.assertEquals(new_source._branch, chosen)

    def _assert_no_match(self, requirement, options):
        specs = Requirement.parse(requirement).specs
        git.get_remote_references_dict(self.url).and_return(options)
        self.forge.replay()
        with self.assertRaises(exceptions.RequiredVersionNotFound):
            self.source.resolve_constraints(specs)
class ExternalToolSourceTest(SourceTest):
    """Fixture for sources that shell out to an external installer tool."""

    def setUp(self):
        super(ExternalToolSourceTest, self).setUp()
        self.package_name = "some_package==1.0.0"
        # Prevent real subprocess execution during these tests.
        self.forge.replace(command, "execute_assert_success")
class PIPSourceTest(ExternalToolSourceTest):
    """Tests for sources.PIP: install delegates to pip; checkout unsupported."""

    @parameters.toggle('reinstall')
    def test__install(self, reinstall):
        source = sources.PIP(self.package_name)
        self.env.execute_pip_install(self.package_name, reinstall=reinstall)
        with self.forge.verified_replay_context():
            source.install(self.env, reinstall=reinstall)

    def test__checkout_not_implemented(self):
        # PIP packages have no source tree to check out.
        with self.assertRaises(NotImplementedError):
            sources.PIP(self.package_name).checkout(self.env, '/some/path')
        with self.assertRaises(NotImplementedError):
            sources.PIP(self.package_name).checkout(self.env)
class EasyInstallSourceTest(ExternalToolSourceTest):
    """Tests for sources.EasyInstall: install delegates to easy_install."""

    @parameters.toggle('reinstall')
    def test__install(self, reinstall):
        self.env.execute_easy_install(self.package_name, reinstall=reinstall)
        source = sources.EasyInstall(self.package_name)
        with self.forge.verified_replay_context():
            source.install(self.env, reinstall=reinstall)

    def test__checkout_not_implemented(self):
        # EasyInstall packages have no source tree to check out.
        with self.assertRaises(NotImplementedError):
            sources.EasyInstall(self.package_name).checkout(self.env, '/some/path')
        with self.assertRaises(NotImplementedError):
            sources.EasyInstall(self.package_name).checkout(self.env)
class SCMTest(SourceTest):
    """Tests for the sources.SCM factory (URL -> concrete SCM source)."""

    def test__git(self):
        repo = "git://some_repo"
        result = sources.SCM(repo)
        self.assertIsInstance(result, sources.Git)
        self.assertEquals(result._url, repo)

    def test__git_with_branch(self):
        # An '@suffix' on the URL selects a branch.
        result = sources.SCM("git://some_repo@branch_name")
        self.assertIsInstance(result, sources.Git)
        self.assertEquals(result._url, "git://some_repo")
        self.assertEquals(result._branch, "branch_name")

    def test__other(self):
        # Unrecognized URL schemes are rejected.
        with self.assertRaises(ValueError):
            sources.SCM("bla")
| vmalloc/pydeploy | tests/test__sources.py | Python | bsd-3-clause | 10,265 |
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
# Renders the static homepage template.
home = TemplateView.as_view(template_name='home.html')

# NOTE(review): patterns() is a legacy Django URL API (removed in Django
# 1.10); this module targets an older Django where it is still available.
urlpatterns = patterns(
    '',
    url(r'^filter/', include('demoproject.filter.urls')),
    # An informative homepage.
    url(r'', home, name='home')
)
| jgsogo/django-generic-filters | demo/demoproject/urls.py | Python | bsd-3-clause | 312 |
import datetime
import time
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import Q
from django.db.models.signals import pre_save
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.auth.models import User
from tidings.models import NotificationsMixin
from kitsune import forums
from kitsune.access.utils import has_perm, perm_is_defined_on
from kitsune.flagit.models import FlaggedObject
from kitsune.sumo.templatetags.jinja_helpers import urlparams, wiki_to_html
from kitsune.sumo.urlresolvers import reverse
from kitsune.sumo.models import ModelBase
from kitsune.search.models import (
SearchMappingType, SearchMixin, register_for_indexing,
register_mapping_type)
def _last_post_from(posts, exclude_post=None):
"""Return the most recent post in the given set, excluding the given post.
If there are none, return None.
"""
if exclude_post:
posts = posts.exclude(id=exclude_post.id)
posts = posts.order_by('-created')
try:
return posts[0]
except IndexError:
return None
class ThreadLockedError(Exception):
    """Trying to create a post in a locked thread.

    Raised by Thread.new_post() when the thread's is_locked flag is set.
    """
class Forum(NotificationsMixin, ModelBase):
    """A discussion forum: an ordered, optionally access-restricted
    container of threads.  Visibility/posting rights are world-open unless a
    django-authority permission has been attached to the instance."""
    name = models.CharField(max_length=50, unique=True)
    slug = models.SlugField(unique=True)
    description = models.TextField(null=True)
    # Denormalized pointer to the newest post, maintained by
    # update_last_post(); cleared rather than cascaded on post deletion.
    last_post = models.ForeignKey('Post', related_name='last_post_in_forum',
                                  null=True, on_delete=models.SET_NULL)
    # Dictates the order in which forums are displayed in the forum list.
    display_order = models.IntegerField(default=1, db_index=True)
    # Whether or not this forum is visible in the forum list.
    is_listed = models.BooleanField(default=True, db_index=True)

    class Meta(object):
        ordering = ['display_order', 'id']
        permissions = (
            ('view_in_forum', 'Can view restricted forums'),
            ('post_in_forum', 'Can post in restricted forums'))

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        return reverse('forums.threads', kwargs={'forum_slug': self.slug})

    def allows_viewing_by(self, user):
        """Return whether a user can view me, my threads, and their posts."""
        return (self._allows_public_viewing() or
                has_perm(user, 'forums_forum.view_in_forum', self))

    def _allows_public_viewing(self):
        """Return whether I am a world-readable forum.
        If a django-authority permission relates to me, I am considered non-
        public. (We assume that you attached a permission to me in order to
        assign it to some users or groups.) Considered adding a Public flag to
        this model, but we didn't want it to show up on form and thus be
        accidentally flippable by readers of the Admin forum, who are all
        privileged enough to do so.
        """
        return not perm_is_defined_on('forums_forum.view_in_forum', self)

    def allows_posting_by(self, user):
        """Return whether a user can make threads and posts in me."""
        return (self._allows_public_posting() or
                has_perm(user, 'forums_forum.post_in_forum', self))

    def _allows_public_posting(self):
        """Return whether I am a world-writable forum."""
        return not perm_is_defined_on('forums_forum.post_in_forum', self)

    def update_last_post(self, exclude_thread=None, exclude_post=None):
        """Set my last post to the newest, excluding given thread and post.

        Does not save(); callers persist the change themselves.
        """
        posts = Post.objects.filter(thread__forum=self)
        if exclude_thread:
            posts = posts.exclude(thread=exclude_thread)
        self.last_post = _last_post_from(posts, exclude_post=exclude_post)

    @classmethod
    def authorized_forums_for_user(cls, user):
        """Returns the forums this user is authorized to view"""
        return [f for f in Forum.objects.all() if f.allows_viewing_by(user)]
class Thread(NotificationsMixin, ModelBase, SearchMixin):
    """A forum thread: an ordered set of posts under one title.

    Tracks a denormalized reply count and last-post pointer, and keeps its
    forum's (old and new, on move) last-post pointers consistent on save()
    and delete().
    """
    title = models.CharField(max_length=255)
    forum = models.ForeignKey('Forum')
    created = models.DateTimeField(default=datetime.datetime.now,
                                   db_index=True)
    creator = models.ForeignKey(User)
    # Denormalized pointer to the newest post; cleared, not cascaded.
    last_post = models.ForeignKey('Post', related_name='last_post_in',
                                  null=True, on_delete=models.SET_NULL)
    replies = models.IntegerField(default=0)
    is_locked = models.BooleanField(default=False)
    is_sticky = models.BooleanField(default=False, db_index=True)

    class Meta:
        ordering = ['-is_sticky', '-last_post__created']

    def __setattr__(self, attr, val):
        """Notice when the forum field changes.
        A property won't do here, because it usurps the "forum" name and
        prevents us from using lookups like Thread.objects.filter(forum=f).
        """
        # Remember the pre-move forum once, so save() can fix its last_post.
        if attr == 'forum' and not hasattr(self, '_old_forum'):
            try:
                self._old_forum = self.forum
            except ObjectDoesNotExist:
                pass
        super(Thread, self).__setattr__(attr, val)

    @property
    def last_page(self):
        """Returns the page number for the last post."""
        # NOTE(review): relies on Python 2 integer division; under Python 3
        # this would yield a float (use // there).
        return self.replies / forums.POSTS_PER_PAGE + 1

    def __unicode__(self):
        return self.title

    def delete(self, *args, **kwargs):
        """Override delete method to update parent forum info."""
        forum = Forum.objects.get(pk=self.forum.id)
        if forum.last_post and forum.last_post.thread_id == self.id:
            forum.update_last_post(exclude_thread=self)
            forum.save()
        super(Thread, self).delete(*args, **kwargs)

    def new_post(self, author, content):
        """Create a new post, if the thread is unlocked.

        Raises ThreadLockedError when is_locked is set.
        """
        if self.is_locked:
            raise ThreadLockedError
        return self.post_set.create(author=author, content=content)

    def get_absolute_url(self):
        return reverse('forums.posts', args=[self.forum.slug, self.id])

    def get_last_post_url(self):
        # Link straight to the last post's anchor on its page.
        query = {'last': self.last_post_id}
        page = self.last_page
        if page > 1:
            query['page'] = page
        url = reverse('forums.posts', args=[self.forum.slug, self.id])
        return urlparams(url, hash='post-%s' % self.last_post_id, **query)

    def save(self, *args, **kwargs):
        super(Thread, self).save(*args, **kwargs)
        old_forum = getattr(self, '_old_forum', None)
        new_forum = self.forum
        # On a forum move, refresh both forums' last_post pointers.
        if old_forum and old_forum != new_forum:
            old_forum.update_last_post(exclude_thread=self)
            old_forum.save()
            new_forum.update_last_post()
            new_forum.save()
            del self._old_forum

    def update_last_post(self, exclude_post=None):
        """Set my last post to the newest, excluding the given post."""
        last = _last_post_from(self.post_set, exclude_post=exclude_post)
        self.last_post = last
        # If self.last_post is None, and this was called from Post.delete,
        # then Post.delete will erase the thread, as well.

    @classmethod
    def get_mapping_type(cls):
        return ThreadMappingType
@register_mapping_type
class ThreadMappingType(SearchMappingType):
    """Elasticsearch mapping for Thread: one document per thread, with
    title, content and author data from all of its posts flattened in."""
    # NOTE(review): presumably consumed by the search framework to filter
    # results by recency of the last post -- confirm in SearchMappingType.
    seconds_ago_filter = 'last_post__created__gte'
    @classmethod
    def search(cls):
        # Default ordering: oldest threads first.
        return super(ThreadMappingType, cls).search().order_by('created')
    @classmethod
    def get_model(cls):
        return Thread
    @classmethod
    def get_query_fields(cls):
        # Fields searched by default for full-text queries.
        return ['post_title', 'post_content']
    @classmethod
    def get_mapping(cls):
        # ES field definitions; 'not_analyzed' strings are exact-match only.
        return {
            'properties': {
                'id': {'type': 'long'},
                'model': {'type': 'string', 'index': 'not_analyzed'},
                'url': {'type': 'string', 'index': 'not_analyzed'},
                'indexed_on': {'type': 'integer'},
                'created': {'type': 'integer'},
                'updated': {'type': 'integer'},
                'post_forum_id': {'type': 'integer'},
                'post_title': {'type': 'string', 'analyzer': 'snowball'},
                'post_is_sticky': {'type': 'boolean'},
                'post_is_locked': {'type': 'boolean'},
                'post_author_id': {'type': 'integer'},
                'post_author_ord': {'type': 'string', 'index': 'not_analyzed'},
                'post_content': {'type': 'string', 'analyzer': 'snowball',
                                 'store': 'yes',
                                 'term_vector': 'with_positions_offsets'},
                'post_replies': {'type': 'integer'}
            }
        }
    @classmethod
    def extract_document(cls, obj_id, obj=None):
        """Extracts interesting thing from a Thread and its Posts"""
        if obj is None:
            # Only the id was handed in (e.g. by an async task); re-fetch.
            model = cls.get_model()
            obj = model.objects.select_related('last_post').get(pk=obj_id)
        d = {}
        d['id'] = obj.id
        d['model'] = cls.get_mapping_type_name()
        d['url'] = obj.get_absolute_url()
        d['indexed_on'] = int(time.time())
        # TODO: Sphinx stores created and updated as seconds since the
        # epoch, so we convert them to that format here so that the
        # search view works correctly. When we ditch Sphinx, we should
        # see if it's faster to filter on ints or whether we should
        # switch them to dates.
        d['created'] = int(time.mktime(obj.created.timetuple()))
        if obj.last_post is not None:
            d['updated'] = int(time.mktime(obj.last_post.created.timetuple()))
        else:
            # A thread with no last post has no activity timestamp.
            d['updated'] = None
        d['post_forum_id'] = obj.forum.id
        d['post_title'] = obj.title
        d['post_is_sticky'] = obj.is_sticky
        d['post_is_locked'] = obj.is_locked
        d['post_replies'] = obj.replies
        # Flatten every post into the thread document: all author ids,
        # all author usernames, and the list of post bodies.
        author_ids = set()
        author_ords = set()
        content = []
        posts = Post.objects.filter(
            thread_id=obj.id).select_related('author')
        for post in posts:
            author_ids.add(post.author.id)
            author_ords.add(post.author.username)
            content.append(post.content)
        d['post_author_id'] = list(author_ids)
        d['post_author_ord'] = list(author_ords)
        d['post_content'] = content
        return d
register_for_indexing('forums', Thread)
class Post(ModelBase):
    """A single post within a forum Thread."""
    thread = models.ForeignKey('Thread')
    content = models.TextField()
    author = models.ForeignKey(User)
    created = models.DateTimeField(default=datetime.datetime.now,
                                   db_index=True)
    updated = models.DateTimeField(default=datetime.datetime.now,
                                   db_index=True)
    # NOTE(review): presumably the last editor; null for never-edited
    # posts -- confirm against the edit views.
    updated_by = models.ForeignKey(User,
                                   related_name='post_last_updated_by',
                                   null=True)
    flags = GenericRelation(FlaggedObject)
    class Meta:
        ordering = ['created']
    def __unicode__(self):
        # Python 2 display form: first 50 characters of the content.
        return self.content[:50]
    def save(self, *args, **kwargs):
        """
        Override save method to update parent thread info and take care of
        created and updated.
        """
        new = self.id is None
        if not new:
            self.updated = datetime.datetime.now()
        super(Post, self).save(*args, **kwargs)
        if new:
            # Replies exclude the thread's first post, hence the -1.
            self.thread.replies = self.thread.post_set.count() - 1
            self.thread.last_post = self
            self.thread.save()
            self.thread.forum.last_post = self
            self.thread.forum.save()
    def delete(self, *args, **kwargs):
        """Override delete method to update parent thread info."""
        thread = Thread.objects.get(pk=self.thread.id)
        if thread.last_post_id and thread.last_post_id == self.id:
            thread.update_last_post(exclude_post=self)
        # -2: post_set still includes this post (not deleted yet) and the
        # thread's first post does not count as a reply.
        thread.replies = thread.post_set.count() - 2
        thread.save()
        forum = Forum.objects.get(pk=thread.forum.id)
        if forum.last_post_id and forum.last_post_id == self.id:
            forum.update_last_post(exclude_post=self)
            forum.save()
        super(Post, self).delete(*args, **kwargs)
        # If I was the last post in the thread, delete the thread.
        if thread.last_post is None:
            thread.delete()
    @property
    def page(self):
        """Get the page of the thread on which this post is found."""
        t = self.thread
        earlier = t.post_set.filter(created__lte=self.created).count() - 1
        if earlier < 1:
            return 1
        # Python 2 integer division yields the zero-based page index.
        return earlier / forums.POSTS_PER_PAGE + 1
    def get_absolute_url(self):
        query = {}
        if self.page > 1:
            query = {'page': self.page}
        url_ = self.thread.get_absolute_url()
        return urlparams(url_, hash='post-%s' % self.id, **query)
    @property
    def content_parsed(self):
        # Render the wiki markup to HTML on each access.
        return wiki_to_html(self.content)
register_for_indexing('forums', Post, instance_to_indexee=lambda p: p.thread)
def user_pre_save(sender, instance, **kw):
    """Reindex every thread a user participated in when their username
    is about to change.

    pre_save signal handler: compares the username currently stored in
    the database with the one on the instance being saved.
    """
    if not instance.id:
        return
    stored = User.objects.get(id=instance.id)
    if stored.username == instance.username:
        return
    affected = (
        Thread.objects
        .filter(Q(creator=instance) | Q(post__author=instance))
        .only('id')
        .distinct())
    for thread in affected:
        thread.index_later()
pre_save.connect(
user_pre_save, sender=User, dispatch_uid='forums_user_pre_save')
| anushbmx/kitsune | kitsune/forums/models.py | Python | bsd-3-clause | 13,732 |
#!/usr/bin/env python
"""DockCI command-line management entry point."""
# NOTE(review): imported for side effects -- presumably registers the CLI
# commands with MANAGER; confirm in dockci.commands.
import dockci.commands
from dockci.server import APP, app_init, MANAGER
if __name__ == "__main__":
    # Initialize the application before dispatching to the manager CLI.
    app_init()
    MANAGER.run()
| sprucedev/DockCI | manage.py | Python | isc | 157 |
import os, logging
from PIL import Image
from sqlalchemy.orm.session import object_session
from sqlalchemy.orm.util import identity_key
from iktomi.unstable.utils.image_resizers import ResizeFit
from iktomi.utils import cached_property
from ..files import TransientFile, PersistentFile
from .files import FileEventHandlers, FileProperty
logger = logging.getLogger(__name__)
class ImageFile(PersistentFile):
    """Persistent file that lazily knows its image dimensions.

    ``width``/``height`` are cached properties: the file is opened with
    PIL only on first access, and a single open fills in both values.
    """
    def _get_properties(self, properties=['width', 'height']):
        # One Image.open sets both instance attributes, which then
        # shadow the cached_property descriptors on later access.
        if 'width' in properties or 'height' in properties:
            image = Image.open(self.path)
            self.width, self.height = image.size
    @cached_property
    def width(self):
        self._get_properties(['width'])
        return self.width
    @cached_property
    def height(self):
        self._get_properties(['height'])
        return self.height
class ImageEventHandlers(FileEventHandlers):
    """FileEventHandlers that resize and post-process images when a
    transient upload is promoted to persistent storage."""
    def _2persistent(self, target, transient):
        # XXX move this method to file_manager
        # XXX Do this check or not?
        image = Image.open(transient.path)
        assert image.format in Image.SAVE and image.format != 'bmp',\
               'Unsupported image format'
        if self.prop.image_sizes:
            session = object_session(target)
            persistent_name = getattr(target, self.prop.attribute_name)
            pn, ext = os.path.splitext(persistent_name)
            image_crop = self.prop.resize(image, self.prop.image_sizes)
            # Filters/enhancements need RGB mode; GIFs are re-encoded.
            if self.prop.force_rgb and image_crop.mode not in ['RGB', 'RGBA']:
                image_crop = image_crop.convert('RGB')
            if ext == '.gif':
                image_crop.format = 'jpeg'
                ext = '.jpeg'
            if self.prop.enhancements:
                for enhance, factor in self.prop.enhancements:
                    image_crop = enhance(image_crop).enhance(factor)
            if self.prop.filter:
                image_crop = image_crop.filter(self.prop.filter)
            if not ext:
                # set extension if it is not set
                ext = '.' + image.format.lower()
            if pn + ext != persistent_name:
                persistent_name = pn + ext
                # XXX hack? -- keeps the stored name in sync with the
                # extension chosen above.
                setattr(target, self.prop.attribute_name, persistent_name)
            image_attr = getattr(target.__class__, self.prop.key)
            file_manager = persistent = session.find_file_manager(image_attr)
            persistent = file_manager.get_persistent(persistent_name,
                                                     self.prop.persistent_cls)
            # Save the processed image to a new transient, then store it.
            transient = session.find_file_manager(image_attr).new_transient(ext)
            kw = dict(quality=self.prop.quality)
            if self.prop.optimize:
                kw = dict(kw, optimize=True)
            image_crop.save(transient.path, **kw)
            session.find_file_manager(image_attr).store(transient, persistent)
            return persistent
        else:
            # Attention! This method can accept PersistentFile.
            # In this case the original must NEVER be deleted or rewritten.
            assert isinstance(transient, TransientFile), repr(transient)
            return FileEventHandlers._2persistent(self, target, transient)
    def before_update(self, mapper, connection, target):
        FileEventHandlers.before_update(self, mapper, connection, target)
        self._fill_img(mapper, connection, target)
    def before_insert(self, mapper, connection, target):
        FileEventHandlers.before_insert(self, mapper, connection, target)
        self._fill_img(mapper, connection, target)
    def _fill_img(self, mapper, connection, target):
        # When the property is unset and `fill_from` is configured,
        # derive this image from the named sibling file property.
        if self.prop.fill_from:
            # XXX Looks hacky
            value = getattr(target, self.prop.key)
            if value is None:
                base = getattr(target, self.prop.fill_from)
                if base is None:
                    return
                if not os.path.isfile(base.path):
                    logger.warn('Original file is absent %s %s %s',
                                identity_key(instance=target),
                                self.prop.fill_from,
                                base.path)
                    return
                ext = os.path.splitext(base.name)[1]
                session = object_session(target)
                image_attr = getattr(target.__class__, self.prop.key)
                name = session.find_file_manager(image_attr).new_file_name(
                    self.prop.name_template, target, ext, '')
                setattr(target, self.prop.attribute_name, name)
                persistent = self._2persistent(target, base)
                setattr(target, self.prop.key, persistent)
class ImageProperty(FileProperty):
    """FileProperty variant that resizes and post-processes images.

    Options consumed here (the rest is passed to FileProperty):
      image_sizes -- target dimensions; required when `fill_from` is set.
      resize      -- resizer object, defaults to ResizeFit().
      fill_from   -- name of a sibling file property to derive this image
                     from when it has no value.
      filter      -- PIL filter applied to the resized image.
      enhancements -- iterable of (Enhance class, factor) pairs.
      force_rgb   -- convert to RGB before saving; implied True whenever
                     `filter` or `enhancements` are given. Default True.
      quality     -- JPEG save quality, default 85.
      optimize    -- pass optimize=True to PIL save, default False.
    """
    event_cls = ImageEventHandlers
    def _set_options(self, options):
        # XXX rename image_sizes?
        options = dict(options)
        self.image_sizes = options.pop('image_sizes', None)
        self.resize = options.pop('resize', None) or ResizeFit()
        # XXX implement
        self.fill_from = options.pop('fill_from', None)
        self.filter = options.pop('filter', None)
        self.enhancements = options.pop('enhancements', [])
        # Always pop 'force_rgb'. Previously it was popped only when both
        # `enhancements` and `filter` were falsy (short-circuit), so an
        # explicit 'force_rgb' option could leak through to
        # FileProperty._set_options as an unknown key.
        force_rgb = options.pop('force_rgb', True)
        self.force_rgb = self.enhancements or self.filter or force_rgb
        self.quality = options.pop('quality', 85)
        self.optimize = options.pop('optimize', False)
        # fill_from needs target dimensions to build the derived image.
        assert self.fill_from is None or self.image_sizes is not None
        options.setdefault('persistent_cls', ImageFile)
        FileProperty._set_options(self, options)
| Lehych/iktomi | iktomi/unstable/db/sqla/images.py | Python | mit | 5,666 |
# Python 2 contest solution (raw_input, print statement, integer `/`):
# for each test case, compute sum_i (-1)^i * C(n-1, i) * a[i] mod 1e9+7.
t = int(raw_input())
MOD = 10**9 + 7
def modexp(a,b):
    # Fast modular exponentiation: a**b mod MOD by repeated squaring.
    res = 1
    while b:
        if b&1:
            res *= a
            res %= MOD
        a = (a*a)%MOD
        b /= 2
    return res
# Precompute factorials mod MOD and their modular inverses via Fermat's
# little theorem (MOD is prime).
# NOTE(review): the loop stops at 99999, so fn[100000] and ifn[100000]
# keep their initial value 1 -- confirm n-1 never reaches 100000.
fn = [1 for _ in xrange(100001)]
ifn = [1 for _ in xrange(100001)]
for i in range(1,100000):
    fn[i] = fn[i-1] * i
    fn[i] %= MOD
    ifn[i] = modexp(fn[i],MOD-2)
def nCr(n,k):
    # Unreduced product of already-modded factors; callers apply % MOD.
    return fn[n] * ifn[k] * ifn[n-k]
for ti in range(t):
    n = int(raw_input())
    a = map(int,raw_input().split())
    ans = 0
    for i in range(n):
        # Alternating signs: + for even i, - for odd i.
        if i%2==0:
            ans += nCr(n-1,i)%MOD * a[i]%MOD
        else:
            ans -= nCr(n-1,i)%MOD * a[i]%MOD
    ans %= MOD
print ans
| ManrajGrover/CodeSprint_India_2014 | Qualification_Round_2/Editorials/array_simp_2.py | Python | mit | 683 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Autogenerated template factory: the shared Lok mining-cave
    Building object."""
    building = Building()
    building.template = "object/building/lok/shared_mining_cave_01.iff"
    building.attribute_template_id = -1
    building.stfName("building_name","cave")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return building
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Autogenerated template factory: the dressed Rebel brigadier
    general (male Sullustan) Creature object."""
    creature = Creature()
    creature.template = "object/mobile/shared_dressed_rebel_brigadier_general_sullustan_male.iff"
    creature.attribute_template_id = 9
    creature.stfName("npc_name","sullustan_base_male")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return creature
from __future__ import absolute_import
from jinja2 import Markup
from rstblog.programs import RSTProgram
import typogrify
class TypogrifyRSTProgram(RSTProgram):
    """RSTProgram whose rendered HTML fragment is post-processed with
    typogrify (typographic niceties such as smart quoting)."""
    def get_fragments(self):
        # Serve from the per-instance cache once rendered.
        if self._fragment_cache is not None:
            return self._fragment_cache
        with self.context.open_source_file() as f:
            # Consume the header first, then render the RST body.
            self.get_header(f)
            rv = self.context.render_rst(f.read().decode('utf-8'))
        # Markup() marks the typogrify output as already-escaped HTML.
        rv['fragment'] = Markup(typogrify.typogrify(rv['fragment']))
        self._fragment_cache = rv
        return rv
def setup(builder):
    """rstblog module hook: register the typogrify-enabled program as
    the handler for ``.rst`` sources."""
    builder.programs['rst'] = TypogrifyRSTProgram
from datetime import datetime, timedelta
from time import sleep
from random import uniform
class SleepSchedule(object):
    """Pauses the execution of the bot every day for some time
    Simulates the user going to sleep every day for some time, the sleep time
    and the duration is changed every day by a random offset defined in the
    config file
    Example Config:
    "sleep_schedule": [
      {
        "time": "12:00",
        "duration": "5:30",
        "time_random_offset": "00:30",
        "duration_random_offset": "00:30",
        "wake_up_at_location": ""
      },
      {
        "time": "17:45",
        "duration": "3:00",
        "time_random_offset": "01:00",
        "duration_random_offset": "00:30",
        "wake_up_at_location": ""
      }
    ]
    time: (HH:MM) local time that the bot should sleep
    duration: (HH:MM) the duration of sleep
    time_random_offset: (HH:MM) random offset of time that the sleep will start
                        for this example the possible start time is 11:30-12:30
    duration_random_offset: (HH:MM) random offset of duration of sleep
                            for this example the possible duration is 5:00-6:00
    wake_up_at_location: (lat, long | lat, long, alt | "") the location at which the bot wake up
    *Note that an empty string ("") will not change the location*.    """
    # How often to wake from time.sleep() while a sleep is in progress.
    LOG_INTERVAL_SECONDS = 600
    SCHEDULING_MARGIN = timedelta(minutes=10)    # Skip if next sleep is RESCHEDULING_MARGIN from now
    def __init__(self, bot, config):
        self.bot = bot
        self._process_config(config)
        self._schedule_next_sleep()
        self._calculate_current_sleep()
    def work(self):
        # Called by the bot loop; blocks for the whole sleep duration.
        if self._should_sleep_now():
            self._sleep()
            wake_up_at_location = self._wake_up_at_location
            self._schedule_next_sleep()
            if wake_up_at_location:
                if hasattr(self.bot, 'api'): # Check if api is already initialized
                    self.bot.api.set_position(wake_up_at_location[0],wake_up_at_location[1],wake_up_at_location[2])
                else:
                    self.bot.wake_location = wake_up_at_location
            if hasattr(self.bot, 'api'): self.bot.login() # Same here
    def _process_config(self, config):
        """Normalize raw config entries into seconds-based dicts.

        Missing fields default to: time 01:00, duration 07:00,
        time_random_offset 01:00, duration_random_offset 00:30.
        """
        self.entries = []
        for entry in config:
            prepared = {}
            prepared['time'] = datetime.strptime(entry['time'] if 'time' in entry else '01:00', '%H:%M')
            # Using datetime for easier stripping of timedeltas
            raw_duration = datetime.strptime(entry['duration'] if 'duration' in entry else '07:00', '%H:%M')
            duration = int(timedelta(hours=raw_duration.hour, minutes=raw_duration.minute).total_seconds())
            raw_time_random_offset = datetime.strptime(entry['time_random_offset'] if 'time_random_offset' in entry else '01:00', '%H:%M')
            time_random_offset = int(
                timedelta(
                    hours=raw_time_random_offset.hour, minutes=raw_time_random_offset.minute).total_seconds())
            raw_duration_random_offset = datetime.strptime(entry['duration_random_offset'] if 'duration_random_offset' in entry else '00:30', '%H:%M')
            duration_random_offset = int(
                timedelta(
                    hours=raw_duration_random_offset.hour, minutes=raw_duration_random_offset.minute).total_seconds())
            raw_wake_up_at_location = entry['wake_up_at_location'] if 'wake_up_at_location' in entry else ''
            if raw_wake_up_at_location:
                try:
                    wake_up_at_location = raw_wake_up_at_location.split(',',2)
                    lat=float(wake_up_at_location[0])
                    lng=float(wake_up_at_location[1])
                    if len(wake_up_at_location) == 3:
                        alt=float(wake_up_at_location[2])
                    else:
                        # No altitude given: pick one inside the configured band.
                        alt = uniform(self.bot.config.alt_min, self.bot.config.alt_max)
                except ValueError:
                    raise ValueError('SleepSchedule wake_up_at_location, parsing error in location') #TODO there must be a more elegant way to do it...
                prepared['wake_up_at_location'] = [lat, lng, alt]
            prepared['duration'] = duration
            prepared['time_random_offset'] = time_random_offset
            prepared['duration_random_offset'] = duration_random_offset
            self.entries.append(prepared)
    def _schedule_next_sleep(self):
        # Pick the soonest upcoming entry and announce it.
        self._next_sleep, self._next_duration, self._wake_up_at_location = self._get_next_sleep_schedule()
        self.bot.event_manager.emit(
            'next_sleep',
            sender=self,
            formatted="Next sleep at {time}",
            data={
                'time': str(self._next_sleep)
            }
        )
    def _calculate_current_sleep(self):
        # The schedule repeats daily, so yesterday's instance of the next
        # sleep defines a window that may still be open right now.
        self._current_sleep = self._next_sleep - timedelta(days=1)
        current_duration = self._next_duration
        self._current_end = self._current_sleep + timedelta(seconds = current_duration)
    def _should_sleep_now(self):
        if datetime.now() >= self._next_sleep:
            return True
        if datetime.now() >= self._current_sleep and datetime.now() < self._current_end:
            # Mid-window (e.g. bot restarted while asleep): sleep only for
            # the remaining part of the window.
            self._next_duration = (self._current_end - datetime.now()).total_seconds()
            return True
        return False
    def _get_next_sleep_schedule(self):
        # SCHEDULING_MARGIN keeps us from re-picking a sleep that is about
        # to start (or just started).
        now = datetime.now() + self.SCHEDULING_MARGIN
        times = []
        for index in range(len(self.entries)):
            next_time = now.replace(hour=self.entries[index]['time'].hour, minute=self.entries[index]['time'].minute)
            next_time += timedelta(seconds=self._get_random_offset(self.entries[index]['time_random_offset']))
            # If sleep time is passed add one day
            if next_time <= now:
                next_time += timedelta(days=1)
            times.append(next_time)
        diffs = {}
        for index in range(len(self.entries)):
            diff = (times[index]-now).total_seconds()
            if diff >= 0: diffs[index] = diff
        # NOTE(review): dict.iterkeys() is Python 2 only.
        closest = min(diffs.iterkeys(), key=lambda x: diffs[x])
        next_time = times[closest]
        next_duration = self._get_next_duration(self.entries[closest])
        location = self.entries[closest]['wake_up_at_location'] if 'wake_up_at_location' in self.entries[closest] else ''
        return next_time, next_duration, location
    def _get_next_duration(self, entry):
        # Base duration plus/minus the configured random jitter.
        duration = entry['duration'] + self._get_random_offset(entry['duration_random_offset'])
        return duration
    def _get_random_offset(self, max_offset):
        # Uniform integer jitter in [-max_offset, max_offset] seconds.
        offset = uniform(-max_offset, max_offset)
        return int(offset)
    def _sleep(self):
        # Block for _next_duration seconds, waking every
        # LOG_INTERVAL_SECONDS so the process isn't in one long sleep().
        sleep_to_go = self._next_duration
        sleep_m, sleep_s = divmod(sleep_to_go, 60)
        sleep_h, sleep_m = divmod(sleep_m, 60)
        sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
        now = datetime.now()
        wake = str(now + timedelta(seconds=sleep_to_go))
        self.bot.event_manager.emit(
            'bot_sleep',
            sender=self,
            formatted="Sleeping for {time_hms}, wake at {wake}",
            data={
                'time_hms': sleep_hms,
                'wake': wake
            }
        )
        while sleep_to_go > 0:
            if sleep_to_go < self.LOG_INTERVAL_SECONDS:
                sleep(sleep_to_go)
                sleep_to_go = 0
            else:
                sleep(self.LOG_INTERVAL_SECONDS)
                sleep_to_go -= self.LOG_INTERVAL_SECONDS
| bbiiggppiigg/PokemonGo-Bot | pokemongo_bot/sleep_schedule.py | Python | mit | 7,614 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/creature/npc/droid/crafted/shared_cll_8_binary_load_lifter_advanced.iff"
result.attribute_template_id = 3
result.stfName("droid_name","cll_8_binary_load_lifter_crafted_advanced")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/creature/npc/droid/crafted/shared_cll_8_binary_load_lifter_advanced.py | Python | mit | 506 |
import operator
import mock
import pytest
from okcupyd import User
from okcupyd import magicnumbers
from okcupyd.magicnumbers import maps
from okcupyd.profile import Profile
from okcupyd.json_search import SearchFetchable, search
from okcupyd.location import LocationQueryCache
from okcupyd.session import Session
from . import util
SEARCH_FILTERS_BEING_REIMPLEMENTED = "SEARCH_FILTERS_ARE_BEING_REIMPLEMENTED"
@util.use_cassette
def test_age_filter():
    # min == max age: every returned profile must be exactly that age.
    age = 22
    search_fetchable = SearchFetchable(gentation='everybody',
                                       minimum_age=age, maximum_age=age)
    for profile in search_fetchable[:5]:
        assert profile.age == age
@util.use_cassette
def test_count_variable(request):
    # `count` caps the result list; touch each attribute to prove the
    # returned profile objects are fully usable.
    profiles = search(gentation='everybody', count=14)
    assert len(profiles) == 14
    for profile in profiles:
        profile.username
        profile.age
        profile.location
        profile.match_percentage
        profile.enemy_percentage
        profile.id
        profile.rating
        profile.contacted
@util.use_cassette
def test_location_filter():
    # With a 1-mile radius, results must share the requested location.
    session = Session.login()
    location_cache = LocationQueryCache(session)
    location = 'Portland, OR'
    search_fetchable = SearchFetchable(location=location, location_cache=location_cache, radius=1)
    for profile in search_fetchable[:5]:
        assert profile.location == 'Portland, OR'
@util.use_cassette(path='search_function')
def test_search_function():
    # The module-level search() helper returns usable Profile objects.
    profile, = search(count=1)
    assert isinstance(profile, Profile)
    profile.username
    profile.age
    profile.location
    profile.match_percentage
    profile.enemy_percentage
    profile.id
    profile.rating
    profile.contacted
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
def test_search_fetchable_iter():
    # A SearchFetchable is directly iterable past a single result page.
    search_fetchable = SearchFetchable(gentation='everybody',
                                       religion='buddhist', age_min=25, age_max=25,
                                       location='new york, ny', keywords='bicycle')
    for count, profile in enumerate(search_fetchable):
        assert isinstance(profile, Profile)
        if count > 30:
            break
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_easy_search_filters():
    # For each simple string-valued filter, the filter value should show
    # up in the corresponding details attribute of the first result.
    session = Session.login()
    query_test_pairs = [# ('bodytype', maps.bodytype),
                        # TODO(@IvanMalison) this is an alist feature,
                        # so it can't be tested for now.
        ('drugs', maps.drugs), ('smokes', maps.smokes),
        ('diet', maps.diet,), ('job', maps.job)]
    for query_param, re_map in query_test_pairs:
        for value in sorted(re_map.pattern_to_value.keys()):
            profile = SearchFetchable(**{
                'gentation': '',
                'session': session,
                'count': 1,
                query_param: value
            })[0]
            attribute = getattr(profile.details, query_param)
            assert value in (attribute or '').lower()
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_children_filter():
    # Both the has-kids and wants-kids filters should be reflected in
    # the profile's `children` detail string.
    session = Session.login()
    profile = SearchFetchable(session, wants_kids="wants kids", count=1)[0]
    assert "wants" in profile.details.children.lower()
    profile = SearchFetchable(session, has_kids=["has kids"],
                              wants_kids="doesn't want kids",
                              count=0)[0]
    assert "has kids" in profile.details.children.lower()
    assert "doesn't want" in profile.details.children.lower()
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_pets_queries():
    # Cat/dog filters (including list values) surface in `details.pets`.
    session = Session.login()
    profile = SearchFetchable(session, cats=['dislikes cats', 'likes cats'],
                              count=1)[0]
    assert 'likes cats' in profile.details.pets.lower()
    profile = SearchFetchable(session, dogs='likes dogs', cats='has cats', count=1)[0]
    assert 'likes dogs' in profile.details.pets.lower()
    assert 'has cats' in profile.details.pets.lower()
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_height_filter():
    # Height filters accept imperial and metric strings; verify results
    # by parsing the profile's height detail with the matching regex.
    session = Session.login()
    profile = SearchFetchable(session, height_min='5\'6"', height_max='5\'6"',
                              gentation='girls who like guys', radius=25, count=1)[0]
    match = magicnumbers.imperial_re.search(profile.details.height)
    assert int(match.group(1)) == 5
    assert int(match.group(2)) == 6
    profile = SearchFetchable(session, height_min='2.00m', count=1)[0]
    match = magicnumbers.metric_re.search(profile.details.height)
    assert float(match.group(1)) >= 2.00
    profile = SearchFetchable(session, height_max='1.5m', count=1)[0]
    match = magicnumbers.metric_re.search(profile.details.height)
    assert float(match.group(1)) <= 1.5
@pytest.mark.xfail(reason=SEARCH_FILTERS_BEING_REIMPLEMENTED)
@util.use_cassette
def test_language_filter():
    # The requested language must appear among the profile's languages
    # (each entry is a (language, level) pair; compare the first item).
    session = Session.login()
    profile = SearchFetchable(session, language='french', count=1)[0]
    assert 'french' in [language_info[0].lower()
                        for language_info in profile.details.languages]
    profile = SearchFetchable(session, language='Afrikaans', count=1)[0]
    assert 'afrikaans' in map(operator.itemgetter(0), profile.details.languages)
@pytest.mark.xfail
@util.use_cassette
def test_attractiveness_filter():
    # Results should fall strictly inside the requested attractiveness band.
    session = Session.login()
    profile = SearchFetchable(session, attractiveness_min=4000,
                              attractiveness_max=6000, count=1)[0]
    assert profile.attractiveness > 4000
    assert profile.attractiveness < 6000
@pytest.mark.xfail
@util.use_cassette
def test_question_filter():
    # Filtering on one of the user's questions should return profiles
    # whose answer to that question matches.
    user = User()
    user_question = user.questions.somewhat_important[0]
    for profile in user.search(question=user_question)[:5]:
        question = profile.find_question(user_question.id)
        assert question.their_answer_matches
@pytest.mark.xfail
@util.use_cassette
def test_question_filter_with_custom_answers():
    # Passing explicitly non-matching answer ids should invert the
    # match: returned profiles answered with a non-accepted option.
    user = User()
    user_question = user.questions.somewhat_important[1]
    unacceptable_answers = [answer_option.id
                            for answer_option in user_question.answer_options
                            if not answer_option.is_match]
    for profile in user.search(question=user_question.id,
                               question_answers=unacceptable_answers)[:5]:
        question = profile.find_question(user_question.id)
        assert not question.their_answer_matches
@pytest.mark.xfail
@util.use_cassette
def test_question_count_filter():
    # question_count_min=250: indexing question 250 (index 249) must work.
    user = User()
    for profile in user.search(question_count_min=250)[:5]:
        assert profile.questions[249]
@pytest.mark.xfail(reason="ProfileBuilder needs to be improved to actually get data from profile results")
@util.use_cassette
def test_search_populates_upfront():
    # Attributes present in search results (e.g. id) should not trigger
    # an extra HTTP fetch; lazy attributes (essays) should fetch once.
    user = User()
    search_fetchable = user.search()
    for profile in search_fetchable[:4]:
        profile_session = profile._session
        with mock.patch.object(profile, '_session') as mock_session:
            mock_session.okc_get.side_effect = profile_session.okc_get
            assert profile.id > 0
            assert mock_session.okc_get.call_count == 0
            profile.essays.self_summary
            assert mock_session.okc_get.call_count == 1
| IvanMalison/okcupyd | tests/search_test.py | Python | mit | 7,482 |
from .base import DerivedType
from categorical import CategoricalComparator
from .categorical_type import CategoricalType
class ExistsType(CategoricalType) :
    """Categorical comparison of mere field presence: each field is
    reduced to 0 (missing/falsy) or 1 (present) before comparing."""
    type = "Exists"
    _predicate_functions = []
    def __init__(self, definition) :
        # NOTE(review): super(CategoricalType, self) deliberately skips
        # CategoricalType.__init__ and calls its parent's -- the category
        # set here is always the fixed pair [0, 1]; confirm intent.
        super(CategoricalType, self ).__init__(definition)
        self.cat_comparator = CategoricalComparator([0,1])
        # One dummy variable per categorical dummy name.
        self.higher_vars = []
        for higher_var in self.cat_comparator.dummy_names :
            dummy_var = DerivedType({'name' : higher_var,
                                     'type' : 'Dummy',
                                     'has missing' : self.has_missing})
            self.higher_vars.append(dummy_var)
    def comparator(self, field_1, field_2) :
        # Map presence to the categorical pairs (1,1), (0,1) or (0,0).
        if field_1 and field_2 :
            return self.cat_comparator(1, 1)
        elif field_1 or field_2 :
            return self.cat_comparator(0, 1)
        else :
            return self.cat_comparator(0, 0)
    # This flag tells fieldDistances in dedupe.core to pass
    # missing values (None) into the comparator
    comparator.missing = True
| tfmorris/dedupe | dedupe/variables/exists.py | Python | mit | 1,117 |
import sys, os
import pickle
import nltk
import paths
from utils import *
def words_to_dict(words):
    """Map each word to its position: ["a", "b"] -> {"a": 0, "b": 1}.

    Duplicate words keep their last position.
    """
    return {word: index for index, word in enumerate(words)}
nltk.data.path.append(paths.nltk_data_path)
use_wordnet = True
# Pick the word normalizer once at import time; `stem` is the chosen
# callable used by the rest of the script.
if use_wordnet:
    stemmer = nltk.stem.wordnet.WordNetLemmatizer()
    stem = stemmer.lemmatize
else:
    stemmer = nltk.stem.porter.PorterStemmer()
    stem = stemmer.stem
def tokens(text):
    """Normalize dash variants (em/en dashes to spaces, hyphens removed,
    in that order) so multi-word keywords survive tokenization, then
    hand off to the shared preprocessor."""
    for dashes, replacement in [("---", " "), ("--", " "), ("-", "")]:
        text = text.replace(dashes, replacement)
    return preprocess(text)
def make_bow(doc, d):
    """Build a bag-of-words from *doc* using vocabulary *d*.

    *d* maps word -> integer id; the result maps id -> occurrence count.
    Words absent from the vocabulary are dropped silently.
    """
    # XXX we should notify something about non-stopwords that we couldn't parse
    counts = {}
    for token in doc:
        if token in d:
            token_id = d[token]
            counts[token_id] = counts.get(token_id, 0) + 1
    return counts
# Valid values for the CLI options below.
modes = ["fulltext","abstracts"]
ks = ["20","50","100","200"]
dist = ["kl","euclidean"]
if __name__ == '__main__':
    # Python 2 script. Usage:
    #   [-k K] [-m|--mode MODE] [-n|--num N] [-d|--distance D] paper.pdf
    args = sys.argv[1:]
    mode = modes[0]
    k = ks[0]
    dfun = dist[0]
    num = 20
    while len(args) > 1:
        # Each recognized option consumes its flag and value.
        if args[0] == "-k":
            if args[1] in ks:
                k = args[1]
            args = args[2:]
        if args[0] in ["-m","--mode"]:
            if args[1] in modes:
                mode = args[1]
            args = args[2:]
        if args[0] in ["-n","--num"]:
            if int(args[1]) in range(1,50):
                num = int(args[1])
            args = args[2:]
        if args[0] in ["-d","--distance"]:
            if args[1] in dist:
                dfun = args[1]
            args = args[2:]
    model = os.path.join(mode,"lda" + k,"final")
    words = os.path.join(mode,"vocab.dat")
    docs = os.path.join(mode,"docs.dat")
    pdf_file = args[0]
    (base,_) = os.path.splitext(pdf_file)
    # SECURITY: the filename is interpolated into a shell command.
    text = os.popen("/usr/bin/pdftotext \"%s\" -" % pdf_file).read() # XXX safe filenames!
    vocab = words_to_dict(open(words).read().split())
    bow = make_bow(map(stem,tokens(text)),vocab)
    # Write the new document in LDA-C format: "N id:count id:count ..."
    dat_file = base + ".dat"
    out = open(dat_file,"w")
    out.write(str(len(bow)))
    out.write(' ')
    for term in bow:
        out.write(str(term))
        out.write(':')
        out.write(str(bow[term]))
        out.write(' ')
    out.write('\n')
    out.close()
    log = base + ".log"
    # Run LDA inference for the single new document against the model.
    os.system(paths.lda + " inf settings.txt %s %s %s >%s 2>&1" % (model,dat_file,base,log))
    # XXX capture output, handle errors
    inf = read(base + "-gamma.dat")
    gammas = read(model + ".gamma")
    papers = zip(read(docs), map(lambda s: map(float,s.split()), gammas))
    tgt = ["INPUT PDF"] + map(lambda s: map(float,s.split()), inf)
    # XXX these are the topic values, if we want to visualize them
    # XXX be careful to not leak our filenames
    if dfun == "euclidean":
        metric = distance
        fmt = '%d'
    elif dfun == "kl":
        metric = kl_divergence
        fmt = '%f'
    else:
        metric = kl_divergence
        fmt = '%f'
    # Rank corpus documents by distance from the input's topic mixture.
    papers = map(lambda s: (metric(s[1],tgt[1]),s), papers)
    papers.sort(lambda x,y: cmp(x[0],y[0]))
    print "\nRelated papers:\n"
    for (d,(doc,gs)) in papers[0:num]:
        print ('  %s (' + fmt + ')') % (doc,d)
| mgree/tmpl | www/backend/infer.py | Python | mit | 3,314 |
"""calibrated_image.py was written by Ryan Petersburg for use with fiber
characterization on the EXtreme PREcision Spectrograph
"""
import numpy as np
from .base_image import BaseImage
from .numpy_array_handler import filter_image, subframe_image
class CalibratedImage(BaseImage):
"""Fiber face image analysis class
Class that contains calibration images and executes corrections based on
those images
Attributes
----------
dark : str, array_like, or None
The input used to set the dark image. See
BaseImage.convert_image_to_array() for details
ambient : str, array_like, or None
The input used to set the ambient image. See
BaseImage.convert_image_to_array() for details
flat : str, array_like, or None
The input used to set the flat image. See
BaseImage.convert_image_to_array() for details
kernel_size : int (odd)
The kernel side length used when filtering the image. This value may
need to be tweaked, especially with few co-added images, due to random
noise. The filtered image is used for the centering algorithms, so for
a "true test" use kernel_size=1, but be careful, because this may
lead to needing a fairly high threshold for the noise.
new_calibration : bool
Whether or not self.calibration has been set with new images
Args
----
image_input : str, array_like, or None, optional
See BaseImage class for details
dark : str, array_like, or None, optional
Image input to instantiate BaseImage for dark image
ambient : str, array_like, or None, optional
Image input to instantiate BaseImage for ambient image
flat : str, array_like, or None, optional
Image input to instantiate BaseImage for flat image
kernel_size : int (odd), optional
Set the kernel size for filtering
**kwargs : keworded arguments
Passed into the BaseImage superclass
"""
def __init__(self, image_input, dark=None, ambient=None, flat=None,
kernel_size=9, **kwargs):
self.dark = dark
self.ambient = ambient
self.flat = flat
self.kernel_size = kernel_size
self.new_calibration = True
super(CalibratedImage, self).__init__(image_input, **kwargs)
#=========================================================================#
#==== Primary Image Getters ==============================================#
#=========================================================================#
def get_uncorrected_image(self):
"""Return the raw image without corrections or filtering.
Returns
-------
uncorrected_image : 2D numpy array
Raw image or average of images (depending on image_input)
"""
return self.convert_image_to_array(self.image_input)
def get_image(self):
"""Return the corrected image
This method must be called to get access to the corrected 2D numpy
array being analyzed. Attempts to access a previously saved image
under self.image_file or otherwise applies corrections to the raw
images pulled from their respective files
Returns
-------
image : 2D numpy array
Image corrected by calibration images
"""
if self.image_file is not None and not self.new_calibration:
return self.image_from_file(self.image_file)
return self.execute_error_corrections(self.get_uncorrected_image())
def get_uncorrected_filtered_image(self, kernel_size=None, **kwargs):
"""Return a median filtered image
Args
----
kernel_size : {None, int (odd)}, optional
The side length of the kernel used to median filter the image. Uses
self.kernel_size if None.
Returns
-------
filtered_image : 2D numpy array
The stored image median filtered with the given kernel_size
"""
image = self.get_uncorrected_image()
if image is None:
return None
if kernel_size is None:
kernel_size = self.kernel_size
return filter_image(image, kernel_size, **kwargs)
def get_filtered_image(self, kernel_size=None, **kwargs):
"""Return an error corrected and median filtered image
Returns
-------
filtered_image : 2D numpy array
The stored image median filtered with the given kernel_size and
error corrected using the given method
"""
image = self.get_image()
if image is None:
return None
if kernel_size is None:
kernel_size = self.kernel_size
return filter_image(image, kernel_size, **kwargs)
#=========================================================================#
#==== Calibration Image Getters ==========================================#
#=========================================================================#
    def get_dark_image(self):
        """Return the dark calibration image (no corrections applied).

        Returns
        -------
        dark_image : 2D numpy array
            The dark image loaded from self.dark
        """
        return BaseImage(self.dark).get_image()
    def get_ambient_image(self):
        """Return the ambient calibration image, dark-corrected.

        Returns
        -------
        ambient_image : 2D numpy array
            The ambient image loaded from self.ambient with the dark image
            subtracted
        """
        return CalibratedImage(self.ambient, dark=self.dark).get_image()
    def get_flat_image(self):
        """Return the flat-field calibration image, dark-corrected.

        Returns
        -------
        flat_image : 2D numpy array
            The flat image loaded from self.flat with the dark image
            subtracted
        """
        return CalibratedImage(self.flat, dark=self.dark).get_image()
def set_dark(self, dark):
"""Sets the dark calibration image."""
self.dark = dark
self.new_calibration = True
def set_ambient(self, ambient):
"""Sets the ambient calibration image."""
self.ambient = ambient
self.new_calibration = True
def set_flat(self, flat):
"""Sets the flat calibration images."""
self.flat = flat
self.new_calibration = True
#=========================================================================#
#==== Image Calibration Algorithm ========================================#
#=========================================================================#
    def execute_error_corrections(self, image):
        """Apply dark, ambient, and flat-field corrections to ``image``.

        The dark image is subtracted first; then the (dark-corrected,
        exposure-scaled) ambient image is subtracted; finally the image is
        divided by the normalized flat field.  Resets self.new_calibration.

        Args
        ----
        image : 2D numpy array
            Image to be corrected
        Returns
        -------
        corrected_image : 2D numpy array
            Corrected image, or None if ``image`` is None
        """
        if image is None:
            return None
        corrected_image = image
        dark_image = self.get_dark_image()
        # Crop the dark frame to the primary image's subframe when the
        # shapes disagree.
        if dark_image is not None and dark_image.shape != corrected_image.shape:
            dark_image = subframe_image(dark_image, self.subframe_x,
                                        self.subframe_y, self.width,
                                        self.height)
        # Dark subtraction is always applied; remove_dark_image falls back
        # to a zero image when no dark frame is configured.
        corrected_image = self.remove_dark_image(corrected_image,
                                                 dark_image)
        ambient_image = self.get_ambient_image()
        if ambient_image is not None:
            if ambient_image.shape != corrected_image.shape:
                ambient_image = subframe_image(ambient_image, self.subframe_x,
                                               self.subframe_y, self.width,
                                               self.height)
            ambient_exp_time = BaseImage(self.ambient).exp_time
            # Rescale the ambient frame by the exposure-time ratio when the
            # two exposures differ.
            if self.exp_time is not None and ambient_exp_time != self.exp_time:
                corrected_image = self.remove_dark_image(corrected_image,
                                                         ambient_image
                                                         * self.exp_time
                                                         / ambient_exp_time)
            else:
                corrected_image = self.remove_dark_image(corrected_image,
                                                         ambient_image)
        flat_image = self.get_flat_image()
        if flat_image is not None:
            if flat_image.shape != corrected_image.shape:
                flat_image = subframe_image(flat_image, self.subframe_x,
                                            self.subframe_y, self.width,
                                            self.height)
            # Normalize by the flat field, preserving the mean level.
            corrected_image *= flat_image.mean() / flat_image
        self.new_calibration = False
        return corrected_image
    def remove_dark_image(self, image, dark_image=None):
        """Subtract a dark frame from ``image`` and renormalize.

        Args
        ----
        image : 2D numpy array
            numpy array of the image
        dark_image : 2D numpy array, optional
            dark image to be removed; defaults to self.get_dark_image(),
            or a zero image when no dark frame is configured
        Returns
        -------
        output_array : 2D numpy array
            corrected image
        """
        if dark_image is None:
            dark_image = self.get_dark_image()
            if dark_image is None:
                dark_image = np.zeros_like(image)
        output_image = image - dark_image
        # Renormalize to the approximate smallest value (avoiding hot pixels)
        output_image -= filter_image(output_image, 5).min()
        # Prevent any dark/ambient image hot pixels from leaking through
        # by zeroing strongly negative pixels.  NOTE(review): the -1000.0
        # threshold is empirical -- confirm it suits the camera bit depth.
        output_image *= (output_image > -1000.0).astype('uint8')
        return output_image
#=========================================================================#
#==== Attribute Setters ==================================================#
#=========================================================================#
def set_attributes_from_object(self, object_file):
super(CalibratedImage, self).set_attributes_from_object(object_file)
self.dark = self.change_path(self.dark)
self.ambient = self.change_path(self.ambient)
self.flat = self.change_path(self.flat)
| rpetersburg/fiber_properties | fiber_properties/calibrated_image.py | Python | mit | 11,258 |
from collections.abc import Iterable
from django import template
from django.db.models import Model
register = template.Library()
@register.filter
def get_type(value):
    """Return the Python type object of ``value``.

    Inspired by: https://stackoverflow.com/a/12028864
    """
    return type(value)
@register.filter
def is_model(value):
    """Return True when ``value`` is a Django model instance."""
    return isinstance(value, Model)
@register.filter
def is_iterable(value):
    """Return True when ``value`` can be iterated over."""
    return isinstance(value, Iterable)
@register.filter
def is_str(value):
    """Return True when ``value`` is a string."""
    return isinstance(value, str)
@register.filter
def is_bool(value):
    """Return True when ``value`` is a boolean."""
    return isinstance(value, bool)
| pbanaszkiewicz/amy | amy/autoemails/templatetags/type_extras.py | Python | mit | 555 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds an optional video_document
    # foreign key (to wagtaildocs.Document) on ArticlePage.  SET_NULL keeps
    # the article alive when the referenced document is deleted.
    dependencies = [
        ('wagtaildocs', '0003_add_verbose_names'),
        ('articles', '0075_auto_20151015_2022'),
    ]
    operations = [
        migrations.AddField(
            model_name='articlepage',
            name='video_document',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtaildocs.Document', null=True),
        ),
    ]
| CIGIHub/greyjay | greyjay/articles/migrations/0076_articlepage_video_document.py | Python | mit | 598 |
from keras.models import Sequential
from keras.layers import convolutional
from keras.layers.core import Dense, Flatten
from SGD_exponential_decay import SGD_exponential_decay as SGD
### Parameters obtained from paper ###
K = 152 # depth of convolutional layers
LEARNING_RATE = .003 # initial learning rate
DECAY = 8.664339379294006e-08 # rate of exponential learning_rate decay
class value_trainer:
    """Trainer for an AlphaGo-style value network built on Keras.

    Maps a 49x19x19 board-feature tensor to a single scalar in [-1, 1]
    predicting the game outcome, using the K/LEARNING_RATE/DECAY parameters
    declared at the top of this module.
    """
    def __init__(self):
        # Layer 1: 5x5 convolution over the 49-plane board encoding.
        self.model = Sequential()
        self.model.add(convolutional.Convolution2D(input_shape=(49, 19, 19), nb_filter=K, nb_row=5, nb_col=5,
                       init='uniform', activation='relu', border_mode='same'))
        # Layers 2-12: 3x3 convolutions with K filters each.
        for i in range(2,13):
            self.model.add(convolutional.Convolution2D(nb_filter=K, nb_row=3, nb_col=3,
                           init='uniform', activation='relu', border_mode='same'))
        # Final 1x1 convolution collapses to one plane before the dense head.
        self.model.add(convolutional.Convolution2D(nb_filter=1, nb_row=1, nb_col=1,
                       init='uniform', activation='linear', border_mode='same'))
        self.model.add(Flatten())
        self.model.add(Dense(256,init='uniform'))
        # tanh keeps the predicted value in [-1, 1].
        self.model.add(Dense(1,init='uniform',activation="tanh"))
        sgd = SGD(lr=LEARNING_RATE, decay=DECAY)
        self.model.compile(loss='mean_squared_error', optimizer=sgd)
    def get_samples(self):
        # TODO non-terminating loop that draws training samples uniformly at random
        pass
    def train(self):
        # TODO use self.model.fit_generator to train from data source
        pass
if __name__ == '__main__':
    # Build the network; training entry points are not yet implemented.
    trainer = value_trainer()
    # TODO command line instantiation
| wrongu/AlphaGo | AlphaGo/models/value.py | Python | mit | 1,711 |
#!/usr/bin/env python
#
# Created by Samvel Khalatyan on Mar 23, 2014
# Copyright (c) 2014 Samvel Khalatyan. All rights reserved
#
# Use of this source code is governed by a license that can be found in
# the LICENSE file.
import random
import unittest
from lib import unigraph
class UnigraphExtra(unigraph.Unigraph):
    """Unigraph extended with an edge-membership query.

    Every vertex is considered connected to itself (implicit self-loop).
    """
    def has_edge(self, left_vertex, right_vertex):
        """Return True when an edge joins the two vertices."""
        if left_vertex == right_vertex:
            # A vertex is always reachable from itself.
            return True
        return right_vertex in self._vertices[left_vertex]
class UnigraphEdgeTestCase(unittest.TestCase):
    """Randomized checks of UnigraphExtra.has_edge."""

    def setUp(self):
        # Random graph with 10-14 vertices and ~2x that many random edges.
        self.graph = UnigraphExtra(random.randrange(10, 15))
        for edge in range(2 * self.graph.vertices()):
            f, t = (random.randrange(self.graph.vertices()) for x in range(2))
            self.graph.add_edge(f, t)

    def test_edge(self):
        for vertex in range(self.graph.vertices()):
            existing_vertices = set(self.graph._vertices[vertex])
            all_vertices = set(range(self.graph.vertices()))
            # BUG FIX: the original computed all_vertices - all_vertices,
            # which is always the empty set, so the missing-edge assertions
            # below never ran.  Subtract the adjacent vertices instead.
            missing_vertices = all_vertices - existing_vertices
            # has_edge() reports self-loops as always present, so the vertex
            # itself must not be treated as a missing edge.
            missing_vertices.discard(vertex)
            for adj_vertex in existing_vertices:
                self.assertTrue(self.graph.has_edge(vertex, adj_vertex))
            for adj_vertex in missing_vertices:
                self.assertFalse(self.graph.has_edge(vertex, adj_vertex))

    def test_self_loop(self):
        for vertex in range(self.graph.vertices()):
            self.assertTrue(self.graph.has_edge(vertex, vertex))
# Run the suite when executed directly.
if __name__ == "__main__":
    unittest.main()
| ksamdev/algorithms_old | ch4/python/ch4_ex4.1.4.py | Python | mit | 1,523 |
from django import template
from django.utils.safestring import mark_safe
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.contrib.admin.views.main import PAGE_VAR, ALL_VAR
from django.conf import settings
from django.contrib.sites.models import Site
from BeautifulSoup import BeautifulSoup
register = template.Library()
@register.simple_tag
def atb_site_link():
    """Render a navbar icon linking back to the public site.

    Returns an empty string when ADMINTOOLS_BOOTSTRAP_SITE_LINK is unset.
    """
    if settings.ADMINTOOLS_BOOTSTRAP_SITE_LINK:
        return '''
          <li><a href="%s" class="top-icon" title="%s" rel="popover" data-placement="below"><i
            class="icon-home icon-white"></i></a></li>
          <li class="divider-vertical"></li>
        ''' % (settings.ADMINTOOLS_BOOTSTRAP_SITE_LINK, _('Open site'))
    else:
        return ''
@register.simple_tag
def atb_site_name():
    """Return the current Site name, or a generic fallback when the
    sites framework is not installed."""
    if 'django.contrib.sites' not in settings.INSTALLED_APPS:
        return _('Django site')
    return Site.objects.get_current().name
@register.simple_tag
def bootstrap_page_url(cl, page_num):
    """Build the escaped changelist URL for 1-based ``page_num``.

    Used for the prev/next links; Django numerates pages from 0, hence
    the ``- 1``.
    """
    query = cl.get_query_string({PAGE_VAR: page_num - 1})
    return escape(query)
DOT = '.'
def bootstrap_paginator_number(cl,i, li_class=None):
    """
    Generates an individual page index link in a paginated list.
    """
    if i == DOT:
        # BUG FIX: this branch previously returned a plain string, which
        # Django's autoescaping would HTML-escape, breaking the <li> markup.
        # Wrap it in mark_safe like the other branches.
        return mark_safe(u'<li><a href="#">...</a></li>')
    elif i == cl.page_num:
        # Current page: rendered as active (display is 1-based).
        return mark_safe(u'<li class="active"><a href="#">%d</a></li> ' % (i+1))
    else:
        return mark_safe(u'<li><a href="%s">%d</a></li>' % (escape(cl.get_query_string({PAGE_VAR: i})), i+1))
paginator_number = register.simple_tag(bootstrap_paginator_number)
def bootstrap_pagination(cl):
    """
    Generates the series of links to the pages in a paginated list.

    Returns the context dict consumed by admin/pagination.html.
    """
    paginator, page_num = cl.paginator, cl.page_num
    # Pagination is shown only for multi-page lists not in "show all" mode.
    pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
    if not pagination_required:
        page_range = []
    else:
        ON_EACH_SIDE = 3
        ON_ENDS = 2
        # If there are 10 or fewer pages, display links to every page.
        # Otherwise, do some fancy
        if paginator.num_pages <= 10:
            page_range = range(paginator.num_pages)
        else:
            # Insert "smart" pagination links, so that there are always ON_ENDS
            # links at either end of the list of pages, and there are always
            # ON_EACH_SIDE links at either end of the "current page" link.
            page_range = []
            if page_num > (ON_EACH_SIDE + ON_ENDS):
                page_range.extend(range(0, ON_EACH_SIDE - 1))
                page_range.append(DOT)
                page_range.extend(range(page_num - ON_EACH_SIDE, page_num + 1))
            else:
                page_range.extend(range(0, page_num + 1))
            if page_num < (paginator.num_pages - ON_EACH_SIDE - ON_ENDS - 1):
                page_range.extend(range(page_num + 1, page_num + ON_EACH_SIDE + 1))
                page_range.append(DOT)
                page_range.extend(range(paginator.num_pages - ON_ENDS, paginator.num_pages))
            else:
                page_range.extend(range(page_num + 1, paginator.num_pages))
    need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page
    return {
        'cl': cl,
        'pagination_required': pagination_required,
        # False when no link is needed; the template treats that as "hide".
        'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}),
        'page_range': page_range,
        'ALL_VAR': ALL_VAR,
        '1': 1,
        # Paginator pages are 1-based while cl.page_num is 0-based.
        'curr_page': cl.paginator.page(cl.page_num+1),
    }
bootstrap_pagination = register.inclusion_tag('admin/pagination.html')(bootstrap_pagination)
# breadcrumbs tag
class BreadcrumbsNode(template.Node):
    """
    renders bootstrap breadcrumbs list.
    usage::
        {% breadcrumbs %}
        url1|text1
        url2|text2
        text3
        {% endbreadcrumbs %}
    | is delimiter by default, you can use {% breadcrumbs delimiter_char %} to change it.
    lines without delimiters are interpreted as active breadcrumbs

    NOTE: this module is Python 2 only (``except Exception, e`` syntax).
    """
    def __init__(self, nodelist, delimiter):
        self.nodelist = nodelist
        self.delimiter = delimiter
    def render(self, context):
        data = self.nodelist.render(context).strip()
        if not data:
            return ''
        # The admin breadcrumbs div marks django-rendered content; its
        # absence (ValueError) means the block is hand-written lines.
        try:
            data.index('<div class="breadcrumbs">')
        except ValueError:
            lines = [ l.strip().split(self.delimiter) for l in data.split("\n") if l.strip() ]
        else:
            # data is django-style breadcrumbs, parsing
            try:
                soup = BeautifulSoup(data)
                lines = [ (a.get('href'), a.text) for a in soup.findAll('a')]
                # The trailing (current) crumb has no anchor; split it off
                # the container text after the last '›' separator.
                lines.append([soup.find('div').text.split('›')[-1].strip()])
            except Exception, e:
                lines = [["Cannot parse breadcrumbs: %s" % unicode(e)]]
        out = '<ul class="breadcrumb">'
        curr = 0
        for d in lines:
            # A leading '*' forces the crumb to render as active.
            if d[0][0] == '*':
                active = ' class="active"'
                d[0] = d[0][1:]
            else:
                active = ''
            curr += 1
            if (len(lines) == curr):
                # last
                divider = ''
            else:
                divider = '<span class="divider">/</span>'
            if len(d) == 2:
                out += '<li%s><a href="%s">%s</a>%s</li>' % (active, d[0], d[1], divider)
            elif len(d) == 1:
                out += '<li%s>%s%s</li>' % (active, d[0], divider)
            else:
                raise ValueError('Invalid breadcrumb line: %s' % self.delimiter.join(d))
        out += '</ul>'
        return out
@register.tag(name='breadcrumbs')
def do_breadcrumbs(parser, token):
    """Compile a {% breadcrumbs %} block into a BreadcrumbsNode."""
    # An optional argument overrides the default '|' delimiter.
    pieces = token.contents.split(None, 1)
    delimiter = pieces[1] if len(pieces) == 2 else '|'
    nodelist = parser.parse(('endbreadcrumbs',))
    parser.delete_first_token()
    return BreadcrumbsNode(nodelist, delimiter)
| quinode/django-admintools-bootstrap | admintools_bootstrap/templatetags/admintools_bootstrap.py | Python | mit | 6,117 |
# -*- coding: utf-8 -*-
"""
Written by Daniel M. Aukes and CONTRIBUTORS
Email: danaukes<at>asu.edu.
Please see LICENSE for full license.
"""
import sys
import popupcad
import qt.QtCore as qc
import qt.QtGui as qg
if __name__=='__main__':
    # BUG FIX: QApplication expects the argument list itself (sys.argv),
    # not the script-name string sys.argv[0]; passing a str raises TypeError.
    app = qg.QApplication(sys.argv)
    filename_from = 'C:/Users/danaukes/Dropbox/zhis sentinal 11 files/modified/sentinal 11 manufacturing_R08.cad'
    filename_to = 'C:/Users/danaukes/Dropbox/zhis sentinal 11 files/modified/sentinal 11 manufacturing_R09.cad'
    d = popupcad.filetypes.design.Design.load_yaml(filename_from)
    # Dialog with two side-by-side lists of the design's subdesigns plus
    # Ok/Cancel: pick the subdesign to replace (left) and its replacement
    # (right).
    widget = qg.QDialog()
    layout = qg.QVBoxLayout()
    layout1 = qg.QHBoxLayout()
    layout2 = qg.QHBoxLayout()
    list1 = qg.QListWidget()
    list2 = qg.QListWidget()
    button_ok = qg.QPushButton('Ok')
    button_cancel = qg.QPushButton('Cancel')
    subdesign_list = list(d.subdesigns.values())
    for item in subdesign_list:
        list1.addItem(str(item))
        list2.addItem(str(item))
    layout1.addWidget(list1)
    layout1.addWidget(list2)
    layout2.addWidget(button_ok)
    layout2.addWidget(button_cancel)
    layout.addLayout(layout1)
    layout.addLayout(layout2)
    widget.setLayout(layout)
    button_ok.pressed.connect(widget.accept)
    button_cancel.pressed.connect(widget.reject)
    if widget.exec_():
        # Require exactly one selection in each list before rewriting refs.
        if len(list1.selectedIndexes())==1 and len(list2.selectedIndexes())==1:
            ii_from = list1.selectedIndexes()[0].row()
            ii_to = list2.selectedIndexes()[0].row()
            print(ii_from,ii_to)
            # Point all references at the replacement, drop the original,
            # and save under the new revision filename.
            d.replace_subdesign_refs(subdesign_list[ii_from].id,subdesign_list[ii_to].id)
            d.subdesigns.pop(subdesign_list[ii_from].id)
            d.save_yaml(filename_to)
sys.exit(app.exec_()) | danaukes/popupcad | api_examples/switch_subdesign.py | Python | mit | 1,814 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-10 18:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: alters Episode.edit_key.
    # NOTE(review): the literal default '41086227' looks like a value that a
    # random-key callable produced at makemigrations time, so every migrated
    # row shares the same key -- confirm against the model definition.
    dependencies = [
        ('main', '0006_auto_20160616_1640'),
    ]
    operations = [
        migrations.AlterField(
            model_name='episode',
            name='edit_key',
            field=models.CharField(blank=True, default='41086227', help_text='key to allow unauthenticated users to edit this item.', max_length=32, null=True),
        ),
    ]
| xfxf/veyepar | dj/main/migrations/0007_auto_20160710_1833.py | Python | mit | 560 |
# @author Avtandil Kikabidze
# @copyright Copyright (c) 2008-2015, Avtandil Kikabidze aka LONGMAN ([email protected])
# @link http://longman.me
# @license The MIT License (MIT)
import os
import sys
import sublime
import sublime_plugin
st_version = 2
# Sublime Text 3 reports build numbers > 3000 (dev builds report '').
if sublime.version() == '' or int(sublime.version()) > 3000:
    st_version = 3
reloader_name = 'codeformatter.reloader'
# ST3 loads each package as a module, so it needs an extra prefix
if st_version == 3:
    reloader_name = 'CodeFormatter.' + reloader_name
    from imp import reload
# Reload the helper module when the plugin itself is reloaded so code
# changes take effect without restarting the editor.
if reloader_name in sys.modules:
    reload(sys.modules[reloader_name])
try:
    # Python 3
    from .codeformatter.formatter import Formatter
except (ValueError):
    # Python 2
    from codeformatter.formatter import Formatter
# fix for ST2
# ``print`` is not directly usable in ST2 plugin scope; grab the builtin.
cprint = globals()['__builtins__']['print']
debug_mode = False
def plugin_loaded():
    """Sublime Text entry point: one-time plugin initialization."""
    cprint('CodeFormatter: Plugin Initialized')
    # settings = sublime.load_settings('CodeFormatter.sublime-settings')
    # debug_mode = settings.get('codeformatter_debug', False)
    # if debug_mode:
    #     from pprint import pprint
    #     pprint(settings)
    #     debug_write('Debug mode enabled')
    #     debug_write('Platform ' + sublime.platform() + ' ' + sublime.arch())
    #     debug_write('Sublime Version ' + sublime.version())
    #     debug_write('Settings ' + pprint(settings))
    # The bundled PHP beautifier phar must be executable on non-Windows
    # platforms.
    if (sublime.platform() != 'windows'):
        import stat
        path = (
            sublime.packages_path() +
            '/CodeFormatter/codeformatter/lib/phpbeautifier/fmt.phar'
        )
        st = os.stat(path)
        os.chmod(path, st.st_mode | stat.S_IEXEC)
# ST2 does not call plugin_loaded() automatically.
if st_version == 2:
    plugin_loaded()
class CodeFormatterCommand(sublime_plugin.TextCommand):
    """Text command: format the current view in place."""
    def run(self, edit, syntax=None, saving=None):
        # Delegate all work to the module-level formatter driver.
        run_formatter(self.view, edit, syntax=syntax, saving=saving)
class CodeFormatterOpenTabsCommand(sublime_plugin.TextCommand):
    """Format every view open in the active window (quietly)."""
    def run(self, edit, syntax=None):
        window = sublime.active_window()
        # NOTE(review): ``edit`` belongs to self.view but is reused for every
        # open view; Sublime may reject edit objects applied to a different
        # view -- confirm behavior on ST3.
        for view in window.views():
            run_formatter(view, edit, quiet=True)
class CodeFormatterEventListener(sublime_plugin.EventListener):
    """Hooks the pre-save event to trigger format-on-save."""
    def on_pre_save(self, view):
        # The command itself checks whether format-on-save is enabled.
        view.run_command('code_formatter', {'saving': True})
class CodeFormatterShowPhpTransformationsCommand(sublime_plugin.TextCommand):
    """List the PHP transformations supported by the bundled beautifier."""
    def run(self, edit, syntax=False):
        import subprocess
        import re
        platform = sublime.platform()
        settings = sublime.load_settings('CodeFormatter.sublime-settings')
        opts = settings.get('codeformatter_php_options')
        # Resolve the PHP interpreter; fall back to "php" on PATH.
        php_path = 'php'
        if ('php_path' in opts and opts['php_path']):
            php_path = opts['php_path']
        php55_compat = False
        if ('php55_compat' in opts and opts['php55_compat']):
            php55_compat = opts['php55_compat']
        # Pick the phar matching the interpreter's compatibility level and
        # ask it for its transformation list.
        cmd = []
        cmd.append(str(php_path))
        if php55_compat:
            cmd.append(
                '{}/CodeFormatter/codeformatter/lib/phpbeautifier/fmt.phar'.format(
                    sublime.packages_path()))
        else:
            cmd.append(
                '{}/CodeFormatter/codeformatter/lib/phpbeautifier/phpf.phar'.format(
                    sublime.packages_path()))
        cmd.append('--list')
        #print(cmd)
        stderr = ''
        stdout = ''
        try:
            # Suppress the console window flash on Windows.
            # NOTE(review): creationflags=subprocess.SW_HIDE is 0 (SW_HIDE is
            # a show-window constant, not a creation flag) -- possibly meant
            # CREATE_NO_WINDOW; confirm.
            if (platform == 'windows'):
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                startupinfo.wShowWindow = subprocess.SW_HIDE
                p = subprocess.Popen(
                    cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE, startupinfo=startupinfo,
                    shell=False, creationflags=subprocess.SW_HIDE)
            else:
                p = subprocess.Popen(
                    cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE)
            stdout, stderr = p.communicate()
        except Exception as e:
            stderr = str(e)
        if (not stderr and not stdout):
            stderr = 'Error while gethering list of php transformations'
        if len(stderr) == 0 and len(stdout) > 0:
            # Success: show the transformation list in an output panel,
            # rewriting the phar's usage header into a friendlier title.
            text = stdout.decode('utf-8')
            text = re.sub(
                'Usage:.*?PASSNAME', 'Available PHP Tranformations:', text)
            window = self.view.window()
            pt = window.get_output_panel('paneltranformations')
            pt.set_read_only(False)
            pt.insert(edit, pt.size(), text)
            window.run_command(
                'show_panel', {'panel': 'output.paneltranformations'})
        else:
            show_error('Formatter error:\n' + stderr)
def run_formatter(view, edit, *args, **kwargs):
    """Format ``view``'s entire buffer in place.

    Keyword args:
        syntax: optional syntax override passed to Formatter.
        saving: True when invoked from on_pre_save (honors format-on-save).
        quiet: suppress "formatter not found" popups (used for bulk runs).
    """
    if view.is_scratch():
        show_error('File is scratch')
        return
    # default parameters
    syntax = kwargs.get('syntax')
    saving = kwargs.get('saving', False)
    quiet = kwargs.get('quiet', False)
    formatter = Formatter(view, syntax)
    if not formatter.exists():
        if not quiet and not saving:
            show_error('Formatter for this file type ({}) not found.'.format(
                formatter.syntax))
        return
    if (saving and not formatter.format_on_save_enabled()):
        return
    file_text = sublime.Region(0, view.size())
    file_text_utf = view.substr(file_text).encode('utf-8')
    if (len(file_text_utf) == 0):
        return
    stdout, stderr = formatter.format(file_text_utf)
    # On success, replace the whole buffer with the formatted output.
    if len(stderr) == 0 and len(stdout) > 0:
        view.replace(edit, file_text, stdout)
    elif not quiet:
        show_error('Format error:\n' + stderr)
def console_write(text, prefix=False):
    """Write ``text`` (plus a newline) to stdout.

    Args:
        text: message to print.
        prefix: when True, prepend 'CodeFormatter: ' to the line.
    """
    if prefix:
        sys.stdout.write('CodeFormatter: ')
    sys.stdout.write(text + '\n')
def debug_write(text, prefix=True):
    """Write a debug message to stdout, prefixed by default.

    BUG FIX: the original ignored its ``prefix`` argument and always passed
    True to console_write.  It now forwards the argument; the default is
    True so existing no-argument callers keep their prefixed output.
    """
    console_write(text, prefix)
def show_error(text):
    """Pop up a CodeFormatter-branded error dialog."""
    sublime.error_message(u'CodeFormatter\n\n%s' % text)
| crlang/sublime-text---front-end-config | Data/Packages/CodeFormatter/CodeFormatter.py | Python | mit | 6,068 |
# -*- coding:utf-8 -*-
"""
# Author: Pegasus Wang ([email protected], http://ningning.today)
# Created Time : Fri Feb 20 21:38:57 2015
# File Name: wechatService.py
# Description:
# :copyright: (c) 2015 by Pegasus Wang.
# :license: MIT, see LICENSE for more details.
"""
import json
import time
import urllib
import urllib2
from wechatUtil import MessageUtil
from wechatReply import TextReply
class RobotService(object):
    """Auto reply robot service backed by the Tuling chatbot HTTP API."""
    # NOTE(review): API key is hard-coded; consider moving it to config.
    KEY = 'd92d20bc1d8bb3cff585bf746603b2a9'
    url = 'http://www.tuling123.com/openapi/api'
    @staticmethod
    def auto_reply(req_info):
        """POST ``req_info`` to the Tuling API and return its reply text.

        ``<br>`` tags in the reply are converted to newlines.
        """
        query = {'key': RobotService.KEY, 'info': req_info.encode('utf-8')}
        # NOTE(review): ``headers`` is built but never attached to the
        # request below -- confirm whether it was meant to be passed.
        headers = {'Content-type': 'text/html', 'charset': 'utf-8'}
        data = urllib.urlencode(query)
        req = urllib2.Request(RobotService.url, data)
        f = urllib2.urlopen(req).read()
        return json.loads(f).get('text').replace('<br>', '\n')
        #return json.loads(f).get('text')
class WechatService(object):
    """process request"""
    @staticmethod
    def processRequest(request):
        """Dispatch an incoming WeChat message and build a text reply.

        :param request: post request message (XML string)
        :return: reply XML string
        """
        requestMap = MessageUtil.parseXml(request)
        fromUserName = requestMap.get(u'FromUserName')
        toUserName = requestMap.get(u'ToUserName')
        createTime = requestMap.get(u'CreateTime')
        msgType = requestMap.get(u'MsgType')
        msgId = requestMap.get(u'MsgId')
        # The reply is always a text message with sender/receiver swapped.
        textReply = TextReply()
        textReply.setToUserName(fromUserName)
        textReply.setFromUserName(toUserName)
        textReply.setCreateTime(time.time())
        textReply.setMsgType(MessageUtil.RESP_MESSAGE_TYPE_TEXT)
        if msgType == MessageUtil.REQ_MESSAGE_TYPE_TEXT:
            content = requestMap.get('Content').decode('utf-8') # note: decode first
            #respContent = u'您发送的是文本消息:' + content
            # Text messages are answered by the Tuling chat robot.
            respContent = RobotService.auto_reply(content)
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_IMAGE:
            respContent = u'您发送的是图片消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_VOICE:
            respContent = u'您发送的是语音消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_VIDEO:
            respContent = u'您发送的是视频消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_LOCATION:
            respContent = u'您发送的是地理位置消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_LINK:
            respContent = u'您发送的是链接消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_EVENT:
            eventType = requestMap.get(u'Event')
            # NOTE(review): for the unsubscribe/SCAN/LOCATION/CLICK branches
            # respContent is never assigned, which raises NameError at
            # setContent below -- confirm the intended handling.
            if eventType == MessageUtil.EVENT_TYPE_SUBSCRIBE:
                respContent = u'^_^谢谢您的关注,本公众号由王宁宁开发(python2.7+django1.4),如果你有兴趣继续开发,' \
                              u'可以联系我,就当打发时间了.'
            elif eventType == MessageUtil.EVENT_TYPE_UNSUBSCRIBE:
                pass
            elif eventType == MessageUtil.EVENT_TYPE_SCAN:
                # TODO
                pass
            elif eventType == MessageUtil.EVENT_TYPE_LOCATION:
                # TODO
                pass
            elif eventType == MessageUtil.EVENT_TYPE_CLICK:
                # TODO
                pass
        textReply.setContent(respContent)
        respXml = MessageUtil.class2xml(textReply)
        return respXml
"""
if msgType == 'text':
content = requestMap.get('Content')
# TODO
elif msgType == 'image':
picUrl = requestMap.get('PicUrl')
# TODO
elif msgType == 'voice':
mediaId = requestMap.get('MediaId')
format = requestMap.get('Format')
# TODO
elif msgType == 'video':
mediaId = requestMap.get('MediaId')
thumbMediaId = requestMap.get('ThumbMediaId')
# TODO
elif msgType == 'location':
lat = requestMap.get('Location_X')
lng = requestMap.get('Location_Y')
label = requestMap.get('Label')
scale = requestMap.get('Scale')
# TODO
elif msgType == 'link':
title = requestMap.get('Title')
description = requestMap.get('Description')
url = requestMap.get('Url')
"""
| PegasusWang/WeiPython | wechat/wechatService.py | Python | mit | 4,478 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings, then hand the command line
    # over to the management dispatcher.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Sharing.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| XDSETeamA/XD_SE_TeamA | team9/1/Sharing/manage.py | Python | mit | 250 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple, schema-based database abstraction layer for the datastore.
Modeled after Django's abstraction layer on top of SQL databases,
http://www.djangoproject.com/documentation/mode_api/. Ours is a little simpler
and a lot less code because the datastore is so much simpler than SQL
databases.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want to
publish a story with title, body, and created date, you would do it like this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
You can create a new Story in the datastore with this usage pattern:
story = Story(title='My title')
story.body = 'My body'
story.put()
You query for Story entities using built in query interfaces that map directly
to the syntax and semantics of the datastore:
stories = Story.all().filter('date >=', yesterday).order('-date')
for story in stories:
print story.title
The Property declarations enforce types by performing validation on assignment.
For example, the DateTimeProperty enforces that you assign valid datetime
objects, and if you supply the "required" option for a property, you will not
be able to assign None to that property.
We also support references between models, so if a story has comments, you
would represent it like this:
class Comment(db.Model):
story = db.ReferenceProperty(Story)
body = db.TextProperty()
When you get a story out of the datastore, the story reference is resolved
automatically the first time it is referenced, which makes it easy to use
model instances without performing additional queries by hand:
comment = Comment.get(key)
print comment.story.title
Likewise, you can access the set of comments that refer to each story through
this property through a reverse reference called comment_set, which is a Query
preconfigured to return all matching comments:
story = Story.get(key)
for comment in story.comment_set:
print comment.body
"""
import base64
import copy
import datetime
import logging
import re
import time
import urlparse
import warnings
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
Error = datastore_errors.Error
BadValueError = datastore_errors.BadValueError
BadPropertyError = datastore_errors.BadPropertyError
BadRequestError = datastore_errors.BadRequestError
EntityNotFoundError = datastore_errors.EntityNotFoundError
BadArgumentError = datastore_errors.BadArgumentError
QueryNotFoundError = datastore_errors.QueryNotFoundError
TransactionNotFoundError = datastore_errors.TransactionNotFoundError
Rollback = datastore_errors.Rollback
TransactionFailedError = datastore_errors.TransactionFailedError
BadFilterError = datastore_errors.BadFilterError
BadQueryError = datastore_errors.BadQueryError
BadKeyError = datastore_errors.BadKeyError
InternalError = datastore_errors.InternalError
NeedIndexError = datastore_errors.NeedIndexError
Timeout = datastore_errors.Timeout
CommittedButStillApplying = datastore_errors.CommittedButStillApplying
ValidationError = BadValueError
Key = datastore_types.Key
Category = datastore_types.Category
Link = datastore_types.Link
Email = datastore_types.Email
GeoPt = datastore_types.GeoPt
IM = datastore_types.IM
PhoneNumber = datastore_types.PhoneNumber
PostalAddress = datastore_types.PostalAddress
Rating = datastore_types.Rating
Text = datastore_types.Text
Blob = datastore_types.Blob
ByteString = datastore_types.ByteString
BlobKey = datastore_types.BlobKey
READ_CAPABILITY = datastore.READ_CAPABILITY
WRITE_CAPABILITY = datastore.WRITE_CAPABILITY
STRONG_CONSISTENCY = datastore.STRONG_CONSISTENCY
EVENTUAL_CONSISTENCY = datastore.EVENTUAL_CONSISTENCY
_kind_map = {}
_SELF_REFERENCE = object()
_RESERVED_WORDS = set(['key_name'])
# Model-layer error hierarchy, rooted at datastore_errors.Error.
class NotSavedError(Error):
  """Raised when a saved-object action is performed on a non-saved object."""
class KindError(BadValueError):
  """Raised when an entity is used with incorrect Model."""
class PropertyError(Error):
  """Raised when non-existent property is referenced."""
class DuplicatePropertyError(Error):
  """Raised when a property is duplicated in a model definition."""
class ConfigurationError(Error):
  """Raised when a property or model is improperly configured."""
class ReservedWordError(Error):
  """Raised when a property is defined for a reserved word."""
class DerivedPropertyError(Error):
  """Raised when attempting to assign a value to a derived property."""
_ALLOWED_PROPERTY_TYPES = set([
basestring,
str,
unicode,
bool,
int,
long,
float,
Key,
datetime.datetime,
datetime.date,
datetime.time,
Blob,
ByteString,
Text,
users.User,
Category,
Link,
Email,
GeoPt,
IM,
PhoneNumber,
PostalAddress,
Rating,
BlobKey,
])
_ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
_ALLOWED_EXPANDO_PROPERTY_TYPES.update((list, tuple, type(None)))
_OPERATORS = ['<', '<=', '>', '>=', '=', '==', '!=', 'in']
_FILTER_REGEX = re.compile(
'^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(_OPERATORS),
re.IGNORECASE | re.UNICODE)
def class_for_kind(kind):
  """Return base-class responsible for implementing kind.

  Args:
    kind: Entity kind string.
  Returns:
    Class implementation for kind.
  Raises:
    KindError when there is no implementation for kind.
  """
  model_class = _kind_map.get(kind)
  if model_class is None:
    raise KindError('No implementation for kind \'%s\'' % kind)
  return model_class
def check_reserved_word(attr_name):
  """Raise an exception if attr_name may not be used as a property name.

  Args:
    attr_name: Name to check to see if it is a reserved word.

  Raises:
    ReservedWordError: when attr_name is a '__...__' style name, an
      explicitly reserved word, or an existing attribute of Model.
  """
  if datastore_types.RESERVED_PROPERTY_NAME.match(attr_name):
    raise ReservedWordError(
        "Cannot define property. All names both beginning and "
        "ending with '__' are reserved.")
  is_reserved = attr_name in _RESERVED_WORDS or attr_name in dir(Model)
  if is_reserved:
    raise ReservedWordError(
        "Cannot define property using reserved word '%(attr_name)s'. "
        "If you would like to use this name in the datastore consider "
        "using a different name like %(attr_name)s_ and adding "
        "name='%(attr_name)s' to the parameter list of the property "
        "definition." % {'attr_name': attr_name})
def query_descendants(model_instance):
  """Returns a query for all the descendants of a model instance.

  Args:
    model_instance: Model instance to find the descendants of.

  Returns:
    Query that will retrieve all entities that have the given model instance
    as an ancestor.  Unlike normal ancestor queries, this does not include
    the ancestor itself.
  """
  # Trailing semicolons removed; behavior unchanged.
  result = Query().ancestor(model_instance)
  # Excludes the ancestor itself by requiring keys strictly greater than
  # the ancestor's key (relies on descendant keys ordering after their
  # ancestor in key order).
  result.filter(datastore_types._KEY_SPECIAL_PROPERTY + ' >',
                model_instance.key())
  return result
def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
  """Encodes a model instance as an entity protocol buffer.

  Args:
    model_instance: Model instance to encode.

  Returns:
    entity_pb.EntityProto representation of the model instance.
  """
  entity = model_instance._populate_entity(_entity_class)
  return entity.ToPb()
def model_from_protobuf(pb, _entity_class=datastore.Entity):
  """Decodes a model instance from a protocol buffer.

  Args:
    pb: The protocol buffer representation of the model instance.  Can be
      an entity_pb.EntityProto or a str encoding of one.

  Returns:
    Model instance resulting from decoding the protocol buffer.
  """
  entity = _entity_class.FromPb(pb)
  model_class = class_for_kind(entity.kind())
  return model_class.from_entity(entity)
def _initialize_properties(model_class, name, bases, dct):
  """Initialize Property attributes for Model-class.

  Collects Property descriptors inherited from bases and defined in the
  class body into model_class._properties, raising on duplicates, and
  records which properties are unindexed.

  Args:
    model_class: Model class to initialize properties for.
    name: Name of the new class.
    bases: Base classes of the new class.
    dct: Class-body dictionary of the new class.

  Raises:
    DuplicatePropertyError: when the same property name is defined twice,
      or inherited from two distinct defining classes.
  """
  model_class._properties = {}
  # Maps property name -> the class considered its origin so far.
  property_source = {}

  def get_attr_source(name, cls):
    # Walk the MRO to find the class that actually defines the attribute.
    for src_cls in cls.mro():
      if name in src_cls.__dict__:
        return src_cls

  defined = set()
  for base in bases:
    if hasattr(base, '_properties'):
      property_keys = set(base._properties.keys())
      duplicate_property_keys = defined & property_keys
      for dupe_prop_name in duplicate_property_keys:
        # A name seen via two bases is only an error when it originates
        # from two *different* defining classes; the same definition
        # reached twice (diamond inheritance) is fine.
        old_source = property_source[dupe_prop_name] = get_attr_source(
            dupe_prop_name, property_source[dupe_prop_name])
        new_source = get_attr_source(dupe_prop_name, base)
        if old_source != new_source:
          raise DuplicatePropertyError(
              'Duplicate property, %s, is inherited from both %s and %s.' %
              (dupe_prop_name, old_source.__name__, new_source.__name__))
      property_keys -= duplicate_property_keys
      if property_keys:
        defined |= property_keys
        property_source.update(dict.fromkeys(property_keys, base))
        model_class._properties.update(base._properties)

  for attr_name in dct.keys():
    attr = dct[attr_name]
    if isinstance(attr, Property):
      check_reserved_word(attr_name)
      if attr_name in defined:
        raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
      defined.add(attr_name)
      model_class._properties[attr_name] = attr
      # Tell the descriptor which class owns it and under what name.
      attr.__property_config__(model_class, attr_name)

  model_class._unindexed_properties = frozenset(
      name for name, prop in model_class._properties.items() if not prop.indexed)
def _coerce_to_key(value):
  """Returns the datastore Key corresponding to value.

  Args:
    value: a Model or Key instance, a string-encoded key, or None.

  Returns:
    The corresponding key, or None if value is None.

  Raises:
    BadArgumentError: if more than one model or key is supplied.
  """
  if value is None:
    return None
  # Normalization always yields a sequence; reject anything but a single
  # item.  (The "multiple" flag it returns is not needed here.)
  value, _ = datastore.NormalizeAndTypeCheck(
      value, (Model, Key, basestring))
  if len(value) > 1:
    raise datastore_errors.BadArgumentError('Expected only one model or key')
  value = value[0]
  if isinstance(value, Model):
    return value.key()
  if isinstance(value, basestring):
    return Key(value)
  return value
class PropertiedClass(type):
  """Meta-class for initializing Model classes properties.

  Used for initializing Properties defined in the context of a model.
  By using a meta-class much of the configuration of a Property
  descriptor becomes implicit.  By using this meta-class, descriptors
  that are of class Model are notified about which class they
  belong to and what attribute they are associated with and can
  do appropriate initialization via __property_config__.

  Duplicate properties are not permitted.
  """

  def __init__(cls, name, bases, dct, map_kind=True):
    """Initializes a class that might have property definitions.

    This method is called when a class is created with the PropertiedClass
    meta-class.

    Loads all properties for this model and its base classes in to a
    dictionary for easy reflection via the 'properties' method.

    Configures each property defined in the new class.

    Duplicate properties, either defined in the new class or defined
    separately in two base classes are not permitted.

    Properties may not assigned to names which are in the list of
    _RESERVED_WORDS.  It is still possible to store a property using a
    reserved word in the datastore by using the 'name' keyword argument
    to the Property constructor.

    Args:
      cls: Class being initialized.
      name: Name of new class.
      bases: Base classes of new class.
      dct: Dictionary of new definitions for class.
      map_kind: Whether to register the class in the global kind map.

    Raises:
      DuplicatePropertyError: when a property is duplicated either in the
        new class or separately in two base classes.
      ReservedWordError: when a property is given a name that is in the
        list of reserved words, attributes of Model and names of the form
        '__.*__'.
    """
    super(PropertiedClass, cls).__init__(name, bases, dct)
    _initialize_properties(cls, name, bases, dct)
    if map_kind:
      # Register so class_for_kind()/model_from_protobuf() can look the
      # class up by its kind name.
      _kind_map[cls.kind()] = cls
class Property(object):
  """A Property is an attribute of a Model.

  It defines the type of the attribute, which determines how it is stored
  in the datastore and how the property values are validated.  Different
  property types support different options, which change validation rules,
  default values, etc.  The simplest example of a property is a
  StringProperty:

     class Story(db.Model):
       title = db.StringProperty()
  """

  # Class-wide counter used to record the declaration order of properties.
  creation_counter = 0

  def __init__(self,
               verbose_name=None,
               name=None,
               default=None,
               required=False,
               validator=None,
               choices=None,
               indexed=True):
    """Initializes this Property with the given options.

    Args:
      verbose_name: User friendly name of property.
      name: Storage name for property.  By default, uses attribute name
        as it is assigned in the Model sub-class.
      default: Default value for property if none is assigned.
      required: Whether property is required.
      validator: User provided method used for validation.
      choices: User provided set of valid property values.
      indexed: Whether property is indexed.
    """
    self.verbose_name = verbose_name
    self.name = name
    self.default = default
    self.required = required
    self.validator = validator
    self.choices = choices
    self.indexed = indexed
    # Snapshot and bump the class counter so instances sort by declaration
    # order.
    self.creation_counter = Property.creation_counter
    Property.creation_counter += 1

  def __property_config__(self, model_class, property_name):
    """Configure property, connecting it to its model.

    Configure the property so that it knows its property name and what
    class it belongs to.

    Args:
      model_class: Model class which Property will belong to.
      property_name: Name of property within Model instance to store
        property values in.  By default this will be the property name
        preceded by an underscore, but may change for different subclasses.
    """
    self.model_class = model_class
    # An explicit storage name passed to __init__ takes precedence.
    if self.name is None:
      self.name = property_name

  def __get__(self, model_instance, model_class):
    """Returns the value for this property on the given model instance.

    See http://docs.python.org/ref/descriptors.html for a description of
    the arguments to this class and what they mean."""
    if model_instance is None:
      # Accessed on the class itself: return the descriptor object.
      return self
    try:
      return getattr(model_instance, self._attr_name())
    except AttributeError:
      return None

  def __set__(self, model_instance, value):
    """Sets the value for this property on the given model instance.

    See http://docs.python.org/ref/descriptors.html for a description of
    the arguments to this class and what they mean.
    """
    value = self.validate(value)
    setattr(model_instance, self._attr_name(), value)

  def default_value(self):
    """Default value for unassigned values.

    Returns:
      Default value as provided by __init__(default).
    """
    return self.default

  def validate(self, value):
    """Assert that provided value is compatible with this property.

    Args:
      value: Value to validate against this Property.

    Returns:
      A valid value, either the input unchanged or adapted to the
      required type.

    Raises:
      BadValueError if the value is not appropriate for this
      property in any way.
    """
    if self.empty(value):
      if self.required:
        raise BadValueError('Property %s is required' % self.name)
    else:
      # Membership test replaces the original flag-setting loop: it uses
      # the same per-element == comparison but stops at the first match.
      if self.choices:
        if value not in self.choices:
          raise BadValueError('Property %s is %r; must be one of %r' %
                              (self.name, value, self.choices))
    if self.validator is not None:
      self.validator(value)
    return value

  def empty(self, value):
    """Determine if value is empty in the context of this property.

    For most kinds, this is equivalent to "not value", but for kinds like
    bool, the test is more subtle, so subclasses can override this method
    if necessary.

    Args:
      value: Value to validate against this Property.

    Returns:
      True if this value is considered empty in the context of this
      Property type, otherwise False.
    """
    return not value

  def get_value_for_datastore(self, model_instance):
    """Datastore representation of this property.

    Looks for this property in the given model instance, and returns the
    proper datastore representation of the value that can be stored in a
    datastore entity.  Most critically, it will fetch the datastore key
    value for reference properties.

    Args:
      model_instance: Instance to fetch datastore value from.

    Returns:
      Datastore representation of the model value in a form that is
      appropriate for storing in the datastore.
    """
    return self.__get__(model_instance, model_instance.__class__)

  def make_value_from_datastore(self, value):
    """Native representation of this property.

    Given a value retrieved from a datastore entity, return a value,
    possibly converted, to be stored on the model instance.  Usually
    this returns the value unchanged, but a property class may
    override this when it uses a different datatype on the model
    instance than on the entity.

    This API is not quite symmetric with get_value_for_datastore(),
    because the model instance on which to store the converted value
    may not exist yet -- we may be collecting values to be passed to a
    model constructor.

    Args:
      value: value retrieved from the datastore entity.

    Returns:
      The value converted for use as a model instance attribute.
    """
    return value

  def _require_parameter(self, kwds, parameter, value):
    """Sets kwds[parameter] to value.

    If kwds[parameter] exists and is not value, raises ConfigurationError.

    Args:
      kwds: The parameter dict, which maps parameter names (strings) to
        values.
      parameter: The name of the parameter to set.
      value: The value to set it to.
    """
    if parameter in kwds and kwds[parameter] != value:
      raise ConfigurationError('%s must be %s.' % (parameter, value))
    kwds[parameter] = value

  def _attr_name(self):
    """Attribute name we use for this property in model instances.

    DO NOT USE THIS METHOD.
    """
    return '_' + self.name

  # Python type stored by this property; subclasses override.
  data_type = str

  def datastore_type(self):
    """Deprecated backwards-compatible accessor method for self.data_type."""
    return self.data_type
class Model(object):
"""Model is the superclass of all object entities in the datastore.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want
to publish a story with title, body, and created date, you would do it like
this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
A model instance can have a single parent. Model instances without any
parent are root entities. It is possible to efficiently query for
instances by their shared parent. All descendents of a single root
instance also behave as a transaction group. This means that when you
work one member of the group within a transaction all descendents of that
root join the transaction. All operations within a transaction on this
group are ACID.
"""
__metaclass__ = PropertiedClass
def __init__(self,
             parent=None,
             key_name=None,
             _app=None,
             _from_entity=False,
             **kwds):
  """Creates a new instance of this model.

  To create a new entity, you instantiate a model and then call put(),
  which saves the entity to the datastore:

     person = Person()
     person.name = 'Bret'
     person.put()

  You can initialize properties in the model in the constructor with keyword
  arguments:

     person = Person(name='Bret')

  We initialize all other properties to the default value (as defined by the
  properties in the model definition) if they are not provided in the
  constructor.

  Args:
    parent: Parent instance for this instance or None, indicating a top-
      level instance.
    key_name: Name for new model instance.
    _from_entity: Intentionally undocumented.
    kwds: Keyword arguments mapping to properties of model.  Also:
      key: Key instance for this instance, if provided makes parent and
        key_name redundant (they do not need to be set but if they are
        they must match the key).
  """
  key = kwds.get('key', None)
  if key is not None:
    # A complete key was supplied: normalize it to a Key instance, then
    # verify it is consistent with any redundant parent/key_name/_app
    # arguments before adopting it.
    if isinstance(key, (tuple, list)):
      key = Key.from_path(*key)
    if isinstance(key, basestring):
      key = Key(encoded=key)
    if not isinstance(key, Key):
      raise TypeError('Expected Key type; received %s (is %s)' %
                      (key, key.__class__.__name__))
    if not key.has_id_or_name():
      raise BadKeyError('Key must have an id or name')
    if key.kind() != self.kind():
      raise BadKeyError('Expected Key kind to be %s; received %s' %
                        (self.kind(), key.kind()))
    if _app is not None and key.app() != _app:
      raise BadKeyError('Expected Key app to be %s; received %s' %
                        (_app, key.app()))
    if key_name and key_name != key.name():
      raise BadArgumentError('Cannot use key and key_name at the same time'
                             ' with different values')
    if parent and parent != key.parent():
      raise BadArgumentError('Cannot use key and parent at the same time'
                             ' with different values')
    self._key = key
    self._key_name = None
    self._parent = None
    self._parent_key = None
  else:
    # No complete key: validate key_name and parent separately.
    if key_name == '':
      raise BadKeyError('Name cannot be empty.')
    elif key_name is not None and not isinstance(key_name, basestring):
      raise BadKeyError('Name must be string type, not %s' %
                        key_name.__class__.__name__)
    if parent is not None:
      if not isinstance(parent, (Model, Key)):
        raise TypeError('Expected Model type; received %s (is %s)' %
                        (parent, parent.__class__.__name__))
      if isinstance(parent, Model) and not parent.has_key():
        raise BadValueError(
            "%s instance must have a complete key before it can be used as a "
            "parent." % parent.kind())
      if isinstance(parent, Key):
        self._parent_key = parent
        self._parent = None
      else:
        self._parent_key = parent.key()
        self._parent = parent
    else:
      self._parent_key = None
      self._parent = None
    self._key_name = key_name
    self._key = None
  self._entity = None
  if _app is not None and isinstance(_app, Key):
    raise BadArgumentError('_app should be a string; received Key(\'%s\'):\n'
                           ' This may be the result of passing \'key\' as '
                           'a positional parameter in SDK 1.2.6. Please '
                           'only pass \'key\' as a keyword parameter.' % _app)
  self._app = _app
  # Assign property values: use the caller-supplied keyword argument when
  # present, otherwise the property's declared default.
  for prop in self.properties().values():
    if prop.name in kwds:
      value = kwds[prop.name]
    else:
      value = prop.default_value()
    try:
      prop.__set__(self, value)
    except DerivedPropertyError, e:
      # Derived properties cannot be assigned; swallow the error for
      # defaults, but propagate it for an explicit caller-supplied value
      # (unless we are rebuilding from a stored entity).
      if prop.name in kwds and not _from_entity:
        raise
def key(self):
  """Unique key for this entity.

  This property is only available if this entity is already stored in the
  datastore or if it has a full key: after put() has been called, after it
  was fetched from a query, or when a complete key (or key name) was given
  at construction time.

  Returns:
    Datastore key of persisted entity.

  Raises:
    NotSavedError: when the entity is not persistent and no key can be
      constructed.
  """
  if self.is_saved():
    return self._entity.key()
  if self._key:
    return self._key
  if not self._key_name:
    raise NotSavedError()
  # Build the full key lazily from kind, key name and ancestor, and cache
  # it for subsequent calls.
  ancestor = self._parent_key or (self._parent and self._parent.key())
  self._key = Key.from_path(self.kind(), self._key_name, parent=ancestor)
  return self._key
def _to_entity(self, entity):
  """Copies information from this model to provided entity.

  Args:
    entity: Entity to save information on.
  """
  for prop in self.properties().values():
    datastore_value = prop.get_value_for_datastore(self)
    if datastore_value == []:
      # An empty-list value means "no value": remove the property from the
      # entity entirely.  The explicit == [] test is deliberate -- other
      # falsy values such as 0, '' or None must still be stored.
      try:
        del entity[prop.name]
      except KeyError:
        pass
    else:
      entity[prop.name] = datastore_value
  # Record which properties should be excluded from datastore indexes.
  entity.set_unindexed_properties(self._unindexed_properties)
def _populate_internal_entity(self, _entity_class=datastore.Entity):
  """Populates self._entity, saving its state to the datastore.

  After this method is called, calling is_saved() will return True.

  Returns:
    The populated self._entity.
  """
  self._entity = self._populate_entity(_entity_class=_entity_class)
  # The entity now owns the key; drop any cached key attributes.
  for cached_attr in ('_key_name', '_key'):
    if hasattr(self, cached_attr):
      delattr(self, cached_attr)
  return self._entity
def put(self, **kwargs):
  """Writes this model instance to the datastore.

  If this instance is new, we add an entity to the datastore.
  Otherwise, we update this instance, and the key will remain the
  same.

  Returns:
    The key of the instance (either the existing key or a new key).

  Raises:
    TransactionFailedError if the data could not be committed.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  # Marks this instance as saved and syncs property values to the entity.
  self._populate_internal_entity()
  return datastore.Put(self._entity, rpc=rpc)

# Backwards-compatible alias.
save = put
def _populate_entity(self, _entity_class=datastore.Entity):
  """Internal helper -- Populate self._entity or create a new one
  if that one does not exist.  Does not change any state of the instance
  other than the internal state of the entity.

  This method is separate from _populate_internal_entity so that it is
  possible to call to_xml without changing the state of an unsaved entity
  to saved.

  Returns:
    self._entity or a new Entity which is not stored on the instance.
  """
  if self.is_saved():
    entity = self._entity
  else:
    # Build constructor arguments for a fresh entity from whichever key
    # information this unsaved instance carries.
    kwds = {'_app': self._app,
            'unindexed_properties': self._unindexed_properties}
    if self._key is not None:
      # A full key: reuse its id or name, and its parent if any.
      if self._key.id():
        kwds['id'] = self._key.id()
      else:
        kwds['name'] = self._key.name()
      if self._key.parent():
        kwds['parent'] = self._key.parent()
    else:
      if self._key_name is not None:
        kwds['name'] = self._key_name
      if self._parent_key is not None:
        kwds['parent'] = self._parent_key
      elif self._parent is not None:
        kwds['parent'] = self._parent._entity
    entity = _entity_class(self.kind(), **kwds)
  self._to_entity(entity)
  return entity
def delete(self, **kwargs):
  """Deletes this entity from the datastore.

  Raises:
    TransactionFailedError if the data could not be committed.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  datastore.Delete(self.key(), rpc=rpc)
  # Keep the full key (so key() still works after deletion) but clear the
  # entity so is_saved() becomes False.  key() must be captured before the
  # cached state below is reset.
  self._key = self.key()
  self._key_name = None
  self._parent_key = None
  self._entity = None
def is_saved(self):
  """Determine if entity is persisted in the datastore.

  New instances of Model do not start out saved in the data.  Objects
  which are saved to or loaded from the Datastore will have a True saved
  state.

  Returns:
    True if object has been persisted to the datastore, otherwise False.
  """
  return self._entity is not None
def has_key(self):
  """Determine if this model instance has a complete key.

  When not using a fully self-assigned Key, ids are not assigned until the
  data is saved to the Datastore, but instances with a key name always
  have a full key.

  Returns:
    A true value if the object has been persisted to the datastore or has
    a key or has a key_name, otherwise a false value.  (Note: the result
    is truthy/falsy, not necessarily a bool -- it may be the key itself.)
  """
  return self.is_saved() or self._key or self._key_name
def dynamic_properties(self):
  """Returns a list of all dynamic properties defined for instance."""
  # Plain Model instances never have dynamic properties; presumably
  # Expando overrides this -- the override is outside this chunk.
  return []
def instance_properties(self):
  """Alias for dynamic_properties."""
  return self.dynamic_properties()
def parent(self):
  """Get the parent of the model instance.

  Returns:
    Parent of contained entity or parent provided in constructor; None if
    the instance has no parent.
  """
  if self._parent is None:
    ancestor_key = self.parent_key()
    if ancestor_key is not None:
      # Fetch lazily on first access and cache for subsequent calls.
      self._parent = get(ancestor_key)
  return self._parent
def parent_key(self):
  """Get the parent's key.

  This method is useful for avoiding a potential fetch from the datastore
  while still getting information about the instance's parent.

  Returns:
    Parent key of entity, None if there is no parent.
  """
  # Check each possible source of ancestor information in priority order.
  if self._parent_key is not None:
    return self._parent_key
  if self._parent is not None:
    return self._parent.key()
  if self._entity is not None:
    return self._entity.parent()
  if self._key is not None:
    return self._key.parent()
  return None
def to_xml(self, _entity_class=datastore.Entity):
  """Generate an XML representation of this model instance.

  atom and gd:namespace properties are converted to XML according to their
  respective schemas.  For more information, see:

    http://www.atomenabled.org/developers/syndication/
    http://code.google.com/apis/gdata/common-elements.html
  """
  # Uses the non-mutating entity builder so an unsaved instance stays
  # unsaved.
  return self._populate_entity(_entity_class).ToXml()
@classmethod
def get(cls, keys, **kwargs):
  """Fetch instance(s) of this specific Model type by key.

  Key objects and string keys are both supported (strings are converted
  to Key objects automatically).  Using the class-specific get ensures the
  retrieved entities are of the expected kind.  Example:

     story = Story.get(story_key)

  Args:
    keys: Key within datastore entity collection to find; or string key;
      or list of Keys or string keys.

  Returns:
    If a single key was given: a Model instance associated with key
    for provided class if it exists in the datastore, otherwise
    None; if a list of keys was given: a list whose items are either
    a Model instance or None.

  Raises:
    KindError: if any of the retrieved objects are not instances of the
      type associated with the call to 'get'.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  results = get(keys, rpc=rpc)
  if results is None:
    return None
  # Normalize to a list for the kind check, but return the original shape.
  instances = [results] if isinstance(results, Model) else results
  for instance in instances:
    if instance is not None and not isinstance(instance, cls):
      raise KindError('Kind %r is not a subclass of kind %r' %
                      (instance.kind(), cls.kind()))
  return results
@classmethod
def get_by_key_name(cls, key_names, parent=None, **kwargs):
"""Get instance of Model class by its key's name.
Args:
key_names: A single key-name or a list of key-names.
parent: Parent of instances to get. Can be a model or key.
"""
try:
parent = _coerce_to_key(parent)
except BadKeyError, e:
raise BadArgumentError(str(e))
rpc = datastore.GetRpcFromKwargs(kwargs)
key_names, multiple = datastore.NormalizeAndTypeCheck(key_names, basestring)
keys = [datastore.Key.from_path(cls.kind(), name, parent=parent)
for name in key_names]
if multiple:
return get(keys, rpc=rpc)
else:
return get(keys[0], rpc=rpc)
@classmethod
def get_by_id(cls, ids, parent=None, **kwargs):
  """Get instance(s) of this Model class by numeric id.

  Args:
    ids: A single id or a list of ids.
    parent: Parent of instances to get.  Can be a model or key.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  if isinstance(parent, Model):
    parent = parent.key()
  ids, multiple = datastore.NormalizeAndTypeCheck(ids, (int, long))
  keys = [datastore.Key.from_path(cls.kind(), datastore_id, parent=parent)
          for datastore_id in ids]
  if not multiple:
    keys = keys[0]
  return get(keys, rpc=rpc)
@classmethod
def get_or_insert(cls, key_name, **kwds):
  """Transactionally retrieve or create an instance of Model class.

  This acts much like the Python dictionary setdefault() method, where we
  first try to retrieve a Model instance with the given key name and
  parent.  If it's not present, then we create a new instance (using the
  *kwds supplied) and insert that with the supplied key name.

  Subsequent calls to this method with the same key_name and parent will
  always yield the same entity (though not the same actual object
  instance), regardless of the *kwds supplied.  If the specified entity
  has somehow been deleted separately, then the next call will create a
  new entity and return it.

  If the 'parent' keyword argument is supplied, it must be a Model
  instance.  It will be used as the parent of the new instance of this
  Model class if one is created.

  This method is especially useful for having just one unique entity for
  a specific identifier.  Insertion/retrieval is done transactionally,
  which guarantees uniqueness.

  Example usage:

     class WikiTopic(db.Model):
       creation_date = db.DatetimeProperty(auto_now_add=True)
       body = db.TextProperty(required=True)

     # The first time through we'll create the new topic.
     wiki_word = 'CommonIdioms'
     topic = WikiTopic.get_or_insert(wiki_word,
                                     body='This topic is totally new!')
     assert topic.key().name() == 'CommonIdioms'
     assert topic.body == 'This topic is totally new!'

     # The second time through will just retrieve the entity.
     overwrite_topic = WikiTopic.get_or_insert(wiki_word,
                                               body='A totally different message!')
     assert topic.key().name() == 'CommonIdioms'
     assert topic.body == 'This topic is totally new!'

  Args:
    key_name: Key name to retrieve or create.
    **kwds: Keyword arguments to pass to the constructor of the model
      class if an instance for the specified key name does not already
      exist.  If an instance with the supplied key_name and parent already
      exists, the rest of these arguments will be discarded.

  Returns:
    Existing instance of Model class with the specified key_name and
    parent or a new one that has just been created.

  Raises:
    TransactionFailedError if the specified Model instance could not be
    retrieved or created transactionally (due to high contention, etc).
  """
  def txn():
    # Check-then-create runs inside a transaction, making it atomic.
    entity = cls.get_by_key_name(key_name, parent=kwds.get('parent'))
    if entity is None:
      entity = cls(key_name=key_name, **kwds)
      entity.put()
    return entity
  return run_in_transaction(txn)
@classmethod
def all(cls, **kwds):
  """Returns a query over all instances of this model from the datastore.

  Returns:
    Query that will retrieve all instances from entity collection.
  """
  return Query(cls, **kwds)
@classmethod
def gql(cls, query_string, *args, **kwds):
  """Returns a query using GQL query string.

  See appengine/ext/gql for more information about GQL.

  Args:
    query_string: Properly formatted GQL query string with the
      'SELECT * FROM <entity>' part omitted.
    *args: The rest of the positional arguments, used to bind numeric
      references in the query.
    **kwds: Dictionary-based arguments (for named parameters).
  """
  # Prepend the boilerplate SELECT clause the caller omitted.
  full_query = 'SELECT * FROM %s %s' % (cls.kind(), query_string)
  return GqlQuery(full_query, *args, **kwds)
@classmethod
def _load_entity_values(cls, entity):
  """Load property values from an entity.

  Converts the entity's stored values into the keyword-argument dict used
  to construct a model instance.

  Args:
    entity: Entity which contains values to search declared properties
      for.
  """
  entity_values = {}
  for prop in cls.properties().values():
    if prop.name in entity:
      try:
        value = prop.make_value_from_datastore(entity[prop.name])
        entity_values[prop.name] = value
      except KeyError:
        # NOTE(review): defensive -- the membership test above should make
        # a KeyError from the lookup unlikely; an empty list is the
        # fallback value.
        entity_values[prop.name] = []
  return entity_values
@classmethod
def from_entity(cls, entity):
  """Converts the entity representation of this model to an instance.

  Converts datastore.Entity instance to an instance of cls.

  Args:
    entity: Entity loaded directly from datastore.

  Raises:
    KindError when cls is incorrect model for entity.
  """
  if cls.kind() != entity.kind():
    raise KindError('Class %s cannot handle kind \'%s\'' %
                    (repr(cls), entity.kind()))
  entity_values = cls._load_entity_values(entity)
  if entity.key().has_id_or_name():
    entity_values['key'] = entity.key()
  instance = cls(None, _from_entity=True, **entity_values)
  if entity.is_saved():
    instance._entity = entity
    # The entity now owns the key; remove the instance-level cached
    # copies set up by __init__ so key() reads from the entity.
    del instance._key_name
    del instance._key
  return instance
@classmethod
def kind(cls):
  """Returns the datastore kind we use for this model.

  We just use the name of the model for now, ignoring potential
  collisions.
  """
  return cls.__name__
@classmethod
def entity_type(cls):
  """Soon to be removed alias for kind."""
  return cls.kind()
@classmethod
def properties(cls):
  """Returns a dictionary of all the properties defined for this model."""
  # Return a fresh dict so callers cannot mutate the class's registry.
  return dict(cls._properties)
@classmethod
def fields(cls):
  """Soon to be removed alias for properties."""
  return cls.properties()
def create_rpc(deadline=None, callback=None, read_policy=STRONG_CONSISTENCY):
  """Create an rpc for use in configuring datastore calls.

  Args:
    deadline: float, deadline for calls in seconds.
    callback: callable, a callback triggered when this rpc completes,
      accepts one argument: the returned rpc.
    read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually
      consistent reads.

  Returns:
    A datastore.DatastoreRPC instance.
  """
  return datastore.CreateRPC(
      deadline=deadline, callback=callback, read_policy=read_policy)
def get(keys, **kwargs):
  """Fetch the specific Model instance(s) with the given key(s).

  Key objects and string keys are both supported (strings are converted
  to Key objects automatically).

  Args:
    keys: Key within datastore entity collection to find; or string key;
      or list of Keys or string keys.

  Returns:
    If a single key was given: a Model instance associated with key
    if it exists in the datastore, otherwise None; if a list of
    keys was given: a list whose items are either a Model instance or
    None.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
  try:
    entities = datastore.Get(keys, rpc=rpc)
  except datastore_errors.EntityNotFoundError:
    # The assert encodes the expectation that a missing-entity error only
    # occurs on single-key fetches; batched fetches yield None entries.
    assert not multiple
    return None
  models = [None if entity is None
            else class_for_kind(entity.kind()).from_entity(entity)
            for entity in entities]
  if multiple:
    return models
  assert len(models) == 1
  return models[0]
def put(models, **kwargs):
  """Store one or more Model instances.

  Args:
    models: Model instance or list of Model instances.

  Returns:
    A Key or a list of Keys (corresponding to the argument's plurality).

  Raises:
    TransactionFailedError if the data could not be committed.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  models, multiple = datastore.NormalizeAndTypeCheck(models, Model)
  # Syncing each instance to its internal entity also marks it as saved.
  entities = [instance._populate_internal_entity() for instance in models]
  keys = datastore.Put(entities, rpc=rpc)
  if multiple:
    return keys
  assert len(keys) == 1
  return keys[0]

# Backwards-compatible alias (matches Model.save).
save = put
def delete(models, **kwargs):
  """Delete one or more Model instances.

  Args:
    models: Model instance, key, or list of model instances or keys.

  Raises:
    TransactionFailedError if the data could not be committed.
  """
  rpc = datastore.GetRpcFromKwargs(kwargs)
  if isinstance(models, (list, tuple)):
    to_delete = models
  else:
    to_delete = [models]
  keys = [_coerce_to_key(item) for item in to_delete]
  datastore.Delete(keys, rpc=rpc)
def allocate_ids(model, size, **kwargs):
  """Allocates a range of IDs of size for the model_key defined by model.

  Allocates a range of IDs in the datastore such that those IDs will not
  be automatically assigned to new entities.  You can only allocate IDs
  for model keys from your app.  If there is an error, raises a subclass
  of datastore_errors.Error.

  Args:
    model: Model instance, Key or string to serve as a template specifying
      the ID sequence in which to allocate IDs.  Returned ids should only
      be used in entities with the same parent (if any) and kind as this
      key.
    **kwargs: Forwarded to datastore.AllocateIds.

  Returns:
    (start, end) of the allocated range, inclusive.
  """
  return datastore.AllocateIds(_coerce_to_key(model), size, **kwargs)
class Expando(Model):
  """Dynamically expandable model.
  An Expando does not require (but can still benefit from) the definition
  of any properties before it can be used to store information in the
  datastore. Properties can be added to an expando object by simply
  performing an assignment. The assignment of properties is done on
  an instance by instance basis, so it is possible for one object of an
  expando type to have different properties from another or even the same
  properties with different types. It is still possible to define
  properties on an expando, allowing those properties to behave the same
  as on any other model.
  Example:
    import datetime
    class Song(db.Expando):
      title = db.StringProperty()
    crazy = Song(title='Crazy like a diamond',
                 author='Lucy Sky',
                 publish_date='yesterday',
                 rating=5.0)
    hoboken = Song(title='The man from Hoboken',
                   author=['Anthony', 'Lou'],
                   publish_date=datetime.datetime(1977, 5, 3))
    crazy.last_minute_note=db.Text('Get a train to the station.')
  Possible Uses:
    One use of an expando is to create an object without any specific
    structure and later, when your application matures and is in the right
    state, change it to a normal model object and define explicit properties.
  Additional exceptions for expando:
    Protected attributes (ones whose names begin with '_') cannot be used
    as dynamic properties. These are names that are reserved for protected
    transient (non-persisted) attributes.
  Order of lookup:
    When trying to set or access an attribute value, any other defined
    properties, such as methods and other values in __dict__ take precedence
    over values in the datastore.
    1 - Because it is not possible for the datastore to know what kind of
    property to store on an undefined expando value, setting a property to
    None is the same as deleting it from the expando.
    2 - Persistent variables on Expando must not begin with '_'. These
    variables considered to be 'protected' in Python, and are used
    internally.
    3 - Expando's dynamic properties are not able to store empty lists.
    Attempting to assign an empty list to a dynamic property will raise
    ValueError. Static properties on Expando can still support empty
    lists but like normal Model properties is restricted from using
    None.
  """
  # Class-level default so attribute access is safe on instances that have
  # not run __init__ yet; each instance gets its own dict lazily.
  _dynamic_properties = None
  def __init__(self, parent=None, key_name=None, _app=None, **kwds):
    """Creates a new instance of this expando model.
    Args:
      parent: Parent instance for this instance or None, indicating a top-
        level instance.
      key_name: Name for new model instance.
      _app: Intentionally undocumented.
      args: Keyword arguments mapping to properties of model.
    """
    super(Expando, self).__init__(parent, key_name, _app, **kwds)
    self._dynamic_properties = {}
    # Any keyword that is not a declared property (and not 'key') becomes
    # a dynamic property via __setattr__.
    for prop, value in kwds.iteritems():
      if prop not in self.properties() and prop != 'key':
        setattr(self, prop, value)
  def __setattr__(self, key, value):
    """Dynamically set field values that are not defined.
    Tries to set the value on the object normally, but failing that
    sets the value on the contained entity.
    Args:
      key: Name of attribute.
      value: Value to set for attribute. Must be compatible with
        datastore.
    Raises:
      ValueError on attempt to assign empty list.
    """
    check_reserved_word(key)
    # Only non-protected names that are not data descriptors (declared
    # Property objects define __set__) are treated as dynamic properties.
    if (key[:1] != '_' and
        not hasattr(getattr(type(self), key, None), '__set__')):
      if value == []:
        raise ValueError('Cannot store empty list to dynamic property %s' %
                         key)
      if type(value) not in _ALLOWED_EXPANDO_PROPERTY_TYPES:
        raise TypeError("Expando cannot accept values of type '%s'." %
                        type(value).__name__)
      if self._dynamic_properties is None:
        self._dynamic_properties = {}
      self._dynamic_properties[key] = value
    else:
      super(Expando, self).__setattr__(key, value)
  def __getattribute__(self, key):
    """Get attribute from expando.
    Must be overridden to allow dynamic properties to obscure class attributes.
    Since all attributes are stored in self._dynamic_properties, the normal
    __getattribute__ does not attempt to access it until __setattr__ is called.
    By then, the static attribute being overwritten has already been located
    and returned from the call.
    This method short circuits the usual __getattribute__ call when finding a
    dynamic property and returns it to the user via __getattr__. __getattr__
    is called to preserve backward compatibility with older Expando models
    that may have overridden the original __getattr__.
    NOTE: Access to properties defined by Python descriptors are not obscured
    because setting those attributes are done through the descriptor and does
    not place those attributes in self._dynamic_properties.
    """
    # Protected names ('_'-prefixed) always take the normal lookup path.
    if not key.startswith('_'):
      dynamic_properties = self._dynamic_properties
      if dynamic_properties is not None and key in dynamic_properties:
        return self.__getattr__(key)
    return super(Expando, self).__getattribute__(key)
  def __getattr__(self, key):
    """If no explicit attribute defined, retrieve value from entity.
    Tries to get the value on the object normally, but failing that
    retrieves value from contained entity.
    Args:
      key: Name of attribute.
    Raises:
      AttributeError when there is no attribute for key on object or
        contained entity.
    """
    _dynamic_properties = self._dynamic_properties
    if _dynamic_properties is not None and key in _dynamic_properties:
      return _dynamic_properties[key]
    else:
      return getattr(super(Expando, self), key)
  def __delattr__(self, key):
    """Remove attribute from expando.
    Expando is not like normal entities in that undefined fields
    can be removed.
    Args:
      key: Dynamic property to be deleted.
    """
    if self._dynamic_properties and key in self._dynamic_properties:
      del self._dynamic_properties[key]
    else:
      object.__delattr__(self, key)
  def dynamic_properties(self):
    """Determine which properties are particular to instance of entity.
    Returns:
      Set of names which correspond only to the dynamic properties.
    """
    if self._dynamic_properties is None:
      return []
    return self._dynamic_properties.keys()
  def _to_entity(self, entity):
    """Store to entity, deleting dynamic properties that no longer exist.
    When the expando is saved, it is possible that a given property no longer
    exists. In this case, the property will be removed from the saved instance.
    Args:
      entity: Entity which will receive dynamic properties.
    """
    super(Expando, self)._to_entity(entity)
    if self._dynamic_properties is None:
      self._dynamic_properties = {}
    for key, value in self._dynamic_properties.iteritems():
      entity[key] = value
    # Drop entity keys that correspond neither to a declared property nor
    # to a current dynamic property (i.e. deleted dynamic properties).
    all_properties = set(self._dynamic_properties.iterkeys())
    all_properties.update(self.properties().iterkeys())
    for key in entity.keys():
      if key not in all_properties:
        del entity[key]
  @classmethod
  def _load_entity_values(cls, entity):
    """Load dynamic properties from entity.
    Expando needs to do a second pass to add the entity values which were
    ignored by Model because they didn't have a corresponding predefined
    property on the model.
    Args:
      entity: Entity which contain values to search dynamic properties for.
    """
    entity_values = super(Expando, cls)._load_entity_values(entity)
    for key, value in entity.iteritems():
      if key not in entity_values:
        entity_values[str(key)] = value
    return entity_values
class _BaseQuery(object):
  """Base class for both Query and GqlQuery."""
  # Class-level default; overwritten per instance in __init__. Compiled
  # queries are required for cursor() support.
  _compile = False
  def __init__(self, model_class=None, keys_only=False, compile=True,
               cursor=None):
    """Constructor.
    Args:
      model_class: Model class from which entities are constructed.
      keys_only: Whether the query should return full entities or only keys.
      compile: Whether the query should also return a compiled query.
      cursor: A compiled query from which to resume.
    """
    self._model_class = model_class
    self._keys_only = keys_only
    self._compile = compile
    self.with_cursor(cursor)
  def is_keys_only(self):
    """Returns whether this query is keys only.
    Returns:
      True if this query returns keys, False if it returns entities.
    """
    return self._keys_only
  def _get_query(self):
    """Subclass must override (and not call their super method).
    Returns:
      A datastore.Query instance representing the query.
    """
    raise NotImplementedError
  def run(self, **kwargs):
    """Iterator for this query.
    If you know the number of results you need, consider fetch() instead,
    or use a GQL query with a LIMIT clause. It's more efficient.
    Args:
      rpc: datastore.DatastoreRPC to use for this request.
    Returns:
      Iterator for this query.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    raw_query = self._get_query()
    iterator = raw_query.Run(rpc=rpc)
    if self._compile:
      # Remember the executed low-level query so cursor() can later
      # extract its compiled cursor.
      self._last_raw_query = raw_query
    if self._keys_only:
      return iterator
    else:
      return _QueryIterator(self._model_class, iter(iterator))
  def __iter__(self):
    """Iterator for this query.
    If you know the number of results you need, consider fetch() instead,
    or use a GQL query with a LIMIT clause. It's more efficient.
    """
    return self.run()
  def get(self, **kwargs):
    """Get first result from this.
    Beware: get() ignores the LIMIT clause on GQL queries.
    Returns:
      First result from running the query if there are any, else None.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    results = self.fetch(1, rpc=rpc)
    try:
      return results[0]
    except IndexError:
      return None
  def count(self, limit=None, **kwargs):
    """Number of entities this query fetches.
    Beware: count() ignores the LIMIT clause on GQL queries.
    Args:
      limit, a number. If there are more results than this, stop short and
      just return this number. Providing this argument makes the count
      operation more efficient.
    Returns:
      Number of entities this query fetches.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    raw_query = self._get_query()
    result = raw_query.Count(limit=limit, rpc=rpc)
    # A count produces no usable cursor; invalidate any previous one so a
    # later cursor() call fails instead of returning a stale cursor.
    self._last_raw_query = None
    return result
  def fetch(self, limit, offset=0, **kwargs):
    """Return a list of items selected using SQL-like limit and offset.
    Whenever possible, use fetch() instead of iterating over the query
    results with run() or __iter__() . fetch() is more efficient.
    Beware: fetch() ignores the LIMIT clause on GQL queries.
    Args:
      limit: Maximum number of results to return.
      offset: Optional number of results to skip first; default zero.
      rpc: datastore.DatastoreRPC to use for this request.
    Returns:
      A list of db.Model instances. There may be fewer than 'limit'
      results if there aren't enough results to satisfy the request.
    """
    rpc = datastore.GetRpcFromKwargs(kwargs)
    accepted = (int, long)
    if not (isinstance(limit, accepted) and isinstance(offset, accepted)):
      raise TypeError('Arguments to fetch() must be integers')
    if limit < 0 or offset < 0:
      raise ValueError('Arguments to fetch() must be >= 0')
    if limit == 0:
      # Nothing requested: skip the datastore round trip entirely.
      return []
    raw_query = self._get_query()
    raw = raw_query.Get(limit, offset, rpc=rpc)
    if self._compile:
      self._last_raw_query = raw_query
    if self._keys_only:
      return raw
    else:
      if self._model_class is not None:
        return [self._model_class.from_entity(e) for e in raw]
      else:
        # Kindless query: resolve the model class per entity kind.
        return [class_for_kind(e.kind()).from_entity(e) for e in raw]
  def cursor(self):
    """Get a serialized cursor for an already executed query.
    The returned cursor effectively lets a future invocation of a similar
    query to begin fetching results immediately after the last returned
    result from this query invocation.
    Returns:
      A base64-encoded serialized cursor.
    """
    if not self._compile:
      raise AssertionError(
          'Query must be created with compile=True to produce cursors')
    try:
      return base64.urlsafe_b64encode(
          self._last_raw_query.GetCompiledCursor().Encode())
    except AttributeError:
      # _last_raw_query is unset (query never run) or None (reset by count()).
      raise AssertionError('No cursor available.')
  def with_cursor(self, cursor):
    """Set the start of this query to the given serialized cursor.
    When executed, this query will start from the next result for a previous
    invocation of a similar query.
    Returns:
      This Query instance, for chaining.
    """
    if not cursor:
      cursor = None
    elif not isinstance(cursor, basestring):
      raise BadValueError(
          'Cursor must be a str or unicode instance, not a %s'
          % type(cursor).__name__)
    else:
      cursor = str(cursor)
      try:
        decoded = base64.urlsafe_b64decode(cursor)
        cursor = datastore_pb.CompiledCursor(decoded)
      except (ValueError, TypeError), e:
        raise datastore_errors.BadValueError(
            'Invalid cursor %s. Details: %s' % (cursor, e))
      except Exception, e:
        # ProtocolBufferDecodeError is matched by name to avoid importing
        # the protobuf module here; anything else propagates unchanged.
        if e.__class__.__name__ == 'ProtocolBufferDecodeError':
          raise datastore_errors.BadValueError('Invalid cursor %s. '
                                               'Details: %s' % (cursor, e))
        else:
          raise
    self._cursor = cursor
    return self
  def __getitem__(self, arg):
    """Support for query[index] and query[start:stop].
    Beware: this ignores the LIMIT clause on GQL queries.
    Args:
      arg: Either a single integer, corresponding to the query[index]
        syntax, or a Python slice object, corresponding to the
        query[start:stop] or query[start:stop:step] syntax.
    Returns:
      A single Model instance when the argument is a single integer.
      A list of Model instances when the argument is a slice.
    """
    if isinstance(arg, slice):
      start, stop, step = arg.start, arg.stop, arg.step
      if start is None:
        start = 0
      if stop is None:
        raise ValueError('Open-ended slices are not supported')
      if step is None:
        step = 1
      if start < 0 or stop < 0 or step != 1:
        raise ValueError(
            'Only slices with start>=0, stop>=0, step==1 are supported')
      limit = stop - start
      if limit < 0:
        return []
      # Slice maps directly onto a fetch: offset=start, limit=stop-start.
      return self.fetch(limit, start)
    elif isinstance(arg, (int, long)):
      if arg < 0:
        raise ValueError('Only indices >= 0 are supported')
      results = self.fetch(1, arg)
      if results:
        return results[0]
      else:
        raise IndexError('The query returned fewer than %d results' % (arg+1))
    else:
      raise TypeError('Only integer indices and slices are supported')
class _QueryIterator(object):
"""Wraps the datastore iterator to return Model instances.
The datastore returns entities. We wrap the datastore iterator to
return Model instances instead.
"""
def __init__(self, model_class, datastore_iterator):
"""Iterator constructor
Args:
model_class: Model class from which entities are constructed.
datastore_iterator: Underlying datastore iterator.
"""
self.__model_class = model_class
self.__iterator = datastore_iterator
def __iter__(self):
"""Iterator on self.
Returns:
Self.
"""
return self
def next(self):
"""Get next Model instance in query results.
Returns:
Next model instance.
Raises:
StopIteration when there are no more results in query.
"""
if self.__model_class is not None:
return self.__model_class.from_entity(self.__iterator.next())
else:
entity = self.__iterator.next()
return class_for_kind(entity.kind()).from_entity(entity)
def _normalize_query_parameter(value):
  """Make any necessary type conversions to a query parameter.
  The following conversions are made:
    - Model instances are converted to Key instances. This is necessary so
      that querying reference properties will work.
    - datetime.date objects are converted to datetime.datetime objects (see
      _date_to_datetime for details on this conversion). This is necessary so
      that querying date properties with date objects will work.
    - datetime.time objects are converted to datetime.datetime objects (see
      _time_to_datetime for details on this conversion). This is necessary so
      that querying time properties with time objects will work.
  Args:
    value: The query parameter value.
  Returns:
    The input value, or a converted value if value matches one of the
    conversions specified above.
  """
  if isinstance(value, Model):
    value = value.key()
  # datetime.datetime subclasses datetime.date, so exclude it explicitly:
  # only plain dates are widened to midnight datetimes.
  is_plain_date = (isinstance(value, datetime.date) and
                   not isinstance(value, datetime.datetime))
  if is_plain_date:
    value = _date_to_datetime(value)
  elif isinstance(value, datetime.time):
    value = _time_to_datetime(value)
  return value
class Query(_BaseQuery):
  """A Query instance queries over instances of Models.
  You construct a query with a model class, like this:
     class Story(db.Model):
       title = db.StringProperty()
       date = db.DateTimeProperty()
     query = Query(Story)
  You modify a query with filters and orders like this:
     query.filter('title =', 'Foo')
     query.order('-date')
     query.ancestor(key_or_model_instance)
  Every query can return an iterator, so you access the results of a query
  by iterating over it:
     for story in query:
       print story.title
  For convenience, all of the filtering and ordering methods return "self",
  so the easiest way to use the query interface is to cascade all filters and
  orders in the iterator line like this:
     for story in Query(story).filter('title =', 'Foo').order('-date'):
       print story.title
  """
  def __init__(self, model_class=None, keys_only=False, cursor=None):
    """Constructs a query over instances of the given Model.
    Args:
      model_class: Model class to build query for.
      keys_only: Whether the query should return full entities or only keys.
      cursor: A compiled query from which to resume.
    """
    super(Query, self).__init__(model_class, keys_only, cursor=cursor)
    # List of filter dicts. More than one dict means a disjunction: each
    # dict becomes its own datastore.Query, combined via MultiQuery.
    self.__query_sets = [{}]
    self.__orderings = []
    self.__ancestor = None
  def _get_query(self,
                 _query_class=datastore.Query,
                 _multi_query_class=datastore.MultiQuery):
    # Build one low-level query per filter set; shared orderings and
    # ancestor are applied to each.
    queries = []
    for query_set in self.__query_sets:
      if self._model_class is not None:
        kind = self._model_class.kind()
      else:
        kind = None
      query = _query_class(kind,
                           query_set,
                           keys_only=self._keys_only,
                           compile=self._compile,
                           cursor=self._cursor)
      query.Order(*self.__orderings)
      if self.__ancestor is not None:
        query.Ancestor(self.__ancestor)
      queries.append(query)
    # Overriding _query_class without _multi_query_class (or vice versa)
    # would break disjunctive ('IN'/'!=') queries; detect that here.
    if (_query_class != datastore.Query and
        _multi_query_class == datastore.MultiQuery):
      warnings.warn(
          'Custom _query_class specified without corresponding custom'
          ' _query_multi_class. Things will break if you use queries with'
          ' the "IN" or "!=" operators.', RuntimeWarning)
      if len(queries) > 1:
        raise datastore_errors.BadArgumentError(
            'Query requires multiple subqueries to satisfy. If _query_class'
            ' is overridden, _multi_query_class must also be overridden.')
    elif (_query_class == datastore.Query and
          _multi_query_class != datastore.MultiQuery):
      raise BadArgumentError('_query_class must also be overridden if'
                             ' _multi_query_class is overridden.')
    if len(queries) == 1:
      return queries[0]
    else:
      return _multi_query_class(queries, self.__orderings)
  def __filter_disjunction(self, operations, values):
    """Add a disjunction of several filters and several values to the query.
    This is implemented by duplicating queries and combining the
    results later.
    Args:
      operations: a string or list of strings. Each string contains a
        property name and an operator to filter by. The operators
        themselves must not require multiple queries to evaluate
        (currently, this means that 'in' and '!=' are invalid).
      values: a value or list of filter values, normalized by
        _normalize_query_parameter.
    """
    if not isinstance(operations, (list, tuple)):
      operations = [operations]
    if not isinstance(values, (list, tuple)):
      values = [values]
    new_query_sets = []
    for operation in operations:
      if operation.lower().endswith('in') or operation.endswith('!='):
        raise BadQueryError('Cannot use "in" or "!=" in a disjunction.')
      # Cartesian product: every existing filter set is duplicated once per
      # (operation, value) combination.
      for query_set in self.__query_sets:
        for value in values:
          new_query_set = copy.deepcopy(query_set)
          datastore._AddOrAppend(new_query_set, operation, value)
          new_query_sets.append(new_query_set)
    self.__query_sets = new_query_sets
  def filter(self, property_operator, value):
    """Add filter to query.
    Args:
      property_operator: string with the property and operator to filter by.
      value: the filter value.
    Returns:
      Self to support method chaining.
    Raises:
      PropertyError if invalid property is provided.
    """
    # Group 1 is the property name; group 3 (when present) is the operator.
    match = _FILTER_REGEX.match(property_operator)
    prop = match.group(1)
    if match.group(3) is not None:
      operator = match.group(3)
    else:
      operator = '=='
    if self._model_class is None:
      if prop != datastore_types._KEY_SPECIAL_PROPERTY:
        raise BadQueryError(
            'Only %s filters are allowed on kindless queries.' %
            datastore_types._KEY_SPECIAL_PROPERTY)
    elif prop in self._model_class._unindexed_properties:
      raise PropertyError('Property \'%s\' is not indexed' % prop)
    if operator.lower() == 'in':
      if self._keys_only:
        raise BadQueryError('Keys only queries do not support IN filters.')
      elif not isinstance(value, (list, tuple)):
        raise BadValueError('Argument to the "in" operator must be a list')
      # 'x IN [a, b]' is rewritten as the disjunction 'x = a OR x = b'.
      values = [_normalize_query_parameter(v) for v in value]
      self.__filter_disjunction(prop + ' =', values)
    else:
      if isinstance(value, (list, tuple)):
        raise BadValueError('Filtering on lists is not supported')
      if operator == '!=':
        if self._keys_only:
          raise BadQueryError('Keys only queries do not support != filters.')
        # 'x != v' is rewritten as the disjunction 'x < v OR x > v'.
        self.__filter_disjunction([prop + ' <', prop + ' >'],
                                  _normalize_query_parameter(value))
      else:
        value = _normalize_query_parameter(value)
        for query_set in self.__query_sets:
          datastore._AddOrAppend(query_set, property_operator, value)
    return self
  def order(self, property):
    """Set order of query result.
    To use descending order, prepend '-' (minus) to the property
    name, e.g., '-date' rather than 'date'.
    Args:
      property: Property to sort on.
    Returns:
      Self to support method chaining.
    Raises:
      PropertyError if invalid property is provided.
    """
    if property.startswith('-'):
      property = property[1:]
      order = datastore.Query.DESCENDING
    else:
      order = datastore.Query.ASCENDING
    if self._model_class is None:
      if (property != datastore_types._KEY_SPECIAL_PROPERTY or
          order != datastore.Query.ASCENDING):
        raise BadQueryError(
            'Only %s ascending orders are supported on kindless queries' %
            datastore_types._KEY_SPECIAL_PROPERTY)
    else:
      # Expandos may order on dynamic properties, so the existence check
      # only applies to plain Model subclasses.
      if not issubclass(self._model_class, Expando):
        if (property not in self._model_class.properties() and
            property not in datastore_types._SPECIAL_PROPERTIES):
          raise PropertyError('Invalid property name \'%s\'' % property)
      if property in self._model_class._unindexed_properties:
        raise PropertyError('Property \'%s\' is not indexed' % property)
    self.__orderings.append((property, order))
    return self
  def ancestor(self, ancestor):
    """Sets an ancestor for this query.
    This restricts the query to only return results that descend from
    a given model instance. In other words, all of the results will
    have the ancestor as their parent, or parent's parent, etc. The
    ancestor itself is also a possible result!
    Args:
      ancestor: Model or Key (that has already been saved)
    Returns:
      Self to support method chaining.
    Raises:
      TypeError if the argument isn't a Key or Model; NotSavedError
      if it is, but isn't saved yet.
    """
    if isinstance(ancestor, datastore.Key):
      if ancestor.has_id_or_name():
        self.__ancestor = ancestor
      else:
        raise NotSavedError()
    elif isinstance(ancestor, Model):
      if ancestor.has_key():
        self.__ancestor = ancestor.key()
      else:
        raise NotSavedError()
    else:
      raise TypeError('ancestor should be Key or Model')
    return self
class GqlQuery(_BaseQuery):
  """A Query class that uses GQL query syntax instead of .filter() etc."""
  def __init__(self, query_string, *args, **kwds):
    """Constructor.
    Args:
      query_string: Properly formatted GQL query string.
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.
    Raises:
      PropertyError if the query filters or sorts on a property that's not
      indexed.
    """
    # Local import to avoid a circular dependency between db and gql.
    from google.appengine.ext import gql
    app = kwds.pop('_app', None)
    self._proto_query = gql.GQL(query_string, _app=app)
    if self._proto_query._entity is not None:
      model_class = class_for_kind(self._proto_query._entity)
    else:
      model_class = None
    super(GqlQuery, self).__init__(model_class,
                                   keys_only=self._proto_query._keys_only)
    if model_class is not None:
      # Reject filters or orderings on unindexed properties up front.
      for property, unused in (self._proto_query.filters().keys() +
                               self._proto_query.orderings()):
        if property in model_class._unindexed_properties:
          raise PropertyError('Property \'%s\' is not indexed' % property)
    self.bind(*args, **kwds)
  def bind(self, *args, **kwds):
    """Bind arguments (positional or keyword) to the query.
    Note that you can also pass arguments directly to the query
    constructor. Each time you call bind() the previous set of
    arguments is replaced with the new set. This is useful because
    the hard work is in parsing the query; so if you expect to be
    using the same query with different sets of arguments, you should
    hold on to the GqlQuery() object and call bind() on it each time.
    Args:
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.
    """
    self._args = []
    for arg in args:
      self._args.append(_normalize_query_parameter(arg))
    self._kwds = {}
    for name, arg in kwds.iteritems():
      self._kwds[name] = _normalize_query_parameter(arg)
  def run(self, **kwargs):
    """Iterator for this query that handles the LIMIT clause properly.
    If the GQL query string contains a LIMIT clause, this function fetches
    all results before returning an iterator. Otherwise results are retrieved
    in batches by the iterator.
    Args:
      rpc: datastore.DatastoreRPC to use for this request.
    Returns:
      Iterator for this query.
    """
    if self._proto_query.limit() >= 0:
      return iter(self.fetch(limit=self._proto_query.limit(),
                             offset=self._proto_query.offset(),
                             **kwargs))
    else:
      results = _BaseQuery.run(self, **kwargs)
      # No LIMIT clause: apply the OFFSET by consuming and discarding the
      # first offset() results from the streaming iterator.
      try:
        for _ in xrange(self._proto_query.offset()):
          results.next()
      except StopIteration:
        pass
      return results
  def _get_query(self):
    return self._proto_query.Bind(self._args, self._kwds, self._cursor)
class UnindexedProperty(Property):
  """A property that isn't indexed by either built-in or composite indices.
  TextProperty and BlobProperty derive from this class.
  """
  def __init__(self, *args, **kwds):
    """Construct property. See the Property class for details.
    Raises:
      ConfigurationError if indexed=True.
    """
    self._require_parameter(kwds, 'indexed', False)
    # NOTE(review): callers may only pass indexed=False (or omit it), yet
    # the value forwarded to the base constructor is forced to True --
    # presumably for internal bookkeeping in Property; confirm against
    # Property.__init__ before changing.
    kwds['indexed'] = True
    super(UnindexedProperty, self).__init__(*args, **kwds)
  def validate(self, value):
    """Validate property.
    Returns:
      A valid value.
    Raises:
      BadValueError if property is not an instance of data_type.
    """
    # Attempt coercion to the concrete data_type (e.g. Text, Blob) first.
    if value is not None and not isinstance(value, self.data_type):
      try:
        value = self.data_type(value)
      except TypeError, err:
        raise BadValueError('Property %s must be convertible '
                            'to a %s instance (%s)' %
                            (self.name, self.data_type.__name__, err))
    value = super(UnindexedProperty, self).validate(value)
    if value is not None and not isinstance(value, self.data_type):
      raise BadValueError('Property %s must be a %s instance' %
                          (self.name, self.data_type.__name__))
    return value
class TextProperty(UnindexedProperty):
  """A string that can be longer than 500 bytes."""
  # Stored unindexed (see UnindexedProperty), which is what allows values
  # longer than the limit on indexed string properties.
  data_type = Text
class StringProperty(Property):
  """A textual property, which can be multi- or single-line."""
  data_type = basestring
  def __init__(self, verbose_name=None, multiline=False, **kwds):
    """Construct string property.
    Args:
      verbose_name: Verbose name is always first parameter.
      multiline: Whether carriage returns are permitted in the value.
    """
    super(StringProperty, self).__init__(verbose_name, **kwds)
    self.multiline = multiline
  def validate(self, value):
    """Validate string property.
    Returns:
      A valid value.
    Raises:
      BadValueError if the value is not a string, or contains a newline
      while the property is not multi-line.
    """
    value = super(StringProperty, self).validate(value)
    if value is None:
      return value
    if not isinstance(value, basestring):
      raise BadValueError(
          'Property %s must be a str or unicode instance, not a %s'
          % (self.name, type(value).__name__))
    if value and '\n' in value and not self.multiline:
      raise BadValueError('Property %s is not multi-line' % self.name)
    return value
class _CoercingProperty(Property):
  """A Property subclass that extends validate() to coerce to self.data_type."""
  def validate(self, value):
    """Coerce values (except None) to self.data_type.
    Args:
      value: The value to be validated and coerced.
    Returns:
      The coerced and validated value. It is guaranteed that this is
      either None or an instance of self.data_type; otherwise an exception
      is raised.
    Raises:
      BadValueError if the value could not be validated or coerced.
    """
    checked = super(_CoercingProperty, self).validate(value)
    if checked is None or isinstance(checked, self.data_type):
      return checked
    # Not yet of the target type: coerce via the data_type constructor.
    return self.data_type(checked)
class CategoryProperty(_CoercingProperty):
  """A property whose values are Category instances."""
  # Validation and coercion are inherited from _CoercingProperty.validate().
  data_type = Category
class LinkProperty(_CoercingProperty):
  """A property whose values are Link instances."""
  def validate(self, value):
    """Validate that the value, when not None, is a full URL.
    Coerces via _CoercingProperty.validate(), then requires both a URL
    scheme and a network location.
    Raises:
      BadValueError if the value is not a full URL.
    """
    value = super(LinkProperty, self).validate(value)
    if value is not None:
      scheme, netloc, path, query, fragment = urlparse.urlsplit(value)
      if not scheme or not netloc:
        raise BadValueError('Property %s must be a full URL (\'%s\')' %
                            (self.name, value))
    return value
  data_type = Link
# Backwards-compatible alias for LinkProperty.
URLProperty = LinkProperty
class EmailProperty(_CoercingProperty):
  """A property whose values are Email instances."""
  # Validation and coercion are inherited from _CoercingProperty.validate().
  data_type = Email
class GeoPtProperty(_CoercingProperty):
  """A property whose values are GeoPt instances."""
  # Validation and coercion are inherited from _CoercingProperty.validate().
  data_type = GeoPt
class IMProperty(_CoercingProperty):
  """A property whose values are IM instances."""
  # Validation and coercion are inherited from _CoercingProperty.validate().
  data_type = IM
class PhoneNumberProperty(_CoercingProperty):
  """A property whose values are PhoneNumber instances."""
  # Validation and coercion are inherited from _CoercingProperty.validate().
  data_type = PhoneNumber
class PostalAddressProperty(_CoercingProperty):
  """A property whose values are PostalAddress instances."""
  # Validation and coercion are inherited from _CoercingProperty.validate().
  data_type = PostalAddress
class BlobProperty(UnindexedProperty):
  """A byte string that can be longer than 500 bytes."""
  # Stored unindexed (see UnindexedProperty); for short indexable binary
  # values use ByteStringProperty instead.
  data_type = Blob
class ByteStringProperty(Property):
  """A short (<=500 bytes) byte string.
  This type should be used for short binary values that need to be indexed. If
  you do not require indexing (regardless of length), use BlobProperty instead.
  """
  def validate(self, value):
    """Validate ByteString property.
    Returns:
      A valid value.
    Raises:
      BadValueError if property is not instance of 'ByteString'.
    """
    # Attempt coercion to ByteString before delegating to base validation.
    if value is not None and not isinstance(value, ByteString):
      try:
        value = ByteString(value)
      except TypeError, err:
        raise BadValueError('Property %s must be convertible '
                            'to a ByteString instance (%s)' % (self.name, err))
    value = super(ByteStringProperty, self).validate(value)
    if value is not None and not isinstance(value, ByteString):
      raise BadValueError('Property %s must be a ByteString instance'
                          % self.name)
    return value
  data_type = ByteString
class DateTimeProperty(Property):
  """The base class of all of our date/time properties.
  We handle common operations, like converting between time tuples and
  datetime instances.
  """
  def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False,
               **kwds):
    """Construct a DateTimeProperty
    Args:
      verbose_name: Verbose name is always first parameter.
      auto_now: Date/time property is updated with the current time every time
        it is saved to the datastore. Useful for properties that want to track
        the modification time of an instance.
      auto_now_add: Date/time is set to when its instance is created.
        Useful for properties that record the creation time of an entity.
    """
    super(DateTimeProperty, self).__init__(verbose_name, **kwds)
    self.auto_now = auto_now
    self.auto_now_add = auto_now_add
  def validate(self, value):
    """Validate datetime.
    Returns:
      A valid value.
    Raises:
      BadValueError if property is not instance of 'datetime'.
    """
    value = super(DateTimeProperty, self).validate(value)
    if value and not isinstance(value, self.data_type):
      raise BadValueError('Property %s must be a %s' %
                          (self.name, self.data_type.__name__))
    return value
  def default_value(self):
    """Default value for datetime.
    Returns:
      value of now() as appropriate to the date-time instance if auto_now
      or auto_now_add is set, else user configured default value implementation.
    """
    if self.auto_now or self.auto_now_add:
      return self.now()
    return Property.default_value(self)
  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.
    Returns:
      now() as appropriate to the date-time instance in the odd case where
      auto_now is set to True, else the default implementation.
    """
    # auto_now overrides whatever value is on the instance at save time.
    if self.auto_now:
      return self.now()
    else:
      return super(DateTimeProperty,
                   self).get_value_for_datastore(model_instance)
  data_type = datetime.datetime
  @staticmethod
  def now():
    """Get now as a full datetime value.
    Returns:
      'now' as a whole timestamp, including both time and date.
    """
    # Subclasses (DateProperty, TimeProperty) override this to narrow the
    # value to the matching data_type.
    return datetime.datetime.now()
def _date_to_datetime(value):
  """Convert a date to a datetime for datastore storage.
  Args:
    value: A datetime.date object.
  Returns:
    A datetime object with time set to 0:00.
  """
  assert isinstance(value, datetime.date)
  # combine() with a zero time yields midnight on the given date.
  return datetime.datetime.combine(value, datetime.time())
def _time_to_datetime(value):
  """Convert a time to a datetime for datastore storage.
  Args:
    value: A datetime.time object.
  Returns:
    A datetime object with date set to 1970-01-01.
  """
  assert isinstance(value, datetime.time)
  # Strip tzinfo so the result is naive, matching field-by-field copying.
  return datetime.datetime.combine(datetime.date(1970, 1, 1),
                                   value.replace(tzinfo=None))
class DateProperty(DateTimeProperty):
  """A date property, which stores a date without a time."""
  @staticmethod
  def now():
    """Get now as a date datetime value.
    Returns:
      'date' part of 'now' only.
    """
    return datetime.datetime.now().date()
  def validate(self, value):
    """Validate date.
    Returns:
      A valid value.
    Raises:
      BadValueError if property is not instance of 'date',
      or if it is an instance of 'datetime' (which is a subclass
      of 'date', but for all practical purposes a different type).
    """
    value = super(DateProperty, self).validate(value)
    # Explicitly reject datetimes: isinstance checks alone would accept
    # them because datetime subclasses date.
    if isinstance(value, datetime.datetime):
      raise BadValueError('Property %s must be a %s, not a datetime' %
                          (self.name, self.data_type.__name__))
    return value
  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.
    We retrieve a datetime.date from the model instance and return a
    datetime.datetime instance with the time set to zero.
    See base class method documentation for details.
    """
    value = super(DateProperty, self).get_value_for_datastore(model_instance)
    if value is not None:
      assert isinstance(value, datetime.date)
      value = _date_to_datetime(value)
    return value
  def make_value_from_datastore(self, value):
    """Native representation of this property.
    We receive a datetime.datetime retrieved from the entity and return
    a datetime.date instance representing its date portion.
    See base class method documentation for details.
    """
    if value is not None:
      assert isinstance(value, datetime.datetime)
      value = value.date()
    return value
  data_type = datetime.date
class TimeProperty(DateTimeProperty):
    """A time-of-day property; the calendar-date component is never stored."""

    @staticmethod
    def now():
        """Return the current time of day (the time portion of 'now')."""
        return datetime.datetime.now().time()

    def empty(self, value):
        """Report whether the value counts as empty.

        Midnight ("0:0") is a real time, so only ``None`` is empty.
        """
        return value is None

    def get_value_for_datastore(self, model_instance):
        """Return the datastore representation: the time on 1970-01-01.

        See base class method documentation for details.
        """
        time_value = super(TimeProperty,
                           self).get_value_for_datastore(model_instance)
        if time_value is None:
            return None
        assert isinstance(time_value, datetime.time), repr(time_value)
        return _time_to_datetime(time_value)

    def make_value_from_datastore(self, value):
        """Convert the stored ``datetime`` back to its time portion.

        See base class method documentation for details.
        """
        if value is None:
            return None
        assert isinstance(value, datetime.datetime)
        return value.time()

    data_type = datetime.time
class IntegerProperty(Property):
    """A 64-bit integer property."""

    def validate(self, value):
        """Validate that the value is a datastore-storable integer.

        Returns:
            The validated value.

        Raises:
            BadValueError: if the value is not an int/long (bools are
                rejected even though ``bool`` subclasses ``int``), or
                does not fit in a signed 64-bit word.
        """
        value = super(IntegerProperty, self).validate(value)
        if value is None:
            return None
        # bool subclasses int, but True/False are not integers here.
        if isinstance(value, bool) or not isinstance(value, (int, long)):
            raise BadValueError('Property %s must be an int or long, not a %s'
                                % (self.name, type(value).__name__))
        if not (-0x8000000000000000 <= value <= 0x7fffffffffffffff):
            raise BadValueError('Property %s must fit in 64 bits' % self.name)
        return value

    data_type = int

    def empty(self, value):
        """Only ``None`` is empty; 0 is a real value."""
        return value is None
class RatingProperty(_CoercingProperty, IntegerProperty):
    """A property whose values are Rating instances.

    Mixes IntegerProperty's 64-bit validation with _CoercingProperty,
    which presumably coerces raw values into ``data_type`` -- it is
    defined earlier in this module; confirm its contract there.
    """
    data_type = Rating
class FloatProperty(Property):
    """A floating point property."""

    def validate(self, value):
        """Validate that the value is a float (or None).

        Returns:
            The validated value.

        Raises:
            BadValueError: if a non-None value is not a ``float``.
        """
        value = super(FloatProperty, self).validate(value)
        if value is None or isinstance(value, float):
            return value
        raise BadValueError('Property %s must be a float' % self.name)

    data_type = float

    def empty(self, value):
        """Only ``None`` is empty; 0.0 is a real value."""
        return value is None
class BooleanProperty(Property):
    """A boolean property."""

    def validate(self, value):
        """Validate that the value is a bool (or None).

        Returns:
            The validated value.

        Raises:
            BadValueError: if a non-None value is not a ``bool``.
        """
        value = super(BooleanProperty, self).validate(value)
        if value is None or isinstance(value, bool):
            return value
        raise BadValueError('Property %s must be a bool' % self.name)

    data_type = bool

    def empty(self, value):
        """Only ``None`` is empty; False is a real value."""
        return value is None
class UserProperty(Property):
    """A property holding a ``users.User`` value.

    Supports auto-populating with the currently signed-in user instead
    of a static default.
    """

    def __init__(self,
                 verbose_name=None,
                 name=None,
                 required=False,
                 validator=None,
                 choices=None,
                 auto_current_user=False,
                 auto_current_user_add=False,
                 indexed=True):
        """Initialize the property with the given options.

        The 'default' keyword is deliberately not supported; use
        auto_current_user_add=True instead.

        Args:
            verbose_name: User friendly name of property.
            name: Storage name for the property; defaults to the
                attribute name used in the Model subclass.
            required: Whether a value must be supplied.
            validator: Optional user-provided validation callable.
            choices: Optional set of permitted values.
            auto_current_user: If true, overwrite with the current user
                on every datastore write.
            auto_current_user_add: If true, fill in the current user on
                the first datastore write only.
            indexed: Whether the property is indexed.
        """
        super(UserProperty, self).__init__(verbose_name, name,
                                           required=required,
                                           validator=validator,
                                           choices=choices,
                                           indexed=indexed)
        self.auto_current_user = auto_current_user
        self.auto_current_user_add = auto_current_user_add

    def validate(self, value):
        """Validate that the value is a ``users.User`` (or None).

        Returns:
            The validated value.

        Raises:
            BadValueError: if a non-None value is not a User.
        """
        value = super(UserProperty, self).validate(value)
        if value is not None and not isinstance(value, users.User):
            raise BadValueError('Property %s must be a User' % self.name)
        return value

    def default_value(self):
        """Return the current user when auto-population is enabled, else None.

        Note this intentionally bypasses Property's 'default' handling,
        since the 'default' keyword is unsupported for this type.
        """
        if self.auto_current_user or self.auto_current_user_add:
            return users.get_current_user()
        return None

    def get_value_for_datastore(self, model_instance):
        """Return the value to store, refreshed when auto_current_user is set."""
        if not self.auto_current_user:
            return super(UserProperty,
                         self).get_value_for_datastore(model_instance)
        return users.get_current_user()

    data_type = users.User
class ListProperty(Property):
    """A property that stores a list of things.

    This is a parameterized property; the parameter must be a valid
    non-list data type, and all items must conform to this type.
    """
    def __init__(self, item_type, verbose_name=None, default=None, **kwds):
        """Construct ListProperty.

        Args:
            item_type: Type for the list items; must be one of the allowed
                property types.
            verbose_name: Optional verbose name.
            default: Optional default value; if omitted, an empty list is used.
            **kwds: Optional additional keyword arguments, passed to base
                class. Note that the only permissible value for 'required'
                is True.
        """
        # str items are widened to basestring so both str and unicode pass.
        if item_type is str:
            item_type = basestring
        if not isinstance(item_type, type):
            raise TypeError('Item type should be a type object')
        if item_type not in _ALLOWED_PROPERTY_TYPES:
            raise ValueError('Item type %s is not acceptable' % item_type.__name__)
        if issubclass(item_type, (Blob, Text)):
            # Blob/Text items cannot be indexed: the caller must pass
            # indexed=False explicitly, which is then flipped to True
            # internally.  NOTE(review): the flip looks deliberate but
            # confirm against _require_parameter's contract before changing.
            self._require_parameter(kwds, 'indexed', False)
            kwds['indexed'] = True
        # List properties are always 'required'; an absent list is just [].
        self._require_parameter(kwds, 'required', True)
        if default is None:
            default = []
        self.item_type = item_type
        super(ListProperty, self).__init__(verbose_name,
                                           default=default,
                                           **kwds)
    def validate(self, value):
        """Validate list.

        Returns:
            A valid value.

        Raises:
            BadValueError if property is not a list whose items are
            instances of the item_type given to the constructor.
        """
        value = super(ListProperty, self).validate(value)
        if value is not None:
            if not isinstance(value, list):
                raise BadValueError('Property %s must be a list' % self.name)
            value = self.validate_list_contents(value)
        return value
    def validate_list_contents(self, value):
        """Validates that all items in the list are of the correct type.

        Returns:
            The validated list.

        Raises:
            BadValueError if the list has items that are not instances of
            the item_type given to the constructor.
        """
        # int and long are interchangeable list item types (Python 2).
        if self.item_type in (int, long):
            item_type = (int, long)
        else:
            item_type = self.item_type
        for item in value:
            if not isinstance(item, item_type):
                if item_type == (int, long):
                    raise BadValueError('Items in the %s list must all be integers.' %
                                        self.name)
                else:
                    raise BadValueError(
                        'Items in the %s list must all be %s instances' %
                        (self.name, self.item_type.__name__))
        return value
    def empty(self, value):
        """Is list property empty.

        [] is not an empty value; only None is.

        Returns:
            True if value is None, else False.
        """
        return value is None
    data_type = list
    def default_value(self):
        """Default value for list.

        Because the value supplied to 'default' is a static value, it must
        be shallow copied to prevent all fields with default values from
        sharing the same list instance.

        Returns:
            Copy of the default value.
        """
        return list(super(ListProperty, self).default_value())
    def get_value_for_datastore(self, model_instance):
        """Get value from property to send to datastore.

        Returns:
            Validated list appropriate to save in the datastore.
        """
        # Re-validate contents (and re-run the user validator) at write
        # time, since the list may have been mutated in place after __set__.
        value = self.validate_list_contents(
            super(ListProperty, self).get_value_for_datastore(model_instance))
        if self.validator:
            self.validator(value)
        return value
class StringListProperty(ListProperty):
    """A ListProperty specialized to string items.

    A shorthand for the most common kind of list property.
    """

    def __init__(self, verbose_name=None, default=None, **kwds):
        """Construct StringListProperty.

        Args:
            verbose_name: Optional verbose name.
            default: Optional default value; an empty list when omitted.
            **kwds: Additional keyword arguments forwarded to ListProperty().
        """
        super(StringListProperty, self).__init__(basestring,
                                                 verbose_name=verbose_name,
                                                 default=default,
                                                 **kwds)
class ReferenceProperty(Property):
    """A property that represents a many-to-one reference to another model.

    For example, a reference property in model A that refers to model B forms
    a many-to-one relationship from A to B: every instance of A refers to a
    single B instance, and every B instance can have many A instances refer
    to it.
    """
    def __init__(self,
                 reference_class=None,
                 verbose_name=None,
                 collection_name=None,
                 **attrs):
        """Construct ReferenceProperty.

        Args:
            reference_class: Which model class this property references.
            verbose_name: User friendly name of property.
            collection_name: If provided, alternate name of collection on
                reference_class to store back references.  Use this to allow
                a Model to have multiple fields which refer to the same class.
        """
        super(ReferenceProperty, self).__init__(verbose_name, **attrs)
        self.collection_name = collection_name
        if reference_class is None:
            reference_class = Model
        if not ((isinstance(reference_class, type) and
                 issubclass(reference_class, Model)) or
                reference_class is _SELF_REFERENCE):
            raise KindError('reference_class must be Model or _SELF_REFERENCE')
        # data_type mirrors reference_class so generic Property machinery
        # reports the referenced model as this property's value type.
        self.reference_class = self.data_type = reference_class
    def __property_config__(self, model_class, property_name):
        """Loads all of the references that point to this model.

        We need to do this to create the ReverseReferenceProperty properties
        for this model and create the <reference>_set attributes on the
        referenced model, e.g.:

            class Story(db.Model):
                title = db.StringProperty()
            class Comment(db.Model):
                story = db.ReferenceProperty(Story)
            story = Story.get(id)
            print [c for c in story.comment_set]

        In this example, the comment_set property was created based on the
        reference from Comment to Story (which is inherently one to many).

        Args:
            model_class: Model class which will have its reference properties
                initialized.
            property_name: Name of property being configured.

        Raises:
            DuplicatePropertyError if referenced class already has the
            provided collection name as a property.
        """
        super(ReferenceProperty, self).__property_config__(model_class,
                                                           property_name)
        if self.reference_class is _SELF_REFERENCE:
            # Resolve the self-reference placeholder now that the owning
            # class is known.
            self.reference_class = self.data_type = model_class
        if self.collection_name is None:
            self.collection_name = '%s_set' % (model_class.__name__.lower())
        existing_prop = getattr(self.reference_class, self.collection_name, None)
        if existing_prop is not None:
            # Comparing by class name/module rather than identity --
            # NOTE(review): presumably to tolerate the same model class
            # being redefined (e.g. module reload); confirm before changing.
            if not (isinstance(existing_prop, _ReverseReferenceProperty) and
                    existing_prop._prop_name == property_name and
                    existing_prop._model.__name__ == model_class.__name__ and
                    existing_prop._model.__module__ == model_class.__module__):
                raise DuplicatePropertyError('Class %s already has property %s '
                                             % (self.reference_class.__name__,
                                                self.collection_name))
        setattr(self.reference_class,
                self.collection_name,
                _ReverseReferenceProperty(model_class, property_name))
    def __get__(self, model_instance, model_class):
        """Get reference object.

        This method will fetch unresolved entities from the datastore if
        they are not already loaded.

        Returns:
            ReferenceProperty to Model object if property is set, else None.
        """
        if model_instance is None:
            return self
        if hasattr(model_instance, self.__id_attr_name()):
            reference_id = getattr(model_instance, self.__id_attr_name())
        else:
            reference_id = None
        if reference_id is not None:
            resolved = getattr(model_instance, self.__resolved_attr_name())
            if resolved is not None:
                return resolved
            else:
                # Lazy load: fetch once and cache on the instance.
                instance = get(reference_id)
                if instance is None:
                    raise Error('ReferenceProperty failed to be resolved')
                setattr(model_instance, self.__resolved_attr_name(), instance)
                return instance
        else:
            return None
    def __set__(self, model_instance, value):
        """Set reference.

        Accepts either a datastore Key (kept unresolved) or a model
        instance (cached as already resolved).
        """
        value = self.validate(value)
        if value is not None:
            if isinstance(value, datastore.Key):
                setattr(model_instance, self.__id_attr_name(), value)
                setattr(model_instance, self.__resolved_attr_name(), None)
            else:
                setattr(model_instance, self.__id_attr_name(), value.key())
                setattr(model_instance, self.__resolved_attr_name(), value)
        else:
            setattr(model_instance, self.__id_attr_name(), None)
            setattr(model_instance, self.__resolved_attr_name(), None)
    def get_value_for_datastore(self, model_instance):
        """Get key of reference rather than reference itself."""
        return getattr(model_instance, self.__id_attr_name())
    def validate(self, value):
        """Validate reference.

        Returns:
            A valid value.

        Raises:
            BadValueError for the following reasons:
                - Value is not saved.
                - Object not of correct model type for reference.
        """
        if isinstance(value, datastore.Key):
            return value
        if value is not None and not value.has_key():
            raise BadValueError(
                '%s instance must have a complete key before it can be stored as a '
                'reference' % self.reference_class.kind())
        value = super(ReferenceProperty, self).validate(value)
        if value is not None and not isinstance(value, self.reference_class):
            raise KindError('Property %s must be an instance of %s' %
                            (self.name, self.reference_class.kind()))
        return value
    def __id_attr_name(self):
        """Get attribute of referenced id.

        Returns:
            Attribute where to store id of referenced entity.
        """
        return self._attr_name()
    def __resolved_attr_name(self):
        """Get attribute of resolved attribute.

        The resolved attribute is where the actual loaded reference instance
        is stored on the referring model instance.

        Returns:
            Attribute name of where to store resolved reference model
            instance.
        """
        return '_RESOLVED' + self._attr_name()
# Short public alias.
Reference = ReferenceProperty
def SelfReferenceProperty(verbose_name=None, collection_name=None, **attrs):
    """Declare a reference property that points back at its own model.

    Example:
        class HtmlNode(db.Model):
            parent = db.SelfReferenceProperty('Parent', 'children')

    Args:
        verbose_name: User friendly name of property.
        collection_name: Name of collection on model.

    Raises:
        ConfigurationError: if a reference_class keyword is supplied; the
            reference class is implicitly the declaring model itself.
    """
    if 'reference_class' in attrs:
        raise ConfigurationError(
            'Do not provide reference_class to self-reference.')
    return ReferenceProperty(_SELF_REFERENCE,
                             verbose_name,
                             collection_name,
                             **attrs)


# Short public alias, matching the Reference/ReferenceProperty pairing.
SelfReference = SelfReferenceProperty
class _ReverseReferenceProperty(Property):
    """The automatically generated inverse of ReferenceProperty.

    When model A declares a Reference to model B, B grows an ``a_set``
    attribute of this type that queries for all A instances pointing at
    a given B instance (one-to-many access).
    """

    def __init__(self, model, prop):
        """Constructor for reverse reference.

        Unlike other property types this takes no standard Property
        options.

        Args:
            model: Model class that this property is a collection of.
            prop: Name of the foreign property on the referring model that
                points back to this property's entity.
        """
        self.__model = model
        self.__property = prop

    @property
    def _model(self):
        """Read-only access to the referring model class."""
        return self.__model

    @property
    def _prop_name(self):
        """Read-only access to the referring property's name."""
        return self.__property

    def __get__(self, model_instance, model_class):
        """Return a Query over all referring instances (or self on the class)."""
        if model_instance is None:
            return self
        query = Query(self.__model)
        return query.filter(self.__property + ' =', model_instance.key())

    def __set__(self, model_instance, value):
        """Reverse references are virtual and cannot be assigned."""
        raise BadValueError('Virtual property is read-only')
# Public aliases for the datastore transaction helpers, exposed in both
# snake_case and CamelCase spellings.
run_in_transaction = datastore.RunInTransaction
run_in_transaction_custom_retries = datastore.RunInTransactionCustomRetries
RunInTransaction = run_in_transaction
RunInTransactionCustomRetries = run_in_transaction_custom_retries
| rev2004/android2cloud.app-engine | google_appengine/google/appengine/ext/db/__init__.py | Python | mit | 101,475 |
# Generated by Django 3.2 on 2021-05-11 13:43
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 3.2).

    Drops the 'link', 'quantity', 'system' and 'title' fields from the
    StockItemTracking model.  Auto-generated migrations should not be
    hand-edited beyond comments.
    """

    dependencies = [
        ('stock', '0062_auto_20210511_2151'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='stockitemtracking',
            name='link',
        ),
        migrations.RemoveField(
            model_name='stockitemtracking',
            name='quantity',
        ),
        migrations.RemoveField(
            model_name='stockitemtracking',
            name='system',
        ),
        migrations.RemoveField(
            model_name='stockitemtracking',
            name='title',
        ),
    ]
| inventree/InvenTree | InvenTree/stock/migrations/0063_auto_20210511_2343.py | Python | mit | 676 |
def linear_search(lst, size, value):
    """Scan ``lst[0:size]`` front to back for ``value``.

    Args:
        lst: sequence to search.
        size: number of leading elements to examine.
        value: item to look for.

    Returns:
        Index of the first occurrence within the first ``size`` elements,
        or -1 when the value is absent.
    """
    for index in range(size):
        if lst[index] == value:
            return index
    return -1
def main():
    """Interactive demo: read a value and linearly search a fixed list.

    Prints the array, the complexity summary, and where (if anywhere)
    the requested value was found.
    """
    lst = [-31, 0, 1, 2, 2, 4, 65, 83, 99, 782]
    size = len(lst)
    value = int(input("\nInput a value to search for: "))

    print("\nOriginal Array: ")
    # Build the display string in one pass with join instead of the old
    # quadratic repeated-concatenation loop; the trailing space each
    # element carried is preserved so output is byte-identical.
    original_list = "".join(str(item) + " " for item in lst)
    print(original_list)

    print("\nLinear Search Big O Notation:\n--> Best Case: O(1)\n--> Average Case: O(n)\n--> Worst Case: O(n)\n")

    index = linear_search(lst, size, value)
    if index == -1:
        print(str(value) + " was not found in that array\n")
    else:
        print(str(value) + " was found at index " + str(index))


if __name__ == '__main__':
    main()
| EverythingAbout/Python | Searches/linear_search.py | Python | mit | 775 |
# Analyze Color of Object
import os
import cv2
import numpy as np
from . import print_image
from . import plot_image
from . import fatal_error
from . import plot_colorbar
def _pseudocolored_image(device, histogram, bins, img, mask, background, channel, filename, resolution,
                         analysis_images, debug):
    """Pseudocolor image.

    Inputs:
    histogram       = a normalized (binned) color-channel image; despite the
                      name this is used per-pixel, not as a 1-D histogram
    bins            = number of color bins the channel is divided into
    img             = input image
    mask            = binary mask image
    background      = what background image?: channel image (img) or white
    channel         = color channel name
    filename        = input image filename
    resolution      = output image resolution
    analysis_images = list of analysis image filenames
    debug           = print or plot. Print = save to file, Plot = print to screen.

    Returns:
    analysis_images = list of analysis image filenames

    :param histogram: list
    :param bins: int
    :param img: numpy array
    :param mask: numpy array
    :param background: str
    :param channel: str
    :param filename: str
    :param resolution: int
    :param analysis_images: list
    :return analysis_images: list
    """
    mask_inv = cv2.bitwise_not(mask)
    # colormap=2 is cv2.COLORMAP_JET: map each binned pixel value to a color.
    cplant = cv2.applyColorMap(histogram, colormap=2)
    # Keep the colormapped pixels only inside the plant mask.
    cplant1 = cv2.bitwise_and(cplant, cplant, mask=mask)
    output_imgs = {"pseudo_on_img": {"background": "img", "img": None},
                   "pseudo_on_white": {"background": "white", "img": None}}
    if background == 'img' or background == 'both':
        # mask the background and color the plant with color scheme 'jet'
        img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        img_back = cv2.bitwise_and(img_gray, img_gray, mask=mask_inv)
        img_back3 = np.dstack((img_back, img_back, img_back))
        output_imgs["pseudo_on_img"]["img"] = cv2.add(cplant1, img_back3)
    if background == 'white' or background == 'both':
        # Get the image size
        if np.shape(img)[2] == 3:
            ix, iy, iz = np.shape(img)
        else:
            ix, iy = np.shape(img)
        size = ix, iy
        # Compose the colored plant over a pure white background.
        back = np.zeros(size, dtype=np.uint8)
        w_back = back + 255
        w_back3 = np.dstack((w_back, w_back, w_back))
        img_back3 = cv2.bitwise_and(w_back3, w_back3, mask=mask_inv)
        output_imgs["pseudo_on_white"]["img"] = cv2.add(cplant1, img_back3)
    if filename:
        for key in output_imgs:
            if output_imgs[key]["img"] is not None:
                fig_name_pseudo = str(filename[0:-4]) + '_' + str(channel) + '_pseudo_on_' + \
                                  output_imgs[key]["background"] + '.jpg'
                # NOTE(review): `path` is only bound inside this branch; if
                # filename is truthy but no output image was produced, the
                # debug == 'print' branch below would hit an unbound `path`.
                path = os.path.dirname(filename)
                print_image(output_imgs[key]["img"], fig_name_pseudo)
                analysis_images.append(['IMAGE', 'pseudo', fig_name_pseudo])
    else:
        path = "."
    if debug is not None:
        if debug == 'print':
            for key in output_imgs:
                if output_imgs[key]["img"] is not None:
                    print_image(output_imgs[key]["img"], (str(device) + "_" + output_imgs[key]["background"] +
                                                          '_pseudocolor.jpg'))
            # Write the colorbar legend once per directory.
            fig_name = 'VIS_pseudocolor_colorbar_' + str(channel) + '_channel.svg'
            if not os.path.isfile(os.path.join(path, fig_name)):
                plot_colorbar(path, fig_name, bins)
        elif debug == 'plot':
            for key in output_imgs:
                if output_imgs[key]["img"] is not None:
                    plot_image(output_imgs[key]["img"])
    return analysis_images
def analyze_color(img, imgname, mask, bins, device, debug=None, hist_plot_type=None, pseudo_channel='v',
                  pseudo_bkg='img', resolution=300, filename=False):
    """Analyze the color properties of an image object

    Inputs:
    img             = image
    imgname         = name of input image
    mask            = mask made from selected contours
    bins            = number of color bins per channel
    device          = device number. Used to count steps in the pipeline
    debug           = None, print, or plot. Print = save to file, Plot = print to screen.
    hist_plot_type  = None, 'all', 'rgb', 'lab' or 'hsv'
    pseudo_channel  = None, 'l', 'm' (green-magenta), 'y' (blue-yellow), 'h', 's', or 'v';
                      creates a pseudocolored image based on the specified channel
    pseudo_bkg      = 'img' => channel image, 'white' => white background image, 'both' => both options
    resolution      = output image resolution
    filename        = False or image name. If defined print image

    Returns:
    device          = device number
    hist_header     = color histogram data table headers
    hist_data       = color histogram data table values
    analysis_images = list of output images

    :param img: numpy array
    :param imgname: str
    :param mask: numpy array
    :param bins: int
    :param device: int
    :param debug: str
    :param hist_plot_type: str
    :param pseudo_channel: str
    :param pseudo_bkg: str
    :param resolution: int
    :param filename: str
    :return device: int
    :return hist_header: list
    :return hist_data: list
    :return analysis_images: list
    """
    device += 1
    # Restrict analysis to the masked (plant) pixels only.
    masked = cv2.bitwise_and(img, img, mask=mask)
    b, g, r = cv2.split(masked)
    lab = cv2.cvtColor(masked, cv2.COLOR_BGR2LAB)
    l, m, y = cv2.split(lab)
    hsv = cv2.cvtColor(masked, cv2.COLOR_BGR2HSV)
    h, s, v = cv2.split(hsv)
    # Color channel dictionary: each 0-255 channel is rescaled into `bins`
    # buckets.  NOTE(review): `256 / bins` relies on Python 2 integer
    # division; under Python 3 this yields float arrays that cv2.calcHist
    # rejects -- use `//` if this is ever ported.
    norm_channels = {"b": b / (256 / bins),
                     "g": g / (256 / bins),
                     "r": r / (256 / bins),
                     "l": l / (256 / bins),
                     "m": m / (256 / bins),
                     "y": y / (256 / bins),
                     "h": h / (256 / bins),
                     "s": s / (256 / bins),
                     "v": v / (256 / bins)
                     }
    # Histogram plot types
    hist_types = {"all": ("b", "g", "r", "l", "m", "y", "h", "s", "v"),
                  "rgb": ("b", "g", "r"),
                  "lab": ("l", "m", "y"),
                  "hsv": ("h", "s", "v")}
    # If the user-input pseudo_channel is not None and is not found in the list of accepted channels, exit
    if pseudo_channel is not None and pseudo_channel not in norm_channels:
        fatal_error("Pseudocolor channel was " + str(pseudo_channel) +
                    ', but can only be one of the following: None, "l", "m", "y", "h", "s" or "v"!')
    # If the user-input pseudocolored image background is not in the accepted input list, exit
    if pseudo_bkg not in ["white", "img", "both"]:
        fatal_error("The pseudocolored image background was " + str(pseudo_bkg) +
                    ', but can only be one of the following: "white", "img", or "both"!')
    # If the user-input histogram color-channel plot type is not in the list of accepted channels, exit
    if hist_plot_type is not None and hist_plot_type not in hist_types:
        fatal_error("The histogram plot type was " + str(hist_plot_type) +
                    ', but can only be one of the following: None, "all", "rgb", "lab", or "hsv"!')
    # Per-channel histogram over the masked pixels, one bucket per bin.
    histograms = {
        "b": {"label": "blue", "graph_color": "blue",
              "hist": cv2.calcHist([norm_channels["b"]], [0], mask, [bins], [0, (bins - 1)])},
        "g": {"label": "green", "graph_color": "forestgreen",
              "hist": cv2.calcHist([norm_channels["g"]], [0], mask, [bins], [0, (bins - 1)])},
        "r": {"label": "red", "graph_color": "red",
              "hist": cv2.calcHist([norm_channels["r"]], [0], mask, [bins], [0, (bins - 1)])},
        "l": {"label": "lightness", "graph_color": "dimgray",
              "hist": cv2.calcHist([norm_channels["l"]], [0], mask, [bins], [0, (bins - 1)])},
        "m": {"label": "green-magenta", "graph_color": "magenta",
              "hist": cv2.calcHist([norm_channels["m"]], [0], mask, [bins], [0, (bins - 1)])},
        "y": {"label": "blue-yellow", "graph_color": "yellow",
              "hist": cv2.calcHist([norm_channels["y"]], [0], mask, [bins], [0, (bins - 1)])},
        "h": {"label": "hue", "graph_color": "blueviolet",
              "hist": cv2.calcHist([norm_channels["h"]], [0], mask, [bins], [0, (bins - 1)])},
        "s": {"label": "saturation", "graph_color": "cyan",
              "hist": cv2.calcHist([norm_channels["s"]], [0], mask, [bins], [0, (bins - 1)])},
        "v": {"label": "value", "graph_color": "orange",
              "hist": cv2.calcHist([norm_channels["v"]], [0], mask, [bins], [0, (bins - 1)])}
    }
    # Flatten each Nx1 histogram into a plain list of counts.
    # NOTE: the comprehension variable `l` shadows the lightness channel
    # split above; the channel array is not used again after this point.
    hist_data_b = [l[0] for l in histograms["b"]["hist"]]
    hist_data_g = [l[0] for l in histograms["g"]["hist"]]
    hist_data_r = [l[0] for l in histograms["r"]["hist"]]
    hist_data_l = [l[0] for l in histograms["l"]["hist"]]
    hist_data_m = [l[0] for l in histograms["m"]["hist"]]
    hist_data_y = [l[0] for l in histograms["y"]["hist"]]
    hist_data_h = [l[0] for l in histograms["h"]["hist"]]
    hist_data_s = [l[0] for l in histograms["s"]["hist"]]
    hist_data_v = [l[0] for l in histograms["v"]["hist"]]
    binval = np.arange(0, bins)
    bin_values = [l for l in binval]
    # Store Color Histogram Data
    hist_header = [
        'HEADER_HISTOGRAM',
        'bin-number',
        'bin-values',
        'blue',
        'green',
        'red',
        'lightness',
        'green-magenta',
        'blue-yellow',
        'hue',
        'saturation',
        'value'
    ]
    hist_data = [
        'HISTOGRAM_DATA',
        bins,
        bin_values,
        hist_data_b,
        hist_data_g,
        hist_data_r,
        hist_data_l,
        hist_data_m,
        hist_data_y,
        hist_data_h,
        hist_data_s,
        hist_data_v
    ]
    analysis_images = []
    if pseudo_channel is not None:
        analysis_images = _pseudocolored_image(device, norm_channels[pseudo_channel], bins, img, mask, pseudo_bkg,
                                               pseudo_channel, filename, resolution, analysis_images, debug)
    if hist_plot_type is not None and filename:
        # Imported lazily so headless runs without plotting never pay for
        # (or require) matplotlib; Agg backend keeps it display-free.
        import matplotlib
        matplotlib.use('Agg')
        from matplotlib import pyplot as plt
        # Create Histogram Plot
        for channel in hist_types[hist_plot_type]:
            plt.plot(histograms[channel]["hist"], color=histograms[channel]["graph_color"],
                     label=histograms[channel]["label"])
            plt.xlim([0, bins - 1])
            plt.legend()
        # Print plot
        fig_name = (str(filename[0:-4]) + '_' + str(hist_plot_type) + '_hist.svg')
        plt.savefig(fig_name)
        analysis_images.append(['IMAGE', 'hist', fig_name])
        if debug == 'print':
            fig_name = (str(device) + '_' + str(hist_plot_type) + '_hist.svg')
            plt.savefig(fig_name)
        plt.clf()
    return device, hist_header, hist_data, analysis_images
| AntonSax/plantcv | plantcv/analyze_color.py | Python | mit | 11,048 |
class Solution:
    """LeetCode 84: Largest Rectangle in Histogram."""

    # @param {integer[]} height
    # @return {integer}
    def largestRectangleArea(self, height):
        """Return the area of the largest rectangle that fits under the bars.

        Monotonic-stack algorithm: O(n) time, O(n) space.  The stack holds
        indices of bars with non-decreasing heights; -1 is a sentinel
        meaning "no bar to the left".

        Fix: `range` replaces the Python-2-only `xrange`; they behave
        identically in this loop, so the code now runs on both versions.
        """
        n = len(height)
        best = 0
        stack = [-1]
        for i in range(n):
            # Pop every bar taller than the incoming one; each popped
            # bar's maximal rectangle ends just before index i, and its
            # left edge is just after the new stack top.
            while stack[-1] > -1 and height[i] < height[stack[-1]]:
                top = stack.pop()
                best = max(best, height[top] * (i - 1 - stack[-1]))
            stack.append(i)
        # Bars still on the stack extend to the right edge (width n-1-...).
        while stack[-1] != -1:
            top = stack.pop()
            best = max(best, height[top] * (n - 1 - stack[-1]))
        return best
'''
Created on Jan 18, 2010
@author: Paul
'''
from SQLEng import SQLEng
class PduSender(object):
    """Queue an SMS PDU for delivery by Gammu-smsd.

    Instead of invoking the gammu command line (which stalls smsd for a
    while), a record is inserted into the MySQL outbox table that
    Gammu-smsd polls and sends.
    """

    def get_mesg(self, byte_array):
        """Hex-encode *byte_array* as a lowercase, zero-padded string.

        Each byte becomes exactly two hex digits, e.g. [0, 5, 255] ->
        "0005ff".  Assumes every element is an int in range 0..255.

        Fix: the previous implementation rebuilt this from hex() output
        with lstrip("0x"), which strips a *character set* rather than a
        prefix and only produced correct results for single-byte values
        by accident; format(byte, "02x") states the intent directly.
        """
        return "".join(format(byte, "02x") for byte in byte_array)

    def send(self, to, byte_array):
        """Insert the encoded PDU into the sent-box table for smsd to pick up.

        Args:
            to: destination phone number.
            byte_array: PDU payload as a sequence of byte values.
        """
        eng = SQLEng()
        eng.exeSQL(eng.getInsetSentBox(to, self.get_mesg(byte_array)))

    def __init__(self):
        """No per-instance state; the SQL engine is created per send()."""
        pass
| lubao/UjU_Windows | src/GammuSender.py | Python | mit | 1,013 |
from __future__ import absolute_import
# Copyright (c) 2010-2017 openpyxl
from .cell import Cell, WriteOnlyCell
from .read_only import ReadOnlyCell
| 171121130/SWI | venv/Lib/site-packages/openpyxl/cell/__init__.py | Python | mit | 149 |
from Robinhood import Robinhood
# Setup: authenticate against the Robinhood API.
# NOTE(review): never commit real credentials -- load them from the
# environment or a config file instead of hard-coding them here.
my_trader = Robinhood(username="YOUR_USERNAME", password="YOUR_PASSWORD");
# Get stock information.
# Note: sometimes more than one instrument may be returned for a given
# stock symbol; this example just takes the first.
stock_instrument = my_trader.instruments("GEVO")[0]
# Get a stock's quote and print it.
my_trader.print_quote("AAPL")
# With no argument, print_quote prompts interactively for a symbol.
my_trader.print_quote();
# Print quotes for multiple symbols at once.
my_trader.print_quotes(stocks=["BBRY", "FB", "MSFT"])
# View all data for a given stock: ask price and size, bid price and size,
# previous close, adjusted previous close, etc.
quote_info = my_trader.quote_data("GEVO")
print(quote_info);
# Place a buy order (uses market bid price).
buy_order = my_trader.place_buy_order(stock_instrument, 1)
# Place a sell order.
sell_order = my_trader.place_sell_order(stock_instrument, 1)
| itsff/Robinhood | example.py | Python | mit | 826 |
#!/usr/bin/env python3
from multiprocessing import Process, Pool
import os, time
def proc(name):
    """Worker body: log start (with pid/ppid), sleep 3 seconds, log end.

    Args:
        name: label included in the start message so runs from Process
            and Pool workers can be told apart.
    """
    pid, ppid = os.getpid(), os.getppid()
    print(time.asctime(), 'child process(name: %s) id %s. ppid %s' % (name, pid, ppid))
    time.sleep(3)
    print(time.asctime(), 'child process end')
if __name__ == '__main__':
    # Demo 1: a single explicitly managed child process.
    p = Process(target = proc, args = ('child',))
    print(time.asctime(), 'child process will start')
    p.start()
    p.join()  # block until the child finishes
    print('first child process end')
    # Demo 2: fan the same worker out over a pool of 4 processes.
    pl = Pool(4)
    for index in range(4):
        pl.apply_async(proc, args = (index,))
    pl.close()  # no more tasks will be submitted
    pl.join()   # wait for all pool workers to drain
    print(time.asctime(), 'parent process end')
| JShadowMan/package | python/multi-process-thread/multiprocess.py | Python | mit | 660 |
# Soar simulator world definition: an 8x8 arena with interior walls.
# dimensions/wall/initialRobotLoc are injected by the soar world loader;
# walls are given as ((x1, y1), (x2, y2)) segment endpoints.
dimensions(8,8)
wall((2,0),(2,4))
wall((2,4),(4,4))
wall((2,6),(6,6))
wall((6,6),(6,0))
wall((6,2),(4,2))
# Robot starts near the lower-left corner, outside the walled pocket.
initialRobotLoc(1.0, 1.0)
| Cynary/distro6.01 | arch/6.01Soft/lib601-F13-4/soar/worlds/bigFrustrationWorld.py | Python | mit | 133 |
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestBlogSettings(unittest.TestCase):
    """Placeholder test case for the Blog Settings DocType.

    No behavior to verify yet; exists so the framework's test discovery
    finds a test module for this doctype.
    """
    pass
| adityahase/frappe | frappe/website/doctype/blog_settings/test_blog_settings.py | Python | mit | 224 |
#!/usr/bin/env python
import sys,os
import textwrap
def print_header():
    """Print a VCFv4.1 header describing the bamsurgeon spike-in INFO,
    ALT and FORMAT fields. Python 2 only (print statement). The final
    #CHROM line uses escape sequences so columns come out tab-separated.
    """
    print textwrap.dedent("""\
    ##fileformat=VCFv4.1
    ##phasing=none
    ##INDIVIDUAL=TRUTH
    ##SAMPLE=<ID=TRUTH,Individual="TRUTH",Description="bamsurgeon spike-in">
    ##INFO=<ID=CIPOS,Number=2,Type=Integer,Description="Confidence interval around POS for imprecise variants">
    ##INFO=<ID=IMPRECISE,Number=0,Type=Flag,Description="Imprecise structural variation">
    ##INFO=<ID=SVTYPE,Number=1,Type=String,Description="Type of structural variant">
    ##INFO=<ID=SVLEN,Number=.,Type=Integer,Description="Difference in length between REF and ALT alleles">
    ##INFO=<ID=SOMATIC,Number=0,Type=Flag,Description="Somatic mutation in primary">
    ##INFO=<ID=VAF,Number=1,Type=Float,Description="Variant Allele Frequency">
    ##INFO=<ID=DPR,Number=1,Type=Float,Description="Avg Depth in Region (+/- 1bp)">
    ##INFO=<ID=MATEID,Number=1,Type=String,Description="Breakend mate">
    ##ALT=<ID=INV,Description="Inversion">
    ##ALT=<ID=DUP,Description="Duplication">
    ##ALT=<ID=DEL,Description="Deletion">
    ##ALT=<ID=INS,Description="Insertion">
    ##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
    #CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tSPIKEIN""")
# Convert bamsurgeon *.log files in the given directory into a VCF on stdout.
if len(sys.argv) == 2:
    print_header()
    logdir_files = os.listdir(sys.argv[1])
    for filename in logdir_files:
        if filename.endswith('.log'):
            with open(sys.argv[1] + '/' + filename, 'r') as infile:
                for line in infile:
                    # Only snv records are converted; column layout is
                    # assumed from the bamsurgeon snv log format -- confirm.
                    if line.startswith('snv'):
                        #chrom, pos, mut = line.strip().split()
                        c = line.strip().split()
                        chrom = c[1].split(':')[0]
                        pos = c[3]
                        mut = c[4]
                        dpr = c[6]
                        vaf = c[7]
                        # mutation is recorded as "REF-->ALT"
                        ref,alt = mut.split('-->')
                        print "\t".join((chrom,pos,'.',ref,alt,'100','PASS','SOMATIC;VAF=' + vaf + ';DPR=' + dpr,'GT','0/1'))
else:
    print "usage:", sys.argv[0], "<log directory>"
| MischaLundberg/bamsurgeon | scripts/makevcf.py | Python | mit | 2,120 |
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Utility methods, for compatibility between Python version
:author: Thomas Calmant
:copyright: Copyright 2017, Thomas Calmant
:license: Apache License 2.0
:version: 0.3.1
..
Copyright 2017 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (0, 3, 1)
__version__ = ".".join(str(x) for x in __version_info__)

# Documentation strings format (consumed by docutils-aware tools)
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
# Python 2/3 compatibility shims: define STRING_TYPES / NUMERIC_TYPES and
# the to_bytes / from_bytes converters for the running interpreter.
if sys.version_info[0] < 3:
    # Python 2
    # pylint: disable=E1101
    import types
    try:
        STRING_TYPES = (
            types.StringType,
            types.UnicodeType
        )
    except NameError:
        # Python built without unicode support
        STRING_TYPES = (types.StringType,)
    NUMERIC_TYPES = (
        types.IntType,
        types.LongType,
        types.FloatType
    )

    def to_bytes(string):
        """
        Converts the given string into bytes (str on Python 2)
        """
        # pylint: disable=E0602
        if type(string) is unicode:
            return str(string)
        return string

    def from_bytes(data):
        """
        Converts the given bytes into a string
        """
        if type(data) is str:
            return data
        return str(data)
else:
    # Python 3
    # pylint: disable=E1101
    STRING_TYPES = (
        bytes,
        str
    )
    NUMERIC_TYPES = (
        int,
        float
    )

    def to_bytes(string):
        """
        Converts the given string into UTF-8 encoded bytes
        """
        if type(string) is bytes:
            return string
        return bytes(string, "UTF-8")

    def from_bytes(data):
        """
        Converts the given UTF-8 bytes into a string
        """
        if type(data) is str:
            return data
        return str(data, "UTF-8")
# ------------------------------------------------------------------------------
# Enumerations: is_enum() degrades gracefully on interpreters without the
# enum module (pre-3.4 without the backport).
try:
    import enum

    def is_enum(obj):
        """
        Checks if an object is from an enumeration class

        :param obj: Object to test
        :return: True if the object is an enumeration item
        """
        return isinstance(obj, enum.Enum)
except ImportError:
    # Pre-Python 3.4
    def is_enum(_):
        """
        Before Python 3.4, enumerations didn't exist.

        :param _: Object to test
        :return: Always False
        """
        return False
# ------------------------------------------------------------------------------
# Common
# Python-2 style type-name aliases kept for use elsewhere in jsonrpclib
DictType = dict
ListType = list
TupleType = tuple

# Container types treated as JSON arrays
ITERABLE_TYPES = (
    list,
    set, frozenset,
    tuple
)

VALUE_TYPES = (
    bool,
    type(None)
)

# Types that map directly to a JSON scalar value
PRIMITIVE_TYPES = STRING_TYPES + NUMERIC_TYPES + VALUE_TYPES
| CloudI/CloudI | src/service_api/python/jsonrpclib/jsonrpclib/utils.py | Python | mit | 3,412 |
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import camera
import time
class Display(object):
    """Base class wrapping GLUT window setup, event dispatch and the draw loop.

    Subclasses override the no-op hooks below (init, close, mouse, keyboard,
    timerFired, draw, ...) instead of talking to GLUT directly; the wrapper
    methods maintain mouse deltas and the repeating timer.
    """

    # Inheritance convenience hooks -- intentionally empty, override as needed.
    def init(self): pass
    def close(self): pass
    def mouse(self, mouseButton, buttonState, x, y): pass
    def mouseMotion(self, x, y, dx, dy): pass
    def passiveMouseMotion(self, x, y, dx, dy): pass
    def keyboard(self, key, x, y): pass
    def specialKeys(self, key, x, y): pass
    def timerFired(self, value): pass
    def draw(self): pass

    def __init__(self, width = 1280, height = 720, frameName = "OpenGL"):
        """Create the window, camera and register all GLUT callbacks.

        width, height: initial window size in pixels.
        frameName: window title.
        """
        self.frameSize = (self.width, self.height) = (width, height)
        self.frameName = frameName
        self.timerDelay = 20  # milliseconds between timerFired() callbacks
        self.clearColor = (135.0/255, 206.0/255, 250.0/255, 1)  # sky blue
        self.defaultColor = (1, 1, 1)

        # Camera positioning (position and yaw/pitch/roll)
        self.pos = (0, 0, 0)
        self.ypr = (0, 0, 0)

        self.init()  # subclass hook, runs before any GL setup

        # Set up graphics
        self.initGL()
        self.initGLUT()

        self.camera = camera.Camera(self.width, self.height)

        # Last seen mouse position, used to compute per-event deltas
        self._mouseX = None
        self._mouseY = None

    def initGL(self):
        """One-time GL state setup."""
        glClearColor(*self.clearColor)

    def initGLUT(self):
        """Initialize the window manager (GLUT) and register all callbacks."""
        glutInit()
        glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH)
        glutInitWindowSize(*self.frameSize)
        glutCreateWindow(self.frameName)
        # Register the wrapper functions (not the bare hooks) so deltas and
        # the timer chain are maintained.
        glutDisplayFunc(self.drawWrapper)
        glutIdleFunc(self.drawWrapper)
        # BUG FIX: previously registered self.timerFired directly, so
        # timerFiredWrapper never ran and the timer stopped after one shot.
        glutTimerFunc(self.timerDelay, self.timerFiredWrapper, 0)
        glutMouseFunc(self.mouse)
        glutMotionFunc(self.mouseMotionWrapper)
        glutPassiveMotionFunc(self.passiveMouseMotionWrapper)
        glutKeyboardFunc(self.keyboard)
        glutSpecialFunc(self.specialKeys)
        glutReshapeFunc(self.reshape)
        # Try to register a close function (fall back to a different one);
        # availability depends on the GLUT implementation in use.
        try:
            glutCloseFunc(self.close)
        except:
            glutWMCloseFunc(self.close)

    def preGL(self):
        """GL commands executed before each draw."""
        glShadeModel(GL_FLAT)
        glEnable(GL_DEPTH_TEST)
        # Set up colors and clear buffers
        glClearColor(*self.clearColor)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        glColor3f(*self.defaultColor)
        glLoadIdentity()

    def postGL(self):
        """Swap buffers and throttle to roughly 60 frames per second."""
        glutSwapBuffers()
        time.sleep(1/60.0)

    def timerFiredWrapper(self, value):
        """Invoke the subclass hook, then re-register so the timer repeats."""
        self.timerFired(value)
        # BUG FIX: re-register this wrapper (previously re-registered the
        # bare timerFired hook, which never re-arms the timer).
        glutTimerFunc(self.timerDelay, self.timerFiredWrapper, value + 1)

    def drawWrapper(self):
        """Handle per-frame GL bookkeeping around the subclass draw()."""
        self.preGL()
        # Let the camera draw the view
        self.camera.draw(self.draw, self.pos, self.ypr)
        self.postGL()

    def mouseMotionWrapper(self, x, y):
        """Forward mouse motion (button held) with position deltas."""
        if self._mouseX is None or self._mouseY is None:
            (self._mouseX, self._mouseY) = (x, y)
        (dx, dy) = (x - self._mouseX, y - self._mouseY)
        self.mouseMotion(x, y, dx, dy)
        (self._mouseX, self._mouseY) = (x, y)

    def passiveMouseMotionWrapper(self, x, y):
        """Forward mouse motion (no button held) with position deltas."""
        if self._mouseX is None or self._mouseY is None:
            (self._mouseX, self._mouseY) = (x, y)
        (dx, dy) = (x - self._mouseX, y - self._mouseY)
        self.passiveMouseMotion(x, y, dx, dy)
        (self._mouseX, self._mouseY) = (x, y)

    def reshape(self, width, height):
        """Propagate window resizes to GLUT and the camera.

        NOTE(review): self.width/self.height are never updated here, so the
        comparison is always against the constructor size -- confirm intent.
        """
        if self.width != width or self.height != height:
            glutReshapeWindow(width, height)
            self.camera.width = width
            self.camera.height = height

    def run(self):
        """Enter the GLUT main loop (does not return)."""
        glutMainLoop()
| Alex4913/PyOpenGL-Boilerplate | src/display.py | Python | mit | 3,803 |
import copy
from collections import OrderedDict
from collections import defaultdict
from conans.model.env_info import EnvValues
from conans.model.options import OptionsValues
from conans.model.values import Values
class Profile(object):
    """A conan profile: settings, per-package settings, environment values,
    options and build requirements."""

    def __init__(self):
        # Profile sections, all empty by default
        self.settings = OrderedDict()
        self.package_settings = defaultdict(OrderedDict)
        self.env_values = EnvValues()
        self.options = OptionsValues()
        self.build_requires = OrderedDict()  # conan_ref Pattern: list of conan_ref

    @property
    def settings_values(self):
        """Settings exposed as a Values object."""
        return Values.from_list(list(self.settings.items()))

    @property
    def package_settings_values(self):
        """Per-package settings as {package: [(name, value), ...]}."""
        return {pkg: list(pkg_settings.items())
                for pkg, pkg_settings in self.package_settings.items()}

    def dumps(self):
        """Render the profile in its text (INI-like) format."""
        lines = ["[settings]"]
        lines.extend("%s=%s" % (name, value)
                     for name, value in self.settings.items())
        for package, values in self.package_settings.items():
            lines.extend("%s:%s=%s" % (package, name, value)
                         for name, value in values.items())
        lines.append("[options]")
        lines.append(self.options.dumps())
        lines.append("[build_requires]")
        for pattern, req_list in self.build_requires.items():
            lines.append("%s: %s" % (pattern, ", ".join(str(r) for r in req_list)))
        lines.append("[env]")
        lines.append(self.env_values.dumps())
        # Collapse the blank lines that empty sections leave behind
        return "\n".join(lines).replace("\n\n", "\n")

    def update(self, other):
        """Merge *other* into this profile; other's entries take priority."""
        self.update_settings(other.settings)
        self.update_package_settings(other.package_settings)
        # env values merge in the opposite direction: update other's values
        # with ours and then adopt other's object, so other wins conflicts
        other.env_values.update(self.env_values)
        self.env_values = other.env_values
        self.options.update(other.options)
        for pattern, req_list in other.build_requires.items():
            self.build_requires.setdefault(pattern, []).extend(req_list)

    def update_settings(self, new_settings):
        """Mix the specified settings with the current profile.
        Specified settings are prioritized to profile"""
        assert isinstance(new_settings, OrderedDict)
        merged = copy.copy(self.settings)
        if new_settings:
            # When a parent setting changes value, its subsettings go stale.
            # Example: a different "compiler" invalidates "compiler.XXX".
            for name, value in new_settings.items():
                if "." in name:
                    continue
                if name in self.settings and self.settings[name] != value:
                    prefix = "%s." % name
                    stale = [cur for cur in self.settings if cur.startswith(prefix)]
                    for cur in stale:
                        del merged[cur]
            # Now merge the new values on top
            merged.update(new_settings)
        self.settings = merged

    def update_package_settings(self, package_settings):
        """Mix the specified package settings with the specified profile.
        Specified package settings are prioritized to profile"""
        for package_name, pkg_settings in package_settings.items():
            self.package_settings[package_name].update(pkg_settings)
| birsoyo/conan | conans/model/profile.py | Python | mit | 3,409 |
__author__ = 'sei'

# Default serial-port connection parameters for the PI stage controller.
DEFAULT_SERIAL = '/dev/ttyUSB0'
DEFAULT_BAUDRATE = 57600
| sdickreuter/python-pistage | build/lib/PIStage/_defines.py | Python | mit | 77 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Sample regex patterns and file paths; the *10 duplication presumably just
# pads the lists out for timing/demo purposes -- TODO confirm.
patterns = [r'^.*?/bc_jpg_makerDrop/(crop_fullsize_pad_center)/?.*?/(\d{9}(.*?))\.(.*?)$',
            r'^.*?/bc_jpg_makerDrop/(crop_fullsize_pad_anchor)/?.*?/(\d{9}(.*?))\.(.*?)$',
            r'^.*?/bfly_jpg_makerDrop/(crop_fullsize_center)/?.*?/(\d{9}(.*?))\.(.*?)$',
            r'^.*?/bfly_jpg_makerDrop/(crop_fullsize_anchor)/?.*?/(\d{9}(.*?))\.(.*?)$']*10
strings = ["/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_anchor/346470409.png",
           "/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_center/346470408_1.jpg",
           "/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_anchor/346470407_alt01.png",
           "/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_center/346470406_1.png",
           "/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_anchor/346880405.png",
           "/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_center/346470404_1.jpg",
           "/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_center/346470403.png",
           "/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_anchor/336470402.jpg"]*10
def matches_pattern(str, patterns):
    """Return (match_object, pattern) for the first compiled pattern in
    *patterns* that matches *str*, or False when none match.

    Note: the parameter name 'str' shadows the builtin; it is kept so the
    call signature stays backward compatible.
    """
    for pattern in patterns:
        # BUG FIX: previously pattern.match() was evaluated twice per hit;
        # match once and reuse the result.
        match = pattern.match(str)
        if match:
            return match, pattern
    return False
def regex_matcherator(strings,patterns):
    """For each string matching one of *patterns*, print the pattern, the
    last two path components and the captured groups. Python 2 only
    (print statements).

    NOTE(review): matches_pattern() is re-evaluated for every print line,
    so each hit runs the match up to four times.
    """
    import re
    compiled_patterns = list(map(re.compile, patterns))
    for s in strings:
        if matches_pattern(s, compiled_patterns):
            print matches_pattern(s, compiled_patterns)[1].pattern
            print '--'.join(s.split('/')[-2:])
            print matches_pattern(s, compiled_patterns)[0].groups()
            print '\n'
# NOTE(review): regex_matcherator has no yield/return, so r is always None
# and the commented-out r.next() below would raise if re-enabled.
r = regex_matcherator(strings,patterns)
#print r.next()
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/scout/trap/shared_trap_webber.iff"
result.attribute_template_id = -1
result.stfName("item_n","trap_webber")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/tangible/scout/trap/shared_trap_webber.py | Python | mit | 444 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/wearables/ithorian/shared_ith_shirt_s09.iff"
result.attribute_template_id = 11
result.stfName("wearables_name","ith_shirt_s09")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/tangible/wearables/ithorian/shared_ith_shirt_s09.py | Python | mit | 464 |
"""Extension to execute code outside the Python shell window.
This adds the following commands:
- Check module does a full syntax check of the current module.
It also runs the tabnanny to catch any inconsistent tabs.
- Run module executes the module's code in the __main__ namespace. The window
must have been saved previously. The module is added to sys.modules, and is
also added to the __main__ namespace.
XXX GvR Redesign this interface (yet again) as follows:
- Present a dialog box for ``Run Module''
- Allow specify command line arguments in the dialog box
"""
import os
import re
import string
import tabnanny
import tokenize
import tkMessageBox
from idlelib import PyShell
from idlelib.configHandler import idleConf
# Characters that may appear in a Python identifier; used to extend the
# ERROR highlight to the whole word at a syntax-error position.
IDENTCHARS = string.ascii_letters + string.digits + "_"

# Dialog text shown when tabnanny reports inconsistent indentation.
indent_message = """Error: Inconsistent indentation detected!
1) Your indentation is outright incorrect (easy to fix), OR
2) Your indentation mixes tabs and spaces.
To fix case 2, change all tabs to spaces by using Edit->Select All followed \
by Format->Untabify Region and specify the number of columns used by each tab.
"""
class ScriptBinding:
    """Implements the Check Module and Run Module IDLE menu commands.

    Python 2 era code (except-comma syntax, tkMessageBox).
    """

    menudefs = [
        ('run', [None,
                 ('Check Module', '<<check-module>>'),
                 ('Run Module', '<<run-module>>'), ]), ]

    def __init__(self, editwin):
        self.editwin = editwin
        # Provide instance variables referenced by Debugger
        # XXX This should be done differently
        self.flist = self.editwin.flist
        self.root = self.editwin.root

    def check_module_event(self, event):
        """Menu handler: syntax-check then tab-check the current module."""
        filename = self.getfilename()
        if not filename:
            return 'break'
        if not self.checksyntax(filename):
            return 'break'
        if not self.tabnanny(filename):
            return 'break'

    def tabnanny(self, filename):
        """Run tabnanny over *filename*; on inconsistent tabs or a
        tokenizing error, jump to the offending line, show a dialog and
        return False. Return True when the file is clean."""
        f = open(filename, 'r')
        try:
            tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
        except tokenize.TokenError, msg:
            msgtxt, (lineno, start) = msg
            self.editwin.gotoline(lineno)
            self.errorbox("Tabnanny Tokenizing Error",
                          "Token Error: %s" % msgtxt)
            return False
        except tabnanny.NannyNag, nag:
            # The error messages from tabnanny are too confusing...
            self.editwin.gotoline(nag.get_lineno())
            self.errorbox("Tab/space error", indent_message)
            return False
        return True

    def checksyntax(self, filename):
        """Compile *filename* and return the code object, or False after
        showing and highlighting the syntax error."""
        self.shell = shell = self.flist.open_shell()
        saved_stream = shell.get_warning_stream()
        shell.set_warning_stream(shell.stderr)
        f = open(filename, 'r')
        source = f.read()
        f.close()
        # Normalize line endings; compile() wants \n and a trailing newline
        if '\r' in source:
            source = re.sub(r"\r\n", "\n", source)
            source = re.sub(r"\r", "\n", source)
        if source and source[-1] != '\n':
            source = source + '\n'
        text = self.editwin.text
        text.tag_remove("ERROR", "1.0", "end")
        try:
            try:
                # If successful, return the compiled code
                return compile(source, filename, "exec")
            except (SyntaxError, OverflowError), err:
                try:
                    msg, (errorfilename, lineno, offset, line) = err
                    if not errorfilename:
                        err.args = msg, (filename, lineno, offset, line)
                        err.filename = filename
                    self.colorize_syntax_error(msg, lineno, offset)
                except:
                    # err did not unpack as expected; fall back to str()
                    msg = "*** " + str(err)
                self.errorbox("Syntax error",
                              "There's an error in your program:\n" + msg)
                return False
        finally:
            # Always restore the shell's original warning stream
            shell.set_warning_stream(saved_stream)

    def colorize_syntax_error(self, msg, lineno, offset):
        """Tag the error position (and enclosing word) with the ERROR style
        and move the insert cursor next to it."""
        text = self.editwin.text
        pos = "0.0 + %d lines + %d chars" % (lineno-1, offset-1)
        text.tag_add("ERROR", pos)
        char = text.get(pos)
        if char and char in IDENTCHARS:
            text.tag_add("ERROR", pos + " wordstart", pos)
        if '\n' == text.get(pos):   # error at line end
            text.mark_set("insert", pos)
        else:
            text.mark_set("insert", pos + "+1c")
        text.see(pos)

    def run_module_event(self, event):
        """Run the module after setting up the environment.

        First check the syntax. If OK, make sure the shell is active and
        then transfer the arguments, set the run environment's working
        directory to the directory of the module being executed and also
        add that directory to its sys.path if not already included.
        """
        filename = self.getfilename()
        if not filename:
            return 'break'
        code = self.checksyntax(filename)
        if not code:
            return 'break'
        if not self.tabnanny(filename):
            return 'break'
        shell = self.shell
        interp = shell.interp
        if PyShell.use_subprocess:
            shell.restart_shell()
        dirname = os.path.dirname(filename)
        # XXX Too often this discards arguments the user just set...
        interp.runcommand("""if 1:
            _filename = %r
            import sys as _sys
            from os.path import basename as _basename
            if (not _sys.argv or
                _basename(_sys.argv[0]) != _basename(_filename)):
                _sys.argv = [_filename]
            import os as _os
            _os.chdir(%r)
            del _filename, _sys, _basename, _os
            \n""" % (filename, dirname))
        interp.prepend_syspath(filename)
        # XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
        # go to __stderr__.  With subprocess, they go to the shell.
        # Need to change streams in PyShell.ModifiedInterpreter.
        interp.runcode(code)
        return 'break'

    def getfilename(self):
        """Get source filename.  If not saved, offer to save (or create) file

        The debugger requires a source file.  Make sure there is one, and that
        the current version of the source buffer has been saved.  If the user
        declines to save or cancels the Save As dialog, return None.

        If the user has configured IDLE for Autosave, the file will be
        silently saved if it already exists and is dirty.
        """
        filename = self.editwin.io.filename
        if not self.editwin.get_saved():
            autosave = idleConf.GetOption('main', 'General',
                                          'autosave', type='bool')
            if autosave and filename:
                self.editwin.io.save(None)
            else:
                reply = self.ask_save_dialog()
                self.editwin.text.focus_set()
                if reply == "ok":
                    self.editwin.io.save(None)
                    filename = self.editwin.io.filename
                else:
                    filename = None
        return filename

    def ask_save_dialog(self):
        """Show an OK/Cancel 'save before run' dialog; return the choice."""
        msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
        mb = tkMessageBox.Message(title="Save Before Run or Check",
                                  message=msg,
                                  icon=tkMessageBox.QUESTION,
                                  type=tkMessageBox.OKCANCEL,
                                  default=tkMessageBox.OK,
                                  master=self.editwin.text)
        return mb.show()

    def errorbox(self, title, message):
        # XXX This should really be a function of EditorWindow...
        tkMessageBox.showerror(title, message, master=self.editwin.text)
        self.editwin.text.focus_set()
| babyliynfg/cross | tools/project-creator/Python2.6.6/Lib/idlelib/ScriptBinding.py | Python | mit | 7,992 |
# -*- coding:utf8 -*-
from fabric.api import task, run, local, cd, hosts, env
import time
from oozappa.config import get_config, procure_common_functions
_settings = get_config()  # merged vars for this environment (staging + common)
procure_common_functions()  # make the shared fabric helpers importable below
import sys
from common_multiple_fabric_environment import _deploy_template_sample_a
test_host = ('192.168.0.110',) #FIXME: hard-coded test host
@task
def ls():
    u'''run ls command on local machine.'''
    local('ls -la')

@task
def ps():
    u'''run ps command on local machine.'''
    local('ps ax')
@task
def sys_path():
    u'''print the Python import path (sys.path) for debugging.'''
    import sys
    print(sys.path)

@task
def sleep():
    u'''sleep 5 second.'''
    print('stop 5 sec...')
    time.sleep(5)
    print('5 sec... passed')

@task
def printsetting():
    u'''print setting from staging.vars and common.vars'''
    print(_settings)
@task
@hosts(test_host)
def deploy_template_sample_a():
    u'''deploy sample template A to the test host.'''
    _deploy_template_sample_a(_settings.sample_template_vars.sample_a)
# The remaining tasks are placeholder examples ("eg.") that only print.
@task
def launch_instance_from_app_a_image():
    u'''eg. launch instance from app a image.'''
    print('launch_instance_from_app_a_image')

@task
def set_env_latest_app_a():
    u'''eg. search latest app type a instance and set fabric env.'''
    print('set_env_latest_app_a')

@task
def set_env_latest_app_b():
    u'''eg. search latest app type b instance and set fabric env.'''
    print('set_env_latest_app_b')

@task
def launch_instance_from_app_b_image():
    u'''eg. launch instance from app b image.'''
    print('launch_instance_from_app_b_image')

@task
def production_specific_setting():
    u'''eg. production specific setting'''
    print('production_specific_setting')
# Symbolic key names; the values are the string identifiers Sublime Text
# uses in keymap definitions.

# Navigation / editing keys
KEY_UP = "up"
KEY_DOWN = "down"
KEY_RIGHT = "right"
KEY_LEFT = "left"
KEY_INSERT = "insert"
KEY_HOME = "home"
KEY_END = "end"
KEY_PAGEUP = "pageup"
KEY_PAGEDOWN = "pagedown"
KEY_BACKSPACE = "backspace"
KEY_DELETE = "delete"
KEY_TAB = "tab"
KEY_ENTER = "enter"
KEY_PAUSE = "pause"
KEY_ESCAPE = "escape"
KEY_SPACE = "space"

# Numeric keypad
KEY_KEYPAD0 = "keypad0"
KEY_KEYPAD1 = "keypad1"
KEY_KEYPAD2 = "keypad2"
KEY_KEYPAD3 = "keypad3"
KEY_KEYPAD4 = "keypad4"
KEY_KEYPAD5 = "keypad5"
KEY_KEYPAD6 = "keypad6"
KEY_KEYPAD7 = "keypad7"
KEY_KEYPAD8 = "keypad8"
KEY_KEYPAD9 = "keypad9"
KEY_KEYPAD_PERIOD = "keypad_period"
KEY_KEYPAD_DIVIDE = "keypad_divide"
KEY_KEYPAD_MULTIPLY = "keypad_multiply"
KEY_KEYPAD_MINUS = "keypad_minus"
KEY_KEYPAD_PLUS = "keypad_plus"
KEY_KEYPAD_ENTER = "keypad_enter"
KEY_CLEAR = "clear"

# Function keys
KEY_F1 = "f1"
KEY_F2 = "f2"
KEY_F3 = "f3"
KEY_F4 = "f4"
KEY_F5 = "f5"
KEY_F6 = "f6"
KEY_F7 = "f7"
KEY_F8 = "f8"
KEY_F9 = "f9"
KEY_F10 = "f10"
KEY_F11 = "f11"
KEY_F12 = "f12"
KEY_F13 = "f13"
KEY_F14 = "f14"
KEY_F15 = "f15"
KEY_F16 = "f16"
KEY_F17 = "f17"
KEY_F18 = "f18"
KEY_F19 = "f19"
KEY_F20 = "f20"
KEY_SYSREQ = "sysreq"
KEY_BREAK = "break"
KEY_CONTEXT_MENU = "context_menu"

# Browser / media keys
KEY_BROWSER_BACK = "browser_back"
KEY_BROWSER_FORWARD = "browser_forward"
KEY_BROWSER_REFRESH = "browser_refresh"
KEY_BROWSER_STOP = "browser_stop"
KEY_BROWSER_SEARCH = "browser_search"
KEY_BROWSER_FAVORITES = "browser_favorites"
KEY_BROWSER_HOME = "browser_home"
| FichteFoll/CSScheme | my_sublime_lib/constants.py | Python | mit | 2,230 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import frappe
import json
from frappe.model.document import Document
from frappe.utils import get_fullname, parse_addr
exclude_from_linked_with = True
class ToDo(Document):
	"""Assignment record: mirrors its state into the referenced document's
	_assign field and posts assignment comments on that document."""

	# Doctype name used by class-level helpers such as get_owners()
	DocType = 'ToDo'

	def validate(self):
		"""Prepare the assignment/removal comment that on_update will post."""
		self._assignment = None
		if self.is_new():
			# New record: note who assigned whom (or a self-assignment)
			if self.assigned_by == self.allocated_to:
				assignment_message = frappe._("{0} self assigned this task: {1}").format(get_fullname(self.assigned_by), self.description)
			else:
				assignment_message = frappe._("{0} assigned {1}: {2}").format(get_fullname(self.assigned_by), get_fullname(self.allocated_to), self.description)

			self._assignment = {
				"text": assignment_message,
				"comment_type": "Assigned"
			}

		else:
			# NOTE the previous value is only available in validate method
			if self.get_db_value("status") != self.status:
				if self.allocated_to == frappe.session.user:
					removal_message = frappe._("{0} removed their assignment.").format(
						get_fullname(frappe.session.user))
				else:
					removal_message = frappe._("Assignment of {0} removed by {1}").format(
						get_fullname(self.allocated_to), get_fullname(frappe.session.user))

				self._assignment = {
					"text": removal_message,
					"comment_type": "Assignment Completed"
				}

	def on_update(self):
		# Post the comment prepared in validate(), then sync _assign
		if self._assignment:
			self.add_assign_comment(**self._assignment)

		self.update_in_reference()

	def on_trash(self):
		# Drop links to this ToDo, then refresh the reference's _assign list
		self.delete_communication_links()
		self.update_in_reference()

	def add_assign_comment(self, text, comment_type):
		"""Add a comment of *comment_type* on the referenced document."""
		if not (self.reference_type and self.reference_name):
			return

		frappe.get_doc(self.reference_type, self.reference_name).add_comment(comment_type, text)

	def delete_communication_links(self):
		# unlink todo from linked comments
		return frappe.db.delete("Communication Link", {
			"link_doctype": self.doctype,
			"link_name": self.name
		})

	def update_in_reference(self):
		"""Rewrite the referenced document's _assign JSON list from all
		non-cancelled ToDos that point at it."""
		if not (self.reference_type and self.reference_name):
			return

		try:
			assignments = frappe.get_all("ToDo", filters={
				"reference_type": self.reference_type,
				"reference_name": self.reference_name,
				"status": ("!=", "Cancelled")
			}, pluck="allocated_to")
			assignments.reverse()

			frappe.db.set_value(self.reference_type, self.reference_name,
				"_assign", json.dumps(assignments), update_modified=False)

		except Exception as e:
			if frappe.db.is_table_missing(e) and frappe.flags.in_install:
				# no table
				return

			elif frappe.db.is_column_missing(e):
				# Create the missing _assign column, then retry once
				from frappe.database.schema import add_column
				add_column(self.reference_type, "_assign", "Text")
				self.update_in_reference()

			else:
				raise

	@classmethod
	def get_owners(cls, filters=None):
		"""Returns list of owners after applying filters on todo's.
		"""
		rows = frappe.get_all(cls.DocType, filters=filters or {}, fields=['allocated_to'])
		return [parse_addr(row.allocated_to)[1] for row in rows if row.allocated_to]
# NOTE: todo is viewable if a user is an owner, or set as assigned_to value, or has any role that is allowed to access ToDo doctype.
def on_doctype_update():
	"""Add a composite index so _assign lookups by reference are fast."""
	frappe.db.add_index("ToDo", ["reference_type", "reference_name"])
def get_permission_query_conditions(user):
	"""Return an SQL filter restricting ToDo listings to the user's own
	records (assignee or assigner), or None when the user's roles grant
	unrestricted ToDo access."""
	if not user:
		user = frappe.session.user

	allowed_roles = frappe.permissions.get_doctype_roles('ToDo')
	if 'All' in allowed_roles:
		allowed_roles.remove('All')

	for role in frappe.get_roles(user):
		if role in allowed_roles:
			return None

	return """(`tabToDo`.allocated_to = {user} or `tabToDo`.assigned_by = {user})"""\
		.format(user=frappe.db.escape(user))
def has_permission(doc, ptype="read", user=None):
	"""Grant access when the user holds a ToDo role for *ptype*; otherwise
	only the assignee (allocated_to) or assigner (assigned_by) of *doc*."""
	user = user or frappe.session.user

	allowed_roles = frappe.permissions.get_doctype_roles('ToDo', ptype)
	if 'All' in allowed_roles:
		allowed_roles.remove('All')

	for role in frappe.get_roles(user):
		if role in allowed_roles:
			return True

	return doc.allocated_to == user or doc.assigned_by == user
@frappe.whitelist()
def new_todo(description):
	"""Whitelisted helper: create and insert a ToDo with *description*."""
	frappe.get_doc({
		'doctype': 'ToDo',
		'description': description
	}).insert()
| frappe/frappe | frappe/desk/doctype/todo/todo.py | Python | mit | 4,119 |
'''
Created on Mar 4, 2017
@author: preiniger
'''
def __validate_alliance(alliance_color, teams, official_sr):
team1sr = None
team2sr = None
team3sr = None
# TODO: there has to be a better way... but I'd rather not touch the DB
for sr in teams[0].scoreresult_set.all():
if sr.match.matchNumber == official_sr.official_match.matchNumber:
team1sr = sr
break
for sr in teams[1].scoreresult_set.all():
if sr.match.matchNumber == official_sr.official_match.matchNumber:
team2sr = sr
break
for sr in teams[2].scoreresult_set.all():
if sr.match.matchNumber == official_sr.official_match.matchNumber:
team3sr = sr
break
team_srs = [team1sr, team2sr, team3sr]
team_srs = [sr for sr in team_srs if sr != None]
warning_messages = []
error_messages = []
for team in teams:
if team != official_sr.team1 and team != official_sr.team2 and team != official_sr.team3:
error_messages.append((alliance_color + " team mismatch", teams, team.teamNumber))
if len(team_srs) != 3:
error_messages.append((alliance_color + " wrong number of teams", 3, len(team_srs)))
tele_high_tubes = 0
tele_mid_tubes = 0
tele_low_tubes = 0
for sr in team_srs:
tele_high_tubes += sr.high_tubes_hung
tele_mid_tubes += sr.mid_tubes_hung
tele_low_tubes += sr.low_tubes_hung
total_score = tele_high_tubes * 3 + tele_mid_tubes * 2 + tele_low_tubes
if total_score != official_sr.total_score:
warning_messages.append((alliance_color + " total score", official_sr.total_score, total_score))
return warning_messages, error_messages
def validate_match(match, official_match, official_srs):
error_level = 0
warning_messages = []
error_messages = []
red_teams = [match.red1, match.red2, match.red3]
blue_teams = [match.blue1, match.blue2, match.blue3]
red_sr = official_srs[0]
blue_sr = official_srs[1]
red_warning, red_error = __validate_alliance("Red", red_teams, red_sr)
blue_warning, blue_error = __validate_alliance("Blue", blue_teams, blue_sr)
warning_messages.extend(red_warning)
warning_messages.extend(blue_warning)
error_messages.extend(red_error)
error_messages.extend(blue_error)
if len(error_messages) != 0:
error_level = 2
elif len(warning_messages) != 0:
error_level = 1
return error_level, warning_messages, error_messages
| ArcticWarriors/scouting-app | ScoutingWebsite/Scouting2011/model/validate_match.py | Python | mit | 2,608 |
import json
import requests
import key
API_key = key.getAPIkey()
#load all champion pictures
def load_champion_pictures(champion_json):
print len(champion_json['data'])
version = champion_json['version']
print "version: " + version
for champion in champion_json['data']:
print champion
r = requests.get('http://ddragon.leagueoflegends.com/cdn/' + version + '/img/champion/' + champion + '.png')
if r.status_code == 200:
img = r.content
with open('static/images/champions/' + champion_json['data'][champion]['name'] + '.png', 'w') as f:
f.write(img)
print "img created"
else:
print "pictures: something went wrong"
#load champion json
#converts to python dict using json() and json.dump() for error checking
def load_champion_json():
    """Fetch the champion list from the Riot static-data API, download all
    champion icons, then write a sorted champion.json. Python 2 only.
    """
    try:
        r = requests.get('https://global.api.pvp.net/api/lol/static-data/na/v1.2/champion?&api_key=' + API_key)
        champion_json = r.json()
        # The API reports errors via a 'status' object instead of data
        if 'status' in champion_json:
            print champion_json['status']['message']
            return
        load_champion_pictures(champion_json)
        # quick fix to change MonkeyKing to Wukong so that sort_keys sorts it properly
        champion_json['data']['Wukong'] = champion_json['data']['MonkeyKing']
        del champion_json['data']['MonkeyKing']
    except ValueError as e:
        # r.json() raises ValueError when the response is not valid JSON
        print e.message
        return
    with open('static/json/champion.json', 'w') as f:
        json.dump(champion_json, f, sort_keys=True)
load_champion_json()
| dzhang55/riftwatch | static_images.py | Python | mit | 1,397 |
#!/usr/bin/env python
from util import nodeenv_delegate
from setup import setup
if __name__ == "__main__":
    # Prepare the project environment (dependencies assumed already
    # installed), then hand control to npx inside the managed node env.
    setup(skip_dependencies=True)
    nodeenv_delegate("npx")
# -*- coding: utf-8 -*-
from math import floor
from typing import (
Tuple,
Any
)
from PyQt5.QtCore import (
QPointF,
QRectF,
Qt
)
from PyQt5.QtGui import (
QBrush,
QPen,
QPainterPath,
QPolygonF,
QMouseEvent,
QPainter
)
from PyQt5.QtWidgets import (
qApp,
QGraphicsItem,
QGraphicsPathItem,
QGraphicsRectItem,
QGraphicsEllipseItem,
QStyleOptionGraphicsItem,
QWidget,
QGraphicsSceneMouseEvent,
QGraphicsSceneHoverEvent
)
from cadnano.gui.palette import getColorObj
from cadnano.views.pathview import pathstyles as styles
from cadnano.views.pathview.tools.pathselection import SelectionItemGroup
from cadnano.views.pathview import (
PathVirtualHelixItemT,
PathXoverItemT,
PathStrandItemT,
PathNucleicAcidPartItemT
)
from cadnano.cntypes import (
StrandT,
DocT,
Vec2T,
WindowT
)
_BASE_WIDTH = styles.PATH_BASE_WIDTH

# Pre-built glyph paths for endpoint caps, keyed by cap side and strand
# polarity. Squares mark 5' ends, triangles mark 3' ends; the *_53 / *_35
# paths are the fused single-base glyphs (square + triangle in one path).
PP_L5 = QPainterPath()  # Left 5' PainterPath
PP_R5 = QPainterPath()  # Right 5' PainterPath
PP_L3 = QPainterPath()  # Left 3' PainterPath
PP_R3 = QPainterPath()  # Right 3' PainterPath
PP_53 = QPainterPath()  # Left 5', Right 3' PainterPath
PP_35 = QPainterPath()  # Left 3', Right 5' PainterPath
# set up PP_L5 (left 5' blue square)
PP_L5.addRect(0.25 * _BASE_WIDTH,
              0.125 * _BASE_WIDTH,
              0.75 * _BASE_WIDTH,
              0.75 * _BASE_WIDTH)
# set up PP_R5 (right 5' blue square)
PP_R5.addRect(0, 0.125 * _BASE_WIDTH, 0.75 * _BASE_WIDTH, 0.75 * _BASE_WIDTH)
# set up PP_L3 (left 3' blue triangle, apex pointing left)
L3_POLY = QPolygonF()
L3_POLY.append(QPointF(_BASE_WIDTH, 0))
L3_POLY.append(QPointF(0.25 * _BASE_WIDTH, 0.5 * _BASE_WIDTH))
L3_POLY.append(QPointF(_BASE_WIDTH, _BASE_WIDTH))
L3_POLY.append(QPointF(_BASE_WIDTH, 0))
PP_L3.addPolygon(L3_POLY)
# set up PP_R3 (right 3' blue triangle, apex pointing right)
R3_POLY = QPolygonF()
R3_POLY.append(QPointF(0, 0))
R3_POLY.append(QPointF(0.75 * _BASE_WIDTH, 0.5 * _BASE_WIDTH))
R3_POLY.append(QPointF(0, _BASE_WIDTH))
R3_POLY.append(QPointF(0, 0))
PP_R3.addPolygon(R3_POLY)
# single base left 5'->3' (square on the left, right-pointing triangle)
PP_53.addRect(0, 0.125 * _BASE_WIDTH, 0.5 * _BASE_WIDTH, 0.75 * _BASE_WIDTH)
POLY_53 = QPolygonF()
POLY_53.append(QPointF(0.5 * _BASE_WIDTH, 0))
POLY_53.append(QPointF(_BASE_WIDTH, 0.5 * _BASE_WIDTH))
POLY_53.append(QPointF(0.5 * _BASE_WIDTH, _BASE_WIDTH))
PP_53.addPolygon(POLY_53)
# single base left 3'<-5' (square on the right, left-pointing triangle)
PP_35.addRect(0.50 * _BASE_WIDTH,
              0.125 * _BASE_WIDTH,
              0.5 * _BASE_WIDTH,
              0.75 * _BASE_WIDTH)
POLY_35 = QPolygonF()
POLY_35.append(QPointF(0.5 * _BASE_WIDTH, 0))
POLY_35.append(QPointF(0, 0.5 * _BASE_WIDTH))
POLY_35.append(QPointF(0.5 * _BASE_WIDTH, _BASE_WIDTH))
PP_35.addPolygon(POLY_35)

_DEFAULT_RECT = QRectF(0, 0, _BASE_WIDTH, _BASE_WIDTH)
_NO_PEN = QPen(Qt.NoPen)
# modification indicator: small circle centered above the base square
MOD_RECT = QRectF(.25*_BASE_WIDTH, -.25*_BASE_WIDTH, 0.5*_BASE_WIDTH, 0.5*_BASE_WIDTH)
class EndpointItem(QGraphicsPathItem):
    """Path-view graphics item for a single strand endpoint cap.

    Renders the 5' (square) or 3' (triangle) glyph at the low or high end
    of a :class:`StrandItem`, routes mouse and hover events to the active
    tool via ``<prefix>Mouse{Press,Move,Release}`` dispatch, and cooperates
    with the view root's selection group for selection and dragging.
    """
    FILTER_NAME = "endpoint"

    def __init__(self, strand_item: PathStrandItemT,
                 cap_type: str,  # low, high, dual
                 is_drawn5to3: bool):
        """The parent should be a StrandItem."""
        super(EndpointItem, self).__init__(strand_item.virtualHelixItem())
        self._strand_item = strand_item
        self._getActiveTool = strand_item._getActiveTool
        self.cap_type = cap_type
        self._low_drag_bound = None   # set by selectToolMousePress
        self._high_drag_bound = None  # set by selectToolMousePress
        self._mod_item = None
        self._isdrawn5to3 = is_drawn5to3
        self._initCapSpecificState(is_drawn5to3)
        p = QPen()
        p.setCosmetic(True)
        self.setPen(p)
        # invisible full-base rect child for an easier mouse-click target;
        # its events are forwarded to this item's handlers
        self._click_area = cA = QGraphicsRectItem(_DEFAULT_RECT, self)
        self._click_area.setAcceptHoverEvents(True)
        cA.hoverMoveEvent = self.hoverMoveEvent
        cA.mousePressEvent = self.mousePressEvent
        cA.mouseMoveEvent = self.mouseMoveEvent
        cA.setPen(_NO_PEN)
        self.setFlag(QGraphicsItem.ItemIsSelectable)
    # end def

    ### SIGNALS ###

    ### SLOTS ###

    ### ACCESSORS ###
    def idx(self) -> int:
        """Look up ``base_idx``, as determined by :class:`StrandItem` idxs and
        cap type."""
        if self.cap_type == 'low':
            return self._strand_item.idxs()[0]
        else:  # high or dual, doesn't matter
            return self._strand_item.idxs()[1]
    # end def

    def partItem(self) -> PathNucleicAcidPartItemT:
        """Return the part item this endpoint's strand belongs to."""
        return self._strand_item.partItem()
    # end def

    def disableEvents(self):
        """Stop reacting to hover/mouse input (revert to base-class no-ops)."""
        self._click_area.setAcceptHoverEvents(False)
        self.mouseMoveEvent = QGraphicsPathItem.mouseMoveEvent
        self.mousePressEvent = QGraphicsPathItem.mousePressEvent
    # end def

    def window(self) -> WindowT:
        """Return the document window, via the parent strand item."""
        return self._strand_item.window()

    ### PUBLIC METHODS FOR DRAWING / LAYOUT ###
    def updatePosIfNecessary(self, idx: int) -> Tuple[bool, SelectionItemGroup]:
        """Update position if necessary and return ``(moved, group)``.

        ``moved`` is ``True`` when the x position actually changed. The item
        is temporarily reparented out of its selection group (if any); the
        caller is responsible for re-adding it to the returned ``group``.
        """
        group = self.group()
        self.tempReparent()
        x = int(idx * _BASE_WIDTH)
        if x != self.x():
            self.setPos(x, self.y())
            # if group:
            #     group.addToGroup(self)
            return True, group
        else:
            # if group:
            #     group.addToGroup(self)
            return False, group

    def safeSetPos(self, x: float, y: float):
        """
        Required to ensure proper reparenting if selected
        """
        group = self.group()
        self.tempReparent()
        self.setPos(x, y)
        if group:
            group.addToGroup(self)
    # end def

    def resetEndPoint(self, is_drawn5to3: bool):
        """Re-parent to the virtual helix item and redraw for a new polarity."""
        self.setParentItem(self._strand_item.virtualHelixItem())
        self._initCapSpecificState(is_drawn5to3)
        # forward strands sit on the top row (y == 0), reverse on the bottom
        upperLeftY = 0 if is_drawn5to3 else _BASE_WIDTH
        self.setY(upperLeftY)
    # end def

    def showMod(self, mod_id: str, color: str):
        """Create and show the small modification indicator circle."""
        self._mod_item = QGraphicsEllipseItem(MOD_RECT, self)
        self.changeMod(mod_id, color)
        self._mod_item.show()
        # print("Showing {}".format(mod_id))
    # end def

    def changeMod(self, mod_id: str, color: str):
        """Update the modification id and indicator color."""
        self._mod_id = mod_id
        self._mod_item.setBrush(QBrush(getColorObj(color)))
    # end def

    def destroyMod(self):
        """Remove the modification indicator from the scene."""
        self.scene().removeItem(self._mod_item)
        self._mod_item = None
        self._mod_id = None
    # end def

    def destroyItem(self):
        '''Remove this object and references to it from the view
        '''
        scene = self.scene()
        if self._mod_item is not None:
            self.destroyMod()
        scene.removeItem(self._click_area)
        self._click_area = None
        scene.removeItem(self)
    # end def

    ### PRIVATE SUPPORT METHODS ###
    def _initCapSpecificState(self, is_drawn5to3: bool):
        """Select the glyph path matching this cap type and strand polarity.

        Raises:
            ValueError: if ``self.cap_type`` is not 'low', 'high' or 'dual'.
        """
        c_t = self.cap_type
        if c_t == 'low':
            path = PP_L5 if is_drawn5to3 else PP_L3
        elif c_t == 'high':
            path = PP_R3 if is_drawn5to3 else PP_R5
        elif c_t == 'dual':
            path = PP_53 if is_drawn5to3 else PP_35
        else:
            # previously fell through to an opaque NameError on `path`
            raise ValueError("unknown cap_type %r" % (c_t,))
        self.setPath(path)
    # end def

    ### EVENT HANDLERS ###
    def mousePressEvent(self, event: QGraphicsSceneMouseEvent):
        """Parses a :meth:`mousePressEvent`, calling the appropriate tool
        method as necessary. Stores ``_move_idx`` for future comparison.
        """
        self.scene().views()[0].addToPressList(self)
        idx = self._strand_item.setActiveEndpoint(self.cap_type)
        self._move_idx = idx
        active_tool_str = self._getActiveTool().methodPrefix()
        tool_method_name = active_tool_str + "MousePress"
        if hasattr(self, tool_method_name):  # if the tool method exists
            modifiers = event.modifiers()
            getattr(self, tool_method_name)(modifiers, event, self.idx())

    def hoverLeaveEvent(self, event: QGraphicsSceneHoverEvent):
        self._strand_item.hoverLeaveEvent(event)
    # end def

    def hoverMoveEvent(self, event: QGraphicsSceneHoverEvent):
        """Parses a :meth:`hoverMoveEvent`, calling the appropriate tool
        method as necessary.
        """
        vhi_num = self._strand_item.idNum()
        oligo_length = self._strand_item._model_strand.oligo().length()
        msg = "%d[%d]\tlength: %d" % (vhi_num, self.idx(), oligo_length)
        self.partItem().updateStatusBar(msg)
        active_tool_str = self._getActiveTool().methodPrefix()
        if active_tool_str == 'createTool':
            return self._strand_item.createToolHoverMove(event, self.idx())
        elif active_tool_str == 'addSeqTool':
            return self.addSeqToolHoverMove(event, self.idx())

    def mouseMoveEvent(self, event: QGraphicsSceneMouseEvent):
        """Parses a :meth:`mouseMoveEvent`, calling the appropriate tool
        method as necessary. Updates ``_move_idx`` if it changed.
        """
        tool_method_name = self._getActiveTool().methodPrefix() + "MouseMove"
        if hasattr(self, tool_method_name):  # if the tool method exists
            idx = int(floor((self.x() + event.pos().x()) / _BASE_WIDTH))
            if idx != self._move_idx:  # did we actually move?
                modifiers = event.modifiers()
                self._move_idx = idx
                getattr(self, tool_method_name)(modifiers, idx)

    def customMouseRelease(self, event: QMouseEvent):
        """Parses a :meth:`mouseReleaseEvent` from view, calling the appropriate
        tool method as necessary. Deletes ``_move_idx`` if necessary.
        """
        tool_method_name = self._getActiveTool().methodPrefix() + "MouseRelease"
        if hasattr(self, tool_method_name):  # if the tool method exists
            modifiers = event.modifiers()
            x = event.pos().x()
            getattr(self, tool_method_name)(modifiers, x)  # call tool method
        if hasattr(self, '_move_idx'):
            del self._move_idx

    ### TOOL METHODS ###
    def modsToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                           event: QGraphicsSceneMouseEvent,
                           idx: int):
        """
        Checks that a scaffold was clicked, and then calls apply sequence
        to the clicked strand via its oligo.
        """
        m_strand = self._strand_item._model_strand
        self._getActiveTool().applyMod(m_strand, idx)
    # end def

    def breakToolMouseRelease(self, modifiers: Qt.KeyboardModifiers,
                              x):
        """Shift-click to merge without switching back to select tool."""
        m_strand = self._strand_item._model_strand
        if modifiers & Qt.ShiftModifier:
            m_strand.merge(self.idx())
    # end def

    def eraseToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                            event: QGraphicsSceneMouseEvent,
                            idx: int):
        """Erase the strand."""
        m_strand = self._strand_item._model_strand
        m_strand.strandSet().removeStrand(m_strand)
    # end def

    def insertionToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                                event: QGraphicsSceneMouseEvent,
                                idx: int):
        """Add an insert to the strand if possible."""
        m_strand = self._strand_item._model_strand
        m_strand.addInsertion(idx, 1)
    # end def

    def paintToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                            event: QGraphicsSceneMouseEvent,
                            idx: int):
        """Apply the current palette color to the clicked strand's oligo."""
        m_strand = self._strand_item._model_strand
        if qApp.keyboardModifiers() & Qt.ShiftModifier:
            color = self.window().path_color_panel.shiftColorName()
        else:
            color = self.window().path_color_panel.colorName()
        m_strand.oligo().applyColor(color)
    # end def

    def addSeqToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                             event: QGraphicsSceneMouseEvent,
                             idx: int):
        """Apply the active tool's sequence to the clicked strand's oligo."""
        oligo = self._strand_item._model_strand.oligo()
        add_seq_tool = self._getActiveTool()
        add_seq_tool.applySequence(oligo)
    # end def

    def addSeqToolHoverMove(self, event: QGraphicsSceneHoverEvent,
                            idx: int):
        """Forward hover to the add-seq tool with this strand's polarity."""
        # m_strand = self._model_strand
        # vhi = self._strand_item._virtual_helix_item
        add_seq_tool = self._getActiveTool()
        add_seq_tool.hoverMove(self, event, flag=self._isdrawn5to3)
    # end def

    def addSeqToolHoverLeave(self, event: QGraphicsSceneHoverEvent):
        self._getActiveTool().hoverLeaveEvent(event)
    # end def

    def createToolHoverMove(self, idx: int):
        """Create the strand if possible."""
        m_strand = self._strand_item._model_strand
        vhi = self._strand_item._virtual_helix_item
        active_tool = self._getActiveTool()
        if not active_tool.isFloatingXoverBegin():
            temp_xover = active_tool.floatingXover()
            temp_xover.updateFloatingFromStrandItem(vhi, m_strand, idx)
    # end def

    def createToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                             event: QGraphicsSceneMouseEvent,
                             idx: int):
        """Break the strand if possible (begin or complete a crossover)."""
        m_strand = self._strand_item._model_strand
        vhi = self._strand_item._virtual_helix_item
        active_tool = self._getActiveTool()
        if active_tool.isFloatingXoverBegin():
            # starting a crossover: not allowed from the 5' end of a strand
            if m_strand.idx5Prime() == idx:
                return
            else:
                temp_xover = active_tool.floatingXover()
                temp_xover.updateBase(vhi, m_strand, idx)
                active_tool.setFloatingXoverBegin(False)
        else:
            active_tool.setFloatingXoverBegin(True)
            # install Xover
            active_tool.attemptToCreateXover(vhi, m_strand, idx)
    # end def

    def selectToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                             event: QGraphicsSceneMouseEvent,
                             idx: int):
        """Set the allowed drag bounds for use by selectToolMouseMove.
        """
        # print("%s.%s [%d]" % (self, util.methodName(), self.idx()))
        self._low_drag_bound, self._high_drag_bound = self._strand_item._model_strand.getResizeBounds(self.idx())
        s_i = self._strand_item
        viewroot = s_i.viewroot()
        current_filter_set = viewroot.selectionFilterSet()
        if (all(f in current_filter_set for f in s_i.strandFilter()) and self.FILTER_NAME in current_filter_set):
            selection_group = viewroot.strandItemSelectionGroup()
            mod = Qt.MetaModifier
            # without the Meta modifier, a click replaces the selection
            if not (modifiers & mod):
                selection_group.clearSelection(False)
            selection_group.setSelectionLock(selection_group)
            selection_group.pendToAdd(self)
            selection_group.processPendingToAddList()
            return selection_group.mousePressEvent(event)
    # end def

    def selectToolMouseMove(self, modifiers: Qt.KeyboardModifiers, idx: int):
        """
        Given a new index (pre-validated as different from the prev index),
        calculate the new x coordinate for self, move there, and notify the
        parent strandItem to redraw its horizontal line.
        """
        # intentionally a no-op: dragging is handled by the selection group
    # end def

    def selectToolMouseRelease(self, modifiers: Qt.KeyboardModifiers, x):
        """
        If the positional-calculated idx differs from the model idx, it means
        we have moved and should notify the model to resize.
        If the mouse event had a key modifier, perform special actions:
        shift = attempt to merge with a neighbor
        alt = extend to max drag bound
        """
        m_strand = self._strand_item._model_strand
        if modifiers & Qt.ShiftModifier:
            self.setSelected(False)
            self.restoreParent()
            m_strand.merge(self.idx())
    # end def

    def skipToolMousePress(self, modifiers: Qt.KeyboardModifiers,
                           event: QGraphicsSceneMouseEvent,
                           idx: int):
        """Add a skip (negative-length insertion) to the strand if possible."""
        m_strand = self._strand_item._model_strand
        m_strand.addInsertion(idx, -1)
    # end def

    def restoreParent(self, pos: QPointF = None):
        """
        Required to restore parenting and positioning in the partItem
        """
        # map the position
        self.tempReparent(pos=pos)
        self.setSelectedColor(False)
        self.setSelected(False)
    # end def

    def tempReparent(self, pos: QPointF = None):
        """Reparent to the virtual helix item, preserving scene position."""
        vh_item = self._strand_item.virtualHelixItem()
        if pos is None:
            pos = self.scenePos()
        self.setParentItem(vh_item)
        temp_point = vh_item.mapFromScene(pos)
        self.setPos(temp_point)
    # end def

    def setSelectedColor(self, use_default: bool):
        """Set the brush to the selection color, or back to the oligo color."""
        if use_default:
            color = getColorObj(styles.SELECTED_COLOR)
        else:
            oligo = self._strand_item.strand().oligo()
            if oligo.shouldHighlight():
                color = getColorObj(oligo.getColor(), alpha=128)
            else:
                color = getColorObj(oligo.getColor())
        brush = self.brush()
        brush.setColor(color)
        self.setBrush(brush)
    # end def

    def updateHighlight(self, brush: QBrush):
        """Adopt ``brush`` unless this item is selected (selection wins)."""
        if not self.isSelected():
            self.setBrush(brush)
    # end def

    def itemChange(self, change: QGraphicsItem.GraphicsItemChange,
                   value: Any) -> bool:
        """Used for selection of the :class:`EndpointItem`

        Args:
            change: parameter that is changing
            value : new value whose type depends on the ``change`` argument

        Returns:
            If the change is a ``QGraphicsItem.ItemSelectedChange``::

                ``True`` if selected, other ``False``

            Otherwise default to :meth:`QGraphicsPathItem.itemChange()` result
        """
        # for selection changes test against QGraphicsItem.ItemSelectedChange
        # intercept the change instead of the has changed to enable features.
        if change == QGraphicsItem.ItemSelectedChange and self.scene():
            active_tool = self._getActiveTool()
            if str(active_tool) == "select_tool":
                s_i = self._strand_item
                viewroot = s_i.viewroot()
                current_filter_set = viewroot.selectionFilterSet()
                selection_group = viewroot.strandItemSelectionGroup()
                # only add if the selection_group is not locked out
                if value and self.FILTER_NAME in current_filter_set:
                    if all(f in current_filter_set for f in s_i.strandFilter()):
                        if self.group() != selection_group or not self.isSelected():
                            selection_group.pendToAdd(self)
                            selection_group.setSelectionLock(selection_group)
                            self.setSelectedColor(True)
                        return True
                    else:
                        return False
                # end if
                elif value:
                    # filter blocked the selection: don't select
                    return False
                else:
                    # Deselect
                    # print("deselect ep")
                    # Check if strand is being added to the selection group still
                    if not selection_group.isPending(self._strand_item):
                        selection_group.pendToRemove(self)
                        self.tempReparent()
                        self.setSelectedColor(False)
                        return False
                    else:  # don't deselect, because the strand is still selected
                        return True
                # end else
            # end if
            elif str(active_tool) == "paint_tool":
                s_i = self._strand_item
                viewroot = s_i.viewroot()
                current_filter_set = viewroot.selectionFilterSet()
                if all(f in current_filter_set for f in s_i.strandFilter()):
                    if not active_tool.isMacrod():
                        active_tool.setMacrod()
                    self.paintToolMousePress(None, None, None)
            # end elif
            return False
        # end if
        return QGraphicsPathItem.itemChange(self, change, value)
    # end def

    def modelDeselect(self, document: DocT):
        """A strand is selected based on whether its low or high endpoints
        are selected. this value is a tuple ``(is_low, is_high)`` of booleans
        """
        strand = self._strand_item.strand()
        test = document.isModelStrandSelected(strand)
        low_val, high_val = document.getSelectedStrandValue(strand) if test else (False, False)
        # clear only this cap's flag; keep the opposite cap's state
        if self.cap_type == 'low':
            out_value = (False, high_val)
        else:
            out_value = (low_val, False)
        if not out_value[0] and not out_value[1] and test:
            document.removeStrandFromSelection(strand)
        elif out_value[0] or out_value[1]:
            document.addStrandToSelection(strand, out_value)
        self.restoreParent()
    # end def

    def modelSelect(self, document: DocT):
        """A strand is selected based on whether its low or high endpoints
        are selected. this value is a tuple ``(is_low, is_high)`` of booleans
        """
        strand = self._strand_item.strand()
        test = document.isModelStrandSelected(strand)
        low_val, high_val = document.getSelectedStrandValue(strand) if test else (False, False)
        # set only this cap's flag; keep the opposite cap's state
        if self.cap_type == 'low':
            out_value = (True, high_val)
        else:
            out_value = (low_val, True)
        self.setSelected(True)
        self.setSelectedColor(True)
        document.addStrandToSelection(strand, out_value)
    # end def

    def paint(self, painter: QPainter,
              option: QStyleOptionGraphicsItem,
              widget: QWidget):
        """Draw the cap glyph with the item's current pen and brush."""
        painter.setPen(self.pen())
        painter.setBrush(self.brush())
        painter.drawPath(self.path())
    # end def
| scholer/cadnano2.5 | cadnano/views/pathview/strand/endpointitem.py | Python | mit | 22,478 |
Subsets and Splits