| code (stringlengths 3–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 3–1.05M) |
---|---|---|---|---|---|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library containing Tokenizer definitions.
The RougeScorer class can be instantiated with the tokenizers defined here. New
tokenizers can be defined by creating a subclass of the Tokenizer abstract class
and overriding the tokenize() method.
"""
import abc
from nltk.stem import porter
from rouge import tokenize
class Tokenizer(abc.ABC):
"""Abstract base class for a tokenizer.
Subclasses of Tokenizer must implement the tokenize() method.
"""
@abc.abstractmethod
def tokenize(self, text):
raise NotImplementedError("Tokenizer must override tokenize() method")
class DefaultTokenizer(Tokenizer):
"""Default tokenizer which tokenizes on whitespace."""
def __init__(self, use_stemmer=False):
"""Constructor for DefaultTokenizer.
Args:
use_stemmer: boolean, indicating whether Porter stemmer should be used to
strip word suffixes to improve matching.
"""
self._stemmer = porter.PorterStemmer() if use_stemmer else None
def tokenize(self, text):
return tokenize.tokenize(text, self._stemmer)
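# A minimal sketch (not part of the original module) of the subclassing
# pattern the module docstring describes; the class name is hypothetical.
class WhitespaceTokenizer(Tokenizer):
  """Example tokenizer that splits purely on whitespace, without stemming."""

  def tokenize(self, text):
    return text.split()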
| google-research/google-research | rouge/tokenizers.py | Python | apache-2.0 | 1,661 |
import asyncio
import io
import pytest
import asynctest
import mitmproxy.io
from mitmproxy import exceptions
from mitmproxy.addons import readfile
from mitmproxy.test import taddons
from mitmproxy.test import tflow
@pytest.fixture
def data():
f = io.BytesIO()
w = mitmproxy.io.FlowWriter(f)
flows = [
tflow.tflow(resp=True),
tflow.tflow(err=True),
tflow.ttcpflow(),
tflow.ttcpflow(err=True)
]
for flow in flows:
w.add(flow)
f.seek(0)
return f
@pytest.fixture
def corrupt_data(data):
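    # Take a valid flow dump and append junk bytes to the end, so readers
    # succeed on the leading flows but fail partway through the file.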
f = io.BytesIO(data.getvalue())
f.seek(0, io.SEEK_END)
f.write(b"qibble")
f.seek(0)
return f
class TestReadFile:
def test_configure(self):
rf = readfile.ReadFile()
with taddons.context(rf) as tctx:
tctx.configure(rf, readfile_filter="~q")
with pytest.raises(Exception, match="Invalid readfile filter"):
tctx.configure(rf, readfile_filter="~~")
@pytest.mark.asyncio
async def test_read(self, tmpdir, data, corrupt_data):
rf = readfile.ReadFile()
with taddons.context(rf) as tctx:
assert not rf.reading()
tf = tmpdir.join("tfile")
with asynctest.patch('mitmproxy.master.Master.load_flow') as mck:
tf.write(data.getvalue())
tctx.configure(
rf,
                    rfile=str(tf),
                    readfile_filter=".*"
)
assert not mck.awaited
rf.running()
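                # yield to the event loop once so the flow-loading task
                # scheduled by running() gets a chance to execute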
await asyncio.sleep(0)
assert mck.awaited
tf.write(corrupt_data.getvalue())
tctx.configure(rf, rfile=str(tf))
rf.running()
assert await tctx.master.await_log("corrupted")
@pytest.mark.asyncio
async def test_corrupt(self, corrupt_data):
rf = readfile.ReadFile()
with taddons.context(rf) as tctx:
with pytest.raises(exceptions.FlowReadException):
await rf.load_flows(io.BytesIO(b"qibble"))
tctx.master.clear()
with pytest.raises(exceptions.FlowReadException):
await rf.load_flows(corrupt_data)
assert await tctx.master.await_log("file corrupted")
@pytest.mark.asyncio
async def test_nonexistent_file(self):
rf = readfile.ReadFile()
with taddons.context(rf) as tctx:
with pytest.raises(exceptions.FlowReadException):
await rf.load_flows_from_path("nonexistent")
assert await tctx.master.await_log("nonexistent")
class TestReadFileStdin:
@asynctest.patch('sys.stdin')
@pytest.mark.asyncio
async def test_stdin(self, stdin, data, corrupt_data):
rf = readfile.ReadFileStdin()
with taddons.context(rf):
with asynctest.patch('mitmproxy.master.Master.load_flow') as mck:
stdin.buffer = data
assert not mck.awaited
await rf.load_flows(stdin.buffer)
assert mck.awaited
stdin.buffer = corrupt_data
with pytest.raises(exceptions.FlowReadException):
await rf.load_flows(stdin.buffer)
@pytest.mark.asyncio
async def test_normal(self, tmpdir, data):
rf = readfile.ReadFileStdin()
with taddons.context(rf) as tctx:
tf = tmpdir.join("tfile")
with asynctest.patch('mitmproxy.master.Master.load_flow') as mck:
tf.write(data.getvalue())
tctx.configure(rf, rfile=str(tf))
assert not mck.awaited
rf.running()
await asyncio.sleep(0)
assert mck.awaited
| vhaupert/mitmproxy | test/mitmproxy/addons/test_readfile.py | Python | mit | 3,747 |
"""Generic builder."""
from regolith.htmlbuilder import HtmlBuilder
BUILDERS = {
'html': HtmlBuilder,
}
def builder(btype, rc):
"""Returns builder of the approriate type."""
return BUILDERS[btype](rc)
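# Illustrative usage (assuming `rc` is a regolith run-control object and
# that builders expose a build() method, as HtmlBuilder does):
#
#     bldr = builder('html', rc)
#     bldr.build()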
| hodger/regolith | regolith/builder.py | Python | cc0-1.0 | 221 |
# -*- coding: utf-8 -*-
import json
import six.moves.xmlrpc_client
from functools import wraps
from celery import chain, group
from celery.result import AsyncResult
from dateutil import parser
from django.contrib import messages
from django.contrib.auth import views
from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db import transaction
from django.db.models import Q
from django.http import HttpResponse, Http404, HttpResponseForbidden
from django.shortcuts import render, redirect
from appliances.api import json_response
from appliances.models import (
Provider, AppliancePool, Appliance, Group, Template, MismatchVersionMailer, User, BugQuery,
GroupShepherd)
from appliances.tasks import (appliance_power_on, appliance_power_off, appliance_suspend,
anyvm_power_on, anyvm_power_off, anyvm_suspend, anyvm_delete, delete_template_from_provider,
appliance_rename, wait_appliance_ready, mark_appliance_ready, appliance_reboot,
nuke_template_configuration)
from sprout.log import create_logger
from cfme.utils.bz import Bugzilla
from cfme.utils.providers import get_mgmt
from cfme.utils.version import Version
from wrapanapi import Openshift
def go_home(request):
return redirect(index)
def go_back_or_home(request):
ref = request.META.get('HTTP_REFERER')
if ref:
return redirect(ref)
else:
return go_home(request)
def only_authenticated(view):
@wraps(view)
def g(request, *args, **kwargs):
if not request.user.is_authenticated():
messages.error(
request, 'You need to be authenticated to access "{}"'.format(request.path))
return go_home(request)
else:
return view(request, *args, **kwargs)
if not hasattr(g, '__wrapped__'):
g.__wrapped__ = view
return g
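# Usage sketch: wrapping a view redirects anonymous users home with an
# error message instead of calling the view, e.g.
#
#     @only_authenticated
#     def some_view(request):
#         ...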
def logout(request):
views.logout(request)
messages.info(request, 'You have been logged out')
return go_home(request)
def index(request):
superusers = User.objects.filter(is_superuser=True).order_by("last_name", "first_name")
return render(request, 'index.html', locals())
def providers(request, provider_id=None):
if request.user.is_staff or request.user.is_superuser:
user_filter = {}
else:
user_filter = {'user_groups__in': request.user.groups.all()}
if provider_id is None:
try:
provider = Provider.objects.filter(hidden=False, **user_filter).order_by("id")[0]
return redirect(
"specific_provider",
provider_id=provider.id)
except IndexError:
# No Provider
messages.info(request, "No provider present, redirected to the homepage.")
return go_home(request)
else:
try:
provider = Provider.objects.filter(id=provider_id, **user_filter).distinct().first()
if provider is None:
messages.error(
request,
                'Could not find a provider with ID {} that you would have access to.'.format(
provider_id))
return go_home(request)
if provider.hidden:
messages.warning(request, 'Provider {} is hidden.'.format(provider_id))
return redirect('providers')
except ObjectDoesNotExist:
messages.warning(request, "Provider '{}' does not exist.".format(provider_id))
return redirect("providers")
providers = Provider.objects.filter(hidden=False, **user_filter).order_by("id").distinct()
return render(request, 'appliances/providers.html', locals())
def provider_usage(request):
complete_usage = Provider.complete_user_usage(request.user)
return render(request, 'appliances/provider_usage.html', locals())
def templates(request, group_id=None, prov_id=None):
if request.user.is_staff or request.user.is_superuser:
user_filter = {}
user_filter_2 = {}
else:
user_filter = {'user_groups__in': request.user.groups.all()}
user_filter_2 = {'provider__user_groups__in': request.user.groups.all()}
if group_id is None:
try:
return redirect("group_templates", group_id=Group.objects.order_by("id")[0].id)
except IndexError:
# No Group
messages.info(request, "No group present, redirected to the homepage.")
return go_home(request)
else:
try:
group = Group.objects.get(id=group_id)
except ObjectDoesNotExist:
messages.warning(request, "Group '{}' does not exist.".format(group_id))
return redirect("templates")
if prov_id is not None:
try:
provider = Provider.objects.filter(id=prov_id, **user_filter).distinct().first()
except ObjectDoesNotExist:
messages.warning(request, "Provider '{}' does not exist.".format(prov_id))
return redirect("templates")
else:
provider = None
if provider is not None:
user_filter_2 = {'provider': provider}
groups = Group.objects.order_by("id")
mismatched_versions = MismatchVersionMailer.objects.order_by("id")
prepared_table = []
zstream_rowspans = {}
version_rowspans = {}
date_version_rowspans = {}
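    # Walk templates grouped by zstream, version and (date, version) while
    # counting rowspans, so each label is emitted once and spans all of its
    # rows in the rendered HTML table.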
items = group.zstreams_versions.items()
items.sort(key=lambda pair: Version(pair[0]), reverse=True)
for zstream, versions in items:
for version in versions:
for template in Template.objects.filter(
template_group=group, version=version, exists=True,
ready=True, **user_filter_2).order_by('-date', 'provider').distinct():
if zstream in zstream_rowspans:
zstream_rowspans[zstream] += 1
zstream_append = None
else:
zstream_rowspans[zstream] = 1
zstream_append = zstream
if version in version_rowspans:
version_rowspans[version] += 1
version_append = None
else:
version_rowspans[version] = 1
version_append = version
datetuple = (template.date, version)
if datetuple in date_version_rowspans:
date_version_rowspans[datetuple] += 1
date_append = None
else:
date_version_rowspans[datetuple] = 1
date_append = template.date
prepared_table.append((
zstream_append, version_append, date_append, datetuple, template.provider,
template))
return render(request, 'appliances/templates.html', locals())
@only_authenticated
def shepherd(request):
groups = Group.objects.all()
shepherds = GroupShepherd.objects.filter(
template_group__in=groups, user_group__in=request.user.groups.all()).distinct().order_by(
'template_group__id')
return render(request, 'appliances/shepherd.html', locals())
@only_authenticated
def versions_for_group(request):
group_id = request.POST.get("stream")
template_type = request.POST.get("template_type")
latest_version = None
preconfigured = request.POST.get("preconfigured", "false").lower() == "true"
if group_id == "<None>":
versions = []
group = None
else:
try:
group = Group.objects.get(id=group_id)
except ObjectDoesNotExist:
versions = []
else:
filters = {
'template_group': group,
'ready': True,
'usable': True,
'exists': True,
'provider__working': True,
'provider__disabled': False,
'provider__user_groups__in': request.user.groups.all(),
'preconfigured': preconfigured,
'template_type': template_type
}
versions = [
(version, Template.ga_version(version))
for version in Template.get_versions(**filters)]
if versions:
if versions[0][1]:
latest_version = '{} (GA)'.format(versions[0][0])
else:
latest_version = versions[0][0]
return render(request, 'appliances/_versions.html', locals())
@only_authenticated
def date_for_group_and_version(request):
group_id = request.POST.get("stream")
template_type = request.POST.get("template_type")
latest_date = None
preconfigured = request.POST.get("preconfigured", "false").lower() == "true"
if group_id == "<None>":
dates = []
else:
try:
group = Group.objects.get(id=group_id)
except ObjectDoesNotExist:
dates = []
else:
version = request.POST.get("version")
filters = {
"template_group": group,
"ready": True,
"exists": True,
"usable": True,
"preconfigured": preconfigured,
"provider__working": True,
'provider__disabled': False,
"provider__user_groups__in": request.user.groups.all(),
'template_type': template_type
}
if version == "latest":
try:
versions = Template.get_versions(**filters)
filters["version"] = versions[0]
except IndexError:
pass # No such thing as version for this template group
else:
filters["version"] = version
dates = Template.get_dates(**filters)
if dates:
latest_date = dates[0]
return render(request, 'appliances/_dates.html', locals())
@only_authenticated
def providers_for_date_group_and_version(request):
total_provisioning_slots = 0
total_appliance_slots = 0
total_shepherd_slots = 0
shepherd_appliances = {}
group_id = request.POST.get("stream")
provider_type = request.POST.get("provider_type")
template_type = request.POST.get("template_type")
if provider_type == 'any' or not provider_type:
provider_type = None
preconfigured = request.POST.get("preconfigured", "false").lower() == "true"
if group_id == "<None>":
providers = []
else:
try:
group = Group.objects.get(id=group_id)
except ObjectDoesNotExist:
providers = []
else:
version = request.POST.get("version")
filters = {
"template_group": group,
"ready": True,
"exists": True,
"usable": True,
"preconfigured": preconfigured,
"provider__working": True,
"provider__disabled": False,
"provider__user_groups__in": request.user.groups.all(),
"template_type": template_type,
}
if version == "latest":
try:
versions = Template.get_versions(**filters)
filters["version"] = versions[0]
except IndexError:
pass # No such thing as version for this template group
else:
filters["version"] = version
date = request.POST.get("date")
if date == "latest":
try:
dates = Template.get_dates(**filters)
filters["date"] = dates[0]
except IndexError:
pass # No such thing as date for this template group
else:
filters["date"] = parser.parse(date)
providers = Template.objects.filter(**filters).values("provider").distinct()
providers = sorted([p.values()[0] for p in providers])
providers = list(Provider.objects.filter(id__in=providers))
if provider_type is None:
providers = list(providers)
else:
providers = [
provider
for provider
in providers
if provider.provider_type == provider_type]
for provider in providers:
appl_filter = dict(
appliance_pool=None, ready=True, template__provider=provider,
template__preconfigured=filters["preconfigured"],
template__template_group=filters["template_group"],
template__template_type=filters["template_type"])
if "date" in filters:
appl_filter["template__date"] = filters["date"]
if "version" in filters:
appl_filter["template__version"] = filters["version"]
shepherd_appliances[provider.id] = len(
Appliance.objects.filter(**appl_filter))
total_shepherd_slots += shepherd_appliances[provider.id]
total_appliance_slots += provider.remaining_appliance_slots
total_provisioning_slots += provider.remaining_provisioning_slots
render_providers = {}
for provider in providers:
render_providers[provider.id] = {
"shepherd_count": shepherd_appliances[provider.id], "object": provider}
return render(request, 'appliances/_providers.html', locals())
@only_authenticated
def my_appliances(request, show_user="my"):
if not request.user.is_superuser:
if not (show_user == "my" or show_user == request.user.username):
messages.info(request, "You can't view others' appliances!")
return redirect("my_appliances")
if show_user == request.user.username:
return redirect("my_appliances")
else:
other_users = User.objects.exclude(pk=request.user.pk).order_by("last_name", "first_name")
if show_user == "my":
pools = AppliancePool.objects.filter(owner=request.user).order_by("id")
elif show_user == "all":
pools = AppliancePool.objects.order_by("id")
else:
pools = AppliancePool.objects.filter(owner__username=show_user).order_by("id")
pools = pools.select_related('group', 'provider', 'owner')
page = request.GET.get("page")
try:
per_page = int(request.GET.get("per_page", 25))
except (ValueError, TypeError):
per_page = 5
pools_paginator = Paginator(pools, per_page)
try:
pools_paged = pools_paginator.page(page)
page = int(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
pools_paged = pools_paginator.page(1)
page = 1
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
pools_paged = pools_paginator.page(pools_paginator.num_pages)
page = pools_paginator.num_pages
pages = list(pools_paginator.page_range)
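    # Show a sliding window of at most 5 page links, centered on the
    # current page and clamped to the paginator's first and last pages.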
if pools_paginator.num_pages <= 5:
start_index = 0
end_index = 5
else:
if page - 2 < 0:
start_index = 0
end_index = 5
elif page + 2 > pools_paginator.num_pages:
end_index = pools_paginator.num_pages
start_index = end_index - 5
else:
start_index = page - 3
end_index = page + 2
if start_index < 0:
end_index -= start_index
start_index = 0
pages = pages[start_index:end_index]
available_groups = Group.objects.filter(
id__in=Template.objects.values_list('template_group', flat=True).distinct())
group_tuples = []
for grp in available_groups:
group_tuples.append((grp.templates.order_by('-date')[0].date, grp))
group_tuples.sort(key=lambda gt: gt[0], reverse=True)
template_types = [t for t in Template.TEMPLATE_TYPES]
can_order_pool = show_user == "my"
new_pool_possible = True
display_legend = False
for pool in pools:
if not pool.finished:
display_legend = True
per_pool_quota = None
pools_remaining = None
num_user_vms = Appliance.objects.filter(appliance_pool__owner=request.user).count()
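    # Quota checks: a new pool is allowed only while the user is under both
    # the total-pool and total-VM quotas; the per-pool quota is further
    # capped by how many VMs the user has left.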
if request.user.has_quotas:
if request.user.quotas.total_pool_quota is not None:
if request.user.quotas.total_pool_quota <= len(pools):
new_pool_possible = False
pools_remaining = request.user.quotas.total_pool_quota - len(pools)
if request.user.quotas.total_vm_quota is not None:
if request.user.quotas.total_vm_quota <= num_user_vms:
new_pool_possible = False
if request.user.quotas.per_pool_quota is not None:
per_pool_quota = request.user.quotas.per_pool_quota
remaining_vms = request.user.quotas.total_vm_quota - num_user_vms
if remaining_vms < per_pool_quota:
per_pool_quota = remaining_vms
per_pool_quota_enabled = per_pool_quota is not None
can_change_hw = request.user.has_perm('appliances.can_modify_hw')
provider_types = Provider.get_available_provider_types(request.user)
return render(request, 'appliances/my_appliances.html', locals())
def can_operate_appliance_or_pool(appliance_or_pool, user):
if user.is_superuser:
return True
else:
return appliance_or_pool.owner == user
@only_authenticated
def appliance_action(request, appliance_id, action, x=None):
try:
appliance = Appliance.objects.get(id=appliance_id)
except ObjectDoesNotExist:
        messages.warning(request, 'Appliance with ID {} does not exist!'.format(appliance_id))
return go_back_or_home(request)
if not can_operate_appliance_or_pool(appliance, request.user):
        messages.warning(request, 'This appliance either belongs to another user or has no owner.')
return go_back_or_home(request)
if action == "start":
if appliance.power_state != Appliance.Power.ON:
chain(
appliance_power_on.si(appliance.id),
(wait_appliance_ready if appliance.preconfigured else mark_appliance_ready).si(
appliance.id))()
messages.success(request, 'Initiated launch of appliance.')
return go_back_or_home(request)
else:
messages.info(request, 'Appliance was already powered on.')
return go_back_or_home(request)
elif action == "reboot":
if appliance.power_state == Appliance.Power.ON:
chain(
appliance_reboot.si(appliance.id),
mark_appliance_ready.si(appliance.id))()
messages.success(request, 'Initiated reboot of appliance.')
return go_back_or_home(request)
else:
messages.warning(request, 'Only powered on appliances can be rebooted')
return go_back_or_home(request)
elif action == "stop":
if appliance.power_state != Appliance.Power.OFF:
appliance_power_off.delay(appliance.id)
messages.success(request, 'Initiated stop of appliance.')
return go_back_or_home(request)
else:
messages.info(request, 'Appliance was already powered off.')
return go_back_or_home(request)
elif action == "suspend":
if appliance.power_state != Appliance.Power.SUSPENDED:
appliance_suspend.delay(appliance.id)
messages.success(request, 'Initiated suspend of appliance.')
return go_back_or_home(request)
else:
messages.info(request, 'Appliance was already suspended.')
return go_back_or_home(request)
elif action == "kill":
Appliance.kill(appliance)
messages.success(request, 'Kill initiated.')
return go_back_or_home(request)
elif action == "dont_expire":
if not request.user.is_superuser:
messages.warning(request, 'Disabling expiration time is allowed only for superusers.')
return go_back_or_home(request)
with transaction.atomic():
appliance.leased_until = None
appliance.save()
messages.success(request, 'Lease disabled successfully. Be careful.')
return go_back_or_home(request)
elif action == "set_lease":
if not can_operate_appliance_or_pool(appliance, request.user):
            messages.warning(request, 'This appliance either belongs to another user or has no owner.')
return go_back_or_home(request)
appliance.prolong_lease(time=int(x))
messages.success(request, 'Lease prolonged successfully.')
return go_back_or_home(request)
else:
messages.warning(request, "Unknown action '{}'".format(action))
@only_authenticated
def prolong_lease_pool(request, pool_id, minutes):
try:
appliance_pool = AppliancePool.objects.get(id=pool_id)
except ObjectDoesNotExist:
        messages.warning(request, 'Appliance pool with ID {} does not exist!'.format(pool_id))
return go_back_or_home(request)
if not can_operate_appliance_or_pool(appliance_pool, request.user):
        messages.warning(request, 'This appliance pool either belongs to another user or has no owner.')
return go_back_or_home(request)
appliance_pool.prolong_lease(time=int(minutes))
messages.success(request, 'Lease prolonged successfully.')
return go_back_or_home(request)
@only_authenticated
def dont_expire_pool(request, pool_id):
try:
appliance_pool = AppliancePool.objects.get(id=pool_id)
except ObjectDoesNotExist:
        messages.warning(request, 'Pool with ID {} does not exist!'.format(pool_id))
return go_back_or_home(request)
if not request.user.is_superuser:
messages.warning(request, 'Disabling expiration time is allowed only for superusers.')
return go_back_or_home(request)
with transaction.atomic():
for appliance in appliance_pool.appliances:
appliance.leased_until = None
appliance.save()
messages.success(request, 'Lease disabled successfully. Be careful.')
return go_back_or_home(request)
@only_authenticated
def kill_pool(request, pool_id):
try:
pool = AppliancePool.objects.get(id=pool_id)
except ObjectDoesNotExist:
        messages.warning(request, 'Pool with ID {} does not exist!'.format(pool_id))
return go_back_or_home(request)
if not can_operate_appliance_or_pool(pool, request.user):
        messages.warning(request, 'This pool either belongs to another user or has no owner.')
return go_back_or_home(request)
try:
pool.kill()
except Exception as e:
messages.warning(request, "Exception {}: {}".format(type(e).__name__, str(e)))
else:
messages.success(request, 'Kill successfully initiated.')
return go_back_or_home(request)
@only_authenticated
def kill_all_pools(request, user_id):
try:
user = User.objects.get(id=user_id)
except ObjectDoesNotExist:
        messages.warning(request, 'User with ID {} does not exist!'.format(user_id))
return go_back_or_home(request)
if user != request.user and not (user.is_staff or user.is_superuser):
        messages.warning(request, "You cannot operate other users' pools.")
return go_back_or_home(request)
for pool in AppliancePool.objects.filter(owner=user):
pool_id = pool.id
try:
pool.kill()
except Exception as e:
messages.warning(
request, "Exception during pool {} kill: {}: {}".format(
pool_id, type(e).__name__, str(e)))
else:
messages.success(request, 'Kill of pool {} successfully initiated'.format(pool_id))
return go_back_or_home(request)
@only_authenticated
def delete_pool(request, pool_id):
try:
pool = AppliancePool.objects.get(id=pool_id)
except ObjectDoesNotExist:
        messages.warning(request, 'Pool with ID {} does not exist!'.format(pool_id))
return go_back_or_home(request)
if not can_operate_appliance_or_pool(pool, request.user):
        messages.warning(request, 'This pool either belongs to another user or has no owner.')
return go_back_or_home(request)
try:
pool.delete()
except Exception as e:
messages.warning(request, "Exception {}: {}".format(type(e).__name__, str(e)))
else:
messages.success(request, 'Deleted.')
return go_back_or_home(request)
def set_pool_description(request):
if not request.user.is_authenticated():
raise PermissionDenied()
try:
pool_id = request.POST.get("pool_id")
pool = AppliancePool.objects.get(id=pool_id)
except ObjectDoesNotExist:
        raise Http404('Pool with ID {} does not exist!'.format(pool_id))
if not can_operate_appliance_or_pool(pool, request.user):
raise PermissionDenied()
description = request.POST.get("description")
pool.description = description
pool.save()
return HttpResponse("")
def delete_template_provider(request):
if not request.user.is_authenticated() or not request.user.is_superuser:
return HttpResponseForbidden("Only authenticated superusers can operate this action.")
template_id = request.POST["template_id"]
try:
template = Template.objects.get(id=template_id)
except ObjectDoesNotExist:
        raise Http404('Template with ID {} does not exist!'.format(template_id))
if not request.user.is_superuser:
return HttpResponseForbidden("Only superusers can operate this action.")
task = delete_template_from_provider.delay(template.id)
return HttpResponse(task.id)
@only_authenticated
def clone_pool(request):
try:
count = int(request.POST["count"])
source_pool_id = int(request.POST["source_pool_id"])
pool = AppliancePool.objects.get(id=source_pool_id)
result_pool = pool.clone(num_appliances=count, owner=request.user)
messages.success(request, "Pool cloned - id {}".format(result_pool.id))
except Exception as e:
messages.warning(request, "{}: {}".format(type(e).__name__, e))
return go_back_or_home(request)
@only_authenticated
def request_pool(request):
try:
group = request.POST["stream"]
version = request.POST["version"]
template_type = request.POST["template_type"]
if version == "latest":
version = None
date = request.POST["date"]
if date == "latest":
date = None
provider = request.POST["provider"]
if provider == "any":
provider = None
preconfigured = request.POST.get("preconfigured", "false").lower() == "true"
yum_update = request.POST.get("yum_update", "false").lower() == "true"
provider_type = request.POST.get("provider_type", "any").lower()
if not provider_type or provider_type == 'any':
provider_type = None
count = int(request.POST["count"])
lease_time = int(request.POST.get("expiration", 60))
ram = None
cpu = None
if request.user.has_perm('appliances.can_modify_hw'):
if 'ram' in request.POST:
ram = int(request.POST['ram'])
if 'cpu' in request.POST:
cpu = int(request.POST['cpu'])
pool_id = AppliancePool.create(
request.user, group, version, date, provider, count, lease_time, preconfigured,
yum_update, ram, cpu, provider_type, template_type).id
messages.success(request, "Pool requested - id {}".format(pool_id))
except Exception as e:
messages.warning(request, "{}: {}".format(type(e).__name__, e))
return go_back_or_home(request)
@only_authenticated
def transfer_pool(request):
try:
pool_id = int(request.POST["pool_id"])
user_id = int(request.POST["user_id"])
with transaction.atomic():
pool = AppliancePool.objects.get(id=pool_id)
if not request.user.is_superuser:
if pool.owner != request.user:
raise Exception("User does not have the right to change this pool's owner!")
user = User.objects.get(id=user_id)
if user == request.user:
raise Exception("Why changing owner back to yourself? That does not make sense!")
# original_owner = pool.owner
pool.owner = user
pool.save()
# Rename appliances
# for appliance in pool.appliances:
# if appliance.name.startswith("{}_".format(original_owner.username)):
# # Change name
# appliance_rename.delay(
# appliance.id, user.username + appliance.name[len(original_owner.username):])
except Exception as e:
messages.warning(request, "Exception {} happened: {}".format(type(e).__name__, str(e)))
else:
messages.success(request, "Success!")
finally:
return go_back_or_home(request)
def vms(request, current_provider=None):
if not request.user.is_authenticated() or not request.user.is_superuser:
return go_home(request)
all_provider_keys = sorted(Provider.get_available_provider_keys())
providers = []
provider_keys = []
if request.user.is_staff or request.user.is_superuser:
user_filter = {}
else:
user_filter = {'user_groups__in': request.user.groups.all()}
for provider_key in all_provider_keys:
try:
provider = Provider.objects.filter(id=provider_key, **user_filter).distinct().first()
except ObjectDoesNotExist:
continue
if provider is not None:
providers.append((provider_key, provider.is_working))
provider_keys.append(provider_key)
if current_provider is None and providers:
return redirect("vms_at_provider", current_provider=provider_keys[0])
return render(request, 'appliances/vms/index.html', locals())
def vms_table(request, current_provider=None):
if not request.user.is_authenticated() or not request.user.is_superuser:
return go_home(request)
try:
manager = get_mgmt(current_provider)
vms = sorted([vm.name for vm in manager.list_vms()])
return render(request, 'appliances/vms/_list.html', locals())
except Exception as e:
return HttpResponse('{}: {}'.format(type(e).__name__, str(e)), content_type="text/plain")
def power_state(request, current_provider):
if not request.user.is_authenticated() or not request.user.is_superuser:
return go_home(request)
vm_name = request.POST["vm_name"]
manager = get_mgmt(current_provider)
# TODO: change after openshift wrapanapi refactor
if isinstance(manager, Openshift):
state = manager.vm_status(vm_name)
else:
state = manager.get_vm(vm_name).state
state = Appliance.POWER_STATES_MAPPING.get(state, "unknown")
return HttpResponse(state, content_type="text/plain")
def power_state_buttons(request, current_provider):
if not request.user.is_authenticated() or not request.user.is_superuser:
return go_home(request)
manager = get_mgmt(current_provider)
vm_name = request.POST["vm_name"]
power_state = request.POST["power_state"]
can_power_on = power_state in {Appliance.Power.SUSPENDED, Appliance.Power.OFF}
can_power_off = power_state in {Appliance.Power.ON}
can_suspend = power_state in {Appliance.Power.ON} and manager.can_suspend
can_delete = power_state in {Appliance.Power.OFF}
return render(request, 'appliances/vms/_buttons.html', locals())
def vm_action(request, current_provider):
if not request.user.is_authenticated() or not request.user.is_superuser:
return HttpResponse("Not authenticated", content_type="text/plain")
try:
get_mgmt(current_provider)
except Exception as e:
return HttpResponse(
"Troubles with provider {}: {}".format(current_provider, str(e)),
content_type="text/plain")
vm_name = request.POST["vm_name"]
action = request.POST["action"]
if action == "poweron":
anyvm_power_on.delay(current_provider, vm_name)
elif action == "poweroff":
anyvm_power_off.delay(current_provider, vm_name)
elif action == "suspend":
anyvm_suspend.delay(current_provider, vm_name)
elif action == "delete":
anyvm_delete.delay(current_provider, vm_name)
else:
HttpResponse("No such action {}!".format(action), content_type="text/plain")
logger().info("User {} initiated {} on {}@{}".format(
request.user.username, action, vm_name, current_provider))
return HttpResponse("Action {} was initiated".format(action), content_type="text/plain")
def logger():
return create_logger("sprout_vm_actions")
def rename_appliance(request):
post = json.loads(request.body)
if not request.user.is_authenticated():
raise PermissionDenied()
try:
appliance_id = post.get("appliance_id")
appliance = Appliance.objects.get(id=appliance_id)
except ObjectDoesNotExist:
        raise Http404('Appliance with ID {} does not exist!'.format(appliance_id))
if not can_operate_appliance_or_pool(appliance, request.user):
raise PermissionDenied("Permission denied")
new_name = post.get("new_name")
return HttpResponse(str(appliance_rename.delay(appliance.id, new_name).task_id))
def set_appliance_description(request):
if request.method != 'POST':
messages.error(request, "Invalid request.")
return go_home(request)
post = json.loads(request.body)
if not request.user.is_authenticated():
raise PermissionDenied()
try:
appliance_id = post.get("appliance_id")
appliance = Appliance.objects.get(id=appliance_id)
except ObjectDoesNotExist:
        raise Http404('Appliance with ID {} does not exist!'.format(appliance_id))
if not can_operate_appliance_or_pool(appliance, request.user):
raise PermissionDenied("Permission denied")
new_description = post.get("description")
appliance.description = new_description
appliance.save()
return json_response(True)
def task_result(request):
post = json.loads(request.body)
task_id = post.get("task_id")
result = AsyncResult(task_id)
if not result.ready():
return json_response(None)
return json_response(result.get(timeout=1))
def provider_enable_disable(request, provider_id, disabled=None):
if not request.user.is_authenticated():
return go_home(request)
try:
provider = Provider.objects.get(id=provider_id)
except ObjectDoesNotExist:
        messages.warning(request, 'Provider with ID {} does not exist!'.format(provider_id))
return go_back_or_home(request)
if not request.user.is_superuser:
messages.warning(request, 'Providers can be modified only by superusers.')
return go_back_or_home(request)
provider.disabled = disabled
provider.save()
messages.success(
request, 'Provider {}, {}.'.format(provider_id, "disabled" if disabled else "enabled"))
return go_back_or_home(request)
def check_appliance(request, provider_id, appliance_name):
try:
appliance = Appliance.objects.get(name=appliance_name, template__provider=provider_id)
except ObjectDoesNotExist:
return json_response(None)
owner = appliance.owner
if owner is not None:
owner = owner.username
data = {
'stream': appliance.template.template_group.id,
'version': appliance.template.version,
'date': appliance.template.date.strftime('%Y-%m-%d'),
'preconfigured': appliance.template.preconfigured,
'owner': owner,
}
return json_response(data)
def check_template(request, provider_id, template_name):
try:
template = Template.objects.get(name=template_name, provider=provider_id)
except ObjectDoesNotExist:
return json_response(None)
data = {
'stream': template.template_group.id,
'version': template.version,
'date': template.date.strftime('%Y-%m-%d'),
'preconfigured': template.preconfigured,
}
return json_response(data)
def check_pool(request, pool_id):
try:
pool = AppliancePool.objects.get(id=pool_id)
except ObjectDoesNotExist:
return json_response(None)
data = {
'description': pool.description,
'stream': pool.group.id,
'version': pool.version,
'date': pool.date.strftime('%Y-%m-%d'),
'preconfigured': pool.preconfigured,
'finished': pool.finished,
'owner': pool.owner.username,
'appliances': [[a.name, a.template.provider.id] for a in pool.appliances]
}
return json_response(data)
def check_pools(request):
data = []
for pool in AppliancePool.objects.all():
pool_data = {
'description': pool.description,
'id': pool.id,
'stream': pool.group.id,
'version': pool.version,
'date': pool.date.strftime('%Y-%m-%d'),
'preconfigured': pool.preconfigured,
'finished': pool.finished,
'owner': pool.owner.username,
'appliances': [[a.name, a.template.provider.id] for a in pool.appliances]
}
data.append(pool_data)
return json_response(data)
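# Illustrative response element for check_pools (all values hypothetical):
#
#     {"description": "...", "id": 42, "stream": "some-stream",
#      "version": "1.2.3", "date": "2016-01-01", "preconfigured": true,
#      "finished": false, "owner": "jdoe",
#      "appliances": [["appliance-name", "provider-id"]]}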
def view_bug_query(request, query_id):
if not request.user.is_authenticated():
return go_home(request)
queries = BugQuery.visible_for_user(request.user)
query = BugQuery.objects.get(id=query_id)
if query.owner is not None and query.owner != request.user:
if not request.user.is_superuser:
messages.info(request, "You cannot view BugQuery {}.".format(query.id))
return go_home(request)
try:
bugs = query.list_bugs(request.user)
except six.moves.xmlrpc_client.Fault as e:
messages.error(request, 'Bugzilla query error {}: {}'.format(e.faultCode, e.faultString))
return go_home(request)
return render(request, 'bugs/list_query.html', locals())
def view_bug_queries(request):
if not request.user.is_authenticated():
return go_home(request)
try:
first_query = BugQuery.visible_for_user(request.user)[0]
except IndexError:
first_query = None
if first_query is not None:
return redirect('view_bug_query', first_query.id)
else:
# No Group
messages.info(request, "No query present, redirected to the homepage.")
return go_home(request)
def new_bug_query(request):
if not request.user.is_authenticated():
return go_home(request)
queries = BugQuery.visible_for_user(request.user)
query = None
if request.method == 'GET':
return render(request, 'bugs/new_query.html', locals())
elif request.method != 'POST':
messages.error(request, "Invalid request.")
return go_home(request)
# Create a new one
name = request.POST['name']
url = request.POST['url']
global_ = request.POST.get('global', 'false') == 'true'
if not request.user.is_superuser:
global_ = False
if global_:
owner = None
else:
owner = request.user
bug_query = BugQuery(name=name, url=url, owner=owner)
bug_query.save()
messages.info(request, "Query with name {} added.".format(name))
return redirect('view_bug_query', bug_query.id)
def delete_bug_query(request, query_id):
if not request.user.is_authenticated():
return go_home(request)
query = BugQuery.objects.get(id=query_id)
if query.owner == request.user or request.user.is_superuser:
query.delete()
messages.info(request, "Query with name {} deleted.".format(query.name))
return redirect('view_bug_queries')
else:
messages.error(request, "You cannot delete query with name {}.".format(query.name))
return redirect('view_bug_queries')
def check_query(request):
if not request.user.is_authenticated():
return go_home(request)
if request.method != 'POST':
return HttpResponseForbidden('Only POST allowed')
bz = Bugzilla.from_config().bugzilla
try:
parsed = bz.url_to_query(request.POST['url'])
if not parsed:
parsed = None
except Exception:
parsed = None
    if parsed and 'cmdtype' in parsed:
# It is a command and that is not supported within .query()
parsed = None
return json_response(parsed)
def swap_offenders(request):
appliances = Appliance.objects.filter(
power_state=Appliance.Power.ON).exclude(Q(swap=None) | Q(swap=0)).order_by('-swap')[:15]
failed_ssh = Appliance.objects.filter(ssh_failed=True, power_state=Appliance.Power.ON).order_by(
'appliance_pool__owner__username', 'name')
return render(request, 'appliances/swap_offenders.html', locals())
def template_configurations(request):
if not request.user.is_superuser or not request.user.is_staff:
messages.info(request, 'You do not have the right to see the template configuration view')
return go_back_or_home(request)
templates_configuring = Template.objects\
.select_related('provider', 'template_group')\
.filter(ready=False, preconfigured=True)
return render(request, 'appliances/template_conf.html', locals())
@only_authenticated
def nuke_template(request):
if not request.user.is_superuser:
return HttpResponseForbidden("Only authenticated superusers can operate this action.")
if request.method != 'POST':
return HttpResponseForbidden('Only POST allowed')
template_id = request.POST["template_id"]
try:
template = Template.objects.get(id=template_id)
except ObjectDoesNotExist:
        raise Http404('Template with ID {} does not exist!'.format(template_id))
task = nuke_template_configuration.delay(template.id)
return HttpResponse(task.id)
@only_authenticated
def purge_templates(request):
if not request.user.is_superuser:
return HttpResponseForbidden("Only authenticated superusers can operate this action.")
if request.method != 'POST':
return HttpResponseForbidden('Only POST allowed')
try:
template_ids = json.loads(request.POST['templates_json'])
group_tasks = []
for template_id in template_ids:
group_tasks.append(delete_template_from_provider.s(template_id))
return HttpResponse(group(group_tasks).apply_async().id)
except KeyError:
return HttpResponseForbidden('templates_json required')
except ValueError:
return HttpResponseForbidden('Malformed JSON')
| anurag03/integration_tests | sprout/appliances/views.py | Python | gpl-2.0 | 42,946 |
#!/usr/bin/python
# ReachView code is placed under the GPL license.
# Written by Egor Fedorov ([email protected])
# Copyright (c) 2015, Emlid Limited
# All rights reserved.
# If you are interested in using ReachView code as a part of a
# closed source project, please contact Emlid Limited ([email protected]).
# This file is part of ReachView.
# ReachView is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# ReachView is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with ReachView. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
from threading import Thread
from GPIO import GPIO
class ReachLED:
pwm_prefix = "/sys/class/pwm/pwmchip0/"
def __init__(self):
self.pins = [GPIO(12), GPIO(13), GPIO(182)] # green, red, blue
# thread, used to blink later
self.blinker_thread = None
# to stop blinker later
self.blinker_not_interrupted = True
# keep current state in order to restore later
self.current_blink_pattern = ""
self.colors_dict = {
"off": [0, 0, 0],
"red": [1, 0, 0],
"green": [0, 1, 0],
"blue": [0, 0, 1],
"white": [1, 1, 1],
"yellow": [1, 1, 0],
"cyan": [0, 1, 1],
"magenta": [1, 0, 1],
"orange": [1, 0.4, 0],
"weakred": [0.1, 0, 0]
}
# channel numbers
self.pwm_channels = [0, 1, 2] # red, green, blue
# first, we need to change the pin's pinmux to mode1
for pin in self.pins:
pin.setPinmux("mode1")
        # then, export the 3 pwm channels if needed
        for ch in self.pwm_channels:
            if not os.path.exists(self.pwm_prefix + "pwm" + str(ch)):
with open(self.pwm_prefix + "export", "w") as f:
f.write(str(ch))
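        # (writing a channel number to .../export makes the kernel create the
        # matching /sys/class/pwm/pwmchip0/pwmN directory used below)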
# enable all of the channels
for ch in self.pwm_channels:
with open(self.pwm_prefix + "pwm" + str(ch) + "/enable", "w") as f:
f.write("1")
# set period
for ch in self.pwm_channels:
with open(self.pwm_prefix + "pwm" + str(ch) + "/period", "w") as f:
f.write("1000000")
# turn off all of it by default
#for ch in self.pwm_channels:
# self.setDutyCycle(ch, 0)
    def setDutyCycle(self, channel, percentage):
# 0% = 1000000
# 100% = 0
duty_value = (100 - percentage) * 10000
duty_value = int(duty_value)
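        # Worked example: percentage=25 -> duty_value=750000 out of the
        # 1000000 ns period set in __init__; the mapping is inverted, so a
        # lower duty_cycle value means a brighter LED.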
with open(self.pwm_prefix + "pwm" + str(channel) + "/duty_cycle", "w") as f:
f.write(str(duty_value))
def setColor(self, color, power_percentage = None):
# available colors:
# red
# green
# blue
# white
# yellow
# cyan
# magenta
        # default power percentage value
        if power_percentage is None:
power_percentage = 100
if color in self.colors_dict:
for i in range(0, 3):
self.setDutyCycle(i, self.colors_dict[color][i] * power_percentage)
return 0
else:
# no such color available :(
return -1
def startBlinker(self, pattern, delay = None):
# start a new thread that blinks
self.current_blink_pattern = pattern
        if self.blinker_thread is None:
self.blinker_not_interrupted = True
self.blinker_thread = Thread(target = self.blinkPattern, args = (pattern, delay))
self.blinker_thread.start()
else:
# we already have a blinker started and need to restart it using new colors
self.stopBlinker()
self.startBlinker(pattern, delay)
def stopBlinker(self):
# stop existing thread
self.blinker_not_interrupted = False
if self.blinker_thread is not None:
self.blinker_thread.join()
self.blinker_thread = None
def blinkPattern(self, pattern, delay = None):
# start blinking in a special pattern
# pattern is a string of colors, separated by commas
# for example: "red,blue,off"
        # they will be flashed one by one, each separated by
        # `delay` seconds (0.5 s by default)
color_list = pattern.split(",")
        if delay is None:
delay = 0.5
while self.blinker_not_interrupted:
for color in color_list:
                if not self.blinker_not_interrupted:
break
self.setColor(color)
time.sleep(delay)
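# Usage sketch (illustrative): blink red/off twice per second until stopped:
#
#     led = ReachLED()
#     led.startBlinker("red,off", delay=0.25)
#     ...
#     led.stopBlinker()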
def test():
led = ReachLED()
print("Starting...")
    led.setDutyCycle(0, 0)
    led.setDutyCycle(1, 0)
    led.setDutyCycle(2, 0)
time.sleep(1)
print("After pause...")
print("Channel 0")
led.setDutyCycle(0, 100)
time.sleep(1)
print("Channel 1")
led.setDutyCycle(0, 0)
led.setDutyCycle(1, 100)
time.sleep(1)
print("Channel 2")
led.setDutyCycle(1, 0)
led.setDutyCycle(2, 100)
time.sleep(1)
if __name__ == "__main__":
# test()
led = ReachLED()
if len(sys.argv) < 2:
print("You need to specify a color")
print("List of colors:")
colors = ""
for color in led.colors_dict:
colors += color + ", "
print(colors)
else:
if led.setColor(sys.argv[1]) < 0:
print("Can't set this color. You may add this in the colors_dict variable")
| emlid/ReachView | ReachLED.py | Python | gpl-3.0 | 5,937 |
# -*- coding: utf-8 -*-
"""
Tests for video outline API
"""
import ddt
import itertools
from uuid import uuid4
from collections import namedtuple
from edxval import api
from mobile_api.models import MobileApiConfig
from xmodule.modulestore.tests.factories import ItemFactory
from xmodule.video_module import transcripts_utils
from xmodule.modulestore.django import modulestore
from xmodule.partitions.partitions import Group, UserPartition
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory
from openedx.core.djangoapps.course_groups.models import CourseUserGroupPartitionGroup
from ..testutils import MobileAPITestCase, MobileAuthTestMixin, MobileCourseAccessTestMixin
class TestVideoAPITestCase(MobileAPITestCase):
"""
Base test class for video related mobile APIs
"""
def setUp(self):
super(TestVideoAPITestCase, self).setUp()
self.section = ItemFactory.create(
parent=self.course,
category="chapter",
display_name=u"test factory section omega \u03a9",
)
self.sub_section = ItemFactory.create(
parent=self.section,
category="sequential",
display_name=u"test subsection omega \u03a9",
)
self.unit = ItemFactory.create(
parent=self.sub_section,
category="vertical",
metadata={'graded': True, 'format': 'Homework'},
display_name=u"test unit omega \u03a9",
)
self.other_unit = ItemFactory.create(
parent=self.sub_section,
category="vertical",
metadata={'graded': True, 'format': 'Homework'},
display_name=u"test unit omega 2 \u03a9",
)
self.nameless_unit = ItemFactory.create(
parent=self.sub_section,
category="vertical",
metadata={'graded': True, 'format': 'Homework'},
display_name=None,
)
self.edx_video_id = 'testing-123'
self.video_url = 'http://val.edx.org/val/video.mp4'
self.video_url_high = 'http://val.edx.org/val/video_high.mp4'
self.youtube_url = 'http://val.edx.org/val/youtube.mp4'
self.html5_video_url = 'http://video.edx.org/html5/video.mp4'
api.create_profile('youtube')
api.create_profile('mobile_high')
api.create_profile('mobile_low')
# create the video in VAL
api.create_video({
'edx_video_id': self.edx_video_id,
'status': 'test',
'client_video_id': u"test video omega \u03a9",
'duration': 12,
'courses': [unicode(self.course.id)],
'encoded_videos': [
{
'profile': 'youtube',
'url': 'xyz123',
'file_size': 0,
'bitrate': 1500
},
{
'profile': 'mobile_low',
'url': self.video_url,
'file_size': 12345,
'bitrate': 250
},
{
'profile': 'mobile_high',
'url': self.video_url_high,
'file_size': 99999,
'bitrate': 250
},
]})
# Set requested profiles
MobileApiConfig(video_profiles="mobile_low,mobile_high,youtube").save()
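        # Note: the order of video_profiles matters; the first profile in
        # this list for which the video has an encoding is used as the
        # default video_url (exercised in test_mobile_api_config below).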
class TestVideoAPIMixin(object):
"""
Mixin class that provides helpers for testing video related mobile APIs
"""
def _create_video_with_subs(self, custom_subid=None):
"""
Creates and returns a video with stored subtitles.
"""
subid = custom_subid or uuid4().hex
transcripts_utils.save_subs_to_store(
{
'start': [100, 200, 240, 390, 1000],
'end': [200, 240, 380, 1000, 1500],
'text': [
'subs #1',
'subs #2',
'subs #3',
'subs #4',
'subs #5'
]
},
subid,
self.course)
return ItemFactory.create(
parent=self.unit,
category="video",
edx_video_id=self.edx_video_id,
display_name=u"test video omega \u03a9",
sub=subid
)
def _verify_paths(self, course_outline, path_list, outline_index=0):
"""
Takes a path_list and compares it against the course_outline
Attributes:
course_outline (list): A list of dictionaries that includes a 'path'
and 'named_path' field which we will be comparing path_list to
path_list (list): A list of the expected strings
outline_index (int): Index into the course_outline list for which the
path is being tested.
"""
path = course_outline[outline_index]['path']
self.assertEqual(len(path), len(path_list))
for i in range(len(path_list)):
self.assertEqual(path_list[i], path[i]['name'])
#named_path will be deprecated eventually
named_path = course_outline[outline_index]['named_path']
self.assertEqual(len(named_path), len(path_list))
for i in range(len(path_list)):
self.assertEqual(path_list[i], named_path[i])
def _setup_course_partitions(self, scheme_id='random', is_cohorted=False):
"""Helper method to configure the user partitions in the course."""
self.partition_id = 0 # pylint: disable=attribute-defined-outside-init
self.course.user_partitions = [
UserPartition(
self.partition_id, 'first_partition', 'First Partition',
[Group(0, 'alpha'), Group(1, 'beta')],
scheme=None, scheme_id=scheme_id
),
]
self.course.cohort_config = {'cohorted': is_cohorted}
self.store.update_item(self.course, self.user.id)
def _setup_group_access(self, xblock, partition_id, group_ids):
"""Helper method to configure the partition and group mapping for the given xblock."""
xblock.group_access = {partition_id: group_ids}
self.store.update_item(xblock, self.user.id)
def _setup_split_module(self, sub_block_category):
"""Helper method to configure a split_test unit with children of type sub_block_category."""
self._setup_course_partitions()
self.split_test = ItemFactory.create( # pylint: disable=attribute-defined-outside-init
parent=self.unit,
category="split_test",
display_name=u"split test unit",
user_partition_id=0,
)
sub_block_a = ItemFactory.create(
parent=self.split_test,
category=sub_block_category,
display_name=u"split test block a",
)
sub_block_b = ItemFactory.create(
parent=self.split_test,
category=sub_block_category,
display_name=u"split test block b",
)
self.split_test.group_id_to_child = {
str(index): url for index, url in enumerate([sub_block_a.location, sub_block_b.location])
}
self.store.update_item(self.split_test, self.user.id)
return sub_block_a, sub_block_b
class TestNonStandardCourseStructure(MobileAPITestCase, TestVideoAPIMixin):
"""
    Tests /api/mobile/v0.5/video_outlines/courses/{course_id} for courses with a non-standard structure.
"""
REVERSE_INFO = {'name': 'video-summary-list', 'params': ['course_id']}
def setUp(self):
super(TestNonStandardCourseStructure, self).setUp()
self.chapter_under_course = ItemFactory.create(
parent=self.course,
category="chapter",
display_name=u"test factory chapter under course omega \u03a9",
)
self.section_under_course = ItemFactory.create(
parent=self.course,
category="sequential",
display_name=u"test factory section under course omega \u03a9",
)
self.section_under_chapter = ItemFactory.create(
parent=self.chapter_under_course,
category="sequential",
display_name=u"test factory section under chapter omega \u03a9",
)
self.vertical_under_course = ItemFactory.create(
parent=self.course,
category="vertical",
display_name=u"test factory vertical under course omega \u03a9",
)
self.vertical_under_section = ItemFactory.create(
parent=self.section_under_chapter,
category="vertical",
display_name=u"test factory vertical under section omega \u03a9",
)
def test_structure_course_video(self):
"""
        Tests when there is a video, without a vertical, directly under the course
"""
self.login_and_enroll()
ItemFactory.create(
parent=self.course,
category="video",
display_name=u"test factory video omega \u03a9",
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
section_url = course_outline[0]["section_url"]
unit_url = course_outline[0]["unit_url"]
self.assertRegexpMatches(section_url, r'courseware$')
self.assertEqual(section_url, unit_url)
self._verify_paths(course_outline, [])
def test_structure_course_vert_video(self):
"""
        Tests when there is a video under a vertical directly under the course
"""
self.login_and_enroll()
ItemFactory.create(
parent=self.vertical_under_course,
category="video",
display_name=u"test factory video omega \u03a9",
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
section_url = course_outline[0]["section_url"]
unit_url = course_outline[0]["unit_url"]
self.assertRegexpMatches(
section_url,
r'courseware/test_factory_vertical_under_course_omega_%CE%A9/$'
)
self.assertEqual(section_url, unit_url)
self._verify_paths(
course_outline,
[
u'test factory vertical under course omega \u03a9'
]
)
def test_structure_course_chap_video(self):
"""
Tests when there is a video directly under chapter
"""
self.login_and_enroll()
ItemFactory.create(
parent=self.chapter_under_course,
category="video",
display_name=u"test factory video omega \u03a9",
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
section_url = course_outline[0]["section_url"]
unit_url = course_outline[0]["unit_url"]
self.assertRegexpMatches(
section_url,
r'courseware/test_factory_chapter_under_course_omega_%CE%A9/$'
)
self.assertEqual(section_url, unit_url)
self._verify_paths(
course_outline,
[
u'test factory chapter under course omega \u03a9',
]
)
def test_structure_course_section_video(self):
"""
        Tests when there is no chapter, and the video is under a section directly under the course
"""
self.login_and_enroll()
ItemFactory.create(
parent=self.section_under_course,
category="video",
display_name=u"test factory video omega \u03a9",
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
section_url = course_outline[0]["section_url"]
unit_url = course_outline[0]["unit_url"]
self.assertRegexpMatches(
section_url,
r'courseware/test_factory_section_under_course_omega_%CE%A9/$'
)
self.assertEqual(section_url, unit_url)
self._verify_paths(
course_outline,
[
u'test factory section under course omega \u03a9',
]
)
def test_structure_course_chap_section_video(self):
"""
        Tests when a chapter and a sequential exist, with a video that has no vertical.
"""
self.login_and_enroll()
ItemFactory.create(
parent=self.section_under_chapter,
category="video",
display_name=u"meow factory video omega \u03a9",
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
section_url = course_outline[0]["section_url"]
unit_url = course_outline[0]["unit_url"]
self.assertRegexpMatches(
section_url,
(
r'courseware/test_factory_chapter_under_course_omega_%CE%A9/' +
'test_factory_section_under_chapter_omega_%CE%A9/$'
)
)
self.assertEqual(section_url, unit_url)
self._verify_paths(
course_outline,
[
u'test factory chapter under course omega \u03a9',
u'test factory section under chapter omega \u03a9',
]
)
def test_structure_course_section_vert_video(self):
"""
Tests chapter->section->vertical->unit
"""
self.login_and_enroll()
ItemFactory.create(
parent=self.vertical_under_section,
category="video",
display_name=u"test factory video omega \u03a9",
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
section_url = course_outline[0]["section_url"]
unit_url = course_outline[0]["unit_url"]
self.assertRegexpMatches(
section_url,
(
r'courseware/test_factory_chapter_under_course_omega_%CE%A9/' +
'test_factory_section_under_chapter_omega_%CE%A9/$'
)
)
self.assertRegexpMatches(
unit_url,
(
r'courseware/test_factory_chapter_under_course_omega_%CE%A9/' +
'test_factory_section_under_chapter_omega_%CE%A9/1$'
)
)
self._verify_paths(
course_outline,
[
u'test factory chapter under course omega \u03a9',
u'test factory section under chapter omega \u03a9',
u'test factory vertical under section omega \u03a9'
]
)
@ddt.ddt
class TestVideoSummaryList(
TestVideoAPITestCase, MobileAuthTestMixin, MobileCourseAccessTestMixin, TestVideoAPIMixin # pylint: disable=bad-continuation
):
"""
    Tests for /api/mobile/v0.5/video_outlines/courses/{course_id}.
"""
REVERSE_INFO = {'name': 'video-summary-list', 'params': ['course_id']}
def test_only_on_web(self):
self.login_and_enroll()
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 0)
subid = uuid4().hex
transcripts_utils.save_subs_to_store(
{
'start': [100],
'end': [200],
'text': [
'subs #1',
]
},
subid,
self.course)
ItemFactory.create(
parent=self.unit,
category="video",
display_name=u"test video",
only_on_web=True,
subid=subid
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
self.assertIsNone(course_outline[0]["summary"]["video_url"])
self.assertIsNone(course_outline[0]["summary"]["video_thumbnail_url"])
self.assertEqual(course_outline[0]["summary"]["duration"], 0)
self.assertEqual(course_outline[0]["summary"]["size"], 0)
self.assertEqual(course_outline[0]["summary"]["name"], "test video")
self.assertEqual(course_outline[0]["summary"]["transcripts"], {})
self.assertIsNone(course_outline[0]["summary"]["language"])
self.assertEqual(course_outline[0]["summary"]["category"], "video")
self.assertTrue(course_outline[0]["summary"]["only_on_web"])
def test_mobile_api_config(self):
"""
Tests VideoSummaryList with different MobileApiConfig video_profiles
"""
self.login_and_enroll()
edx_video_id = "testing_mobile_high"
api.create_video({
'edx_video_id': edx_video_id,
'status': 'test',
'client_video_id': u"test video omega \u03a9",
'duration': 12,
'courses': [unicode(self.course.id)],
'encoded_videos': [
{
'profile': 'youtube',
'url': self.youtube_url,
'file_size': 2222,
'bitrate': 4444
},
{
'profile': 'mobile_high',
'url': self.video_url_high,
'file_size': 111,
'bitrate': 333
},
]})
ItemFactory.create(
parent=self.other_unit,
category="video",
display_name=u"testing mobile high video",
edx_video_id=edx_video_id,
)
expected_output = {
'category': u'video',
'video_thumbnail_url': None,
'language': u'en',
'name': u'testing mobile high video',
'video_url': self.video_url_high,
'duration': 12.0,
'transcripts': {
'en': 'http://testserver/api/mobile/v0.5/video_outlines/transcripts/{}/testing_mobile_high_video/en'.format(self.course.id) # pylint: disable=line-too-long
},
'only_on_web': False,
'encoded_videos': {
u'mobile_high': {
'url': self.video_url_high,
'file_size': 111
},
u'youtube': {
'url': self.youtube_url,
'file_size': 2222
}
},
'size': 111
}
# Testing when video_profiles='mobile_low,mobile_high,youtube'
course_outline = self.api_response().data
course_outline[0]['summary'].pop("id")
self.assertEqual(course_outline[0]['summary'], expected_output)
# Testing when there is no mobile_low, and that mobile_high doesn't show
MobileApiConfig(video_profiles="mobile_low,youtube").save()
course_outline = self.api_response().data
expected_output['encoded_videos'].pop('mobile_high')
expected_output['video_url'] = self.youtube_url
expected_output['size'] = 2222
course_outline[0]['summary'].pop("id")
self.assertEqual(course_outline[0]['summary'], expected_output)
# Testing where youtube is the default video over mobile_high
MobileApiConfig(video_profiles="youtube,mobile_high").save()
course_outline = self.api_response().data
expected_output['encoded_videos']['mobile_high'] = {
'url': self.video_url_high,
'file_size': 111
}
course_outline[0]['summary'].pop("id")
self.assertEqual(course_outline[0]['summary'], expected_output)
def test_video_not_in_val(self):
self.login_and_enroll()
self._create_video_with_subs()
ItemFactory.create(
parent=self.other_unit,
category="video",
edx_video_id="some_non_existent_id_in_val",
display_name=u"some non existent video in val",
html5_sources=[self.html5_video_url]
)
summary = self.api_response().data[1]['summary']
self.assertEqual(summary['name'], "some non existent video in val")
self.assertIsNone(summary['encoded_videos'])
self.assertIsNone(summary['duration'])
self.assertEqual(summary['size'], 0)
self.assertEqual(summary['video_url'], self.html5_video_url)
def test_course_list(self):
self.login_and_enroll()
self._create_video_with_subs()
ItemFactory.create(
parent=self.other_unit,
category="video",
display_name=u"test video omega 2 \u03a9",
html5_sources=[self.html5_video_url]
)
ItemFactory.create(
parent=self.other_unit,
category="video",
display_name=u"test video omega 3 \u03a9",
source=self.html5_video_url
)
ItemFactory.create(
parent=self.unit,
category="video",
edx_video_id=self.edx_video_id,
display_name=u"test draft video omega \u03a9",
visible_to_staff_only=True,
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 3)
vid = course_outline[0]
self.assertTrue('test_subsection_omega_%CE%A9' in vid['section_url'])
self.assertTrue('test_subsection_omega_%CE%A9/1' in vid['unit_url'])
self.assertTrue(u'test_video_omega_\u03a9' in vid['summary']['id'])
self.assertEqual(vid['summary']['video_url'], self.video_url)
self.assertEqual(vid['summary']['size'], 12345)
self.assertTrue('en' in vid['summary']['transcripts'])
self.assertFalse(vid['summary']['only_on_web'])
self.assertEqual(course_outline[1]['summary']['video_url'], self.html5_video_url)
self.assertEqual(course_outline[1]['summary']['size'], 0)
self.assertFalse(course_outline[1]['summary']['only_on_web'])
self.assertEqual(course_outline[1]['path'][2]['name'], self.other_unit.display_name)
self.assertEqual(course_outline[1]['path'][2]['id'], unicode(self.other_unit.location))
self.assertEqual(course_outline[2]['summary']['video_url'], self.html5_video_url)
self.assertEqual(course_outline[2]['summary']['size'], 0)
self.assertFalse(course_outline[2]['summary']['only_on_web'])
def test_with_nameless_unit(self):
self.login_and_enroll()
ItemFactory.create(
parent=self.nameless_unit,
category="video",
edx_video_id=self.edx_video_id,
display_name=u"test draft video omega 2 \u03a9"
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
self.assertEqual(course_outline[0]['path'][2]['name'], self.nameless_unit.location.block_id)
def test_with_video_in_sub_section(self):
"""
Tests a non standard xml format where a video is underneath a sequential
We are expecting to return the same unit and section url since there is
no unit vertical.
"""
self.login_and_enroll()
ItemFactory.create(
parent=self.sub_section,
category="video",
edx_video_id=self.edx_video_id,
display_name=u"video in the sub section"
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
self.assertEqual(len(course_outline[0]['path']), 2)
section_url = course_outline[0]["section_url"]
unit_url = course_outline[0]["unit_url"]
self.assertIn(
u'courseware/test_factory_section_omega_%CE%A9/test_subsection_omega_%CE%A9',
section_url
)
self.assertTrue(section_url)
self.assertTrue(unit_url)
self.assertEqual(section_url, unit_url)
@ddt.data(
*itertools.product([True, False], ["video", "problem"])
)
@ddt.unpack
def test_with_split_block(self, is_user_staff, sub_block_category):
"""Test with split_module->sub_block_category and for both staff and non-staff users."""
self.login_and_enroll()
self.user.is_staff = is_user_staff
self.user.save()
self._setup_split_module(sub_block_category)
video_outline = self.api_response().data
num_video_blocks = 1 if sub_block_category == "video" else 0
self.assertEqual(len(video_outline), num_video_blocks)
for block_index in range(num_video_blocks):
self._verify_paths(
video_outline,
[
self.section.display_name,
self.sub_section.display_name,
self.unit.display_name,
self.split_test.display_name
],
block_index
)
self.assertIn(u"split test block", video_outline[block_index]["summary"]["name"])
def test_with_split_vertical(self):
"""Test with split_module->vertical->video structure."""
self.login_and_enroll()
split_vertical_a, split_vertical_b = self._setup_split_module("vertical")
ItemFactory.create(
parent=split_vertical_a,
category="video",
display_name=u"video in vertical a",
)
ItemFactory.create(
parent=split_vertical_b,
category="video",
display_name=u"video in vertical b",
)
video_outline = self.api_response().data
# user should see only one of the videos (a or b).
self.assertEqual(len(video_outline), 1)
self.assertIn(u"video in vertical", video_outline[0]["summary"]["name"])
a_or_b = video_outline[0]["summary"]["name"][-1:]
self._verify_paths(
video_outline,
[
self.section.display_name,
self.sub_section.display_name,
self.unit.display_name,
self.split_test.display_name,
u"split test block " + a_or_b
],
)
def _create_cohorted_video(self, group_id):
"""Creates a cohorted video block, giving access to only the given group_id."""
video_block = ItemFactory.create(
parent=self.unit,
category="video",
display_name=u"video for group " + unicode(group_id),
)
self._setup_group_access(video_block, self.partition_id, [group_id])
def _create_cohorted_vertical_with_video(self, group_id):
"""Creates a cohorted vertical with a child video block, giving access to only the given group_id."""
vertical_block = ItemFactory.create(
parent=self.sub_section,
category="vertical",
display_name=u"vertical for group " + unicode(group_id),
)
self._setup_group_access(vertical_block, self.partition_id, [group_id])
ItemFactory.create(
parent=vertical_block,
category="video",
display_name=u"video for group " + unicode(group_id),
)
@ddt.data("_create_cohorted_video", "_create_cohorted_vertical_with_video")
def test_with_cohorted_content(self, content_creator_method_name):
self.login_and_enroll()
self._setup_course_partitions(scheme_id='cohort', is_cohorted=True)
cohorts = []
for group_id in [0, 1]:
getattr(self, content_creator_method_name)(group_id)
cohorts.append(CohortFactory(course_id=self.course.id, name=u"Cohort " + unicode(group_id)))
link = CourseUserGroupPartitionGroup(
course_user_group=cohorts[group_id],
partition_id=self.partition_id,
group_id=group_id,
)
link.save()
for cohort_index in range(len(cohorts)):
# add user to this cohort
cohorts[cohort_index].users.add(self.user)
# should only see video for this cohort
video_outline = self.api_response().data
self.assertEqual(len(video_outline), 1)
            self.assertEqual(
u"video for group " + unicode(cohort_index),
video_outline[0]["summary"]["name"]
)
# remove user from this cohort
cohorts[cohort_index].users.remove(self.user)
# un-cohorted user should see no videos
video_outline = self.api_response().data
self.assertEqual(len(video_outline), 0)
# staff user sees all videos
self.user.is_staff = True
self.user.save()
video_outline = self.api_response().data
self.assertEqual(len(video_outline), 2)
def test_with_hidden_blocks(self):
self.login_and_enroll()
hidden_subsection = ItemFactory.create(
parent=self.section,
category="sequential",
hide_from_toc=True,
)
unit_within_hidden_subsection = ItemFactory.create(
parent=hidden_subsection,
category="vertical",
)
hidden_unit = ItemFactory.create(
parent=self.sub_section,
category="vertical",
hide_from_toc=True,
)
ItemFactory.create(
parent=unit_within_hidden_subsection,
category="video",
edx_video_id=self.edx_video_id,
)
ItemFactory.create(
parent=hidden_unit,
category="video",
edx_video_id=self.edx_video_id,
)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 0)
def test_language(self):
self.login_and_enroll()
video = ItemFactory.create(
parent=self.nameless_unit,
category="video",
edx_video_id=self.edx_video_id,
display_name=u"test draft video omega 2 \u03a9"
)
language_case = namedtuple('language_case', ['transcripts', 'expected_language'])
language_cases = [
# defaults to english
language_case({}, "en"),
# supports english
language_case({"en": 1}, "en"),
# supports another language
language_case({"lang1": 1}, "lang1"),
# returns first alphabetically-sorted language
language_case({"lang1": 1, "en": 2}, "en"),
language_case({"lang1": 1, "lang2": 2}, "lang1"),
]
for case in language_cases:
video.transcripts = case.transcripts
modulestore().update_item(video, self.user.id)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
self.assertEqual(course_outline[0]['summary']['language'], case.expected_language)
def test_transcripts(self):
self.login_and_enroll()
video = ItemFactory.create(
parent=self.nameless_unit,
category="video",
edx_video_id=self.edx_video_id,
display_name=u"test draft video omega 2 \u03a9"
)
transcript_case = namedtuple('transcript_case', ['transcripts', 'english_subtitle', 'expected_transcripts'])
transcript_cases = [
# defaults to english
transcript_case({}, "", ["en"]),
transcript_case({}, "en-sub", ["en"]),
# supports english
transcript_case({"en": 1}, "", ["en"]),
transcript_case({"en": 1}, "en-sub", ["en"]),
# keeps both english and other languages
transcript_case({"lang1": 1, "en": 2}, "", ["lang1", "en"]),
transcript_case({"lang1": 1, "en": 2}, "en-sub", ["lang1", "en"]),
# adds english to list of languages only if english_subtitle is specified
transcript_case({"lang1": 1, "lang2": 2}, "", ["lang1", "lang2"]),
transcript_case({"lang1": 1, "lang2": 2}, "en-sub", ["lang1", "lang2", "en"]),
]
for case in transcript_cases:
video.transcripts = case.transcripts
video.sub = case.english_subtitle
modulestore().update_item(video, self.user.id)
course_outline = self.api_response().data
self.assertEqual(len(course_outline), 1)
self.assertSetEqual(
set(course_outline[0]['summary']['transcripts'].keys()),
set(case.expected_transcripts)
)
class TestTranscriptsDetail(
TestVideoAPITestCase, MobileAuthTestMixin, MobileCourseAccessTestMixin, TestVideoAPIMixin # pylint: disable=bad-continuation
):
"""
Tests for /api/mobile/v0.5/video_outlines/transcripts/{course_id}..
"""
REVERSE_INFO = {'name': 'video-transcripts-detail', 'params': ['course_id']}
def setUp(self):
super(TestTranscriptsDetail, self).setUp()
self.video = self._create_video_with_subs()
def reverse_url(self, reverse_args=None, **kwargs):
reverse_args = reverse_args or {}
reverse_args.update({
'block_id': self.video.location.block_id,
'lang': kwargs.get('lang', 'en'),
})
return super(TestTranscriptsDetail, self).reverse_url(reverse_args, **kwargs)
def test_incorrect_language(self):
self.login_and_enroll()
self.api_response(expected_response_code=404, lang='pl')
def test_transcript_with_unicode_file_name(self):
self.video = self._create_video_with_subs(custom_subid=u'你好')
self.login_and_enroll()
self.api_response(expected_response_code=200, lang='en')
| hamzehd/edx-platform | lms/djangoapps/mobile_api/video_outlines/tests.py | Python | agpl-3.0 | 33,407 |
# -*- coding: utf-8 -*-
{
'name': 'Product status at website shop',
'version': '1.0.1',
'author': 'IT-Projects LLC, Ivan Yelizariev',
'license': 'GPL-3',
'category': 'eCommerce',
'website': 'https://yelizariev.github.io',
'depends': ['website_sale', 'stock'],
'data': [
'website_sale_stock_status_views.xml',
'website_sale_stock_status_data.xml',
],
'installable': True
}
| ilmir-k/website-addons | website_sale_stock_status/__openerp__.py | Python | lgpl-3.0 | 428 |
"""
This page is in the table of contents.
Craft is a script to access the plugins which craft a gcode file.
The plugin buttons which are commonly used are bolded and the ones which are rarely used have normal font weight.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_craft
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import os
import sys
__author__ = 'Enrique Perez ([email protected])'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def addSubmenus( menu, pluginFileName, pluginFolderPath, pluginPath ):
"Add a tool plugin menu."
submenu = settings.Tkinter.Menu( menu, tearoff = 0 )
menu.add_cascade( label = pluginFileName.capitalize(), menu = submenu )
settings.ToolDialog().addPluginToMenu( submenu, pluginPath )
submenu.add_separator()
submenuFileNames = archive.getPluginFileNamesFromDirectoryPath( pluginFolderPath )
for submenuFileName in submenuFileNames:
settings.ToolDialog().addPluginToMenu( submenu, os.path.join( pluginFolderPath, submenuFileName ) )
def addToCraftMenu( menu ):
"Add a craft plugin menu."
settings.ToolDialog().addPluginToMenu(menu, archive.getUntilDot(archive.getSkeinforgePluginsPath('craft.py')))
menu.add_separator()
directoryPath = skeinforge_craft.getPluginsDirectoryPath()
directoryFolders = settings.getFolders(directoryPath)
pluginFileNames = skeinforge_craft.getPluginFileNames()
for pluginFileName in pluginFileNames:
pluginFolderName = pluginFileName + '_plugins'
pluginPath = os.path.join( directoryPath, pluginFileName )
if pluginFolderName in directoryFolders:
addSubmenus( menu, pluginFileName, os.path.join( directoryPath, pluginFolderName ), pluginPath )
else:
settings.ToolDialog().addPluginToMenu( menu, pluginPath )
def addToMenu( master, menu, repository, window ):
"Add a tool plugin menu."
CraftMenuSaveListener( menu, window )
def getNewRepository():
'Get new repository.'
return skeinforge_craft.CraftRepository()
def writeOutput(fileName):
"Craft a gcode file."
return skeinforge_craft.writeOutput(fileName)
class CraftMenuSaveListener:
"A class to update a craft menu."
def __init__( self, menu, window ):
"Set the menu."
self.menu = menu
addToCraftMenu( menu )
euclidean.addElementToListDictionaryIfNotThere( self, window, settings.globalProfileSaveListenerListTable )
def save(self):
"Profile has been saved and profile menu should be updated."
settings.deleteMenuItems( self.menu )
addToCraftMenu( self.menu )
class CraftRadioButtonsSaveListener:
"A class to update the craft radio buttons."
def addToDialog( self, gridPosition ):
"Add this to the dialog."
euclidean.addElementToListDictionaryIfNotThere( self, self.repository.repositoryDialog, settings.globalProfileSaveListenerListTable )
self.gridPosition = gridPosition.getCopy()
self.gridPosition.increment()
self.gridPosition.row = gridPosition.rowStart
self.setRadioButtons()
def getFromRadioPlugins( self, radioPlugins, repository ):
"Initialize."
self.name = 'CraftRadioButtonsSaveListener'
self.radioPlugins = radioPlugins
self.repository = repository
repository.displayEntities.append(self)
return self
def save(self):
"Profile has been saved and craft radio plugins should be updated."
self.setRadioButtons()
def setRadioButtons(self):
"Profile has been saved and craft radio plugins should be updated."
craftSequence = skeinforge_profile.getCraftTypePluginModule().getCraftSequence()
gridPosition = self.gridPosition.getCopy()
maximumValue = False
activeRadioPlugins = []
for radioPlugin in self.radioPlugins:
if radioPlugin.name in craftSequence:
activeRadioPlugins.append( radioPlugin )
radioPlugin.incrementGridPosition(gridPosition)
maximumValue = max( radioPlugin.value, maximumValue )
else:
radioPlugin.radiobutton.grid_remove()
if not maximumValue:
selectedRadioPlugin = settings.getSelectedRadioPlugin( self.repository.importantFileNames + [ activeRadioPlugins[0].name ], activeRadioPlugins ).setSelect()
self.repository.pluginFrame.update()
def main():
"Display the craft dialog."
if len(sys.argv) > 1:
settings.startMainLoopFromWindow(writeOutput(' '.join(sys.argv[1 :])))
else:
settings.startMainLoopFromConstructor(getNewRepository())
if __name__ == "__main__":
main()
| AlexeyKruglov/Skeinforge-fabmetheus | skeinforge_application/skeinforge_plugins/craft.py | Python | agpl-3.0 | 4,781 |
import json
from abc import ABCMeta
from collections import defaultdict
from django.template.loader import render_to_string
from django.utils import six
from django.utils.encoding import force_text
from django.utils.functional import Promise
from cms.constants import RIGHT, LEFT, REFRESH_PAGE, URL_CHANGE
class ItemSearchResult(object):
def __init__(self, item, index):
self.item = item
self.index = index
def __add__(self, other):
return ItemSearchResult(self.item, self.index + other)
def __sub__(self, other):
return ItemSearchResult(self.item, self.index - other)
def __int__(self):
return self.index
def may_be_lazy(thing):
if isinstance(thing, Promise):
return thing._proxy____args[0]
else:
return thing
class ToolbarAPIMixin(six.with_metaclass(ABCMeta)):
REFRESH_PAGE = REFRESH_PAGE
URL_CHANGE = URL_CHANGE
LEFT = LEFT
RIGHT = RIGHT
def __init__(self):
self.items = []
self.menus = {}
self._memo = defaultdict(list)
def _memoize(self, item):
self._memo[item.__class__].append(item)
def _unmemoize(self, item):
self._memo[item.__class__].remove(item)
def _item_position(self, item):
return self.items.index(item)
def _add_item(self, item, position):
if position is not None:
self.items.insert(position, item)
else:
self.items.append(item)
def _remove_item(self, item):
if item in self.items:
self.items.remove(item)
else:
raise KeyError("Item %r not found" % item)
def get_item_count(self):
return len(self.items)
def add_item(self, item, position=None):
if not isinstance(item, BaseItem):
raise ValueError("Items must be subclasses of cms.toolbar.items.BaseItem, %r isn't" % item)
if isinstance(position, ItemSearchResult):
position = position.index
elif isinstance(position, BaseItem):
position = self._item_position(position)
elif not (position is None or isinstance(position, (int,))):
raise ValueError("Position must be None, an integer, an item or an ItemSearchResult, got %r instead" % position)
self._add_item(item, position)
self._memoize(item)
return item
def find_items(self, item_type, **attributes):
results = []
attr_items = attributes.items()
notfound = object()
for candidate in self._memo[item_type]:
if all(may_be_lazy(getattr(candidate, key, notfound)) == value for key, value in attr_items):
results.append(ItemSearchResult(candidate, self._item_position(candidate)))
return results
def find_first(self, item_type, **attributes):
try:
return self.find_items(item_type, **attributes)[0]
except IndexError:
return None
#
# This will only work if it is used to determine the insert position for
# all items in the same menu.
#
def get_alphabetical_insert_position(self, new_menu_name, item_type,
default=0):
results = self.find_items(item_type)
# No items yet? Use the default value provided
if not len(results):
return default
last_position = 0
for result in sorted(results, key=lambda x: x.item.name):
if result.item.name > new_menu_name:
return result.index
if result.index > last_position:
last_position = result.index
else:
return last_position + 1
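    # Hedged usage sketch (illustrative, not django-cms documentation):
    #
    #   position = toolbar.get_alphabetical_insert_position('Blog', SubMenu)
    #   toolbar.get_or_create_menu('blog', 'Blog', position=position)
    #
    # All alphabetized entries must be inserted through the same parent menu
    # for the computed positions to remain consistent (see note above).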
def remove_item(self, item):
self._remove_item(item)
self._unmemoize(item)
def add_sideframe_item(self, name, url, active=False, disabled=False,
extra_classes=None, on_close=None, side=LEFT, position=None):
item = SideframeItem(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
on_close=on_close,
side=side,
)
self.add_item(item, position=position)
return item
def add_modal_item(self, name, url, active=False, disabled=False,
extra_classes=None, on_close=REFRESH_PAGE, side=LEFT, position=None):
item = ModalItem(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
on_close=on_close,
side=side,
)
self.add_item(item, position=position)
return item
def add_link_item(self, name, url, active=False, disabled=False,
extra_classes=None, side=LEFT, position=None):
item = LinkItem(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
side=side
)
self.add_item(item, position=position)
return item
def add_ajax_item(self, name, action, active=False, disabled=False,
extra_classes=None, data=None, question=None,
side=LEFT, position=None, on_success=None):
item = AjaxItem(name, action, self.csrf_token,
active=active,
disabled=disabled,
extra_classes=extra_classes,
data=data,
question=question,
side=side,
on_success=on_success,
)
self.add_item(item, position=position)
return item
class BaseItem(six.with_metaclass(ABCMeta)):
template = None
def __init__(self, side=LEFT):
self.side = side
@property
def right(self):
return self.side is RIGHT
def render(self):
return render_to_string(self.template, self.get_context())
def get_context(self):
return {}
class TemplateItem(BaseItem):
def __init__(self, template, extra_context=None, side=LEFT):
super(TemplateItem, self).__init__(side)
self.template = template
self.extra_context = extra_context
def get_context(self):
if self.extra_context:
return self.extra_context
return {}
class SubMenu(ToolbarAPIMixin, BaseItem):
template = "cms/toolbar/items/menu.html"
sub_level = True
active = False
def __init__(self, name, csrf_token, side=LEFT):
ToolbarAPIMixin.__init__(self)
BaseItem.__init__(self, side)
self.name = name
self.csrf_token = csrf_token
def __repr__(self):
return '<Menu:%s>' % force_text(self.name)
def add_break(self, identifier=None, position=None):
item = Break(identifier)
self.add_item(item, position=position)
return item
def get_items(self):
return self.items
def get_context(self):
return {
'active': self.active,
'items': self.get_items(),
'title': self.name,
'sub_level': self.sub_level
}
class Menu(SubMenu):
sub_level = False
def get_or_create_menu(self, key, verbose_name, side=LEFT, position=None):
if key in self.menus:
return self.menus[key]
menu = SubMenu(verbose_name, self.csrf_token, side=side)
self.menus[key] = menu
self.add_item(menu, position=position)
return menu
class LinkItem(BaseItem):
template = "cms/toolbar/items/item_link.html"
def __init__(self, name, url, active=False, disabled=False, extra_classes=None, side=LEFT):
super(LinkItem, self).__init__(side)
self.name = name
self.url = url
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
def __repr__(self):
return '<LinkItem:%s>' % force_text(self.name)
def get_context(self):
return {
'url': self.url,
'name': self.name,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
}
class FrameItem(BaseItem):
# Be sure to define the correct template
def __init__(self, name, url, active=False, disabled=False,
extra_classes=None, on_close=None, side=LEFT):
super(FrameItem, self).__init__(side)
self.name = "%s..." % force_text(name)
self.url = url
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
self.on_close = on_close
def __repr__(self):
# Should be overridden
return '<FrameItem:%s>' % force_text(self.name)
def get_context(self):
return {
'url': self.url,
'name': self.name,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
'on_close': self.on_close,
}
class SideframeItem(FrameItem):
template = "cms/toolbar/items/item_sideframe.html"
def __repr__(self):
return '<SideframeItem:%s>' % force_text(self.name)
class ModalItem(FrameItem):
template = "cms/toolbar/items/item_modal.html"
def __repr__(self):
return '<ModalItem:%s>' % force_text(self.name)
class AjaxItem(BaseItem):
template = "cms/toolbar/items/item_ajax.html"
def __init__(self, name, action, csrf_token, data=None, active=False,
disabled=False, extra_classes=None,
question=None, side=LEFT, on_success=None):
super(AjaxItem, self).__init__(side)
self.name = name
self.action = action
self.active = active
self.disabled = disabled
self.csrf_token = csrf_token
self.data = data or {}
self.extra_classes = extra_classes or []
self.question = question
self.on_success = on_success
def __repr__(self):
return '<AjaxItem:%s>' % force_text(self.name)
def get_context(self):
data = {}
data.update(self.data)
data['csrfmiddlewaretoken'] = self.csrf_token
data = json.dumps(data)
return {
'action': self.action,
'name': self.name,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
'data': data,
'question': self.question,
'on_success': self.on_success
}
class Break(BaseItem):
template = "cms/toolbar/items/break.html"
def __init__(self, identifier=None):
self.identifier = identifier
class BaseButton(six.with_metaclass(ABCMeta)):
template = None
def render(self):
return render_to_string(self.template, self.get_context())
def get_context(self):
return {}
class Button(BaseButton):
template = "cms/toolbar/items/button.html"
def __init__(self, name, url, active=False, disabled=False,
extra_classes=None):
self.name = name
self.url = url
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
def __repr__(self):
return '<Button:%s>' % force_text(self.name)
def get_context(self):
return {
'name': self.name,
'url': self.url,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
}
class ModalButton(Button):
template = "cms/toolbar/items/button_modal.html"
def __init__(self, name, url, active=False, disabled=False, extra_classes=None, on_close=None):
self.name = name
self.url = url
self.active = active
self.disabled = disabled
self.extra_classes = extra_classes or []
self.on_close = on_close
def __repr__(self):
return '<ModalButton:%s>' % force_text(self.name)
def get_context(self):
return {
'name': self.name,
'url': self.url,
'active': self.active,
'disabled': self.disabled,
'extra_classes': self.extra_classes,
'on_close': self.on_close,
}
class SideframeButton(ModalButton):
template = "cms/toolbar/items/button_sideframe.html"
def __repr__(self):
return '<SideframeButton:%s>' % force_text(self.name)
class ButtonList(BaseItem):
template = "cms/toolbar/items/button_list.html"
def __init__(self, identifier=None, extra_classes=None, side=LEFT):
super(ButtonList, self).__init__(side)
self.extra_classes = extra_classes or []
self.buttons = []
self.identifier = identifier
def __repr__(self):
return '<ButtonList:%s>' % self.identifier
def add_item(self, item):
if not isinstance(item, Button):
raise ValueError("Expected instance of cms.toolbar.items.Button, got %r instead" % item)
self.buttons.append(item)
def add_button(self, name, url, active=False, disabled=False,
extra_classes=None):
item = Button(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes
)
self.buttons.append(item)
return item
def add_modal_button(self, name, url, active=False, disabled=False, extra_classes=None, on_close=REFRESH_PAGE):
item = ModalButton(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
on_close=on_close,
)
self.buttons.append(item)
return item
def add_sideframe_button(self, name, url, active=False, disabled=False, extra_classes=None, on_close=None):
item = SideframeButton(name, url,
active=active,
disabled=disabled,
extra_classes=extra_classes,
on_close=on_close,
)
self.buttons.append(item)
return item
def get_context(self):
return {
'buttons': self.buttons,
'extra_classes': self.extra_classes
}
| Venturi/cms | env/lib/python2.7/site-packages/cms/toolbar/items.py | Python | gpl-2.0 | 14,513 |
#!/usr/bin/env python
from golbase import GameOfLifeBase, Cell
import random
import time
import logging
class GameOfLifeBlockSwitch(GameOfLifeBase):
def __init__(self, *args, **kwargs):
super(GameOfLifeBlockSwitch, self).__init__(*args, **kwargs)
self.toroidal = True
def run(self):
self.initializeCells()
y = 15
x = 8
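        # The ten live cells below (offsets relative to (x, y)) look like the
        # classic 10-cell infinite-growth seed, which evolves into a
        # block-laying switch engine -- hence the module name.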
self.cells[x + 11][y + 6].alive = True
self.cells[x + 13][y + 6].alive = True
self.cells[x + 13][y + 5].alive = True
self.cells[x + 15][y + 4].alive = True
self.cells[x + 15][y + 3].alive = True
self.cells[x + 15][y + 2].alive = True
self.cells[x + 17][y + 3].alive = True
self.cells[x + 17][y + 2].alive = True
self.cells[x + 17][y + 1].alive = True
self.cells[x + 18][y + 2].alive = True
while True:
self.drawCells()
self.canvas = self.matrix.SwapOnVSync(self.canvas)
time.sleep(0.3)
self.evolve()
# Main function
if __name__ == "__main__":
gol = GameOfLifeBlockSwitch()
if (not gol.process()):
gol.print_help()
| yanigisawa/coffee-scale | pubsub/animation/gol-block-switch.py | Python | mit | 1,163 |
from django.shortcuts import get_object_or_404, render
from .models import Student
def index(request):
latest_students = Student.objects.all()
context = {'latest_students': latest_students}
return render(request, 'students/index.html', context)
def detail(request, student_id):
latest_students = Student.objects.all()
student = get_object_or_404(Student, pk=student_id)
context = {'latest_students': latest_students, 'student': student}
return render(request, 'students/detail.html', context)
| mimoralea/gtosa | osa/students/views.py | Python | gpl-2.0 | 526 |
from ...language.visitor import Visitor
class ValidationRule(Visitor):
__slots__ = 'context',
def __init__(self, context):
self.context = context
| wandb/client | wandb/vendor/graphql-core-1.1/wandb_graphql/validation/rules/base.py | Python | mit | 165 |
# Copyright (c) 2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents a Virtual Private Cloud.
"""
from boto.ec2.ec2object import EC2Object
class VPC(EC2Object):
def __init__(self, connection=None):
EC2Object.__init__(self, connection)
self.id = None
self.dhcp_options_id = None
self.state = None
self.cidr_block = None
def __repr__(self):
return 'VPC:%s' % self.id
def endElement(self, name, value, connection):
if name == 'vpcId':
self.id = value
elif name == 'dhcpOptionsId':
self.dhcp_options_id = value
elif name == 'state':
self.state = value
elif name == 'cidrBlock':
self.cidr_block = value
else:
setattr(self, name, value)
def delete(self):
return self.connection.delete_vpc(self.id)
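# Hedged usage sketch (not part of this module; the connection setup and the
# CIDR block below are illustrative):
#
#   from boto.vpc import VPCConnection
#   c = VPCConnection()
#   vpc = c.create_vpc('10.0.0.0/24')
#   print vpc.id, vpc.state, vpc.cidr_block
#   vpc.delete()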
| jxta/cc | vendor/boto/boto/vpc/vpc.py | Python | apache-2.0 | 1,921 |
import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import pyqtgraph as pg
import numpy as np
class SRGraph(QWidget):
def __init__(self):
super(SRGraph, self).__init__()
#self.setFixedSize(500,375)
#self.labelFont = QFont("Arial", 10)
# Initialize the RGB values
self.redVal = 0
self.greenVal = 0
self.blueVal = 0
self.whiteVal = 0
# Build the GUI
self.home()
def home(self):
self.grid = QGridLayout()
self.grid.setSpacing(10)
'''
# Add sent button
saveButton = QPushButton("Save") # Create the button
self.grid.addWidget(saveButton,5,4) # Add to Gui
saveButton.clicked.connect(self.saveSettings) # Add button functionality
# Add IP address textbox
ipLabel = QLabel("IP Address")
ipLabel.setAlignment(Qt.AlignRight) # Set Alignment
#ipLabel.setFont(self.labelFont) # Set Font
ipBox = QLineEdit()
ipBox.setInputMask("000.000.000.000;0")
ipBox.setMaximumWidth(92)
ipBox.setAlignment(Qt.AlignLeft)
self.grid.addWidget(ipLabel,1,1)
self.grid.addWidget(ipBox,1,2)
# Add LED Pulse Functionality Options
self.ledPulseEnable = QRadioButton("Pulse LEDs")
#self.ledPulseEnable.setFont(self.labelFont)
self.ledSolidEnable = QRadioButton("Solid LEDs")
#self.ledSolidEnable.setFont(self.labelFont)
self.radioButtonGroup = QButtonGroup()
self.radioButtonGroup.addButton(self.ledPulseEnable)
self.radioButtonGroup.addButton(self.ledSolidEnable)
self.ledPulseSpeed = QDoubleSpinBox()
self.ledPulseSpeed.setMinimum(0)
self.ledPulseSpeed.setDisabled(True)
self.ledPulseLabel = QLabel("LED Pulse Time (Seconds)")
#self.ledPulseLabel.setFont(self.labelFont)
self.ledPulseLabel.setDisabled(True)
#self.radioButtonGroup.buttonClicked.connect(self.ledPulseState)
#self.radioButtonGroup.buttonClicked.connect(self.ledPulseState)
self.ledPulseEnable.toggled.connect(self.ledPulseState)
#self.connect(self.ledPulseEnable, SIGNAL"clicked()")
self.grid.addWidget(self.ledPulseEnable,3,0)
self.grid.addWidget(self.ledSolidEnable,2,0)
self.grid.addWidget(self.ledPulseSpeed,3,1)
self.grid.addWidget(self.ledPulseLabel,3,2)
'''
x = np.random.normal(size=1000)
y = np.random.normal(size=1000)
plotWidget = pg.PlotWidget()
plotWidget.plot(x, y)
self.grid.addWidget(plotWidget)
self.setLayout(self.grid) # Set Gui layout
# Enable/Disbale the LED Pulse settings
def ledPulseState(self, state):
self.ledPulseLabel.setEnabled(state)
self.ledPulseSpeed.setEnabled(state)
# Button press output
def saveSettings(self):
pass
| WubbaDuck/OverwatchTracker | graph.py | Python | gpl-3.0 | 3,048 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
| wallnerryan/quantum_migrate | quantum/rootwrap/__init__.py | Python | apache-2.0 | 685 |
# Project Euler 6: print the difference between the square of the sum and
# the sum of the squares of the first n natural numbers.
def squart_sum( n ):
sum1 = 0
sum2 = 0
for i in range( 1, n + 1 ):
sum1 = sum1 + i * i
for i in range( 1, n + 1 ):
sum2 = sum2 + i
sum2 = sum2 * sum2
print sum2 - sum1
squart_sum(10)
squart_sum(100)
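# Hedged cross-check (not in the original): the same difference has a closed
# form, since sum(i) = n*(n+1)/2 and sum(i**2) = n*(n+1)*(2*n+1)/6.
def squart_sum_closed_form( n ):
    sum_n = n * ( n + 1 ) / 2
    sum_sq = n * ( n + 1 ) * ( 2 * n + 1 ) / 6
    print sum_n * sum_n - sum_sq
squart_sum_closed_form(10) # 2640
squart_sum_closed_form(100) # 25164150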
| ringsd/projecteuler | python/006.py | Python | mit | 254 |
from __future__ import absolute_import
import uncertainties as U
from .. import asrootpy
__all__ = [
'as_ufloat',
'correlated_values',
]
def as_ufloat(roorealvar):
"""
Cast a `RooRealVar` to an `uncertainties.ufloat`
"""
if isinstance(roorealvar, (U.AffineScalarFunc, U.Variable)):
return roorealvar
return U.ufloat((roorealvar.getVal(), roorealvar.getError()))
def correlated_values(param_names, roofitresult):
"""
Return symbolic values from a `RooFitResult` taking into account covariance
This is useful for numerically computing the uncertainties for expressions
using correlated values arising from a fit.
Parameters
----------
param_names: list of strings
A list of parameters to extract from the result. The order of the names
is the order of the return value.
roofitresult : RooFitResult
A RooFitResult from a fit.
Returns
-------
list of correlated values from the uncertainties package.
Examples
--------
.. sourcecode:: python
# Fit a pdf to a histogram
pdf = some_roofit_pdf_with_variables("f(x, a, b, c)")
fitresult = pdf.fitTo(histogram, ROOT.RooFit.Save())
a, b, c = correlated_values(["a", "b", "c"], fitresult)
# Arbitrary math expression according to what the `uncertainties`
# package supports, automatically computes correct error propagation
sum_value = a + b + c
value, error = sum_value.nominal_value, sum_value.std_dev()
"""
pars = roofitresult.floatParsFinal()
#pars.Print()
pars = [pars[i] for i in range(pars.getSize())]
parnames = [p.GetName() for p in pars]
values = [(p.getVal(), p.getError()) for p in pars]
#values = [as_ufloat(p) for p in pars]
matrix = asrootpy(roofitresult.correlationMatrix()).to_numpy()
uvalues = U.correlated_values_norm(values, matrix.tolist())
uvalues = dict((n, v) for n, v in zip(parnames, uvalues))
    missing = [n for n in param_names if n not in uvalues]
    assert not missing, (
        "names {0} aren't in parameter list {1}".format(missing, parnames))
# Return a tuple in the order it was asked for
return tuple(uvalues[n] for n in param_names)
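# Hedged, ROOT-free sketch (not part of rootpy) of the correlated error
# propagation that correlated_values() relies on; the numbers are invented
# for the demo and only the `uncertainties` package is assumed.
if __name__ == '__main__':
    demo_values = [(1.0, 0.1), (2.0, 0.2)]  # (nominal, std dev) per parameter
    demo_corr = [[1.0, -0.5], [-0.5, 1.0]]  # correlation matrix from a "fit"
    a, b = U.correlated_values_norm(demo_values, demo_corr)
    total = a + b
    # with -50% correlation the combined error is smaller than the naive
    # quadrature sum sqrt(0.1**2 + 0.2**2)
    print('%.3f +/- %.3f' % (total.nominal_value, total.std_dev()))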
| ndawe/rootpy | rootpy/stats/correlated_values.py | Python | bsd-3-clause | 2,219 |
#!/usr/bin/env python
from iris_sdk.models.maps.base_map import BaseMap
class LinksMap(BaseMap):
first = None
next = None | scottbarstow/iris-python | iris_sdk/models/maps/links.py | Python | mit | 132 |
from setuptools import setup
import gi
gi.require_version('Gtk', '3.0')
setup(name='nvim-pygtk3',
version='0.3.4',
description='PyGTK3 frontend to Neovim with some visual GUI elements.',
long_description=open('README.md').read(),
author='R. Liang',
author_email='[email protected]',
url='https://github.com/rliang/nvim-pygtk3',
license='MIT',
keywords='neovim pygtk3 gtk3',
install_requires=['neovim>=0.1.10'],
packages=['nvim_pygtk3'],
package_data={'nvim_pygtk3': ['runtime/*.vim',
'runtime/**/*.vim',
'runtime/**/**/*.vim']},
entry_points={'gui_scripts': ['nvim-pygtk3=nvim_pygtk3:main']},
data_files=[('share/applications',
['share/applications/nvim-pygtk3.desktop',
'share/applications/nvim-pygtk3-term.desktop']),
('share/icons/hicolor/scalable/apps',
['share/icons/hicolor/scalable/apps/neovim.svg'])])
| rliang/nvim-pygtk3 | setup.py | Python | mit | 1,042 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
print ""
This is a test
This is a test
This is a test
This is a test
This is a test
This is a test
This is a test
This is a test
This is a test
This is a test
This is a test
| louistin/fullstack | Python/io/public_module.py | Python | mit | 218 |
algorithm = "hagedorn"
potential = {}
potential["potential"] = [["1/4 * sigma * x**4", 0 ],
[0 , "1/4 * sigma * x**4"]]
potential["defaults"] = {"sigma":"0.05"}
T = 6
dt = 0.01
eps = 0.1
f = 3.0
ngn = 4096
basis_size = 64
P = 2.0j
Q = 0.5
S = 0.0
parameters = [ (P, Q, S, -0.5, 2.0), (P, Q, S, -0.5, 2.0) ]
coefficients = [[(0, 1.0)], [(0, 1.0)]]
leading_component = 0
write_nth = 2
| WaveBlocks/WaveBlocks | testsuite/basic_diagonal/test_quartic_oscillator_two_packet.py | Python | bsd-3-clause | 455 |
#!/usr/bin/env python
import os
import re
from yunohost.settings import settings_get
from yunohost.diagnosis import Diagnoser
from yunohost.regenconf import _get_regenconf_infos, _calculate_hash
from moulinette.utils.filesystem import read_file
class RegenconfDiagnoser(Diagnoser):
id_ = os.path.splitext(os.path.basename(__file__))[0].split("-")[1]
cache_duration = 300
dependencies = []
def run(self):
regenconf_modified_files = list(self.manually_modified_files())
if not regenconf_modified_files:
yield dict(
meta={"test": "regenconf"},
status="SUCCESS",
summary="diagnosis_regenconf_allgood",
)
else:
for f in regenconf_modified_files:
yield dict(
meta={
"test": "regenconf",
"category": f["category"],
"file": f["path"],
},
status="WARNING",
summary="diagnosis_regenconf_manually_modified",
details=["diagnosis_regenconf_manually_modified_details"],
)
if (
any(f["path"] == "/etc/ssh/sshd_config" for f in regenconf_modified_files)
and os.system(
"grep -q '^ *AllowGroups\\|^ *AllowUsers' /etc/ssh/sshd_config"
)
!= 0
):
yield dict(
meta={"test": "sshd_config_insecure"},
status="ERROR",
summary="diagnosis_sshd_config_insecure",
)
# Check consistency between actual ssh port in sshd_config vs. setting
ssh_port_setting = settings_get("security.ssh.port")
ssh_port_line = re.findall(
r"\bPort *([0-9]{2,5})\b", read_file("/etc/ssh/sshd_config")
)
if len(ssh_port_line) == 1 and int(ssh_port_line[0]) != ssh_port_setting:
yield dict(
meta={"test": "sshd_config_port_inconsistency"},
status="WARNING",
summary="diagnosis_sshd_config_inconsistent",
details=["diagnosis_sshd_config_inconsistent_details"],
)
def manually_modified_files(self):
for category, infos in _get_regenconf_infos().items():
for path, hash_ in infos["conffiles"].items():
if hash_ != _calculate_hash(path):
yield {"path": path, "category": category}
def main(args, env, loggers):
return RegenconfDiagnoser(args, env, loggers).diagnose()
| YunoHost/moulinette-yunohost | data/hooks/diagnosis/70-regenconf.py | Python | agpl-3.0 | 2,610 |
from django.db import models
from jsonfield.fields import JSONField
from accounts.models import JournalWriter
class Diary(models.Model):
writer = models.ForeignKey(JournalWriter, related_name='diaries')
title = models.CharField(max_length=100, null=True)
content = models.TextField(null=True)
place = models.CharField(max_length=100, null=True)
created_on = models.BigIntegerField()
updated_on = models.BigIntegerField()
extra_params = JSONField(default={})
| subramaniank/journal | journal/journal/diaries/models.py | Python | mit | 491 |
from __future__ import print_function
import os
import sys
import numpy as np
import matplotlib.pyplot as plt
from pandas import DataFrame
from pandas.util.testing import set_trace
from pandas import compat
dirs = []
names = []
lengths = []
if len(sys.argv) > 1:
loc = sys.argv[1]
else:
loc = '.'
walked = os.walk(loc)
def _should_count_file(path):
return path.endswith('.py') or path.endswith('.pyx')
def _is_def_line(line):
"""def/cdef/cpdef, but not `cdef class`"""
return (line.endswith(':') and not 'class' in line.split() and
(line.startswith('def ') or
line.startswith('cdef ') or
line.startswith('cpdef ') or
' def ' in line or ' cdef ' in line or ' cpdef ' in line))
class LengthCounter(object):
"""
should add option for subtracting nested function lengths??
"""
def __init__(self, lines):
self.lines = lines
self.pos = 0
self.counts = []
self.n = len(lines)
def get_counts(self):
self.pos = 0
self.counts = []
while self.pos < self.n:
line = self.lines[self.pos]
self.pos += 1
if _is_def_line(line):
level = _get_indent_level(line)
self._count_function(indent_level=level)
return self.counts
def _count_function(self, indent_level=1):
indent = ' ' * indent_level
def _end_of_function(line):
return (line != '' and
not line.startswith(indent) and
not line.startswith('#'))
start_pos = self.pos
while self.pos < self.n:
line = self.lines[self.pos]
if _end_of_function(line):
self._push_count(start_pos)
return
self.pos += 1
if _is_def_line(line):
self._count_function(indent_level=indent_level + 1)
# end of file
self._push_count(start_pos)
def _push_count(self, start_pos):
func_lines = self.lines[start_pos:self.pos]
if len(func_lines) > 300:
set_trace()
# remove blank lines at end
while len(func_lines) > 0 and func_lines[-1] == '':
func_lines = func_lines[:-1]
        # remove docstrings and comments
        clean_lines = []
        in_docstring = False
        for line in func_lines:
            line = line.strip()
            if in_docstring:
                # skip docstring contents and the closing triple-quote
                if _is_triplequote(line):
                    in_docstring = False
                continue
            if line.startswith('#'):
                continue
            if _is_triplequote(line):
                in_docstring = True
                continue
            clean_lines.append(line)
        # the recorded count is the raw length (blank lines trimmed at the
        # end only); clean_lines is available if cleaned counts are wanted
        self.counts.append(len(func_lines))
def _get_indent_level(line):
level = 0
while line.startswith(' ' * level):
level += 1
return level
def _is_triplequote(line):
return line.startswith('"""') or line.startswith("'''")
def _get_file_function_lengths(path):
lines = [x.rstrip() for x in open(path).readlines()]
counter = LengthCounter(lines)
return counter.get_counts()
# def test_get_function_lengths():
text = """
class Foo:
def foo():
def bar():
a = 1
b = 2
c = 3
foo = 'bar'
def x():
a = 1
b = 3
c = 7
pass
"""
expected = [5, 8, 7]
lines = [x.rstrip() for x in text.splitlines()]
counter = LengthCounter(lines)
result = counter.get_counts()
assert(result == expected)
def doit():
for directory, _, files in walked:
print(directory)
for path in files:
if not _should_count_file(path):
continue
full_path = os.path.join(directory, path)
print(full_path)
lines = len(open(full_path).readlines())
dirs.append(directory)
names.append(path)
lengths.append(lines)
result = DataFrame({'dirs': dirs, 'names': names,
'lengths': lengths})
def doit2():
counts = {}
for directory, _, files in walked:
print(directory)
for path in files:
if not _should_count_file(path) or path.startswith('test_'):
continue
full_path = os.path.join(directory, path)
counts[full_path] = _get_file_function_lengths(full_path)
return counts
counts = doit2()
# counts = _get_file_function_lengths('pandas/tests/test_series.py')
all_counts = []
for k, v in compat.iteritems(counts):
all_counts.extend(v)
all_counts = np.array(all_counts)
fig = plt.figure(figsize=(10, 5))
ax = fig.add_subplot(111)
ax.hist(all_counts, bins=100)
n = len(all_counts)
nmore = (all_counts > 50).sum()
ax.set_title('%s function lengths, n=%d' % ('pandas', n))
ax.set_ylabel('N functions')
ax.set_xlabel('Function length')
ax.text(100, 300, '%.3f%% with > 50 lines' % (100. * nmore / float(n)),
fontsize=18)
plt.show()
| jmmease/pandas | scripts/file_sizes.py | Python | bsd-3-clause | 4,949 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class PrivagalConfig(AppConfig):
name = 'privagal.core'
verbose_name = "Privagal"
def ready(self):
import privagal.core.checks # noqa
import privagal.core.signals.handlers # noqa
| ychab/privagal | privagal/core/apps.py | Python | bsd-3-clause | 311 |
# -*- coding: utf8 -*-
###
# Copyright (c) 2002-2004, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
import supybot.conf as conf
import supybot.plugin as plugin
import supybot.registry as registry
from supybot.utils.minisix import u
from . import plugin as Aka
class FunctionsTest(SupyTestCase):
def testFindBiggestDollar(self):
self.assertEqual(Aka.findBiggestDollar(''), 0)
self.assertEqual(Aka.findBiggestDollar('foo'), 0)
self.assertEqual(Aka.findBiggestDollar('$0'), 0)
self.assertEqual(Aka.findBiggestDollar('$1'), 1)
self.assertEqual(Aka.findBiggestDollar('$2'), 2)
self.assertEqual(Aka.findBiggestDollar('$2 $10'), 10)
self.assertEqual(Aka.findBiggestDollar('$3'), 3)
self.assertEqual(Aka.findBiggestDollar('$3 $2 $1'), 3)
self.assertEqual(Aka.findBiggestDollar('foo bar $1'), 1)
self.assertEqual(Aka.findBiggestDollar('foo $2 $1'), 2)
self.assertEqual(Aka.findBiggestDollar('foo $0 $1'), 1)
self.assertEqual(Aka.findBiggestDollar('foo $1 $3'), 3)
self.assertEqual(Aka.findBiggestDollar('$10 bar $1'), 10)
class AkaChannelTestCase(ChannelPluginTestCase):
plugins = ('Aka', 'Conditional', 'Filter', 'Math', 'Utilities',
'Format', 'Reply')
def testDoesNotOverwriteCommands(self):
# We don't have dispatcher commands anymore
#self.assertError('aka add aka "echo foo bar baz"')
self.assertError('aka add add "echo foo bar baz"')
self.assertError('aka add remove "echo foo bar baz"')
self.assertError('aka add lock "echo foo bar baz"')
self.assertError('aka add unlock "echo foo bar baz"')
def testAkaHelp(self):
self.assertNotError('aka add slashdot foo')
self.assertRegexp('help slashdot', "Alias for .*foo")
self.assertNotError('aka add nonascii echo éé')
self.assertRegexp('help nonascii', "Alias for .*echo éé")
def testShow(self):
self.assertNotError('aka add foo bar')
self.assertResponse('show foo', 'bar $*')
self.assertNotError('aka add "foo bar" baz')
self.assertResponse('show "foo bar"', 'baz $*')
def testRemove(self):
self.assertNotError('aka add foo echo bar')
self.assertResponse('foo', 'bar')
self.assertNotError('aka remove foo')
self.assertError('foo')
def testDollars(self):
self.assertNotError('aka add rot26 "rot13 [rot13 $1]"')
self.assertResponse('rot26 foobar', 'foobar')
def testMoreDollars(self):
self.assertNotError('aka add rev "echo $3 $2 $1"')
self.assertResponse('rev foo bar baz', 'baz bar foo')
def testAllArgs(self):
self.assertNotError('aka add swap "echo $2 $1 $*"')
self.assertResponse('swap 1 2 3 4 5', '2 1 3 4 5')
self.assertError('aka add foo "echo $1 @1 $*"')
self.assertNotError('aka add moo echo $1 $*')
self.assertError('moo')
self.assertResponse('moo foo', 'foo')
self.assertResponse('moo foo bar', 'foo bar')
self.assertNotError('aka add spam "echo [echo $*]"')
self.assertResponse('spam egg', 'egg')
self.assertResponse('spam egg bacon', 'egg bacon')
def testChannel(self):
self.assertNotError('aka add channel echo $channel')
self.assertResponse('aka channel', self.channel)
def testAddRemoveAka(self):
cb = self.irc.getCallback('Aka')
cb._add_aka('global', 'foobar', 'echo sbbone')
cb._db.lock_aka('global', 'foobar', 'evil_admin')
self.assertResponse('foobar', 'sbbone')
self.assertRegexp('aka list', 'foobar')
self.assertRaises(Aka.AkaError, cb._remove_aka, 'global', 'foobar')
cb._remove_aka('global', 'foobar', evenIfLocked=True)
self.assertNotRegexp('aka list', 'foobar')
self.assertError('foobar')
def testOptionalArgs(self):
self.assertNotError('aka add myrepr "repr @1"')
self.assertResponse('myrepr foo', '"foo"')
self.assertResponse('myrepr ""', '""')
def testNoExtraSpaces(self):
self.assertNotError('aka add foo "action takes $1\'s money"')
self.assertResponse('foo bar', '\x01ACTION takes bar\'s money\x01')
def testNoExtraQuotes(self):
self.assertNotError('aka add myre "echo s/$1/$2/g"')
self.assertResponse('myre foo bar', 's/foo/bar/g')
def testSimpleAkaWithoutArgsImpliesDollarStar(self):
self.assertNotError('aka add exo echo')
self.assertResponse('exo foo bar baz', 'foo bar baz')
def testChannelPriority(self):
self.assertNotError('aka add spam "echo foo"')
self.assertNotError('aka add --channel %s spam "echo bar"' %
self.channel)
self.assertResponse('spam', 'bar')
self.assertNotError('aka add --channel %s egg "echo baz"' %
self.channel)
self.assertNotError('aka add egg "echo qux"')
self.assertResponse('egg', 'baz')
def testComplicatedNames(self):
self.assertNotError(u('aka add café "echo coffee"'))
self.assertResponse(u('café'), 'coffee')
self.assertNotError('aka add "foo bar" "echo spam"')
self.assertResponse('foo bar', 'spam')
self.assertNotError('aka add "foo" "echo egg"')
self.assertResponse('foo', 'egg')
# You could expect 'spam' here, but in fact, this is dangerous.
# Just imagine this session:
# <evil_user> aka add "echo foo" quit
# <bot> The operation succeeded.
# ...
# <owner> echo foo
# * bot has quit
self.assertResponse('foo bar', 'egg')
def testNoOverride(self):
self.assertNotError('aka add "echo foo" "echo bar"')
self.assertResponse('echo foo', 'foo')
self.assertNotError('aka add foo "echo baz"')
self.assertNotError('aka add "foo bar" "echo qux"')
self.assertResponse('foo bar', 'baz')
def testRecursivity(self):
self.assertNotError('aka add fact '
r'"cif [nceq $1 0] \"echo 1\" '
r'\"calc $1 * [fact [calc $1 - 1]]\""')
self.assertResponse('fact 4', '24')
self.assertRegexp('fact 50', 'more nesting')
def testDollarStarNesting(self):
self.assertNotError('aka add alias aka $*')
self.assertNotError('alias add a+ aka add $*')
class AkaTestCase(PluginTestCase):
plugins = ('Aka', 'Alias', 'User', 'Utilities')
def testMaximumLength(self):
self.assertNotError('aka add "foo bar baz qux quux" "echo test"')
self.assertError('aka add "foo bar baz qux quux corge" "echo test"')
def testAkaLockedHelp(self):
self.assertNotError('register evil_admin foo')
self.assertNotError('aka add slashdot foo')
self.assertRegexp('help aka slashdot', "Alias for .*foo")
self.assertNotRegexp('help aka slashdot', 'Locked by')
self.assertNotError('aka lock slashdot')
self.assertRegexp('help aka slashdot', 'Locked by evil_admin')
self.assertNotError('aka unlock slashdot')
self.assertNotRegexp('help aka slashdot', 'Locked by')
def testAliasImport(self):
self.assertNotError('alias add foo "echo bar"')
self.assertNotError(u('alias add baz "echo café"'))
self.assertNotError('aka add qux "echo quux"')
self.assertResponse('alias foo', 'bar')
self.assertResponse('alias baz', 'café')
self.assertRegexp('aka foo', 'there is no command named')
self.assertResponse('aka qux', 'quux')
self.assertNotError('aka importaliasdatabase')
self.assertRegexp('alias foo', 'there is no command named')
self.assertResponse('aka foo', 'bar')
self.assertResponse('aka baz', 'café')
self.assertResponse('aka qux', 'quux')
self.assertNotError('alias add foo "echo test"')
self.assertNotError('alias add spam "echo egg"')
self.assertNotError('alias lock spam')
self.assertRegexp('aka importaliasdatabase',
r'the 1 following command: foo \(This Aka already exists.\)$')
self.assertResponse('aka foo', 'bar')
self.assertResponse('alias foo', 'test')
self.assertRegexp('alias spam', 'there is no command named')
self.assertResponse('aka spam', 'egg')
def testList(self):
self.assertNotError('aka add foo bar')
        self.assertRegexp('aka list', r'foo.*?bar \$\*')
self.assertNotError('aka add "foo bar" baz')
self.assertRegexp('aka list', 'foo.*?bar \$\*.*?foo bar.*?baz \$\*')
def testListLockedUnlocked(self):
self.assertNotError('register tacocat hunter2')
self.assertNotError('aka add foo bar')
self.assertNotError('aka add abcd echo hi')
self.assertNotError('aka lock foo')
self.assertRegexp('aka list --locked', 'foo')
self.assertNotRegexp('aka list --locked', 'abcd')
self.assertNotRegexp('aka list --unlocked', 'foo')
self.assertRegexp('aka list --unlocked', 'abcd')
# Can't look up both.
self.assertError('aka list --locked --unlocked abcd')
def testSearch(self):
self.assertNotError('aka add foo bar')
self.assertNotError('aka add "many words" "much command"')
self.assertRegexp('aka search f', 'foo')
self.assertError('aka search abcdefghijklmnop')
self.assertRegexp('aka search many', 'many words')
# This should be case insensitive too.
self.assertRegexp('aka search MaNY', 'many words')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| ProgVal/Limnoria-test | plugins/Aka/test.py | Python | bsd-3-clause | 11,172 |
from tests.unit import unittest
from boto.exception import BotoServerError, S3CreateError, JSONResponseError
from httpretty import HTTPretty, httprettified
class TestBotoServerError(unittest.TestCase):
def test_botoservererror_basics(self):
bse = BotoServerError('400', 'Bad Request')
self.assertEqual(bse.status, '400')
self.assertEqual(bse.reason, 'Bad Request')
def test_message_elb_xml(self):
# This test XML response comes from #509
xml = """
<ErrorResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2011-11-15/">
<Error>
<Type>Sender</Type>
<Code>LoadBalancerNotFound</Code>
<Message>Cannot find Load Balancer webapp-balancer2</Message>
</Error>
<RequestId>093f80d0-4473-11e1-9234-edce8ec08e2d</RequestId>
</ErrorResponse>"""
bse = BotoServerError('400', 'Bad Request', body=xml)
self.assertEqual(bse.error_message, 'Cannot find Load Balancer webapp-balancer2')
self.assertEqual(bse.error_message, bse.message)
self.assertEqual(bse.request_id, '093f80d0-4473-11e1-9234-edce8ec08e2d')
self.assertEqual(bse.error_code, 'LoadBalancerNotFound')
self.assertEqual(bse.status, '400')
self.assertEqual(bse.reason, 'Bad Request')
def test_message_sd_xml(self):
# Sample XML response from: https://forums.aws.amazon.com/thread.jspa?threadID=87393
xml = """
<Response>
<Errors>
<Error>
<Code>AuthorizationFailure</Code>
<Message>Session does not have permission to perform (sdb:CreateDomain) on resource (arn:aws:sdb:us-east-1:xxxxxxx:domain/test_domain). Contact account owner.</Message>
<BoxUsage>0.0055590278</BoxUsage>
</Error>
</Errors>
<RequestID>e73bb2bb-63e3-9cdc-f220-6332de66dbbe</RequestID>
</Response>"""
bse = BotoServerError('403', 'Forbidden', body=xml)
self.assertEqual(bse.error_message,
'Session does not have permission to perform (sdb:CreateDomain) on '
'resource (arn:aws:sdb:us-east-1:xxxxxxx:domain/test_domain). '
'Contact account owner.')
self.assertEqual(bse.error_message, bse.message)
self.assertEqual(bse.box_usage, '0.0055590278')
self.assertEqual(bse.error_code, 'AuthorizationFailure')
self.assertEqual(bse.status, '403')
self.assertEqual(bse.reason, 'Forbidden')
@httprettified
def test_xmlns_not_loaded(self):
xml = '<ErrorResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2011-11-15/">'
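        # Constructing the error parses the body; it must not trigger an
        # HTTP fetch of the namespace URL.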
bse = BotoServerError('403', 'Forbidden', body=xml)
self.assertEqual([], HTTPretty.latest_requests)
@httprettified
def test_xml_entity_not_loaded(self):
xml = '<!DOCTYPE Message [<!ENTITY xxe SYSTEM "http://aws.amazon.com/">]><Message>error:&xxe;</Message>'
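        # Parsing must not resolve or fetch the external entity (XXE guard).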
bse = BotoServerError('403', 'Forbidden', body=xml)
self.assertEqual([], HTTPretty.latest_requests)
def test_message_storage_create_error(self):
# This test value comes from https://answers.launchpad.net/duplicity/+question/150801
xml = """<?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>BucketAlreadyOwnedByYou</Code>
<Message>Your previous request to create the named bucket succeeded and you already own it.</Message>
<BucketName>cmsbk</BucketName>
<RequestId>FF8B86A32CC3FE4F</RequestId>
<HostId>6ENGL3DT9f0n7Tkv4qdKIs/uBNCMMA6QUFapw265WmodFDluP57esOOkecp55qhh</HostId>
</Error>
"""
s3ce = S3CreateError('409', 'Conflict', body=xml)
self.assertEqual(s3ce.bucket, 'cmsbk')
self.assertEqual(s3ce.error_code, 'BucketAlreadyOwnedByYou')
self.assertEqual(s3ce.status, '409')
self.assertEqual(s3ce.reason, 'Conflict')
self.assertEqual(s3ce.error_message,
'Your previous request to create the named bucket succeeded '
'and you already own it.')
self.assertEqual(s3ce.error_message, s3ce.message)
self.assertEqual(s3ce.request_id, 'FF8B86A32CC3FE4F')
def test_message_json_response_error(self):
# This test comes from https://forums.aws.amazon.com/thread.jspa?messageID=374936
body = {
'__type': 'com.amazon.coral.validate#ValidationException',
'message': 'The attempted filter operation is not supported '
'for the provided filter argument count'}
jre = JSONResponseError('400', 'Bad Request', body=body)
self.assertEqual(jre.status, '400')
self.assertEqual(jre.reason, 'Bad Request')
self.assertEqual(jre.error_message, body['message'])
self.assertEqual(jre.error_message, jre.message)
self.assertEqual(jre.code, 'ValidationException')
self.assertEqual(jre.code, jre.error_code)
def test_message_not_xml(self):
body = 'This is not XML'
bse = BotoServerError('400', 'Bad Request', body=body)
self.assertEqual(bse.error_message, 'This is not XML')
def test_getters(self):
body = "This is the body"
bse = BotoServerError('400', 'Bad Request', body=body)
self.assertEqual(bse.code, bse.error_code)
self.assertEqual(bse.message, bse.error_message)
| harshilasu/LinkurApp | y/google-cloud-sdk/platform/gsutil/third_party/boto/tests/unit/test_exception.py | Python | gpl-3.0 | 5,217 |
import os
import imp
import sys
import six
class DotImportHook:
def find_module(self, fullname, path=None):
bits = fullname.split('.')
if len(bits) <= 1:
return
for folder in sys.path:
if os.path.exists(os.path.join(folder, fullname)):
return self
for i in range(1, len(bits) - 1):
package, mod = '.'.join(bits[:i]), '.'.join(bits[i:])
path = sys.modules[package].__path__
for folder in path:
if os.path.exists(os.path.join(folder, mod)):
return self
def load_module(self, fullname):
if fullname in sys.modules:
return sys.modules[fullname]
sys.modules[fullname] = module = imp.new_module(fullname)
if '.' in fullname:
pkg, name = fullname.rsplit('.', 1)
path = sys.modules[pkg].__path__
else:
pkg, name = '', fullname
path = sys.path
module.__package__ = pkg
module.__loader__ = self
bits = fullname.split('.')
if len(bits) <= 1:
return module
for folder in sys.path:
pathfunc = lambda *args: os.path.join(folder, fullname, *args)
if os.path.exists(pathfunc()):
module.__path__ = [pathfunc()]
module.__file__ = pathfunc('__init__.pyc')
six.exec_(open(pathfunc('__init__.py')).read(), module.__dict__)
return module
for i in range(1, len(bits) - 1):
package, mod = '.'.join(bits[:i]), '.'.join(bits[i:])
path = sys.modules[package].__path__
for folder in path:
pathfunc = lambda *args: os.path.join(folder, mod, *args)
if os.path.exists(pathfunc()):
module.__path__ = [pathfunc()]
module.__file__ = pathfunc('__init__.pyc')
six.exec_(open(pathfunc('__init__.py')).read(), module.__dict__)
return module
# somehow not found, delete from sys.modules
del sys.modules[fullname]
# support reload()ing this module
try:
hook
except NameError:
pass
else:
try:
sys.meta_path.remove(hook)
except ValueError:
# not found, skip removing
pass
# automatically install hook
hook = DotImportHook()
sys.meta_path.insert(0, hook)
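# Usage sketch (hypothetical layout): after ``import dotmod``, a directory on
# sys.path literally named "pkg.sub" (containing an __init__.py) becomes
# importable as ``import pkg.sub``.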
| sciyoshi/dotmod | dotmod.py | Python | mit | 1,987 |
#!/usr/bin/env python
import sys
requires = ['requests', 'requests_oauthlib']
console_script = """[console_scripts]
trovebox = trovebox.main:main
"""
# from trovebox._version import __version__
exec(open("trovebox/_version.py").read())
# Check the Python version
(major, minor) = sys.version_info[:2]
if (major, minor) < (2, 6):
raise SystemExit("Sorry, Python 2.6 or newer required")
try:
from setuptools import setup
kw = {'entry_points': console_script,
'zip_safe': True,
'install_requires': requires
}
except ImportError:
from distutils.core import setup
kw = {'scripts': ['bin/trovebox'],
'requires': requires}
setup(name='trovebox',
version=__version__,
description='The official Python client library for the Trovebox photo service',
long_description=open("README.rst").read(),
author='Pete Burgers, James Walker',
url='https://github.com/photo/openphoto-python',
packages=['trovebox', 'trovebox.objects', 'trovebox.api'],
keywords=['openphoto', 'pyopenphoto', 'openphoto-python',
'trovebox', 'pytrovebox', 'trovebox-python'],
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
],
license='Apache 2.0',
test_suite='tests.unit',
**kw
)
| photo/openphoto-python | setup.py | Python | apache-2.0 | 1,685 |
# -*- coding: utf-8 -*-
"""
Generators for random graphs.
"""
# Copyright (C) 2004-2011 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
__author__ = "\n".join(['Aric Hagberg ([email protected])',
'Pieter Swart ([email protected])',
'Dan Schult ([email protected])'])
import itertools
import random
import math
import networkx as nx
from networkx.generators.classic import empty_graph, path_graph, complete_graph
from collections import defaultdict
__all__ = ['fast_gnp_random_graph',
'gnp_random_graph',
'dense_gnm_random_graph',
'gnm_random_graph',
'erdos_renyi_graph',
'binomial_graph',
'newman_watts_strogatz_graph',
'watts_strogatz_graph',
'connected_watts_strogatz_graph',
'random_regular_graph',
'barabasi_albert_graph',
'powerlaw_cluster_graph',
'random_lobster',
'random_shell_graph',
'random_powerlaw_tree',
'random_powerlaw_tree_sequence']
#-------------------------------------------------------------------------
# Some Famous Random Graphs
#-------------------------------------------------------------------------
def fast_gnp_random_graph(n, p, seed=None):
"""Return a random graph G_{n,p} (Erdős-Rényi graph, binomial graph).
Parameters
----------
n : int
The number of nodes.
p : float
Probability for edge creation.
seed : int, optional
Seed for random number generator (default=None).
Notes
-----
The G_{n,p} graph algorithm chooses each of the [n(n-1)]/2
(undirected) or n(n-1) (directed) possible edges with probability p.
This algorithm is O(n+m) where m is the expected number of
edges m=p*n*(n-1)/2.
It should be faster than gnp_random_graph when p is small and
the expected number of edges is small (sparse graph).
See Also
--------
gnp_random_graph
References
----------
.. [1] Batagelj and Brandes, "Efficient generation of large random networks",
Phys. Rev. E, 71, 036113, 2005.
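    Examples
    --------
    A minimal sketch; only the node count is asserted, since the sampled
    edge set depends on the seed:
    >>> G = nx.fast_gnp_random_graph(1000, 0.002, seed=42)
    >>> G.number_of_nodes()
    1000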
"""
G=empty_graph(n)
G.name="fast_gnp_random_graph(%s,%s)"%(n,p)
    if seed is not None:
        random.seed(seed)
if p<=0 or p>=1:
return nx.gnp_random_graph(n,p)
v=1 # Nodes in graph are from 0,n-1 (this is the second node index).
w=-1
lp=math.log(1.0-p)
while v<n:
lr=math.log(1.0-random.random())
w=w+1+int(lr/lp)
while w>=v and v<n:
w=w-v
v=v+1
if v<n:
G.add_edge(v,w)
return G
def gnp_random_graph(n, p, seed=None, directed=False):
"""Return a random graph G_{n,p} (Erdős-Rényi graph, binomial graph).
Chooses each of the possible edges with probability p.
This is also called binomial_graph and erdos_renyi_graph.
Parameters
----------
n : int
The number of nodes.
p : float
Probability for edge creation.
seed : int, optional
Seed for random number generator (default=None).
directed : bool, optional (default=False)
If True return a directed graph
See Also
--------
fast_gnp_random_graph
Notes
-----
This is an O(n^2) algorithm. For sparse graphs (small p) see
fast_gnp_random_graph for a faster algorithm.
References
----------
.. [1] P. Erdős and A. Rényi, On Random Graphs, Publ. Math. 6, 290 (1959).
.. [2] E. N. Gilbert, Random Graphs, Ann. Math. Stat., 30, 1141 (1959).
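    Examples
    --------
    A minimal sketch (which edges appear varies with the seed):
    >>> G = nx.gnp_random_graph(10, 0.5, seed=42)
    >>> len(G)
    10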
"""
if directed:
G=nx.DiGraph()
else:
G=nx.Graph()
G.add_nodes_from(range(n))
G.name="gnp_random_graph(%s,%s)"%(n,p)
if p<=0:
return G
if p>=1:
return complete_graph(n,create_using=G)
    if seed is not None:
        random.seed(seed)
if G.is_directed():
edges=itertools.permutations(range(n),2)
else:
edges=itertools.combinations(range(n),2)
for e in edges:
if random.random() < p:
G.add_edge(*e)
return G
# add some aliases to common names
binomial_graph=gnp_random_graph
erdos_renyi_graph=gnp_random_graph
def dense_gnm_random_graph(n, m, seed=None):
"""Return the random graph G_{n,m}.
Gives a graph picked randomly out of the set of all graphs
with n nodes and m edges.
This algorithm should be faster than gnm_random_graph for dense graphs.
Parameters
----------
n : int
The number of nodes.
m : int
The number of edges.
seed : int, optional
Seed for random number generator (default=None).
See Also
--------
gnm_random_graph()
Notes
-----
Algorithm by Keith M. Briggs Mar 31, 2006.
Inspired by Knuth's Algorithm S (Selection sampling technique),
in section 3.4.2 of [1]_.
References
----------
.. [1] Donald E. Knuth, The Art of Computer Programming,
Volume 2/Seminumerical algorithms, Third Edition, Addison-Wesley, 1997.
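    Examples
    --------
    A small sketch; by construction the result has exactly n nodes and,
    for m below the maximum, exactly m edges:
    >>> G = nx.dense_gnm_random_graph(10, 20, seed=42)
    >>> G.number_of_nodes(), G.number_of_edges()
    (10, 20)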
"""
mmax=n*(n-1)/2
if m>=mmax:
G=complete_graph(n)
else:
G=empty_graph(n)
G.name="dense_gnm_random_graph(%s,%s)"%(n,m)
if n==1 or m>=mmax:
return G
if seed is not None:
random.seed(seed)
u=0
v=1
t=0
k=0
while True:
if random.randrange(mmax-t)<m-k:
G.add_edge(u,v)
k+=1
if k==m: return G
t+=1
v+=1
if v==n: # go to next row of adjacency matrix
u+=1
v=u+1
def gnm_random_graph(n, m, seed=None, directed=False):
"""Return the random graph G_{n,m}.
Produces a graph picked randomly out of the set of all graphs
with n nodes and m edges.
Parameters
----------
n : int
The number of nodes.
m : int
The number of edges.
seed : int, optional
Seed for random number generator (default=None).
directed : bool, optional (default=False)
If True return a directed graph
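    Examples
    --------
    A small sketch; exactly m edges are placed whenever m does not exceed
    the maximum possible:
    >>> G = nx.gnm_random_graph(10, 20, seed=42)
    >>> G.number_of_nodes(), G.number_of_edges()
    (10, 20)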
"""
if directed:
G=nx.DiGraph()
else:
G=nx.Graph()
G.add_nodes_from(range(n))
G.name="gnm_random_graph(%s,%s)"%(n,m)
if seed is not None:
random.seed(seed)
if n==1:
return G
max_edges=n*(n-1)
if not directed:
max_edges/=2.0
if m>=max_edges:
return complete_graph(n,create_using=G)
nlist=G.nodes()
edge_count=0
while edge_count < m:
# generate random edge,u,v
u = random.choice(nlist)
v = random.choice(nlist)
if u==v or G.has_edge(u,v):
continue
else:
G.add_edge(u,v)
edge_count=edge_count+1
return G
def newman_watts_strogatz_graph(n, k, p, create_using=None, seed=None):
"""Return a Newman-Watts-Strogatz small world graph.
Parameters
----------
n : int
The number of nodes
k : int
Each node is connected to k nearest neighbors in ring topology
p : float
The probability of adding a new edge for each edge
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : int, optional
seed for random number generator (default=None)
Notes
-----
First create a ring over n nodes. Then each node in the ring is
connected with its k nearest neighbors (k-1 neighbors if k is odd).
Then shortcuts are created by adding new edges as follows:
for each edge u-v in the underlying "n-ring with k nearest neighbors"
with probability p add a new edge u-w with randomly-chosen existing
node w. In contrast with watts_strogatz_graph(), no edges are removed.
See Also
--------
watts_strogatz_graph()
References
----------
.. [1] M. E. J. Newman and D. J. Watts,
Renormalization group analysis of the small-world network model,
Physics Letters A, 263, 341, 1999.
http://dx.doi.org/10.1016/S0375-9601(99)00757-4
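    Examples
    --------
    A minimal sketch; shortcuts are added on top of the k-neighbor ring,
    so the edge count is at least n*k/2 and varies with the seed:
    >>> G = nx.newman_watts_strogatz_graph(20, 4, 0.1, seed=42)
    >>> G.number_of_edges() >= 40
    True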
"""
if seed is not None:
random.seed(seed)
if k>=n // 2:
raise nx.NetworkXError("k>=n/2, choose smaller k or larger n")
if create_using is not None and create_using.is_directed():
raise nx.NetworkXError("Directed Graph not supported")
G=empty_graph(n,create_using)
G.name="newman_watts_strogatz_graph(%s,%s,%s)"%(n,k,p)
nlist = G.nodes()
fromv = nlist
# connect the k/2 neighbors
for n in range(1, k // 2+1):
tov = fromv[n:] + fromv[0:n] # the first n are now last
for i in range(len(fromv)):
G.add_edge(fromv[i], tov[i])
# for each edge u-v, with probability p, randomly select existing
# node w and add new edge u-w
e = G.edges()
for (u, v) in e:
if random.random() < p:
w = random.choice(nlist)
# no self-loops and reject if edge u-w exists
# is that the correct NWS model?
while w == u or G.has_edge(u, w):
w = random.choice(nlist)
G.add_edge(u,w)
return G
def watts_strogatz_graph(n, k, p, create_using=None, seed=None):
"""Return a Watts-Strogatz small-world graph.
Parameters
----------
n : int
The number of nodes
k : int
Each node is connected to k nearest neighbors in ring topology
p : float
The probability of rewiring each edge
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : int, optional
Seed for random number generator (default=None)
See Also
--------
newman_watts_strogatz_graph()
connected_watts_strogatz_graph()
Notes
-----
First create a ring over n nodes. Then each node in the ring is
connected with its k nearest neighbors (k-1 neighbors if k is odd).
Then shortcuts are created by replacing some edges as follows:
for each edge u-v in the underlying "n-ring with k nearest neighbors"
with probability p replace it with a new edge u-w with uniformly
random choice of existing node w.
In contrast with newman_watts_strogatz_graph(), the random
rewiring does not increase the number of edges. The rewired graph
is not guaranteed to be connected as in connected_watts_strogatz_graph().
References
----------
.. [1] Duncan J. Watts and Steven H. Strogatz,
Collective dynamics of small-world networks,
Nature, 393, pp. 440--442, 1998.
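    Examples
    --------
    A minimal sketch; rewiring replaces edges one-for-one, so the edge
    count stays at exactly n*k/2:
    >>> G = nx.watts_strogatz_graph(20, 4, 0.1, seed=42)
    >>> G.number_of_edges()
    40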
"""
if k>=n/2:
raise nx.NetworkXError("k>=n/2, choose smaller k or larger n")
if create_using is None:
G = nx.Graph()
elif create_using.is_directed():
raise nx.NetworkXError("Directed Graph not supported")
else:
G = create_using
G.clear()
if seed is not None:
random.seed(seed)
G.name="watts_strogatz_graph(%s,%s,%s)"%(n,k,p)
nodes = list(range(n)) # nodes are labeled 0 to n-1
# connect each node to k/2 neighbors
for j in range(1, k // 2+1):
targets = nodes[j:] + nodes[0:j] # first j nodes are now last in list
G.add_edges_from(zip(nodes,targets))
# rewire edges from each node
# loop over all nodes in order (label) and neighbors in order (distance)
# no self loops or multiple edges allowed
for j in range(1, k // 2+1): # outer loop is neighbors
targets = nodes[j:] + nodes[0:j] # first j nodes are now last in list
# inner loop in node order
for u,v in zip(nodes,targets):
if random.random() < p:
w = random.choice(nodes)
# Enforce no self-loops or multiple edges
while w == u or G.has_edge(u, w):
w = random.choice(nodes)
G.remove_edge(u,v)
G.add_edge(u,w)
return G
def connected_watts_strogatz_graph(n, k, p, tries=100, create_using=None, seed=None):
"""Return a connected Watts-Strogatz small-world graph.
Attempt to generate a connected realization by repeated
generation of Watts-Strogatz small-world graphs.
An exception is raised if the maximum number of tries is exceeded.
Parameters
----------
n : int
The number of nodes
k : int
Each node is connected to k nearest neighbors in ring topology
p : float
The probability of rewiring each edge
tries : int
Number of attempts to generate a connected graph.
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : int, optional
The seed for random number generator.
See Also
--------
newman_watts_strogatz_graph()
watts_strogatz_graph()
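    Examples
    --------
    A minimal sketch; whenever a graph is returned it is connected:
    >>> G = nx.connected_watts_strogatz_graph(20, 4, 0.1, seed=42)
    >>> nx.is_connected(G)
    True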
"""
G = watts_strogatz_graph(n, k, p, create_using, seed)
t=1
while not nx.is_connected(G):
G = watts_strogatz_graph(n, k, p, create_using, seed)
t=t+1
if t>tries:
raise nx.NetworkXError("Maximum number of tries exceeded")
return G
def random_regular_graph(d, n, create_using=None, seed=None):
"""Return a random regular graph of n nodes each with degree d.
The resulting graph G has no self-loops or parallel edges.
Parameters
----------
d : int
Degree
n : integer
Number of nodes. The value of n*d must be even.
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : hashable object
The seed for random number generator.
Notes
-----
    The nodes are numbered from 0 to n-1.
Kim and Vu's paper [2]_ shows that this algorithm samples in an
asymptotically uniform way from the space of random graphs when
d = O(n**(1/3-epsilon)).
References
----------
.. [1] A. Steger and N. Wormald,
Generating random regular graphs quickly,
Probability and Computing 8 (1999), 377-396, 1999.
http://citeseer.ist.psu.edu/steger99generating.html
.. [2] Jeong Han Kim and Van H. Vu,
Generating random regular graphs,
Proceedings of the thirty-fifth ACM symposium on Theory of computing,
San Diego, CA, USA, pp 213--222, 2003.
http://portal.acm.org/citation.cfm?id=780542.780576
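    Examples
    --------
    A minimal sketch of a 3-regular graph on 10 nodes (note n*d is even):
    >>> G = nx.random_regular_graph(3, 10, seed=42)
    >>> len(G)
    10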
"""
if (n * d) % 2 != 0:
raise nx.NetworkXError("n * d must be even")
if not 0 <= d < n:
raise nx.NetworkXError("the 0 <= d < n inequality must be satisfied")
if create_using is None:
G = nx.Graph()
elif create_using.is_directed():
raise nx.NetworkXError("Directed Graph not supported")
else:
G = create_using
G.clear()
if seed is not None:
random.seed(seed)
def _suitable(edges, potential_edges):
# Helper subroutine to check if there are suitable edges remaining
# If False, the generation of the graph has failed
if not potential_edges:
return True
for s1 in potential_edges:
for s2 in potential_edges:
# Two iterators on the same dictionary are guaranteed
# to visit it in the same order if there are no
# intervening modifications.
if s1 == s2:
# Only need to consider s1-s2 pair one time
break
if s1 > s2:
s1, s2 = s2, s1
if (s1, s2) not in edges:
return True
return False
def _try_creation():
# Attempt to create an edge set
edges = set()
stubs = list(range(n)) * d
while stubs:
potential_edges = defaultdict(lambda: 0)
random.shuffle(stubs)
stubiter = iter(stubs)
for s1, s2 in zip(stubiter, stubiter):
if s1 > s2:
s1, s2 = s2, s1
if s1 != s2 and ((s1, s2) not in edges):
edges.add((s1, s2))
else:
potential_edges[s1] += 1
potential_edges[s2] += 1
if not _suitable(edges, potential_edges):
return None # failed to find suitable edge set
stubs = [node for node, potential in potential_edges.items()
for _ in range(potential)]
return edges
# Even though a suitable edge set exists,
# the generation of such a set is not guaranteed.
# Try repeatedly to find one.
edges = _try_creation()
while edges is None:
edges = _try_creation()
G.name = "random_regular_graph(%s, %s)" % (d, n)
G.add_edges_from(edges)
return G
def _random_subset(seq,m):
""" Return m unique elements from seq.
This differs from random.sample which can return repeated
elements if seq holds repeated elements.
"""
targets=set()
while len(targets)<m:
x=random.choice(seq)
targets.add(x)
return targets
def barabasi_albert_graph(n, m, create_using=None, seed=None):
"""Return random graph using Barabási-Albert preferential attachment model.
A graph of n nodes is grown by attaching new nodes each with m
edges that are preferentially attached to existing nodes with high
degree.
Parameters
----------
n : int
Number of nodes
m : int
Number of edges to attach from a new node to existing nodes
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : int, optional
Seed for random number generator (default=None).
Returns
-------
G : Graph
Notes
-----
    The initialization is a graph with m nodes and no edges.
References
----------
.. [1] A. L. Barabási and R. Albert "Emergence of scaling in
random networks", Science 286, pp 509-512, 1999.
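    Examples
    --------
    A minimal sketch; each of the n-m grown nodes attaches with exactly m
    edges, so the result always has (n-m)*m edges:
    >>> G = nx.barabasi_albert_graph(50, 2, seed=42)
    >>> G.number_of_nodes(), G.number_of_edges()
    (50, 96)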
"""
if m < 1 or m >=n:
raise nx.NetworkXError(\
"Barabási-Albert network must have m>=1 and m<n, m=%d,n=%d"%(m,n))
if create_using is not None and create_using.is_directed():
raise nx.NetworkXError("Directed Graph not supported")
if seed is not None:
random.seed(seed)
# Add m initial nodes (m0 in barabasi-speak)
G=empty_graph(m,create_using)
G.name="barabasi_albert_graph(%s,%s)"%(n,m)
# Target nodes for new edges
targets=list(range(m))
# List of existing nodes, with nodes repeated once for each adjacent edge
repeated_nodes=[]
# Start adding the other n-m nodes. The first node is m.
source=m
while source<n:
# Add edges to m nodes from the source.
G.add_edges_from(zip([source]*m,targets))
# Add one node to the list for each new edge just created.
repeated_nodes.extend(targets)
# And the new node "source" has m edges to add to the list.
repeated_nodes.extend([source]*m)
# Now choose m unique nodes from the existing nodes
        # Pick uniformly from repeated_nodes (preferential attachment)
targets = _random_subset(repeated_nodes,m)
source += 1
return G
def powerlaw_cluster_graph(n, m, p, create_using=None, seed=None):
"""Holme and Kim algorithm for growing graphs with powerlaw
degree distribution and approximate average clustering.
Parameters
----------
n : int
the number of nodes
m : int
the number of random edges to add for each new node
p : float,
Probability of adding a triangle after adding a random edge
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : int, optional
Seed for random number generator (default=None).
Notes
-----
The average clustering has a hard time getting above
a certain cutoff that depends on m. This cutoff is often quite low.
Note that the transitivity (fraction of triangles to possible
triangles) seems to go down with network size.
It is essentially the Barabási-Albert (B-A) growth model with an
extra step that each random edge is followed by a chance of
making an edge to one of its neighbors too (and thus a triangle).
This algorithm improves on B-A in the sense that it enables a
higher average clustering to be attained if desired.
It seems possible to have a disconnected graph with this algorithm
since the initial m nodes may not be all linked to a new node
on the first iteration like the B-A model.
References
----------
.. [1] P. Holme and B. J. Kim,
"Growing scale-free networks with tunable clustering",
Phys. Rev. E, 65, 026107, 2002.
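    Examples
    --------
    A minimal sketch; the node count is fixed, while the edge count may
    fall slightly below (n-m)*m when an attachment target was already
    linked by a triangle step:
    >>> G = nx.powerlaw_cluster_graph(50, 2, 0.3, seed=42)
    >>> G.number_of_nodes()
    50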
"""
    if m < 1 or n < m:
        raise nx.NetworkXError(\
              "powerlaw_cluster_graph must have m>=1 and m<=n, m=%d,n=%d"%(m,n))
    if p > 1 or p < 0:
        raise nx.NetworkXError(\
              "powerlaw_cluster_graph p must be in [0,1], p=%f"%(p))
if create_using is not None and create_using.is_directed():
raise nx.NetworkXError("Directed Graph not supported")
if seed is not None:
random.seed(seed)
G=empty_graph(m,create_using) # add m initial nodes (m0 in barabasi-speak)
G.name="Powerlaw-Cluster Graph"
repeated_nodes=G.nodes() # list of existing nodes to sample from
# with nodes repeated once for each adjacent edge
source=m # next node is m
while source<n: # Now add the other n-1 nodes
possible_targets = _random_subset(repeated_nodes,m)
# do one preferential attachment for new node
target=possible_targets.pop()
G.add_edge(source,target)
repeated_nodes.append(target) # add one node to list for each new link
count=1
while count<m: # add m-1 more new links
if random.random()<p: # clustering step: add triangle
neighborhood=[nbr for nbr in G.neighbors(target) \
if not G.has_edge(source,nbr) \
and not nbr==source]
if neighborhood: # if there is a neighbor without a link
nbr=random.choice(neighborhood)
G.add_edge(source,nbr) # add triangle
repeated_nodes.append(nbr)
count=count+1
continue # go to top of while loop
# else do preferential attachment step if above fails
target=possible_targets.pop()
G.add_edge(source,target)
repeated_nodes.append(target)
count=count+1
repeated_nodes.extend([source]*m) # add source node to list m times
source += 1
return G
def random_lobster(n, p1, p2, create_using=None, seed=None):
"""Return a random lobster.
A lobster is a tree that reduces to a caterpillar when pruning all
leaf nodes.
A caterpillar is a tree that reduces to a path graph when pruning
all leaf nodes (p2=0).
Parameters
----------
n : int
The expected number of nodes in the backbone
p1 : float
Probability of adding an edge to the backbone
p2 : float
Probability of adding an edge one level beyond backbone
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : int, optional
Seed for random number generator (default=None).
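    Examples
    --------
    A minimal sketch; backbone length and limb placement depend on the
    seed, but the result is always a tree grown from a random-length path:
    >>> G = nx.random_lobster(10, 0.5, 0.5, seed=42)
    >>> G.number_of_nodes() > 0
    True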
"""
# a necessary ingredient in any self-respecting graph library
if seed is not None:
random.seed(seed)
llen=int(2*random.random()*n + 0.5)
if create_using is not None and create_using.is_directed():
raise nx.NetworkXError("Directed Graph not supported")
L=path_graph(llen,create_using)
L.name="random_lobster(%d,%s,%s)"%(n,p1,p2)
# build caterpillar: add edges to path graph with probability p1
current_node=llen-1
for n in range(llen):
if random.random()<p1: # add fuzzy caterpillar parts
current_node+=1
L.add_edge(n,current_node)
if random.random()<p2: # add crunchy lobster bits
current_node+=1
L.add_edge(current_node-1,current_node)
return L # voila, un lobster!
def random_shell_graph(constructor, create_using=None, seed=None):
"""Return a random shell graph for the constructor given.
Parameters
----------
constructor: a list of three-tuples
(n,m,d) for each shell starting at the center shell.
n : int
The number of nodes in the shell
m : int
The number or edges in the shell
d : float
The ratio of inter-shell (next) edges to intra-shell edges.
d=0 means no intra shell edges, d=1 for the last shell
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : int, optional
Seed for random number generator (default=None).
Examples
--------
>>> constructor=[(10,20,0.8),(20,40,0.8)]
>>> G=nx.random_shell_graph(constructor)
"""
if create_using is not None and create_using.is_directed():
raise nx.NetworkXError("Directed Graph not supported")
G=empty_graph(0,create_using)
G.name="random_shell_graph(constructor)"
if seed is not None:
random.seed(seed)
glist=[]
intra_edges=[]
nnodes=0
# create gnm graphs for each shell
for (n,m,d) in constructor:
inter_edges=int(m*d)
intra_edges.append(m-inter_edges)
g=nx.convert_node_labels_to_integers(
gnm_random_graph(n,inter_edges),
first_label=nnodes)
glist.append(g)
nnodes+=n
G=nx.operators.union(G,g)
# connect the shells randomly
for gi in range(len(glist)-1):
nlist1=glist[gi].nodes()
nlist2=glist[gi+1].nodes()
total_edges=intra_edges[gi]
edge_count=0
while edge_count < total_edges:
u = random.choice(nlist1)
v = random.choice(nlist2)
if u==v or G.has_edge(u,v):
continue
else:
G.add_edge(u,v)
edge_count=edge_count+1
return G
def random_powerlaw_tree(n, gamma=3, create_using=None, seed=None, tries=100):
"""Return a tree with a powerlaw degree distribution.
Parameters
----------
n : int,
The number of nodes
gamma : float
Exponent of the power-law
create_using : graph, optional (default Graph)
The graph instance used to build the graph.
seed : int, optional
Seed for random number generator (default=None).
tries : int
Number of attempts to adjust sequence to make a tree
Notes
-----
A trial powerlaw degree sequence is chosen and then elements are
swapped with new elements from a powerlaw distribution until
the sequence makes a tree (#edges=#nodes-1).
"""
from networkx.generators.degree_seq import degree_sequence_tree
try:
s=random_powerlaw_tree_sequence(n,
gamma=gamma,
seed=seed,
tries=tries)
    except Exception:
raise nx.NetworkXError(\
"Exceeded max (%d) attempts for a valid tree sequence."%tries)
G=degree_sequence_tree(s,create_using)
G.name="random_powerlaw_tree(%s,%s)"%(n,gamma)
return G
def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100):
""" Return a degree sequence for a tree with a powerlaw distribution.
Parameters
----------
n : int,
The number of nodes
gamma : float
Exponent of the power-law
seed : int, optional
Seed for random number generator (default=None).
tries : int
Number of attempts to adjust sequence to make a tree
Notes
-----
A trial powerlaw degree sequence is chosen and then elements are
swapped with new elements from a powerlaw distribution until
the sequence makes a tree (#edges=#nodes-1).
"""
if seed is not None:
random.seed(seed)
# get trial sequence
z=nx.utils.powerlaw_sequence(n,exponent=gamma)
# round to integer values in the range [0,n]
zseq=[min(n, max( int(round(s)),0 )) for s in z]
# another sequence to swap values from
z=nx.utils.powerlaw_sequence(tries,exponent=gamma)
# round to integer values in the range [0,n]
swap=[min(n, max( int(round(s)),0 )) for s in z]
for deg in swap:
if n-sum(zseq)/2.0 == 1.0: # is a tree, return sequence
return zseq
index=random.randint(0,n-1)
zseq[index]=swap.pop()
raise nx.NetworkXError(\
"Exceeded max (%d) attempts for a valid tree sequence."%tries)
| rainest/dance-partner-matching | networkx/generators/random_graphs.py | Python | bsd-2-clause | 29,117 |
import thread
import time
characters = '1234567890QWERTYUIOPASDFGHJKLZXCVBNMqwertyuiopasdfghjklzxcvbnm_'
characters = list(characters)
# print list(string) allowed_chars= list(characters)
keyboard = 'qwertyuiopasdfghjklzxcvbnm'
combos = ['qwe', 'wer', 'ert']
# print keyboard.items()
# for i in string:
# if i in characters:
# print 'check'
# else:
# print 'uncheck'
# print keyboard['top_row']
string = raw_input('string: ')
# score = 0
# threes = []
# for line in string:
# print line
# for i in range(len(line) - 2):
# print 'b'
# threes.append(line[i:+3])
# password = raw_input('enter: ')
#
# for three in threes:
# print 'c'
# letter_frequency = password.count(triple)
# score += 5 * letter_frequency
# print score
three = []
def three_check():
    # Keep `three` as a sliding window over the last three characters
    # of the input string.
    for i in string:
        three.append(i)
        if len(three) > 3:
            three.pop(0)
    return len(three) == 3
def key_check():
    # thread.start_new_thread(three_check, ())
    for x in string:
        three_check()
        time.sleep(0.1)
        # `three` is a list, so join it before the substring test;
        # testing `list in str` raises a TypeError.
        if ''.join(three) in keyboard:
            print 'uncheck'
        else:
            print 'check'
# Scan the string with a three-character window and flag any run that
# appears consecutively on the keyboard row string (e.g. 'qwe').
for i in range(len(string) - 2):
    window = string[i:i + 3]
    print window
    if window in keyboard:
        print 'ERROR'
    else:
        print 'check'
| husky-prophet/personal-backup | functions.py | Python | mit | 1,741 |
#!/usr/bin/python
#
# Nag(ix)SC -- nagixsc_write_xml.py
#
# Copyright (C) 2009-2010 Sven Velt <[email protected]>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import nagixsc
import optparse
parser = optparse.OptionParser()
parser.add_option('-o', '', dest='outfile', help='(Path and) file name of status file, default STDOUT')
parser.add_option('-v', '', action='count', dest='verb', help='Verbose output')
parser.set_defaults(outfile='-')
parser.set_defaults(verb=0)
(options, args) = parser.parse_args()
checks = [{'host_name': 'host1',
'output': 'Nag(ix)SC: Check result is 24535725(>14400) seconds old - DISK CRITICAL - free space: /home 775 MB (1% inode=91%);| /home=67584MB;61523;64941;0;68359',
'returncode': '2',
'service_description': 'Disk_Home',
'timestamp': 1234443420},
{'host_name': 'host1',
'output': 'Nag(ix)SC: Check result is 24535725(>14400) seconds old - OK - load average: 0.00, 0.00, 0.00|load1=0.000;5.000;10.000;0; load5=0.000;5.000;10.000;0; load15=0.000;5.000;10.000;0;',
'returncode': '0',
'service_description': 'Load',
'timestamp': 1234443420},
{'host_name': 'host2',
'output': 'Nag(ix)SC: Check result is 24535735(>14400) seconds old - PROCS OK: 163 processes',
'returncode': '0',
'service_description': 'Procs_Total',
'timestamp': 1234443410},
{'host_name': 'host2',
'output': 'Nag(ix)SC: Check result is 24535715(>14400) seconds old - SWAP OK - 79% free (1492 MB out of 1906 MB) |swap=1492MB;953;476;0;1906',
'returncode': '0',
'service_description': 'Swap', },
{'host_name': 'host1',
'output': 'Nag(ix)SC: Check result is 24535725(>14400) seconds old - DISK OK - free space: / 2167 MB (22% inode=97%);| /=7353MB;8568;9044;0;9520',
'returncode': '0',
'service_description': 'Disk_Root',
'timestamp': 1234443420},
{'host_name': 'host2',
'output': 'Nag(ix)SC: Check result is 24535735(>14400) seconds old - USERS WARNING - 11 users currently logged in |users=11;10;15;0\n3 root sessions\n8 non-root sessions',
'returncode': '1',
'service_description': 'Users',
'timestamp': 1234443410}]
xmldoc = nagixsc.xml_from_dict(checks)
xmldoc.saveFile(options.outfile)
| wAmpIre/nagixsc | nagixsc_write_xml.py | Python | gpl-2.0 | 2,807 |
import locale
import time
sample_locales = [
('USA', 'en_US'),
('France', 'fr_FR'),
('Spain', 'es_ES'),
('Portugal', 'pt_PT'),
('Poland', 'pl_PL'),
]
for name, loc in sample_locales:
locale.setlocale(locale.LC_ALL, loc)
format = locale.nl_langinfo(locale.D_T_FMT)
print('{:>10}: {}'.format(name, time.strftime(format)))
| jasonwee/asus-rt-n14uhp-mrtg | src/lesson_internationlization_and_localization/locale_date.py | Python | apache-2.0 | 355 |
#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
from functools import partial
from lxml import etree
from html5lib.constants import cdataElements, rcdataElements
from calibre.ebooks.oeb.polish.tests.base import BaseTest
from calibre.ebooks.oeb.polish.parsing import parse_html5 as parse
from calibre.ebooks.oeb.base import XPath, XHTML_NS, SVG_NS, XLINK_NS
from calibre.ebooks.oeb.parse_utils import html5_parse
def nonvoid_cdata_elements(test, parse_function):
''' If self closed version of non-void cdata elements like <title/> are
present, the HTML5 parsing algorithm treats all following data as CDATA '''
markup = '''
<html> <head><{0}/></head> <body id="test"> </html>
'''
for tag in cdataElements | rcdataElements:
for x in (tag, tag.upper(), '\n' + tag, tag + ' id="xxx" '):
root = parse_function(markup.format(x))
test.assertEqual(
len(XPath('//h:body[@id="test"]')(root)), 1,
'Incorrect parsing for <%s/>, parsed markup:\n' % x + etree.tostring(root))
def namespaces(test, parse_function):
ae = test.assertEqual
def match_and_prefix(root, xpath, prefix, err=''):
matches = XPath(xpath)(root)
ae(len(matches), 1, err)
ae(matches[0].prefix, prefix, err)
markup = ''' <html xmlns="{xhtml}"><head><body id="test"></html> '''.format(xhtml=XHTML_NS)
root = parse_function(markup)
ae(
len(XPath('//h:body[@id="test"]')(root)), 1,
'Incorrect parsing, parsed markup:\n' + etree.tostring(root))
match_and_prefix(root, '//h:body[@id="test"]', None)
markup = '''
<html xmlns="{xhtml}"><head><body id="test">
<svg:svg xmlns:svg="{svg}"><svg:image xmlns:xlink="{xlink}" xlink:href="xxx"/></svg:svg>
'''.format(xhtml=XHTML_NS, svg=SVG_NS, xlink=XLINK_NS)
root = parse_function(markup)
err = 'Incorrect parsing, parsed markup:\n' + etree.tostring(root)
match_and_prefix(root, '//h:body[@id="test"]', None, err)
match_and_prefix(root, '//svg:svg', None if parse_function is parse else 'svg', err)
match_and_prefix(root, '//svg:image[@xl:href]', None if parse_function is parse else 'svg', err)
markup = '''
<html xmlns="{xhtml}"><head><body id="test">
<svg xmlns="{svg}" xmlns:xlink="{xlink}" ><image xlink:href="xxx"/></svg>
'''.format(xhtml=XHTML_NS, svg=SVG_NS, xlink=XLINK_NS)
root = parse_function(markup)
err = 'Incorrect parsing, parsed markup:\n' + etree.tostring(root)
match_and_prefix(root, '//h:body[@id="test"]', None, err)
match_and_prefix(root, '//svg:svg', None if parse_function is parse else 'svg', err)
match_and_prefix(root, '//svg:image[@xl:href]', None if parse_function is parse else 'svg', err)
markup = '<html><body><svg><image xlink:href="xxx"></svg>'
root = parse_function(markup)
err = 'Namespaces not created, parsed markup:\n' + etree.tostring(root)
match_and_prefix(root, '//svg:svg', None if parse_function is parse else 'svg', err)
match_and_prefix(root, '//svg:image[@xl:href]', None if parse_function is parse else 'svg', err)
if parse_function is parse:
image = XPath('//svg:image')(root)[0]
ae(image.nsmap, {'xlink':XLINK_NS, None:SVG_NS})
root = parse_function('<html id="a"><p><html xmlns:x="y" lang="en"><p>')
err = 'Multiple HTML tags not handled, parsed markup:\n' + etree.tostring(root)
match_and_prefix(root, '//h:html', None, err)
match_and_prefix(root, '//h:html[@lang]', None, err)
match_and_prefix(root, '//h:html[@id]', None, err)
if parse_function is not html5_parse:
markup = '<html:html xmlns:html="{html}" id="a"><html:body><html:p></html:p></html:body></html>'.format(html=XHTML_NS)
root = parse_function(markup)
err = 'HTML namespace prefixed, parsed markup:\n' + etree.tostring(root)
match_and_prefix(root, '//h:html', None, err)
markup = '<html><body><ns1:tag1 xmlns:ns1="NS"><ns2:tag2 xmlns:ns2="NS" ns1:id="test"/><ns1:tag3 xmlns:ns1="NS2" ns1:id="test"/></ns1:tag1>'
root = parse_function(markup)
err = 'Arbitrary namespaces not preserved, parsed markup:\n' + etree.tostring(root)
def xpath(expr):
return etree.XPath(expr, namespaces={'ns1':'NS', 'ns2':'NS2'})(root)
ae(len(xpath('//ns1:tag1')), 1, err)
ae(len(xpath('//ns1:tag2')), 1, err)
ae(len(xpath('//ns2:tag3')), 1, err)
ae(len(xpath('//ns1:tag2[@ns1:id="test"]')), 1, err)
ae(len(xpath('//ns2:tag3[@ns2:id="test"]')), 1, err)
for tag in root.iter():
if 'NS' in tag.tag:
ae('ns1', tag.prefix)
markup = '<html xml:lang="en"><body><p lang="de"><p xml:lang="es"><p lang="en" xml:lang="de">'
root = parse_function(markup)
err = 'xml:lang not converted to lang, parsed markup:\n' + etree.tostring(root)
ae(len(root.xpath('//*[@lang="en"]')), 2, err)
ae(len(root.xpath('//*[@lang="de"]')), 1, err)
ae(len(root.xpath('//*[@lang="es"]')), 1, err)
ae(len(XPath('//*[@xml:lang]')(root)), 0, err)
def space_characters(test, parse_function):
markup = '<html><p>\u000c</p>'
root = parse_function(markup)
err = 'form feed character not converted, parsed markup:\n' + etree.tostring(root)
test.assertNotIn('\u000c', root.xpath('//*[local-name()="p"]')[0].text, err)
markup = '<html><p>a\u000b\u000c</p>'
root = parse_function(markup) # Should strip non XML safe control code \u000b
test.assertNotIn('\u000b', root.xpath('//*[local-name()="p"]')[0].text, err)
test.assertNotIn('\u000c', root.xpath('//*[local-name()="p"]')[0].text, err)
def case_insensitive_element_names(test, parse_function):
markup = '<HTML><P> </p>'
root = parse_function(markup)
err = 'case sensitive parsing, parsed markup:\n' + etree.tostring(root)
test.assertEqual(len(XPath('//h:p')(root)), 1, err)
def entities(test, parse_function):
markup = '<html><p> '</p>'
root = parse_function(markup)
err = 'Entities not handled, parsed markup:\n' + etree.tostring(root)
test.assertEqual('\xa0\'', root.xpath('//*[local-name()="p"]')[0].text, err)
def multiple_html_and_body(test, parse_function):
markup = '<html id="1"><body id="2"><p><html lang="en"><body lang="de"></p>'
root = parse_function(markup)
err = 'multiple html and body not handled, parsed markup:\n' + etree.tostring(root)
test.assertEqual(len(XPath('//h:html')(root)), 1, err)
test.assertEqual(len(XPath('//h:body')(root)), 1, err)
test.assertEqual(len(XPath('//h:html[@id and @lang]')(root)), 1, err)
test.assertEqual(len(XPath('//h:body[@id and @lang]')(root)), 1, err)
def attribute_replacement(test, parse_function):
markup = '<html><body><svg viewbox="0"></svg><svg xmlns="%s" viewbox="1">' % SVG_NS
root = parse_function(markup)
err = 'SVG attributes not normalized, parsed markup:\n' + etree.tostring(root)
test.assertEqual(len(XPath('//svg:svg[@viewBox]')(root)), 2, err)
basic_checks = (nonvoid_cdata_elements, namespaces, space_characters,
case_insensitive_element_names, entities,
multiple_html_and_body, attribute_replacement)
class ParsingTests(BaseTest):
def test_conversion_parser(self):
' Test parsing with the HTML5 parser used for conversion '
for test in basic_checks:
test(self, html5_parse)
def test_polish_parser(self):
' Test parsing with the HTML5 parser used for polishing '
for test in basic_checks:
test(self, parse)
root = parse('<html><p><svg><image /><b></svg> \n<b>xxx', discard_namespaces=True)
self.assertTrue(root.xpath('//b'), 'Namespaces not discarded')
self.assertFalse(root.xpath('//svg/b'), 'The <b> was not moved out of <svg>')
for ds in (False, True):
src = '\n<html>\n<p>\n<svg><image />\n<b></svg> '
root = parse(src, discard_namespaces=ds)
for tag, lnum in {'html':2, 'head':3, 'body':3, 'p':3, 'svg':4, 'image':4, 'b':5}.iteritems():
elem = root.xpath('//*[local-name()="%s"]' % tag)[0]
self.assertEqual(lnum, elem.sourceline, 'Line number incorrect for %s, source: %s:' % (tag, src))
for ds in (False, True):
src = '\n<html>\n<p b=1 a=2 c=3 d=4 e=5 f=6 g=7 h=8><svg b=1 a=2 c=3 d=4 e=5 f=6 g=7 h=8>\n'
root = parse(src, discard_namespaces=ds)
for tag in ('p', 'svg'):
for i, (k, v) in enumerate(root.xpath('//*[local-name()="%s"]' % tag)[0].items()):
self.assertEqual(i+1, int(v))
root = parse('<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" xmlns:xml="http://www.w3.org/XML/1998/namespace"><body/></html>')
self.assertNotIn('xmlnsU0003Axml', root.attrib, 'xml namespace declaration not removed')
root = parse('<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" xmlns:extra="extra"><body/></html>')
self.assertIn('extra', root.nsmap, 'Extra namespace declaration on <html> tag not preserved')
def timing():
import time, sys
from calibre.ebooks.chardet import xml_to_unicode
from html5lib import parse as vanilla
filename = sys.argv[-1]
with open(filename, 'rb') as f:
raw = f.read()
raw = xml_to_unicode(raw)[0]
for name, f in (('calibre', partial(parse, line_numbers=False)), ('html5lib', vanilla), ('calibre-old', html5_parse)):
timings = []
for i in xrange(10):
st = time.time()
f(raw)
timings.append(time.time() - st)
avg = sum(timings)/len(timings)
print ('Average time for %s: %.2g' % (name, avg))
| ashang/calibre | src/calibre/ebooks/oeb/polish/tests/parsing.py | Python | gpl-3.0 | 9,879 |
from inspect import Signature, Parameter
class Descriptor:
def __init__(self, name):
self.name = name
def __set__(self, instance, value):
print("Setting %s to %s" % (self.name, value))
instance.__dict__[self.name] = value
def __delete__(self, instance):
print("Delete ", self.name)
del instance.__dict__[self.name]
class Typed(Descriptor):
ty = object
def __set__(self, instance, value):
if not isinstance(value, self.ty):
raise TypeError("Expected %s" % self.ty)
        # super() dispatches to the next class on the MRO, not necessarily the parent
super().__set__(instance, value)
# here is a good use of keyword-only arguments
class Sized(Descriptor):
def __init__(self, *args, maxlen, **kwargs):
self.maxlen = maxlen
super().__init__(*args, **kwargs)
def __set__(self, instance, val):
if len(val) > self.maxlen:
raise ValueError('%s exceeds the maximum len %s' % \
(val, self.maxlen))
super().__set__(instance, val)
import re
class Regex(Descriptor):
def __init__(self, *args, pat, **kwargs):
self.pat = re.compile(pat)
super().__init__(*args, **kwargs)
def __set__(self, instance, val):
if not self.pat.match(val):
raise ValueError("%s does not match pattern %s" % \
(val, self.pat))
super().__set__(instance, val)
class Integer(Typed):
ty = int
class String(Typed):
ty = str
class Float(Typed):
ty = float
class Positive(Descriptor):
def __set__(self, instance, val):
if val < 0:
raise ValueError("%s must be >= 0" % val)
super().__set__(instance, val)
class PositiveInteger(Integer, Positive): #mixin class
pass
class SizedRegexString(Sized, Regex, String):
pass
class PositiveFloat(Float, Positive):
pass
def make_signature(fields):
return Signature(
(Parameter(f, Parameter.POSITIONAL_OR_KEYWORD) for f in fields))
class StrucMeta(type):
def __new__(cls, clsname, bases, clsdict):
clsobj = super().__new__(cls, clsname, bases, clsdict)
clsobj.__signature__ = make_signature(clsobj._fields)
return clsobj
class Structure(metaclass = StrucMeta):
_fields = []
def __init__(self, *args, **kwargs):
bound = self.__signature__.bind(*args, **kwargs)
for k, v in bound.arguments.items():
setattr(self, k, v)
class Stock(Structure):
_fields = ['name', 'shares', 'price']
name = SizedRegexString('name', maxlen = 8, pat = r'[A-Z]+')
shares = PositiveInteger('shares')
price = PositiveFloat('price')
s1 = Stock('GOOG', 100, 490.1)
| kmad1729/python_notes | metaprogramming/typely.py | Python | unlicense | 2,708 |
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from d1_cli.version import __version__ # noqa: F401
| DataONEorg/d1_python | client_cli/src/__init__.py | Python | apache-2.0 | 842 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Ciclista.facebook'
db.add_column(u'xbapp_ciclista', 'facebook',
self.gf('django.db.models.fields.CharField')(default='', max_length=70, blank=True),
keep_default=False)
# Adding field 'Ciclista.twitter'
db.add_column(u'xbapp_ciclista', 'twitter',
self.gf('django.db.models.fields.CharField')(default='', max_length=70, blank=True),
keep_default=False)
# Adding field 'Ciclista.score'
db.add_column(u'xbapp_ciclista', 'score',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Ciclista.facebook'
db.delete_column(u'xbapp_ciclista', 'facebook')
# Deleting field 'Ciclista.twitter'
db.delete_column(u'xbapp_ciclista', 'twitter')
# Deleting field 'Ciclista.score'
db.delete_column(u'xbapp_ciclista', 'score')
models = {
u'xbapp.ciclista': {
'Meta': {'object_name': 'Ciclista'},
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '70', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '70', 'blank': 'True'})
}
}
complete_apps = ['xbapp'] | developingo/Bicitacora | xbapp/migrations/0002_auto__add_field_ciclista_facebook__add_field_ciclista_twitter__add_fie.py | Python | gpl-3.0 | 1,839 |
#!/usr/bin/env python
import os
from glob import glob
from subprocess import check_output, CalledProcessError
def get_usb_devices():
try:
sdb_devices = map(os.path.realpath, glob('/sys/block/sd*'))
usb_devices = (dev for dev in sdb_devices
if 'usb' in dev.split('/')[5])
return dict((os.path.basename(dev), dev) for dev in usb_devices)
    except Exception:
        # Return an empty mapping rather than None so callers can iterate.
        return {}
def get_mount_points(devices=None):
try:
        devices = devices or get_usb_devices()  # if devices is None, discover them
output = check_output(['mount']).splitlines()
is_usb = lambda path: any(dev in path for dev in devices)
usb_info = (line for line in output if is_usb(line.split()[0]))
return [(info.split()[0], info.split()[2]) for info in usb_info]
    except Exception:
        # Likewise, return an empty list so callers can iterate safely.
        return []
| DrewMeyersCUboulder/UPOD_Bridge | Atheros/devices.py | Python | mit | 846 |
'relational table'
import collections
class Table(object):
    def __init__(self, column_names):
        self._column_names = column_names
        self._rows = []  # each row is a namedtuple over column_names
    def insert(self, values):
        'values::namedtuple for now'
        self._rows.append(values)
    def select_where(self, selected_column, filter):
        '''return list of selected_column values from rows
        that satisfy filter(namedtuple) --> bool
        '''
        return [getattr(row, selected_column)
                for row in self._rows
                if filter(row)]
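# A quick usage sketch (hypothetical data):
# Row = collections.namedtuple('Row', ['city', 'price'])
# t = Table(['city', 'price'])
# t.insert(Row('sf', 100))
# t.select_where('price', lambda row: row.city == 'sf')  # --> [100]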
| rlowrance/re-avm | Table.py | Python | bsd-3-clause | 397 |
from distutils.core import setup, Extension
setup(name='gv_socket',
version='0.1',
description='Python GaVer Socket',
author='Emiliano A Billi',
author_email='[email protected]',
url='http://www.gaverprotocol.com',
py_modules=["gv_socket"],
ext_modules=[
Extension("pylibgv", ["pylibgv.c"],include_dirs=['./', '/usr/include/python2.6/'],libraries=['gv']),
])
| emilianobilli/gv_socket | python/setup.py | Python | gpl-2.0 | 432 |
"""
radish
~~~~~~
The root from red to green. BDD tooling for Python.
:copyright: (c) 2019 by Timo Furrer <[email protected]>
:license: MIT, see LICENSE for more details.
"""
class Tag:
"""Represents a single Gherkin Tag"""
def __init__(self, name: str, path: str, line: int) -> None:
self.name = name
self.path = path
self.line = line
def __repr__(self) -> str:
return "<Tag: {name} @ {path}:{line}>".format(
name=self.name, path=self.path, line=self.line
) # pragma: no cover
| radish-bdd/radish | src/radish/models/tag.py | Python | mit | 549 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
from scipy.constants import N_A
from pymatgen.core.periodic_table import Element
from pymatgen.core.units import Charge, Time
from pymatgen.analysis.reaction_calculator import BalancedReaction
from pymatgen.core.composition import Composition
from pymatgen.apps.battery.battery_abc import AbstractElectrode, \
AbstractVoltagePair
from pymatgen.analysis.phase_diagram import PhaseDiagram
from monty.json import MontyDecoder
"""
This module contains the classes to build a ConversionElectrode.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Feb 1, 2012"
__status__ = "Beta"
class ConversionElectrode(AbstractElectrode):
"""
Class representing a ConversionElectrode.
"""
def __init__(self, voltage_pairs, working_ion_entry, initial_comp):
"""
General constructor for ConversionElectrode. However, it is usually
easier to construct a ConversionElectrode using one of the static
constructors provided.
Args:
voltage_pairs: The voltage pairs making up the Conversion
Electrode.
working_ion_entry: A single ComputedEntry or PDEntry
representing the element that carries charge across the
battery, e.g. Li.
initial_comp: Starting composition for ConversionElectrode.
"""
self._composition = initial_comp
self._working_ion_entry = working_ion_entry
ion_el = self._working_ion_entry.composition.elements[0]
self._working_ion = ion_el.symbol
self._vpairs = voltage_pairs
@staticmethod
def from_composition_and_pd(comp, pd, working_ion_symbol="Li"):
"""
Convenience constructor to make a ConversionElectrode from a
composition and a phase diagram.
Args:
comp:
Starting composition for ConversionElectrode, e.g.,
Composition("FeF3")
pd:
A PhaseDiagram of the relevant system (e.g., Li-Fe-F)
working_ion_symbol:
Element symbol of working ion. Defaults to Li.
"""
working_ion = Element(working_ion_symbol)
entry = None
working_ion_entry = None
for e in pd.stable_entries:
if e.composition.reduced_formula == comp.reduced_formula:
entry = e
elif e.is_element and \
e.composition.reduced_formula == working_ion_symbol:
working_ion_entry = e
if not entry:
            raise ValueError("No stable compound found at composition {}."
                             .format(comp))
profile = pd.get_element_profile(working_ion, comp)
# Need to reverse because voltage goes form most charged to most
# discharged.
profile.reverse()
if len(profile) < 2:
return None
        working_ion = working_ion_entry.composition.elements[0].symbol
normalization_els = {}
for el, amt in comp.items():
if el != Element(working_ion):
normalization_els[el] = amt
vpairs = [ConversionVoltagePair.from_steps(profile[i], profile[i + 1],
normalization_els)
for i in range(len(profile) - 1)]
return ConversionElectrode(vpairs, working_ion_entry, comp)
@staticmethod
def from_composition_and_entries(comp, entries_in_chemsys,
working_ion_symbol="Li"):
"""
Convenience constructor to make a ConversionElectrode from a
composition and all entries in a chemical system.
Args:
comp: Starting composition for ConversionElectrode, e.g.,
Composition("FeF3")
entries_in_chemsys: Sequence containing all entries in a
chemical system. E.g., all Li-Fe-F containing entries.
working_ion_symbol: Element symbol of working ion. Defaults to Li.
"""
pd = PhaseDiagram(entries_in_chemsys)
return ConversionElectrode.from_composition_and_pd(comp, pd,
working_ion_symbol)
def get_sub_electrodes(self, adjacent_only=True):
"""
If this electrode contains multiple voltage steps, then it is possible
to use only a subset of the voltage steps to define other electrodes.
For example, an LiTiO2 electrode might contain three subelectrodes:
[LiTiO2 --> TiO2, LiTiO2 --> Li0.5TiO2, Li0.5TiO2 --> TiO2]
This method can be used to return all the subelectrodes with some
options
Args:
adjacent_only: Only return electrodes from compounds that are
adjacent on the convex hull, i.e. no electrodes returned
will have multiple voltage steps if this is set true
Returns:
A list of ConversionElectrode objects
"""
if adjacent_only:
return [self.__class__(self._vpairs[i:i + 1],
self._working_ion_entry, self._composition)
for i in range(len(self._vpairs))]
sub_electrodes = []
for i in range(len(self._vpairs)):
for j in range(i, len(self._vpairs)):
sub_electrodes.append(self.__class__(self._vpairs[i:j + 1],
self._working_ion_entry,
self._composition))
return sub_electrodes
@property
def composition(self):
return self._composition
@property
def working_ion(self):
"""
The working ion as an Element object
"""
return self._working_ion_entry.composition.elements[0]
@property
def working_ion_entry(self):
return self._working_ion_entry
@property
def voltage_pairs(self):
return self._vpairs
def is_super_electrode(self, conversion_electrode):
"""
Checks if a particular conversion electrode is a sub electrode of the
current electrode. Starting from a more lithiated state may result in
a subelectrode that is essentially on the same path. For example, a
ConversionElectrode formed by starting from an FePO4 composition would
be a super_electrode of a ConversionElectrode formed from an LiFePO4
composition.
"""
for pair1 in conversion_electrode:
rxn1 = pair1.rxn
all_formulas1 = set([rxn1.all_comp[i].reduced_formula
for i in range(len(rxn1.all_comp))
if abs(rxn1.coeffs[i]) > 1e-5])
for pair2 in self:
rxn2 = pair2.rxn
all_formulas2 = set([rxn2.all_comp[i].reduced_formula
for i in range(len(rxn2.all_comp))
if abs(rxn2.coeffs[i]) > 1e-5])
if all_formulas1 == all_formulas2:
break
else:
return False
return True
def __eq__(self, conversion_electrode):
"""
        Check if two electrodes are exactly the same.
"""
if len(self) != len(conversion_electrode):
return False
for pair1 in conversion_electrode:
rxn1 = pair1.rxn
all_formulas1 = set([rxn1.all_comp[i].reduced_formula
for i in range(len(rxn1.all_comp))
if abs(rxn1.coeffs[i]) > 1e-5])
for pair2 in self:
rxn2 = pair2.rxn
all_formulas2 = set([rxn2.all_comp[i].reduced_formula
for i in range(len(rxn2.all_comp))
if abs(rxn2.coeffs[i]) > 1e-5])
if all_formulas1 == all_formulas2:
break
else:
return False
return True
    def __hash__(self):
        # Constant hash: all ConversionElectrodes land in one hash bucket,
        # leaving comparisons entirely to __eq__ above.
        return 7
def __str__(self):
return self.__repr__()
def __repr__(self):
output = ["Conversion electrode with formula {} and nsteps {}"
.format(self._composition.reduced_formula, self.num_steps),
"Avg voltage {} V, min voltage {} V, max voltage {} V"
.format(self.get_average_voltage(), self.min_voltage,
self.max_voltage),
"Capacity (grav.) {} mAh/g, capacity (vol.) {} Ah/l"
.format(self.get_capacity_grav(),
self.get_capacity_vol()),
"Specific energy {} Wh/kg, energy density {} Wh/l"
.format(self.get_specific_energy(),
self.get_energy_density())]
return "\n".join(output)
@classmethod
def from_dict(cls, d):
dec = MontyDecoder()
return cls(dec.process_decoded(d["voltage_pairs"]),
dec.process_decoded(d["working_ion_entry"]),
Composition(d["initial_comp"]))
def as_dict(self):
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"voltage_pairs": [v.as_dict() for v in self._vpairs],
"working_ion_entry": self.working_ion_entry.as_dict(),
"initial_comp": self._composition.as_dict()}
def get_summary_dict(self, print_subelectrodes=True):
"""
Args:
print_subelectrodes:
Also print data on all the possible subelectrodes
Returns:
a summary of this electrode"s properties in dictionary format
"""
d = {}
framework_comp = Composition({k: v
for k, v in self._composition.items()
if k.symbol != self.working_ion.symbol})
d["framework"] = framework_comp.to_data_dict
d["framework_pretty"] = framework_comp.reduced_formula
d["average_voltage"] = self.get_average_voltage()
d["max_voltage"] = self.max_voltage
d["min_voltage"] = self.min_voltage
d["max_delta_volume"] = self.max_delta_volume
d["max_instability"] = 0
d["max_voltage_step"] = self.max_voltage_step
d["nsteps"] = self.num_steps
d["capacity_grav"] = self.get_capacity_grav()
d["capacity_vol"] = self.get_capacity_vol()
d["energy_grav"] = self.get_specific_energy()
d["energy_vol"] = self.get_energy_density()
d["working_ion"] = self.working_ion.symbol
d["reactions"] = []
d["reactant_compositions"] = []
comps = []
frac = []
for pair in self._vpairs:
rxn = pair.rxn
frac.append(pair.frac_charge)
frac.append(pair.frac_discharge)
d["reactions"].append(str(rxn))
for i in range(len(rxn.coeffs)):
if abs(rxn.coeffs[i]) > 1e-5 and rxn.all_comp[i] not in comps:
comps.append(rxn.all_comp[i])
if abs(rxn.coeffs[i]) > 1e-5 and \
rxn.all_comp[i].reduced_formula != d["working_ion"]:
reduced_comp = rxn.all_comp[i].reduced_composition
comp_dict = reduced_comp.as_dict()
d["reactant_compositions"].append(comp_dict)
d["fracA_charge"] = min(frac)
d["fracA_discharge"] = max(frac)
d["nsteps"] = self.num_steps
if print_subelectrodes:
f_dict = lambda c: c.get_summary_dict(print_subelectrodes=False)
d["adj_pairs"] = map(f_dict,
self.get_sub_electrodes(adjacent_only=True))
d["all_pairs"] = map(f_dict,
self.get_sub_electrodes(adjacent_only=False))
return d
class ConversionVoltagePair(AbstractVoltagePair):
"""
A VoltagePair representing a Conversion Reaction with a defined voltage.
Typically not initialized directly but rather used by ConversionElectrode.
Args:
balanced_rxn (BalancedReaction): BalancedReaction for the step
voltage (float): Voltage for the step
mAh (float): Capacity of the step
vol_charge (float): Volume of charged state
vol_discharge (float): Volume of discharged state
mass_charge (float): Mass of charged state
mass_discharge (float): Mass of discharged state
frac_charge (float): Fraction of working ion in the charged state
frac_discharge (float): Fraction of working ion in the discharged state
entries_charge ([ComputedEntry]): Entries in the charged state
entries_discharge ([ComputedEntry]): Entries in discharged state
working_ion_entry (ComputedEntry): Entry of the working ion.
"""
def __init__(self, balanced_rxn, voltage, mAh, vol_charge, vol_discharge,
mass_charge, mass_discharge, frac_charge, frac_discharge,
entries_charge, entries_discharge, working_ion_entry):
self._working_ion_entry = working_ion_entry
working_ion = self._working_ion_entry.composition.elements[0].symbol
self._voltage = voltage
self._mAh = mAh
self._vol_charge = vol_charge
self._mass_charge = mass_charge
self._mass_discharge = mass_discharge
self._vol_discharge = vol_discharge
self._frac_charge = frac_charge
self._frac_discharge = frac_discharge
self._rxn = balanced_rxn
self._working_ion = working_ion
self._entries_charge = entries_charge
self._entries_discharge = entries_discharge
@staticmethod
def from_steps(step1, step2, normalization_els):
"""
Creates a ConversionVoltagePair from two steps in the element profile
from a PD analysis.
Args:
step1: Starting step
step2: Ending step
            normalization_els: Elements to normalize the reaction by, to
                ensure correct capacities.
"""
working_ion_entry = step1["element_reference"]
working_ion = working_ion_entry.composition.elements[0].symbol
voltage = -step1["chempot"] + working_ion_entry.energy_per_atom
mAh = (step2["evolution"] - step1["evolution"]) \
* Charge(1, "e").to("C") * Time(1, "s").to("h") * N_A * 1000
licomp = Composition(working_ion)
prev_rxn = step1["reaction"]
reactants = {comp: abs(prev_rxn.get_coeff(comp))
for comp in prev_rxn.products if comp != licomp}
curr_rxn = step2["reaction"]
products = {comp: abs(curr_rxn.get_coeff(comp))
for comp in curr_rxn.products if comp != licomp}
reactants[licomp] = (step2["evolution"] - step1["evolution"])
rxn = BalancedReaction(reactants, products)
for el, amt in normalization_els.items():
if rxn.get_el_amount(el) > 1e-6:
rxn.normalize_to_element(el, amt)
break
prev_mass_dischg = sum([prev_rxn.all_comp[i].weight
* abs(prev_rxn.coeffs[i])
for i in range(len(prev_rxn.all_comp))]) / 2
vol_charge = sum([abs(prev_rxn.get_coeff(e.composition))
* e.structure.volume
for e in step1["entries"]
if e.composition.reduced_formula != working_ion])
mass_discharge = sum([curr_rxn.all_comp[i].weight
* abs(curr_rxn.coeffs[i])
for i in range(len(curr_rxn.all_comp))]) / 2
mass_charge = prev_mass_dischg
vol_discharge = sum([abs(curr_rxn.get_coeff(e.composition))
* e.structure.volume
for e in step2["entries"]
if e.composition.reduced_formula != working_ion])
totalcomp = Composition({})
for comp in prev_rxn.products:
if comp.reduced_formula != working_ion:
totalcomp += comp * abs(prev_rxn.get_coeff(comp))
frac_charge = totalcomp.get_atomic_fraction(Element(working_ion))
totalcomp = Composition({})
for comp in curr_rxn.products:
if comp.reduced_formula != working_ion:
totalcomp += comp * abs(curr_rxn.get_coeff(comp))
frac_discharge = totalcomp.get_atomic_fraction(Element(working_ion))
entries_charge = step2["entries"]
entries_discharge = step1["entries"]
return ConversionVoltagePair(rxn, voltage, mAh, vol_charge,
vol_discharge, mass_charge,
mass_discharge,
frac_charge, frac_discharge,
entries_charge, entries_discharge,
working_ion_entry)
@property
def working_ion(self):
return self._working_ion
@property
def entries_charge(self):
return self._entries_charge
@property
def entries_discharge(self):
return self._entries_discharge
@property
def frac_charge(self):
return self._frac_charge
@property
def frac_discharge(self):
return self._frac_discharge
@property
def rxn(self):
return self._rxn
@property
def voltage(self):
return self._voltage
@property
def mAh(self):
return self._mAh
@property
def mass_charge(self):
return self._mass_charge
@property
def mass_discharge(self):
return self._mass_discharge
@property
def vol_charge(self):
return self._vol_charge
@property
def vol_discharge(self):
return self._vol_discharge
@property
def working_ion_entry(self):
return self._working_ion_entry
def __repr__(self):
output = ["Conversion voltage pair with working ion {}"
.format(self._working_ion_entry.composition.reduced_formula),
"Reaction : {}".format(self._rxn),
"V = {}, mAh = {}".format(self.voltage, self.mAh),
"frac_charge = {}, frac_discharge = {}"
.format(self.frac_charge, self.frac_discharge),
"mass_charge = {}, mass_discharge = {}"
.format(self.mass_charge, self.mass_discharge),
"vol_charge = {}, vol_discharge = {}"
.format(self.vol_charge, self.vol_discharge)]
return "\n".join(output)
def __str__(self):
return self.__repr__()
@classmethod
def from_dict(cls, d):
dec = MontyDecoder()
working_ion_entry = dec.process_decoded(d["working_ion_entry"])
balanced_rxn = dec.process_decoded(d["balanced_rxn"])
entries_charge = dec.process_decoded(d["entries_charge"])
entries_discharge = dec.process_decoded(d["entries_discharge"])
return ConversionVoltagePair(balanced_rxn, d["voltage"], d["mAh"],
d["vol_charge"], d["vol_discharge"],
d["mass_charge"], d["mass_discharge"],
d["frac_charge"], d["frac_discharge"],
entries_charge, entries_discharge,
working_ion_entry)
def as_dict(self):
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"working_ion_entry": self._working_ion_entry.as_dict(),
"voltage": self._voltage, "mAh": self._mAh,
"vol_charge": self._vol_charge,
"mass_charge": self._mass_charge,
"mass_discharge": self._mass_discharge,
"vol_discharge": self._vol_discharge,
"frac_charge": self._frac_charge,
"frac_discharge": self._frac_discharge,
"balanced_rxn": self._rxn.as_dict(),
"entries_charge": [e.as_dict() for e in self._entries_charge],
"entries_discharge": [e.as_dict() for e in
self._entries_discharge]}
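# A minimal round-trip sketch, assuming `pair` is an existing
# ConversionVoltagePair instance (the variable name is hypothetical):
#
# >>> d = pair.as_dict()                 # MSONable-style dict
# >>> same = ConversionVoltagePair.from_dict(d)
# >>> str(same) == str(pair)
# True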
| Bismarrck/pymatgen | pymatgen/apps/battery/conversion_battery.py | Python | mit | 20,784 |
# Copyright 2008-2010, Red Hat, Inc
# Dan Radez <[email protected]>
#
# This software may be freely redistributed under the terms of the GNU
# general public license.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
| oppianmatt/django-loki | src/loki/management/commands/__init__.py | Python | gpl-3.0 | 359 |
# GUI object/properties browser.
# Copyright (C) 2011 Matiychuk D.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
#Boa:App:BoaApp
import wx
import _mainframe
modules = {'_mainframe': [0, '', '_mainframe.py'], 'proxy': [0, '', 'proxy.py']}
class BoaApp(wx.App):
def OnInit(self):
self.main = _mainframe.create(None)
self.main.Center()
self.main.Show()
self.SetTopWindow(self.main)
return True
def main():
application = BoaApp(0)
application.MainLoop()
if __name__ == '__main__':
main()
| moden-py/SWAPY-deleting | swapy-ob.py | Python | lgpl-2.1 | 1,243 |
# coding: utf8
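# web2py translation file: each key below is the exact source string passed
# to T(...) in the application, so keys must stay byte-identical to the code
# (including any typos that exist there); only the values carry the fr-ca
# translations.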
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': '%s rangées effacées',
'%s rows updated': '%s rangées mises à jour',
'911 Operator (calltaker)': '911 Operator (calltaker)',
'Action, scope, etc.': 'Action, scope, etc.',
'Actual Phone Transcript': 'Actual Phone Transcript',
'Arrested Adult': 'Arrested Adult',
'Arrested Minor': 'Arrested Minor',
'Arrived at': 'Arrived at',
'Authentication': 'Authentication',
'Available databases and tables': 'Bases de données et des tableaux disponibles',
'Calling party name': 'Calling party name',
'Cannot be empty': 'Ne peut pas être vide',
'Check to delete': 'Cliquez pour supprimer',
'Check to delete:': 'Cliquez pour supprimer:',
'Client IP': 'IP client',
'Comments': 'Comments',
'Complete the form': 'Complete the form',
'Confirmed': 'Confirmed',
'Contact': 'Contact',
'Controller': 'Contrôleur',
'Copyright': 'Copyright',
'Created by': 'Created by',
'Current request': 'Demande actuelle',
'Current response': 'Réponse actuelle',
'Current session': 'Session en cours',
'DB Model': 'Modèle DB',
'Database': 'Base de données',
'Delay': 'Delay',
'Delay in seconds (calculated)': 'Delay in seconds (calculated)',
'Delete:': 'Supprimer:',
'Description': 'Description',
'Dispatch date & time': 'Dispatch date & time',
'Dispatched at': 'Dispatched at',
'Dispatched by': 'Dispatched by',
'E-mail': 'Courriel',
'Edit': 'Éditer',
'Edit This App': 'Modifier cette application',
'Edit current record': "Modifier l'enregistrement courant",
'Event': 'Event',
'First name': 'Prénom',
'Function disabled': 'Function disabled',
'Group ID': 'Groupe ID',
'Hello World': 'Bonjour tout le monde',
'Id': 'Id',
'Import/Export': 'Importer/Exporter',
'Incident %s created!': 'Incident %s created!',
'Incident Type': 'Incident Type',
'Incident not saved! (correct errors!)': 'Incident not saved! (correct errors!)',
'Index': 'Index',
'Internal State': 'État interne',
'Invalid Query': 'Requête Invalide',
'Invalid email': 'Courriel invalide',
'Invalid incident!': 'Invalid incident!',
'Last modification date & time': 'Last modification date & time',
'Last name': 'Nom',
'Layout': 'Mise en page',
'Location': 'Location',
'Login': 'Connectez-vous',
'Lost Password': 'Mot de passe perdu',
'Main Menu': 'Menu principal',
'Main unit assigned': 'Main unit assigned',
'Medical': 'Medical',
'Menu Model': 'Menu modèle',
'Name': 'Nom',
'New Record': 'Nouvel enregistrement',
'No databases in this application': "Cette application n'a pas de bases de données",
'On scence date & time': 'On scence date & time',
'Origin': 'Origine',
'Password': 'Mot de passe',
"Password fields don't match": 'Les mots de passe ne correspondent pas',
'Phone': 'Phone',
'Phone Operator': 'Phone Operator',
'Powered by': 'Powered par',
'Preventable': 'Preventable',
'Priority classification': 'Priority classification',
'Query:': 'Requête:',
'Record ID': 'Record ID',
'Register': "S'inscrire",
'Registration key': "Clé d'enregistrement",
'Relevant': 'Relevant',
'Remember me (for 30 days)': 'Se souvenir de moi (pendant 30 jours)',
'Reported by': 'Reported by',
'Request reset password': 'Demande de réinitialiser le mot clé',
'Reset Password key': 'Réinitialiser le mot clé',
'Role': 'Rôle',
'Rows in table': 'Lignes du tableau',
'Rows selected': 'Lignes sélectionnées',
'Seized Drugs': 'Seized Drugs',
'Seized Vehicles': 'Seized Vehicles',
'Seized Weapons': 'Seized Weapons',
'Severity': 'Severity',
'Source telephone number': 'Source telephone number',
'Start date & time': 'Start date & time',
'Started at': 'Started at',
'Status': 'Status',
'Street name, house number, direction (if any)': 'Street name, house number, direction (if any)',
'Stylesheet': 'Feuille de style',
'Submit': 'Soumettre',
'Sure you want to delete this object?': 'Souhaitez-vous vraiment effacer cet objet ?',
'Synopsis': 'Synopsis',
'Table name': 'Nom du tableau',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
'The output of the file is a dictionary that was rendered by the view': 'The output of the file is a dictionary that was rendered by the view',
'This is a copy of the scaffolding application': 'This is a copy of the scaffolding application',
'Timestamp': 'Timestamp',
'Unit': 'Unit',
'Update:': 'Mise à jour:',
'Updated at': 'Updated at',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
'User %(id)s Logged-in': 'Utilisateur %(id)s connecté',
'User %(id)s Registered': 'Utilisateur %(id)s enregistré',
'User ID': 'ID utilisateur',
'Verify Password': 'Vérifiez le mot de passe',
'View': 'Présentation',
'Welcome %s': 'Bienvenue %s',
'Welcome to web2py': 'Bienvenue sur web2py',
'Which called the function': 'Which called the function',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url': 'You visited the url',
'Zone': 'Zone',
'appadmin is disabled because insecure channel': 'appadmin is disabled because insecure channel',
'cache': 'cache',
'change password': 'changer le mot de passe',
'click here for online examples': 'cliquez ici pour voir des exemples en ligne',
'click here for the administrative interface': "cliquez ici pour aller à l'interface d'administration",
'customize me!': 'me personnaliser!',
'data uploaded': 'données téléchargées',
'database': 'base de données',
'database %s select': 'base de données %s sélectionner',
'db': 'db',
'design': 'design',
'documentation': 'documentation',
'done!': 'fait!',
'edit profile': 'modifier le profil',
'export as csv file': 'exporter sous forme de fichier csv',
'insert new': 'insérer un nouveau',
'insert new %s': 'insérer un nouveau %s',
'invalid request': 'demande non valide',
'located in the file': 'located in the file',
'login': 'connectez-vous',
'logout': 'déconnectez-vous',
'lost password': 'mot de passe perdu',
'new record inserted': 'nouvel enregistrement inséré',
'next 100 rows': '100 prochaines lignes',
'or import from csv file': "ou importer d'un fichier CSV",
'previous 100 rows': '100 lignes précédentes',
'record': 'enregistrement',
'record does not exist': "l'archive n'existe pas",
'record id': "id d'enregistrement",
'register': "s'inscrire",
'selected': 'sélectionné',
'state': 'état',
'table': 'tableau',
'unable to parse csv file': "incapable d'analyser le fichier csv",
}
| google-code/ampatu | languages/fr-ca.py | Python | agpl-3.0 | 7,028 |
# Copyright (c) 2014 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests.unit import fake_constants as fake
from cinder.volume.drivers.dell import dell_storagecenter_api
from cinder.volume.drivers.dell import dell_storagecenter_fc
# We patch these here as they are used by every test to keep
# from trying to contact a Dell Storage Center.
@mock.patch.object(dell_storagecenter_api.HttpClient,
'__init__',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'open_connection')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'close_connection')
class DellSCSanFCDriverTestCase(test.TestCase):
VOLUME = {u'instanceId': u'64702.4829',
u'scSerialNumber': 64702,
u'replicationSource': False,
u'liveVolume': False,
u'vpdId': 4831,
u'objectType': u'ScVolume',
u'index': 4829,
u'volumeFolderPath': u'dopnstktst/',
u'hostCacheEnabled': False,
u'usedByLegacyFluidFsNasVolume': False,
u'inRecycleBin': False,
u'volumeFolderIndex': 17,
u'instanceName': u'5729f1db-4c45-416c-bc15-c8ea13a4465d',
u'statusMessage': u'',
u'status': u'Down',
u'storageType': {u'instanceId': u'64702.1',
u'instanceName': u'Assigned - Redundant - 2 MB',
u'objectType': u'ScStorageType'},
u'cmmDestination': False,
u'replicationDestination': False,
u'volumeFolder': {u'instanceId': u'64702.17',
u'instanceName': u'opnstktst',
u'objectType': u'ScVolumeFolder'},
u'deviceId': u'6000d31000fcbe0000000000000012df',
u'active': False,
u'portableVolumeDestination': False,
u'deleteAllowed': True,
u'name': u'5729f1db-4c45-416c-bc15-c8ea13a4465d',
u'scName': u'Storage Center 64702',
u'secureDataUsed': False,
u'serialNumber': u'0000fcbe-000012df',
u'replayAllowed': False,
u'flashOptimized': False,
u'configuredSize': u'1.073741824E9 Bytes',
u'mapped': False,
u'cmmSource': False}
SCSERVER = {u'scName': u'Storage Center 64702',
u'volumeCount': 0,
u'removeHbasAllowed': True,
u'legacyFluidFs': False,
u'serverFolderIndex': 4,
u'alertOnConnectivity': True,
u'objectType': u'ScPhysicalServer',
u'instanceName': u'Server_21000024ff30441d',
u'instanceId': u'64702.47',
u'serverFolderPath': u'opnstktst/',
u'portType': [u'FibreChannel'],
u'type': u'Physical',
u'statusMessage': u'Only 5 of 6 expected paths are up',
u'status': u'Degraded',
u'scSerialNumber': 64702,
u'serverFolder': {u'instanceId': u'64702.4',
u'instanceName': u'opnstktst',
u'objectType': u'ScServerFolder'},
u'parentIndex': 0,
u'connectivity': u'Partial',
u'hostCacheIndex': 0,
u'deleteAllowed': True,
u'pathCount': 5,
u'name': u'Server_21000024ff30441d',
u'hbaPresent': True,
u'hbaCount': 2,
u'notes': u'Created by Dell Cinder Driver',
u'mapped': False,
u'operatingSystem': {u'instanceId': u'64702.38',
u'instanceName': u'Red Hat Linux 6.x',
u'objectType': u'ScServerOperatingSystem'}
}
MAPPING = {u'instanceId': u'64702.2183',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64702',
u'instanceName': u'SN 64702',
u'objectType': u'ScController'},
u'lunUsed': [1],
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'volume': {u'instanceId': u'64702.4829',
u'instanceName':
u'5729f1db-4c45-416c-bc15-c8ea13a4465d',
u'objectType': u'ScVolume'},
u'connectivity': u'Up',
u'readOnly': False,
u'objectType': u'ScMappingProfile',
u'hostCache': False,
u'mappedVia': u'Server',
u'mapCount': 2,
u'instanceName': u'4829-47',
u'lunRequested': u'N/A'
}
def setUp(self):
super(DellSCSanFCDriverTestCase, self).setUp()
        # configuration is a mock. A mock is pretty much a blank slate.
        # Patches made inside setUp are not managed the way the class-level
        # decorators are, so we only set simple driver config values here.
self.configuration = mock.Mock()
self.configuration.san_is_local = False
self.configuration.san_ip = "192.168.0.1"
self.configuration.san_login = "admin"
self.configuration.san_password = "pwd"
self.configuration.dell_sc_ssn = 64702
self.configuration.dell_sc_server_folder = 'opnstktst'
self.configuration.dell_sc_volume_folder = 'opnstktst'
self.configuration.dell_sc_api_port = 3033
self._context = context.get_admin_context()
self.driver = dell_storagecenter_fc.DellStorageCenterFCDriver(
configuration=self.configuration)
self.driver.do_setup(None)
self.driver._stats = {'QoS_support': False,
'volume_backend_name': 'dell-1',
'free_capacity_gb': 12123,
'driver_version': '1.0.1',
'total_capacity_gb': 12388,
'reserved_percentage': 0,
'vendor_name': 'Dell',
'storage_protocol': 'FC'}
        # Start with no replication backends; tests that need them add
        # their own (the mocks would otherwise carry state between tests).
self.driver.backends = None
self.driver.replication_enabled = False
self.volid = '5729f1db-4c45-416c-bc15-c8ea13a4465d'
self.volume_name = "volume" + self.volid
self.connector = {'ip': '192.168.0.77',
'host': 'cinderfc-vm',
'wwnns': ['20000024ff30441c', '20000024ff30441d'],
'initiator': 'iqn.1993-08.org.debian:01:e1b1312f9e1',
'wwpns': ['21000024ff30441c', '21000024ff30441d']}
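    # Note on the decorator stacks below: mock.patch decorators apply
    # bottom-up, so the patch closest to each test function arrives as its
    # first mock argument (e.g. mock_find_wwns before mock_map_volume in
    # test_initialize_connection), with the class-level patches
    # (close_connection, open_connection, __init__) arriving last.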
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'create_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
def test_initialize_connection(self,
mock_find_wwns,
mock_map_volume,
mock_get_volume,
mock_find_volume,
mock_create_server,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
res = self.driver.initialize_connection(volume, connector)
expected = {'data':
{'discard': True,
'initiator_target_map':
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']},
'target_discovered': True,
'target_lun': 1,
'target_wwn':
[u'5000D31000FCBE3D', u'5000D31000FCBE35']},
'driver_volume_type': 'fibre_channel'}
self.assertEqual(expected, res, 'Unexpected return data')
        # verify find_volume and get_volume were each called exactly once
mock_find_volume.assert_called_once_with(fake.VOLUME_ID, None, False)
mock_get_volume.assert_called_once_with(self.VOLUME[u'instanceId'])
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(dell_storagecenter_fc.DellStorageCenterFCDriver,
'_is_live_vol')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns')
@mock.patch.object(dell_storagecenter_fc.DellStorageCenterFCDriver,
'initialize_secondary')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_live_volume')
def test_initialize_connection_live_vol(self,
mock_get_live_volume,
mock_initialize_secondary,
mock_find_wwns,
mock_is_live_volume,
mock_map_volume,
mock_get_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_is_live_volume.return_value = True
mock_find_wwns.return_value = (
1, [u'5000D31000FCBE3D', u'5000D31000FCBE35'],
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']})
mock_initialize_secondary.return_value = (
1, [u'5000D31000FCBE3E', u'5000D31000FCBE36'],
{u'21000024FF30441E': [u'5000D31000FCBE36'],
u'21000024FF30441F': [u'5000D31000FCBE3E']})
mock_get_live_volume.return_value = (sclivevol, False)
res = self.driver.initialize_connection(volume, connector)
expected = {'data':
{'discard': True,
'initiator_target_map':
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D'],
u'21000024FF30441E': [u'5000D31000FCBE36'],
u'21000024FF30441F': [u'5000D31000FCBE3E']},
'target_discovered': True,
'target_lun': 1,
'target_wwn': [u'5000D31000FCBE3D', u'5000D31000FCBE35',
u'5000D31000FCBE3E', u'5000D31000FCBE36']},
'driver_volume_type': 'fibre_channel'}
self.assertEqual(expected, res, 'Unexpected return data')
        # verify find_volume and get_volume were each called exactly once
mock_find_volume.assert_called_once_with(fake.VOLUME_ID, None, True)
mock_get_volume.assert_called_once_with(self.VOLUME[u'instanceId'])
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(None, [], {}))
def test_initialize_connection_no_wwns(self,
mock_find_wwns,
mock_map_volume,
mock_get_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
volume,
connector)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'create_server',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(None, [], {}))
def test_initialize_connection_no_server(self,
mock_find_wwns,
mock_map_volume,
mock_find_volume,
mock_create_server,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
volume,
connector)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'map_volume',
return_value=MAPPING)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(None, [], {}))
def test_initialize_connection_vol_not_found(self,
mock_find_wwns,
mock_map_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
volume,
connector)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'map_volume',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(None, [], {}))
def test_initialize_connection_map_vol_fail(self,
mock_find_wwns,
mock_map_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where map_volume returns None (no mappings)
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
volume,
connector)
def test_initialize_secondary(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.map_secondary_volume = mock.MagicMock(
return_value=self.VOLUME)
find_wwns_ret = (1, [u'5000D31000FCBE3D', u'5000D31000FCBE35'],
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']})
mock_api.find_wwns = mock.MagicMock(return_value=find_wwns_ret)
mock_api.get_volume = mock.MagicMock(return_value=self.VOLUME)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
self.assertEqual(find_wwns_ret, ret)
def test_initialize_secondary_create_server(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=None)
mock_api.create_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.map_secondary_volume = mock.MagicMock(
return_value=self.VOLUME)
find_wwns_ret = (1, [u'5000D31000FCBE3D', u'5000D31000FCBE35'],
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']})
mock_api.find_wwns = mock.MagicMock(return_value=find_wwns_ret)
mock_api.get_volume = mock.MagicMock(return_value=self.VOLUME)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
self.assertEqual(find_wwns_ret, ret)
def test_initialize_secondary_no_server(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=None)
mock_api.create_server = mock.MagicMock(return_value=None)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
expected = (None, [], {})
self.assertEqual(expected, ret)
def test_initialize_secondary_map_fail(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.map_secondary_volume = mock.MagicMock(return_value=None)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
expected = (None, [], {})
self.assertEqual(expected, ret)
def test_initialize_secondary_vol_not_found(self,
mock_close_connection,
mock_open_connection,
mock_init):
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.map_secondary_volume = mock.MagicMock(
return_value=self.VOLUME)
mock_api.get_volume = mock.MagicMock(return_value=None)
ret = self.driver.initialize_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
expected = (None, [], {})
self.assertEqual(expected, ret)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume_count',
return_value=1)
def test_terminate_connection(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
res = self.driver.terminate_connection(volume, connector)
mock_unmap_volume.assert_called_once_with(self.VOLUME, self.SCSERVER)
expected = {'driver_volume_type': 'fibre_channel',
'data': {}}
self.assertEqual(expected, res, 'Unexpected return data')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume_count',
return_value=1)
@mock.patch.object(dell_storagecenter_fc.DellStorageCenterFCDriver,
'_is_live_vol')
@mock.patch.object(dell_storagecenter_fc.DellStorageCenterFCDriver,
'terminate_secondary')
def test_terminate_connection_live_vol(self,
mock_terminate_secondary,
mock_is_live_vol,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
mock_terminate_secondary.return_value = (None, [], {})
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
mock_is_live_vol.return_value = sclivevol
res = self.driver.terminate_connection(volume, connector)
mock_unmap_volume.assert_called_once_with(self.VOLUME, self.SCSERVER)
expected = {'driver_volume_type': 'fibre_channel',
'data': {}}
self.assertEqual(expected, res, 'Unexpected return data')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume_count',
return_value=1)
def test_terminate_connection_no_server(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.terminate_connection,
volume,
connector)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume_count',
return_value=1)
def test_terminate_connection_no_volume(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.terminate_connection,
volume,
connector)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(None,
[],
{}))
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume_count',
return_value=1)
def test_terminate_connection_no_wwns(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
res = self.driver.terminate_connection(volume, connector)
expected = {'driver_volume_type': 'fibre_channel',
'data': {}}
self.assertEqual(expected, res, 'Unexpected return data')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=False)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume_count',
return_value=1)
def test_terminate_connection_failure(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
volume = {'id': fake.VOLUME_ID}
connector = self.connector
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.terminate_connection,
volume,
connector)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_server',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_wwns',
return_value=(1,
[u'5000D31000FCBE3D',
u'5000D31000FCBE35'],
{u'21000024FF30441C':
[u'5000D31000FCBE35'],
u'21000024FF30441D':
[u'5000D31000FCBE3D']}))
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_volume_count',
return_value=0)
def test_terminate_connection_vol_count_zero(self,
mock_get_volume_count,
mock_find_wwns,
mock_unmap_volume,
mock_find_volume,
mock_find_server,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where get_volume_count is zero
volume = {'id': fake.VOLUME_ID}
connector = self.connector
res = self.driver.terminate_connection(volume, connector)
mock_unmap_volume.assert_called_once_with(self.VOLUME, self.SCSERVER)
expected = {'data':
{'initiator_target_map':
{u'21000024FF30441C': [u'5000D31000FCBE35'],
u'21000024FF30441D': [u'5000D31000FCBE3D']},
'target_wwn':
[u'5000D31000FCBE3D', u'5000D31000FCBE35']},
'driver_volume_type': 'fibre_channel'}
self.assertEqual(expected, res, 'Unexpected return data')
def test_terminate_secondary(self,
mock_close_connection,
mock_open_connection,
mock_init):
mock_api = mock.MagicMock()
mock_api.find_server = mock.MagicMock(return_value=self.SCSERVER)
mock_api.get_volume = mock.MagicMock(return_value=self.VOLUME)
mock_api.find_wwns = mock.MagicMock(return_value=(None, [], {}))
mock_api.unmap_volume = mock.MagicMock(return_value=True)
sclivevol = {'instanceId': '101.101',
'secondaryVolume': {'instanceId': '102.101',
'instanceName': fake.VOLUME_ID},
'secondaryScSerialNumber': 102}
ret = self.driver.terminate_secondary(mock_api, sclivevol,
['wwn1', 'wwn2'])
expected = (None, [], {})
self.assertEqual(expected, ret)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_storage_usage',
return_value={'availableSpace': 100, 'freeSpace': 50})
def test_update_volume_stats_with_refresh(self,
mock_get_storage_usage,
mock_close_connection,
mock_open_connection,
mock_init):
stats = self.driver.get_volume_stats(True)
self.assertEqual('FC', stats['storage_protocol'])
mock_get_storage_usage.assert_called_once_with()
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'get_storage_usage',
return_value={'availableSpace': 100, 'freeSpace': 50})
def test_get_volume_stats_no_refresh(self,
mock_get_storage_usage,
mock_close_connection,
mock_open_connection,
mock_init):
stats = self.driver.get_volume_stats(False)
self.assertEqual('FC', stats['storage_protocol'])
mock_get_storage_usage.assert_not_called()
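# A hedged invocation sketch (the module path is inferred from the imports
# above; adjust it to the tree you are working in):
#
#   python -m pytest cinder/tests/unit/volume/drivers/dell/test_dellfc.py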
| Nexenta/cinder | cinder/tests/unit/volume/drivers/dell/test_dellfc.py | Python | apache-2.0 | 41,853 |
#!/usr/bin/env python3
import sys
import unittest
from puppyparachute.trace import trace
from puppyparachute.store import format_db
from puppyparachute.tools import diff_db
def main():
z = 1
def f(x):
y = x + z
return y
class C(object):
def __init__(self, a):
self.a = a
def inc(self):
self.a += 1
f(2)
c = C(10)
c.inc()
main1 = main
main1_fn_count = 5
global_var = 0
def main():
global global_var
z = 1
def f(x):
y = x + z + global_var
return y
class C(object):
def __init__(self, a):
self.a = a
def inc(self):
self.a += 2
c = C(10)
c.inc()
f(2)
f(2)
global_var = 100
f(2)
z = 10
f(2)
main2 = main
main2_fn_count = 5
def main():
def f(s):
raise ValueError(s)
try:
f('Catch this error')
except:
pass
main_exc = main
main_exc_fn_count = 2
class Test(unittest.TestCase):
def test_dump(self):
fndb1, ret = trace(main1, [], trace_all=True)
dump1 = format_db(fndb1)
print(dump1)
self.assertEqual(len(fndb1), main1_fn_count)
def test_exception(self):
fndbe, ret = trace(main_exc, [], trace_all=True)
dumpe = format_db(fndbe)
print(dumpe)
self.assertEqual(len(fndbe), main_exc_fn_count)
self.assertTrue(any(
'ValueError' in line and 'Catch this error' in line
for line in dumpe.splitlines()))
def test_main(self):
fndb1, ret1 = trace(main1, [], trace_all=True)
fndb2, ret2 = trace(main2, [], trace_all=True)
print(diff_db(fndb1, fndb2))
self.assertEqual(len(fndb1), main1_fn_count)
self.assertEqual(len(fndb2), main2_fn_count)
self.assertEqual(list(fndb1.keys()), list(fndb2.keys()))
self.assertNotEqual(fndb1, fndb2)
def test_settrace(self):
previous = sys.gettrace()
def nimp():
return 'nimp'
calls = []
def logtrace(*args):
calls.append(args)
sys.settrace(logtrace)
now = sys.gettrace()
nimp()
sys.settrace(previous)
self.assertEqual(now, logtrace)
self.assertTrue(calls)
if __name__ == '__main__':
unittest.main()
| naure/PuppyParachute | tests/test_trace.py | Python | gpl-2.0 | 2,340 |
from __future__ import unicode_literals
from django.conf import settings
from django.contrib.flatpages.forms import FlatpageForm
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from django.test import TestCase, modify_settings, override_settings
from django.utils import translation
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.flatpages', ]})
@override_settings(SITE_ID=1)
class FlatpageAdminFormTests(TestCase):
@classmethod
def setUpTestData(cls):
# don't use the manager because we want to ensure the site exists
# with pk=1, regardless of whether or not it already exists.
cls.site1 = Site(pk=1, domain='example.com', name='example.com')
cls.site1.save()
def setUp(self):
# Site fields cache needs to be cleared after flatpages is added to
# INSTALLED_APPS
Site._meta._expire_cache()
self.form_data = {
'title': "A test page",
'content': "This is a test",
'sites': [settings.SITE_ID],
}
def test_flatpage_admin_form_url_validation(self):
"The flatpage admin form correctly validates urls"
self.assertTrue(FlatpageForm(data=dict(url='/new_flatpage/', **self.form_data)).is_valid())
self.assertTrue(FlatpageForm(data=dict(url='/some.special~chars/', **self.form_data)).is_valid())
self.assertTrue(FlatpageForm(data=dict(url='/some.very_special~chars-here/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a space/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a % char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a ! char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a & char/', **self.form_data)).is_valid())
self.assertFalse(FlatpageForm(data=dict(url='/a ? char/', **self.form_data)).is_valid())
def test_flatpage_requires_leading_slash(self):
form = FlatpageForm(data=dict(url='no_leading_slash/', **self.form_data))
with translation.override('en'):
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['url'], ["URL is missing a leading slash."])
@override_settings(APPEND_SLASH=True,
MIDDLEWARE_CLASSES=['django.middleware.common.CommonMiddleware'])
def test_flatpage_requires_trailing_slash_with_append_slash(self):
form = FlatpageForm(data=dict(url='/no_trailing_slash', **self.form_data))
with translation.override('en'):
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['url'], ["URL is missing a trailing slash."])
@override_settings(APPEND_SLASH=False,
MIDDLEWARE_CLASSES=['django.middleware.common.CommonMiddleware'])
def test_flatpage_doesnt_requires_trailing_slash_without_append_slash(self):
form = FlatpageForm(data=dict(url='/no_trailing_slash', **self.form_data))
self.assertTrue(form.is_valid())
def test_flatpage_admin_form_url_uniqueness_validation(self):
"The flatpage admin form correctly enforces url uniqueness among flatpages of the same site"
data = dict(url='/myflatpage1/', **self.form_data)
FlatpageForm(data=data).save()
f = FlatpageForm(data=data)
with translation.override('en'):
self.assertFalse(f.is_valid())
self.assertEqual(
f.errors,
{'__all__': ['Flatpage with url /myflatpage1/ already exists for site example.com']})
def test_flatpage_admin_form_edit(self):
"""
Existing flatpages can be edited in the admin form without triggering
the url-uniqueness validation.
"""
existing = FlatPage.objects.create(
url="/myflatpage1/", title="Some page", content="The content")
existing.sites.add(settings.SITE_ID)
data = dict(url='/myflatpage1/', **self.form_data)
f = FlatpageForm(data=data, instance=existing)
self.assertTrue(f.is_valid(), f.errors)
updated = f.save()
self.assertEqual(updated.title, "A test page")
def test_flatpage_nosites(self):
data = dict(url='/myflatpage1/', **self.form_data)
data.update({'sites': ''})
f = FlatpageForm(data=data)
self.assertFalse(f.is_valid())
self.assertEqual(
f.errors,
{'sites': [translation.ugettext('This field is required.')]})
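# A minimal standalone usage sketch of the form under test (field values
# are illustrative):
#
# >>> form = FlatpageForm(data={'url': '/about/', 'title': 'About',
# ...                           'content': 'The content', 'sites': [1]})
# >>> form.is_valid()
# True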
| jylaxp/django | tests/flatpages_tests/test_forms.py | Python | bsd-3-clause | 4,568 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .forms import (OpenSQliteForm, OpenPostgresForm,
NewForm, AddTableForm, MyModelForm,
AppUserForm, FormTableColumn)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION
from sqlalchemy import String, Float, Integer, exc # (exc: Exceptions)
from smart_drillholes.reflector.og_reflector import Reflector
from smart_drillholes.reflector.util import (create_model, defineObject,
update, pg_create, fields_generator,
connection_str, tb_data, depend,
adapt_postgresToSqlite, removeOnCascade)
from smart_drillholes.reflector.bugs import check_bugs
from smart_drillholes_gui import settings
from smart_drillholes.core import *
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, redirect
from django.http import JsonResponse, Http404
from django.contrib import messages
from django.forms import formset_factory
from django import forms
from django.urls import reverse
from sqlalchemy.schema import DropTable
import os
import re
def generic_add(request, table_key, oid=None):
engineURL = request.session.get('engineURL')
reflector = Reflector(engineURL)
reflector.reflectTables()
exist = reflector.exist_table(table_key)
if not exist:
msg = "Please verify that the table: '{}' does not exist.".format(table_key)
messages.add_message(request, messages.WARNING, msg)
return redirect('mainapp:reflector')
table = reflector.getOg_table(str(table_key))
fields = fields_generator(table)
generic_model = create_model('generic', attrs=fields)
class MyGenericModelForm(MyModelForm):
class Meta:
model = generic_model
fields = '__all__'
def __init__(self, *args, **kwargs):
super(MyGenericModelForm, self).__init__(*args, **kwargs)
for field in self.fields.values():
field.widget.attrs.update({'class': 'form-control'})
def clean(self):
super(MyGenericModelForm, self).clean()
            if 'FROM' in self.fields and 'TO' in self.fields:
_from = self.cleaned_data.get('FROM')
_to = self.cleaned_data.get('TO')
if _from >= _to:
raise forms.ValidationError({'FROM': "FROM can't be greather or igual than TO"})
if request.method == "POST":
Base = declarative_base()
generic_object = type(str(table_key), (Base,), defineObject(table))
form = MyGenericModelForm(request.POST)
if "update" in request.POST:
oid = request.POST.get('oid')
pks = eval(str(oid))
action = ("update", pks)
elif "insert" in request.POST:
action = ("insert",)
if form.is_valid():
data = form.cleaned_data
# Example:
# Object_table = surveytable(BHID = 3.7, at = '2.0', az = 14.0, dip = 14.0 ,Comments = 'hello')
# session.add(Object_table)
session = reflector.make_session()
if "update" in request.POST:
Base = declarative_base()
query = session.query(generic_object).get(pks)
value = query.__dict__.copy()
del value['_sa_instance_state']
form = MyGenericModelForm(request.POST, initial=value)
if form.has_changed():
for k in form.changed_data:
query.__setattr__(k, data[k])
else:
Object_table = generic_object(**data)
session.add(Object_table)
try:
session.commit()
# session.flush()
except exc.IntegrityError as err:
# (psycopg2.IntegrityError) insert or update on table "assay" violates foreign key constraint "chk_bhid"
# DETAIL: Key (BHID)=(fddf) is not present in table "collar".
session.rollback()
if "violates foreign key constraint" in str(err):
                    m = re.search(r'(DETAIL:).+\W', str(err))
m = str(m.group(0)).partition("DETAIL:")
messages.add_message(request, messages.WARNING, m[2])
messages.add_message(request, messages.INFO, 'Please verify all foreign key constraints.')
return render(request, 'mainapp/row_add.html', {'form': form, 'table_key': table_key, 'action': action})
# postgresql UNIQUE constraint error
elif "duplicate key value violates unique constraint" in str(err):
                    m = re.search(r'(DETAIL:).+\W', str(err))
m = str(m.group(0)).partition("DETAIL:")
messages.add_message(request, messages.WARNING, m[2])
messages.add_message(request, messages.INFO, 'Please verify all unique constraints.')
return render(request, 'mainapp/row_add.html', {'form': form, 'table_key': table_key, 'action': action})
# sqlite UNIQUE constraint error
elif "UNIQUE constraint failed" in str(err):
                    m = re.search(r'(UNIQUE).+\[SQL', str(err))
m = str(m.group(0)).partition("UNIQUE")
m = str(m[1]) + (str(m[2]).strip('[SQL'))
messages.add_message(request, messages.WARNING, m)
messages.add_message(request, messages.INFO, 'Duplicate key value violates unique constraint.')
return render(request, 'mainapp/row_add.html', {'form': form, 'table_key': table_key, 'action': action})
else:
messages.add_message(request, messages.WARNING, str(err))
return render(request, 'mainapp/row_add.html', {'form': form, 'table_key': table_key, 'action': action})
except:
raise
finally:
session.close()
return redirect(reverse('mainapp:reflector', kwargs={'table_key': table_key}))
else:
return render(request, 'mainapp/row_add.html', {'form': form, 'table_key': table_key, 'action': action})
elif oid is not None and request.method == "GET":
pks = oid.split(',')
Base = declarative_base()
object_table = type(str(table_key), (Base,), defineObject(table))
if pks:
session = reflector.make_session()
try:
query = session.query(object_table).get(pks)
except exc.InvalidRequestError as err:
messages.add_message(request, messages.WARNING, str(err))
return redirect(reverse('mainapp:reflector', kwargs={'table_key': table_key}))
if query:
value = query.__dict__.copy()
del value['_sa_instance_state']
model = generic_model(**value)
form = MyGenericModelForm(instance=model)
session.close()
action = ("update", pks)
else:
msg = "Please verify: The row you try to update does not exist."
messages.add_message(request, messages.WARNING, msg)
return redirect(reverse('mainapp:reflector', kwargs={'table_key': table_key}))
# --------------------------------
else:
action = ("insert",)
form = MyGenericModelForm()
return render(request, 'mainapp/row_add.html', {'form': form, 'table_key': table_key, 'action': action})
@login_required
def index(request):
response = render(request,
'mainapp/index.html',
{'ref': 'index'})
return response
# Shows the desktop version
def desktop(request):
request.session['desktop'] = True
return redirect("mainapp:index")
@login_required
def open(request):
db_type = 'sqlite'
if request.method == "GET":
form = OpenSQliteForm()
elif request.method == "POST":
db_type = request.POST.get('db_type')
if db_type == 'sqlite':
form = OpenSQliteForm()
if settings.files_explorer:
selected_file = request.POST.get('selected_file')
dbName = os.path.join(request.POST.get('current_path'), selected_file)
if selected_file == '':
messages.add_message(request, messages.INFO, "Please select a sqlite database file.")
else:
form = OpenSQliteForm(request.POST, request.FILES)
if form.is_valid():
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
dbName = BASE_DIR+'/smart4.sqlite'
if dbName != '':
engineURL = 'sqlite:///'+dbName
con_string = engineURL
elif db_type == 'postgresql':
form = OpenPostgresForm(request.POST)
if form.is_valid():
host = form.cleaned_data.get('db_host')
dbName = form.cleaned_data.get('db_name')
user = form.cleaned_data.get('db_user')
password = form.cleaned_data.get('db_password')
con_string = 'postgresql://{2}:{3}@{0}/{1}'.format(host, dbName, user, password)
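            # e.g. 'postgresql://myuser:secret@localhost/mydb' (illustrative values)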
request.session['engineURL'] = con_string
request.session['db_type'] = db_type
request.session['db_name'] = dbName
reflector = Reflector(con_string)
error = reflector.reflectTables()
if error:
messages.add_message(request, messages.WARNING, error)
else:
cols, tks, data, table_key = update(reflector)
return redirect('mainapp:dashboard')
return render(request, 'mainapp/open.html', {
'form': form,
'files_explorer': settings.files_explorer,
'directory_content': get_folder_content("/"),
'db_type': db_type
})
@login_required
def new(request):
if request.method == "GET":
form = NewForm()
return render(request,
'mainapp/new.html',
{'form': form,
'ref': 'new', 'files_explorer': settings.files_explorer, 'directory_content': get_folder_content("/")})
elif request.method == "POST":
form = NewForm(request.POST)
if form.is_valid():
db_type = form.cleaned_data.get('db_type')
dbname_to_create = form.cleaned_data.get('name')
if db_type == 'sqlite':
con_string = 'sqlite:///{}.sqlite'.format(os.path.join(request.POST.get('current_path'), dbname_to_create))
elif db_type == 'postgresql':
try:
con_string = pg_create(user='gramvi_admin', password='password', dbname_to_create=dbname_to_create)
# database "lm" already exists
except exc.ProgrammingError as err:
if "already exists" in str(err):
messages.add_message(request, messages.WARNING, 'Database "%s" already exists.' % (dbname_to_create))
messages.add_message(request, messages.INFO, 'Please verify all postgres database names.')
return redirect('mainapp:new')
error = False
try:
eng, meta = og_connect(con_string)
# Create drillhole definition tables in the metadata, collar and survey.
og_create_dhdef(eng, meta)
except AssertionError as err:
if db_type == 'sqlite':
messages.add_message(request, messages.WARNING, 'Database "%s" already exists on path: %s.' % (dbname_to_create, request.POST.get('current_path')))
else:
messages.add_message(request, messages.WARNING, str(err))
error = True
except exc.OperationalError as err:
if "unable to open database file" in str(err):
messages.add_message(request, messages.WARNING, 'Unable to create sqlite database file "%s.sqlite" on path: %s.' % (dbname_to_create, request.POST.get('current_path')))
else:
messages.add_message(request, messages.WARNING, str(err))
error = True
except:
raise
if error:
return redirect('mainapp:new')
og_system(eng, meta)
og_references(eng, meta, table_name='assay_certificate', key='SampleID', cols={'Au': {'coltypes': Float,
'nullable': True}})
og_references(eng, meta, table_name='rock_catalog', key='RockID', cols={'Description': {'coltypes': String,
'nullable': True}})
og_add_interval(eng, meta, table_name='assay', cols={'SampleID': {'coltypes': String,
'nullable': False,
'foreignkey': {'column': 'assay_certificate.SampleID',
'ondelete': 'CASCADE',
'onupdate': 'CASCADE'}}})
og_add_interval(eng, meta, table_name='litho', cols={'RockID': {'coltypes': String,
'nullable': True,
'foreignkey': {'column': 'rock_catalog.RockID',
'ondelete': 'CASCADE',
'onupdate': 'CASCADE'}}})
execute(eng, meta)
# -Register tables on system table: OG_SMDH_SYSTEM------------------------#
table_key = 'OG_SMDH_SYSTEM'
tdata = [
{'Table': 'survey', 'Type': 'definition (survey)', 'Comments': ''},
{'Table': 'collar', 'Type': 'definition (collar)', 'Comments': ''},
{'Table': 'assay_certificate', 'Type': 'reference', 'Comments': ''},
{'Table': 'rock_catalog', 'Type': 'reference', 'Comments': ''},
{'Table': 'assay', 'Type': 'interval', 'Comments': ''},
{'Table': 'litho', 'Type': 'interval', 'Comments': ''}
]
reflector = Reflector(con_string)
reflector.reflectTables()
table = reflector.getOg_table(table_key)
Base = declarative_base()
generic_object = type(str(table_key), (Base,), defineObject(table))
session = reflector.make_session()
for data in tdata:
Object_table = generic_object(**data)
session.add(Object_table)
try:
session.commit()
# session.flush()
except:
session.rollback()
finally:
session.close()
# -END----------------------#
request.session['engineURL'] = con_string
request.session['db_type'] = db_type
request.session['db_name'] = dbname_to_create
return redirect('mainapp:dashboard')
@login_required
def dashboard(request):
if not connection_str(request):
return redirect('mainapp:index')
return render(request, 'mainapp/dashboard.html', {'ref': 'dashboard'})
@login_required
def close_connection(request):
connection_str(request, clean=True)
return redirect('mainapp:index')
@login_required
def reflector(request, table_key=''):
engineURL = request.session.get('engineURL')
if not engineURL:
messages.add_message(request, messages.WARNING, message="Please open a database.")
return redirect('mainapp:open')
reflector = Reflector(engineURL)
# try: can raise AttributeError
error = reflector.reflectTables()
if error:
messages.add_message(request, messages.WARNING, error)
return redirect('mainapp:open')
if table_key != '':
exist = reflector.exist_table(table_key)
if not exist:
msg = "Please verify that the table: '{}' does not exist.".format(table_key)
messages.add_message(request, messages.WARNING, msg)
table_key = ''
if request.method == 'POST':
pks = request.POST.getlist('checkbox_delete')
for i, pk in enumerate(pks):
pks[i] = pk.split(',')
table_key = str(request.POST['tablename'])
Base = declarative_base()
table = reflector.getOg_table(str(table_key))
object_table = type(str(table_key), (Base,), defineObject(table))
if pks:
session = reflector.make_session()
for pk in pks:
query = session.query(object_table).get(pk)
session.delete(query)
try:
session.commit()
except exc.IntegrityError as err:
# DETAIL: Key (SampleID)=(120) is still referenced from table "assay".
if "Key" and "is still referenced from table" in str(err):
m = re.search('(DETAIL:)[\w|\s|\(|\)\|=|"]+\W', str(err))
m = str(m.group(0)).partition("DETAIL:")
messages.add_message(request, messages.WARNING, m[2])
session.rollback()
else:
messages.add_message(request, messages.WARNING, "A unexpected error has been happened")
session.rollback()
except:
messages.add_message(request, messages.WARNING, "A unexpected error has been happened")
session.rollback()
finally:
session.close()
cols, tks, data, table_key = update(reflector, table_key)
return render(request, 'mainapp/reflector.html', {'tks': tks, 'cols': cols, 'data': data, 'table_key': table_key})
@login_required
def add_table(request):
if request.method in ['GET', 'POST']:
RowFormset = formset_factory(FormTableColumn, extra=1, max_num=15)
db_type = request.session.get('db_type')
if db_type == "sqlite" or db_type == "postgresql":
# con_string = 'sqlite:///{}.sqlite'.format(request.COOKIES.get('db'))
# con_string = 'postgresql://postgres@localhost/{}'.format(request.COOKIES.get('db'))
con_string = request.session.get('engineURL')
eng, meta = og_connect(con_string)
if request.method == 'GET':
form = AddTableForm()
return render(request,
'mainapp/add_table.html',
{'ref': 'dashboard', 'form': form, 'formset': RowFormset})
elif request.method == 'POST':
form = AddTableForm(request.POST)
formset = RowFormset(request.POST)
if form.is_valid() and formset.is_valid():
table_name = form.cleaned_data.get('table_name')
reflector = get_reflector(request)
exist = reflector.exist_table(table_name)
if exist:
msg = "The table '{}', already exist.".format(table_name)
messages.add_message(request, messages.INFO, msg)
return render(request, 'mainapp/add_table.html', {'form': form, 'formset': formset})
formset_cols = {}
# formset
for fform in formset:
name = fform.cleaned_data.get('name')
tb_type = fform.cleaned_data.get('tb_type')
if tb_type == 'String':
tb_type = String
elif tb_type == 'Float':
tb_type = Float
elif tb_type == "Integer":
tb_type = Integer
nullable = fform.cleaned_data.get('nullable')
formset_cols[name] = {'coltypes': tb_type, 'nullable': nullable}
table_type = form.cleaned_data.get('table_type')
# insert assay_certificate, rock_catalog, other_reference table types
if table_type == 'assay_certificate' or table_type == 'rock_catalog' or table_type == 'other_reference':
# defaults on template client side:
# cols = {'Au': {'coltypes': Float,'nullable': True}}
# on assay_certificate key=SampleID
# on rock_catalog key=RockID
# on other_reference key=''
table_key = request.POST.get('ftable_key')
cols = formset_cols
og_references(eng, meta, table_name=table_name, key=str(table_key), cols=cols)
elif table_type == 'assay' or table_type == 'litho':
# on this tables, collar foreignkey: collar.BHID
table_reference = request.POST.get('table_reference')
for column in meta.tables[table_reference].columns:
if column.primary_key:
pk = column.key
cols = {pk: {'coltypes': String,
'nullable': False,
'foreignkey': {'column': '{}.{}'.format(table_reference, pk),
'ondelete': 'CASCADE',
'onupdate': 'CASCADE'}}}
cols.update(formset_cols)
og_add_interval(eng, meta, table_name=table_name, cols=cols)
# other_interval
elif table_type == 'other_interval':
# on this tables, collar foreignkey: dbsuffix+_collar.BHID
collar_reference = request.POST.get('collar_reference')
if collar_reference and collar_reference.endswith('_collar'):
m = re.search("_collar", collar_reference)
dbsuffix = collar_reference[:m.start()]
elif collar_reference == 'collar':
dbsuffix = ''
table_reference = request.POST.get('table_reference')
for column in meta.tables[table_reference].columns:
if column.primary_key:
pk = column.key
cols = {pk: {'coltypes': String,
'nullable': False,
'foreignkey': {'column': '{}.{}'.format(table_reference, pk),
'ondelete': 'CASCADE',
'onupdate': 'CASCADE'}}}
cols.update(formset_cols)
og_add_interval(eng, meta, table_name=table_name, cols=cols, dbsuffix=dbsuffix)
try:
execute(eng, meta)
except exc.NoReferencedTableError:
msg = "Please verify: there are tables really does not exists or are wrong."
messages.add_message(request, messages.WARNING, msg)
return redirect(reverse('mainapp:reflector'))
except:
raise
# -Register table on system table: OG_SMDH_SYSTEM------------------------#
og_register_table = 'OG_SMDH_SYSTEM'
if table_type == 'other_interval' or table_type == 'assay' or table_type == 'litho':
tbtype = 'interval'
elif table_type == 'assay_certificate' or table_type == 'rock_catalog' or table_type == 'other_reference':
tbtype = 'reference'
tdata = {'Table': table_name, 'Type': tbtype, 'Comments': ''}
reflector = Reflector(con_string)
reflector.reflectTables()
table = reflector.getOg_table(og_register_table)
Base = declarative_base()
generic_object = type(str(og_register_table), (Base,), defineObject(table))
session = reflector.make_session()
Object_table = generic_object(**tdata)
session.add(Object_table)
try:
session.commit()
# session.flush()
except:
session.rollback()
finally:
session.close()
# end register
else:
return render(request, 'mainapp/add_table.html', {'form': form, 'formset': formset})
return redirect('mainapp:reflector')
# Note: no @login_required here; a SQLAlchemy compiler extension receives
# (element, compiler), not an HTTP request.
@compiles(DropTable, "postgresql")
def _compile_drop_table(element, compiler, **kwargs):
    return compiler.visit_drop_table(element) + " CASCADE"
@login_required
def remove_table(request):
engineURL = request.session.get('engineURL')
reflector = Reflector(engineURL)
reflector.reflectTables()
if request.method == 'POST':
tbl = request.POST.get('tbl')
db_type = request.session.get('db_type')
removeOnCascade(db_type, reflector, tbl)
reflector.reflectTables()
return redirect('mainapp:reflector')
def verify(request, table_key):
engineURL = request.session.get('engineURL')
reflector = Reflector(engineURL)
reflector.reflectTables()
exist = reflector.exist_table(table_key)
if not exist:
msg = "Is not posible verify bugs on: '{}', this table does not exist.".format(table_key)
messages.add_message(request, messages.WARNING, msg)
else:
errors = check_bugs(reflector, table_key)
messages.add_message(request, messages.WARNING, errors)
return redirect(reverse('mainapp:reflector', kwargs={'table_key': table_key}))
@login_required
def get_collar_reference_tables_in_json(request):
engineURL = request.session.get('engineURL')
reflector = Reflector(engineURL)
reflector.reflectTables()
data = tb_data(reflector, table_key='OG_SMDH_SYSTEM')
content = {'collars': [], 'references': []}
for row in data:
if row[1] == 'definition (collar)':
content['collars'].append(row[0])
if row[1] == 'reference':
content['references'].append(row[0])
return JsonResponse({'content': content})
def logout_user(request):
    logout(request)
    # A view must return a response; send the user back to the index (assumed target)
    return redirect('mainapp:index')
def signup_user(request):
if request.method == 'POST':
signup_form = AppUserForm(request.POST)
if signup_form.is_valid():
new_user = AppUser.objects.create_user(
username=signup_form.cleaned_data['username'],
fullname=signup_form.cleaned_data['fullname'],
email=signup_form.cleaned_data['email'],
phone=signup_form.cleaned_data['phone'],
password=signup_form.cleaned_data['password'])
return render(request, 'mainapp/signup.html', {'signup_form': signup_form})
else:
return render(request, 'mainapp/signup.html', {'signup_form': signup_form})
else:
signup_form = AppUserForm()
return render(request, 'mainapp/signup.html', {'signup_form': signup_form})
@login_required
def get_folder_content_in_json(request):
if settings.files_explorer:
content = get_folder_content(request.GET.get('path'))
return JsonResponse({'content': content})
else:
        raise Http404("You don't have access to this function")
def get_folder_content(path=None):
files = []
folders = []
if not path:
path = "/"
try:
content = os.listdir(path)
for element in content:
element_path = os.path.join(path, element)
if os.path.isfile(element_path):
files.append(element)
elif os.path.isdir(element_path) and os.access(element_path, os.R_OK):
folders.append(element)
except OSError:
return False
return {"files": files, "folders": folders, "path": path, "previous_path": os.path.dirname(os.path.dirname(path))}
# this function return reflector object of request engine
@login_required
def get_reflector(request):
engineURL = request.session.get('engineURL')
reflector = Reflector(engineURL)
reflector.reflectTables()
return reflector
# ---------------------------------------------------------
# Adapt postgres DOUBLE_PRECISION type to sqlite FLOAT type
@compiles(DOUBLE_PRECISION, 'sqlite')
def compile_DOUBLE_PRECISION_postgresql_sqlite(element, compiler, **kw):
"""Handles postgresql DOUBLE_PRECISION datatype as FLOAT in sqlite"""
res = compiler.visit_FLOAT(element, **kw)
return res
def postgres_to_sqlite(request):
engineURL = request.session.get('engineURL')
db_name = request.session.get('db_name')
db_type = request.session.get('db_type')
str_sqlite_meta = 'sqlite:////home/leudis/Desktop/{}.sqlite'.format(db_name)
adapted = False
if db_type == 'postgresql':
adapted = adapt_postgresToSqlite(engineURL, str_sqlite_meta)
    if adapted:
        msg = "The '{}' postgres database was successfully adapted to sqlite.".format(db_name)
        messages.add_message(request, messages.SUCCESS, msg)
    else:
        msg = "The '{}' database was not successfully adapted.".format(db_name)
        messages.add_message(request, messages.WARNING, msg)
return redirect(reverse('mainapp:dashboard'))
def test_json(request):
engineURL = request.session.get('engineURL')
table_key = request.GET.get("tk")
reflector = Reflector(engineURL)
db_type = request.session.get('db_type')
content = depend(db_type, reflector, table_key)
# content = depend(db_type,reflector,"collar")
# content = {'hola':{"lolo":{"lola":"null"}}}
return JsonResponse({'content': content})
# return render(request,'mainapp/test.html',{'data': content})
| opengeostat/SmartDHOLES | smart_drillholes_gui/mainapp/views.py | Python | lgpl-3.0 | 30,589 |
def brooke():
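    """Gesture sequence (for a London demo, judging by the gesture names):
    run gesture 3, look right/left twice with only the head enabled,
    then run gesture 4, disabling the servos between stages to rest them.
    """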
i01.enable()
gestureforlondon3()
sleep(8)
i01.disable()
sleep(8)
i01.head.enable()
lookrightside()
sleep(4)
lookleftside()
sleep(4)
lookrightside()
sleep(4)
lookleftside()
i01.disable()
sleep(8)
i01.enable()
gestureforlondon4()
i01.disable()
sleep(8)
| MyRobotLab/pyrobotlab | home/kwatters/harry/gestures/brooke.py | Python | apache-2.0 | 303 |
#!/usr/bin/python
# -*- encoding: utf-8 -*-
###########################################################################
#    Module Written to OpenERP, Open Source Management Solution
# Copyright (C) Vauxoo (<http://vauxoo.com>).
# All Rights Reserved
###############Credits######################################################
# Coded by: Juan Carlos Funes([email protected])
#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
{
"name" : "Inherit filter_domain product_id field in search view of mrp_production",
"version" : "1.0",
"depends" : ['mrp'],
"author" : "Vauxoo",
#"license" : "AGPL-3",
"description" : """This module inherit filter_domain product_id field in search view of mrp_production
""",
"website" : "http://vauxoo.com",
"category" : "Generic Modules",
"init_xml" : [],
"demo_xml" : [],
"test": [],
"update_xml" : ['mrp_production_view.xml',
],
"active": False,
"installable": True,
}
| 3dfxsoftware/cbss-addons | mrp_production_filter_product/__openerp__.py | Python | gpl-2.0 | 1,768 |
"""
URLConf for test suite.
We need only the docs urls for DocumentationRenderer tests.
"""
from django.conf.urls import url
from rest_framework.documentation import include_docs_urls
urlpatterns = [
url(r'^docs/', include_docs_urls(title='Test Suite API')),
]
| kgeorgy/django-rest-framework | tests/urls.py | Python | bsd-2-clause | 268 |
from Source import Source
from Components.Element import cached
from enigma import eServiceReference, pNavigation
class StreamService(Source):
def __init__(self, navcore):
Source.__init__(self)
self.ref = None
self.__service = None
self.navcore = navcore
def serviceEvent(self, event):
pass
@cached
def getService(self):
return self.__service
service = property(getService)
def handleCommand(self, cmd):
print "StreamService handle command", cmd
self.ref = eServiceReference(cmd)
def recordEvent(self, service, event):
if service is self.__service:
return
print "RECORD event for us:", service
self.changed((self.CHANGED_ALL, ))
def execBegin(self):
if self.ref is None:
print "StreamService has no service ref set."
return
print "StreamService execBegin", self.ref.toString()
try:
			# not all images support recording type indicators
			self.__service = self.navcore.recordService(self.ref, False, pNavigation.isStreaming)
except:
self.__service = self.navcore.recordService(self.ref)
self.navcore.record_event.append(self.recordEvent)
if self.__service is not None:
self.__service.prepareStreaming()
self.__service.start()
def execEnd(self):
print "StreamService execEnd", self.ref.toString()
self.navcore.record_event.remove(self.recordEvent)
if self.__service is not None:
self.navcore.stopRecordService(self.__service)
self.__service = None
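# Typical flow (illustrative): handleCommand() receives a service reference
# string, execBegin() then starts a streaming record via the navigation core,
# and execEnd() stops it again when the source is torn down.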
| openhdf/enigma2-wetek | lib/python/Components/Sources/StreamService.py | Python | gpl-2.0 | 1,425 |
import unittest
import testing_helper
import math
from softsailor.utils import *
class TestUtils(unittest.TestCase):
def testDegToRad(self):
self.assertEqual(math.pi, deg_to_rad(180))
        degs = [90, -90]
rads = deg_to_rad(degs)
self.assertAlmostEqual(math.pi / 2, rads[0])
self.assertAlmostEqual(-math.pi / 2, rads[1])
rads = deg_to_rad(degs[0], degs[1])
self.assertAlmostEqual(math.pi / 2, rads[0])
self.assertAlmostEqual(-math.pi / 2, rads[1])
def testBearingToHeading(self):
bearing = (math.pi / 4, 1)
speed = 5
current = (0, 1)
heading = bearing_to_heading(bearing, speed, current)
self.assertAlmostEqual(math.atan(4.0/3.0), heading)
bearing = (math.atan(4.0/2.0), 1)
current = (math.pi, 1)
heading = bearing_to_heading(bearing, speed, current)
self.assertAlmostEqual(math.atan(4.0/3.0), heading)
if __name__ == '__main__':
unittest.main()
| jrversteegh/softsailor | softsailor/tst/test_utils.py | Python | gpl-3.0 | 992 |
hiddenimports = ['BTrees.OOBTree', 'BTrees.fsBTree',
'pkg_resources']
# 'pkg_resources', 'pywintypes',
# 'simo', 'jsonlib2', 'celeryconfig']
| cropleyb/pentai | packaging/osx/hook-ZODB.py | Python | mit | 193 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-24 15:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('l8pr', '0013_auto_20160613_1552'),
]
operations = [
migrations.AlterModelOptions(
name='itemsrelationship',
options={'ordering': ('-order',)},
),
]
| L8pR/L8pR | app/l8pr/migrations/0014_auto_20160624_1546.py | Python | lgpl-3.0 | 420 |
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QSplitter, QVBoxLayout, QGroupBox, QPushButton, QLineEdit, QTabWidget, QLabel, QComboBox, QFormLayout
from PyQt5.QtCore import pyqtSlot, Qt
from TableGUI import TableData
from PlotGraph import Plot_Graph
from GraphPlot import GraphPlot
from ToolsWidgets import *
from Calculator import Operations
class MainLayout(QWidget):
def __init__(self):
super(QWidget, self).__init__()
self.path = '/'
self.MainLyout = QHBoxLayout()
dataLyout = QHBoxLayout()
self.widgetsLyout = QVBoxLayout()
self.dataTable = TableData()
self.dataTable.tableWidget.itemChanged.connect(self.changeData)
self.ErrBar = ErrorBars()
self.Hamiltonian = Hamiltonian()
self.CalcError = CalculateError()
self.GrphAxes = GraphAxes()
self.Formula = FormulaEntry()
self.Formula.runButton.clicked.connect(self.formula_click)
self.Terminal = Terminal_for_table()
toolsTab = QTabWidget()
toolsTab.addTab(self.ErrBar, "Error Bars")
toolsTab.addTab(self.Hamiltonian, "Hamiltonian")
toolsTab.addTab(self.CalcError, "Errors")
self.widgetsLyout.addWidget(toolsTab)
self.widgetsLyout.addWidget(self.GrphAxes)
self.widgetsLyout.addWidget(self.Formula)
self.widgetsLyout.addWidget(self.Terminal)
self.splitLyout = QSplitter(Qt.Vertical)
self.splitLyout.setGeometry(0, 0, 1500, 1000)
self.splitLyout.addWidget(self.dataTable.tableWidget)
dataLyout.addWidget(self.splitLyout)
self.MainLyout.addLayout(dataLyout, 25)
self.MainLyout.addLayout(self.widgetsLyout, 7)
self.dataSaved = True
self.setLayout(self.MainLyout)
def plotGraph(self, marker):
axesXTitle = self.GrphAxes.axesXCombo.currentText()
axesYTitle = self.GrphAxes.axesYCombo.currentText()
values = [ self.dataTable.table[axesXTitle] ,
self.dataTable.table[axesYTitle] ]
if len(values[0]) != len(values[1]):
return
titles = [ axesXTitle , axesYTitle ]
types = self.ErrBar.MainCombo.currentText()
        if types != 'None':
            if types == 'Fixed value':
                # float() rather than eval(): the field is assumed to hold a plain number
                error = float(self.ErrBar.Error[types].text())
            elif types == '% of value':
                percent = float(self.ErrBar.Error[types].text())
                error = [(percent*0.01)*y for y in values[1]]
            elif types == 'Data column':
                error = self.ErrBar.Error[types].currentText()
                error = self.dataTable.table[error]
graph = GraphPlot(values, titles, error)
else:
graph = GraphPlot(values, titles)
if not hasattr(self, 'Graph'):
self.Graph = Plot_Graph()
if not self.GrphAxes.checkReplot.isChecked():
self.Graph.axes.clear()
self.GrphAxes.result.setText('')
self.Graph.nc = -1
self.Graph.nc = self.Graph.nc + 1
if self.Graph.nc >= len(self.Graph.color):
self.Graph.nc = 0
logX, logY = False, False
if self.GrphAxes.checkLogAxsX.isChecked():
logX = True
if self.GrphAxes.checkLogAxsY.isChecked():
logY = True
self.Graph.setGraph(graph, marker, logX, logY)
self.splitLyout.addWidget(self.Graph)
def plotRegressionGraph(self, typof):
axesXTitle = self.GrphAxes.axesXCombo.currentText()
axesYTitle = self.GrphAxes.axesYCombo.currentText()
values = [ self.dataTable.table[axesXTitle] ,
self.dataTable.table[axesYTitle] ]
if len(values[0]) != len(values[1]):
return
titles = [ axesXTitle , axesYTitle ]
types = self.ErrBar.MainCombo.currentText()
        if types != 'None':
            if types == 'Fixed value':
                # float() rather than eval(): the field is assumed to hold a plain number
                error = float(self.ErrBar.Error[types].text())
            elif types == '% of value':
                percent = float(self.ErrBar.Error[types].text())
                error = [(percent*0.01)*y for y in values[1]]
            elif types == 'Data column':
                error = self.ErrBar.Error[types].currentText()
                error = self.dataTable.table[error]
graph = GraphPlot(values, titles, error)
else:
graph = GraphPlot(values, titles)
if not hasattr(self, 'Graph'):
self.Graph = Plot_Graph()
if not self.GrphAxes.checkReplot.isChecked():
self.Graph.axes.clear()
self.GrphAxes.result.setText('')
self.Graph.nc = -1
else:
if graph.xInterval[0] > self.Graph.axes.get_xlim()[0]:
graph.xInterval[0] = self.Graph.axes.get_xlim()[0]
if graph.xInterval[1] < self.Graph.axes.get_xlim()[1]:
graph.xInterval[1] = self.Graph.axes.get_xlim()[1]
if graph.yInterval[0] > self.Graph.axes.get_ylim()[0]:
graph.yInterval[0] = self.Graph.axes.get_ylim()[0]
if graph.yInterval[1] < self.Graph.axes.get_ylim()[1]:
graph.yInterval[1] = self.Graph.axes.get_ylim()[1]
self.Graph.nc = self.Graph.nc + 1
if self.Graph.nc >= len(self.Graph.color):
self.Graph.nc = 0
        # Every branch forwarded typof unchanged, so pass it straight through
        if typof in ('lin', 'log', 'exp', 'poly', 'pepe', 'general'):
            correct = self.Graph.set_Regression(graph, typof)
if correct:
self.GrphAxes.result.setAlignment(Qt.AlignVCenter | Qt.AlignHCenter)
self.GrphAxes.result.setText(graph.text)
self.splitLyout.addWidget(self.Graph)
def saveGraph(self):
self.Graph.saveGraph()
@pyqtSlot()
def changeData(self):
self.dataSaved = False
        for item in self.dataTable.tableWidget.selectedItems():
            boolean = True
            while boolean:
                try:
                    if item.text() == '':
                        # len() probes the column and raises KeyError if it is missing
                        if item.row() >= len(self.dataTable.table[
                                self.dataTable.index[item.column()]]):
                            # Row lies beyond the stored data; nothing to delete
                            pass
                        else:
                            del self.dataTable.table[self.dataTable.index[
                                item.column()]][item.row()]
                        boolean = False
                    else:
                        self.dataTable.table[self.dataTable.index[
                            item.column()]][item.row()] = float(item.text())
                        boolean = False
                except IndexError:
                    # Cell is one past the end of the column: append instead
                    self.dataTable.table[self.dataTable.index[item.column()]
                        ].append(float(item.text()))
                    boolean = False
                except KeyError:
                    # Unknown column: create it, then retry via the while loop
                    self.dataTable.table[str(item.column())] = []
                    self.dataTable.index[item.column()] = str(item.column())
self.ErrBar.set_new_Columns_names(self.dataTable.index)
self.GrphAxes.setNames(self.dataTable.index)
self.dataTable.addColumnRow()
@pyqtSlot()
def formula_click(self):
table, index = Operations( self.Formula.lineEdit.text(),
self.dataTable.table,
self.dataTable.index ).main()
self.dataTable.table = table
self.dataTable.index = index
self.dataTable.reDoTable()
self.ErrBar.set_new_Columns_names(self.dataTable.index)
self.GrphAxes.setNames(self.dataTable.index)
| Jaimedgp/J_Graphics_P | src/MainLayout.py | Python | gpl-3.0 | 8,216 |
import pygame, spritesheet
class CharSprite(pygame.sprite.Sprite):
# CONSTANTS
# TODO: Should these be alterable?
GRAVITY = 2
MAX_DOWN_SPEED = 100
RUN_SPEED = 10
JUMP_FORCE = -20
def __init__(self, image_loc, position):
pygame.sprite.Sprite.__init__(self)
self.sprites = spritesheet.Spritesheet(image_loc)
self.image = self.sprites.image_at(
(128,0,32,64),colorkey=-1)
self.rect = self.image.get_rect()
self.position = position
self.direction = 'right'
self.rect.center = self.position
self.dx, self.dy = 0,0
self.speed = (0,0)
self.fall = True
self.curr_anim = []
self.frame = 0
def update(self, deltat, collisions):
# SIMULATION
# FIXME: This does not properly handle collisions
if collisions and self.dy > 0:
self.fall = False
self.dy = 0
self.anim_cycle('idle')
# Gravity as a constant falling force
if not collisions:
self.dy += self.GRAVITY
if self.dy > self.MAX_DOWN_SPEED:
self.dy = self.MAX_DOWN_SPEED
self.anim_cycle('jump')
# Calculate speed vectors
self.speed = (self.dx,self.dy)
x, y = self.position
self.position = tuple(map(sum,zip((x,y),self.speed)))
# Gather image and ready display
# FIXME: Images display and refresh too quickly
self.image = self.curr_anim[self.frame]
self.frame += 1
self.frame = self.frame%len(self.curr_anim)
self.rect = self.image.get_rect()
self.rect.center = self.position
def anim_cycle(self, name):
animations = {
'idle': [
(0,0,32,64),(32,0,32,64),
(64,0,32,64),(96,0,32,64)],
'jump': [
(288,0,32,64),(320,0,32,64),
                (352,0,32,64),(384,0,32,64)],  # frames are 32 px apart; 382 looked like a typo for 384
'run': [
(480, 0, 32, 64),(512,0,32,64),(544,0,32,64),
(576,0,32,64),(608,0,32,64),],
'fall': [
(288,0,32,64)]}
self.curr_anim = self.sprites.images_at(
animations[name], colorkey=-1)
if self.direction == 'left':
self.curr_anim = self.sprites.imagesR_at(
animations[name], colorkey=-1)
self.frame = 0
def idle(self):
# FIXME: This is very buggy, and stops unintended things.
for i in range(self.RUN_SPEED):
if self.direction == 'left':
self.dx += 1
else:
self.dx -= 1
self.anim_cycle('idle')
def run(self, direction):
for i in range(self.RUN_SPEED):
if direction == 'left':
self.dx -= 1
self.direction = 'left'
self.anim_cycle('run')
else:
self.dx += 1
self.direction = 'right'
self.anim_cycle('run')
def jump(self):
# TODO: Run the jump animation once, then turn to falling
if not self.fall:
self.dy += self.JUMP_FORCE
self.fall = True
self.anim_cycle('jump')
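# Minimal usage sketch (assumptions: a pygame main loop exists and 'hero.png'
# is a sprite sheet laid out as in anim_cycle above):
#
#     hero = CharSprite('hero.png', (100, 100))
#     group = pygame.sprite.Group(hero)
#     # inside the loop:
#     hero.run('right')
#     group.update(deltat, collisions)
#     group.draw(screen)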
| Sanjurjo7/metroidVaniaJones | character.py | Python | mit | 3,230 |
import pytest
from v8py import Context, Script, JSException
def test_script():
c1 = Context()
c1.kappa = 'pride'
c2 = Context()
s = Script('kappa')
assert c1.eval(s) == 'pride'
with pytest.raises(JSException):
c2.eval(s)
def test_identical_scripts():
assert Script('kappa') is Script('kappa')
def test_filename():
s = Script('kappa', filename='file')
# why bother testing more...
| tbodt/v8py | tests/test_script.py | Python | lgpl-3.0 | 427 |
# Django settings for webvirtmgr project.
import os
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(os.path.dirname(__file__), '..', 'webvirtmgr.sqlite3'),
# The following settings are not used with sqlite3:
'USER': '',
'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
TIME_JS_REFRESH = 2000
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/Zaporozhye'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = os.path.join(os.path.dirname(__file__), '..', 'static')
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '#^)!5i$=r!h#v9z9j1j5^=+l0xb&1n*5s(e+93r$%zrzr3zgc1'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'webvirtmgr.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'webvirtmgr.wsgi.application'
TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), '..', 'templates'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'instance',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| selboo/starl-mangle | webvirtmgr/webvirtmgr/settings.py | Python | apache-2.0 | 5,044 |
from __future__ import absolute_import
from django.contrib import admin
from .models import TaxRate
class TaxRateAdmin(admin.ModelAdmin):
list_display = ('id', 'country', 'state', 'zip_code', 'rate', 'updated_at')
admin.site.register(TaxRate, TaxRateAdmin)
| inabhi9/django-taxrates | taxrates/admin.py | Python | mit | 267 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozpack.packager.formats import (
FlatFormatter,
JarFormatter,
OmniJarFormatter,
)
from mozpack.packager import (
preprocess_manifest,
preprocess,
Component,
SimpleManifestSink,
)
from mozpack.files import (
GeneratedFile,
FileFinder,
File,
)
from mozpack.copier import (
FileCopier,
Jarrer,
)
from mozpack.errors import errors
from mozpack.unify import UnifiedBuildFinder
import mozpack.path as mozpath
import buildconfig
from argparse import ArgumentParser
import os
from StringIO import StringIO
import subprocess
import platform
import mozinfo
# List of libraries to shlibsign.
SIGN_LIBS = [
'softokn3',
'nssdbm3',
'freebl3',
'freeblpriv3',
'freebl_32fpu_3',
'freebl_32int_3',
'freebl_32int64_3',
'freebl_64fpu_3',
'freebl_64int_3',
]
class ToolLauncher(object):
'''
Helper to execute tools like xpcshell with the appropriate environment.
launcher = ToolLauncher()
launcher.tooldir = '/path/to/tools'
launcher.launch(['xpcshell', '-e', 'foo.js'])
'''
def __init__(self):
self.tooldir = None
def launch(self, cmd, extra_linker_path=None, extra_env={}):
'''
Launch the given command, passed as a list. The first item in the
command list is the program name, without a path and without a suffix.
These are determined from the tooldir member and the BIN_SUFFIX value.
An extra_linker_path may be passed to give an additional directory
to add to the search paths for the dynamic linker.
An extra_env dict may be passed to give additional environment
variables to export when running the command.
'''
assert self.tooldir
cmd[0] = os.path.join(self.tooldir, 'bin',
cmd[0] + buildconfig.substs['BIN_SUFFIX'])
if not extra_linker_path:
extra_linker_path = os.path.join(self.tooldir, 'bin')
env = dict(os.environ)
for p in ['LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH']:
if p in env:
env[p] = extra_linker_path + ':' + env[p]
else:
env[p] = extra_linker_path
for e in extra_env:
env[e] = extra_env[e]
# For VC12+, make sure we can find the right bitness of pgort1x0.dll
if not buildconfig.substs.get('HAVE_64BIT_BUILD'):
for e in ('VS140COMNTOOLS', 'VS120COMNTOOLS'):
if e not in env:
continue
vcdir = os.path.abspath(os.path.join(env[e], '../../VC/bin'))
if os.path.exists(vcdir):
env['PATH'] = '%s;%s' % (vcdir, env['PATH'])
break
# Work around a bug in Python 2.7.2 and lower where unicode types in
# environment variables aren't handled by subprocess.
for k, v in env.items():
if isinstance(v, unicode):
env[k] = v.encode('utf-8')
print >>errors.out, 'Executing', ' '.join(cmd)
errors.out.flush()
return subprocess.call(cmd, env=env)
def can_launch(self):
return self.tooldir is not None
launcher = ToolLauncher()
class LibSignFile(File):
'''
File class for shlibsign signatures.
'''
def copy(self, dest, skip_if_older=True):
assert isinstance(dest, basestring)
# os.path.getmtime returns a result in seconds with precision up to the
# microsecond. But microsecond is too precise because shutil.copystat
# only copies milliseconds, and seconds is not enough precision.
if os.path.exists(dest) and skip_if_older and \
int(os.path.getmtime(self.path) * 1000) <= \
int(os.path.getmtime(dest) * 1000):
return False
if launcher.launch(['shlibsign', '-v', '-o', dest, '-i', self.path]):
errors.fatal('Error while signing %s' % self.path)
def precompile_cache(registry, source_path, gre_path, app_path):
'''
Create startup cache for the given application directory, using the
given GRE path.
- registry is a FileRegistry-like instance where to add the startup cache.
- source_path is the base path of the package.
- gre_path is the GRE path, relative to source_path.
- app_path is the application path, relative to source_path.
Startup cache for all resources under resource://app/ are generated,
except when gre_path == app_path, in which case it's under
resource://gre/.
'''
from tempfile import mkstemp
source_path = os.path.abspath(source_path)
if app_path != gre_path:
resource = 'app'
else:
resource = 'gre'
app_path = os.path.join(source_path, app_path)
gre_path = os.path.join(source_path, gre_path)
fd, cache = mkstemp('.zip')
os.close(fd)
os.remove(cache)
try:
extra_env = {'MOZ_STARTUP_CACHE': cache}
if buildconfig.substs.get('MOZ_TSAN'):
extra_env['TSAN_OPTIONS'] = 'report_bugs=0'
if buildconfig.substs.get('MOZ_ASAN'):
extra_env['ASAN_OPTIONS'] = 'detect_leaks=0'
if launcher.launch(['xpcshell', '-g', gre_path, '-a', app_path,
'-f', os.path.join(os.path.dirname(__file__),
'precompile_cache.js'),
'-e', 'precompile_startupcache("resource://%s/");'
% resource],
extra_linker_path=gre_path,
extra_env=extra_env):
errors.fatal('Error while running startup cache precompilation')
return
from mozpack.mozjar import JarReader
jar = JarReader(cache)
resource = '/resource/%s/' % resource
for f in jar:
if resource in f.filename:
path = f.filename[f.filename.index(resource) + len(resource):]
if registry.contains(path):
registry.add(f.filename, GeneratedFile(f.read()))
jar.close()
finally:
if os.path.exists(cache):
os.remove(cache)
class RemovedFiles(GeneratedFile):
'''
File class for removed-files. Is used as a preprocessor parser.
'''
def __init__(self, copier):
self.copier = copier
GeneratedFile.__init__(self, '')
def handle_line(self, str):
f = str.strip()
if not f:
return
if self.copier.contains(f):
errors.error('Removal of packaged file(s): %s' % f)
self.content += f + '\n'
def split_define(define):
'''
Give a VAR[=VAL] string, returns a (VAR, VAL) tuple, where VAL defaults to
1. Numeric VALs are returned as ints.
'''
if '=' in define:
name, value = define.split('=', 1)
try:
value = int(value)
except ValueError:
pass
return (name, value)
return (define, 1)
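# A couple of illustrative inputs:
#     split_define('MOZ_OMNIJAR=1') -> ('MOZ_OMNIJAR', 1)   (numeric VAL as int)
#     split_define('RESPATH=bin')   -> ('RESPATH', 'bin')
#     split_define('DEBUG')         -> ('DEBUG', 1)         (VAL defaults to 1)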
class NoPkgFilesRemover(object):
'''
Formatter wrapper to handle NO_PKG_FILES.
'''
def __init__(self, formatter, has_manifest):
assert 'NO_PKG_FILES' in os.environ
self._formatter = formatter
self._files = os.environ['NO_PKG_FILES'].split()
if has_manifest:
self._error = errors.error
self._msg = 'NO_PKG_FILES contains file listed in manifest: %s'
else:
self._error = errors.warn
self._msg = 'Skipping %s'
def add_base(self, base, *args):
self._formatter.add_base(base, *args)
def add(self, path, content):
if not any(mozpath.match(path, spec) for spec in self._files):
self._formatter.add(path, content)
else:
self._error(self._msg % path)
def add_manifest(self, entry):
self._formatter.add_manifest(entry)
def add_interfaces(self, path, content):
self._formatter.add_interfaces(path, content)
def contains(self, path):
return self._formatter.contains(path)
def main():
parser = ArgumentParser()
parser.add_argument('-D', dest='defines', action='append',
metavar="VAR[=VAL]", help='Define a variable')
parser.add_argument('--format', default='omni',
help='Choose the chrome format for packaging ' +
'(omni, jar or flat ; default: %(default)s)')
parser.add_argument('--removals', default=None,
help='removed-files source file')
parser.add_argument('--ignore-errors', action='store_true', default=False,
help='Transform errors into warnings.')
parser.add_argument('--minify', action='store_true', default=False,
help='Make some files more compact while packaging')
parser.add_argument('--minify-js', action='store_true',
help='Minify JavaScript files while packaging.')
parser.add_argument('--js-binary',
help='Path to js binary. This is used to verify '
'minified JavaScript. If this is not defined, '
'minification verification will not be performed.')
parser.add_argument('--jarlog', default='', help='File containing jar ' +
'access logs')
parser.add_argument('--optimizejars', action='store_true', default=False,
help='Enable jar optimizations')
parser.add_argument('--unify', default='',
help='Base directory of another build to unify with')
parser.add_argument('--disable-compression', action='store_false',
dest='compress', default=True,
help='Disable jar compression')
parser.add_argument('manifest', default=None, nargs='?',
help='Manifest file name')
parser.add_argument('source', help='Source directory')
parser.add_argument('destination', help='Destination directory')
parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
default=[],
help='Extra files not to be considered as resources')
args = parser.parse_args()
defines = dict(buildconfig.defines)
if args.ignore_errors:
errors.ignore_errors()
if args.defines:
for name, value in [split_define(d) for d in args.defines]:
defines[name] = value
copier = FileCopier()
if args.format == 'flat':
formatter = FlatFormatter(copier)
elif args.format == 'jar':
formatter = JarFormatter(copier, compress=args.compress, optimize=args.optimizejars)
elif args.format == 'omni':
formatter = OmniJarFormatter(copier,
buildconfig.substs['OMNIJAR_NAME'],
compress=args.compress,
optimize=args.optimizejars,
non_resources=args.non_resource)
else:
errors.fatal('Unknown format: %s' % args.format)
# Adjust defines according to the requested format.
if isinstance(formatter, OmniJarFormatter):
defines['MOZ_OMNIJAR'] = 1
elif 'MOZ_OMNIJAR' in defines:
del defines['MOZ_OMNIJAR']
respath = ''
if 'RESPATH' in defines:
respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
while respath.startswith('/'):
respath = respath[1:]
if args.unify:
def is_native(path):
path = os.path.abspath(path)
return platform.machine() in mozpath.split(path)
# Invert args.unify and args.source if args.unify points to the
# native architecture.
args.source, args.unify = sorted([args.source, args.unify],
key=is_native, reverse=True)
if is_native(args.source) and not buildconfig.substs['CROSS_COMPILE']:
launcher.tooldir = args.source
elif not buildconfig.substs['CROSS_COMPILE']:
launcher.tooldir = mozpath.join(buildconfig.topobjdir, 'dist')
with errors.accumulate():
finder_args = dict(
minify=args.minify,
minify_js=args.minify_js,
)
if args.js_binary:
finder_args['minify_js_verify_command'] = [
args.js_binary,
os.path.join(os.path.abspath(os.path.dirname(__file__)),
'js-compare-ast.js')
]
if args.unify:
finder = UnifiedBuildFinder(FileFinder(args.source),
FileFinder(args.unify),
**finder_args)
else:
finder = FileFinder(args.source, **finder_args)
if 'NO_PKG_FILES' in os.environ:
sinkformatter = NoPkgFilesRemover(formatter,
args.manifest is not None)
else:
sinkformatter = formatter
sink = SimpleManifestSink(finder, sinkformatter)
if args.manifest:
preprocess_manifest(sink, args.manifest, defines)
else:
sink.add(Component(''), 'bin/*')
sink.close(args.manifest is not None)
if args.removals:
removals_in = StringIO(open(args.removals).read())
removals_in.name = args.removals
removals = RemovedFiles(copier)
preprocess(removals_in, removals, defines)
copier.add(mozpath.join(respath, 'removed-files'), removals)
# shlibsign libraries
if launcher.can_launch():
if not mozinfo.isMac and buildconfig.substs.get('COMPILE_ENVIRONMENT'):
for lib in SIGN_LIBS:
libbase = mozpath.join(respath, '%s%s') \
% (buildconfig.substs['DLL_PREFIX'], lib)
libname = '%s%s' % (libbase, buildconfig.substs['DLL_SUFFIX'])
if copier.contains(libname):
copier.add(libbase + '.chk',
LibSignFile(os.path.join(args.destination,
libname)))
# Setup preloading
if args.jarlog and os.path.exists(args.jarlog):
from mozpack.mozjar import JarLog
log = JarLog(args.jarlog)
for p, f in copier:
if not isinstance(f, Jarrer):
continue
key = JarLog.canonicalize(os.path.join(args.destination, p))
if key in log:
f.preload(log[key])
# Fill startup cache
if isinstance(formatter, OmniJarFormatter) and launcher.can_launch() \
and buildconfig.substs['MOZ_DISABLE_STARTUPCACHE'] != '1':
gre_path = None
def get_bases():
for b in sink.packager.get_bases(addons=False):
for p in (mozpath.join('bin', b), b):
if os.path.exists(os.path.join(args.source, p)):
yield p
break
for base in sorted(get_bases()):
if not gre_path:
gre_path = base
omnijar_path = mozpath.join(sink.normalize_path(base),
buildconfig.substs['OMNIJAR_NAME'])
if formatter.contains(omnijar_path):
precompile_cache(formatter.copier[omnijar_path],
args.source, gre_path, base)
copier.copy(args.destination)
if __name__ == '__main__':
main()
| Yukarumya/Yukarum-Redfoxes | toolkit/mozapps/installer/packager.py | Python | mpl-2.0 | 15,683 |
# This file is part of CO𝘕CEPT, the cosmological 𝘕-body code in Python.
# Copyright © 2015–2022 Jeppe Mosgaard Dakin.
#
# CO𝘕CEPT is free software: You can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# CO𝘕CEPT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CO𝘕CEPT. If not, see https://www.gnu.org/licenses/
#
# The author of CO𝘕CEPT can be contacted at dakin(at)phys.au.dk
# The latest version of CO𝘕CEPT is available at
# https://github.com/jmd-dk/concept/
# Import everything from the commons module.
# In the .pyx file, Cython declared variables will also get cimported.
from commons import *
# Cython imports
cimport(
'from communication import '
' communicate_ghosts, '
' domain_subdivisions, '
' get_buffer, '
' rank_neighbouring_domain, '
' sendrecv_component, '
)
cimport('from ewald import get_ewald_grid')
cimport(
'from mesh import '
' copy_modes, '
' diff_domaingrid, '
' domain_decompose, '
' fft, '
' fourier_loop, '
' fourier_operate, '
' get_fftw_slab, '
' interpolate_domaingrid_to_particles, '
' interpolate_upstream, '
' nullify_modes, '
' slab_decompose, '
)
cimport(
'from species import '
' accept_or_reject_subtiling_refinement, '
' tentatively_refine_subtiling, '
)
# Pure Python imports
from mesh import group_components
# Function pointer types used in this module
pxd("""
ctypedef void (*func_interaction)(
str, # interaction_name
Component, # receiver
Component, # supplier
dict, # ᔑdt_rungs
int, # rank_supplier
bint, # only_supply
str, # pairing_level
Py_ssize_t[::1], # tile_indices_receiver
Py_ssize_t**, # tile_indices_supplier_paired
Py_ssize_t*, # tile_indices_supplier_paired_N
dict, # interaction_extra_args
)
""")
# Generic function implementing component-component pairing
@cython.header(
# Arguments
interaction_name=str,
receivers=list,
suppliers=list,
interaction=func_interaction,
ᔑdt_rungs=dict,
pairing_level=str,
interaction_extra_args=dict,
# Locals
affected=list,
anticipate_refinement='bint',
anticipation_period='Py_ssize_t',
attempt_refinement='bint',
computation_time='double',
dependent=list,
deterministic='bint',
index='Py_ssize_t',
judge_refinement='bint',
judgement_period='Py_ssize_t',
lowest_active_rung='signed char',
only_supply='bint',
pair=set,
pairs=list,
rank_other='int',
receiver='Component',
refinement_offset='Py_ssize_t',
refinement_period='Py_ssize_t',
rung_index='signed char',
softening='double',
subtiles_computation_times_N_interaction='Py_ssize_t[::1]',
subtiles_computation_times_interaction='double[::1]',
subtiles_computation_times_sq_interaction='double[::1]',
subtiling='Tiling',
subtiling_shape_judged='Py_ssize_t[::1]',
subtiling_name=str,
subtiling_name_2=str,
supplier='Component',
tile_sorted=set,
tiling_name=str,
returns='void',
)
def component_component(
interaction_name, receivers, suppliers, interaction, ᔑdt_rungs,
pairing_level, interaction_extra_args={},
):
"""This function takes care of pairings between all receiver and
supplier components. It then calls domain_domain.
"""
# Lookup basic information for this interaction
interaction_info = interactions_registered[interaction_name]
dependent = interaction_info.dependent
affected = interaction_info.affected
deterministic = interaction_info.deterministic
# The names used to refer to the domain and tile level tiling
# (tiles and subtiles). In the case of pairing_level == 'domain',
# no actual tiling will take place, but we still need the
# tile + subtile structure. For this, the trivial tiling,
# spanning the box, is used.
if 𝔹[pairing_level == 'tile']:
tiling_name = f'{interaction_name} (tiles)'
subtiling_name = f'{interaction_name} (subtiles)'
subtiling_name_2 = f'{interaction_name} (subtiles 2)'
else: # pairing_level == 'domain':
tiling_name = subtiling_name = 'trivial'
# Set flags anticipate_refinement, attempt_refinement and
# judge_refinement. The first signals whether a tentative subtiling
# refinement attempt is coming up soon, in which case we should
# be collecting computation time data of the current subtiling.
# The second specifies whether a tentative refinement of the
# subtilings in use should be performed now, meaning prior to
# the interaction. The third specifies whether the previously
# performed tentative refinement should be concluded, resulting in
# either accepting or rejecting the refinement.
anticipate_refinement = attempt_refinement = judge_refinement = False
if 𝔹[pairing_level == 'tile'
and shortrange_params[interaction_name]['subtiling'][0] == 'automatic'
]:
# The anticipation_period and judgement_period specifies the
# number of time steps spend collecting computation time data
# before and after a tentative sutiling refinement.
# The refinement will be judged after the first interaction of
# the time step after judgement_period time steps has gone by
# after the tentative refinement (there may be many more
# interactions in this time step, depending on N_rungs).
# Note that changes to anticipation_period or judgement_period
# need to be reflected in the subtiling_refinement_period_min
# variable, defined in the commons module. The relation is
# subtiling_refinement_period_min = (
# anticipation_period + judgement_period + 1)
anticipation_period = 4
judgement_period = 2
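        # With the values above, the minimum sensible refinement period is
        # anticipation_period + judgement_period + 1 = 4 + 2 + 1 = 7,
        # which is what subtiling_refinement_period_min must equal.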
subtiles_computation_times_interaction = subtiles_computation_times [interaction_name]
subtiles_computation_times_sq_interaction = subtiles_computation_times_sq[interaction_name]
subtiles_computation_times_N_interaction = subtiles_computation_times_N [interaction_name]
for receiver in receivers:
subtiling = receiver.tilings.get(subtiling_name)
if subtiling is None:
continue
refinement_period = subtiling.refinement_period
refinement_offset = subtiling.refinement_offset
if refinement_period == 0:
abort(
f'The subtiling "{subtiling_name}" is set to use automatic subtiling '
f'refinement, but it has a refinement period of {refinement_period}.'
)
# We judge the attempted refinement after 2 whole time steps
# has gone by; this one and the next. The refinement will
# then be judged after the first interaction on the third
# time step (there may be many more interactions
# if N_rungs > 1).
if interaction_name in subtilings_under_tentative_refinement:
anticipate_refinement = True
judge_refinement = (
ℤ[universals.time_step + refinement_offset + 1] % refinement_period == 0
)
else:
attempt_refinement = (
(ℤ[universals.time_step + refinement_offset + 1] + judgement_period
) % refinement_period == 0
)
# We begin storing the computation time data of the
# current subtiling 4 time steps before we tentatively
# apply the new subtiling.
anticipate_refinement = (
(ℤ[universals.time_step + refinement_offset + 1] + judgement_period
) % refinement_period >= refinement_period - anticipation_period
)
break
# Do the tentative subtiling refinement, if required
if attempt_refinement:
# Copy the old computation times to new locations in
# subtiles_computation_times_interaction, making room for the
# new computation times.
for rung_index in range(N_rungs):
subtiles_computation_times_interaction[N_rungs + rung_index] = (
subtiles_computation_times_interaction[rung_index]
)
subtiles_computation_times_sq_interaction[N_rungs + rung_index] = (
subtiles_computation_times_sq_interaction[rung_index]
)
subtiles_computation_times_N_interaction[N_rungs + rung_index] = (
subtiles_computation_times_N_interaction[rung_index]
)
subtiles_computation_times_interaction [rung_index] = 0
subtiles_computation_times_sq_interaction[rung_index] = 0
subtiles_computation_times_N_interaction [rung_index] = 0
# Replace the subtilings with slightly refined versions
subtilings_under_tentative_refinement.add(interaction_name)
tentatively_refine_subtiling(interaction_name)
# Pair each receiver with all suppliers and let them interact
pairs = []
tile_sorted = set()
computation_time = 0 # Total tile-tile computation time for this call to component_component()
for receiver in receivers:
for supplier in suppliers:
pair = {receiver, supplier}
if pair in pairs:
continue
pairs.append(pair)
# Make sure that the tile sorting of particles
# in the two components are up-to-date.
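            # (The unswitch context manager used below and throughout
            # effectively moves the enclosed branching outside of the
            # enclosing loop(s) when the code is compiled, so that the
            # condition is not re-evaluated at every iteration. In
            # pure Python mode it has no effect on the semantics.)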
with unswitch(1):
if receiver not in tile_sorted:
receiver.tile_sort(tiling_name)
tile_sorted.add(receiver)
# Also ensure existence of subtiling
receiver.init_tiling(subtiling_name)
if supplier not in tile_sorted:
supplier.tile_sort(tiling_name)
tile_sorted.add(supplier)
# Also ensure existence of subtiling
supplier.init_tiling(subtiling_name)
# Flag specifying whether the supplier should only supply
# forces to the receiver and not receive any force itself.
only_supply = (supplier not in receivers)
# Pair up domains for the current
# receiver and supplier component.
domain_domain(
interaction_name,
receiver,
supplier,
interaction,
ᔑdt_rungs,
dependent,
affected,
only_supply,
deterministic,
pairing_level,
interaction_extra_args,
)
# The interactions between the receiver and all suppliers are
# now done. Add the accumulated computation time to the local
# computation_time variable, then nullify the computation time
# stored on the subtiling, so that it is ready for new data.
# To keep the total computation time tallied up over the entire
# time step present on the subtiling, add the currently stored
# computation time to the computation_time_total attribute
# before doing the nullification.
subtiling = receiver.tilings[subtiling_name]
computation_time += subtiling.computation_time
subtiling.computation_time_total += subtiling.computation_time
subtiling.computation_time = 0
    # All interactions are now done. If the measured computation time
    # should be used for automatic subtiling refinement, store it in
    # the containers defined outside of this function.
if 𝔹[pairing_level == 'tile'
and shortrange_params[interaction_name]['subtiling'][0] == 'automatic'
]:
# The computation time depends drastically on which rungs are
# currently active. We therefore store the total computation
# time according to the current lowest active rung.
if anticipate_refinement or attempt_refinement or judge_refinement:
lowest_active_rung = ℤ[N_rungs - 1]
for receiver in receivers:
if receiver.lowest_active_rung < lowest_active_rung:
lowest_active_rung = receiver.lowest_active_rung
if lowest_active_rung == 0:
break
subtiles_computation_times_interaction [lowest_active_rung] += computation_time
subtiles_computation_times_sq_interaction[lowest_active_rung] += computation_time**2
subtiles_computation_times_N_interaction [lowest_active_rung] += 1
# If it is time to judge a previously attempted refinement,
# do so and reset the computation time.
if judge_refinement:
subtilings_under_tentative_refinement.remove(interaction_name)
subtiling_shape_judged = accept_or_reject_subtiling_refinement(
interaction_name,
subtiles_computation_times_interaction,
subtiles_computation_times_sq_interaction,
subtiles_computation_times_N_interaction,
)
subtiles_computation_times_interaction [:] = 0
subtiles_computation_times_sq_interaction[:] = 0
subtiles_computation_times_N_interaction [:] = 0
else:
subtiling_shape_judged = subtiling_shape_rejected
# Gather information about the acceptance of the new subtiling
# and print out any positive results.
Gather(subtiling_shape_judged, subtiling_shapes_judged)
if master:
for rank_other in range(nprocs):
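                # Each process contributes three integers,
                # constituting a 3D subtile decomposition shape.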
index = 3*rank_other
if subtiling_shapes_judged[index] == 0:
continue
subtiling_shape_judged = subtiling_shapes_judged[index:index+3]
masterprint(
f'Rank {rank_other}: Refined subtile decomposition ({interaction_name}):',
'×'.join(list(map(str, subtiling_shape_judged)))
)
# Containers and array used by the component_component() function.
# The subtiles_computation_times and subtiles_computation_times_N are
# used to store total computation times and numbers for performed
# interactions. They are indexed as
# subtiles_computation_times[interaction_name][rung_index],
# resulting in the accumulated computation time for this interaction
# when the lowest active rung corresponds to rung_index.
# The subtilings_under_tentative_refinement set contains the names of
# interactions whose subtilings are currently under
# tentative refinement.
# The subtiling_shape_rejected array is used only for signalling
# purposes, while the subtiling_shapes_judged array is used to gather
# subtiling shapes from all processes into the master process.
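# Note that each per-interaction array below holds 2*N_rungs entries:
# the lower N_rungs entries accumulate data for the subtiling
# currently in use, while the upper N_rungs entries hold the data
# copied aside just before a tentative refinement (see
# component_component()), so that the old and new subtiling can be
# compared when the refinement is judged.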
cython.declare(
subtiles_computation_times=object,
subtiles_computation_times_sq=object,
subtiles_computation_times_N=object,
subtilings_under_tentative_refinement=set,
subtiling_shape_rejected='Py_ssize_t[::1]',
subtiling_shapes_judged='Py_ssize_t[::1]',
)
subtiles_computation_times = collections.defaultdict(
lambda: zeros(ℤ[2*N_rungs], dtype=C2np['double'])
)
subtiles_computation_times_sq = collections.defaultdict(
lambda: zeros(ℤ[2*N_rungs], dtype=C2np['double'])
)
subtiles_computation_times_N = collections.defaultdict(
lambda: zeros(ℤ[2*N_rungs], dtype=C2np['Py_ssize_t'])
)
subtilings_under_tentative_refinement = set()
subtiling_shape_rejected = zeros(3, dtype=C2np['Py_ssize_t'])
subtiling_shapes_judged = empty(3*nprocs, dtype=C2np['Py_ssize_t']) if master else None
# Generic function implementing domain-domain pairing
@cython.header(
# Arguments
interaction_name=str,
receiver='Component',
supplier='Component',
interaction=func_interaction,
ᔑdt_rungs=dict,
dependent=list,
affected=list,
only_supply='bint',
deterministic='bint',
pairing_level=str,
interaction_extra_args=dict,
# Locals
domain_pair_nr='Py_ssize_t',
instantaneous='bint',
interact='bint',
only_supply_communication='bint',
only_supply_passed='bint',
rank_recv='int',
rank_send='int',
ranks_recv='int[::1]',
ranks_send='int[::1]',
supplier_extrl='Component',
supplier_local='Component',
tile_indices='Py_ssize_t[:, ::1]',
tile_indices_receiver='Py_ssize_t[::1]',
tile_indices_supplier='Py_ssize_t[::1]',
tile_indices_supplier_paired='Py_ssize_t**',
tile_indices_supplier_paired_N='Py_ssize_t*',
tile_pairings_index='Py_ssize_t',
returns='void',
)
def domain_domain(
interaction_name, receiver, supplier, interaction, ᔑdt_rungs, dependent, affected,
only_supply, deterministic, pairing_level, interaction_extra_args,
):
"""This function takes care of pairings between the domains
containing particles/fluid elements of the passed receiver and
supplier component.
As the components are distributed at the domain level,
all communication needed for the interaction will be taken care of
by this function. The receiver will not be communicated, while the
supplier will be sent to other processes (domains) and also received
back from other processes. Thus both local and external versions of
the supplier exist, called supplier_local and supplier_extrl.
The dependent and affected arguments specify which attributes of the
supplier and receiver component are needed to supply and receive
the force, respectively. Only these attributes will be communicated.
If affected is an empty list, this is not really an interaction.
    In this case, every domain will both send to and receive from
    every other domain.
"""
# To satisfy the compiler
tile_indices_receiver = tile_indices_supplier = None
tile_indices_supplier_paired = tile_indices_supplier_paired_N = NULL
# Flag specifying whether or not this interaction is instantaneous.
# For instantaneous interactions, we need to apply the updates to
# the affected variables after each domain-domain pairing.
instantaneous = interactions_registered[interaction_name].instantaneous
# Get the process ranks to send to and receive from.
# When only_supply is True, each domain will be paired with every
# other domain, either in the entire box (pairing_level == 'domain')
# or just among the neighbouring domains (pairing_level == 'tile').
# When only_supply is False, the results of an interaction
    # computed on one process will be sent back to the other
# participating process and applied, cutting the number of domain
# pairs roughly in half. Note however that even if only_supply is
# False, we may not cut the number of domain pairs in half if the
# receiver and supplier are separate components; all domains of
# the receiver then need to be paired with all domains of the
    # supplier. That is, "only_supply" really serves two distinct usages:
# (1) it is passed to the interaction() function so that
# it knows whether to also update the supplier, (2) it determines
# the interprocess communication pattern. As this latter usage also
    # depends upon whether the receiver and supplier are really the same
# component, we extract usage (2) into its own flag,
# "only_supply_communication".
only_supply_communication = (only_supply if 𝔹[receiver is supplier] else True)
ranks_send, ranks_recv = domain_domain_communication(pairing_level, only_supply_communication)
# Backup of the passed only_supply boolean
only_supply_passed = only_supply
# Pair this process/domain with whichever other
# processes/domains are needed. This process is paired
# with two other processes simultaneously. This process/rank sends
# a copy of the local supplier (from now on referred to
# as supplier_local) to rank_send, while receiving the external
# supplier (supplier_extrl) from rank_recv.
# On each process, the local receiver and the external
# (received) supplier_extrl then interact.
supplier_local = supplier
for domain_pair_nr in range(ranks_send.shape[0]):
# Process ranks to send to and receive from
rank_send = ranks_send[domain_pair_nr]
rank_recv = ranks_recv[domain_pair_nr]
# The passed interaction function should always update the
# particles of the receiver component within the local domain,
# due to the particles of the external supplier component,
# within whatever domain they happen to be in.
# Unless the supplier component is truly only a supplier and
# not also a receiver (only_supply is True), the particles
# that supply the force also need to be updated by the passed
# interaction function. It is important that the passed
        # interaction function does not update the affected variables
# directly (e.g. mom for gravity), but instead update the
# corresponding buffers (e.g. Δmom for gravity). The exception
# is when the interaction is instantaneous, in which case the
# affected variables should be updated directly, while also
# updating the corresponding buffer for the supplier. The
# buffers are what will be communicated. Also, Δmom is used to
# figure out which short-range rung any given particle belongs
# to. Special cases described below may change whether or not
# the interaction between this particular domain pair should be
# carried out on the local process (specified by the interact
# flag), or whether the only_supply flag should be changed.
interact = True
only_supply = only_supply_passed
with unswitch:
if 𝔹[receiver is supplier] and 𝔹[pairing_level == 'domain']:
if rank_send == rank_recv != rank:
# We are dealing with the special case where the
# local process and some other (with a rank given by
# rank_send == rank_recv) both send all of their
# particles belonging to the same component to each
# other, after which the exact same interaction
# takes place on both processes. In such a case,
# even when only_supply is False, there is no need
# to communicate the interaction results, as these
# are already known to both processes. Thus, we
# always use only_supply = True in such cases.
# Note that this is not true for
# pairing_level == 'tile', as here not all of the
# particles within the domains are communicated, but
# rather particles within completely disjoint sets
# of tiles, and so the interactions taking place on
# the two processes will not be identical.
only_supply = True
# In the case of a non-deterministic interaction,
# the above logic no longer holds, as the two
# versions of the supposedly same interaction
# computed on different processes will not be
# identical. In such cases, perform the interaction
# only on one of the two processes. The process with
# the lower rank is chosen for the job.
with unswitch:
if not deterministic:
interact = (rank < rank_send)
only_supply = only_supply_passed
# Communicate the dependent variables (e.g. pos for gravity) of
# the supplier. For pairing_level == 'domain', communicate all
# local particles. For pairing_level == 'tile', we only need to
# communicate particles within the tiles that are going to
# interact during the current domain-domain pairing.
with unswitch:
if 𝔹[pairing_level == 'tile']:
# Find interacting tiles
tile_indices = domain_domain_tile_indices(
interaction_name, receiver,
only_supply_communication, domain_pair_nr,
)
tile_indices_receiver = tile_indices[0, :]
tile_indices_supplier = tile_indices[1, :]
else: # pairing_level == 'domain'
# For domain level pairing we make use of
# the trivial tiling, containing a single tile.
tile_indices_receiver = tile_indices_supplier = tile_indices_trivial
tile_indices_supplier_paired = tile_indices_trivial_paired
tile_indices_supplier_paired_N = tile_indices_trivial_paired_N
supplier_extrl = sendrecv_component(
supplier_local, dependent, pairing_level, interaction_name, tile_indices_supplier,
dest=rank_send, source=rank_recv,
)
# Let the local receiver interact with the external
# supplier_extrl. This will update the affected variable buffers
# (e.g. Δmom for gravity) of the local receiver, and of the
# external supplier if only_supply is False.
if interact:
with unswitch:
if 𝔹[pairing_level == 'tile']:
# Get the supplier tiles with which to pair each
# receiver tile and perform the interaction
# at the tile level.
tile_pairings_index = get_tile_pairings(
interaction_name,
receiver,
rank_recv,
only_supply_communication,
domain_pair_nr,
tile_indices_receiver,
tile_indices_supplier,
)
tile_indices_supplier_paired = tile_pairings_cache [tile_pairings_index]
tile_indices_supplier_paired_N = tile_pairings_N_cache[tile_pairings_index]
# Perform the interaction
interaction(
𝕊[interaction_name if 𝔹[pairing_level == 'tile'] else 'trivial'],
receiver,
supplier_extrl,
ᔑdt_rungs,
rank_recv,
only_supply,
pairing_level,
tile_indices_receiver,
tile_indices_supplier_paired,
tile_indices_supplier_paired_N,
interaction_extra_args,
)
# Send the populated buffers (e.g. Δmom for gravity) back to the
# process from which the external supplier_extrl came. Note that
# we should not do this in the case of a local interaction
# (rank_send == rank) or in a case where only_supply is True.
if rank_send != rank and not only_supply:
# For non-instantaneous interactions, the received Δ values
# should be added to the Δ's of the local supplier_local.
# For instantaneous interactions, the received Δ values
# should be added directly to the data of the
# local supplier_local.
sendrecv_component(
supplier_extrl, affected, pairing_level,
interaction_name, tile_indices_supplier,
dest=rank_recv, source=rank_send, component_recv=supplier_local,
use_Δ_recv=(not instantaneous),
)
# Nullify the Δ buffers of the external supplier_extrl,
# leaving this with no leftover junk.
supplier_extrl.nullify_Δ(affected, only_active=False)
# Tile indices for the trivial tiling,
# used by the domain_domain function.
cython.declare(
tile_indices_trivial='Py_ssize_t[::1]',
tile_indices_trivial_paired='Py_ssize_t**',
tile_indices_trivial_paired_N='Py_ssize_t*',
)
tile_indices_trivial = zeros(1, dtype=C2np['Py_ssize_t'])
tile_indices_trivial_paired = malloc(1*sizeof('Py_ssize_t*'))
tile_indices_trivial_paired[0] = cython.address(tile_indices_trivial[:])
tile_indices_trivial_paired_N = malloc(1*sizeof('Py_ssize_t'))
tile_indices_trivial_paired_N[0] = tile_indices_trivial.shape[0]
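# That is, the trivial tiling consists of just a single tile
# (index 0), which is always paired with exactly this one tile.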
# Function returning the process ranks with which to pair
# the local process/domain in the domain_domain function,
# depending on the pairing level and on whether the supplier only
# supplies or also receives.
@cython.header(
# Arguments
pairing_level=str,
only_supply='bint',
# Locals
i='Py_ssize_t',
returns=tuple,
)
def domain_domain_communication(pairing_level, only_supply):
ranks = domain_domain_communication_dict.get((pairing_level, only_supply))
if ranks:
return ranks
if pairing_level == 'domain':
# When only_supply is True, each process should be paired with
# all processes. When only_supply is False, advantage is taken
# of the fact that a process is paired with two other processes
# simultaneously, meaning that the number of pairings is cut
# (roughly) in half. The particular order implemented below
# is of no importance.
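        # As an illustrative example (not used by the code itself),
        # take nprocs = 4 and rank = 0 with only_supply False, so that
        # N_domain_pairs = 1 + 4//2 = 3. The (send, recv) rank pairs
        # for this process become (0, 0), (1, 3) and (2, 2); the
        # pairings with ranks 1 and 3 are thus handled within a
        # single iteration.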
N_domain_pairs = nprocs if only_supply else 1 + nprocs//2
ranks_send = empty(N_domain_pairs, dtype=C2np['int'])
ranks_recv = empty(N_domain_pairs, dtype=C2np['int'])
for i in range(N_domain_pairs):
ranks_send[i] = mod(rank + i, nprocs)
ranks_recv[i] = mod(rank - i, nprocs)
domain_domain_communication_dict[pairing_level, only_supply] = (ranks_send, ranks_recv)
elif pairing_level == 'tile':
# When only_supply is True, each domain should be paired with
# itself and all 26 neighbouring domains. Even though we might
# have nprocs < 27, meaning that some of the neighbouring
# domains might be the same, we always include all of them.
# If only_supply is False, advantage is taken of the fact that a
# domain is simultaneously paired with two other domains along
# the same direction (e.g. to the left and to the right),
# cutting the number of pairings (roughly) in half. The order is
# as specified below, and stored (as directions, not ranks) in
# domain_domain_communication_dict[
# 'tile', only_supply, 'domain_pair_offsets'].
ranks_send = []
ranks_recv = []
offsets_list = []
# - This domain itself
offsets = asarray([0, 0, 0], dtype=C2np['int'])
offsets_list.append(offsets.copy())
ranks_send.append(rank_neighbouring_domain(*(+offsets)))
ranks_recv.append(rank_neighbouring_domain(*(-offsets)))
# - Domains at the 6 faces
# (when only_supply is False, send right, forward, upward)
direction = asarray([+1, 0, 0], dtype=C2np['int'])
for i in range(3):
offsets = np.roll(direction, i)
offsets_list.append(offsets.copy())
ranks_send.append(rank_neighbouring_domain(*(+offsets)))
ranks_recv.append(rank_neighbouring_domain(*(-offsets)))
if only_supply:
offsets_list.append(-offsets)
ranks_send.append(rank_neighbouring_domain(*(-offsets)))
ranks_recv.append(rank_neighbouring_domain(*(+offsets)))
# - Domains at the 12 edges
# (when only_supply is False, send
# {right , forward}, {left , forward },
# {forward, upward }, {backward , upward },
        #     {right   , upward }, {right    , downward},
# )
direction = asarray([+1, +1, 0], dtype=C2np['int'])
flip = asarray([-1, +1, +1], dtype=C2np['int'])
for i in range(3):
offsets = np.roll(direction, i)
offsets_list.append(offsets.copy())
ranks_send.append(rank_neighbouring_domain(*(+offsets)))
ranks_recv.append(rank_neighbouring_domain(*(-offsets)))
if only_supply:
offsets_list.append(-offsets)
ranks_send.append(rank_neighbouring_domain(*(-offsets)))
ranks_recv.append(rank_neighbouring_domain(*(+offsets)))
offsets *= np.roll(flip, i)
offsets_list.append(offsets.copy())
ranks_send.append(rank_neighbouring_domain(*(+offsets)))
ranks_recv.append(rank_neighbouring_domain(*(-offsets)))
if only_supply:
offsets_list.append(-offsets)
ranks_send.append(rank_neighbouring_domain(*(-offsets)))
ranks_recv.append(rank_neighbouring_domain(*(+offsets)))
# - Domains at the 8 corners
# (when only_supply is False, send
# {right, forward , upward },
# {right, forward , downward},
# {left , forward , upward },
# {right, backward, upward },
# )
offsets = asarray([+1, +1, +1], dtype=C2np['int'])
offsets_list.append(offsets.copy())
ranks_send.append(rank_neighbouring_domain(*(+offsets)))
ranks_recv.append(rank_neighbouring_domain(*(-offsets)))
if only_supply:
offsets_list.append(-offsets)
ranks_send.append(rank_neighbouring_domain(*(-offsets)))
ranks_recv.append(rank_neighbouring_domain(*(+offsets)))
direction = asarray([+1, +1, -1], dtype=C2np['int'])
for i in range(3):
offsets = np.roll(direction, i)
offsets_list.append(offsets.copy())
ranks_send.append(rank_neighbouring_domain(*(+offsets)))
ranks_recv.append(rank_neighbouring_domain(*(-offsets)))
if only_supply:
offsets_list.append(-offsets)
ranks_send.append(rank_neighbouring_domain(*(-offsets)))
ranks_recv.append(rank_neighbouring_domain(*(+offsets)))
domain_domain_communication_dict[pairing_level, only_supply] = (
(asarray(ranks_send, dtype=C2np['int']), asarray(ranks_recv, dtype=C2np['int']))
)
domain_domain_communication_dict[pairing_level, only_supply, 'domain_pair_offsets'] = (
asarray(offsets_list, dtype=C2np['Py_ssize_t'])
)
else:
abort(
f'domain_domain_communication() got '
f'pairing_level = {pairing_level} ∉ {{"domain", "tile"}}'
)
return domain_domain_communication_dict[pairing_level, only_supply]
# Cached results of the domain_domain_communication function
# are stored in the dict below.
cython.declare(domain_domain_communication_dict=dict)
domain_domain_communication_dict = {}
# Function returning the indices of the tiles of the local receiver and
# supplier which take part in tile-tile interactions under the
# domain-domain pairing with number domain_pair_nr.
@cython.header(
# Arguments
interaction_name=str,
component='Component',
only_supply='bint',
domain_pair_nr='Py_ssize_t',
# Locals
dim='int',
domain_pair_offsets='Py_ssize_t[:, ::1]',
domain_pair_offset='Py_ssize_t[::1]',
key=tuple,
sign='int',
tile_indices='Py_ssize_t[:, ::1]',
tile_indices_all=list,
tile_indices_component='Py_ssize_t[::1]',
tile_indices_list=list,
tile_layout='Py_ssize_t[:, :, ::1]',
tile_layout_slice_end='Py_ssize_t[::1]',
tile_layout_slice_start='Py_ssize_t[::1]',
tiling='Tiling',
tiling_name=str,
returns='Py_ssize_t[:, ::1]',
)
def domain_domain_tile_indices(interaction_name, component, only_supply, domain_pair_nr):
key = (interaction_name, only_supply)
tile_indices_all = domain_domain_tile_indices_dict.get(key)
if tile_indices_all is None:
tile_indices_all = [None]*27
domain_domain_tile_indices_dict[key] = tile_indices_all
else:
tile_indices = tile_indices_all[domain_pair_nr]
if tile_indices is not None:
return tile_indices
tile_layout_slice_start = empty(3, dtype=C2np['Py_ssize_t'])
tile_layout_slice_end = empty(3, dtype=C2np['Py_ssize_t'])
domain_pair_offsets = domain_domain_communication_dict[
'tile', only_supply, 'domain_pair_offsets']
domain_pair_offset = domain_pair_offsets[domain_pair_nr, :]
tiling_name = f'{interaction_name} (tiles)'
tiling = component.tilings[tiling_name]
tile_layout = tiling.layout
tile_indices_list = []
for sign in range(-1, 2, 2):
for dim in range(3):
if domain_pair_offset[dim] == -sign:
tile_layout_slice_start[dim] = 0
tile_layout_slice_end[dim] = 1
elif domain_pair_offset[dim] == 0:
tile_layout_slice_start[dim] = 0
tile_layout_slice_end[dim] = tile_layout.shape[dim]
elif domain_pair_offset[dim] == +sign:
tile_layout_slice_start[dim] = tile_layout.shape[dim] - 1
tile_layout_slice_end[dim] = tile_layout.shape[dim]
tile_indices_component = asarray(tile_layout[
tile_layout_slice_start[0]:tile_layout_slice_end[0],
tile_layout_slice_start[1]:tile_layout_slice_end[1],
tile_layout_slice_start[2]:tile_layout_slice_end[2],
]).flatten()
tile_indices_list.append(tile_indices_component)
tile_indices = asarray(tile_indices_list, dtype=C2np['Py_ssize_t'])
tile_indices_all[domain_pair_nr] = tile_indices
return tile_indices
# Cached results of the domain_domain_tile_indices function
# are stored in the dict below.
cython.declare(domain_domain_tile_indices_dict=dict)
domain_domain_tile_indices_dict = {}
# Function that, given arrays of receiver and supplier tiles,
# returns them in paired format.
@cython.header(
# Arguments
interaction_name=str,
component='Component',
rank_supplier='int',
only_supply='bint',
domain_pair_nr='Py_ssize_t',
tile_indices_receiver='Py_ssize_t[::1]',
tile_indices_supplier='Py_ssize_t[::1]',
# Locals
dim='int',
domain_pair_offset='Py_ssize_t[::1]',
global_tile_layout_shape='Py_ssize_t[::1]',
i='Py_ssize_t',
j='Py_ssize_t',
key=tuple,
l='Py_ssize_t',
l_offset='Py_ssize_t',
l_s='Py_ssize_t',
m='Py_ssize_t',
m_offset='Py_ssize_t',
m_s='Py_ssize_t',
n='Py_ssize_t',
n_offset='Py_ssize_t',
n_s='Py_ssize_t',
neighbourtile_index_3D_global='Py_ssize_t[::1]',
pairings='Py_ssize_t**',
pairings_N='Py_ssize_t*',
pairs_N='Py_ssize_t',
suppliertile_indices_3D_global_to_1D_local=dict,
tile_index3D='Py_ssize_t*',
tile_index_3D_global_s=tuple,
tile_index_r='Py_ssize_t',
tile_index_s='Py_ssize_t',
tile_indices_supplier_paired='Py_ssize_t[::1]',
tile_indices_supplier_paired_ptr='Py_ssize_t*',
tile_layout='Py_ssize_t[:, :, ::1]',
tile_pairings_index='Py_ssize_t',
tiling='Tiling',
tiling_name=str,
wraparound='bint',
returns='Py_ssize_t',
)
def get_tile_pairings(
interaction_name, component, rank_supplier, only_supply,
domain_pair_nr, tile_indices_receiver, tile_indices_supplier,
):
global tile_pairings_cache, tile_pairings_N_cache, tile_pairings_cache_size
# Lookup index of the required tile pairings in the global cache
key = (interaction_name, domain_pair_nr, only_supply)
tile_pairings_index = tile_pairings_cache_indices.get(key, tile_pairings_cache_size)
if tile_pairings_index < tile_pairings_cache_size:
return tile_pairings_index
# No cached results found. We will now compute the supplier tile
# indices to be paired with each of the receiver tiles.
# Below is a list of lists storing the supplier tile indices
# for each receiver tile. The type of this data structure will
# change during the computation.
tile_indices_receiver_supplier = [[] for i in range(tile_indices_receiver.shape[0])]
# Get the shape of the local (domain) tile layout,
# as well as of the global (box) tile layout.
tiling_name = f'{interaction_name} (tiles)'
tiling = component.tilings[tiling_name]
tile_layout = tiling.layout
tile_layout_shape = asarray(asarray(tile_layout).shape)
# The general computation below takes a long time when dealing with
# many tiles. By far the worst case is when all tiles in the local
# domain should be paired with themselves, which is the case for
# domain_pair_nr == 0. For this case we perform a much faster,
# more specialised computation.
if domain_pair_nr == 0:
if rank != rank_supplier:
abort(
f'get_tile_pairings() got rank_supplier = {rank_supplier} != rank = {rank} '
f'at domain_pair_nr == 0'
)
if not np.all(asarray(tile_indices_receiver) == asarray(tile_indices_supplier)):
abort(
f'get_tile_pairings() got tile_indices_receiver != tile_indices_supplier '
f'at domain_pair_nr == 0'
)
i = 0
for l in range(ℤ[tile_layout.shape[0]]):
for m in range(ℤ[tile_layout.shape[1]]):
for n in range(ℤ[tile_layout.shape[2]]):
if i != tile_layout[l, m, n]:
abort(
f'It looks as though the tile layout of {component.name} is incorrect'
)
neighbourtile_indices_supplier = tile_indices_receiver_supplier[i]
for l_offset in range(-1, 2):
l_s = l + l_offset
if l_s == -1 or l_s == ℤ[tile_layout.shape[0]]:
continue
for m_offset in range(-1, 2):
m_s = m + m_offset
if m_s == -1 or m_s == ℤ[tile_layout.shape[1]]:
continue
for n_offset in range(-1, 2):
n_s = n + n_offset
if n_s == -1 or n_s == ℤ[tile_layout.shape[2]]:
continue
tile_index_s = tile_layout[l_s, m_s, n_s]
# As domain_pair_nr == 0, all tiles in
# the local domain are paired with all
# others. To not double count, we
# disregard the pairing if the supplier
# tile index is lower than the receiver
# tile index (i). However, if
# only_supply is True, there is no
# double counting to be considered (the
# two components are presumably
# different), and so here we do not
# disregard the pairing.
with unswitch:
if not only_supply:
if tile_index_s < i:
continue
neighbourtile_indices_supplier.append(tile_index_s)
tile_indices_receiver_supplier[i] = asarray(
neighbourtile_indices_supplier, dtype=C2np['Py_ssize_t'],
)
i += 1
else:
# Get relative offsets of the domains currently being paired
domain_pair_offset = domain_domain_communication_dict[
'tile', only_supply, 'domain_pair_offsets'][domain_pair_nr, :]
# Get the indices of the global domain layout matching the
# receiver (local) domain and supplier domain.
domain_layout_receiver_indices = asarray(
np.unravel_index(rank, domain_subdivisions)
)
domain_layout_supplier_indices = asarray(
np.unravel_index(rank_supplier, domain_subdivisions)
)
global_tile_layout_shape = asarray(
asarray(domain_subdivisions)*tile_layout_shape,
dtype=C2np['Py_ssize_t'],
)
tile_index_3D_r_start = domain_layout_receiver_indices*tile_layout_shape
tile_index_3D_s_start = domain_layout_supplier_indices*tile_layout_shape
# Construct dict mapping global supplier 3D indices to their
# local 1D counterparts.
suppliertile_indices_3D_global_to_1D_local = {}
for j in range(tile_indices_supplier.shape[0]):
tile_index_s = tile_indices_supplier[j]
tile_index3D = tiling.tile_index3D(tile_index_s)
tile_index_3D_s = asarray([tile_index3D[dim] for dim in range(3)])
tile_index_3D_global_s = tuple(tile_index_3D_s + tile_index_3D_s_start)
suppliertile_indices_3D_global_to_1D_local[tile_index_3D_global_s] = tile_index_s
# Pair each receiver tile with all neighbouring supplier tiles
for i in range(tile_indices_receiver.shape[0]):
neighbourtile_indices_supplier = tile_indices_receiver_supplier[i]
# Construct global 3D index of this receiver tile
tile_index_r = tile_indices_receiver[i]
tile_index3D = tiling.tile_index3D(tile_index_r)
tile_index_3D_r = asarray([tile_index3D[dim] for dim in range(3)])
tile_index_3D_global_r = tile_index_3D_r + tile_index_3D_r_start
# Loop over all neighbouring receiver tiles
# (including the tile itself).
for l in range(-1, 2):
for m in range(-1, 2):
for n in range(-1, 2):
neighbourtile_index_3D_global = asarray(
tile_index_3D_global_r + asarray((l, m, n)),
dtype=C2np['Py_ssize_t'],
)
# For domain_pair_nr == 0, all tiles in the
# local domain are paired with all others.
# Here we must not take the periodicity into
# account, as such interactions are performed by
# future domain pairings.
with unswitch:
if domain_pair_nr == 0:
wraparound = False
for dim in range(3):
if not (
0 <= neighbourtile_index_3D_global[dim]
< global_tile_layout_shape[dim]
):
wraparound = True
break
if wraparound:
continue
# Take the periodicity of the domain layout
# into account. This should only be done
# along the direction(s) connecting
# the paired domains.
with unswitch:
if 𝔹[domain_pair_offset[0] != 0]:
neighbourtile_index_3D_global[0] = mod(
neighbourtile_index_3D_global[0],
global_tile_layout_shape[0],
)
with unswitch:
if 𝔹[domain_pair_offset[1] != 0]:
neighbourtile_index_3D_global[1] = mod(
neighbourtile_index_3D_global[1],
global_tile_layout_shape[1],
)
with unswitch:
if 𝔹[domain_pair_offset[2] != 0]:
neighbourtile_index_3D_global[2] = mod(
neighbourtile_index_3D_global[2],
global_tile_layout_shape[2],
)
# Check if a supplier tile sits at the location
# of the current neighbour tile.
tile_index_s = suppliertile_indices_3D_global_to_1D_local.get(
tuple(neighbourtile_index_3D_global),
-1,
)
if tile_index_s != -1:
# For domain_pair_nr == 0, all tiles in the
# local domain are paired with all others.
# To not double count, we disregard the
# pairing if the supplier tile index is
# lower than the receiver tile index.
# However, if only_supply is True, there is
# no double counting to be considered (the
# two components are presumably different),
# and so here we do not disregard
# the pairing.
with unswitch:
if domain_pair_nr == 0 and not only_supply:
if tile_index_s < tile_index_r:
continue
neighbourtile_indices_supplier.append(tile_index_s)
# Convert the neighbouring supplier tile indices from a list
            # to a Py_ssize_t[::1] array.
# We also sort the indices, though this is not necessary.
neighbourtile_indices_supplier = asarray(
neighbourtile_indices_supplier, dtype=C2np['Py_ssize_t'],
)
neighbourtile_indices_supplier.sort()
tile_indices_receiver_supplier[i] = neighbourtile_indices_supplier
# Transform tile_indices_receiver_supplier to an object array,
# the elements of which are arrays of dtype Py_ssize_t.
tile_indices_receiver_supplier = asarray(tile_indices_receiver_supplier, dtype=object)
# If all arrays in tile_indices_receiver_supplier are of the
# same size, it will not be stored as an object array of Py_ssize_t
    # arrays, but instead as a 2D object array. In compiled mode this leads
# to a crash, as elements of tile_indices_receiver_supplier must be
# compatible with Py_ssize_t[::1]. We can convert it to a 2D
# Py_ssize_t array instead, single-index elements of which exactly
# are the Py_ssize_t[::1] arrays we need. When the arrays in
# tile_indices_receiver_supplier are not of the same size, such a
# conversion will fail. Since we do not care about the conversion
# in such a case anyway, we always just attempt to do
    # the conversion. If it succeeds, it was needed. If not, it was not
# needed anyway.
try:
tile_indices_receiver_supplier = asarray(
tile_indices_receiver_supplier, dtype=C2np['Py_ssize_t'])
except ValueError:
pass
# Cache the result. This cache is not actually used, but it ensures
# that Python will not garbage collect the data.
tile_indices_receiver_supplier_dict[key] = tile_indices_receiver_supplier
# Now comes the caching that is actually used, where we use pointers
# rather than Python objects.
pairs_N = tile_indices_receiver_supplier.shape[0]
pairings = malloc(pairs_N*sizeof('Py_ssize_t*'))
pairings_N = malloc(pairs_N*sizeof('Py_ssize_t'))
for i in range(pairs_N):
tile_indices_supplier_paired = tile_indices_receiver_supplier[i]
tile_indices_supplier_paired_ptr = cython.address(tile_indices_supplier_paired[:])
pairings[i] = tile_indices_supplier_paired_ptr
pairings_N[i] = tile_indices_supplier_paired.shape[0]
tile_pairings_cache_indices[key] = tile_pairings_index
tile_pairings_cache_size += 1
tile_pairings_cache = realloc(
tile_pairings_cache,
tile_pairings_cache_size*sizeof('Py_ssize_t**'),
)
tile_pairings_N_cache = realloc(
tile_pairings_N_cache,
tile_pairings_cache_size*sizeof('Py_ssize_t*'),
)
tile_pairings_cache [tile_pairings_index] = pairings
tile_pairings_N_cache[tile_pairings_index] = pairings_N
return tile_pairings_index
# Caches used by the get_tile_pairings function
cython.declare(
tile_indices_receiver_supplier_dict=dict,
tile_pairings_cache_indices=dict,
tile_pairings_cache_size='Py_ssize_t',
tile_pairings_cache='Py_ssize_t***',
tile_pairings_N_cache='Py_ssize_t**',
)
tile_indices_receiver_supplier_dict = {}
tile_pairings_cache_indices = {}
tile_pairings_cache_size = 0
tile_pairings_cache = malloc(tile_pairings_cache_size*sizeof('Py_ssize_t**'))
tile_pairings_N_cache = malloc(tile_pairings_cache_size*sizeof('Py_ssize_t*'))
# Function responsible for constructing pairings between subtiles within
# the supplied subtiling, including the corresponding subtiles in the 26
# neighbour tiles. Subtiles further away than the supplied forcerange
# will not be paired.
@cython.header(
# Arguments
subtiling='Tiling',
forcerange='double',
only_supply='bint',
# Locals
all_pairings='Py_ssize_t***',
all_pairings_N='Py_ssize_t**',
dim='int',
extent_over_range_dim='double',
key=tuple,
key_quick=tuple,
pairing_index='Py_ssize_t',
pairings='Py_ssize_t**',
pairings_N='Py_ssize_t*',
pairings_r='Py_ssize_t*',
r_dim='Py_ssize_t',
r2='double',
same_tile='bint',
shape='Py_ssize_t[::1]',
size='Py_ssize_t',
subtile_index_r='Py_ssize_t',
subtile_index_s='Py_ssize_t',
subtile_index3D='Py_ssize_t*',
subtile_index3D_r='Py_ssize_t[::1]',
subtile_index3D_s='Py_ssize_t*',
subtile_pairings_index='Py_ssize_t',
tile_extent='double[::1]',
tile_pair_index='int',
tiles_offset='Py_ssize_t[::1]',
tiles_offset_i='Py_ssize_t',
tiles_offset_j='Py_ssize_t',
tiles_offset_k='Py_ssize_t',
tiles_offset_ptr='Py_ssize_t*',
returns='Py_ssize_t',
)
def get_subtile_pairings(subtiling, forcerange, only_supply):
global subtile_pairings_cache, subtile_pairings_N_cache, subtile_pairings_cache_size
# Lookup index of the required subtile pairings in the global cache.
# We first try a quick lookup using a key containing the passed
# subtiling instance. The attributes (e.g. shape and extent)
# on a subtiling instance must then never be redefined.
key_quick = (subtiling, forcerange, only_supply)
subtile_pairings_index = subtile_pairings_cache_indices.get(
key_quick,
subtile_pairings_cache_size,
)
if subtile_pairings_index < subtile_pairings_cache_size:
return subtile_pairings_index
    # The subtile pairings were not found in the cache. It is possible
# that a different subtiling instance with the same shape and the
# same extent in units of the forcerange is present in the cache.
# All results are therefore also stored using keys containing the
# shape and extent/forcerange. Try this more involved lookup.
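    # Note that the extent/forcerange ratios are rounded to 12
    # significant digits below, making the keys insensitive to
    # floating-point noise between otherwise identical subtilings.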
for dim in range(3):
extent_over_range_dim = subtiling.extent[dim]*ℝ[1/forcerange]
extent_over_range[dim] = float(f'{extent_over_range_dim:.12g}')
shape = subtiling.shape
key = (tuple(shape), tuple(extent_over_range), forcerange, only_supply)
subtile_pairings_index = subtile_pairings_cache_indices.get(key, subtile_pairings_cache_size)
if subtile_pairings_index < subtile_pairings_cache_size:
# Found in cache. Add the missing, quick key.
subtile_pairings_cache_indices[key_quick] = subtile_pairings_index
return subtile_pairings_index
# No cached results found. Create subtile pairings
# for each of the 27 cases of neighbour tiles.
size = subtiling.size
tile_extent = subtiling.tile_extent
all_pairings = malloc(27*sizeof('Py_ssize_t**'))
all_pairings_N = malloc(27*sizeof('Py_ssize_t*'))
tiles_offset = empty(3, dtype=C2np['Py_ssize_t'])
subtile_index3D_r = empty(3, dtype=C2np['Py_ssize_t'])
tiles_offset_ptr = cython.address(tiles_offset[:])
same_tile = False
for tiles_offset_i in range(-1, 2):
tiles_offset_ptr[0] = tiles_offset_i
for tiles_offset_j in range(-1, 2):
tiles_offset_ptr[1] = tiles_offset_j
for tiles_offset_k in range(-1, 2):
tiles_offset_ptr[2] = tiles_offset_k
# Does the tile offset correspond to
# a tile being paired with itself?
with unswitch:
if not only_supply:
same_tile = (tiles_offset_i == tiles_offset_j == tiles_offset_k == 0)
# Get 1D tile pair index from the 3D offset
tile_pair_index = get_neighbourtile_pair_index(
tiles_offset_i, tiles_offset_j, tiles_offset_k,
)
# Allocate memory for subtile pairings
# for this particular tile pair.
pairings = malloc(size*sizeof('Py_ssize_t*'))
pairings_N = malloc(size*sizeof('Py_ssize_t'))
all_pairings [tile_pair_index] = pairings
all_pairings_N[tile_pair_index] = pairings_N
# Loop over all receiver subtiles
for subtile_index_r in range(size):
# Get 3D subtile index. As the tile_index3D() method
                    # returns a view over internal data and we mutate
# subtile_index3D_r below, we take a copy of the
# returned data.
subtile_index3D = subtiling.tile_index3D(subtile_index_r)
for dim in range(3):
subtile_index3D_r[dim] = subtile_index3D[dim]
# The receiver and supplier subtiles belong to
# (potentially) different tiles, with a relative
# offset given by tiles_offset_*, so that the
# supplier tile is at the receiver tile location
# plus tiles_offset_*. We now subtract this offset
# from the receiver 3D subtile index, so that the
# difference in subtile indices between the receiver
# and supplier subtile is proportional to their
# physical separation. Note that subtile_index3D_r
# no longer represents the actual index in memory.
for dim in range(3):
subtile_index3D_r[dim] -= tiles_offset_ptr[dim]*shape[dim]
# Allocate memory for subtile pairings with this
# particular receiver subtile.
# We give it the maximum possible needed memory.
pairings_r = malloc(size*sizeof('Py_ssize_t'))
# Pair receiver subtile with every supplier subtile,
# unless the tile is being paired with itself.
# In that case, we need to not double count the
# subtile pairing (while still pairing every subtile
# with themselves).
pairing_index = 0
for subtile_index_s in range(subtile_index_r if same_tile else 0, size):
subtile_index3D_s = subtiling.tile_index3D(subtile_index_s)
# Measure (squared) distance between the subtile
# pair and reject if larger than the passed
# forcerange.
r2 = 0
for dim in range(3):
# Distance between the same point in the two
# subtiles along the dim'th dimension,
# in subtile grid units.
r_dim = abs(subtile_index3D_r[dim] - subtile_index3D_s[dim])
if r_dim > 0:
# The two subtiles are offset along the
# dim'th dimension. Subtract one unit
# from the length, making the length
# between the closest two points
# in the two subtiles.
r_dim -= 1
r2 += (r_dim*tile_extent[dim])**2
if r2 > ℝ[forcerange**2]:
continue
# Add this supplier subtile to the list of
# pairing partners for this receiver subtile.
pairings_r[pairing_index] = subtile_index_s
pairing_index += 1
# All pairs found for this receiver subtile.
# Truncate the allocated memory as to only contain
# the used chunk.
pairings_r = realloc(
pairings_r, pairing_index*sizeof('Py_ssize_t'),
)
pairings[subtile_index_r] = pairings_r
# Record the size of this pairing array
pairings_N[subtile_index_r] = pairing_index
# Store results in global caches
subtile_pairings_cache_indices[key_quick] = subtile_pairings_index
subtile_pairings_cache_indices[key ] = subtile_pairings_index
subtile_pairings_cache_size += 1
subtile_pairings_cache = realloc(
subtile_pairings_cache, subtile_pairings_cache_size*sizeof('Py_ssize_t***'),
)
subtile_pairings_N_cache = realloc(
subtile_pairings_N_cache, subtile_pairings_cache_size*sizeof('Py_ssize_t**'),
)
subtile_pairings_cache [subtile_pairings_index] = all_pairings
subtile_pairings_N_cache[subtile_pairings_index] = all_pairings_N
# Return cached results in form of the cache index
return subtile_pairings_index
# Caches used by the get_subtile_pairings function
cython.declare(
extent_over_range='double[::1]',
subtile_pairings_cache_indices=dict,
subtile_pairings_cache_size='Py_ssize_t',
subtile_pairings_cache='Py_ssize_t****',
subtile_pairings_N_cache='Py_ssize_t***',
)
extent_over_range = empty(3, dtype=C2np['double'])
subtile_pairings_cache_indices = {}
subtile_pairings_cache_size = 0
subtile_pairings_cache = malloc(subtile_pairings_cache_size*sizeof('Py_ssize_t***'))
subtile_pairings_N_cache = malloc(subtile_pairings_cache_size*sizeof('Py_ssize_t**'))
# Helper function for the get_subtile_pairings function
@cython.header(
# Arguments
tiles_offset_i='Py_ssize_t',
tiles_offset_j='Py_ssize_t',
tiles_offset_k='Py_ssize_t',
# Locals
i='int',
j='int',
k='int',
returns='int',
)
def get_neighbourtile_pair_index(tiles_offset_i, tiles_offset_j, tiles_offset_k):
# The passed tiles_offset is the relative offset between a pair of
# neighbouring tiles, and so each of its three elements has to be
# in {-1, 0, +1}. If any element is outside this range, it is due
# to the periodic boundaries. Fix this now, as we do not care about
# whether the tile pair is connected through the box boundary.
# To avoid jumps we write this out arithmetically.
i = (
+ (tiles_offset_i == 1) - (tiles_offset_i == -1)
+ (tiles_offset_i < -1) - (tiles_offset_i > 1)
)
j = (
+ (tiles_offset_j == 1) - (tiles_offset_j == -1)
+ (tiles_offset_j < -1) - (tiles_offset_j > 1)
)
k = (
+ (tiles_offset_k == 1) - (tiles_offset_k == -1)
+ (tiles_offset_k < -1) - (tiles_offset_k > 1)
)
# Compute 1D index from a 3×3×3 shape. We add 1 to each element,
# as they range from -1 to +1. The index is then given by
# ((i + 1)*3 + (j + 1))*3 + (k + 1), which we write out below.
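    # As a concrete example, the offset (+1, 0, -1) yields
    # (i, j, k) = (+1, 0, -1) and thus the index
    # 1*9 + 0*3 + (-1) + 13 = 21, while a wrapped-around offset such
    # as (-5, 0, 0) is first clamped to i = +1, as a large negative
    # offset corresponds to the neighbour through the lower
    # periodic boundary.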
return i*9 + j*3 + k + 13
# Generic function implementing particle-particle pairing.
# Note that this function returns a generator and so should only be
# called within a loop.
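# A minimal usage sketch (illustrative only; the exact tuple of
# yielded variables is defined by the yield statement within the
# function body below):
#     for yielded_variables in particle_particle(
#         receiver, supplier, pairing_level,
#         tile_indices_receiver, tile_indices_supplier_paired,
#         tile_indices_supplier_paired_N, rank_supplier,
#         interaction_name, only_supply, factors,
#     ):
#         ...  # compute the force between the yielded particle pair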
@cython.iterator(
depends=[
# Global variables used by particle_particle()
'periodic_offset',
'tile_location_r',
'tile_location_r_ptr',
'tile_location_s',
'tile_location_s_ptr',
'tiles_offset',
'tiles_offset_ptr',
# Functions used by particle_particle()
'get_subtile_pairings',
# Global variables used by get_subtile_pairings()
'extent_over_range',
'subtile_pairings_cache_indices',
'subtile_pairings_cache_size',
'subtile_pairings_cache',
'subtile_pairings_N_cache',
'get_neighbourtile_pair_index',
]
)
def particle_particle(
receiver, supplier, pairing_level,
tile_indices_receiver, tile_indices_supplier_paired, tile_indices_supplier_paired_N,
rank_supplier, interaction_name, only_supply, factors, forcerange=-1,
):
# Cython declarations for variables used for the iteration,
# not including those to yield.
# Do not write these using the decorator syntax above this function.
cython.declare(
# Keyword arguments
forcerange='double',
# Locals
N_subtiles='Py_ssize_t',
all_subtile_pairings='Py_ssize_t***',
all_subtile_pairings_N='Py_ssize_t**',
dim='int',
highest_populated_rung_r='signed char',
highest_populated_rung_s='signed char',
indexᵖ_i='Py_ssize_t',
local_interaction_flag_0='bint',
local_interaction_flag_1='bint',
local_interaction_flag_2='bint',
lowest_active_rung_r='signed char',
lowest_active_rung_s='signed char',
lowest_populated_rung_r='signed char',
lowest_populated_rung_s='signed char',
only_supply_communication='bint',
periodic_offset_ptr='double*',
pos_r='double*',
pos_s='double*',
rung_particle_index_r='Py_ssize_t',
rung_particle_index_s='Py_ssize_t',
rung_particle_index_s_start='Py_ssize_t',
rung_N_r='Py_ssize_t',
rung_N_s='Py_ssize_t',
rung_index_r='signed char',
rung_index_r_bgn='signed char',
rung_index_r_end='signed char',
rung_index_s_bgn='signed char',
rung_index_s_end='signed char',
rung_indices_jumped_r='signed char*',
rung_r='Py_ssize_t*',
rung_s='Py_ssize_t*',
rungs_N_r='Py_ssize_t*',
rungs_N_s='Py_ssize_t*',
subtile_contain_jumping_r='bint',
subtile_contain_onlyinactive_r='bint',
subtile_contain_particles_r='signed char',
subtile_contain_particles_s='signed char',
subtile_index_r='Py_ssize_t',
subtile_index_s='Py_ssize_t',
subtile_pairings='Py_ssize_t**',
subtile_pairings_N='Py_ssize_t*',
subtile_pairings_N_r='Py_ssize_t',
subtile_pairings_index='Py_ssize_t',
subtile_pairings_r='Py_ssize_t*',
subtile_r='Py_ssize_t**',
subtile_s='Py_ssize_t**',
subtiles_contain_particles_r='signed char*',
subtiles_contain_particles_s='signed char*',
subtiles_r='Py_ssize_t***',
subtiles_rungs_N_r='Py_ssize_t**',
subtiles_rungs_N_s='Py_ssize_t**',
subtiles_s='Py_ssize_t***',
subtiling_name=str,
subtiling_name_2=str,
subtiling_s='Tiling',
subtiling_s_2='Tiling',
tile_contain_onlyinactive_r='bint',
tile_contain_particles_r='signed char',
tile_contain_particles_s='signed char',
tile_extent='double*',
tile_index_r='Py_ssize_t',
tile_index_s='Py_ssize_t',
tile_index3D_r='Py_ssize_t*',
tile_index3D_s='Py_ssize_t*',
tile_indices_supplier='Py_ssize_t*',
tile_indices_supplier_N='Py_ssize_t',
tile_location_s_dim='double',
tile_pair_index='int',
tile_separation='double',
tiles_contain_particles_r='signed char*',
tiles_contain_particles_s='signed char*',
tiles_r='Py_ssize_t***',
tiles_s='Py_ssize_t***',
tiling_location_r='double*',
tiling_location_s='double*',
tiling_name=str,
tiling_r='Tiling',
tiling_s='Tiling',
xi='double',
yi='double',
zi='double',
)
# Extract particle variables from the receiver component
pos_r = receiver.pos
lowest_active_rung_r = receiver.lowest_active_rung
lowest_populated_rung_r = receiver.lowest_populated_rung
highest_populated_rung_r = receiver.highest_populated_rung
rung_indices_jumped_r = receiver.rung_indices_jumped
# Extract particle variables from the supplier
# (the external) component.
pos_s = supplier.pos
lowest_active_rung_s = supplier.lowest_active_rung
lowest_populated_rung_s = supplier.lowest_populated_rung
highest_populated_rung_s = supplier.highest_populated_rung
# The names used to refer to the domain and tile level tiling
# (tiles and subtiles). In the case of pairing_level == 'domain',
# we always use the trivial tiling.
if 𝔹[pairing_level == 'tile']:
tiling_name = f'{interaction_name} (tiles)'
subtiling_name = f'{interaction_name} (subtiles)'
else: # pairing_level == 'domain':
tiling_name = subtiling_name = 'trivial'
# Extract tiling variables from receiver
tiling_r = receiver.tilings[tiling_name]
tiling_location_r = cython.address(tiling_r.location[:])
tile_extent = cython.address(tiling_r.tile_extent[:]) # the same for receiver and supplier
tiles_r = tiling_r.tiles
tiles_contain_particles_r = tiling_r.contain_particles
# Extract subtiling variables from receiver
subtiling_r = receiver.tilings[subtiling_name]
subtiles_r = subtiling_r.tiles
subtiles_contain_particles_r = subtiling_r.contain_particles
N_subtiles = subtiling_r.size # The same for receiver and supplier
# Extract tiling variables from supplier
tiling_s = supplier.tilings[tiling_name]
tiling_location_s = cython.address(tiling_s.location[:])
tiles_s = tiling_s.tiles
tiles_contain_particles_s = tiling_s.contain_particles
# Extract subtiling variables from supplier.
# When the receiver and supplier components are the same
# and the receiver and supplier domains are also the same,
# we now have a case where (tiling_r is tiling_s) and
# (subtiling_r is subtiling_s) are both True. This is OK for
# the coarse tiling, but not for the subtiling, as here we need
# to re-sort the particles during the iteration below. That is,
# we need to keep track of the sorting of the receiver tiles
# into subtiles while also keeping track of the sorting of the
# supplier tiles into subtiles. We thus always need two separate
# subtiling_{r/s} instances, which we do not have in the case
# mentioned. When this is the case, we make use of a second,
# separate Tiling instance. If however the subtiling in use is the
# trivial tiling, the re-sorting has no effect, and so we do not
# have to worry.
subtiling_s = supplier.tilings[subtiling_name]
if 𝔹[receiver.name == supplier.name and rank == rank_supplier and subtiling_name != 'trivial']:
subtiling_name_2 = f'{interaction_name} (subtiles 2)'
if subtiling_name_2 not in supplier.tilings:
supplier.tilings.pop(subtiling_name)
subtiling_s_2 = supplier.init_tiling(subtiling_name)
supplier.tilings[subtiling_name ] = subtiling_s
supplier.tilings[subtiling_name_2] = subtiling_s_2
subtiling_s = supplier.tilings[subtiling_name_2]
subtiles_s = subtiling_s.tiles
subtiles_contain_particles_s = subtiling_s.contain_particles
# Get subtile pairings between each
# of the 27 possible tile pairings.
only_supply_communication = (only_supply if receiver.name == supplier.name else True)
if forcerange == -1:
forcerange = get_shortrange_param((receiver, supplier), interaction_name, 'range')
subtile_pairings_index = get_subtile_pairings(
subtiling_r, forcerange, only_supply_communication,
)
all_subtile_pairings = subtile_pairings_cache[subtile_pairings_index]
all_subtile_pairings_N = subtile_pairings_N_cache[subtile_pairings_index]
# Range of receiver and supplier rungs.
# Note that rung_index_s_bgn will be set later.
if only_supply:
rung_index_r_bgn = lowest_active_rung_r
else:
rung_index_r_bgn = lowest_populated_rung_r
rung_index_r_end = highest_populated_rung_r + 1
rung_index_s_end = highest_populated_rung_s + 1
# Local pointer into the global array of particle position offsets
# due to the periodicity.
periodic_offset_ptr = cython.address(periodic_offset[:])
# Some default values to yield
rung_index_i = 0
factor_i = 0
# Flags specifying whether the force between particle i and j
# should be applied to i and j. If only_supply is True,
# the values below are correct. Otherwise, other values
# will be set further down.
apply_to_i = True
apply_to_j = False
# The current time. This is yielded back to the caller,
# where time() - particle_particle_t_begin should be added to the
# computation_time of the receiver subtiling. This is used for the
    # automatic subtiling refinement and the load imbalance printout.
particle_particle_t_begin = time()
# Loop over the requested tiles in the receiver
for tile_index_r in range(ℤ[tile_indices_receiver.shape[0]]):
# Lookup supplier tile indices with which to pair the current
# receiver tile.
tile_indices_supplier = tile_indices_supplier_paired [tile_index_r]
tile_indices_supplier_N = tile_indices_supplier_paired_N[tile_index_r]
# Now make tile_index_r an actual receiver tile index
tile_index_r = tile_indices_receiver[tile_index_r]
# Skip tile if it does not contain any particles at all,
# or only inactive particles when only_supply is True.
tile_contain_particles_r = tiles_contain_particles_r[tile_index_r]
with unswitch(1):
if 𝔹[not only_supply]:
if tile_contain_particles_r == 0:
continue
else:
if tile_contain_particles_r < 2:
continue
tile_contain_onlyinactive_r = (tile_contain_particles_r == 1)
# Sort particles within the receiver tile into subtiles
tile_index3D_r = tiling_r.tile_index3D(tile_index_r)
for dim in range(3):
tile_location_r_ptr[dim] = (
tiling_location_r[dim] + tile_index3D_r[dim]*tile_extent[dim]
)
subtiling_r.relocate(tile_location_r)
subtiling_r.sort(tiling_r, tile_index_r)
subtiles_rungs_N_r = subtiling_r.tiles_rungs_N
# Loop over the requested tiles in the supplier
for tile_index_s in range(tile_indices_supplier_N):
tile_index_s = tile_indices_supplier[tile_index_s]
# Skip tile if it does not contain any particles at all
tile_contain_particles_s = tiles_contain_particles_s[tile_index_s]
if tile_contain_particles_s == 0:
continue
            # If both the receiver and supplier tile contain particles
            # on inactive rungs only, we skip this tile pair.
with unswitch(1):
if tile_contain_onlyinactive_r:
if tile_contain_particles_s == 1:
continue
# Sort particles within the supplier tile into subtiles
tile_index3D_s = tiling_s.tile_index3D(tile_index_s)
for dim in range(3):
# While in this loop, also determine the tile offset
tiles_offset_ptr[dim] = ℤ[tile_index3D_s[dim]] - tile_index3D_r[dim]
# Set floating supplier tile location
tile_location_s_dim = (
tiling_location_s[dim] + ℤ[tile_index3D_s[dim]]*tile_extent[dim]
)
tile_location_s_ptr[dim] = tile_location_s_dim
# While in this loop, also determine
# the periodic particle offset.
tile_separation = tile_location_s_dim - tile_location_r_ptr[dim]
if tile_separation > ℝ[0.5*boxsize]:
periodic_offset_ptr[dim] = boxsize
elif tile_separation < ℝ[-0.5*boxsize]:
periodic_offset_ptr[dim] = ℝ[-boxsize]
else:
periodic_offset_ptr[dim] = 0
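            # (The periodic offsets determined above are used to shift
            # the supplier particle positions into the periodic image
            # nearest the receiver tile.)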
subtiling_s.relocate(tile_location_s)
subtiling_s.sort(tiling_s, tile_index_s)
subtiles_rungs_N_s = subtiling_s.tiles_rungs_N
# Extract the values from periodic_offset_ptr
periodic_offset_x = periodic_offset_ptr[0]
periodic_offset_y = periodic_offset_ptr[1]
periodic_offset_z = periodic_offset_ptr[2]
# Get the needed subtile pairings for the selected receiver
# and supplier tiles (which should be neighbour tiles).
tile_pair_index = get_neighbourtile_pair_index(
tiles_offset_ptr[0], tiles_offset_ptr[1], tiles_offset_ptr[2],
)
subtile_pairings = all_subtile_pairings [tile_pair_index]
subtile_pairings_N = all_subtile_pairings_N[tile_pair_index]
# Flag specifying whether this is a local interaction
local_interaction_flag_0 = (
𝔹[receiver.name == supplier.name and rank == rank_supplier]
and (tile_index_r == tile_index_s)
)
# Loop over all subtiles in the selected receiver tile
for subtile_index_r in range(N_subtiles):
# Skip subtile if it does not contain
# any particles at all, or only inactive particles
# when only_supply is True.
subtile_contain_particles_r = subtiles_contain_particles_r[subtile_index_r]
with unswitch(3):
if 𝔹[not only_supply]:
if subtile_contain_particles_r == 0:
continue
subtile_contain_onlyinactive_r = (subtile_contain_particles_r == 1)
else:
if subtile_contain_particles_r < 2:
continue
# Set and extract various receiver subtile variables
subtile_contain_jumping_r = (subtile_contain_particles_r == 3)
subtile_r = subtiles_r [subtile_index_r]
rungs_N_r = subtiles_rungs_N_r[subtile_index_r]
subtile_pairings_r = subtile_pairings [subtile_index_r]
subtile_pairings_N_r = subtile_pairings_N[subtile_index_r]
# Loop over the needed supplier subtiles
for subtile_index_s in range(subtile_pairings_N_r):
subtile_index_s = subtile_pairings_r[subtile_index_s]
# Skip subtile if it does not contain
# any particles at all.
subtile_contain_particles_s = subtiles_contain_particles_s[subtile_index_s]
if subtile_contain_particles_s == 0:
continue
                    # If both the receiver and supplier subtile contain
                    # particles on inactive rungs only, we skip this
                    # subtile pair.
with unswitch(4):
if 𝔹[not only_supply]:
with unswitch(1):
if subtile_contain_onlyinactive_r:
if subtile_contain_particles_s == 1:
continue
# Set and extract various supplier subtile variables
subtile_contain_jumping_s = (subtile_contain_particles_s == 3)
subtile_s = subtiles_s [subtile_index_s]
rungs_N_s = subtiles_rungs_N_s[subtile_index_s]
# Flag specifying whether this is a local interaction
local_interaction_flag_1 = (
local_interaction_flag_0
and (subtile_index_r == subtile_index_s)
)
# Loop over all rungs in the receiver subtile
for rung_index_r in range(rung_index_r_bgn, rung_index_r_end):
rung_N_r = rungs_N_r[rung_index_r]
if rung_N_r == 0:
continue
rung_r = subtile_r[rung_index_r]
# We need to pair all active receiver rungs
# with all supplier rungs. All inactive
                        # receiver rungs need only be paired with
# the active supplier rungs (i.e. we do not need
# to pair up two inactive rungs).
# If only_supply is True, the values already set
# will be used.
rung_index_s_bgn = lowest_populated_rung_s
with unswitch(5):
if 𝔹[not only_supply]:
if rung_index_r < lowest_active_rung_r:
# Only the supplier should receive
# a kick.
apply_to_i = False
rung_index_s_bgn = lowest_active_rung_s
else:
# The receiver and the supplier
# should receive a kick.
apply_to_i = True
# We need to make sure not to double count the
# rung pairs for local interactions. Here,
# local means that the current components,
# domains, tiles and subtiles for the receiver
# and supplier are all the same.
if local_interaction_flag_1 and (rung_index_s_bgn < rung_index_r):
rung_index_s_bgn = rung_index_r
# Loop over the needed supplier rungs
for rung_index_s in range(rung_index_s_bgn, rung_index_s_end):
rung_N_s = rungs_N_s[rung_index_s]
if rung_N_s == 0:
continue
rung_s = subtile_s[rung_index_s]
# Flag whether we need to apply the force to
# the supplier particles in this rung (if
# not, we still apply the force to the
# receiver particles).
with unswitch(6):
if 𝔹[not only_supply]:
apply_to_j = (rung_index_s >= lowest_active_rung_s)
# Flag specifying whether this is a local interaction
local_interaction_flag_2 = (
local_interaction_flag_1
and (rung_index_r == rung_index_s)
)
# Loop over all particles
# in the receiver rung.
for rung_particle_index_r in range(rung_N_r):
# Get receiver particle index
indexᵖ_i = rung_r[rung_particle_index_r]
# Get the jumped rung index for
# particle i, which is given by
# rung_indices_jumped_r[i]. In the
# common case of no jumping, this
# equals the non-jumped rung index,
# which is given by rung_index_r.
# In an attempt to avoid the lookup,
# we test whether the current receiver
# subtile contains any particles that
                                # jump and get the jumped/non-jumped
# index accordingly.
with unswitch(2):
if apply_to_i:
with unswitch(4):
if subtile_contain_jumping_r:
rung_index_i = rung_indices_jumped_r[indexᵖ_i]
else:
rung_index_i = rung_index_r
# Fetch the corresponding factor
factor_i = factors[rung_index_i]
# Get coordinates of receiver particle
indexˣ_i = 3*indexᵖ_i
xi = pos_r[indexˣ_i + 0]
yi = pos_r[indexˣ_i + 1]
zi = pos_r[indexˣ_i + 2]
# We need to make sure not to double
# count the particle pairs for local
# interactions. Here, local means that
# the current components, domains,
# tiles, subtiles and rungs for the
# receiver and supplier are all
# the same.
rung_particle_index_s_start = (
local_interaction_flag_2*(rung_particle_index_r + 1)
)
# Loop over the needed particles
# in the supplier rung.
for rung_particle_index_s in range(
rung_particle_index_s_start, rung_N_s,
):
# Get supplier particle index
indexᵖ_j = rung_s[rung_particle_index_s]
# "Vector" from particle j
# to particle i.
indexˣ_j = 3*indexᵖ_j
x_ji = xi - pos_s[indexˣ_j + 0]
y_ji = yi - pos_s[indexˣ_j + 1]
z_ji = zi - pos_s[indexˣ_j + 2]
# Yield the needed variables
                                    yield (
                                        indexˣ_i, indexᵖ_j, indexˣ_j,
                                        rung_index_i, rung_index_s,
                                        x_ji, y_ji, z_ji,
                                        periodic_offset_x, periodic_offset_y, periodic_offset_z,
                                        apply_to_i, apply_to_j, factor_i,
                                        subtile_contain_jumping_s,
                                        particle_particle_t_begin, subtiling_r,
                                    )
# Variables used by the particle_particle() function
cython.declare(
periodic_offset='double[::1]',
tile_location_r='double[::1]',
tile_location_r_ptr='double*',
tile_location_s='double[::1]',
tile_location_s_ptr='double*',
tiles_offset='Py_ssize_t[::1]',
tiles_offset_ptr='Py_ssize_t*',
)
periodic_offset = empty(3, dtype=C2np['double'])
tile_location_r = empty(3, dtype=C2np['double'])
tile_location_s = empty(3, dtype=C2np['double'])
tiles_offset = empty(3, dtype=C2np['Py_ssize_t'])
tile_location_r_ptr = cython.address(tile_location_r[:])
tile_location_s_ptr = cython.address(tile_location_s[:])
tiles_offset_ptr = cython.address(tiles_offset[:])
# Function for converting a pair of softening lengths
# into a single softening length.
@cython.header(
# Arguments
ϵᵢ='double',
ϵⱼ='double',
# Locals
ϵ='double',
returns='double',
)
def combine_softening_lengths(ϵᵢ, ϵⱼ):
# Combining softening lengths may be done in several
# different ways, e.g.
# ϵ = sqrt(ϵᵢ² + ϵⱼ²)
# ϵ = (ϵᵢ + ϵⱼ)/2
# ϵ = min(ϵᵢ, ϵⱼ)
# ϵ = max(ϵᵢ, ϵⱼ)
# Here we settle for the arithmetic mean.
# This has been used by e.g. Hernquist & Barnes 1990.
ϵ = 0.5*(ϵᵢ + ϵⱼ)
return ϵ
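# As a worked example of the rules above (illustrative only): for
# ϵᵢ = 2 and ϵⱼ = 4 we would get
#   sqrt(ϵᵢ² + ϵⱼ²) ≈ 4.47, (ϵᵢ + ϵⱼ)/2 = 3, min(ϵᵢ, ϵⱼ) = 2, max(ϵᵢ, ϵⱼ) = 4,
# of which only the arithmetic mean (3) is actually computed here.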
# Function for computing the softened r⁻³.
# Instead of calling this function many times for some fixed
# softening length, consider tabulating its results.
@cython.header(
# Arguments
r2='double',
ϵ='double',
# Locals
h='double',
r='double',
r2_softened='double',
r3='double',
u='double',
returns='double',
)
def get_softened_r3inv(r2, ϵ):
# The ϵ argument is the Plummer softening length,
# regardless of which softening kernel is used (set by the
# softening_kernel parameter). Each implemented softening kernel is
# described in the code below. What is returned is always a
# softened version of r⁻³, intended for replacement in the
# gravitational force F⃗(r⃗) ∝ r⃗ r⁻³.
#
# References:
# [1] https://arxiv.org/abs/astro-ph/0011568
if 𝔹[softening_kernel == 'none']:
# Do not soften the force at all.
# In terms of a kernel, this corresponds to
# W(r) = δ³(r)
# ⟹ F⃗(r⃗) ∝ r⃗ r⁻³.
# Using this in simulations with any appreciable clustering
# will lead to generation of large amounts of spurious energy.
# To at least remove the divergence at r = 0,
# we return 0 here.
r3 = r2*sqrt(r2)
return (0 if r3 == 0 else 1/r3)
elif 𝔹[softening_kernel == 'plummer']:
# This is the simplest softening kernel. As it has non-compact
# support it softens the force at all scales.
# W(r) = 3/(4πϵ³) [1 + (r/ϵ)²]⁻⁵ᐟ²
# ⟹ F⃗(r⃗) ∝ r⃗ (r² + ϵ²)⁻³ᐟ².
r2_softened = r2 + ϵ**2
return 1/(r2_softened*sqrt(r2_softened))
elif 𝔹[softening_kernel == 'spline']:
# This is the cubic spline kernel of
# Monaghan & Lattanzio (1985), often used in SPH.
# It is the gravitational softening used in GADGET-2.
# ⎧ 1 - 6(r/h)² + 6(r/h)³ 0 ≤ r < h/2
# W(r) = 8/(πh³) ⎨ 2(1 - r/h)³ h/2 ≤ r < h
# ⎩ 0 h ≤ r
# ⎧ 32/h³ [1/3 - 6/5(r/h)² + (r/h)³] 0 ≤ r < h/2
# ⟹ F⃗(r⃗) ∝ r⃗ ⎨ 32/h³ [2/3 - 3/2(r/h) + 6/5(r/h)² - 1/3(r/h)³ - 1/480(r/h)⁻³] h/2 ≤ r < h
# ⎩ r⁻³ h ≤ r,
# where h is the spline softening length "equivalent" to the
# Plummer softening length ϵ. If we require F⃗(ϵ) for the
# spline softening to equal F⃗(ϵ) for the Plummer softening,
# we get h = 2.7116122709425334ϵ. GADGET-2 uses h = 2.8ϵ.
# We choose to follow GADGET-2.
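        # Explicitly, with u = ϵ/h ≈ 0.369 < 1/2 (so that the first
        # branch of the force expression applies), the matching
        # condition reads
        #   32/h³ (1/3 - 6/5 u² + u³) = (ϵ² + ϵ²)⁻³ᐟ² = 2⁻³ᐟ² ϵ⁻³,
        # whose numerical solution in h is the value quoted above.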
h = 2.8*ϵ
r = sqrt(r2)
if r >= h:
return 1/(r2*r)
u = r/h
if u < 0.5:
return 32/h**3*(1./3. + u**2*(-6./5. + u))
return 32/(3*r**3)*(u**3*(2 + u*(-9./2. + u*(18./5. - u))) - 3./480.)
elif 𝔹[softening_kernel == 'epanechnikov']:
# This kernel is described in e.g. [1] (where it is called F₁),
# where it is shown to be superior to the cubic spline kernel
# as it features a smaller mean integrated
# squared force error.
abort('Softening kernel "Epanechnikov" not yet implemented')
elif 𝔹[softening_kernel == 'compensate']:
# This is the kernel described in [1] under the name K₁.
# It is reminiscent of the Epanechnikov kernel,
# but compensates for the softened force at small r by
# overestimating the force around the softening length,
# supposedly leading to an even smaller mean integrated
# squared force error.
abort('Softening kernel "compensate" not yet implemented')
# The specified softening kernel is not implemented
abort(f'Softening kernel "{softening_kernel}" not understood')
return 0 # To satisfy the compiler
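# A minimal sketch of the tabulation suggested above (illustrative only;
# the names and the resolution are assumptions, not part of this code base):
#   r2_values = linspace(0, r2_max, 4096)
#   r3inv_tab = asarray([get_softened_r3inv(r2, ϵ) for r2 in r2_values])
# after which softened r⁻³ values may be looked up (e.g. by linear
# interpolation in r²) rather than recomputed for every particle pair.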
# Generic function implementing particle-mesh interactions
# for both particle and fluid components.
@cython.header(
# Arguments
receivers=list,
suppliers=list,
gridsize_global='Py_ssize_t',
quantity=str,
force=str,
method=str,
potential=str,
deconvolve_downstream='bint',
deconvolve_upstream='bint',
interpolation_order='int',
interlace='bint',
ᔑdt=dict,
ᔑdt_key=object, # str or tuple
# Locals
all_receiver_downstream_gridsizes_equal_global='bint',
all_supplier_upstream_gridsizes_equal_global='bint',
at_last_differentiation_order='bint',
at_last_representation='bint',
deconv_order_downstream='int',
deconv_order_global='int',
differentiation_order='int',
dim='int',
downstream_description=str,
downstream_description_gridsize=str,
downstream_description_representation=str,
factor='double',
fourier_diff='bint',
grid_downstream='double[:, :, ::1]',
gridsize_downstream='Py_ssize_t',
group=dict,
groups=dict,
index='Py_ssize_t',
k2='Py_ssize_t',
ki='Py_ssize_t',
kj='Py_ssize_t',
kk='Py_ssize_t',
mutate_slab_downstream_ok='bint',
nullification=str,
only_particle_receivers='bint',
only_particle_suppliers='bint',
receiver='Component',
receivers_differentiations=list,
receivers_gridsizes_downstream=list,
representation=str,
slab_downstream='double[:, :, ::1]',
    slab_downstream_subgroup='double[:, :, ::1]',
slab_global='double[:, :, ::1]',
slab_global_ptr='double*',
subgroup=list,
subgroups=dict,
supplier='Component',
suppliers_description=str,
suppliers_gridsizes_upstream=list,
Δx='double',
θ='double',
ᐁᵢgrid_downstream='double[:, :, ::1]',
returns='void',
)
def particle_mesh(
receivers, suppliers, gridsize_global, quantity, force, method, potential, interpolation_order,
deconvolve_upstream, deconvolve_downstream, interlace,
ᔑdt, ᔑdt_key,
):
"""
This function will update the momenta of all receiver components due
to an interaction with the supplier components. The steps taken are
outlined below.
- Interpolate the specified quantity of all supplier components
onto upstream (component specific) grids.
- Transform to upstream Fourier slabs.
- Perform deconvolution and interlacing of upstream Fourier slabs of
particle suppliers, if specified.
- Add upstream Fourier slabs together, producing global Fourier
slabs.
- Convert the values of the global Fourier slabs to potential
values, with the formula to use given by the 'potential' argument.
- Obtain downstream potential Fourier slabs for each receiver.
For particle receivers, another deconvolution is now performed due
to the upcoming interpolation.
- For receivers obtaining the force grid from the
real-space potential:
- Transform to real space downstream potentials.
- For each dimension, differentiate the downstream potentials to
get the real space downstream force.
- For receivers obtaining the force grid from the
Fourier-space potential:
- For each dimension, differentiate the downstream potentials in
Fourier space to obtain the downstream force grid
in Fourier space.
- Transform to real space downstream force grid.
- Interpolate the real-space downstream force grid onto
the receivers, applying the force.
The force application uses the prescription
Δmom = -component.mass*∂ⁱφ*ᔑdt[ᔑdt_key].
"""
if potential not in {'gravity', 'gravity long-range'}:
abort(
f'particle_mesh() got potential "{potential}" ∉ {{"gravity", "gravity long-range"}}'
)
suppliers_gridsizes_upstream = [
supplier.potential_gridsizes[force][method].upstream
for supplier in suppliers
]
receivers_gridsizes_downstream = [
receiver.potential_gridsizes[force][method].downstream
for receiver in receivers
]
suppliers_description = ', '.join([
supplier.name + (
f' ({supplier_gridsize_upstream})'
if 𝔹[np.any(asarray(suppliers_gridsizes_upstream) != gridsize_global)] else ''
)
for supplier, supplier_gridsize_upstream in zip(suppliers, suppliers_gridsizes_upstream)
])
if len(suppliers) > 1:
suppliers_description = f'{{{suppliers_description}}}'
masterprint(
f'Constructing potential of grid size {gridsize_global} due to {suppliers_description} ...'
)
# If we only have particle suppliers (receivers) and all
# upstream (downstream) grid sizes equal the global grid size,
# we perform the upstream (downstream) deconvolution right within
# this function while constructing the global potential,
# rather than through calls to interpolate_upstream()
# or fourier_operate().
only_particle_suppliers = all([
supplier.representation == 'particles'
for supplier in suppliers
])
only_particle_receivers = all([
receiver.representation == 'particles'
for receiver in receivers
])
all_supplier_upstream_gridsizes_equal_global = np.all(
asarray(suppliers_gridsizes_upstream) == gridsize_global
)
all_receiver_downstream_gridsizes_equal_global = np.all(
asarray(receivers_gridsizes_downstream) == gridsize_global
)
deconv_order_global = 0
if deconvolve_upstream:
if only_particle_suppliers and all_supplier_upstream_gridsizes_equal_global:
# Promote upstream deconvolution to global deconvolution
deconvolve_upstream = False
deconv_order_global += 1
if deconvolve_downstream:
if only_particle_receivers and all_receiver_downstream_gridsizes_equal_global:
# Promote downstream deconvolution to global deconvolution
deconvolve_downstream = False
deconv_order_global += 1
deconv_order_global *= interpolation_order
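    # As an illustrative example: with CIC interpolation
    # (interpolation_order == 2) and both the upstream and downstream
    # deconvolutions promoted to global ones, deconv_order_global
    # becomes (1 + 1)*2 == 4, and no separate upstream or downstream
    # deconvolutions are carried out.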
# Interpolate suppliers onto global Fourier slabs by first
# interpolating them onto individual upstream grids, transforming to
# Fourier space and then adding them together.
slab_global = interpolate_upstream(
suppliers, suppliers_gridsizes_upstream, gridsize_global, quantity, interpolation_order,
ᔑdt, deconvolve_upstream, interlace, output_space='Fourier',
)
slab_global_ptr = cython.address(slab_global[:, :, :])
# Convert slab_global values to potential
# and possibly perform upstream and/or downstream deconvolutions.
for index, ki, kj, kk, factor, θ in fourier_loop(gridsize_global,
skip_origin=True, deconv_order=deconv_order_global,
):
k2 = ℤ[ℤ[ℤ[kj**2] + ki**2] + kk**2]
# The potential factor, for converting the slab values
# to the desired potential.
with unswitch(5):
# The physical squared length of the wave
# vector is given by |k|² = (2π/boxsize)**2*k2.
if 𝔹[potential == 'gravity']:
# The Poisson equation, the factor of which is
# -4πG/|k|².
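                # Explicitly,
                #   -4πG/|k|² = -4πG/((2π/boxsize)²k2)
                #             = -boxsize²G_Newton/(π k2),
                # which is the factor applied below.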
factor *= ℝ[-boxsize**2*G_Newton/π]/k2
else: # potential == 'gravity long-range'
# The Poisson equation with a Gaussian
# cutoff, resulting in the factor
# -4πG/|k|² * exp(-rₛ²*|k|²).
factor *= (
ℝ[-boxsize**2*G_Newton/π]/k2
*exp(k2*ℝ[-(2*π/boxsize*shortrange_params['gravity']['scale'])**2])
)
# Apply factor from deconvolution and potential
slab_global_ptr[index ] *= factor # real part
slab_global_ptr[index + 1] *= factor # imag part
# Ensure nullified origin
nullify_modes(slab_global, 'origin')
masterprint('done')
# Group receivers according to their downstream grid size.
# The order does not matter, except that we want the group with the
# downstream grid size equal to the global grid size to be the last,
    # if indeed such a group exists.
groups = group_components(receivers, receivers_gridsizes_downstream, [..., gridsize_global])
# For each group, obtain downstream potential, compute downstream
# forces and apply these to the receivers within the group.
for gridsize_downstream, group in groups.items():
downstream_description_gridsize = (
str(gridsize_downstream)
if not all_receiver_downstream_gridsizes_equal_global else ''
)
# Physical grid spacing of downstream potential grid
Δx = boxsize/gridsize_downstream
# Obtain downstream slab potential from global slab potential
if gridsize_downstream == gridsize_global:
# The downstream and global grid sizes are the same, so we
# use the global slab as the downstream slab. The global
# slab is then mutated by the code below (e.g. by the
# in-place FFT), but as we are guaranteed that this is the
# last downstream grid/group, this is OK.
slab_downstream = slab_global
else:
# Fetch a slab to be used for the downstream potential.
# As this is constructed through down- or up-scaling of the
# global potential, we need to nullify all elements not set
# by the down-/up-scaling. We could of course just always
# perform a complete nullification.
if gridsize_downstream < gridsize_global:
# Downscaling. All elements of slab_downstream will be
# set except the Nyquist planes, which then have to be
# nullified.
nullification = 'nyquist'
else:
# Upscaling. Only elements within a cube centred at
# the origin and of size gridsize_global will be set.
# We then need to nullify all elements beyond this cube.
nullification = f'beyond cube of |k| < {gridsize_global//2}'
slab_downstream = get_fftw_slab(
gridsize_downstream, 'slab_updownstream', nullification,
)
# Obtain downstream slab potential through up-/down-scaling
copy_modes(slab_global, slab_downstream, operation='=')
# Handle each (possible) representation of the receivers in turn
for representation in ('fluid', 'particles'):
if representation not in group:
continue
at_last_representation = (
𝔹[representation == 'particles'] or 'particles' not in group
)
downstream_description_representation = (
representation
if len(group) == 2 else ''
)
downstream_description = ', '.join([
description
for description in (
downstream_description_gridsize,
downstream_description_representation,
)
if description
])
if downstream_description:
downstream_description = f'({downstream_description}) '
# The downstream deconvolution order, taking the
# representation into account.
deconv_order_downstream = interpolation_order*(
𝔹[representation == 'particles'] and deconvolve_downstream
)
# Further group the receivers of the current representation
# within the current group into subgroups based on their
# potential differentiation order.
receivers_differentiations = [
receiver.potential_differentiations[force][method]
for receiver in group[representation]
]
subgroups = group_components(
group[representation],
receivers_differentiations,
sorted(receivers_differentiations, reverse=True),
split_representations=False,
)
for differentiation_order, subgroup in subgroups.items():
at_last_differentiation_order = (
differentiation_order == ℤ[np.min(receivers_differentiations)]
)
# A differentiation order of 0
# signals differentiation in Fourier space.
fourier_diff = (differentiation_order == 0)
# The downstream potential for all receivers within the
# group (and thus subgroup) is stored as
# slab_downstream. We want to use this directly if
# possible, but if we need to mutate it and we are not
# at the last subgroup, we need to take a copy.
mutate_slab_downstream_ok = False
if fourier_diff:
if at_last_representation and at_last_differentiation_order:
                        # Mutation really only OK for the last dim
                        # (dim == 2), as enforced below
mutate_slab_downstream_ok = True
else:
if at_last_representation and at_last_differentiation_order:
mutate_slab_downstream_ok = True
elif deconv_order_downstream == 0:
# Mutation really not OK, but as the slab will
# not be differentiated nor deconvolved, there
# is no reason to take a copy.
mutate_slab_downstream_ok = True
# Obtain the force grid either in Fourier or real space
if fourier_diff:
# Fourier space differentiation.
# For each dimension, differentiate the grid
# to obtain the force and apply this force.
for dim in range(3):
masterprint(f'Obtaining and applying the {"xyz"[dim]}-force ...')
# Get reference to or copy of slab_downstream
if mutate_slab_downstream_ok and dim == 2:
slab_downstream_subgroup = slab_downstream
else:
slab_downstream_subgroup = get_fftw_slab(
gridsize_downstream, 'slab_updownstream_subgroup',
)
slab_downstream_subgroup[...] = slab_downstream
# Do the in-place Fourier differentiation along
# with the possible downstream deconvolution.
fourier_operate(
slab_downstream_subgroup,
deconv_order_downstream,
diff_dim=dim,
)
# Transform to real space
# and perform domain decomposition.
masterprint(
f'Transforming to real space force {downstream_description}...'
)
fft(slab_downstream_subgroup, 'backward')
grid_downstream = domain_decompose(
slab_downstream_subgroup,
'grid_updownstream',
do_ghost_communication=True,
)
masterprint('done')
# Apply force
apply_particle_mesh_force(
grid_downstream, dim, group[representation], interpolation_order,
ᔑdt, ᔑdt_key,
)
masterprint('done')
else:
# Real space differentiation.
# Get reference to or copy of slab_downstream.
if mutate_slab_downstream_ok:
slab_downstream_subgroup = slab_downstream
else:
slab_downstream_subgroup = get_fftw_slab(
gridsize_downstream, 'slab_updownstream_subgroup',
)
slab_downstream_subgroup[...] = slab_downstream
# Perform possible downstream deconvolution.
fourier_operate(
slab_downstream_subgroup,
deconv_order=deconv_order_downstream,
)
# Transform to real space
# and perform domain decomposition.
masterprint(
f'Transforming to real space potential {downstream_description}...'
)
fft(slab_downstream_subgroup, 'backward')
grid_downstream = domain_decompose(
slab_downstream_subgroup,
'grid_updownstream',
do_ghost_communication=True,
)
masterprint('done')
# For each dimension, differentiate the grid
# to obtain the force and apply this force.
for dim in range(3):
masterprint(f'Obtaining and applying the {"xyz"[dim]}-force ...')
# Differentiate the downstream potential in real
# space using finite difference. We need to
# properly populate the ghost points in the
# differentiated grid, as ghost points are
# needed for particle interpolation. For fluids,
# having proper ghost points in the
# differentiated grid means that the momentum
# grid will automatically get ghost points
# populated correctly as well.
ᐁᵢgrid_downstream = diff_domaingrid(
grid_downstream, dim, differentiation_order,
Δx, 'force_downstream',
do_ghost_communication=True,
)
# Apply force
apply_particle_mesh_force(
ᐁᵢgrid_downstream, dim, group[representation], interpolation_order,
ᔑdt, ᔑdt_key,
)
masterprint('done')
# Function for applying a scalar grid of the force along the dim'th
# dimension to receiver components.
@cython.header(
# Arguments
grid='double[:, :, ::1]',
dim='int',
receivers=list,
interpolation_order='int',
ᔑdt=dict,
ᔑdt_key=object, # str or tuple
# Locals
Jᵢ='FluidScalar',
Jᵢ_ptr='double*',
grid_ptr='double*',
index='Py_ssize_t',
receiver='Component',
substitute_ᔑdt_key='bint',
ϱ_ptr='double*',
𝒫_ptr='double*',
returns='void',
)
def apply_particle_mesh_force(grid, dim, receivers, interpolation_order, ᔑdt, ᔑdt_key):
if not receivers:
return
if not (0 <= dim < 3):
abort(f'apply_particle_mesh_force() called with dim = {dim} ∉ {{0, 1, 2}}')
# When ᔑdt_key is a (2-)tuple, the last element needs to be
# substituted with the name of a given component. The variable below
# acts as a flag for this substitution.
substitute_ᔑdt_key = isinstance(ᔑdt_key, tuple)
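    # As an example: given ᔑdt_key == ('a**(-3*w_eff)', 'component') and
    # a receiver named (say) 'matter', the key used below becomes
    # ('a**(-3*w_eff)', 'matter'), selecting the time step integral
    # evaluated using the w_eff of that particular component.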
# Apply the force grid to each receiver
grid_ptr = cython.address(grid[:, :, :])
for receiver in receivers:
masterprint(f'Applying force to {receiver.name} ...')
if substitute_ᔑdt_key:
ᔑdt_key = (ᔑdt_key[0], receiver.name)
if receiver.representation == 'particles':
# Update the dim'th momentum component of all particles
# through interpolation in the grid. To convert from force
# to momentum change we should multiply by -mass*Δt (minus
# as the force is the negative gradient of the potential),
# where Δt = ᔑdt['1']. Here this integral over the time step
# is generalised and supplied by the caller.
interpolate_domaingrid_to_particles(
grid, receiver, 'mom', dim, interpolation_order,
receiver.mass*ℝ[-ᔑdt[ᔑdt_key]],
)
else: # receiver.representation == 'fluid'
# The source term has the form
# ΔJ ∝ -(ϱ + c⁻²𝒫)*ᐁφ.
# The proportionality factor above is something like
# Δt = ᔑdt['1']. Here this integral over the time step is
# generalised and supplied by the caller.
Jᵢ = receiver.J[dim]
Jᵢ_ptr = Jᵢ.grid
ϱ_ptr = receiver.ϱ.grid
𝒫_ptr = receiver.𝒫.grid
for index in range(receiver.size):
Jᵢ_ptr[index] += ℝ[-ᔑdt[ᔑdt_key]]*(
ϱ_ptr[index] + ℝ[light_speed**(-2)]*𝒫_ptr[index]
)*grid_ptr[index]
masterprint('done')
# Function implementing progress messages used for the short-range
# kicks intertwined with drift operations.
@cython.pheader(
# Arguments
force=str,
method=str,
receivers=list,
extra_message=str,
# Locals
component='Component',
returns=str,
)
def shortrange_progress_message(force, method, receivers, extra_message=' (short-range only)'):
# Lookup appropriate form of the name of the force
force = interactions_registered[force].conjugated_name
# Print the progress message
if method == 'p3m':
if len(receivers) == 1:
return f'{force} interaction for {receivers[0].name} via the P³M method{extra_message}'
else:
return (
f'{force} interaction for {{{{{{}}}}}} via the P³M method{extra_message}'
.format(', '.join([component.name for component in receivers]))
)
elif method == 'pp':
if len(receivers) == 1:
return f'{force} interaction for {receivers[0].name} via the PP method'
else:
return (
f'{force} interaction for {{{{{{}}}}}} via the PP method'
.format(', '.join([component.name for component in receivers]))
)
elif method == 'ppnonperiodic':
if len(receivers) == 1:
return f'{force} interaction for {receivers[0].name} via the non-periodic PP method'
else:
return (
f'{force} interaction for {{{{{{}}}}}} via the non-periodic PP method'
.format(', '.join([component.name for component in receivers]))
)
else:
abort(f'The method "{method}" is unknown to shortrange_progress_message()')
# Function that given lists of receiver and supplier components of a
# one-way interaction removes any components from the supplier list that
# are also present in the receiver list.
def oneway_force(receivers, suppliers):
return [component for component in suppliers if component not in receivers]
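# For example (illustrative): with receivers = [a] and suppliers = [a, b],
# oneway_force() returns [b], so that a one-way force is supplied only by
# components which do not themselves receive it.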
# Function which constructs a list of interactions from a list of
# components. The list of interactions store information about which
# components interact with one another, via what force and method.
def find_interactions(components, interaction_type='any', instantaneous='both'):
"""You may specify an interaction_type to only get
specific interactions. The options are:
- interaction_type == 'any':
Include every interaction.
- interaction_type == 'long-range':
Include long-range interactions only, i.e. ones with a method of
      either PM or P³M. Note that P³M interactions will also be
returned for interaction_type == 'short-range'.
- interaction_type == 'short-range':
      Include short-range interactions only, i.e. ones with any method
      other than PM. Note that P³M interactions will also be returned
      for interaction_type == 'long-range'.
Furthermore you may specify instantaneous to filter out interactions
that are (not) instantaneous:
- instantaneous == 'both':
Include both instantaneous and non-instantaneous interactions.
- instantaneous == True:
      Include only instantaneous interactions.
- instantaneous == False:
      Include only non-instantaneous interactions.
"""
# Use cached result
key = (interaction_type, instantaneous, tuple(components))
interactions_list = interactions_lists.get(key)
if interactions_list is not None:
return interactions_list
# Find all (force, method) pairs in use. Store these as a (default)
# dict mapping forces to lists of methods.
forces_in_use = collections.defaultdict(set)
for component in components:
for force, method in component.forces.items():
forces_in_use[force].add(method)
# Check that all forces and methods assigned
# to the components are implemented.
for force, methods in forces_in_use.items():
interaction_info = interactions_registered.get(force)
if interaction_info is None:
abort(f'Force "{force}" is not implemented')
for method in interaction_info.methods:
if not method:
# Should never happen
                abort(f'Falsy method "{method}" registered for force "{force}"')
if method not in methods_implemented:
abort(f'Force method "{method}" not recognised')
for method in methods:
if not method:
                # When the method is set to an empty string it signifies
                # that the component should act as a supplier for the
                # given force, but not receive the force itself.
continue
if method not in methods_implemented:
abort(f'Force method "{method}" not recognised')
if method not in interaction_info.methods:
if len(interaction_info.methods) == 1:
abort(
f'Method "{method}" for force "{force}" is not implemented. '
f'Did you mean "{interaction_info.methods[0]}"?'
)
else:
abort(
f'Method "{method}" for force "{force}" is not implemented. '
f'Did you mean one of {interaction_info.methods}?'
)
# Construct the interactions_list with (named) 4-tuples
# in the format (force, method, receivers, suppliers),
# where receivers is a list of all components which interact
# via the force and should therefore receive momentum updates
# computed via this force and the method given as the
# second element. In the simple case where all components
    # interact under some force using the same method, the suppliers
# list holds the same components as the receivers list. When the
# same force should be applied to several components using
# different methods, the suppliers list still holds all components
# as before, while the receivers list is limited to just those
# components that should receive the force using the
# specified method. Note that the receivers do not contribute to the
# force unless they are also present in the suppliers list.
interactions_list = []
for force, interaction_info in interactions_registered.items():
methods = interaction_info.methods
for method in methods:
if method not in forces_in_use.get(force, []):
continue
# Find all receiver and supplier components
# for this (force, method) pair.
receivers = []
suppliers = []
for component in components:
if force in component.forces:
suppliers.append(component)
if component.forces[force] == method:
receivers.append(component)
# Store the 4-tuple in the interactions_list
interactions_list.append(Interaction(force, method, receivers, suppliers))
# Cleanup the list of interactions
def cleanup():
nonlocal interactions_list
# If fluid components are present as suppliers for interactions
# using a method different from PM, remove them from the
# suppliers list and create a new PM interaction instead.
for i, interaction in enumerate(interactions_list):
if interaction.method == 'pm':
continue
for component in interaction.suppliers:
if component.representation == 'fluid':
interaction.suppliers.remove(component)
interactions_list.insert(
i + 1,
Interaction(
interaction.force, 'pm', interaction.receivers.copy(), [component],
)
)
return True
# Remove interactions with no suppliers or no receivers
interactions_list = [interaction for interaction in interactions_list
if interaction.receivers and interaction.suppliers]
# Merge interactions of identical force, method and receivers,
# but different suppliers; or identical force,
# method and suppliers but different receivers.
for i, interaction_i in enumerate(interactions_list):
for j, interaction_j in enumerate(interactions_list[i+1:], i+1):
if interaction_i.force != interaction_j.force:
continue
if interaction_i.method != interaction_j.method:
continue
if (
set(interaction_i.receivers) == set(interaction_j.receivers)
and set(interaction_i.suppliers) != set(interaction_j.suppliers)
):
for supplier in interaction_j.suppliers:
if supplier not in interaction_i.suppliers:
interaction_i.suppliers.insert(0, supplier)
interactions_list.pop(j)
return True
if (
set(interaction_i.receivers) != set(interaction_j.receivers)
and set(interaction_i.suppliers) == set(interaction_j.suppliers)
):
for receiver in interaction_j.receivers:
if receiver not in interaction_i.receivers:
interaction_i.receivers.insert(0, receiver)
interactions_list.pop(j)
return True
while cleanup():
pass
# In the case that only long-/short-range interactions should be
# considered, remove the unwanted interactions.
if 'long' in interaction_type:
for interaction in interactions_list:
if interaction.method not in {'pm', 'p3m'}:
interaction.receivers[:] = []
while cleanup():
pass
elif 'short' in interaction_type:
for interaction in interactions_list:
if interaction.method == 'pm':
interaction.receivers[:] = []
while cleanup():
pass
elif 'any' not in interaction_type:
abort(f'find_interactions(): Unknown interaction_type "{interaction_type}"')
# In the case that only (non-)instantaneous interactions should be
# considered, remove the unwanted interactions.
if 'True' in str(instantaneous):
for interaction in interactions_list:
if not interactions_registered[interaction.force].instantaneous:
interaction.receivers[:] = []
while cleanup():
pass
elif 'False' in str(instantaneous):
for interaction in interactions_list:
if interactions_registered[interaction.force].instantaneous:
interaction.receivers[:] = []
while cleanup():
pass
elif 'both' not in str(instantaneous):
abort(f'find_interactions(): Unknown instantaneous value "{instantaneous}"')
# Cache the result and return it
interactions_lists[key] = interactions_list
return interactions_list
# Global dict of interaction lists populated by the above function
cython.declare(interactions_lists=dict)
interactions_lists = {}
# Create the Interaction type used in the above function
Interaction = collections.namedtuple(
'Interaction', ('force', 'method', 'receivers', 'suppliers')
)
# Function for registering interactions
def register(
force, methods, conjugated_name=None,
*,
dependent=('pos', ), affected=('mom', ),
deterministic=True, instantaneous=False,
):
"""Every implemented interaction should be registered by a call to
this function. The order in which interactions are registered will
be the order in which they are carried out, with the exception that
short-range instantaneous interactions will be carried out before
short-range non-instantaneous interactions.
"""
# Canonicalize force and method names
def canonicalize(s):
s = s.lower()
for char in ' _-^()':
s = s.replace(char, '')
for n in range(10):
s = s.replace(unicode_superscript(str(n)), str(n))
return s
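    # As an example, canonicalize('P³M') == canonicalize('P^3M') == 'p3m':
    # the name is lower-cased, the characters ' _-^()' are stripped and
    # any superscript digits are replaced by their ordinary counterparts.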
force = canonicalize(force)
methods = [canonicalize(method) for method in any2list(methods)]
# If no "conjugated" version of the force name is given,
# set it equal to the normal name of the force.
if conjugated_name is None:
conjugated_name = force
# Store the information globally as an InteractionInfo instance
interactions_registered[force] = InteractionInfo(
force, methods, conjugated_name,
any2list(dependent), any2list(affected),
deterministic, instantaneous,
)
# Global dict of interaction infos populated by the above function
cython.declare(interactions_registered=dict)
interactions_registered = {}
# Create the InteractionInfo type used in the above function
InteractionInfo = collections.namedtuple(
'InteractionInfo',
(
'force', 'methods', 'conjugated_name',
'dependent', 'affected',
'deterministic', 'instantaneous',
),
)
# Function which looks up quantities defined between pairs of
# components within a passed dict.
@cython.header(
# Arguments
receivers=list,
suppliers=list,
select_dict=dict,
name=str,
# Locals
key=tuple,
pair=set,
pairs=list,
quantities=dict,
quantity=object,
quantity_r=object,
quantity_s=object,
receiver='Component',
supplier='Component',
returns=dict,
)
def get_pairwise_quantities(receivers, suppliers, select_dict, name=''):
"""The "name" argument is only used in relation with the caching.
"""
# Attempt lookup in cache
if name:
key = (
name,
frozenset([receiver.name for receiver in receivers]),
frozenset([supplier.name for supplier in suppliers]),
)
quantities = pairwise_quantities.get(key)
if quantities is not None:
return quantities
# Result not present in cache. Do the lookups.
quantities = {}
pairs = []
for receiver in receivers:
for supplier in suppliers:
pair = {receiver, supplier}
if pair in pairs:
continue
pairs.append(pair)
# Look up the quantity for this {receiver, supplier} pair
quantity = is_selected((receiver, supplier), select_dict)
if quantity is None:
if receiver.name == supplier.name:
quantity = is_selected(receiver, select_dict)
else:
quantity_r = is_selected(receiver, select_dict)
quantity_s = is_selected(supplier, select_dict)
if quantity_r == quantity_s:
quantity = quantity_r
if quantity is None:
if name:
abort(
f'get_pairwise_quantities(): No pairwise quantity "{name}" '
f'for {{{receiver.name}, {supplier.name}}} specified in the passed dict.'
)
else:
abort(
f'get_pairwise_quantities(): No pairwise quantity '
f'for {{{receiver.name}, {supplier.name}}} specified in the passed dict.'
)
# Store the found quantity symmetrically
# with respect to the receiver and supplier.
quantities[receiver.name, supplier.name] = quantity
quantities[supplier.name, receiver.name] = quantity
# Save the found quantities to the cache
if name:
pairwise_quantities[key] = quantities
return quantities
# Cache used by the above function
cython.declare(pairwise_quantities=dict)
pairwise_quantities = {}
# Function for looking up various information needed for potential
# (i.e. particle mesh) interactions.
@cython.header(
# Arguments
force=str,
method=str,
receivers=list,
suppliers=list,
# Locals
deconvolve=object, # PotentialDeconvolutions
gridsize='Py_ssize_t',
gridsizes=set,
interlace='bint',
interpolation_order='int',
key=tuple,
potential_specs=object, # PotentialInfo
receiver='Component',
supplier='Component',
returns=object, # PotentialInfo
)
def get_potential_specs(force, method, receivers, suppliers):
# Cache lookup
key = (force, method, tuple(receivers), tuple(suppliers))
potential_specs = potential_specs_cache.get(key)
if potential_specs is not None:
return potential_specs
# Look up the global potential grid size
gridsize = 𝕆[potential_options['gridsize']['global']][force][method]
if gridsize == -1:
# No global grid size specified. If all supplier upstream
# and receiver downstream grid sizes are equal, use this for
# the global grid size.
gridsizes = (
{
supplier.potential_gridsizes[force][method].upstream
for supplier in suppliers
} | {
receiver.potential_gridsizes[force][method].downstream
for receiver in receivers
}
)
if len(gridsizes) != 1:
abort(
f'No global potential grid size specified for force "{force}" with '
f'method "{method}". As multiple upstream and/or downstream grid sizes '
f'are in use, the global grid size could not be set automatically.'
)
gridsize = gridsizes.pop()
# Fetch interpolation, deconvolution and interlacing specifications
interpolation_order = 𝕆[potential_options['interpolation' ]][force][method]
deconvolve = 𝕆[potential_options['deconvolve' ]][force][method]
interlace = 𝕆[potential_options['interlace' ]][force][method]
# Cache and return
potential_specs = PotentialInfo(gridsize, interpolation_order, deconvolve, interlace)
potential_specs_cache[key] = potential_specs
return potential_specs
# Cache and type used by the above function
cython.declare(potential_specs_cache=dict)
potential_specs_cache = {}
PotentialInfo = collections.namedtuple(
'PotentialInfo', ('gridsize', 'interpolation_order', 'deconvolve', 'interlace'),
)
#########################################
# Implement specific interactions below #
#########################################
# Gravity
cimport('from gravity import *')
register('gravity', ['ppnonperiodic', 'pp', 'p3m', 'pm'], 'gravitational')
@cython.pheader(
# Arguments
method=str,
receivers=list,
suppliers=list,
ᔑdt=dict,
interaction_type=str,
printout='bint',
# Locals
extra_message=str,
force=str,
potential=str,
potential_specs=object, # PotentialInfo
quantity=str,
ᔑdt_key=tuple,
)
def gravity(method, receivers, suppliers, ᔑdt, interaction_type, printout):
force = 'gravity'
# Set up variables used by potential/grid (PM and P³M) methods
if method in {'pm', 'p3m'}:
potential_specs = get_potential_specs(force, method, receivers, suppliers)
# The gravitational potential is given by the Poisson equation
# ∇²φ = 4πGa²ρ = 4πGa**(-3*w_eff - 1)ϱ,
# summed over all suppliers.
# The component dependent quantity is then a²ρ.
# In the case of the long-range part only (for use with P³M),
# though the expression for the potential is altered, we still
# have φ ∝ a²ρ.
quantity = 'a²ρ'
# In the fluid description, the gravitational source term is
# ∂ₜJⁱ = ⋯ -a**(-3*w_eff)*(ϱ + c⁻²𝒫)*∂ⁱφ
# and so a**(-3*w_eff) should be integrated over the time step
# to get ΔJⁱ. In the particle description, the gravitational
# source term is
# ∂ₜmomⁱ = -mass*∂ⁱφ.
# In the general case of a changing mass, the current mass is
# given by mass*a**(-3*w_eff), and so again, a**(-3*w_eff)
# should be integrated over the time step
# in order to obtain Δmomⁱ.
ᔑdt_key = ('a**(-3*w_eff)', 'component')
# Compute gravity via one of the following methods
if method == 'pm':
# The particle-mesh method
if printout:
masterprint(
f'Executing gravitational interaction for {receivers[0].name} '
f'via the PM method ...'
if len(receivers) == 1 else (
'Executing gravitational interaction for {{{}}} via the PM method ...'
.format(', '.join([component.name for component in receivers]))
)
)
potential = 'gravity'
particle_mesh(
receivers, suppliers, potential_specs.gridsize, quantity, force, method, potential,
potential_specs.interpolation_order,
potential_specs.deconvolve.upstream, potential_specs.deconvolve.downstream,
potential_specs.interlace,
ᔑdt, ᔑdt_key,
)
if printout:
masterprint('done')
elif method == 'p3m':
# The particle-particle-mesh method
if printout:
extra_message = ''
if 𝔹['long' in interaction_type]:
extra_message = ' (long-range only)'
elif 𝔹['short' in interaction_type]:
extra_message = ' (short-range only)'
masterprint(
'Executing',
shortrange_progress_message(force, method, receivers, extra_message),
'...',
)
# The long-range PM part
if 𝔹['any' in interaction_type] or 𝔹['long' in interaction_type]:
potential = 'gravity long-range'
particle_mesh(
receivers, suppliers, potential_specs.gridsize, quantity, force, method, potential,
potential_specs.interpolation_order,
potential_specs.deconvolve.upstream, potential_specs.deconvolve.downstream,
potential_specs.interlace,
ᔑdt, ᔑdt_key,
)
# The short-range PP part
if 𝔹['any' in interaction_type] or 𝔹['short' in interaction_type]:
component_component(
force, receivers, suppliers, gravity_pairwise_shortrange, ᔑdt,
pairing_level='tile',
)
if printout:
masterprint('done')
elif method == 'pp':
# The particle-particle method with Ewald-periodicity
if printout:
masterprint(
'Executing',
shortrange_progress_message(force, method, receivers),
'...',
)
get_ewald_grid()
component_component(
force, receivers, suppliers, gravity_pairwise, ᔑdt,
pairing_level='domain',
)
if printout:
masterprint('done')
elif method == 'ppnonperiodic':
# The non-periodic particle-particle method
if printout:
masterprint(
'Executing',
shortrange_progress_message(force, method, receivers),
'...',
)
component_component(
force, receivers, suppliers, gravity_pairwise_nonperiodic, ᔑdt,
pairing_level='domain',
)
if printout:
masterprint('done')
elif master:
abort(f'gravity() was called with the "{method}" method')
# The lapse force
register('lapse', 'pm')
@cython.pheader(
# Arguments
method=str,
receivers=list,
suppliers=list,
ᔑdt=dict,
interaction_type=str,
printout='bint',
# Locals
force=str,
potential=str,
potential_specs=object, # PotentialInfo
quantity=str,
ᔑdt_key=tuple,
)
def lapse(method, receivers, suppliers, ᔑdt, interaction_type, printout):
force = 'lapse'
# While the receivers list stores the correct components,
# the suppliers store the lapse component as well as all the
# components also present as receivers. As the lapse force should be
# supplied solely from the lapse component, we must remove these
# additional components.
suppliers = oneway_force(receivers, suppliers)
if len(suppliers) == 0:
abort('The lapse() function got no suppliers, but expected a lapse component.')
elif len(suppliers) > 1:
abort(
f'The lapse() function got the following suppliers: {suppliers}, '
f'but expected only a lapse component.'
)
# For the lapse force, only the PM method is implemented
if method == 'pm':
if printout:
masterprint(
f'Executing lapse interaction for {receivers[0].name} via the PM method ...'
if len(receivers) == 1 else (
'Executing lapse interaction for {{{}}} via the PM method ...'
.format(', '.join([component.name for component in receivers]))
)
)
# Get interaction specifications
potential_specs = get_potential_specs(force, method, receivers, suppliers)
# As the lapse potential is implemented exactly analogous to the
# gravitational potential, it obeys the Poisson equation
# ∇²φ = 4πGa²ρ = 4πGa**(-3*w_eff - 1)ϱ,
# with φ the lapse potential and ρ, ϱ and w_eff belonging to the
# fictitious lapse species.
quantity = 'a²ρ'
potential = 'gravity'
# As the lapse potential is implemented exactly analogous to the
# gravitational potential, the momentum updates are again
# proportional to a**(-3*w_eff) integrated over the time step
# (see the gravity function for a more detailed explanation).
# The realised lapse potential is the common lapse potential,
# independent of the component in question which is to receive
# momentum updates. The actual lapse potential needed for a
# given component is obtained by multiplying the common lapse
# potential by Γ/H, where Γ is the decay rate of the component
# and H is the Hubble parameter. As these are time dependent,
# the full time step integral is then a**(-3*w_eff)*Γ/H.
ᔑdt_key = ('a**(-3*w_eff)*Γ/H', 'component')
# Execute the lapse interaction
particle_mesh(
receivers, suppliers, potential_specs.gridsize, quantity, force, method, potential,
potential_specs.interpolation_order,
potential_specs.deconvolve.upstream, potential_specs.deconvolve.downstream,
potential_specs.interlace,
ᔑdt, ᔑdt_key,
)
if printout:
masterprint('done')
elif master:
abort(f'lapse() was called with the "{method}" method')
| jmd-dk/concept | src/interactions.py | Python | gpl-3.0 | 142,305 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import os
import re
import subprocess
from netrender.utils import *
class AbstractVCS:
name = "ABSTRACT VCS"
def __init__(self):
pass
def update(self, info):
"""update(info)
Update a working copy to the specified revision.
        If the working copy doesn't exist, do a full get from the server
        to create it.
        [info] model.VersioningInfo instance, specifying the working path,
        remote path and version number."""
pass
def revision(self, path):
"""revision(path)
return the current revision of the specified working copy path"""
pass
def path(self, path):
"""path(path)
return the remote path of the specified working copy path"""
pass
class Subversion(AbstractVCS):
name = "Subversion"
description = "Use the Subversion version control system"
def __init__(self):
super().__init__()
        self.version_exp = re.compile("([0-9]+)")
self.path_exp = re.compile("URL: (.*)")
def update(self, info):
if not os.path.exists(info.wpath):
base, folder = os.path.split(info.wpath)
with DirectoryContext(base):
subprocess.call(["svn", "co", "%s@%s" % (info.rpath, str(info.revision)), folder])
else:
with DirectoryContext(info.wpath):
subprocess.call(["svn", "up", "--accept", "theirs-full", "-r", str(info.revision)])
def revision(self, path):
if not os.path.exists(path):
return
with DirectoryContext(path):
stdout = subprocess.check_output(["svnversion"])
match = self.version_exp.match(str(stdout, encoding="utf-8"))
if match:
return match.group(1)
def path(self, path):
if not os.path.exists(path):
return
with DirectoryContext(path):
stdout = subprocess.check_output(["svn", "info"])
match = self.path_exp.search(str(stdout, encoding="utf-8"))
if match:
return match.group(1)
class Git(AbstractVCS):
name = "Git"
description = "Use the Git distributed version control system"
def __init__(self):
super().__init__()
self.version_exp = re.compile("^commit (.*)")
def update(self, info):
if not os.path.exists(info.wpath):
base, folder = os.path.split(info.wpath)
with DirectoryContext(base):
subprocess.call(["git", "clone", "%s" % (info.rpath), folder])
with DirectoryContext(info.wpath):
subprocess.call(["git", "checkout", str(info.revision)])
def revision(self, path):
if not os.path.exists(path):
return
with DirectoryContext(path):
stdout = subprocess.check_output(["git", "show"])
match = self.version_exp.search(str(stdout, encoding="utf-8"))
if match:
return match.group(1)
def path(self, path):
if not os.path.exists(path):
return
# find something that could somehow work for git (fun times)
return path
SYSTEMS = {
Subversion.name: Subversion(),
Git.name: Git()
}
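# Example usage (illustrative): look up a backend by name and query it.
#   vcs = SYSTEMS["Subversion"]
#   rev = vcs.revision("/path/to/working/copy")  # None if path missing
#   url = vcs.path("/path/to/working/copy")      # None if path missing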
ITEMS = (
(Subversion.name, Subversion.name, Subversion.description),
(Git.name, Git.name, Git.description),
)
| Microvellum/Fluid-Designer | win64-vc/2.78/Python/bin/2.78/scripts/addons/netrender/versioning.py | Python | gpl-3.0 | 4,317 |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt4 (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x05\x5d\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x17\x00\x00\x00\x18\x08\x06\x00\x00\x00\x11\x7c\x66\x75\
\x00\x00\x03\xf0\x69\x43\x43\x50\x49\x43\x43\x20\x50\x72\x6f\x66\
\x69\x6c\x65\x00\x00\x38\x8d\x8d\x55\xdd\x6f\xdb\x54\x14\x3f\x89\
\x6f\x5c\xa4\x16\x3f\xa0\xb1\x8e\x0e\x15\x8b\xaf\x55\x53\x5b\xb9\
\x1b\x1a\xad\xc6\x06\x49\x93\xa5\xe9\x42\x1a\xb9\xcd\xd8\x2a\xa4\
\xc9\x75\x6e\x1a\x53\xd7\x36\xb6\xd3\x6d\x55\x9f\xf6\x02\x6f\x0c\
\xf8\x03\x80\xb2\x07\x1e\x90\x78\x42\x1a\x0c\xc4\xf6\xb2\xed\x01\
\xb4\x49\x53\x41\x15\xd5\x24\xa4\x3d\x74\xda\x40\x68\x93\xf6\x82\
\xaa\x70\xae\xaf\x53\xbb\x5d\xc6\xb8\x91\xaf\x7f\x39\xe7\x77\x3e\
\xef\xd1\x35\x40\xc7\x57\x9a\xe3\x98\x49\x19\x60\xde\xf2\x5d\x35\
\x9f\x91\x8f\x9f\x98\x96\x3b\x56\x21\x09\xcf\x41\x27\xf4\x40\xa7\
\xa6\x7b\x4e\xba\x5c\x2e\x02\x2e\xc6\x85\x47\xd6\xc3\x5f\x21\xc1\
\xde\x37\x07\xda\xeb\xff\x73\x75\x56\xa9\xa7\x03\x24\x9e\x42\x6c\
\x57\x3d\x7d\x1e\xf1\x69\x80\x94\xa9\x3b\xae\x0f\x20\xde\x46\xf9\
\xf0\x29\xdf\x41\xdc\xf1\x3c\xe2\x1d\x2e\x26\x88\x58\x61\x78\x96\
\xe3\x2c\xc3\x33\x1c\x1f\x0f\x38\x53\xea\x28\x62\x96\x8b\xa4\xd7\
\xb5\x2a\xe2\x25\xc4\xfd\x33\x31\xf9\x6c\x0c\xf3\x1c\x82\xb5\x23\
\x4f\x2d\xea\x1a\xba\xcc\x7a\x51\x76\xed\x9a\x61\xd2\x58\xba\x4f\
\x50\xff\xcf\x35\x6f\x36\x5a\xf1\x7a\xf1\xe9\xf2\xe6\x26\x8f\xe2\
\xbb\x8f\xd5\x5e\x77\xc7\xd4\x10\x7f\xae\x6b\xb9\x49\xc4\x2f\x23\
\xbe\xe6\xf8\x19\x26\x7f\x15\xf1\xbd\xc6\x5c\x25\x8d\x78\x2f\x40\
\xf2\x99\x9a\x7b\xa4\xc2\xf9\xc9\x37\x16\xeb\x53\xef\x20\xde\x89\
\xb8\x6a\xf8\x85\xa9\x50\xbe\x68\xcd\x94\x26\xb8\x6d\x72\x79\xce\
\x3e\xaa\x86\x9c\x6b\xba\x37\x8a\x3d\x83\x17\x11\xdf\xaa\xd3\x42\
\x91\xe7\x23\x40\x95\x66\x73\xac\x5f\x88\x7b\xeb\x8d\xb1\xd0\xbf\
\x30\xee\x2d\x4c\xe6\x5a\x7e\x16\xeb\xa3\x25\xee\x47\x70\xdf\xd3\
\xc6\xcb\x88\x7b\x10\x7f\xe8\xda\xea\x04\xcf\x59\x58\xa6\x66\x5e\
\xe5\xfe\x85\x2b\x8e\x5f\x0e\x73\x10\xd6\x2d\xb3\x54\xe4\x3e\x89\
\x44\xbd\xa0\xc6\x40\xee\xd7\xa7\xc6\xb8\x2d\x39\xe0\xe3\x21\x72\
\x5b\x32\x5d\x33\x8e\x14\x42\xfe\x92\x63\x06\xb3\x88\xb9\x91\xf3\
\x6e\x43\xad\x84\x9c\x1b\x9a\x9b\xcb\x73\x3f\xe4\x3e\xb5\x2a\xa1\
\xcf\xd4\xae\xaa\x96\x65\xbd\x1d\x44\x7c\x18\x8e\x25\x34\xa0\x60\
\xc3\x0c\xee\x3a\x58\xb0\x01\x32\xa8\x90\x87\x0c\xbe\x1d\x70\x51\
\x53\x03\x03\x4c\x94\x50\xd4\x52\x94\x18\x89\xa7\x61\x0e\x65\xed\
\x79\xe5\x80\xc3\x71\xc4\x98\x0d\xac\xd7\x99\x35\x46\x69\xcf\xe1\
\x11\xee\x84\x1c\x9b\x74\x13\x85\xec\xc7\xe7\x20\x29\x92\x43\x64\
\x98\x8c\x80\x4c\xde\x24\x6f\x91\xc3\x24\x8b\xd2\x11\x72\x70\xd3\
\xb6\x1c\x8b\xcf\x62\xdd\xd9\xf4\xf3\x3e\x34\xd0\x2b\xe3\x1d\x83\
\xcc\xb9\x9e\x46\x5f\x14\xef\xac\x7b\xd2\xd0\xaf\x7f\xf4\xf7\x16\
\x6b\xfb\x91\x9c\x69\x2b\x9f\x78\x07\xc0\xc3\x0e\xb4\x98\x03\xf1\
\xfa\xaf\x2e\xfd\xb0\x2b\xf2\xb1\x42\x2e\xbc\x7b\xb3\xeb\xea\x12\
\x4c\x3c\xa9\xbf\xa9\xdb\xa9\xf5\xd4\x0a\xee\xab\xa9\xb5\x88\x91\
\xfa\x3d\xb5\x86\xbf\x55\x48\x63\x6e\x66\x90\xd1\x3c\x3e\x46\x90\
\x87\x17\xcb\x20\x5e\xc3\x65\x7c\x7c\xd0\x70\xff\x03\x79\x76\x8c\
\xb7\x25\x62\xcd\x3a\xd7\x13\x69\x58\x27\xe8\x07\xa5\x87\x25\x38\
\xdb\x1f\x49\x95\xdf\x94\x3f\x95\x15\xe5\x0b\xe5\xbc\x72\x77\x5b\
\x97\xdb\x76\x49\xf8\x54\xf8\x56\xf8\x51\xf8\x4e\xf8\x5e\xf8\x19\
\x64\xe1\x92\x70\x59\xf8\x49\xb8\x22\x7c\x23\x5c\x8c\x9d\xd5\xe3\
\xe7\x63\xf3\xec\x83\x7a\x5b\xd5\x32\x4d\xbb\x5e\x53\x30\xa5\x8c\
\xb4\x5b\x7a\x49\xca\x4a\x2f\x48\xaf\x48\xc5\xc8\x9f\xd4\x2d\x0d\
\x49\x63\xd2\x1e\xd4\xec\xde\x3c\xb7\x78\xbc\x78\x2d\x06\x9c\xc0\
\xbd\xd5\xd5\xf6\xb1\x38\xaf\x82\x5a\x03\x4e\x05\x15\x78\x41\x87\
\x2d\x38\xb3\x6d\xfe\x43\x6b\xd2\x4b\x86\x48\x61\xdb\xd4\x0e\xb3\
\x59\x6e\x31\xc4\x9c\x98\x15\xd3\x20\x8b\x7b\xc5\x11\x71\x48\x1c\
\x67\xb8\x95\x9f\xb8\x07\x75\x23\xb8\xe7\xb6\x4c\x9d\xfe\x98\x0a\
\x68\x8c\x15\xaf\x73\x20\x98\x3a\x36\xab\xcc\x7a\x21\xd0\x79\x40\
\x7d\x7a\xda\x67\x17\xed\xa8\xed\x9c\x71\x8d\xd9\xba\x2f\xef\x53\
\x94\xd7\xe5\x34\x7e\xaa\xa8\x5c\xb0\xf4\xc1\x7e\x59\x33\x4d\x39\
\x50\x79\xb2\x4b\x3d\xea\x2e\xd0\xea\x20\xb0\xef\x20\xbf\xa2\x1f\
\xa8\xc1\xf7\x2d\xb1\xf3\x7a\x24\xf3\xdf\x06\x38\xf4\x17\xde\x59\
\x37\x22\xd9\x74\x03\xe0\x6b\x0f\xa0\xfb\xb5\x48\xd6\x87\x77\xe2\
\xb3\x9f\x01\x5c\x38\xa0\x37\xdc\x85\xf0\xce\x4f\x24\x7e\x01\xf0\
\x6a\xfb\xf7\xf1\x7f\x5d\x19\xbc\x9b\x6e\x35\x9b\x0f\xf0\xbe\xea\
\xf8\x04\x60\xe3\xe3\x66\xf3\x9f\xe5\x66\x73\xe3\x4b\xf4\xbf\x06\
\x70\xc9\xfc\x17\x59\x00\x71\x78\x12\xb5\xe1\x86\x00\x00\x01\x28\
\x49\x44\x41\x54\x48\x0d\xed\x93\xb1\x4b\x42\x51\x14\xc6\x9f\x65\
\x43\xb8\xd4\x22\xd1\x20\x08\x21\x08\x4e\xfe\x03\xae\x2d\x42\x4b\
\xe0\xd2\xe2\x1f\x21\x8d\xed\xb6\xb6\x04\x2d\x2d\x6e\x12\x81\xe0\
\xd2\xd0\xa2\x83\xe1\x22\x84\x04\x81\x8a\xae\x2e\xba\x8a\xfe\xbe\
\xf0\xc1\xe3\x75\x14\x6f\x38\xa5\x07\x7e\xdc\x7b\xbe\x7b\xee\xc7\
\xbd\xe7\xbe\xe7\x79\xfb\xf8\x57\x1d\x88\x84\x6e\x93\x27\xcf\x84\
\xb4\x60\xda\x23\xa9\x04\x05\x97\xf9\x33\xc5\x33\x18\xaf\xa0\xea\
\x62\x16\x35\x8a\x47\x68\x09\x43\x77\x96\x2c\x73\x17\x93\x0b\x8a\
\xaf\xe0\x14\xea\xd0\x00\xdd\xdc\x0c\xb5\x65\x60\xae\xfc\x16\xf5\
\x3e\x53\xf8\x86\x77\x90\x69\x07\x62\xf0\x13\xd6\xc9\x75\x8a\x87\
\xe5\x7a\x78\x28\x23\xf4\x40\x06\x8f\xf0\x06\x3a\xf9\x1c\xb2\xf0\
\x01\x05\x78\x02\xcf\x32\x3f\x42\x4f\x6b\xd1\x88\xe3\xa5\x76\xc9\
\x78\x06\xf7\x20\x63\x45\x1b\x8a\xd0\x57\x62\xc5\xa6\x6d\x29\xb1\
\x59\xa6\x27\x96\x89\xaf\x1d\xf8\x13\xc7\x51\xfb\x64\x7e\xb8\x6e\
\xdf\x5f\xcd\x3f\x31\xd5\x0f\x98\x0b\x98\xeb\x1d\x86\x70\xeb\x6b\
\x56\xcf\x55\x74\xe3\x17\x84\xc6\x09\xf9\x0b\xd4\xa0\x0b\x6a\x4f\
\x0b\xf4\xa5\xdc\x41\x1c\x5e\xc1\x0c\xf5\x5c\xd7\x5d\xc5\x57\x60\
\x57\x8a\xb9\x3e\x3d\xd5\xca\xbc\x09\xd7\xb0\xd5\x48\xe2\x76\xbe\
\x55\xc7\xbd\xd9\x0e\x76\x60\x01\x35\xca\x39\x8d\x7f\x34\x42\x37\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = "\
\x00\x07\
\x07\x3b\xe0\xb3\
\x00\x70\
\x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\
\x00\x16\
\x06\xa1\xb7\x19\
\x00\x45\
\x00\x6c\x00\x65\x00\x63\x00\x74\x00\x72\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x43\x00\x6f\x00\x6e\x00\x64\x00\x75\x00\x63\x00\x74\
\x00\x69\x00\x76\x00\x69\x00\x74\x00\x79\
\x00\x08\
\x0a\x61\x5a\xa7\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x46\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| NuttamonW/Archaeological | ElectricalConductivity/resources_rc.py | Python | gpl-3.0 | 6,837 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.com/license.html.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/.
from __future__ import with_statement
from datetime import datetime
from trac.admin import *
from trac.core import *
from trac.perm import PermissionSystem
from trac.resource import ResourceNotFound
from txomon.ticket import model
from trac.util import getuser
from trac.util.datefmt import utc, parse_date, format_date, format_datetime, \
get_datetime_format_hint, user_time
from trac.util.text import print_table, printout, exception_to_unicode
from trac.util.translation import _, N_, gettext
from trac.web.chrome import Chrome, add_notice, add_warning
# Date formats used by the trac-admin console commands defined below.
console_date_format = '%Y-%m-%d'
console_datetime_format = '%Y-%m-%d %H:%M:%S'
console_date_format_hint = 'YYYY-MM-DD'
class TicketAdminPanel(Component):
implements(IAdminPanelProvider, IAdminCommandProvider)
abstract = True
_label = (N_('(Undefined)'), N_('(Undefined)'))
    # i18n note: use gettext() whenever referring to the above as text labels,
# and don't use it whenever using them as field names (after
# a call to `.lower()`)
# IAdminPanelProvider methods
def get_admin_panels(self, req):
if 'TICKET_ADMIN' in req.perm:
yield ('ticket', _('Ticket System'), self._type,
gettext(self._label[1]))
def render_admin_panel(self, req, cat, page, version):
req.perm.require('TICKET_ADMIN')
# Trap AssertionErrors and convert them to TracErrors
try:
return self._render_admin_panel(req, cat, page, version)
except AssertionError, e:
raise TracError(e)
def _save_config(config, req, log):
"""Try to save the config, and display either a success notice or a
failure warning.
"""
try:
config.save()
add_notice(req, _('Your changes have been saved.'))
except Exception, e:
log.error('Error writing to trac.ini: %s', exception_to_unicode(e))
add_warning(req, _('Error writing to trac.ini, make sure it is '
'writable by the web server. Your changes have not '
'been saved.'))
class ComponentAdminPanel(TicketAdminPanel):
_type = 'components'
_label = (N_('Component'), N_('Components'))
# TicketAdminPanel methods
def _render_admin_panel(self, req, cat, page, component):
# Detail view?
if component:
comp = model.Component(self.env, component)
if req.method == 'POST':
if req.args.get('save'):
comp.name = name = req.args.get('name')
comp.owner = req.args.get('owner')
comp.description = req.args.get('description')
try:
comp.update()
except self.env.db_exc.IntegrityError:
raise TracError(_('The component "%(name)s" already '
'exists.', name=name))
add_notice(req, _('Your changes have been saved.'))
req.redirect(req.href.admin(cat, page))
elif req.args.get('cancel'):
req.redirect(req.href.admin(cat, page))
Chrome(self.env).add_wiki_toolbars(req)
data = {'view': 'detail', 'component': comp}
else:
default = self.config.get('ticket', 'default_component')
if req.method == 'POST':
# Add Component
if req.args.get('add') and req.args.get('name'):
name = req.args.get('name')
try:
comp = model.Component(self.env, name=name)
except ResourceNotFound:
comp = model.Component(self.env)
comp.name = name
if req.args.get('owner'):
comp.owner = req.args.get('owner')
comp.insert()
add_notice(req, _('The component "%(name)s" has been '
'added.', name=name))
req.redirect(req.href.admin(cat, page))
else:
if comp.name is None:
raise TracError(_("Invalid component name."))
raise TracError(_("Component %(name)s already exists.",
name=name))
# Remove components
elif req.args.get('remove'):
sel = req.args.get('sel')
if not sel:
raise TracError(_('No component selected'))
if not isinstance(sel, list):
sel = [sel]
with self.env.db_transaction:
for name in sel:
model.Component(self.env, name).delete()
add_notice(req, _("The selected components have been "
"removed."))
req.redirect(req.href.admin(cat, page))
# Set default component
elif req.args.get('apply'):
name = req.args.get('default')
if name and name != default:
self.log.info("Setting default component to %s", name)
self.config.set('ticket', 'default_component', name)
_save_config(self.config, req, self.log)
req.redirect(req.href.admin(cat, page))
data = {'view': 'list',
'components': model.Component.select(self.env),
'default': default}
if self.config.getbool('ticket', 'restrict_owner'):
perm = PermissionSystem(self.env)
def valid_owner(username):
return perm.get_user_permissions(username).get('TICKET_MODIFY')
data['owners'] = [username for username, name, email
in self.env.get_known_users()
if valid_owner(username)]
data['owners'].insert(0, '')
data['owners'].sort()
else:
data['owners'] = None
return 'admin_components.html', data
# IAdminCommandProvider methods
def get_admin_commands(self):
yield ('component list', '',
'Show available components',
None, self._do_list)
yield ('component add', '<name> <owner>',
'Add a new component',
self._complete_add, self._do_add)
yield ('component rename', '<name> <newname>',
'Rename a component',
self._complete_remove_rename, self._do_rename)
yield ('component remove', '<name>',
'Remove/uninstall a component',
self._complete_remove_rename, self._do_remove)
yield ('component chown', '<name> <owner>',
'Change component ownership',
self._complete_chown, self._do_chown)
def get_component_list(self):
return [c.name for c in model.Component.select(self.env)]
def get_user_list(self):
return [username for username, in
self.env.db_query("SELECT DISTINCT username FROM permission")]
def _complete_add(self, args):
if len(args) == 2:
return self.get_user_list()
def _complete_remove_rename(self, args):
if len(args) == 1:
return self.get_component_list()
def _complete_chown(self, args):
if len(args) == 1:
return self.get_component_list()
elif len(args) == 2:
return self.get_user_list()
def _do_list(self):
print_table([(c.name, c.owner)
for c in model.Component.select(self.env)],
[_('Name'), _('Owner')])
def _do_add(self, name, owner):
component = model.Component(self.env)
component.name = name
component.owner = owner
component.insert()
def _do_rename(self, name, newname):
component = model.Component(self.env, name)
component.name = newname
component.update()
def _do_remove(self, name):
model.Component(self.env, name).delete()
def _do_chown(self, name, owner):
component = model.Component(self.env, name)
component.owner = owner
component.update()
class MilestoneAdminPanel(TicketAdminPanel):
_type = 'milestones'
_label = (N_('Milestone'), N_('Milestones'))
# IAdminPanelProvider methods
def get_admin_panels(self, req):
if 'MILESTONE_VIEW' in req.perm:
return TicketAdminPanel.get_admin_panels(self, req)
return iter([])
# TicketAdminPanel methods
def _render_admin_panel(self, req, cat, page, milestone):
req.perm.require('MILESTONE_VIEW')
# Detail view?
if milestone:
mil = model.Milestone(self.env, milestone)
if req.method == 'POST':
if req.args.get('save'):
req.perm.require('MILESTONE_MODIFY')
mil.name = name = req.args.get('name')
mil.due = mil.completed = None
due = req.args.get('duedate', '')
if due:
mil.due = user_time(req, parse_date, due,
hint='datetime')
if req.args.get('completed', False):
completed = req.args.get('completeddate', '')
mil.completed = user_time(req, parse_date, completed,
hint='datetime')
if mil.completed > datetime.now(utc):
raise TracError(_('Completion date may not be in '
'the future'),
_('Invalid Completion Date'))
mil.description = req.args.get('description', '')
try:
mil.update()
except self.env.db_exc.IntegrityError:
raise TracError(_('The milestone "%(name)s" already '
'exists.', name=name))
add_notice(req, _('Your changes have been saved.'))
req.redirect(req.href.admin(cat, page))
elif req.args.get('cancel'):
req.redirect(req.href.admin(cat, page))
Chrome(self.env).add_wiki_toolbars(req)
data = {'view': 'detail', 'milestone': mil}
else:
default = self.config.get('ticket', 'default_milestone')
if req.method == 'POST':
# Add Milestone
if req.args.get('add') and req.args.get('name'):
req.perm.require('MILESTONE_CREATE')
name = req.args.get('name')
try:
mil = model.Milestone(self.env, name=name)
except ResourceNotFound:
mil = model.Milestone(self.env)
mil.name = name
if req.args.get('duedate'):
mil.due = user_time(req, parse_date,
req.args.get('duedate'),
hint='datetime')
mil.insert()
add_notice(req, _('The milestone "%(name)s" has been '
'added.', name=name))
req.redirect(req.href.admin(cat, page))
else:
if mil.name is None:
raise TracError(_('Invalid milestone name.'))
raise TracError(_("Milestone %(name)s already exists.",
name=name))
# Remove milestone
elif req.args.get('remove'):
req.perm.require('MILESTONE_DELETE')
sel = req.args.get('sel')
if not sel:
raise TracError(_('No milestone selected'))
if not isinstance(sel, list):
sel = [sel]
with self.env.db_transaction:
for name in sel:
mil = model.Milestone(self.env, name)
mil.delete(author=req.authname)
add_notice(req, _("The selected milestones have been "
"removed."))
req.redirect(req.href.admin(cat, page))
# Set default milestone
elif req.args.get('apply'):
name = req.args.get('default')
if name and name != default:
self.log.info("Setting default milestone to %s", name)
self.config.set('ticket', 'default_milestone', name)
_save_config(self.config, req, self.log)
req.redirect(req.href.admin(cat, page))
# Get ticket count
milestones = [
(milestone, self.env.db_query("""
SELECT COUNT(*) FROM ticket WHERE milestone=%s
""", (milestone.name,))[0][0])
for milestone in model.Milestone.select(self.env)]
data = {'view': 'list',
'milestones': milestones,
'default': default}
Chrome(self.env).add_jquery_ui(req)
data.update({
'datetime_hint': get_datetime_format_hint(req.lc_time),
})
return 'admin_milestones.html', data
# IAdminCommandProvider methods
def get_admin_commands(self):
yield ('milestone list', '',
"Show milestones",
None, self._do_list)
yield ('milestone add', '<name> [due]',
"Add milestone",
None, self._do_add)
yield ('milestone rename', '<name> <newname>',
"Rename milestone",
self._complete_name, self._do_rename)
yield ('milestone due', '<name> <due>',
"""Set milestone due date
The <due> date must be specified in the "%s" format.
Alternatively, "now" can be used to set the due date to the
current time. To remove the due date from a milestone, specify
an empty string ("").
""" % console_date_format_hint,
self._complete_name, self._do_due)
yield ('milestone completed', '<name> <completed>',
"""Set milestone complete date
The <completed> date must be specified in the "%s" format.
Alternatively, "now" can be used to set the completion date to
the current time. To remove the completion date from a
milestone, specify an empty string ("").
""" % console_date_format_hint,
self._complete_name, self._do_completed)
yield ('milestone remove', '<name>',
"Remove milestone",
self._complete_name, self._do_remove)
def get_milestone_list(self):
return [m.name for m in model.Milestone.select(self.env)]
def _complete_name(self, args):
if len(args) == 1:
return self.get_milestone_list()
def _do_list(self):
print_table([(m.name, m.due and
format_date(m.due, console_date_format),
m.completed and
format_datetime(m.completed, console_datetime_format))
for m in model.Milestone.select(self.env)],
[_("Name"), _("Due"), _("Completed")])
def _do_add(self, name, due=None):
milestone = model.Milestone(self.env)
milestone.name = name
if due is not None:
milestone.due = parse_date(due, hint='datetime')
milestone.insert()
def _do_rename(self, name, newname):
milestone = model.Milestone(self.env, name)
milestone.name = newname
milestone.update()
def _do_due(self, name, due):
milestone = model.Milestone(self.env, name)
milestone.due = due and parse_date(due, hint='datetime')
milestone.update()
def _do_completed(self, name, completed):
milestone = model.Milestone(self.env, name)
milestone.completed = completed and parse_date(completed,
hint='datetime')
milestone.update()
def _do_remove(self, name):
model.Milestone(self.env, name).delete(author=getuser())
class VersionAdminPanel(TicketAdminPanel):
_type = 'versions'
_label = (N_('Version'), N_('Versions'))
# TicketAdminPanel methods
def _render_admin_panel(self, req, cat, page, version):
# Detail view?
if version:
ver = model.Version(self.env, version)
if req.method == 'POST':
if req.args.get('save'):
ver.name = name = req.args.get('name')
if req.args.get('time'):
ver.time = user_time(req, parse_date,
req.args.get('time'),
hint='datetime')
else:
ver.time = None # unset
ver.description = req.args.get('description')
try:
ver.update()
except self.env.db_exc.IntegrityError:
raise TracError(_('The version "%(name)s" already '
'exists.', name=name))
add_notice(req, _('Your changes have been saved.'))
req.redirect(req.href.admin(cat, page))
elif req.args.get('cancel'):
req.redirect(req.href.admin(cat, page))
Chrome(self.env).add_wiki_toolbars(req)
data = {'view': 'detail', 'version': ver}
else:
default = self.config.get('ticket', 'default_version')
if req.method == 'POST':
# Add Version
if req.args.get('add') and req.args.get('name'):
name = req.args.get('name')
try:
ver = model.Version(self.env, name=name)
except ResourceNotFound:
ver = model.Version(self.env)
ver.name = name
if req.args.get('time'):
ver.time = user_time(req, parse_date,
req.args.get('time'),
hint='datetime')
ver.insert()
add_notice(req, _('The version "%(name)s" has been '
'added.', name=name))
req.redirect(req.href.admin(cat, page))
else:
if ver.name is None:
raise TracError(_("Invalid version name."))
raise TracError(_("Version %(name)s already exists.",
name=name))
# Remove versions
elif req.args.get('remove'):
sel = req.args.get('sel')
if not sel:
raise TracError(_("No version selected"))
if not isinstance(sel, list):
sel = [sel]
with self.env.db_transaction:
for name in sel:
ver = model.Version(self.env, name)
ver.delete()
add_notice(req, _("The selected versions have been "
"removed."))
req.redirect(req.href.admin(cat, page))
# Set default version
elif req.args.get('apply'):
name = req.args.get('default')
if name and name != default:
self.log.info("Setting default version to %s", name)
self.config.set('ticket', 'default_version', name)
_save_config(self.config, req, self.log)
req.redirect(req.href.admin(cat, page))
data = {'view': 'list',
'versions': model.Version.select(self.env),
'default': default}
Chrome(self.env).add_jquery_ui(req)
data.update({
'datetime_hint': get_datetime_format_hint(req.lc_time),
})
return 'admin_versions.html', data
# IAdminCommandProvider methods
def get_admin_commands(self):
yield ('version list', '',
"Show versions",
None, self._do_list)
yield ('version add', '<name> [time]',
"Add version",
None, self._do_add)
yield ('version rename', '<name> <newname>',
"Rename version",
self._complete_name, self._do_rename)
yield ('version time', '<name> <time>',
"""Set version date
The <time> must be specified in the "%s" format. Alternatively,
"now" can be used to set the version date to the current time.
To remove the date from a version, specify an empty string
("").
""" % console_date_format_hint,
self._complete_name, self._do_time)
yield ('version remove', '<name>',
"Remove version",
self._complete_name, self._do_remove)
def get_version_list(self):
return [v.name for v in model.Version.select(self.env)]
def _complete_name(self, args):
if len(args) == 1:
return self.get_version_list()
def _do_list(self):
print_table([(v.name,
v.time and format_date(v.time, console_date_format))
for v in model.Version.select(self.env)],
[_("Name"), _("Time")])
def _do_add(self, name, time=None):
version = model.Version(self.env)
version.name = name
if time is not None:
version.time = time and parse_date(time, hint='datetime')
version.insert()
def _do_rename(self, name, newname):
version = model.Version(self.env, name)
version.name = newname
version.update()
def _do_time(self, name, time):
version = model.Version(self.env, name)
version.time = time and parse_date(time, hint='datetime')
version.update()
def _do_remove(self, name):
model.Version(self.env, name).delete()
class AbstractEnumAdminPanel(TicketAdminPanel):
abstract = True
_type = 'unknown'
_enum_cls = None
# TicketAdminPanel methods
def _render_admin_panel(self, req, cat, page, path_info):
label = [gettext(each) for each in self._label]
data = {'label_singular': label[0], 'label_plural': label[1],
'type': self._type}
# Detail view?
if path_info:
enum = self._enum_cls(self.env, path_info)
if req.method == 'POST':
if req.args.get('save'):
enum.name = name = req.args.get('name')
try:
enum.update()
except self.env.db_exc.IntegrityError:
raise TracError(_('%(type)s value "%(name)s" already '
'exists', type=label[0], name=name))
add_notice(req, _("Your changes have been saved."))
req.redirect(req.href.admin(cat, page))
elif req.args.get('cancel'):
req.redirect(req.href.admin(cat, page))
data.update({'view': 'detail', 'enum': enum})
else:
default = self.config.get('ticket', 'default_%s' % self._type)
if req.method == 'POST':
# Add enum
if req.args.get('add') and req.args.get('name'):
name = req.args.get('name')
try:
enum = self._enum_cls(self.env, name=name)
except ResourceNotFound:
enum = self._enum_cls(self.env)
enum.name = name
enum.insert()
add_notice(req, _('The %(field)s value "%(name)s" has '
'been added.',
field=label[0], name=name))
req.redirect(req.href.admin(cat, page))
else:
if enum.name is None:
raise TracError(_("Invalid %(type)s value.",
type=label[0]))
raise TracError(_('%(type)s value "%(name)s" already '
'exists', type=label[0], name=name))
# Remove enums
elif req.args.get('remove'):
sel = req.args.get('sel')
if not sel:
raise TracError(_("No %s selected") % self._type)
if not isinstance(sel, list):
sel = [sel]
with self.env.db_transaction:
for name in sel:
self._enum_cls(self.env, name).delete()
add_notice(req, _("The selected %(field)s values have "
"been removed.", field=label[0]))
req.redirect(req.href.admin(cat, page))
# Apply changes
elif req.args.get('apply'):
changed = False
# Set default value
name = req.args.get('default')
if name and name != default:
self.log.info("Setting default %s to %s",
self._type, name)
self.config.set('ticket', 'default_%s' % self._type,
name)
try:
self.config.save()
changed = True
except Exception, e:
self.log.error("Error writing to trac.ini: %s",
exception_to_unicode(e))
add_warning(req,
_("Error writing to trac.ini, make "
"sure it is writable by the web "
"server. The default value has not "
"been saved."))
# Change enum values
order = dict([(str(int(key[6:])),
str(int(req.args.get(key)))) for key
in req.args.keys()
if key.startswith('value_')])
values = dict([(val, True) for val in order.values()])
if len(order) != len(values):
raise TracError(_("Order numbers must be unique"))
with self.env.db_transaction:
for enum in self._enum_cls.select(self.env):
new_value = order[enum.value]
if new_value != enum.value:
enum.value = new_value
enum.update()
changed = True
if changed:
add_notice(req, _("Your changes have been saved."))
req.redirect(req.href.admin(cat, page))
data.update(dict(enums=list(self._enum_cls.select(self.env)),
default=default, view='list'))
return 'admin_enums.html', data
# IAdminCommandProvider methods
_command_help = {
'list': 'Show possible ticket %s',
'add': 'Add a %s value option',
'change': 'Change a %s value',
'remove': 'Remove a %s value',
'order': 'Move a %s value up or down in the list',
}
def get_admin_commands(self):
enum_type = getattr(self, '_command_type', self._type)
label = tuple(each.lower() for each in self._label)
yield ('%s list' % enum_type, '',
self._command_help['list'] % label[1],
None, self._do_list)
yield ('%s add' % enum_type, '<value>',
self._command_help['add'] % label[0],
None, self._do_add)
yield ('%s change' % enum_type, '<value> <newvalue>',
self._command_help['change'] % label[0],
self._complete_change_remove, self._do_change)
yield ('%s remove' % enum_type, '<value>',
self._command_help['remove'] % label[0],
self._complete_change_remove, self._do_remove)
yield ('%s order' % enum_type, '<value> up|down',
self._command_help['order'] % label[0],
self._complete_order, self._do_order)
def get_enum_list(self):
return [e.name for e in self._enum_cls.select(self.env)]
def _complete_change_remove(self, args):
if len(args) == 1:
return self.get_enum_list()
def _complete_order(self, args):
if len(args) == 1:
return self.get_enum_list()
elif len(args) == 2:
return ['up', 'down']
def _do_list(self):
print_table([(e.name,) for e in self._enum_cls.select(self.env)],
[_('Possible Values')])
def _do_add(self, name):
enum = self._enum_cls(self.env)
enum.name = name
enum.insert()
def _do_change(self, name, newname):
enum = self._enum_cls(self.env, name)
enum.name = newname
enum.update()
def _do_remove(self, value):
self._enum_cls(self.env, value).delete()
def _do_order(self, name, up_down):
if up_down not in ('up', 'down'):
raise AdminCommandError(_("Invalid up/down value: %(value)s",
value=up_down))
direction = -1 if up_down == 'up' else 1
enum1 = self._enum_cls(self.env, name)
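        # Move the chosen enum one step, then hand its old slot to whichever
        # neighbour currently holds the target value; the for/else returns
        # early when no neighbour has it, i.e. we are already at the edge.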
enum1.value = int(float(enum1.value) + direction)
for enum2 in self._enum_cls.select(self.env):
if int(float(enum2.value)) == enum1.value:
enum2.value = int(float(enum2.value) - direction)
break
else:
return
with self.env.db_transaction:
enum1.update()
enum2.update()
class PriorityAdminPanel(AbstractEnumAdminPanel):
_type = 'priority'
_enum_cls = model.Priority
_label = (N_('Priority'), N_('Priorities'))
class ResolutionAdminPanel(AbstractEnumAdminPanel):
_type = 'resolution'
_enum_cls = model.Resolution
_label = (N_('Resolution'), N_('Resolutions'))
class SeverityAdminPanel(AbstractEnumAdminPanel):
_type = 'severity'
_enum_cls = model.Severity
_label = (N_('Severity'), N_('Severities'))
class TicketTypeAdminPanel(AbstractEnumAdminPanel):
_type = 'type'
_enum_cls = model.Type
_label = (N_('Ticket Type'), N_('Ticket Types'))
_command_type = 'ticket_type'
_command_help = {
'list': 'Show possible %s',
'add': 'Add a %s',
'change': 'Change a %s',
'remove': 'Remove a %s',
'order': 'Move a %s up or down in the list',
}
class TicketAdmin(Component):
"""trac-admin command provider for ticket administration."""
implements(IAdminCommandProvider)
# IAdminCommandProvider methods
def get_admin_commands(self):
yield ('ticket remove', '<number>',
'Remove ticket',
None, self._do_remove)
def _do_remove(self, number):
try:
number = int(number)
except ValueError:
raise AdminCommandError(_('<number> must be a number'))
with self.env.db_transaction:
model.Ticket(self.env, number).delete()
printout(_('Ticket #%(num)s and all associated data removed.',
num=number))
| nextview/medicticket | txomon/ticket/admin.py | Python | bsd-3-clause | 33,247 |
# Copyright (C) 2017 Martin Nilsson
# This file is part of the Memtran compiler.
#
# The Memtran compiler is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The Memtran compiler is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the Memtran compiler. If not, see http://www.gnu.org/licenses/ .
def enum(*sequential, **named): # for Python 2.7 compatibility, I guess
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
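# Illustrative usage (not part of the compiler proper): positional names map
# to consecutive integers and keyword names to explicit values, e.g.
#     Color = enum('RED', 'GREEN', BLUE=10)
#     Color.RED == 0, Color.GREEN == 1, Color.BLUE == 10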
Tok = enum(
'EOF', #
'LCURLYBRACKET', #
'RCURLYBRACKET', #
'LSQUAREBRACKET', #
'RSQUAREBRACKET', #
'LPAREN', #
'RPAREN', #
'COLON', #
'SEMICOLON', #
'PERIOD', #
'COMMA', #
'STRING', #
'IF', #
'ELSE', #
'FOR', #
'IN', #
'OVER', #
'RETURN', #
'INDEXOFFSET', #
'INDEXFACTOR', #
'LOOP', #
'MUT', #
'REF', #
'INLINE', #
'TO', #
'ASSIGNMENTOPERATOR', #
'CASE', #
'DEFAULT', #
'ANDSYMBOL', #
'ORSYMBOL', #
'TRUE', #
'FALSE', #
'FUN', #
'END', #
'IMPORT', #
'INTERNAL', #
'TYPE', #
'LABEL', #
'IDENTIFIER', #
'INTEGER', #
'FLOAT', #
'NILTYPE', #
'BOOL', #
'I8', #
'I16', #
'I32', #
'I64', #
'ISIZE', #
'U8', #
'U16', #
'U32', #
'U64', #
'USIZE', #
'F32', #
'F64', #
'PERCENT', #
'STAR', #
'PLUS', #
'SLASH', #
'MINUS', #
'LESSTHAN', #
'GREATERTHAN', #
'EXCLAMATION', #
'BACKTICK', #
'PERCENTASSIGNMENTOPERATOR', #
'STARASSIGNMENTOPERATOR', #
'PLUSASSIGNMENTOPERATOR', #
'SLASHASSIGNMENTOPERATOR', #
'MINUSASSIGNMENTOPERATOR', #
'LESSTHANOREQUALS', #
'GREATERTHANOREQUALS', #
'EQUALS', #
'EQUALSNOT', #
'BREAK', #
'CONTINUE', #
'SINGLEQUOTE', #
'TRIPLECOLON', #
'CONTENTTYPE', #
'PREFIXIMPORT', #
'CONSTRUAND',
'DOWNTO',
'REPEAT',
'TRASH',
'UNINITIALIZED',
'IFUPPERCASE',
'ELSEUPPERCASE',
'SWITCHUPPERCASE',
'CONTENTTYPEUPPERCASE',
'CASEUPPERCASE',
'DEFAULTUPPERCASE',
'SWITCH',
'BACKSLASH',
'DOUBLEPERIOD',
'TRIPLEPERIOD',
'DOUBLESINGLEQUOTE',
'ARR',
'TILDE',
'VBOX',
'ERRATIC'
)
class Token:
# public long lineNr;
# public long rowNr;
# public Tok kind;
# public String tokString;
def __init__(self, lineNr, rowNr, kind, tokString):
self.lineNr = lineNr
self.rowNr = rowNr
self.kind = kind
self.tokString = tokString
# def __init__(self, toBeCopied):
# self.lineNr = toBeCopied.lineNr
# self.rowNr = toBeCopied.rowNr
# self.kind = toBeCopied.kind
# self.tokString = toBeCopied.tokString
def print_it(self): # for testing purposes
if self.kind == Tok.EOF:
print("EOF", end='')
elif self.kind == Tok.LCURLYBRACKET:
print("{", end='')
elif self.kind == Tok.RCURLYBRACKET:
print("}", end='')
elif self.kind == Tok.LSQUAREBRACKET:
print("[", end='')
elif self.kind == Tok.RSQUAREBRACKET:
print("]", end='')
elif self.kind == Tok.LPAREN:
print("(", end='')
elif self.kind == Tok.RPAREN:
print(")", end='')
elif self.kind == Tok.COLON:
print(":", end='')
elif self.kind == Tok.SEMICOLON:
print(";", end='')
elif self.kind == Tok.PERIOD:
print(".", end='')
elif self.kind == Tok.COMMA:
print(",", end='')
elif self.kind == Tok.STRING:
print("\"" + self.tokString + "\"", end='') # will print newlines and cr:s and escape chars in a funny way though
elif self.kind == Tok.IF:
print("if", end='')
elif self.kind == Tok.ELSE:
print("else", end='')
elif self.kind == Tok.FOR:
print("for", end='')
elif self.kind == Tok.IN:
print("in", end='')
elif self.kind == Tok.OVER:
print("over", end='')
elif self.kind == Tok.RETURN:
print("return", end='')
elif self.kind == Tok.INDEXOFFSET:
print("indexoffset", end='')
elif self.kind == Tok.INDEXFACTOR:
print("indexfactor", end='')
elif self.kind == Tok.LOOP:
print("loop", end='')
elif self.kind == Tok.MUT:
print("mu", end='')
elif self.kind == Tok.REF:
print("ref", end='')
elif self.kind == Tok.INLINE:
print("inline", end='')
elif self.kind == Tok.TO:
print("to", end='')
elif self.kind == Tok.ASSIGNMENTOPERATOR:
print("=", end='')
elif self.kind == Tok.CASE:
print("case", end='')
elif self.kind == Tok.DEFAULT:
print("default", end='')
elif self.kind == Tok.ANDSYMBOL:
print("&&", end='')
elif self.kind == Tok.ORSYMBOL:
print("||", end='')
elif self.kind == Tok.TRUE:
print("true", end='')
elif self.kind == Tok.FALSE:
print("false", end='')
elif self.kind == Tok.FUN:
print("fn", end='')
elif self.kind == Tok.END:
print("end", end='')
elif self.kind == Tok.IMPORT:
print("import", end='')
elif self.kind == Tok.INTERNAL:
print("internal", end='')
elif self.kind == Tok.TYPE:
print("type", end='')
elif self.kind == Tok.LABEL:
print("label", end='')
elif self.kind == Tok.IDENTIFIER:
print("$" + self.tokString, end='')
elif self.kind == Tok.INTEGER:
print("#" + self.tokString, end='')
elif self.kind == Tok.FLOAT:
print("##" + self.tokString, end='')
elif self.kind == Tok.NILTYPE:
print("nil", end='')
elif self.kind == Tok.BOOL:
print("bool", end='')
elif self.kind == Tok.I8:
print("i8", end='')
elif self.kind == Tok.I16:
print("i16", end='')
elif self.kind == Tok.I32:
print("i32", end='')
elif self.kind == Tok.I64:
print("i64", end='')
elif self.kind == Tok.ISIZE:
print("int", end='')
elif self.kind == Tok.U8:
print("u8", end='')
elif self.kind == Tok.U16:
print("u16", end='')
elif self.kind == Tok.U32:
print("u32", end='')
elif self.kind == Tok.U64:
print("u64", end='')
elif self.kind == Tok.USIZE:
print("uint", end='')
elif self.kind == Tok.F32:
print("f32", end='')
elif self.kind == Tok.F64:
print("f64", end='')
elif self.kind == Tok.PERCENT:
print("%", end='')
elif self.kind == Tok.STAR:
print("*", end='')
elif self.kind == Tok.PLUS:
print("+", end='')
elif self.kind == Tok.SLASH:
print("/", end='')
elif self.kind == Tok.MINUS:
print("-", end='')
elif self.kind == Tok.LESSTHAN:
print("<", end='')
elif self.kind == Tok.GREATERTHAN:
print(">", end='')
elif self.kind == Tok.EXCLAMATION:
print("!", end='')
elif self.kind == Tok.BACKTICK:
print("`", end='')
elif self.kind == Tok.PERCENTASSIGNMENTOPERATOR:
print("%=", end='')
elif self.kind == Tok.STARASSIGNMENTOPERATOR:
print("*=", end='')
elif self.kind == Tok.PLUSASSIGNMENTOPERATOR:
print("+=", end='')
elif self.kind == Tok.SLASHASSIGNMENTOPERATOR:
print("/=", end='')
elif self.kind == Tok.MINUSASSIGNMENTOPERATOR:
print("-=", end='')
elif self.kind == Tok.LESSTHANOREQUALS:
print("<=", end='')
elif self.kind == Tok.GREATERTHANOREQUALS:
print(">=", end='')
elif self.kind == Tok.EQUALS:
print("==", end='')
elif self.kind == Tok.EQUALSNOT:
print("!=", end='')
elif self.kind == Tok.BREAK:
print("break", end='')
elif self.kind == Tok.CONTINUE:
print("continue", end='')
elif self.kind == Tok.SINGLEQUOTE:
print("'", end='')
elif self.kind == Tok.TRIPLECOLON:
print(":::", end='')
elif self.kind == Tok.CONTENTTYPE:
print("storetype", end='')
elif self.kind == Tok.PREFIXIMPORT:
print("prefiximport", end='')
elif self.kind == Tok.CONSTRUAND:
print("construand", end='')
elif self.kind == Tok.DOWNTO:
print("downto", end='')
elif self.kind == Tok.REPEAT:
print("repeat", end='')
elif self.kind == Tok.TRASH:
print("trash", end='')
elif self.kind == Tok.UNINITIALIZED:
print("uninitialized", end='');
elif self.kind == Tok.IFUPPERCASE:
print("IF", end='')
elif self.kind == Tok.ELSEUPPERCASE:
print("ELSE", end='')
elif self.kind == Tok.SWITCHUPPERCASE:
print("SWITCH", end='')
elif self.kind == Tok.CONTENTTYPEUPPERCASE:
print("STORETYPE", end='')
elif self.kind == Tok.CASEUPPERCASE:
print("CASE", end='')
elif self.kind == Tok.DEFAULTUPPERCASE:
print("DEFAULT", end='')
elif self.kind == Tok.SWITCH:
print("switch", end='')
elif self.kind == Tok.BACKSLASH:
print("\\", end='')
elif self.kind == Tok.DOUBLEPERIOD:
print("..", end='')
elif self.kind == Tok.TRIPLEPERIOD:
print("...", end='')
elif self.kind == Tok.DOUBLESINGLEQUOTE:
print("''", end='')
elif self.kind == Tok.ARR:
print("arr", end='')
elif self.kind == Tok.TILDE:
print("~", end='')
elif self.kind == Tok.VBOX:
print("vbox", end='')
elif self.kind == Tok.ERRATIC:
print("ERRATIC", end='')
else:
print("Token not found. Should not happen.");
| LJMNilsson/memtran | src/tokens.py | Python | gpl-3.0 | 12,189 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('CORE', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='SavedCriteria',
fields=[
('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)),
('name', models.CharField(verbose_name='name', max_length=100)),
('modelname', models.CharField(verbose_name='model', max_length=100)),
('criteria', models.TextField(verbose_name='criteria', blank=True)),
],
options={
'verbose_name': 'Saved criteria',
'default_permissions': [],
'verbose_name_plural': 'Saved criterias',
},
),
]
| Lucterios2/core | lucterios/CORE/migrations/0002_savedcriteria.py | Python | gpl-3.0 | 904 |
from pylibftdi import USB_PID_LIST, SerialDevice
# 0x6015 is the USB product ID of FTDI's FT-X series parts (e.g. FT230X),
# which pylibftdi's default PID list does not include.
USB_PID_LIST.append(0x6015)
BITMODE_CBUS = 0x20  # FTDI CBUS bit-bang mode
dev = SerialDevice()
dev.baudrate = 46875
# programming voltage
dev.rts = 1
# reset
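# In CBUS bit-bang mode the upper nibble of the mask picks which CBUS pins are
# outputs and the lower nibble their levels, so 0x40 drives CBUS2 low
# (presumably the target's reset line, per the comment above).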
dev.ftdi_fn.ftdi_set_bitmode(0x40, BITMODE_CBUS)
dev.close() | Noah1989/pyl08 | pyl08.py | Python | mit | 273 |
# Copyright (c) 2011 Tencent Inc.
# All rights reserved.
#
# Author: Michaelpeng <[email protected]>
# Date: October 20, 2011
"""
This is the test module for java_jar target.
"""
import blade_test
class TestJavaJar(blade_test.TargetTest):
"""Test java_jar """
def setUp(self):
"""setup method. """
self.doSetUp('test_java_jar/java', ':poppy_java_client',
generate_php=False)
self.upper_target_path = 'test_java_jar'
def testLoadBuildsNotNone(self):
"""Test direct targets and all command targets are not none. """
pass
def testGenerateRules(self):
"""Test that rules are generated correctly. """
self.all_targets = self.blade.analyze_targets()
self.rules_buf = self.blade.generate_build_rules()
swig_library = (self.upper_target_path, 'poppy_client')
java_client = (self.target_path, 'poppy_java_client')
proto_library = (self.upper_target_path, 'rpc_option_proto')
self.command_file = 'cmds.tmp'
self.assertTrue(swig_library in self.all_targets.keys())
self.assertTrue(java_client in self.all_targets.keys())
self.assertTrue(proto_library in self.all_targets.keys())
self.assertTrue(self.dryRun())
com_proto_cpp_option = ''
com_proto_java_option = ''
com_proto_cpp_meta = ''
com_proto_java_meta = ''
com_proto_option_cc = ''
com_proto_meta_cc = ''
com_swig_python = ''
com_swig_java = ''
com_swig_python_cxx = ''
com_swig_java_cxx = ''
swig_python_so = ''
swig_java_so = ''
java_com_line = ''
java_so_line = ''
jar_line = ''
java_com_idx = 0
java_so_idx = 0
jar_idx = 0
index = 0
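        # Scan the dry-run output, remembering each interesting command line
        # (and its position) so the assertions below can check both the flags
        # used and the relative ordering of the javac / .so link / jar steps.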
for line in self.scons_output:
index += 1
if 'protobuf/bin/protoc' in line:
if 'cpp_out' in line:
if 'rpc_option.proto' in line:
com_proto_cpp_option = line
elif 'rpc_meta_info.proto' in line:
com_proto_cpp_meta = line
if 'java_out' in line:
if 'rpc_option.proto' in line:
com_proto_java_option = line
elif 'rpc_meta_info.proto' in line:
com_proto_java_meta = line
if 'rpc_option.pb.cc.o -c' in line:
com_proto_option_cc = line
if 'rpc_meta_info.pb.cc.o -c' in line:
com_proto_meta_cc = line
if 'swig -python' in line:
com_swig_python = line
if 'swig -java' in line:
com_swig_java = line
if 'poppy_client_pywrap.cxx.o -c' in line:
com_swig_python_cxx = line
if 'poppy_client_javawrap.cxx.o -c' in line:
com_swig_java_cxx = line
if 'javac -classpath' in line:
java_com_line = line
java_com_idx = index
if 'libpoppy_client_java.so -m64' in line:
java_so_line = line
java_so_idx = index
if 'jar cf' in line:
jar_line = line
jar_idx = index
self.assertTrue(com_proto_cpp_option)
self.assertTrue(com_proto_cpp_meta)
self.assertTrue(com_proto_java_option)
self.assertTrue(com_proto_java_meta)
self.assertTrue('-fPIC' in com_proto_option_cc)
self.assertTrue('-Wall -Wextra' not in com_proto_option_cc)
self.assertTrue('-Wframe-larger-than=' not in com_proto_option_cc)
self.assertTrue('-Werror=overloaded-virtual' not in com_proto_option_cc)
self.assertTrue('-fPIC' in com_proto_meta_cc)
self.assertTrue('poppy_client_pywrap.cxx' in com_swig_python)
self.assertTrue('poppy_client_javawrap.cxx' in com_swig_java)
self.assertTrue('-fno-omit-frame-pointer' in com_swig_python_cxx)
self.assertTrue('-mcx16 -pipe -g' in com_swig_python_cxx)
self.assertTrue('-DNDEBUG -D_FILE_OFFSET_BITS' in com_swig_python_cxx)
self.assertTrue('-fno-omit-frame-pointer' in com_swig_java_cxx)
self.assertTrue('-mcx16 -pipe -g' in com_swig_java_cxx)
self.assertTrue('-DNDEBUG -D_FILE_OFFSET_BITS' in com_swig_java_cxx)
self.assertTrue(java_com_line)
self.assertTrue(java_so_line)
self.assertTrue(jar_line)
self.assertTrue('test_java_jar/java/lib/junit.jar' in java_com_line)
self.assertTrue('com/soso/poppy/swig/*.java' in java_com_line)
self.assertTrue('com/soso/poppy/*.java' in java_com_line)
whole_archive = ('--whole-archive build64_release/test_java_jar/'
'librpc_meta_info_proto.a build64_release/test_java_jar/'
'librpc_option_proto.a -Wl,--no-whole-archive')
self.assertTrue(whole_archive in java_so_line)
self.assertTrue(jar_idx > java_com_idx)
self.assertTrue(jar_idx > java_so_idx)
if __name__ == '__main__':
blade_test.run(TestJavaJar)
| Lunewcome/typhoon-blade | src/test/java_jar_test.py | Python | bsd-3-clause | 5,167 |
"""
Copyright (C) 2014, Jaguar Land Rover
This program is licensed under the terms and conditions of the
Mozilla Public License, version 2.0. The full text of the
Mozilla Public License is at https://www.mozilla.org/MPL/2.0/
Maintainer: Rudolf Streif ([email protected])
"""
from .base import *
| dvthiriez/rvi_backend | config/settings/local.py | Python | mpl-2.0 | 310 |
#
# Copyright (C) 2011 EADS France, Fabrice Desclaux <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# These module implements Miasm IR components and basic operations related.
# IR components are :
# - ExprInt
# - ExprId
# - ExprAff
# - ExprCond
# - ExprMem
# - ExprOp
# - ExprSlice
# - ExprCompose
#
import warnings
import itertools
from miasm2.expression.modint import mod_size2uint, is_modint, size2mask, \
define_uint
from miasm2.core.graph import DiGraph
# Define tokens
TOK_INF = "<"
TOK_INF_SIGNED = TOK_INF + "s"
TOK_INF_UNSIGNED = TOK_INF + "u"
TOK_INF_EQUAL = "<="
TOK_INF_EQUAL_SIGNED = TOK_INF_EQUAL + "s"
TOK_INF_EQUAL_UNSIGNED = TOK_INF_EQUAL + "u"
TOK_EQUAL = "=="
TOK_POS = "pos"
TOK_POS_STRICT = "Spos"
# Hashing constants
EXPRINT = 1
EXPRID = 2
EXPRAFF = 3
EXPRCOND = 4
EXPRMEM = 5
EXPROP = 6
EXPRSLICE = 5
EXPRCOMPOSE = 5
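# Note: EXPRSLICE and EXPRCOMPOSE reuse the value 5, colliding with EXPRMEM.
# These constants only salt the _exprhash() computations, so the collision is
# harmless, but the values are not unique per expression type.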
def visit_chk(visitor):
"Function decorator launching callback on Expression visit"
def wrapped(expr, callback, test_visit=lambda x: True):
if (test_visit is not None) and (not test_visit(expr)):
return expr
expr_new = visitor(expr, callback, test_visit)
if expr_new is None:
return None
expr_new2 = callback(expr_new)
return expr_new2
return wrapped
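# Illustrative sketch: visit_chk recurses into an expression's sons first and
# only then hands the rebuilt node to the callback, so the callback sees every
# sub-expression bottom-up. Renaming a register, for instance, can be done as:
#     expr.visit(lambda e: ExprId('ECX', e.size) if e.is_id('EAX') else e)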
# Expression display
class DiGraphExpr(DiGraph):
"""Enhanced graph for Expression diplay
Expression are displayed as a tree with node and edge labeled
with only relevant information"""
def node2str(self, node):
if isinstance(node, ExprOp):
return node.op
elif isinstance(node, ExprId):
return node.name
elif isinstance(node, ExprMem):
return "@%d" % node.size
elif isinstance(node, ExprCompose):
return "{ %d }" % node.size
elif isinstance(node, ExprCond):
return "? %d" % node.size
elif isinstance(node, ExprSlice):
return "[%d:%d]" % (node.start, node.stop)
return str(node)
    def edge2str(self, nfrom, nto):
        if isinstance(nfrom, ExprCompose):
            # Compose arguments are laid out LSB first: walk them, summing
            # sizes, to recover the bit interval occupied by the son @nto
            start = 0
            for arg in nfrom.args:
                if arg == nto:
                    return "[%s, %s]" % (start, start + arg.size)
                start += arg.size
elif isinstance(nfrom, ExprCond):
if nfrom.cond == nto:
return "?"
elif nfrom.src1 == nto:
return "True"
elif nfrom.src2 == nto:
return "False"
return ""
# IR definitions
class Expr(object):
"Parent class for Miasm Expressions"
__slots__ = ["__hash", "__repr", "__size"]
all_exprs = set()
args2expr = {}
canon_exprs = set()
use_singleton = True
def set_size(self, _):
raise ValueError('size is not mutable')
def __init__(self):
self.__hash = None
self.__repr = None
self.__size = None
size = property(lambda self: self.__size)
@staticmethod
def get_object(expr_cls, args):
if not expr_cls.use_singleton:
return object.__new__(expr_cls, args)
expr = Expr.args2expr.get((expr_cls, args))
if expr is None:
expr = object.__new__(expr_cls, args)
Expr.args2expr[(expr_cls, args)] = expr
return expr
def get_is_canon(self):
return self in Expr.canon_exprs
def set_is_canon(self, value):
assert value is True
Expr.canon_exprs.add(self)
is_canon = property(get_is_canon, set_is_canon)
# Common operations
def __str__(self):
raise NotImplementedError("Abstract Method")
def __getitem__(self, i):
if not isinstance(i, slice):
raise TypeError("Expression: Bad slice: %s" % i)
start, stop, step = i.indices(self.size)
if step != 1:
raise ValueError("Expression: Bad slice: %s" % i)
return ExprSlice(self, start, stop)
def get_size(self):
raise DeprecationWarning("use X.size instead of X.get_size()")
def is_function_call(self):
"""Returns true if the considered Expr is a function call
"""
return False
def __repr__(self):
if self.__repr is None:
self.__repr = self._exprrepr()
return self.__repr
def __hash__(self):
if self.__hash is None:
self.__hash = self._exprhash()
return self.__hash
def __eq__(self, other):
if self is other:
return True
elif self.use_singleton:
# In case of Singleton, pointer comparison is sufficient
# Avoid computation of hash and repr
return False
if self.__class__ is not other.__class__:
return False
if hash(self) != hash(other):
return False
return repr(self) == repr(other)
def __ne__(self, other):
return not self.__eq__(other)
def __add__(self, other):
return ExprOp('+', self, other)
def __sub__(self, other):
return ExprOp('+', self, ExprOp('-', other))
def __div__(self, other):
return ExprOp('/', self, other)
def __mod__(self, other):
return ExprOp('%', self, other)
def __mul__(self, other):
return ExprOp('*', self, other)
def __lshift__(self, other):
return ExprOp('<<', self, other)
def __rshift__(self, other):
return ExprOp('>>', self, other)
def __xor__(self, other):
return ExprOp('^', self, other)
def __or__(self, other):
return ExprOp('|', self, other)
def __and__(self, other):
return ExprOp('&', self, other)
def __neg__(self):
return ExprOp('-', self)
def __pow__(self, other):
return ExprOp("**", self, other)
def __invert__(self):
return ExprOp('^', self, self.mask)
def copy(self):
"Deep copy of the expression"
return self.visit(lambda x: x)
def __deepcopy__(self, _):
return self.copy()
def replace_expr(self, dct=None):
"""Find and replace sub expression using dct
@dct: dictionary of Expr -> *
"""
if dct is None:
dct = {}
def my_replace(expr, dct):
if expr in dct:
return dct[expr]
return expr
return self.visit(lambda expr: my_replace(expr, dct))
def canonize(self):
"Canonize the Expression"
def must_canon(expr):
return not expr.is_canon
def canonize_visitor(expr):
if expr.is_canon:
return expr
if isinstance(expr, ExprOp):
if expr.is_associative():
# ((a+b) + c) => (a + b + c)
args = []
for arg in expr.args:
if isinstance(arg, ExprOp) and expr.op == arg.op:
args += arg.args
else:
args.append(arg)
args = canonize_expr_list(args)
new_e = ExprOp(expr.op, *args)
else:
new_e = expr
else:
new_e = expr
new_e.is_canon = True
return new_e
return self.visit(canonize_visitor, must_canon)
def msb(self):
"Return the Most Significant Bit"
return self[self.size - 1:self.size]
def zeroExtend(self, size):
"""Zero extend to size
@size: int
"""
assert self.size <= size
if self.size == size:
return self
ad_size = size - self.size
return ExprCompose(self, ExprInt(0, ad_size))
def signExtend(self, size):
"""Sign extend to size
@size: int
"""
assert self.size <= size
if self.size == size:
return self
ad_size = size - self.size
return ExprCompose(self,
ExprCond(self.msb(),
ExprInt(size2mask(ad_size), ad_size),
ExprInt(0, ad_size)))
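    # Illustrative: for an 8-bit value v, v.signExtend(16) builds the compose
    #     {v, v.msb() ? 0xFF : 0x00}
    # i.e. the original bits followed by a fill selected by the sign bit.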
def graph_recursive(self, graph):
"""Recursive method used by graph
@graph: miasm2.core.graph.DiGraph instance
Update @graph instance to include sons
This is an Abstract method"""
raise ValueError("Abstract method")
def graph(self):
"""Return a DiGraph instance standing for Expr tree
Instance's display functions have been override for better visibility
Wrapper on graph_recursive"""
# Create recursively the graph
graph = DiGraphExpr()
self.graph_recursive(graph)
return graph
def set_mask(self, value):
raise ValueError('mask is not mutable')
mask = property(lambda self: ExprInt(-1, self.size))
def is_int(self, value=None):
return False
def is_id(self, name=None):
return False
def is_aff(self):
return False
def is_cond(self):
return False
def is_mem(self):
return False
def is_op(self, op=None):
return False
def is_slice(self, start=None, stop=None):
return False
def is_compose(self):
return False
def is_op_segm(self):
"""Returns True if is ExprOp and op == 'segm'"""
return False
def is_mem_segm(self):
"""Returns True if is ExprMem and ptr is_op_segm"""
return False
class ExprInt(Expr):
"""An ExprInt represent a constant in Miasm IR.
Some use cases:
- Constant 0x42
- Constant -0x30
- Constant 0x12345678 on 32bits
"""
__slots__ = Expr.__slots__ + ["__arg"]
def __init__(self, arg, size):
"""Create an ExprInt from a modint or num/size
@arg: 'intable' number
@size: int size"""
super(ExprInt, self).__init__()
# Work is done in __new__
size = property(lambda self: self.__size)
arg = property(lambda self: self.__arg)
def __reduce__(self):
state = int(self.__arg), self.__size
return self.__class__, state
def __new__(cls, arg, size):
"""Create an ExprInt from a modint or num/size
@arg: 'intable' number
@size: int size"""
if is_modint(arg):
assert size == arg.size
# Avoid a common blunder
assert not isinstance(arg, ExprInt)
# Ensure arg is always a moduint
arg = int(arg)
if size not in mod_size2uint:
define_uint(size)
arg = mod_size2uint[size](arg)
# Get the Singleton instance
expr = Expr.get_object(cls, (arg, size))
# Save parameters (__init__ is called with parameters unchanged)
expr.__arg = arg
expr.__size = expr.__arg.size
return expr
def __get_int(self):
"Return self integer representation"
return int(self.__arg & size2mask(self.__size))
def __str__(self):
if self.__arg < 0:
return str("-0x%X" % (- self.__get_int()))
else:
return str("0x%X" % self.__get_int())
def get_r(self, mem_read=False, cst_read=False):
if cst_read:
return set([self])
else:
return set()
def get_w(self):
return set()
def _exprhash(self):
return hash((EXPRINT, self.__arg, self.__size))
def _exprrepr(self):
return "%s(0x%X, %d)" % (self.__class__.__name__, self.__get_int(),
self.__size)
def __contains__(self, expr):
return self == expr
@visit_chk
def visit(self, callback, test_visit=None):
return self
def copy(self):
return ExprInt(self.__arg, self.__size)
def depth(self):
return 1
def graph_recursive(self, graph):
graph.add_node(self)
def __int__(self):
return int(self.arg)
def __long__(self):
return long(self.arg)
def is_int(self, value=None):
if value is not None and self.__arg != value:
return False
return True
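# Usage sketch: with the default singleton machinery (use_singleton = True),
# structurally identical constants resolve to the very same object, e.g.
#     ExprInt(0x42, 32) is ExprInt(0x42, 32)   # -> True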
class ExprId(Expr):
"""An ExprId represent an identifier in Miasm IR.
Some use cases:
- EAX register
- 'start' offset
- variable v1
"""
__slots__ = Expr.__slots__ + ["__name"]
def __init__(self, name, size=32):
"""Create an identifier
@name: str, identifier's name
@size: int, identifier's size
"""
super(ExprId, self).__init__()
self.__name, self.__size = name, size
size = property(lambda self: self.__size)
name = property(lambda self: self.__name)
def __reduce__(self):
state = self.__name, self.__size
return self.__class__, state
def __new__(cls, name, size=32):
return Expr.get_object(cls, (name, size))
def __str__(self):
return str(self.__name)
def get_r(self, mem_read=False, cst_read=False):
return set([self])
def get_w(self):
return set([self])
def _exprhash(self):
return hash((EXPRID, self.__name, self.__size))
def _exprrepr(self):
return "%s(%r, %d)" % (self.__class__.__name__, self.__name, self.__size)
def __contains__(self, expr):
return self == expr
@visit_chk
def visit(self, callback, test_visit=None):
return self
def copy(self):
return ExprId(self.__name, self.__size)
def depth(self):
return 1
def graph_recursive(self, graph):
graph.add_node(self)
def is_id(self, name=None):
if name is not None and self.__name != name:
return False
return True
class ExprAff(Expr):
"""An ExprAff represent an affection from an Expression to another one.
Some use cases:
- var1 <- 2
"""
__slots__ = Expr.__slots__ + ["__dst", "__src"]
def __init__(self, dst, src):
"""Create an ExprAff for dst <- src
@dst: Expr, affectation destination
@src: Expr, affectation source
"""
# dst & src must be Expr
assert isinstance(dst, Expr)
assert isinstance(src, Expr)
super(ExprAff, self).__init__()
if dst.size != src.size:
raise ValueError(
"sanitycheck: ExprAff args must have same size! %s" %
([(str(arg), arg.size) for arg in [dst, src]]))
self.__size = self.dst.size
size = property(lambda self: self.__size)
dst = property(lambda self: self.__dst)
src = property(lambda self: self.__src)
def __reduce__(self):
state = self.__dst, self.__src
return self.__class__, state
def __new__(cls, dst, src):
if isinstance(dst, ExprSlice):
# Complete the source with missing slice parts
new_dst = dst.arg
rest = [(ExprSlice(dst.arg, r[0], r[1]), r[0], r[1])
for r in dst.slice_rest()]
all_a = [(src, dst.start, dst.stop)] + rest
all_a.sort(key=lambda x: x[1])
args = [expr for (expr, _, _) in all_a]
new_src = ExprCompose(*args)
else:
new_dst, new_src = dst, src
expr = Expr.get_object(cls, (new_dst, new_src))
expr.__dst, expr.__src = new_dst, new_src
return expr
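    # Illustrative: with a 32-bit EAX, ExprAff(EAX[0:8], x) is normalised by
    # __new__ above into EAX = {x, EAX[8:32]}, i.e. the destination becomes
    # the whole register and the source an ExprCompose filling the untouched
    # bits.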
def __str__(self):
return "%s = %s" % (str(self.__dst), str(self.__src))
def get_r(self, mem_read=False, cst_read=False):
elements = self.__src.get_r(mem_read, cst_read)
if isinstance(self.__dst, ExprMem) and mem_read:
elements.update(self.__dst.arg.get_r(mem_read, cst_read))
return elements
def get_w(self):
if isinstance(self.__dst, ExprMem):
return set([self.__dst]) # [memreg]
else:
return self.__dst.get_w()
def _exprhash(self):
return hash((EXPRAFF, hash(self.__dst), hash(self.__src)))
def _exprrepr(self):
return "%s(%r, %r)" % (self.__class__.__name__, self.__dst, self.__src)
def __contains__(self, expr):
return (self == expr or
self.__src.__contains__(expr) or
self.__dst.__contains__(expr))
@visit_chk
def visit(self, callback, test_visit=None):
dst, src = self.__dst.visit(callback, test_visit), self.__src.visit(callback, test_visit)
if dst == self.__dst and src == self.__src:
return self
else:
return ExprAff(dst, src)
def copy(self):
return ExprAff(self.__dst.copy(), self.__src.copy())
def depth(self):
return max(self.__src.depth(), self.__dst.depth()) + 1
def graph_recursive(self, graph):
graph.add_node(self)
for arg in [self.__src, self.__dst]:
arg.graph_recursive(graph)
graph.add_uniq_edge(self, arg)
def is_aff(self):
return True
class ExprCond(Expr):
"""An ExprCond stand for a condition on an Expr
Use cases:
- var1 < var2
- min(var1, var2)
- if (cond) then ... else ...
"""
__slots__ = Expr.__slots__ + ["__cond", "__src1", "__src2"]
def __init__(self, cond, src1, src2):
"""Create an ExprCond
@cond: Expr, condition
@src1: Expr, value if condition is evaled to not zero
@src2: Expr, value if condition is evaled zero
"""
# cond, src1, src2 must be Expr
assert isinstance(cond, Expr)
assert isinstance(src1, Expr)
assert isinstance(src2, Expr)
super(ExprCond, self).__init__()
self.__cond, self.__src1, self.__src2 = cond, src1, src2
assert src1.size == src2.size
self.__size = self.src1.size
size = property(lambda self: self.__size)
cond = property(lambda self: self.__cond)
src1 = property(lambda self: self.__src1)
src2 = property(lambda self: self.__src2)
def __reduce__(self):
state = self.__cond, self.__src1, self.__src2
return self.__class__, state
def __new__(cls, cond, src1, src2):
return Expr.get_object(cls, (cond, src1, src2))
def __str__(self):
return "(%s?(%s,%s))" % (str(self.__cond), str(self.__src1), str(self.__src2))
def get_r(self, mem_read=False, cst_read=False):
out_src1 = self.src1.get_r(mem_read, cst_read)
out_src2 = self.src2.get_r(mem_read, cst_read)
return self.cond.get_r(mem_read,
cst_read).union(out_src1).union(out_src2)
def get_w(self):
return set()
def _exprhash(self):
return hash((EXPRCOND, hash(self.cond),
hash(self.__src1), hash(self.__src2)))
def _exprrepr(self):
return "%s(%r, %r, %r)" % (self.__class__.__name__,
self.__cond, self.__src1, self.__src2)
def __contains__(self, expr):
return (self == expr or
self.cond.__contains__(expr) or
self.src1.__contains__(expr) or
self.src2.__contains__(expr))
@visit_chk
def visit(self, callback, test_visit=None):
cond = self.__cond.visit(callback, test_visit)
src1 = self.__src1.visit(callback, test_visit)
src2 = self.__src2.visit(callback, test_visit)
if cond == self.__cond and src1 == self.__src1 and src2 == self.__src2:
return self
return ExprCond(cond, src1, src2)
def copy(self):
return ExprCond(self.__cond.copy(),
self.__src1.copy(),
self.__src2.copy())
def depth(self):
return max(self.__cond.depth(),
self.__src1.depth(),
self.__src2.depth()) + 1
def graph_recursive(self, graph):
graph.add_node(self)
for arg in [self.__cond, self.__src1, self.__src2]:
arg.graph_recursive(graph)
graph.add_uniq_edge(self, arg)
def is_cond(self):
return True
class ExprMem(Expr):
"""An ExprMem stand for a memory access
Use cases:
- Memory read
- Memory write
"""
__slots__ = Expr.__slots__ + ["__arg"]
def __init__(self, arg, size=32):
"""Create an ExprMem
@arg: Expr, memory access address
@size: int, memory access size
"""
# arg must be Expr
assert isinstance(arg, Expr)
assert isinstance(size, (int, long))
super(ExprMem, self).__init__()
if not isinstance(arg, Expr):
raise ValueError(
'ExprMem: arg must be an Expr (not %s)' % type(arg))
self.__arg, self.__size = arg, size
size = property(lambda self: self.__size)
arg = property(lambda self: self.__arg)
def __reduce__(self):
state = self.__arg, self.__size
return self.__class__, state
def __new__(cls, arg, size=32):
return Expr.get_object(cls, (arg, size))
def __str__(self):
return "@%d[%s]" % (self.size, str(self.arg))
def get_r(self, mem_read=False, cst_read=False):
if mem_read:
return set(self.__arg.get_r(mem_read, cst_read).union(set([self])))
else:
return set([self])
def get_w(self):
return set([self]) # [memreg]
def _exprhash(self):
return hash((EXPRMEM, hash(self.__arg), self.__size))
def _exprrepr(self):
return "%s(%r, %r)" % (self.__class__.__name__,
self.__arg, self.__size)
def __contains__(self, expr):
return self == expr or self.__arg.__contains__(expr)
@visit_chk
def visit(self, callback, test_visit=None):
arg = self.__arg.visit(callback, test_visit)
if arg == self.__arg:
return self
return ExprMem(arg, self.size)
def copy(self):
arg = self.arg.copy()
return ExprMem(arg, size=self.size)
def is_mem_segm(self):
"""Returns True if is ExprMem and ptr is_op_segm"""
return self.__arg.is_op_segm()
def depth(self):
return self.__arg.depth() + 1
def graph_recursive(self, graph):
graph.add_node(self)
self.__arg.graph_recursive(graph)
graph.add_uniq_edge(self, self.__arg)
def is_mem(self):
return True
class ExprOp(Expr):
"""An ExprOp stand for an operation between Expr
Use cases:
- var1 XOR var2
- var1 + var2 + var3
- parity bit(var1)
"""
__slots__ = Expr.__slots__ + ["__op", "__args"]
def __init__(self, op, *args):
"""Create an ExprOp
@op: str, operation
@*args: Expr, operand list
"""
# args must be Expr
assert all(isinstance(arg, Expr) for arg in args)
super(ExprOp, self).__init__()
sizes = set([arg.size for arg in args])
if len(sizes) != 1:
            # Special cases: operand sizes can differ
if op not in ["segm"]:
raise ValueError(
"sanitycheck: ExprOp args must have same size! %s" %
([(str(arg), arg.size) for arg in args]))
if not isinstance(op, str):
raise ValueError("ExprOp: 'op' argument must be a string")
assert isinstance(args, tuple)
self.__op, self.__args = op, args
# Set size for special cases
if self.__op in [
'==', 'parity', 'fcom_c0', 'fcom_c1', 'fcom_c2', 'fcom_c3',
'fxam_c0', 'fxam_c1', 'fxam_c2', 'fxam_c3',
"access_segment_ok", "load_segment_limit_ok", "bcdadd_cf",
"ucomiss_zf", "ucomiss_pf", "ucomiss_cf"]:
size = 1
elif self.__op in [TOK_INF, TOK_INF_SIGNED,
TOK_INF_UNSIGNED, TOK_INF_EQUAL,
TOK_INF_EQUAL_SIGNED, TOK_INF_EQUAL_UNSIGNED,
TOK_EQUAL, TOK_POS,
TOK_POS_STRICT,
]:
size = 1
elif self.__op in ['mem_16_to_double', 'mem_32_to_double',
'mem_64_to_double', 'mem_80_to_double',
'int_16_to_double', 'int_32_to_double',
'int_64_to_double', 'int_80_to_double']:
size = 64
elif self.__op in ['double_to_mem_16', 'double_to_int_16',
'float_trunc_to_int_16', 'double_trunc_to_int_16']:
size = 16
elif self.__op in ['double_to_mem_32', 'double_to_int_32',
'float_trunc_to_int_32', 'double_trunc_to_int_32',
'double_to_float']:
size = 32
elif self.__op in ['double_to_mem_64', 'double_to_int_64',
'float_trunc_to_int_64', 'double_trunc_to_int_64',
'float_to_double']:
size = 64
elif self.__op in ['double_to_mem_80', 'double_to_int_80',
'float_trunc_to_int_80',
'double_trunc_to_int_80']:
size = 80
elif self.__op in ['segm']:
size = self.__args[1].size
else:
if None in sizes:
size = None
else:
# All arguments have the same size
size = list(sizes)[0]
self.__size = size
size = property(lambda self: self.__size)
op = property(lambda self: self.__op)
args = property(lambda self: self.__args)
def __reduce__(self):
state = tuple([self.__op] + list(self.__args))
return self.__class__, state
def __new__(cls, op, *args):
return Expr.get_object(cls, (op, args))
def __str__(self):
if self.is_associative():
return '(' + self.__op.join([str(arg) for arg in self.__args]) + ')'
if (self.__op.startswith('call_func_') or
self.__op == 'cpuid' or
len(self.__args) > 2 or
self.__op in ['parity', 'segm']):
return self.__op + '(' + ', '.join([str(arg) for arg in self.__args]) + ')'
if len(self.__args) == 2:
return ('(' + str(self.__args[0]) +
' ' + self.op + ' ' + str(self.__args[1]) + ')')
else:
return reduce(lambda x, y: x + ' ' + str(y),
self.__args,
'(' + str(self.__op)) + ')'
def get_r(self, mem_read=False, cst_read=False):
return reduce(lambda elements, arg:
elements.union(arg.get_r(mem_read, cst_read)), self.__args, set())
def get_w(self):
raise ValueError('op cannot be written!', self)
def _exprhash(self):
h_hargs = [hash(arg) for arg in self.__args]
return hash((EXPROP, self.__op, tuple(h_hargs)))
def _exprrepr(self):
return "%s(%r, %s)" % (self.__class__.__name__, self.__op,
', '.join(repr(arg) for arg in self.__args))
def __contains__(self, expr):
if self == expr:
return True
for arg in self.__args:
if arg.__contains__(expr):
return True
return False
def is_function_call(self):
return self.__op.startswith('call')
def is_associative(self):
"Return True iff current operation is associative"
return (self.__op in ['+', '*', '^', '&', '|'])
def is_commutative(self):
"Return True iff current operation is commutative"
return (self.__op in ['+', '*', '^', '&', '|'])
@visit_chk
def visit(self, callback, test_visit=None):
args = [arg.visit(callback, test_visit) for arg in self.__args]
modified = any([arg[0] != arg[1] for arg in zip(self.__args, args)])
if modified:
return ExprOp(self.__op, *args)
return self
def copy(self):
args = [arg.copy() for arg in self.__args]
return ExprOp(self.__op, *args)
def depth(self):
depth = [arg.depth() for arg in self.__args]
return max(depth) + 1
def graph_recursive(self, graph):
graph.add_node(self)
for arg in self.__args:
arg.graph_recursive(graph)
graph.add_uniq_edge(self, arg)
def is_op(self, op=None):
if op is None:
return True
return self.op == op
def is_op_segm(self):
"""Returns True if is ExprOp and op == 'segm'"""
return self.is_op('segm')
class ExprSlice(Expr):
__slots__ = Expr.__slots__ + ["__arg", "__start", "__stop"]
def __init__(self, arg, start, stop):
# arg must be Expr
assert isinstance(arg, Expr)
assert isinstance(start, (int, long))
assert isinstance(stop, (int, long))
super(ExprSlice, self).__init__()
assert start < stop
self.__arg, self.__start, self.__stop = arg, start, stop
self.__size = self.__stop - self.__start
size = property(lambda self: self.__size)
arg = property(lambda self: self.__arg)
start = property(lambda self: self.__start)
stop = property(lambda self: self.__stop)
def __reduce__(self):
state = self.__arg, self.__start, self.__stop
return self.__class__, state
def __new__(cls, arg, start, stop):
return Expr.get_object(cls, (arg, start, stop))
def __str__(self):
return "%s[%d:%d]" % (str(self.__arg), self.__start, self.__stop)
def get_r(self, mem_read=False, cst_read=False):
return self.__arg.get_r(mem_read, cst_read)
def get_w(self):
return self.__arg.get_w()
def _exprhash(self):
return hash((EXPRSLICE, hash(self.__arg), self.__start, self.__stop))
def _exprrepr(self):
return "%s(%r, %d, %d)" % (self.__class__.__name__, self.__arg,
self.__start, self.__stop)
def __contains__(self, expr):
if self == expr:
return True
return self.__arg.__contains__(expr)
@visit_chk
def visit(self, callback, test_visit=None):
arg = self.__arg.visit(callback, test_visit)
if arg == self.__arg:
return self
return ExprSlice(arg, self.__start, self.__stop)
def copy(self):
return ExprSlice(self.__arg.copy(), self.__start, self.__stop)
def depth(self):
return self.__arg.depth() + 1
def slice_rest(self):
"Return the completion of the current slice"
size = self.__arg.size
if self.__start >= size or self.__stop > size:
raise ValueError('bad slice rest %s %s %s' %
(size, self.__start, self.__stop))
if self.__start == self.__stop:
return [(0, size)]
rest = []
if self.__start != 0:
rest.append((0, self.__start))
if self.__stop < size:
rest.append((self.__stop, size))
return rest
def graph_recursive(self, graph):
graph.add_node(self)
self.__arg.graph_recursive(graph)
graph.add_uniq_edge(self, self.__arg)
def is_slice(self, start=None, stop=None):
if start is not None and self.__start != start:
return False
if stop is not None and self.__stop != stop:
return False
return True
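# Hypothetical usage sketch (not part of the original module): slice_rest()
# returns the bit ranges of the argument that the slice does NOT cover.
def _example_slice_rest():
    low_byte = ExprSlice(ExprId('EAX', 32), 0, 8)
    assert low_byte.slice_rest() == [(8, 32)]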
class ExprCompose(Expr):
"""
    Compose is like a hamburger: it concatenates Expressions
"""
__slots__ = Expr.__slots__ + ["__args"]
def __init__(self, *args):
"""Create an ExprCompose
The ExprCompose is contiguous and starts at 0
@args: [Expr, Expr, ...]
DEPRECATED:
@args: [(Expr, int, int), (Expr, int, int), ...]
"""
# args must be Expr
assert all(isinstance(arg, Expr) for arg in args)
super(ExprCompose, self).__init__()
assert isinstance(args, tuple)
self.__args = args
self.__size = sum([arg.size for arg in args])
size = property(lambda self: self.__size)
args = property(lambda self: self.__args)
def __reduce__(self):
state = self.__args
return self.__class__, state
def __new__(cls, *args):
return Expr.get_object(cls, args)
def __str__(self):
return '{' + ', '.join(["%s %s %s" % (arg, idx, idx + arg.size) for idx, arg in self.iter_args()]) + '}'
def get_r(self, mem_read=False, cst_read=False):
return reduce(lambda elements, arg:
elements.union(arg.get_r(mem_read, cst_read)), self.__args, set())
def get_w(self):
return reduce(lambda elements, arg:
elements.union(arg.get_w()), self.__args, set())
def _exprhash(self):
h_args = [EXPRCOMPOSE] + [hash(arg) for arg in self.__args]
return hash(tuple(h_args))
def _exprrepr(self):
return "%s%r" % (self.__class__.__name__, self.__args)
def __contains__(self, expr):
if self == expr:
return True
for arg in self.__args:
if arg == expr:
return True
if arg.__contains__(expr):
return True
return False
@visit_chk
def visit(self, callback, test_visit=None):
args = [arg.visit(callback, test_visit) for arg in self.__args]
modified = any([arg != arg_new for arg, arg_new in zip(self.__args, args)])
if modified:
return ExprCompose(*args)
return self
def copy(self):
args = [arg.copy() for arg in self.__args]
return ExprCompose(*args)
def depth(self):
depth = [arg.depth() for arg in self.__args]
return max(depth) + 1
def graph_recursive(self, graph):
graph.add_node(self)
for arg in self.args:
arg.graph_recursive(graph)
graph.add_uniq_edge(self, arg)
def iter_args(self):
index = 0
for arg in self.__args:
yield index, arg
index += arg.size
def is_compose(self):
return True
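# Hypothetical usage sketch (not part of the original module): iter_args()
# yields (bit offset, argument) pairs, with offsets accumulating from 0.
def _example_iter_args():
    comp = ExprCompose(ExprInt(1, 8), ExprInt(2, 24))
    assert [offset for offset, _ in comp.iter_args()] == [0, 8]
    assert comp.size == 32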
# Expression order for comparison
EXPR_ORDER_DICT = {ExprId: 1,
ExprCond: 2,
ExprMem: 3,
ExprOp: 4,
ExprSlice: 5,
ExprCompose: 7,
ExprInt: 8,
}
def compare_exprs_compose(expr1, expr2):
    # Sort by start bit address, then expr, then stop bit address
ret = cmp(expr1[1], expr2[1])
if ret:
return ret
ret = compare_exprs(expr1[0], expr2[0])
if ret:
return ret
ret = cmp(expr1[2], expr2[2])
return ret
def compare_expr_list_compose(l1_e, l2_e):
# Sort by list elements in incremental order, then by list size
for i in xrange(min(len(l1_e), len(l2_e))):
ret = compare_exprs(l1_e[i], l2_e[i])
if ret:
return ret
return cmp(len(l1_e), len(l2_e))
def compare_expr_list(l1_e, l2_e):
# Sort by list elements in incremental order, then by list size
for i in xrange(min(len(l1_e), len(l2_e))):
ret = compare_exprs(l1_e[i], l2_e[i])
if ret:
return ret
return cmp(len(l1_e), len(l2_e))
def compare_exprs(expr1, expr2):
"""Compare 2 expressions for canonization
@expr1: Expr
@expr2: Expr
0 => ==
1 => expr1 > expr2
-1 => expr1 < expr2
"""
cls1 = expr1.__class__
cls2 = expr2.__class__
if cls1 != cls2:
return cmp(EXPR_ORDER_DICT[cls1], EXPR_ORDER_DICT[cls2])
if expr1 == expr2:
return 0
if cls1 == ExprInt:
ret = cmp(expr1.size, expr2.size)
if ret != 0:
return ret
return cmp(expr1.arg, expr2.arg)
elif cls1 == ExprId:
ret = cmp(expr1.name, expr2.name)
if ret:
return ret
return cmp(expr1.size, expr2.size)
elif cls1 == ExprAff:
        raise NotImplementedError(
            "Comparison of ExprAff is not yet implemented")
    elif cls1 == ExprCond:
ret = compare_exprs(expr1.cond, expr2.cond)
if ret:
return ret
ret = compare_exprs(expr1.src1, expr2.src1)
if ret:
return ret
ret = compare_exprs(expr1.src2, expr2.src2)
return ret
elif cls1 == ExprMem:
ret = compare_exprs(expr1.arg, expr2.arg)
if ret:
return ret
return cmp(expr1.size, expr2.size)
elif cls1 == ExprOp:
if expr1.op != expr2.op:
return cmp(expr1.op, expr2.op)
return compare_expr_list(expr1.args, expr2.args)
elif cls1 == ExprSlice:
ret = compare_exprs(expr1.arg, expr2.arg)
if ret:
return ret
ret = cmp(expr1.start, expr2.start)
if ret:
return ret
ret = cmp(expr1.stop, expr2.stop)
return ret
elif cls1 == ExprCompose:
return compare_expr_list_compose(expr1.args, expr2.args)
    raise NotImplementedError(
        "Comparison between %r %r not implemented" % (expr1, expr2))
def canonize_expr_list(expr_list):
expr_list = list(expr_list)
expr_list.sort(cmp=compare_exprs)
return expr_list
def canonize_expr_list_compose(expr_list):
expr_list = list(expr_list)
expr_list.sort(cmp=compare_exprs_compose)
return expr_list
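# Hypothetical usage sketch (not part of the original module): canonization
# orders expressions by class rank (EXPR_ORDER_DICT) and then by content,
# so an ExprId sorts before an ExprInt regardless of the input order.
def _example_canonize():
    a = ExprId('A', 32)
    b = ExprInt(5, 32)
    assert canonize_expr_list([b, a]) == [a, b]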
# Generate ExprInt with common size
def ExprInt1(i):
    warnings.warn('DEPRECATION WARNING: use ExprInt(i, 1) instead of '\
                  'ExprInt1(i)')
    return ExprInt(i, 1)
def ExprInt8(i):
    warnings.warn('DEPRECATION WARNING: use ExprInt(i, 8) instead of '\
                  'ExprInt8(i)')
    return ExprInt(i, 8)
def ExprInt16(i):
    warnings.warn('DEPRECATION WARNING: use ExprInt(i, 16) instead of '\
                  'ExprInt16(i)')
    return ExprInt(i, 16)
def ExprInt32(i):
    warnings.warn('DEPRECATION WARNING: use ExprInt(i, 32) instead of '\
                  'ExprInt32(i)')
    return ExprInt(i, 32)
def ExprInt64(i):
    warnings.warn('DEPRECATION WARNING: use ExprInt(i, 64) instead of '\
                  'ExprInt64(i)')
    return ExprInt(i, 64)
def ExprInt_from(expr, i):
    "Generate an ExprInt with size equal to the expression's"
    warnings.warn('DEPRECATION WARNING: use ExprInt(i, expr.size) instead of '\
                  'ExprInt_from(expr, i)')
    return ExprInt(i, expr.size)
def get_expr_ids_visit(expr, ids):
"""Visitor to retrieve ExprId in @expr
@expr: Expr"""
if isinstance(expr, ExprId):
ids.add(expr)
return expr
def get_expr_ids(expr):
"""Retrieve ExprId in @expr
@expr: Expr"""
ids = set()
expr.visit(lambda x: get_expr_ids_visit(x, ids))
return ids
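# Hypothetical usage sketch (not part of the original module): the visitor
# walks every sub-expression, so identifiers nested under operations are
# collected as well.
def _example_get_expr_ids():
    a, b = ExprId('A', 32), ExprId('B', 32)
    assert get_expr_ids(ExprOp('+', a, b)) == set([a, b])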
def test_set(expr, pattern, tks, result):
"""Test if v can correspond to e. If so, update the context in result.
Otherwise, return False
@expr : Expr to match
@pattern : pattern Expr
@tks : list of ExprId, available jokers
@result : dictionary of ExprId -> Expr, current context
"""
if not pattern in tks:
return expr == pattern
if pattern in result and result[pattern] != expr:
return False
result[pattern] = expr
return result
def match_expr(expr, pattern, tks, result=None):
"""Try to match the @pattern expression with the pattern @expr with @tks jokers.
Result is output dictionary with matching joker values.
@expr : Expr pattern
@pattern : Targetted Expr to match
@tks : list of ExprId, available jokers
@result : dictionary of ExprId -> Expr, output matching context
"""
if result is None:
result = {}
if pattern in tks:
# pattern is a Joker
return test_set(expr, pattern, tks, result)
if expr.is_int():
return test_set(expr, pattern, tks, result)
elif expr.is_id():
return test_set(expr, pattern, tks, result)
elif expr.is_op():
        # expr needs to be the same operation as pattern
if not pattern.is_op():
return False
if expr.op != pattern.op:
return False
if len(expr.args) != len(pattern.args):
return False
# Perform permutation only if the current operation is commutative
if expr.is_commutative():
permutations = itertools.permutations(expr.args)
else:
permutations = [expr.args]
# For each permutations of arguments
for permut in permutations:
good = True
# We need to use a copy of result to not override it
myresult = dict(result)
for sub_expr, sub_pattern in zip(permut, pattern.args):
ret = match_expr(sub_expr, sub_pattern, tks, myresult)
                # If the current permutation does not match EVERY term
if ret is False:
good = False
break
if good is True:
# We found a possibility
for joker, value in myresult.items():
# Updating result in place (to keep pointer in recursion)
result[joker] = value
return result
return False
# Recursive tests
elif expr.is_mem():
if not pattern.is_mem():
return False
if expr.size != pattern.size:
return False
return match_expr(expr.arg, pattern.arg, tks, result)
elif expr.is_slice():
if not pattern.is_slice():
return False
if expr.start != pattern.start or expr.stop != pattern.stop:
return False
return match_expr(expr.arg, pattern.arg, tks, result)
elif expr.is_cond():
if not pattern.is_cond():
return False
if match_expr(expr.cond, pattern.cond, tks, result) is False:
return False
if match_expr(expr.src1, pattern.src1, tks, result) is False:
return False
if match_expr(expr.src2, pattern.src2, tks, result) is False:
return False
return result
elif expr.is_compose():
if not pattern.is_compose():
return False
for sub_expr, sub_pattern in zip(expr.args, pattern.args):
if match_expr(sub_expr, sub_pattern, tks, result) is False:
return False
return result
elif expr.is_aff():
if not pattern.is_aff():
return False
if match_expr(expr.src, pattern.src, tks, result) is False:
return False
if match_expr(expr.dst, pattern.dst, tks, result) is False:
return False
return result
else:
raise NotImplementedError("match_expr: Unknown type: %s" % type(expr))
def MatchExpr(expr, pattern, tks, result=None):
warnings.warn('DEPRECATION WARNING: use match_expr instead of MatchExpr')
return match_expr(expr, pattern, tks, result)
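# Hypothetical usage sketch (not part of the original module): match a
# commutative addition against a pattern containing one joker; since '+'
# is commutative, the joker binds to whichever operand is left over.
def _example_match_expr():
    a = ExprId('A', 32)
    jok = ExprId('jok', 32)
    expr = ExprOp('+', a, ExprInt(1, 32))
    result = match_expr(expr, ExprOp('+', jok, ExprInt(1, 32)), [jok])
    assert result == {jok: a}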
def get_rw(exprs):
o_r = set()
o_w = set()
for expr in exprs:
o_r.update(expr.get_r(mem_read=True))
for expr in exprs:
o_w.update(expr.get_w())
return o_r, o_w
def get_list_rw(exprs, mem_read=False, cst_read=True):
"""Return list of read/write reg/cst/mem for each @exprs
@exprs: list of expressions
    @mem_read: walk through memory accesses
@cst_read: retrieve constants
"""
list_rw = []
# cst_num = 0
for expr in exprs:
o_r = set()
o_w = set()
# get r/w
o_r.update(expr.get_r(mem_read=mem_read, cst_read=cst_read))
if isinstance(expr.dst, ExprMem):
o_r.update(expr.dst.arg.get_r(mem_read=mem_read, cst_read=cst_read))
o_w.update(expr.get_w())
# each cst is indexed
o_r_rw = set()
for read in o_r:
o_r_rw.add(read)
o_r = o_r_rw
list_rw.append((o_r, o_w))
return list_rw
def get_expr_ops(expr):
"""Retrieve operators of an @expr
@expr: Expr"""
def visit_getops(expr, out=None):
if out is None:
out = set()
if isinstance(expr, ExprOp):
out.add(expr.op)
return expr
ops = set()
expr.visit(lambda x: visit_getops(x, ops))
return ops
def get_expr_mem(expr):
"""Retrieve memory accesses of an @expr
@expr: Expr"""
def visit_getmem(expr, out=None):
if out is None:
out = set()
if isinstance(expr, ExprMem):
out.add(expr)
return expr
ops = set()
expr.visit(lambda x: visit_getmem(x, ops))
return ops
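# Hypothetical usage sketch (not part of the original module): combine the
# visitor helpers above to inspect an expression built around a memory read.
def _example_expr_helpers():
    addr = ExprOp('+', ExprId('EBX', 32), ExprInt(4, 32))
    mem = ExprMem(addr, 32)
    assert get_expr_ops(mem) == set(['+'])
    assert get_expr_mem(mem) == set([mem])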
| stephengroat/miasm | miasm2/expression/expression.py | Python | gpl-2.0 | 45,069 |
#
# Copyright 2009-2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
import logging
from functools import wraps
from vdsm.storage import exception as se
from vdsm.storage import task
_EXPORTED_ATTRIBUTE = "__dispatcher_exported__"
def exported(f):
setattr(f, _EXPORTED_ATTRIBUTE, True)
return f
class Dispatcher(object):
log = logging.getLogger('storage.dispatcher')
STATUS_OK = {'status': {'code': 0, 'message': "OK"}}
STATUS_ERROR = {'status': {'code': 100, 'message': "ERROR"}}
def __init__(self, obj):
self._obj = obj
self._exposeFunctions(obj)
self.log.info("Starting StorageDispatcher...")
@property
def ready(self):
return getattr(self._obj, 'ready', True)
def _exposeFunctions(self, obj):
for funcName in dir(obj):
if funcName.startswith("_"):
continue
funcObj = getattr(obj, funcName)
if hasattr(funcObj, _EXPORTED_ATTRIBUTE) and callable(funcObj):
if hasattr(self, funcName):
self.log.error("StorageDispatcher: init - multiple public"
" functions with same name: %s" % funcName)
continue
# Create a new entry in instance's "dict" that will mask the
# original method
setattr(self, funcName, self.protect(funcObj, funcName))
def protect(self, func, name, *args, **kwargs):
@wraps(func)
def wrapper(*args, **kwargs):
try:
ctask = task.Task(id=None, name=name)
try:
response = self.STATUS_OK.copy()
result = ctask.prepare(func, *args, **kwargs)
if type(result) == dict:
response.update(result)
return response
except se.GeneralException as e:
# Match api.method format
if e.expected:
self.log.info("FINISH %s error=%s", name, e)
else:
self.log.error("FINISH %s error=%s", name, e)
return e.response()
except BaseException as e:
# Match api.method format
self.log.exception("FINISH %s error=%s", name, e)
defaultException = ctask.defaultException
if (defaultException and
hasattr(defaultException, "response")):
resp = defaultException.response()
defaultExceptionInfo = (resp['status']['code'],
resp['status']['message'])
return se.generateResponse(e, defaultExceptionInfo)
return se.generateResponse(e)
except:
try:
# We should never reach this
self.log.exception(
"Unhandled exception (name=%s, args=%s, kwargs=%s)",
name, args, kwargs)
finally:
return self.STATUS_ERROR.copy()
return wrapper
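# Hypothetical usage sketch (not part of the original module), assuming the
# vdsm task machinery is available at runtime: only methods decorated with
# @exported are re-exposed on the dispatcher, wrapped so they run inside a
# storage task and always answer with a {'status': ...} response dict.
class _ExampleHSM(object):
    @exported
    def getExampleInfo(self):
        return {'info': {'size': 42}}
def _example_dispatch():
    dispatcher = Dispatcher(_ExampleHSM())
    response = dispatcher.getExampleInfo()
    assert response['status']['code'] == 0
    assert response['info'] == {'size': 42}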
| nirs/vdsm | lib/vdsm/storage/dispatcher.py | Python | gpl-2.0 | 4,009 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
default_app_config = 'kong_admin.apps.KongAdminConfig'
__version__ = '0.3.7'
| athento/django-kong-admin | kong_admin/__init__.py | Python | bsd-3-clause | 158 |
from net.wyun.mer.ink import scginkparser
import numpy as np
from net.wyun.mer.ink.stroke import Stroke
from net.wyun.mer.ink.stroke import get_bounding_box
from net.wyun.mer.ink.stroke import get_bounding_box_h1000
from net.wyun.mer.ink.sample import Sample
from scipy import misc
# Import `load_workbook` module from `openpyxl`
from openpyxl import load_workbook
# Load in the workbook
wb = load_workbook('data/scg/test.xlsx')
# Get sheet names
print(wb.get_sheet_names()) #[u'hw_record', u'Sheet1']
# Get a sheet by name
ws = wb.get_sheet_by_name('hw_record')
# Print the sheet title
print ws['A1'].value, ws['B1'].value, ws['C1'].value
print ws['A2'].value, ws['B2'].value, ws['C2'].value
print 'length of record: ', len(ws['A'])
scg_id = int(ws['A2'].value)
scg_content = ws['B2'].value
strokes = scginkparser.parse_scg_ink_file(scg_content, scg_id)
for st in strokes:
print st
traces = {}
trace_id_int = 0
for st in strokes:
coords = np.zeros((2, len(st)))
idx = 0
for x_y in st:
coords[:, idx] = [float(x_y[0]), float(x_y[1])]
idx += 1
traces[trace_id_int] = Stroke(trace_id_int, coords)
trace_id_int += 1
# Compute bounding box of the input expression
x_min, y_min, x_max, y_max = get_bounding_box(traces) # bounding box for the whole math expression
# Just in case there is only one point or a sequence of points perfectly aligned with the x or y axis
if x_max == x_min: x_max = x_min + 1
if y_max == y_min: y_max = y_min + 1
# Renormalize to height [0,10000] keeping the aspect ratio
H = 10000.0
W = H * (x_max - x_min) / (y_max - y_min)
for trace_key, trace_v in traces.iteritems():
trace_v.calc_coords_h10000(H, W, x_min, y_min, x_max, y_max)
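# Hypothetical worked example (not part of the original script): for a
# bounding box with x in [0, 50] and y in [0, 100], the target width is
# W = 10000 * (50 - 0) / (100 - 0) = 5000, preserving the 2:1 aspect ratio
# of the ink once the height is normalized to 10000.
example_W = 10000.0 * (50 - 0) / (100 - 0)
assert example_W == 5000.0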
for trace_key, trace_v in traces.iteritems():
print trace_key, trace_v
rx, ry, rs, rt = trace_v.get_bounding_box_h10000()
print rx, ry, rs, rt
dummy_sample = Sample('data/inkml/65_alfonso.inkml')
dummy_sample.traces = traces
img, W, H = dummy_sample.render()
print 'save image to temp/all.png: '
misc.imsave('temp/all.png', img)
| michaelyin/im2markup-prep | net/wyun/mer/prep/xlsxhandler.py | Python | apache-2.0 | 2,067 |
# coding: utf-8
from pyDes import *
import base64
import hashlib
import binascii
import sys
from model import itsettings
reload(sys)
sys.setdefaultencoding('utf-8')
class MobileManage:
"""Mobile Http Client"""
def __init__(self):
pass
def encrpyt(self, key, data):
if len(key) != 8:
            print 'key length is not 8!'
return None
"""DES对称加密"""
k = des(str(key), ECB, pad=None, padmode=PAD_PKCS5)
d = k.encrypt(str(data))
"""base64加密"""
return base64.b64encode(d)
def decrypt(self, key, data):
if len(key) != 8:
            print 'key length is not 8!'
return None
"""base64解密"""
d = base64.b64decode(data)
"""DES对称解密"""
k = des(key, ECB, pad=None, padmode=PAD_PKCS5)
destr = k.decrypt(d)
return destr
def getkey(self, key1, unique):
"""generate the key of DES"""
print 'uuid is ' + unique
listuuid = []
listuuid.append(unique[0:4])
listuuid.append(unique[4:8])
listuuid.append(unique[8:12])
listuuid.append(unique[12:16])
listuuid.append(unique[16:20])
listuuid.append(unique[20:24])
listuuid.append(unique[24:28])
listuuid.append(unique[28:32])
listcrc32 = []
for element in listuuid:
listcrc32.append(binascii.crc32(element))
list32 = []
for element in listcrc32:
list32.append(element % 32)
key = key1[list32[0]] + key1[list32[1]] + key1[list32[2]] + key1[list32[3]] + key1[list32[4]] + key1[
list32[5]] + key1[list32[6]] + key1[list32[7]]
print 'key is ' + key
return key
def generate_sign(self,key,enbody,unique):
"""generate sign, sign = sha1(key+time+unique+enbody),then transform 16 byte string"""
value = str(key) + str(itsettings.current_time) + str(unique) + str(enbody)
h = hashlib.sha1()
h.update(value)
return h.hexdigest()
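    # Hypothetical usage sketch (not part of the original module): DES is a
    # block cipher with an 8-byte key, so encrypting and then decrypting with
    # the same derived key round-trips. The 32-char charset string and uuid
    # below are assumptions; `encrpyt` is the module's original (sic) name.
    def _example_roundtrip(self):
        key = self.getkey('0123456789abcdefghijklmnopqrstuv', 'a' * 32)
        data = 'hello world'
        assert self.decrypt(key, self.encrpyt(key, data)) == data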
# def qrcode_get(self, url, uid, key2):
#
# uidencode = base64.b64encode(uid)
    # # Mobile client scans the QR code
# unique = str(uuid.uuid1()).replace('-', '')
# key = self.getkey(key2, unique)
#
    # # Compute the signature
# value = key2 + uidencode + str(time)
# h = hashlib.sha1()
# h.update(value)
# sign = h.hexdigest()
    # # Send the request
# http = httplib2.Http()
# qrpara = {'uid': uidencode, 'timestamp': str(time), 'signature': sign, 'unique': unique}
# geturl = url + "&" + self.encodepara(str(qrpara))
# print 'HttpGet url is ' + geturl
# try:
# http = httplib2.Http(".cache", disable_ssl_certificate_validation=True)
# resp, content = http.request(geturl, 'GET')
# except Exception, e:
# raise e
# else:
# de_content = self.decrypt(key, content)
# res_content = self.replace_null(de_content)
# print 'send HttpPost successful! content is ' + res_content
# return res_content.decode('utf-8')
# # print content
#
# def encodepara(self, para):
# encodepara = urllib.urlencode(eval(para))
# return encodepara
#
# def replace_null(self, response):
# strres = json.dumps(response, ensure_ascii=False)
# return eval(strres.replace('null', '\\"null\\"').replace('false', '\\"false\\"').replace('true', '\\"true\\"'))
#
# def checkport(self):
# global host
# global port
# if port == 0:
# url = host
# else:
# url = host + ':' + str(port)
# return url
#
# def mobile_environment_config(self, h, p):
# """Set HTTP Request host and port,host and port is global variable.
# host default value is https://b.yixin.im,port default value is 0.
#
# Examples:
# | Environment Mobile Config| host | port |
# """
# global host
# global port
# host = h
# port = p
# print 'host is ' + h
# print 'port is ' + str(p)
#
# def get_app_oAuth(self, url, user, password, mac):
    # ### Get the redirect URL ###
# r = requests.get(url)
# oAuthUrl = r.history[1].url
#
    # ### Fetch a temporary ticket from the server ###
    # # Call the 102 login interface
# para_login = str({"c": login_port})
# body_login = str({"email": user, "password": password, "mac": mac})
# res_login = self.mobile_post(para_login, body_login, '["password"]')
# res_json_login = json.loads(res_login)
# uid_login = str(res_json_login["result"]["uid"])
# key2_login = str(res_json_login["result"]["key2"])
    # # Call 901 to get the app login-free ticket
# para = str({"c": ticket_port})
# body = '{"url": "' + oAuthUrl + '"}'
# sbody = str(body)
# res_ticket = self.mobile_post(para, sbody, "None", key2_login, uid_login)
# st = json.loads(res_ticket).get('result').get('st')
#
    # ### Append the st param to the oauth URL to get the URL with code ###
# url_st = oAuthUrl + "&st=" + st
# url_code = requests.get(url_st).url
# print url_code
# return url_code | longmazhanfeng/interface_web | interface_platform/mobile_manage.py | Python | mit | 5,314 |
#!/usr/bin/env python
#
# $LicenseInfo:firstyear=2010&license=mit$
#
# Copyright (c) 2010, Linden Research, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# $/LicenseInfo$
#
'''
This is the main script for apiary. It is responsible for all things
related to configuration, option parsing, and process/thread management.
'''
import optparse
import sys
import os
import multiprocessing
import signal
import time
import pkgutil
import importlib
import apiary
import apiary.tools.debug
from apiary.tools.debug import *
def main(args=sys.argv[1:]):
options, arguments = parse_args(args)
if options.debug:
apiary.tools.debug.enable_debug()
if options.profile:
from apiary.tools import lsprof
profiler = lsprof.Profiler()
profiler.enable(subcalls=True)
beekeeper = apiary.BeeKeeper(options, arguments)
beekeeper.start()
if options.profile:
profiler.disable()
stats = lsprof.Stats(profiler.getstats())
stats.sort()
stats.pprint(top=10, file=sys.stderr, climit=5)
def get_protocol_modules():
path = os.path.join(os.path.dirname(__file__), 'protocols')
modules = {}
for loader, name, is_package in pkgutil.iter_modules([path]):
if not is_package:
modules[name] = importlib.import_module('apiary.protocols.%s' % name)
return modules
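def _example_list_protocols():
    # Hypothetical usage sketch (not part of the original module): discovery
    # is driven purely by the files present in apiary/protocols, so each
    # protocol module is keyed by its bare file name.
    for name, module in get_protocol_modules().items():
        print '%s (%s)' % (name, module.__name__)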
def parse_args(args=[]):
parser = build_option_parser()
modules = get_protocol_modules()
for mod in modules.values():
if hasattr(mod, 'add_options'):
mod.add_options(parser)
options, args = parser.parse_args(args)
options.protocols = modules
return options, args
def build_option_parser():
parser = optparse.OptionParser()
apiary.add_options(parser)
return parser
if __name__ == '__main__':
main()
| lexelby/apiary | apiary/main.py | Python | mit | 2,842 |
#!/usr/bin/env python
# The MIT License (MIT)
#
# Copyright (c) 2013 Keita Kita
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# mount_raspberry_pi_image_rootfs
#
# A script that mount the root filesystem in an image of Raspberry Pi.
import argparse
import os.path
import subprocess
import sys
import fdisk_output_parser
class CannotDetectOffsetError(Exception):
pass
def detect_root_filesystem_offset(fdisk_output):
u'''
    Detect the offset in bytes of the root filesystem from an output of fdisk.
    The offset in bytes is calculated by multiplying the size of one unit in
    bytes by the start unit index of the root filesystem.
    Arguments:
        fdisk_output : An output of fdisk.
    Return:
        The offset in bytes of the root filesystem.
'''
# Get partitions.
try:
image_partitions = fdisk_output_parser.detect_partitions(fdisk_output)
except fdisk_output_parser.ParseError:
raise CannotDetectOffsetError()
# Get the offset of the root file system.
for partition in image_partitions:
if partition.system == u'Linux':
return partition.start_offset_bytes
else:
raise CannotDetectOffsetError()
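# Hypothetical worked example (not part of the original script): with the
# common layout of 512-byte units and a root partition starting at unit
# 122880, the offset later handed to `mount -o loop,offset=...` would be
#     122880 * 512 = 62914560 bytes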
def detach_loopback_device(loopback_device_file):
subprocess.call(['losetup', '-d', loopback_device_file])
def main(image_file, loopback_device_file, mount_point):
# Check the files exist.
# If one of the file does not exist, print an error message and exit.
if not os.path.exists(image_file):
print >>sys.stderr, "Image file does not exist : " + image_file
sys.exit(1)
if not os.path.exists(loopback_device_file):
print >>sys.stderr, \
"Loopback device file does not exist : " + loopback_device_file
sys.exit(1)
if not os.path.exists(mount_point):
print >>sys.stderr, "Mount point does not exist : " + mount_point
sys.exit(1)
# Set loopback device for the image.
print '--- Set loopback device %s for %s ---' % (
loopback_device_file, image_file)
try:
subprocess.check_call(
['losetup', loopback_device_file, image_file],
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError, e:
print >>sys.stderr, e
sys.exit(1)
# Get the offset of partition of the root filesystem.
print '--- Get the offset of partition of the root filesystem ---'
try:
fdisk_output = subprocess.check_output(
['fdisk', '-lu', loopback_device_file],
stderr=subprocess.STDOUT)
offset = detect_root_filesystem_offset(fdisk_output)
except subprocess.CalledProcessError, e:
print >>sys.stderr, e
detach_loopback_device(loopback_device_file)
sys.exit(1)
except CannotDetectOffsetError:
print >>sys.stderr, \
"The offset of the root filesystem cannot be detected."
detach_loopback_device(loopback_device_file)
sys.exit(1)
# Mount the partition of the root filesystem.
print '--- Mount the partition of the root filesystem ---'
try:
subprocess.check_call(
['mount', '-o', 'loop,offset=%d' % offset, loopback_device_file,
mount_point],
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError, e:
print >>sys.stderr, e
detach_loopback_device(loopback_device_file)
sys.exit(1)
# Complete.
print 'Success.'
def create_command_line_parser():
parser = argparse.ArgumentParser(
description=u'Mount the root filesystem in an image of Raspberry Pi.')
parser.add_argument(
'image_file', metavar='IMAGE_FILE', nargs='?')
parser.add_argument(
'loopback_device_file', metavar='LOOPBACK_DEVICE_FILE', nargs='?')
parser.add_argument(
'mount_point', metavar='MOUNT_POINT', nargs='?')
return parser
if __name__ == '__main__':
# Parse command-line arguments.
parser = create_command_line_parser()
arguments = parser.parse_args()
# Call main function with parsed arguments.
# If there is not arguments, print help and exit.
if arguments.image_file and arguments.loopback_device_file and \
arguments.mount_point:
main(arguments.image_file, arguments.loopback_device_file,
arguments.mount_point)
else:
parser.print_help()
sys.exit(1)
| mikanbako/mount-raspberry-pi-image-rootfs | main/mount_raspberry_pi_image_rootfs.py | Python | mit | 5,391 |
balance=4842
annualInterestRate=.2
monthlyPaymentRate=.04
totalPaid=0.0
for month in range(1,13):
print 'Month: '+str(month)
minimumMonthlyPayment=round(balance*monthlyPaymentRate,2)
remainingBalance=balance-minimumMonthlyPayment
interest=round(annualInterestRate/12*remainingBalance,2)
balance=remainingBalance+interest
totalPaid+=minimumMonthlyPayment
print 'Minimum monthly payment : '+str(minimumMonthlyPayment)
print 'Remaining Balance: '+str(balance)
print '\nTotal paid: '+str(totalPaid)
print 'Remaining Balance: '+str(balance)
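# Hypothetical worked example (not part of the original script), month 1:
#     minimum payment = 4842 * 0.04        = 193.68
#     remaining       = 4842 - 193.68      = 4648.32
#     interest        = 0.2 / 12 * 4648.32 = 77.47  (rounded to 2 places)
#     new balance     = 4648.32 + 77.47    = 4725.79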
| arielisidro/myprograms | python/6.00.1x Files/W02P0201.py | Python | gpl-2.0 | 595 |
# -*- extra stuff goes here -*-
def initialize(context):
"""Initializer called when used as a Zope 2 product."""
| UPCnet/transparencia.theme | transparencia/theme/__init__.py | Python | gpl-3.0 | 119 |
# PSTH experiments
#
# Copyright (C) 2010-2012 Huang Xin
#
# See LICENSE.TXT that came with this file.
from __future__ import division
import os
import sys
import time
import Pyro.core
import subprocess
from Experiment import ExperimentConfig,Experiment
class PSTHExperiment(Experiment):
PSTH_SERVER_PROCESS = None
PSTH_SERVER_PORT = 6743
def __init__(self,*args,**kwargs):
super(PSTHExperiment, self).__init__(*args,**kwargs)
self.pyro_source = ''
self.exp_param = ''
def psth_analysis(self, psth_type=None):
#self.psth_server = self.get_psth_server()
try:
self.psth_server = self.get_psth_server()
except Exception,e:
self.logger.error('Failed to get psth app. ' + str(e))
#self.psth_server.start_psth()
try:
self.logger.info('Starting psth data.')
self.psth_server.start_data()
except Exception,e:
self.logger.error('Failed to start psth app. ' + str(e))
try:
self.logger.info('Setting up psth app before stimulation.')
self.pre_stim_setup()
except Exception,e:
self.logger.error('Failed to setup psth app. ' + str(e))
try:
self.wait_for_stim()
except Exception,e:
self.logger.error('Failed to wait for stimulation. ' + str(e))
try:
self.logger.info('Setting up psth app after stimulation.')
self.post_stim_setup()
except Exception,e:
self.logger.error('Failed to setup psth app. ' + str(e))
try:
data = self.psth_server.get_data()
except Exception,e:
self.logger.error('Failed to get data from psth. ' + str(e))
try:
self.log_psth_data(data)
except Exception,e:
self.logger.error('Failed to log psth data. ' + str(e))
try:
results = self.extract_results(data)
except Exception,e:
self.logger.error('Failed to extract psth data. ' + str(e))
try:
chart_file = ExperimentConfig.CELLDIR + os.path.sep + self.exp_name + '.png'
self.logger.info('Exporting chart to: ' + chart_file)
self.psth_server.export_chart(chart_file)
except Exception,e:
self.logger.error('Failed to export psth chart. ' + str(e))
try:
            # wait for completion of preceding pyro operations
time.sleep(3.0)
self.logger.info('Stopping psth data.')
self.psth_server.stop_data()
except Exception,e:
self.logger.error('Failed to stop psth app. ' + str(e))
try:
            # wait for completion of preceding pyro operations
time.sleep(3.0)
self.logger.info('Closing psth server.')
self.psth_server.close()
except Exception,e:
self.logger.error('Failed to close psth server. ' + str(e))
try:
return results
except Exception,e:
self.logger.error('Failed to return psth result. ' + str(e))
def log_psth_data(self, data):
data_file = ExperimentConfig.CELLDIR + os.path.sep + self.exp_name + '.csv'
param = self.exp_param
with open(data_file,'w') as data_output:
if 'param' in data:
data_output.writelines('param,%s\n' %data['param'])
if 'x' in data:
data_output.writelines('%s,%s\n' %(param , ','.join([str(x) for x in data['x']])))
if 'means' in data:
data_output.writelines('means,%s\n' % ','.join([str(mean) for mean in data['means']]))
if 'stds' in data:
data_output.writelines('stds,%s\n' % ','.join([str(std) for std in data['stds']]))
if 'max_param' in data:
data_output.writelines('opt %s,%s\n' %(param , str(data['max_param'])))
if 'max_value' in data:
data_output.writelines('opt rate,%s\n' % str(data['max_value']))
if 'min_param' in data:
data_output.writelines('nul %s,%s\n' %(param , str(data['min_param'])))
if 'max_value' in data:
data_output.writelines('nul rate,%s\n' % str(data['min_value']))
if 'F1/F0' in data:
data_output.writelines('F1/F0,%s\n' % str(data['F1/F0']))
if 'BII' in data:
data_output.writelines('BII,%s\n' % str(data['BII']))
if 'S/N' in data:
data_output.writelines('S/N,%s\n' % str(data['S/N']))
def get_psth_server(self):
self.logger.info('Fetching psth server.')
try:
if PSTHExperiment.PSTH_SERVER_PROCESS.poll() is not None:
self.logger.info('PSTH server is dead.')
raise
except:
self.logger.info('Creating new psth app.')
psth_app_path = os.path.dirname(__file__) + os.path.sep + 'app' + os.path.sep + self.pyro_source
args = [sys.executable, psth_app_path, str(PSTHExperiment.PSTH_SERVER_PORT)]
PSTHExperiment.PSTH_SERVER_PROCESS = subprocess.Popen(args)
time.sleep(3.0)
else:
self.logger.info('Psth app has been launched.')
assert PSTHExperiment.PSTH_SERVER_PROCESS.poll() is None
URI = "PYROLOC://localhost:%d/%s" % (PSTHExperiment.PSTH_SERVER_PORT, 'psth_server')
Pyro.core.initClient()
return Pyro.core.getProxyForURI(URI)
def pre_stim_setup(self):
self.psth_server.set_title(self.exp_name)
def post_stim_setup(self):
pass
def extract_results(self, _data):
raise RuntimeError("Must override extract_results method with exp implementation!")
class ORITunExp(PSTHExperiment):
def __init__(self,eye,params,*args,**kwargs):
super(ORITunExp, self).__init__(*args,**kwargs)
self.pyro_source = 'pyro_psth_tuning.py'
self.stim_source = 'orientation_tuning.py'
self.exp_name = ExperimentConfig.CELLPREFIX + '-ori-tun-' + eye
self.exp_param = 'ori'
self.eye = eye
self.params = params
self.assignments = ["eye = '%s'" %eye]
def run(self):
super(ORITunExp, self).run()
if self.eye == 'left':
self.run_stimulus(left_params=self.params, assignments=self.assignments)
elif self.eye == 'right':
self.run_stimulus(right_params=self.params, assignments=self.assignments)
ori = self.psth_analysis()
return ori
def pre_stim_setup(self):
super(ORITunExp, self).pre_stim_setup()
self.logger.info('Choose no curve fitting for this experiment.')
self.psth_server.check_fitting('none')
def extract_results(self, data):
if 'max_param' not in data:
self.logger.error('Failed to get optimal parameter from %s experiment.' %self.exp_name)
else:
self.logger.info('Get optimal parameter from %s experiment: %f' %(self.exp_name, data['max_param']))
return float(data['max_param'])
class SPFTunExp(PSTHExperiment):
def __init__(self,eye,params,*args,**kwargs):
super(SPFTunExp, self).__init__(*args,**kwargs)
self.pyro_source = 'pyro_psth_tuning.py'
self.stim_source = 'spatial_freq_tuning.py'
self.exp_name = ExperimentConfig.CELLPREFIX + '-spf-tun-' + eye
self.exp_param = 'spf'
self.eye = eye
self.params = params
self.assignments = ["eye = '%s'" %eye]
def run(self):
super(SPFTunExp, self).run()
if self.eye == 'left':
self.run_stimulus(left_params=self.params, assignments=self.assignments)
elif self.eye == 'right':
self.run_stimulus(right_params=self.params, assignments=self.assignments)
spf = self.psth_analysis()
return spf
def pre_stim_setup(self):
super(SPFTunExp, self).pre_stim_setup()
self.logger.info('Choose Gaussian curve fitting.')
self.psth_server.check_fitting('gauss')
def extract_results(self, data):
if 'max_param' not in data:
self.logger.error('Failed to get optimal parameter from %s experiment.' %self.exp_name)
else:
self.logger.info('Get optimal parameter from %s experiment: %f' %(self.exp_name, data['max_param']))
return float(data['max_param'])
class PHATunExp(PSTHExperiment):
def __init__(self,eye,params,*args,**kwargs):
super(PHATunExp, self).__init__(*args,**kwargs)
self.pyro_source = 'pyro_psth_tuning.py'
self.stim_source = 'phase_tuning.py'
self.exp_name = ExperimentConfig.CELLPREFIX + '-pha-tun-' + eye
self.exp_param = 'pha'
self.eye = eye
self.params = params
self.assignments = ["eye = '%s'" %eye]
def run(self):
super(PHATunExp, self).run()
if self.eye == 'left':
self.run_stimulus(left_params=self.params, assignments=self.assignments)
elif self.eye == 'right':
self.run_stimulus(right_params=self.params, assignments=self.assignments)
pha = self.psth_analysis()
return pha
def pre_stim_setup(self):
super(PHATunExp, self).pre_stim_setup()
self.logger.info('Choose no curve fitting for this experiment.')
self.psth_server.check_fitting('none')
def extract_results(self, data):
if 'max_param' not in data:
self.logger.error('Failed to get optimal parameter from %s experiment.' %self.exp_name)
else:
self.logger.info('Get optimal parameter from %s experiment: %f' %(self.exp_name, data['max_param']))
return float(data['max_param'])
class DSPTunExp(PSTHExperiment):
def __init__(self,left_params,right_params,repeats,postfix,*args,**kwargs):
super(DSPTunExp, self).__init__(*args,**kwargs)
self.pyro_source = 'pyro_psth_tuning.py'
self.stim_source = 'disparity_tuning.py'
self.exp_name = ExperimentConfig.CELLPREFIX + '-dsp-tun-' + postfix
self.exp_param = 'dsp'
self.eye = ['left','right']
self.left_params = left_params
self.right_params = right_params
self.repeats = repeats
self.assignments = ['repeats = %d' %repeats]
def run(self):
super(DSPTunExp, self).run()
self.run_stimulus(self.left_params,self.right_params,assignments=self.assignments)
pha = self.psth_analysis()
return pha
def pre_stim_setup(self):
super(DSPTunExp, self).pre_stim_setup()
self.logger.info('Choose Sinusoid curve fitting.')
self.psth_server.check_fitting('sin')
def extract_results(self, data):
if 'max_param' not in data:
self.logger.error('Failed to get optimal parameter from %s experiment.' %self.exp_name)
else:
self.logger.info('Get optimal parameter from %s experiment: %f' %(self.exp_name, data['max_param']))
return float(data['max_param'])
class SpikeLatencyExp(PSTHExperiment):
def __init__(self,eye,params,*args,**kwargs):
super(SpikeLatencyExp, self).__init__(*args,**kwargs)
self.pyro_source = 'pyro_psth_average.py'
self.stim_source = 'rand_phase.py'
self.exp_name = ExperimentConfig.CELLPREFIX + '-latency-' + eye
self.exp_param = 'lat'
self.eye = eye
self.params = params
self.assignments = ["eye = '%s'" %eye]
def run(self):
super(SpikeLatencyExp, self).run()
if self.eye == 'left':
self.run_stimulus(left_params=self.params, assignments=self.assignments)
elif self.eye == 'right':
self.run_stimulus(right_params=self.params, assignments=self.assignments)
latency = self.psth_analysis()
return latency
def pre_stim_setup(self):
super(SpikeLatencyExp, self).pre_stim_setup()
def extract_results(self, data):
if 'maxima' not in data:
self.logger.error('Failed to get spike latency from %s experiment.' %self.exp_name)
else:
first_peak = data['maxima_index'][0]
self.logger.info('Get spike latency from %s experiment: %f' %(self.exp_name, first_peak))
return first_peak/1000.0
def log_psth_data(self, data):
data_file = ExperimentConfig.CELLDIR + os.path.sep + self.exp_name + '.csv'
data_lines = ''
if 'time' in data and 'psth' in data:
data_lines += 'Time,Value\n'
for psth_time, psth_value in zip(data['time'], data['psth']):
data_lines += '{0},{1:.2f}\n'.format(psth_time, psth_value)
extrima_lines = ''
if 'maxima_indices' in data and 'maxima' in data:
extrima_lines += 'Maxima,Value\n'
for maxima_time,maxima_value in zip(data['maxima_indices'],data['maxima']):
extrima_lines += '{0},{1:.2f}\n'.format(maxima_time,maxima_value)
if 'minima_indices' in data and 'minima' in data:
extrima_lines += 'Minima,Value\n'
for minima_time,minima_value in zip(data['minima_indices'],data['minima']):
extrima_lines += '{0},{1:.2f}\n'.format(minima_time,minima_value)
with open(data_file,'w') as data_output:
data_output.writelines(extrima_lines + data_lines)
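# Hypothetical usage sketch (not part of the original module): a tuning
# experiment is driven by instantiating a concrete subclass and calling
# run(), which launches the stimulus, runs the PSTH analysis, and returns
# the extracted optimal parameter, e.g. (stim_params is assumed):
#     exp = ORITunExp('left', stim_params)
#     optimal_ori = exp.run()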
| chrox/RealTimeElectrophy | Experimenter/Experiments/PSTHExperiment.py | Python | bsd-2-clause | 13,693 |
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for Volume replication code.
"""
import mock
from oslo.config import cfg
from cinder import context
from cinder import db
from cinder import exception
from cinder.openstack.common import importutils
from cinder import test
from cinder.tests import utils as test_utils
CONF = cfg.CONF
class VolumeReplicationTestCase(test.TestCase):
def setUp(self):
super(VolumeReplicationTestCase, self).setUp()
self.ctxt = context.RequestContext('user', 'fake', False)
self.adm_ctxt = context.RequestContext('admin', 'fake', True)
self.manager = importutils.import_object(CONF.volume_manager)
self.manager.host = 'test_host'
self.manager.stats = {'allocated_capacity_gb': 0}
self.driver_patcher = mock.patch.object(self.manager, 'driver')
self.driver = self.driver_patcher.start()
@mock.patch('cinder.utils.require_driver_initialized')
def test_promote_replica_uninit_driver(self, _init):
"""Test promote replication when driver is not initialized."""
_init.side_effect = exception.DriverNotInitialized
vol = test_utils.create_volume(self.ctxt,
status='available',
replication_status='active')
self.driver.promote_replica.return_value = None
self.assertRaises(exception.DriverNotInitialized,
self.manager.promote_replica,
self.adm_ctxt,
vol['id'])
def test_promote_replica(self):
"""Test promote replication."""
vol = test_utils.create_volume(self.ctxt,
status='available',
replication_status='active')
self.driver.promote_replica.return_value = \
{'replication_status': 'inactive'}
self.manager.promote_replica(self.adm_ctxt, vol['id'])
vol_after = db.volume_get(self.ctxt, vol['id'])
self.assertEqual(vol_after['replication_status'], 'inactive')
def test_promote_replica_fail(self):
"""Test promote replication when promote fails."""
vol = test_utils.create_volume(self.ctxt,
status='available',
replication_status='active')
self.driver.promote_replica.side_effect = exception.CinderException
self.assertRaises(exception.CinderException,
self.manager.promote_replica,
self.adm_ctxt,
vol['id'])
def test_reenable_replication(self):
"""Test reenable replication."""
vol = test_utils.create_volume(self.ctxt,
status='available',
replication_status='error')
self.driver.reenable_replication.return_value = \
{'replication_status': 'copying'}
self.manager.reenable_replication(self.adm_ctxt, vol['id'])
vol_after = db.volume_get(self.ctxt, vol['id'])
self.assertEqual(vol_after['replication_status'], 'copying')
@mock.patch('cinder.utils.require_driver_initialized')
def test_reenable_replication_uninit_driver(self, _init):
"""Test reenable replication when driver is not initialized."""
_init.side_effect = exception.DriverNotInitialized
vol = test_utils.create_volume(self.ctxt,
status='available',
replication_status='error')
self.assertRaises(exception.DriverNotInitialized,
self.manager.reenable_replication,
self.adm_ctxt,
vol['id'])
def test_reenable_replication_fail(self):
"""Test promote replication when driver is not initialized."""
vol = test_utils.create_volume(self.ctxt,
status='available',
replication_status='error')
self.driver.reenable_replication.side_effect = \
exception.CinderException
self.assertRaises(exception.CinderException,
self.manager.reenable_replication,
self.adm_ctxt,
vol['id'])
| alex8866/cinder | cinder/tests/test_replication.py | Python | apache-2.0 | 4,970 |
# Authors:
# Jason Gerard DeRose <[email protected]>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test the `ipalib.errors` module.
"""
# FIXME: Pylint errors
# pylint: disable=no-member
import re
import inspect
import pytest
import six
from ipatests.util import assert_equal, raises
from ipalib import errors
from ipalib.constants import TYPE_ERROR
if six.PY3:
unicode = str
pytestmark = pytest.mark.tier0
class PrivateExceptionTester(object):
_klass = None
__klass = None
def __get_klass(self):
if self.__klass is None:
self.__klass = self._klass
assert issubclass(self.__klass, Exception)
assert issubclass(self.__klass, errors.PrivateError)
assert not issubclass(self.__klass, errors.PublicError)
return self.__klass
klass = property(__get_klass)
def new(self, **kw):
for (key, value) in kw.items():
assert not hasattr(self.klass, key), key
inst = self.klass(**kw)
assert isinstance(inst, Exception)
assert isinstance(inst, errors.PrivateError)
assert isinstance(inst, self.klass)
assert not isinstance(inst, errors.PublicError)
for (key, value) in kw.items():
assert getattr(inst, key) is value
assert str(inst) == self.klass.format % kw
assert inst.message == str(inst)
return inst
class test_PrivateError(PrivateExceptionTester):
"""
Test the `ipalib.errors.PrivateError` exception.
"""
_klass = errors.PrivateError
def test_init(self):
"""
Test the `ipalib.errors.PrivateError.__init__` method.
"""
inst = self.klass(key1='Value 1', key2='Value 2')
assert inst.key1 == 'Value 1'
assert inst.key2 == 'Value 2'
assert str(inst) == ''
# Test subclass and use of format:
class subclass(self.klass):
format = '%(true)r %(text)r %(number)r'
kw = dict(true=True, text='Hello!', number=18)
inst = subclass(**kw)
assert inst.true is True
assert inst.text is kw['text']
assert inst.number is kw['number']
assert str(inst) == subclass.format % kw
# Test via PrivateExceptionTester.new()
inst = self.new(**kw)
assert isinstance(inst, self.klass)
assert inst.true is True
assert inst.text is kw['text']
assert inst.number is kw['number']
class test_SubprocessError(PrivateExceptionTester):
"""
Test the `ipalib.errors.SubprocessError` exception.
"""
_klass = errors.SubprocessError
def test_init(self):
"""
Test the `ipalib.errors.SubprocessError.__init__` method.
"""
bin_false = '/bin/false'
inst = self.new(returncode=1, argv=(bin_false,))
assert inst.returncode == 1
assert inst.argv == (bin_false,)
assert str(inst) == "return code 1 from ('{}',)".format(bin_false)
assert inst.message == str(inst)
class test_PluginSubclassError(PrivateExceptionTester):
"""
Test the `ipalib.errors.PluginSubclassError` exception.
"""
_klass = errors.PluginSubclassError
def test_init(self):
"""
Test the `ipalib.errors.PluginSubclassError.__init__` method.
"""
inst = self.new(plugin='bad', bases=('base1', 'base2'))
assert inst.plugin == 'bad'
assert inst.bases == ('base1', 'base2')
assert str(inst) == \
"'bad' not subclass of any base in ('base1', 'base2')"
assert inst.message == str(inst)
class test_PluginDuplicateError(PrivateExceptionTester):
"""
Test the `ipalib.errors.PluginDuplicateError` exception.
"""
_klass = errors.PluginDuplicateError
def test_init(self):
"""
Test the `ipalib.errors.PluginDuplicateError.__init__` method.
"""
inst = self.new(plugin='my_plugin')
assert inst.plugin == 'my_plugin'
assert str(inst) == "'my_plugin' was already registered"
assert inst.message == str(inst)
class test_PluginOverrideError(PrivateExceptionTester):
"""
Test the `ipalib.errors.PluginOverrideError` exception.
"""
_klass = errors.PluginOverrideError
def test_init(self):
"""
Test the `ipalib.errors.PluginOverrideError.__init__` method.
"""
inst = self.new(base='Base', name='cmd', plugin='my_cmd')
assert inst.base == 'Base'
assert inst.name == 'cmd'
assert inst.plugin == 'my_cmd'
assert str(inst) == "unexpected override of Base.cmd with 'my_cmd'"
assert inst.message == str(inst)
class test_PluginMissingOverrideError(PrivateExceptionTester):
"""
Test the `ipalib.errors.PluginMissingOverrideError` exception.
"""
_klass = errors.PluginMissingOverrideError
def test_init(self):
"""
Test the `ipalib.errors.PluginMissingOverrideError.__init__` method.
"""
inst = self.new(base='Base', name='cmd', plugin='my_cmd')
assert inst.base == 'Base'
assert inst.name == 'cmd'
assert inst.plugin == 'my_cmd'
assert str(inst) == "Base.cmd not registered, cannot override with 'my_cmd'"
assert inst.message == str(inst)
##############################################################################
# Unit tests for public errors:
class PublicExceptionTester(object):
_klass = None
__klass = None
def __get_klass(self):
if self.__klass is None:
self.__klass = self._klass
assert issubclass(self.__klass, Exception)
assert issubclass(self.__klass, errors.PublicError)
assert not issubclass(self.__klass, errors.PrivateError)
assert type(self.__klass.errno) is int
assert 900 <= self.__klass.errno <= 5999
return self.__klass
klass = property(__get_klass)
def new(self, format=None, message=None, **kw):
# Test that TypeError is raised if message isn't unicode:
e = raises(TypeError, self.klass, message=b'The message')
assert str(e) == TYPE_ERROR % ('message', unicode, b'The message', bytes)
# Test the instance:
for (key, value) in kw.items():
assert not hasattr(self.klass, key), key
inst = self.klass(format=format, message=message, **kw)
for required_class in self.required_classes:
assert isinstance(inst, required_class)
assert isinstance(inst, self.klass)
assert not isinstance(inst, errors.PrivateError)
for (key, value) in kw.items():
assert getattr(inst, key) is value
return inst
class test_PublicError(PublicExceptionTester):
"""
Test the `ipalib.errors.PublicError` exception.
"""
_klass = errors.PublicError
required_classes = Exception, errors.PublicError
def test_init(self):
message = u'The translated, interpolated message'
format = 'key=%(key1)r and key2=%(key2)r'
val1 = u'Value 1'
val2 = u'Value 2'
kw = dict(key1=val1, key2=val2)
# Test with format=str, message=None
inst = self.klass(format, **kw)
assert inst.format is format
assert_equal(inst.message, format % kw)
assert inst.forwarded is False
assert inst.key1 is val1
assert inst.key2 is val2
# Test with format=None, message=unicode
inst = self.klass(message=message, **kw)
assert inst.format is None
assert inst.message is message
assert inst.strerror is message
assert inst.forwarded is True
assert inst.key1 is val1
assert inst.key2 is val2
# Test with format=None, message=bytes
e = raises(TypeError, self.klass, message=b'the message', **kw)
assert str(e) == TYPE_ERROR % ('message', unicode, b'the message', bytes)
# Test with format=None, message=None
e = raises(ValueError, self.klass, **kw)
assert (str(e) == '%s.format is None yet format=None, message=None' %
self.klass.__name__)
######################################
# Test via PublicExceptionTester.new()
# Test with format=str, message=None
inst = self.new(format, **kw)
assert isinstance(inst, self.klass)
assert inst.format is format
assert_equal(inst.message, format % kw)
assert inst.forwarded is False
assert inst.key1 is val1
assert inst.key2 is val2
# Test with format=None, message=unicode
inst = self.new(message=message, **kw)
assert isinstance(inst, self.klass)
assert inst.format is None
assert inst.message is message
assert inst.strerror is message
assert inst.forwarded is True
assert inst.key1 is val1
assert inst.key2 is val2
##################
# Test a subclass:
class subclass(self.klass):
format = '%(true)r %(text)r %(number)r'
kw = dict(true=True, text=u'Hello!', number=18)
# Test with format=str, message=None
e = raises(ValueError, subclass, format, **kw)
assert str(e) == 'non-generic %r needs format=None; got format=%r' % (
'subclass', format)
# Test with format=None, message=None:
inst = subclass(**kw)
assert inst.format is subclass.format
assert_equal(inst.message, subclass.format % kw)
assert inst.forwarded is False
assert inst.true is True
assert inst.text is kw['text']
assert inst.number is kw['number']
# Test with format=None, message=unicode:
inst = subclass(message=message, **kw)
assert inst.format is subclass.format
assert inst.message is message
assert inst.strerror is message
assert inst.forwarded is True
assert inst.true is True
assert inst.text is kw['text']
assert inst.number is kw['number']
# Test with instructions:
# first build up "instructions", then get error and search for
# lines of instructions appended to the end of the strerror
# despite the parameter 'instructions' not existing in the format
instructions = u"The quick brown fox jumps over the lazy dog".split()
        # this expression checks that each word of instructions
        # appears in the string on its own line, in the right order
regexp = re.compile('(?ims).*' +
''.join('(%s).*' % (x) for x in instructions) +
'$')
inst = subclass(instructions=instructions, **kw)
assert inst.format is subclass.format
assert_equal(inst.instructions, unicode(instructions))
inst_match = regexp.match(inst.strerror).groups()
        assert_equal(list(inst_match), list(instructions))
class BaseMessagesTest(object):
"""Generic test for all of a module's errors or messages
"""
def test_public_messages(self):
i = 0
for klass in self.message_list:
for required_class in self.required_classes:
assert issubclass(klass, required_class)
assert type(klass.errno) is int
assert klass.errno in self.errno_range
doc = inspect.getdoc(klass)
assert doc is not None, 'need class docstring for %s' % klass.__name__
m = re.match(r'^\*{2}(\d+)\*{2} ', doc)
assert m is not None, "need '**ERRNO**' in %s docstring" % klass.__name__
errno = int(m.group(1))
assert errno == klass.errno, (
'docstring=%r but errno=%r in %s' % (errno, klass.errno, klass.__name__)
)
self.extratest(klass)
# Test format
if klass.format is not None:
assert klass.format is self.texts[i]
i += 1
def extratest(self, cls):
pass
class test_PublicErrors(object):
message_list = errors.public_errors
errno_range = list(range(900, 5999))
required_classes = (Exception, errors.PublicError)
texts = errors._texts
def extratest(self, cls):
assert not issubclass(cls, errors.PrivateError)
| realsobek/freeipa | ipatests/test_ipalib/test_errors.py | Python | gpl-3.0 | 12,911 |
from django.db import models
class ApplicationActiveManager(models.Manager):
def get_query_set(self):
return (super(ApplicationActiveManager, self).get_query_set()
.filter(~models.Q(status=self.model.REMOVED)))
class ApplicationPublishedManager(models.Manager):
def get_query_set(self):
return (super(ApplicationPublishedManager, self).get_query_set()
.filter(status=self.model.PUBLISHED))
def get_featured(self):
try:
return (self.get_queryset().filter(is_featured=True)
.order_by('-is_featured', '-created')[0])
except IndexError:
return None
def get_homepage(self):
try:
return (self.get_queryset().filter(is_homepage=True)
.order_by('-is_featured', '-created')[0])
except IndexError:
return None
class ApplicationVersionManager(models.Manager):
def create_version(self, application):
"""Generates an ``ApplicationVersion`` of the given ``application``."""
data = {
'application': application,
'name': application.name,
'stage': application.stage,
'website': application.website,
'image': application.image,
'summary': application.summary,
'impact_statement': application.impact_statement,
'assistance': application.assistance,
'team_description': application.team_description,
'acknowledgments': application.acknowledgments,
'notes': application.notes,
}
return self.create(**data)
def get_latest_version(self, application):
results = self.filter(application=application).order_by('-created')
if results:
return results[0]
return None
| us-ignite/us_ignite | us_ignite/apps/managers.py | Python | bsd-3-clause | 1,843 |
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from graphy.backends import google_chart_api
print '<html>'
print '<h2>Oh no! Traffic is dropping off, something must be wrong!</h2>'
traffic = [578, 579, 580, 550, 545, 552]
chart = google_chart_api.LineChart(traffic)
print chart.display.Img(100, 50)
print """<p>But wait, that was automatically scaled to fill the entire
vertical range. We should scale from zero instead:</p>"""
chart.left.min = 0
chart.left.max = 600
print chart.display.Img(100, 50)
print """<p>Also, maybe some labels would help out here:</p>"""
chart.left.labels = range(0, 601, 200)
chart.left.label_positions = chart.left.labels
print chart.display.Img(100, 50)
| Acidburn0zzz/graphy | examples/traffic.py | Python | apache-2.0 | 1,252 |
# encoding: utf-8
import datetime
__all__ = [
'info',
]
def info():
return {
'birthday': datetime.date(1995, 12, 3),
'class': 10,
'family_name_en': u'iriyama',
'family_name_kana': u'いりやま',
'first_name_en': u'anna',
'first_name_kana': u'あんな',
'graduate_date': None,
'hometown': u'千葉',
'name_en': u'Iriyama Anna',
'name_ja': u'入山杏奈',
'name_kana': u'いりやま あんな',
'nick': u'あんにん',
'team': u'4',
}
| moriyoshi/pyakb48 | akb48/member/iriyama_anna.py | Python | mit | 650 |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The super-group for the logging CLI."""
import argparse
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import apis
from googlecloudsdk.core import properties
from googlecloudsdk.core import resolvers
from googlecloudsdk.core import resources
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class Logging(base.Group):
"""Manage Stackdriver Logging."""
def Filter(self, context, args):
"""Modify the context that will be given to this group's commands when run.
Args:
context: The current context.
args: The argparse namespace given to the corresponding .Run() invocation.
Returns:
The updated context.
"""
# All logging collections use projectId, so we can set a default value.
resources.REGISTRY.SetParamDefault(
api='logging', collection=None, param='projectsId',
resolver=resolvers.FromProperty(properties.VALUES.core.project))
context['logging_resources'] = resources.REGISTRY
return context
| KaranToor/MA450 | google-cloud-sdk/lib/surface/logging/__init__.py | Python | apache-2.0 | 1,634 |
"""The test for the sql sensor platform."""
import unittest
import pytest
import voluptuous as vol
from homeassistant.components.sensor.sql import validate_sql_select
from homeassistant.setup import setup_component
from homeassistant.const import STATE_UNKNOWN
from tests.common import get_test_home_assistant
class TestSQLSensor(unittest.TestCase):
"""Test the SQL sensor."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_query(self):
"""Test the SQL sensor."""
config = {
'sensor': {
'platform': 'sql',
'db_url': 'sqlite://',
'queries': [{
'name': 'count_tables',
'query': 'SELECT 5 as value',
'column': 'value',
}]
}
}
assert setup_component(self.hass, 'sensor', config)
state = self.hass.states.get('sensor.count_tables')
assert state.state == '5'
assert state.attributes['value'] == 5
def test_invalid_query(self):
"""Test the SQL sensor for invalid queries."""
with pytest.raises(vol.Invalid):
validate_sql_select("DROP TABLE *")
config = {
'sensor': {
'platform': 'sql',
'db_url': 'sqlite://',
'queries': [{
'name': 'count_tables',
'query': 'SELECT * value FROM sqlite_master;',
'column': 'value',
}]
}
}
assert setup_component(self.hass, 'sensor', config)
state = self.hass.states.get('sensor.count_tables')
self.assertEqual(state.state, STATE_UNKNOWN)
| persandstrom/home-assistant | tests/components/sensor/test_sql.py | Python | apache-2.0 | 1,910 |
try:
from . import generic as g
except BaseException:
import generic as g
class RegistrationTest(g.unittest.TestCase):
def test_procrustes(self):
# create random points in space
points_a = (g.np.random.random((1000, 3)) - .5) * 1000
# create a random transform
matrix = g.trimesh.transformations.random_rotation_matrix()
# add a translation component to transform
matrix[:3, 3] = g.np.random.random(3) * 100
# apply transform to points A
points_b = g.trimesh.transform_points(points_a, matrix)
# run the solver
(matrixN,
transformed,
cost) = g.trimesh.registration.procrustes(points_a, points_b)
# the points should be identical
assert(cost < 0.01)
# it should have found the matrix we used
assert g.np.allclose(matrixN, matrix)
def test_icp_mesh(self):
# see if ICP alignment works with meshes
m = g.trimesh.creation.box()
X = m.sample(10)
X = X + [0.1, 0.1, 0.1]
matrix, transformed, cost = g.trimesh.registration.icp(
X, m, scale=False)
assert(cost < 0.01)
def test_icp_points(self):
# see if ICP alignment works with point clouds
# create random points in space
points_a = (g.np.random.random((1000, 3)) - .5) * 1000
# create a random transform
# matrix = g.trimesh.transformations.random_rotation_matrix()
# create a small transform
# ICP will not work at all with large transforms
matrix = g.trimesh.transformations.rotation_matrix(
g.np.radians(1.0),
[0, 0, 1])
        # take a few randomly chosen points and make
        # sure the order is permuted
index = g.np.random.choice(g.np.arange(len(points_a)), 20)
# transform and apply index
points_b = g.trimesh.transform_points(points_a[index], matrix)
        # run the solver
matrixN, transformed, cost = g.trimesh.registration.icp(points_b,
points_a)
assert cost < 1e-3
assert g.np.allclose(matrix,
g.np.linalg.inv(matrixN))
assert g.np.allclose(transformed, points_a[index])
def test_mesh(self):
noise = .05
extents = [6, 12, 3]
# create the mesh as a simple box
mesh = g.trimesh.creation.box(extents=extents)
# subdivide until we have more faces than we want
for i in range(3):
mesh = mesh.subdivide()
        # permutate the mesh and apply random vertex noise
mesh = mesh.permutate.noise(noise)
        # random rotation with translation
transform = g.trimesh.transformations.random_rotation_matrix()
transform[:3, 3] = (g.np.random.random(3) - .5) * 1000
mesh.apply_transform(transform)
scan = mesh
# create a "true" mesh
truth = g.trimesh.creation.box(extents=extents)
for a, b in [[truth, scan], [scan, truth]]:
a_to_b, cost = a.register(b)
a_check = a.copy()
a_check.apply_transform(a_to_b)
            assert g.np.linalg.norm(a_check.centroid - b.centroid) < (noise * 2)
# find the distance from the truth mesh to each scan vertex
distance = a_check.nearest.on_surface(b.vertices)[1]
assert distance.max() < (noise * 2)
# try our registration with points
points = g.trimesh.transform_points(
scan.sample(100),
matrix=g.trimesh.transformations.random_rotation_matrix())
truth_to_points, cost = truth.register(points)
truth.apply_transform(truth_to_points)
distance = truth.nearest.on_surface(points)[1]
assert distance.mean() < noise
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
| dajusc/trimesh | tests/test_registration.py | Python | mit | 3,980 |
# Given a set of distinct integers, nums, return all possible subsets.
# Note: The solution set must not contain duplicate subsets.
# For example,
# If nums = [1,2,3], a solution is:
# [
# [3],
# [1],
# [2],
# [1,2,3],
# [1,3],
# [2,3],
# [1,2],
# []
# ]
# O(2**n)
def subsets(nums):
res = []
backtrack(res, nums, [], 0)
return res
def backtrack(res, nums, stack, pos):
if pos == len(nums):
res.append(list(stack))
else:
# take nums[pos]
stack.append(nums[pos])
backtrack(res, nums, stack, pos + 1)
stack.pop()
        # don't take nums[pos]
backtrack(res, nums, stack, pos + 1)
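# For example, with nums = [1, 2] the take/don't-take branching above
# emits the subsets in this order: [1, 2], [1], [2], []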
# simplified backtrack
# def backtrack(res, nums, cur, pos):
# if pos >= len(nums):
# res.append(cur)
# else:
# backtrack(res, nums, cur+[nums[pos]], pos+1)
# backtrack(res, nums, cur, pos+1)
# Iteratively
def subsets2(nums):
res = [[]]
for num in sorted(nums):
res += [item + [num] for item in res]
return res
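# A small trace of the iterative version, folding nums = [1, 2] into res:
#   start:   [[]]
#   after 1: [[], [1]]
#   after 2: [[], [1], [2], [1, 2]]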
test = [1, 2, 3]
print(test)
print(subsets(test))
| marcosfede/algorithms | backtrack/subsets.py | Python | gpl-3.0 | 1,049 |
import random
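# Number-guessing game (the user-facing strings below are Portuguese):
# jogo() picks a random number in [0, max_] and loops on the player's
# guesses ("Palpite"), hinting higher ("maior") or lower ("menor");
# principal() replays until the answer no longer starts with "s" ("sim").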
def jogo(max_=1000):
numero = random.randint(0, max_)
print("Escolhi um número entre 0 e {}, tente adivinhar:".format(max_))
tentativas = 1
while True:
chute = input("Palpite: ")
chute = int(chute)
if chute == numero:
print("Parabéns, você acertou em {} tentativas".format(tentativas))
break
if chute < numero:
print("Tente um número maior")
else:
print("Tente um número menor")
tentativas += 1
def principal():
while True:
jogo(30)
opcao = input("Quer jogar de novo?")
if opcao[0].lower() != "s":
break
principal() | lhc/lhcpython | adivinha.py | Python | mit | 692 |
# -*- coding: utf-8 -*-
# Natural Language Toolkit: RSLP Stemmer
#
# Copyright (C) 2001-2016 NLTK Project
# Author: Tiago Tresoldi <[email protected]>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
# This code is based on the algorithm presented in the paper "A Stemming
# Algorithm for the Portuguese Language" by Viviane Moreira Orengo and
# Christian Huyck, which unfortunately I had no access to. The code is a
# Python version, with some minor modifications of mine, to the description
# presented at http://www.webcitation.org/5NnvdIzOb and to the C source code
# available at http://www.inf.ufrgs.br/~arcoelho/rslp/integrando_rslp.html.
# Please note that this stemmer is intended for demonstration and educational
# purposes only. Feel free to write me for any comments, including the
# development of a different and/or better stemmer for Portuguese. I also
# suggest using NLTK's mailing list for Portuguese for any discussion.
from __future__ import print_function, unicode_literals
from nltk.data import load
from nltk.stem.api import StemmerI
class RSLPStemmer(StemmerI):
"""
A stemmer for Portuguese.
>>> from nltk.stem import RSLPStemmer
>>> st = RSLPStemmer()
>>> # opening lines of Erico Verissimo's "Música ao Longe"
>>> text = '''
... Clarissa risca com giz no quadro-negro a paisagem que os alunos
... devem copiar . Uma casinha de porta e janela , em cima duma
... coxilha .'''
>>> for token in text.split():
... print(st.stem(token))
clariss risc com giz no quadro-negr a pais que os alun dev copi .
uma cas de port e janel , em cim dum coxilh .
"""
    def __init__(self):
        self._model = []
        self._model.append(self.read_rule("step0.pt"))
        self._model.append(self.read_rule("step1.pt"))
        self._model.append(self.read_rule("step2.pt"))
        self._model.append(self.read_rule("step3.pt"))
        self._model.append(self.read_rule("step4.pt"))
        self._model.append(self.read_rule("step5.pt"))
        self._model.append(self.read_rule("step6.pt"))
    def read_rule(self, filename):
rules = load('nltk:stemmers/rslp/' + filename, format='raw').decode("utf8")
lines = rules.split("\n")
lines = [line for line in lines if line != ""] # remove blank lines
lines = [line for line in lines if line[0] != "#"] # remove comments
# NOTE: a simple but ugly hack to make this parser happy with double '\t's
lines = [line.replace("\t\t", "\t") for line in lines]
# parse rules
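        # each tab-separated rule line has the shape (inferred from the
        # parsing below; the surrounding quotes are stripped off):
        #   "suffix"<TAB>min_stem_size<TAB>"replacement"<TAB>"exc1","exc2",...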
rules = []
for line in lines:
rule = []
tokens = line.split("\t")
            # text to be searched for at the end of the string
            rule.append(tokens[0][1:-1])  # remove quotes
            # minimum stem size to perform the replacement
            rule.append(int(tokens[1]))
            # text to be replaced into
            rule.append(tokens[2][1:-1])  # remove quotes
            # exceptions to this rule
            rule.append([token[1:-1] for token in tokens[3].split(",")])
# append to the results
rules.append(rule)
return rules
def stem(self, word):
word = word.lower()
# the word ends in 's'? apply rule for plural reduction
if word[-1] == "s":
word = self.apply_rule(word, 0)
# the word ends in 'a'? apply rule for feminine reduction
if word[-1] == "a":
word = self.apply_rule(word, 1)
# augmentative reduction
word = self.apply_rule(word, 3)
# adverb reduction
word = self.apply_rule(word, 2)
# noun reduction
prev_word = word
word = self.apply_rule(word, 4)
if word == prev_word:
# verb reduction
prev_word = word
word = self.apply_rule(word, 5)
if word == prev_word:
# vowel removal
word = self.apply_rule(word, 6)
return word
def apply_rule(self, word, rule_index):
rules = self._model[rule_index]
for rule in rules:
suffix_length = len(rule[0])
if word[-suffix_length:] == rule[0]: # if suffix matches
if len(word) >= suffix_length + rule[1]: # if we have minimum size
if word not in rule[3]: # if not an exception
word = word[:-suffix_length] + rule[2]
break
return word
| adazey/Muzez | libs/nltk/stem/rslp.py | Python | gpl-3.0 | 5,602 |
import numpy as np
import pandas.util.testing as tm
from pandas import (DataFrame, Series, DatetimeIndex, MultiIndex, Index,
date_range)
from .pandas_vb_common import setup, lib # noqa
class Reindex(object):
goal_time = 0.2
def setup(self):
rng = DatetimeIndex(start='1/1/1970', periods=10000, freq='1min')
self.df = DataFrame(np.random.rand(10000, 10), index=rng,
columns=range(10))
self.df['foo'] = 'bar'
self.rng_subset = Index(rng[::2])
self.df2 = DataFrame(index=range(10000),
data=np.random.rand(10000, 30), columns=range(30))
N = 5000
K = 200
level1 = tm.makeStringIndex(N).values.repeat(K)
level2 = np.tile(tm.makeStringIndex(K).values, N)
index = MultiIndex.from_arrays([level1, level2])
self.s = Series(np.random.randn(N * K), index=index)
self.s_subset = self.s[::2]
def time_reindex_dates(self):
self.df.reindex(self.rng_subset)
def time_reindex_columns(self):
self.df2.reindex(columns=self.df.columns[1:5])
def time_reindex_multiindex(self):
self.s.reindex(self.s_subset.index)
class ReindexMethod(object):
goal_time = 0.2
params = ['pad', 'backfill']
param_names = ['method']
def setup(self, method):
N = 100000
self.idx = date_range('1/1/2000', periods=N, freq='1min')
self.ts = Series(np.random.randn(N), index=self.idx)[::2]
def time_reindex_method(self, method):
self.ts.reindex(self.idx, method=method)
class Fillna(object):
goal_time = 0.2
params = ['pad', 'backfill']
param_names = ['method']
def setup(self, method):
N = 100000
self.idx = date_range('1/1/2000', periods=N, freq='1min')
ts = Series(np.random.randn(N), index=self.idx)[::2]
self.ts_reindexed = ts.reindex(self.idx)
self.ts_float32 = self.ts_reindexed.astype('float32')
def time_reindexed(self, method):
self.ts_reindexed.fillna(method=method)
def time_float_32(self, method):
self.ts_float32.fillna(method=method)
class LevelAlign(object):
goal_time = 0.2
def setup(self):
self.index = MultiIndex(
levels=[np.arange(10), np.arange(100), np.arange(100)],
labels=[np.arange(10).repeat(10000),
np.tile(np.arange(100).repeat(100), 10),
np.tile(np.tile(np.arange(100), 100), 10)])
self.df = DataFrame(np.random.randn(len(self.index), 4),
index=self.index)
self.df_level = DataFrame(np.random.randn(100, 4),
index=self.index.levels[1])
def time_align_level(self):
self.df.align(self.df_level, level=1, copy=False)
def time_reindex_level(self):
self.df_level.reindex(self.index, level=1)
class DropDuplicates(object):
goal_time = 0.2
params = [True, False]
param_names = ['inplace']
def setup(self, inplace):
N = 10000
K = 10
key1 = tm.makeStringIndex(N).values.repeat(K)
key2 = tm.makeStringIndex(N).values.repeat(K)
self.df = DataFrame({'key1': key1, 'key2': key2,
'value': np.random.randn(N * K)})
self.df_nan = self.df.copy()
self.df_nan.iloc[:10000, :] = np.nan
self.s = Series(np.random.randint(0, 1000, size=10000))
self.s_str = Series(np.tile(tm.makeStringIndex(1000).values, 10))
N = 1000000
K = 10000
key1 = np.random.randint(0, K, size=N)
self.df_int = DataFrame({'key1': key1})
self.df_bool = DataFrame(np.random.randint(0, 2, size=(K, 10),
dtype=bool))
def time_frame_drop_dups(self, inplace):
self.df.drop_duplicates(['key1', 'key2'], inplace=inplace)
def time_frame_drop_dups_na(self, inplace):
self.df_nan.drop_duplicates(['key1', 'key2'], inplace=inplace)
def time_series_drop_dups_int(self, inplace):
self.s.drop_duplicates(inplace=inplace)
def time_series_drop_dups_string(self, inplace):
self.s_str.drop_duplicates(inplace=inplace)
def time_frame_drop_dups_int(self, inplace):
self.df_int.drop_duplicates(inplace=inplace)
def time_frame_drop_dups_bool(self, inplace):
self.df_bool.drop_duplicates(inplace=inplace)
class Align(object):
# blog "pandas escaped the zoo"
goal_time = 0.2
def setup(self):
n = 50000
indices = tm.makeStringIndex(n)
subsample_size = 40000
self.x = Series(np.random.randn(n), indices)
self.y = Series(np.random.randn(subsample_size),
index=np.random.choice(indices, subsample_size,
replace=False))
def time_align_series_irregular_string(self):
self.x + self.y
class LibFastZip(object):
goal_time = 0.2
def setup(self):
N = 10000
K = 10
key1 = tm.makeStringIndex(N).values.repeat(K)
key2 = tm.makeStringIndex(N).values.repeat(K)
col_array = np.vstack([key1, key2, np.random.randn(N * K)])
col_array2 = col_array.copy()
col_array2[:, :10000] = np.nan
self.col_array_list = list(col_array)
self.col_array_list2 = list(col_array2)
def time_lib_fast_zip(self):
lib.fast_zip(self.col_array_list)
def time_lib_fast_zip_fillna(self):
lib.fast_zip_fillna(self.col_array_list2)
| zfrenchee/pandas | asv_bench/benchmarks/reindex.py | Python | bsd-3-clause | 5,590 |
import json
import sys
keys = ['id_beneficiaire', 'beneficiaire', 'descriptif', 'adresse', 'programme_budgetaire', 'id_acteur', 'civilite', 'nom', 'prenom', 'departement', 'groupe']
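# each input file is expected to map an id to a record carrying the keys
# above, e.g. (a hypothetical, trimmed sample):
#   {"1": {"id_beneficiaire": "123", "beneficiaire": "...", ...}}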
for key in keys:
    sys.stdout.write(key + ";")
print("")
for arg in sys.argv[1:]:
    myfile = open(arg)
    data = json.load(myfile)
    for (num, subvention) in data.items():
        for key in keys:
            if subvention[key]:
                sys.stdout.write(subvention[key].encode('utf-8').replace('\n', ' - ').replace('"', '') + ";")
            else:
                sys.stdout.write(";")
        print("")
| regardscitoyens/reserveparlementaire_parser | bin/json2csv.py | Python | gpl-3.0 | 563 |
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
from superdesk.resource import Resource
logger = logging.getLogger(__name__)
class SubscriberTransmitReferenceResource(Resource):
schema = {
# superdesk id of the item
'item_id': {
'type': 'string'
},
'subscriber_id': Resource.rel('subscribers'),
# reference_id points to the unique id in the subscriber system
'reference_id': {
'type': 'string'
},
'extra': {'type': 'dict'}
}
internal_resource = True
mongo_indexes = {
'item_id_1': [('item_id', 1)],
'subscriber_id_1': [('subscriber_id', 1)],
'reference_id_1': [('reference_id', 1)]
}
item_methods = []
resource_methods = []
| mdhaman/superdesk-aap | server/aap/subscriber_transmit_references/resource.py | Python | agpl-3.0 | 1,044 |
class Pdb:
def __init__(self, file_name):
self.file_name = file_name
self.atoms = []
self.read_pdb_file(self.file_name)
    def read_pdb_file(self, file_name):
        with open(file_name, 'r') as f:
            for line in f:
                line = line.split()
                if 'ATOM' in line[0]:
                    self.atoms.append({'number': line[1], 'type': line[2],
                                       'residue_type': line[3],
                                       'residue_number': line[4],
                                       'x': line[5], 'y': line[6],
                                       'z': line[7]})
    # returns the values of the specified keys for every atom (flattened);
    # keys can be given as a space-separated string or a list
def get(self, keys):
if isinstance(keys, str):
keys = keys.split()
if not isinstance(keys, list):
raise ValueError("Keys have to be given as strings (e.g. \"number type\") or list (e.g. \"[\'number\'"
", \'type\']\")")
out = []
for item in self.atoms:
for key in keys:
if key in item.keys():
out.append(item[key])
else:
raise ValueError(
"Key %s not found. Available keys: 'number', 'type', 'residue_type', 'residue_number'"
", 'x', 'y', 'z'" % key)
return out
# returns list of atom types
def get_atom_types(self):
types = self.get("type")
types = list(set(types))
return types
# returns list of residue numbers as integers
def get_residue_numbers(self):
residue_numbers = self.get("residue_number")
residue_numbers = list(set(residue_numbers))
residue_numbers = [int(x) for x in residue_numbers]
residue_numbers = sorted(residue_numbers)
return residue_numbers
# returns all non-water residue numbers
def get_non_solvent_residue_numbers(self):
residue_numbers = []
for item in self.atoms:
if 'WAT' not in item['residue_type']:
residue_numbers.append(item['residue_number'])
residue_numbers = list(set(residue_numbers))
residue_numbers = [int(x) for x in residue_numbers]
residue_numbers = sorted(residue_numbers)
return residue_numbers
    # returns a list of atom types parsed from 'residue@type' labels
    @staticmethod
    def get_atom_types_from_labels(atoms):
atom_types = []
for item in atoms:
item = item.split('@')[1]
atom_types.append(item)
atom_types = list(set(atom_types))
return atom_types
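# A minimal usage sketch (hypothetical file name; keys as listed in get()):
#   pdb = Pdb('structure.pdb')
#   coords = pdb.get('x y z')             # interleaved coordinate strings
#   residues = pdb.get_residue_numbers()  # sorted list of ints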
| Gimba/Occupancy | pdb.py | Python | gpl-3.0 | 2,535 |
import unittest
from unittest.mock import patch
from Javatar.core.dependency_manager import _DependencyManager
class TestDependencyManager(unittest.TestCase):
@patch("os.path.exists", return_value=True)
@patch(
"Javatar.core.settings._Settings.get_global",
return_value=["Alpha/Bravo/"]
)
@patch(
"Javatar.core.settings._Settings.get_local",
return_value=["Charlie/Delta/"]
)
def test_get_dependencies(self, *_):
dm = _DependencyManager()
self.assertEqual(
dm.get_dependencies(from_global=True),
[["Alpha/Bravo/", True]]
)
self.assertEqual(
dm.get_dependencies(from_global=False),
[["Charlie/Delta/", False], ["Alpha/Bravo/", True]]
)
| spywhere/Javatar | tests/core/test_dependency_manager.py | Python | mit | 781 |
from __future__ import unicode_literals
import unittest
from .models import PersonWithDefaultMaxLengths, PersonWithCustomMaxLengths
class MaxLengthArgumentsTests(unittest.TestCase):
def verify_max_length(self, model, field, length):
self.assertEqual(model._meta.get_field(field).max_length, length)
def test_default_max_lengths(self):
self.verify_max_length(PersonWithDefaultMaxLengths, 'email', 254)
self.verify_max_length(PersonWithDefaultMaxLengths, 'vcard', 100)
self.verify_max_length(PersonWithDefaultMaxLengths, 'homepage', 200)
self.verify_max_length(PersonWithDefaultMaxLengths, 'avatar', 100)
def test_custom_max_lengths(self):
self.verify_max_length(PersonWithCustomMaxLengths, 'email', 250)
self.verify_max_length(PersonWithCustomMaxLengths, 'vcard', 250)
self.verify_max_length(PersonWithCustomMaxLengths, 'homepage', 250)
self.verify_max_length(PersonWithCustomMaxLengths, 'avatar', 250)
class MaxLengthORMTests(unittest.TestCase):
def test_custom_max_lengths(self):
args = {
"email": "[email protected]",
"vcard": "vcard",
"homepage": "http://example.com/",
"avatar": "me.jpg"
}
for field in ("email", "vcard", "homepage", "avatar"):
new_args = args.copy()
new_args[field] = "X" * 250 # a value longer than any of the default fields could hold.
p = PersonWithCustomMaxLengths.objects.create(**new_args)
self.assertEqual(getattr(p, field), ("X" * 250))
| diegoguimaraes/django | tests/max_lengths/tests.py | Python | bsd-3-clause | 1,589 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for recurrent v2 layers functionality other than GRU, LSTM.
See also: lstm_v2_test.py, gru_v2_test.py.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.layers import recurrent_v2 as rnn_v2
from tensorflow.python.platform import test
@keras_parameterized.run_all_keras_modes
class RNNV2Test(keras_parameterized.TestCase):
@parameterized.parameters([rnn_v2.LSTM, rnn_v2.GRU])
def test_device_placement(self, layer):
if not test.is_gpu_available():
self.skipTest('Need GPU for testing.')
vocab_size = 20
embedding_dim = 10
batch_size = 8
timestep = 12
units = 5
x = np.random.randint(0, vocab_size, size=(batch_size, timestep))
y = np.random.randint(0, vocab_size, size=(batch_size, timestep))
# Test when GPU is available but not used, the graph should be properly
# created with CPU ops.
with test_util.device(use_gpu=False):
model = keras.Sequential([
keras.layers.Embedding(vocab_size, embedding_dim,
batch_input_shape=[batch_size, timestep]),
layer(units, return_sequences=True, stateful=True),
keras.layers.Dense(vocab_size)
])
model.compile(
optimizer='adam',
loss='sparse_categorical_crossentropy',
run_eagerly=testing_utils.should_run_eagerly())
model.fit(x, y, epochs=1, shuffle=False)
@parameterized.parameters([rnn_v2.LSTM, rnn_v2.GRU])
def test_reset_dropout_mask_between_batch(self, layer):
# See https://github.com/tensorflow/tensorflow/issues/29187 for more details
batch_size = 8
timestep = 12
embedding_dim = 10
units = 5
layer = layer(units, dropout=0.5, recurrent_dropout=0.5)
inputs = np.random.random((batch_size, timestep, embedding_dim)).astype(
np.float32)
previous_dropout, previous_recurrent_dropout = None, None
for _ in range(5):
layer(inputs, training=True)
dropout = layer.cell.get_dropout_mask_for_cell(inputs, training=True)
recurrent_dropout = layer.cell.get_recurrent_dropout_mask_for_cell(
inputs, training=True)
if previous_dropout is not None:
self.assertNotAllClose(self.evaluate(previous_dropout),
self.evaluate(dropout))
previous_dropout = dropout
if previous_recurrent_dropout is not None:
self.assertNotAllClose(self.evaluate(previous_recurrent_dropout),
self.evaluate(recurrent_dropout))
previous_recurrent_dropout = recurrent_dropout
@parameterized.parameters([rnn_v2.LSTM, rnn_v2.GRU])
def test_recurrent_dropout_with_stateful_RNN(self, layer):
# See https://github.com/tensorflow/tensorflow/issues/27829 for details.
# The issue was caused by using inplace mul for a variable, which was a
# warning for RefVariable, but an error for ResourceVariable in 2.0
keras.models.Sequential([
layer(128, stateful=True, return_sequences=True, dropout=0.2,
batch_input_shape=[32, None, 5], recurrent_dropout=0.2)
])
def test_recurrent_dropout_saved_model(self):
if not context.executing_eagerly():
self.skipTest('v2-only test')
inputs = keras.Input(shape=(784, 3), name='digits')
x = keras.layers.GRU(64, activation='relu', name='GRU', dropout=0.1)(inputs)
x = keras.layers.Dense(64, activation='relu', name='dense')(x)
outputs = keras.layers.Dense(
10, activation='softmax', name='predictions')(
x)
model = keras.Model(inputs=inputs, outputs=outputs, name='3_layer')
model.save(os.path.join(self.get_temp_dir(), 'model'), save_format='tf')
if __name__ == '__main__':
test.main()
| gunan/tensorflow | tensorflow/python/keras/layers/recurrent_v2_test.py | Python | apache-2.0 | 4,779 |
import json
from typing import Union, List, Dict, Any
import torch
from torch.autograd import Variable
from torch.nn.modules import Dropout
import numpy
import h5py
from overrides import overrides
from allennlp.common.file_utils import cached_path
from allennlp.common.checks import ConfigurationError
from allennlp.common import Registrable, Params
from allennlp.modules.elmo_lstm import ElmoLstm
from allennlp.modules.highway import Highway
from allennlp.modules.scalar_mix import ScalarMix
from allennlp.nn.util import remove_sentence_boundaries, add_sentence_boundary_token_ids
from allennlp.data import Vocabulary
from allennlp.data.token_indexers.elmo_indexer import ELMoCharacterMapper
# pylint: disable=attribute-defined-outside-init
@Registrable.register('elmo')
class Elmo(torch.nn.Module, Registrable):
"""
Compute ELMo representations using a pre-trained bidirectional language model.
See "Deep contextualized word representations", Peters et al. for details.
This module takes character id input and computes ``num_output_representations`` different layers
of ELMo representations. Typically ``num_output_representations`` is 1 or 2. For example, in
the case of the SRL model in the above paper, ``num_output_representations=1`` where ELMo was included at
the input token representation layer. In the case of the SQuAD model, ``num_output_representations=2``
as ELMo was also included at the GRU output layer.
In the implementation below, we learn separate scalar weights for each output layer,
but only run the biLM once on each input sequence for efficiency.
Parameters
----------
options_file : ``str``, required.
ELMo JSON options file
weight_file : ``str``, required.
ELMo hdf5 weight file
num_output_representations: ``int``, required.
The number of ELMo representation layers to output.
do_layer_norm : ``bool``, optional, (default=False).
Should we apply layer normalization (passed to ``ScalarMix``)?
dropout : ``float``, optional, (default = 0.5).
The dropout to be applied to the ELMo representations.
"""
def __init__(self,
options_file: str,
weight_file: str,
num_output_representations: int,
do_layer_norm: bool = False,
dropout: float = 0.5) -> None:
super(Elmo, self).__init__()
self._elmo_lstm = _ElmoBiLm(options_file, weight_file)
self._dropout = Dropout(p=dropout)
self._scalar_mixes: Any = []
for k in range(num_output_representations):
scalar_mix = ScalarMix(self._elmo_lstm.num_layers, do_layer_norm=do_layer_norm)
self.add_module('scalar_mix_{}'.format(k), scalar_mix)
self._scalar_mixes.append(scalar_mix)
def forward(self, # pylint: disable=arguments-differ
inputs: torch.Tensor) -> Dict[str, Union[torch.Tensor, List[torch.Tensor]]]:
"""
Parameters
----------
inputs : ``torch.autograd.Variable``
Shape ``(batch_size, timesteps, 50)`` of character ids representing the current batch.
We also accept tensors with additional optional dimensions:
``(batch_size, dim0, dim1, ..., dimn, timesteps, 50)``
Returns
-------
Dict with keys:
``'elmo_representations'``: ``List[torch.autograd.Variable]``
A ``num_output_representations`` list of ELMo representations for the input sequence.
Each representation is shape ``(batch_size, timesteps, embedding_dim)``
``'mask'``: ``torch.autograd.Variable``
Shape ``(batch_size, timesteps)`` long tensor with sequence mask.
"""
# reshape the input if needed
original_shape = inputs.size()
timesteps, num_characters = original_shape[-2:]
if len(original_shape) > 3:
reshaped_inputs = inputs.view(-1, timesteps, num_characters)
else:
reshaped_inputs = inputs
# run the biLM
bilm_output = self._elmo_lstm(reshaped_inputs)
layer_activations = bilm_output['activations']
mask_with_bos_eos = bilm_output['mask']
# compute the elmo representations
representations = []
for scalar_mix in self._scalar_mixes:
representation_with_bos_eos = scalar_mix(layer_activations, mask_with_bos_eos)
representation_without_bos_eos, mask_without_bos_eos = remove_sentence_boundaries(
representation_with_bos_eos, mask_with_bos_eos
)
representations.append(self._dropout(representation_without_bos_eos))
# reshape if necessary
if len(original_shape) > 3:
mask = mask_without_bos_eos.view(original_shape[:-1])
elmo_representations = [representation.view(original_shape[:-1] + (-1, ))
for representation in representations]
else:
mask = mask_without_bos_eos
elmo_representations = representations
return {'elmo_representations': elmo_representations, 'mask': mask}
@classmethod
def from_params(cls, params: Params) -> 'Elmo':
# Add files to archive
params.add_file_to_archive('options_file')
params.add_file_to_archive('weight_file')
options_file = params.pop('options_file')
weight_file = params.pop('weight_file')
num_output_representations = params.pop('num_output_representations')
do_layer_norm = params.pop('do_layer_norm', False)
params.assert_empty(cls.__name__)
return cls(options_file, weight_file, num_output_representations, do_layer_norm)
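# A minimal usage sketch (hypothetical local file paths; shapes follow the
# Elmo docstring above):
#   elmo = Elmo('options.json', 'weights.hdf5', num_output_representations=1)
#   # character_ids: (batch_size, timesteps, 50) built via ELMoCharacterMapper
#   output = elmo(character_ids)
#   representations = output['elmo_representations'][0]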
class _ElmoCharacterEncoder(torch.nn.Module):
"""
Compute context sensitive token representation using pretrained biLM.
This embedder has input character ids of size (batch_size, sequence_length, 50)
and returns (batch_size, sequence_length + 2, embedding_dim), where embedding_dim
is specified in the options file (typically 512).
We add special entries at the beginning and end of each sequence corresponding
to <S> and </S>, the beginning and end of sentence tokens.
Note: this is a lower level class useful for advanced usage. Most users should
use ``ElmoTokenEmbedder`` or ``allennlp.modules.Elmo`` instead.
Parameters
----------
options_file : ``str``
ELMo JSON options file
weight_file : ``str``
ELMo hdf5 weight file
The relevant section of the options file is something like:
.. example-code::
.. code-block:: python
{'char_cnn': {
'activation': 'relu',
'embedding': {'dim': 4},
'filters': [[1, 4], [2, 8], [3, 16], [4, 32], [5, 64]],
'max_characters_per_token': 50,
'n_characters': 262,
'n_highway': 2
}
}
"""
def __init__(self,
options_file: str,
weight_file: str) -> None:
super(_ElmoCharacterEncoder, self).__init__()
with open(cached_path(options_file), 'r') as fin:
self._options = json.load(fin)
self._weight_file = weight_file
self.output_dim = self._options['lstm']['projection_dim']
self._load_weights()
# Cache the arrays for use in forward -- +1 due to masking.
self._beginning_of_sentence_characters = Variable(torch.from_numpy(
numpy.array(ELMoCharacterMapper.beginning_of_sentence_characters) + 1
))
self._end_of_sentence_characters = Variable(torch.from_numpy(
numpy.array(ELMoCharacterMapper.end_of_sentence_characters) + 1
))
def get_output_dim(self):
return self.output_dim
@overrides
def forward(self, inputs: torch.Tensor) -> Dict[str, torch.Tensor]: # pylint: disable=arguments-differ
"""
Compute context insensitive token embeddings for ELMo representations.
Parameters
----------
inputs: ``torch.autograd.Variable``
Shape ``(batch_size, sequence_length, 50)`` of character ids representing the
current batch.
Returns
-------
Dict with keys:
``'token_embedding'``: ``torch.autograd.Variable``
Shape ``(batch_size, sequence_length + 2, embedding_dim)`` tensor with context
insensitive token representations.
``'mask'``: ``torch.autograd.Variable``
Shape ``(batch_size, sequence_length + 2)`` long tensor with sequence mask.
"""
# Add BOS/EOS
mask = ((inputs > 0).long().sum(dim=-1) > 0).long()
character_ids_with_bos_eos, mask_with_bos_eos = add_sentence_boundary_token_ids(
inputs,
mask,
self._beginning_of_sentence_characters,
self._end_of_sentence_characters
)
# the character id embedding
max_chars_per_token = self._options['char_cnn']['max_characters_per_token']
# (batch_size * sequence_length, max_chars_per_token, embed_dim)
character_embedding = torch.nn.functional.embedding(
character_ids_with_bos_eos.view(-1, max_chars_per_token),
self._char_embedding_weights
)
# run convolutions
cnn_options = self._options['char_cnn']
if cnn_options['activation'] == 'tanh':
activation = torch.nn.functional.tanh
elif cnn_options['activation'] == 'relu':
activation = torch.nn.functional.relu
else:
raise ConfigurationError("Unknown activation")
# (batch_size * sequence_length, embed_dim, max_chars_per_token)
character_embedding = torch.transpose(character_embedding, 1, 2)
convs = []
for conv in self._convolutions:
convolved = conv(character_embedding)
# (batch_size * sequence_length, n_filters for this width)
convolved, _ = torch.max(convolved, dim=-1)
convolved = activation(convolved)
convs.append(convolved)
# (batch_size * sequence_length, n_filters)
token_embedding = torch.cat(convs, dim=-1)
# apply the highway layers (batch_size * sequence_length, n_filters)
token_embedding = self._highways(token_embedding)
# final projection (batch_size * sequence_length, embedding_dim)
token_embedding = self._projection(token_embedding)
# reshape to (batch_size, sequence_length, embedding_dim)
batch_size, sequence_length, _ = character_ids_with_bos_eos.size()
return {
'mask': mask_with_bos_eos,
'token_embedding': token_embedding.view(batch_size, sequence_length, -1)
}
def _load_weights(self):
self._load_char_embedding()
self._load_cnn_weights()
self._load_highway()
self._load_projection()
def _load_char_embedding(self):
with h5py.File(cached_path(self._weight_file), 'r') as fin:
char_embed_weights = fin['char_embed'][...]
weights = numpy.zeros(
(char_embed_weights.shape[0] + 1, char_embed_weights.shape[1]),
dtype='float32'
)
weights[1:, :] = char_embed_weights
self._char_embedding_weights = torch.nn.Parameter(
torch.FloatTensor(weights), requires_grad=False
)
def _load_cnn_weights(self):
cnn_options = self._options['char_cnn']
filters = cnn_options['filters']
char_embed_dim = cnn_options['embedding']['dim']
convolutions = []
for i, (width, num) in enumerate(filters):
conv = torch.nn.Conv1d(
in_channels=char_embed_dim,
out_channels=num,
kernel_size=width,
bias=True
)
# load the weights
with h5py.File(cached_path(self._weight_file), 'r') as fin:
weight = fin['CNN']['W_cnn_{}'.format(i)][...]
bias = fin['CNN']['b_cnn_{}'.format(i)][...]
w_reshaped = numpy.transpose(weight.squeeze(axis=0), axes=(2, 1, 0))
if w_reshaped.shape != tuple(conv.weight.data.shape):
raise ValueError("Invalid weight file")
conv.weight.data.copy_(torch.FloatTensor(w_reshaped))
conv.bias.data.copy_(torch.FloatTensor(bias))
conv.weight.requires_grad = False
conv.bias.requires_grad = False
convolutions.append(conv)
self.add_module('char_conv_{}'.format(i), conv)
self._convolutions = convolutions
def _load_highway(self):
# pylint: disable=protected-access
# the highway layers have same dimensionality as the number of cnn filters
cnn_options = self._options['char_cnn']
filters = cnn_options['filters']
n_filters = sum(f[1] for f in filters)
n_highway = cnn_options['n_highway']
# create the layers, and load the weights
self._highways = Highway(n_filters, n_highway, activation=torch.nn.functional.relu)
for k in range(n_highway):
            # The AllenNLP highway is one matrix multiplication with concatenation of
# transform and carry weights.
with h5py.File(cached_path(self._weight_file), 'r') as fin:
# The weights are transposed due to multiplication order assumptions in tf
# vs pytorch (tf.matmul(X, W) vs pytorch.matmul(W, X))
w_transform = numpy.transpose(fin['CNN_high_{}'.format(k)]['W_transform'][...])
# -1.0 since AllenNLP is g * x + (1 - g) * f(x) but tf is (1 - g) * x + g * f(x)
w_carry = -1.0 * numpy.transpose(fin['CNN_high_{}'.format(k)]['W_carry'][...])
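                # negating the tf carry parameters is valid because
                # sigmoid(-z) = 1 - sigmoid(z), so tf's carry gate g becomes
                # the (1 - g) that the AllenNLP convention expects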
weight = numpy.concatenate([w_transform, w_carry], axis=0)
self._highways._layers[k].weight.data.copy_(torch.FloatTensor(weight))
self._highways._layers[k].weight.requires_grad = False
b_transform = fin['CNN_high_{}'.format(k)]['b_transform'][...]
b_carry = -1.0 * fin['CNN_high_{}'.format(k)]['b_carry'][...]
bias = numpy.concatenate([b_transform, b_carry], axis=0)
self._highways._layers[k].bias.data.copy_(torch.FloatTensor(bias))
self._highways._layers[k].bias.requires_grad = False
def _load_projection(self):
cnn_options = self._options['char_cnn']
filters = cnn_options['filters']
n_filters = sum(f[1] for f in filters)
self._projection = torch.nn.Linear(n_filters, self.output_dim, bias=True)
with h5py.File(cached_path(self._weight_file), 'r') as fin:
weight = fin['CNN_proj']['W_proj'][...]
bias = fin['CNN_proj']['b_proj'][...]
self._projection.weight.data.copy_(torch.FloatTensor(numpy.transpose(weight)))
self._projection.bias.data.copy_(torch.FloatTensor(bias))
self._projection.weight.requires_grad = False
self._projection.bias.requires_grad = False
@classmethod
def from_params(cls, vocab: Vocabulary, params: Params) -> '_ElmoCharacterEncoder':
# pylint: disable=unused-argument
options_file = params.pop('options_file')
weight_file = params.pop('weight_file')
params.assert_empty(cls.__name__)
return cls(options_file, weight_file)
class _ElmoBiLm(torch.nn.Module):
"""
    Run a pre-trained bidirectional language model, outputting the activations at each
layer for weighting together into an ELMo representation (with
``allennlp.modules.seq2seq_encoders.Elmo``). This is a lower level class, useful
for advanced uses, but most users should use ``allennlp.modules.seq2seq_encoders.Elmo``
directly.
Parameters
----------
options_file : ``str``
ELMo JSON options file
weight_file : ``str``
ELMo hdf5 weight file
"""
def __init__(self,
options_file: str,
weight_file: str) -> None:
super(_ElmoBiLm, self).__init__()
self._token_embedder = _ElmoCharacterEncoder(options_file, weight_file)
with open(cached_path(options_file), 'r') as fin:
options = json.load(fin)
if not options['lstm'].get('use_skip_connections'):
raise ConfigurationError('We only support pretrained biLMs with residual connections')
self._elmo_lstm = ElmoLstm(input_size=options['lstm']['projection_dim'],
hidden_size=options['lstm']['projection_dim'],
cell_size=options['lstm']['dim'],
num_layers=options['lstm']['n_layers'],
memory_cell_clip_value=options['lstm']['cell_clip'],
state_projection_clip_value=options['lstm']['proj_clip'])
self._elmo_lstm.load_weights(weight_file)
# Number of representation layers including context independent layer
self.num_layers = options['lstm']['n_layers'] + 1
def forward(self, # pylint: disable=arguments-differ
inputs: torch.Tensor) -> Dict[str, Union[torch.Tensor, List[torch.Tensor]]]:
"""
Parameters
----------
inputs: ``torch.autograd.Variable``
Shape ``(batch_size, timesteps, 50)`` of character ids representing the current batch.
Returns
-------
Dict with keys:
``'activations'``: ``List[torch.autograd.Variable]``
A list of activations at each layer of the network, each of shape
``(batch_size, timesteps + 2, embedding_dim)``
``'mask'``: ``torch.autograd.Variable``
Shape ``(batch_size, timesteps + 2)`` long tensor with sequence mask.
Note that the output tensors all include additional special begin and end of sequence
markers.
"""
token_embedding = self._token_embedder(inputs)
type_representation = token_embedding['token_embedding']
mask = token_embedding['mask']
lstm_outputs = self._elmo_lstm(type_representation, mask)
# Prepare the output. The first layer is duplicated.
output_tensors = [
torch.cat([type_representation, type_representation], dim=-1)
]
for layer_activations in torch.chunk(lstm_outputs, lstm_outputs.size(0), dim=0):
output_tensors.append(layer_activations.squeeze(0))
return {
'activations': output_tensors,
'mask': mask,
}
| nafitzgerald/allennlp | allennlp/modules/elmo.py | Python | apache-2.0 | 18,830 |