repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---
obriencj/python-sibilant | sibilant/__init__.py | 1 | 1153 |
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, see
# <http://www.gnu.org/licenses/>.
"""
Sibilant, a LISP for Python
author: Christopher O'Brien <[email protected]>
license: LGPL v.3
"""
def __auto_enable_importer():
from os import environ
from sys import _xoptions as xoption
xopt = xoption.get("SIBILANT_NOIMPORTER", "0") == "0"
eopt = environ.get("SIBILANT_NOIMPORTER", "0") == "0"
if xopt and eopt:
from .importlib import install
install()
return True
else:
return False
__auto_enable_importer()
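# A minimal usage sketch (hypothetical invocations): either switch below makes
# __auto_enable_importer() skip installing the importer, since both the -X
# option and the environment variable must be unset or "0" for it to install.
#
#   SIBILANT_NOIMPORTER=1 python -c "import sibilant"
#   python -X SIBILANT_NOIMPORTER=1 -c "import sibilant"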
#
# The end.
| lgpl-3.0 | -4,162,881,882,859,467,300 | 25.813953 | 70 | 0.703382 | false |
hadronproject/lpms | lpms/syncers/git.py | 1 | 2845 |
#!/usr/bin/env python
# Copyright 2009 - 2011 Burak Sezer <[email protected]>
#
# This file is part of lpms
#
# lpms is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# lpms is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with lpms. If not, see <http://www.gnu.org/licenses/>.
import os
import lpms
from lpms import out
from lpms import utils
from lpms import shelltools
from lpms import constants as cst
from lpms.exceptions import InvalidURI
class GITSync(object):
def __init__(self, repo, remote):
self.repo = repo
self.remote = remote
self.git_binary = utils.executable_path("git")
self.repo_path = os.path.join(cst.repos, repo)
def git_repo(self):
if os.path.isdir(self.repo_path) and os.listdir(self.repo_path):
if os.path.isdir(self.repo_path+"/"+".git"):
return True
return False
# parse_uri method is borrowed from pkgcore: sync/git.py
def parse_uri(self):
if not self.remote.startswith("git+") and not self.remote.startswith("git://"):
raise InvalidURI(self.remote, "doesn't start with git+ nor git://")
if self.remote.startswith("git+"):
if self.remote.startswith("git+:"):
raise InvalidURI(self.remote, "need to specify the sub protocol if using git+")
self.remote = self.remote[4:]
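        # Illustrative examples of the checks above (assumed inputs):
        #   "git+ssh://host/repo.git" -> "ssh://host/repo.git"
        #   "git://host/repo.git"     -> left unchanged
        #   "git+://host/repo.git"    -> InvalidURI (no sub protocol given)
        #   "http://host/repo.git"    -> InvalidURI (unsupported scheme)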
def sync(self):
if self.git_repo():
os.chdir(self.repo_path)
if lpms.getopt("--reset"):
out.warn("forcing git to overwrite local files")
shelltools.system("%s reset --hard HEAD" % self.git_binary, sandbox=False)
shelltools.system("%s clean -f -d" % self.git_binary, sandbox=False)
shelltools.system("%s pull -f -u origin" % self.git_binary, sandbox=False)
else:
os.chdir(os.path.dirname(self.repo_path))
shelltools.system("%s clone %s %s" % (self.git_binary, self.remote, self.repo), sandbox=False)
def run(repo, remote):
obj = GITSync(repo, remote)
if not os.access("%s" % obj.git_binary, os.X_OK):
lpms.terminate("%s seems not executable or not exist. Please check dev-vcs/git." % obj.git_binary)
obj.parse_uri()
obj.sync()
| gpl-3.0 | 659,749,943,777,660,500 | 39.070423 | 167 | 0.615817 | false |
Hamuko/lovepon | lovepon/ffmpeg.py | 1 | 9222 |
from math import fabs, ceil
from shlex import quote
from datetime import timedelta
import os
import re
import shutil
import subprocess
import tempfile
import click
class FFmpeg(object):
"""Class used for video conversions to WebM. Uses the instance variables to
generate ffmpeg arguments to run and matches the output video to specified
parameters.
"""
duration_re = re.compile(r'Duration: ([0-9:\.]*),')
def __init__(self, file):
self.file = file
self.original_filename = os.path.splitext(os.path.split(self.file)[1])[0]
self._temp_dir = tempfile.TemporaryDirectory()
self._subs_extracted = False
self.bandwidth = None
self.coordinates = None
self.end = None
self.h264 = False
self.iterations = 0
self.output = None
self.quiet = True
self.resolution = ()
self.sound = False
self.start = None
self.subtitles = False
self.target_filesize = None
self.title = None
def arguments(self, encode_pass=1):
"""Returns a list of ffmpeg arguments based on the set instance variables.
"""
arguments = ['ffmpeg', '-y']
if self.start:
start1, start2 = self.split_start_time()
arguments += ['-ss', str(start1)]
arguments += ['-i', self.file]
if self.start:
arguments += ['-ss', str(start2)]
if self.title:
arguments += ['-metadata', 'title={}'.format(self.title)]
if self.coordinates:
arguments += ['-filter:v', 'crop={}'.format(self.crop_coordinates)]
if self.subtitles:
arguments += ['-copyts',
'-vf', 'subtitles={},setpts=PTS-STARTPTS'
.format(quote(self.file))]
arguments += ['-sn']
if self.end:
arguments += ['-t', str(self.duration)]
if self.resolution:
arguments += ['-s', 'x'.join([str(x) for x in self.resolution])]
if self.h264:
arguments += ['-c:v', 'libx264', '-preset', 'slower']
else:
arguments += ['-c:v', 'libvpx']
if self.sound:
arguments += ['-af', 'asetpts=PTS-STARTPTS']
if self.h264:
arguments += ['-c:a', 'aac', '-q:a', '4']
else:
arguments += ['-c:a', 'libvorbis', '-q:a', '4']
if self.bandwidth:
arguments += ['-b:v', str(self.bandwidth) + 'M']
arguments += ['-pass', str(encode_pass)]
if not self.sound:
arguments += ['-an']
arguments += [self.filename]
return arguments
def default_bitrate(self):
"""Calculates a bitrate to start the encoding process based on the
target filesize and the length of the output video. The following
formula is used to calculate the bitrate (Mb/s):
        target size (bytes) / video duration (s) / 1024^2 * 8
"""
seconds = self.duration.total_seconds()
return self.target_filesize / seconds / 1048576 * 8
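        # Worked example (hypothetical numbers): a 3 MiB target over a 60 s
        # clip gives 3145728 / 60 / 1048576 * 8 = 0.4, i.e. a 0.4 Mb/s start.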
@property
def duration(self):
"""Return the duration as a timedelta object."""
if self.start:
start = self.string_to_timedelta(self.start)
else:
start = timedelta(0)
if self.end:
end = self.string_to_timedelta(self.end)
else:
end = self.string_to_timedelta(self.video_duration)
return end - start
@duration.setter
def duration(self, value):
"""Set the end point for the video based on the start time and duration.
"""
if self.start:
start = self.string_to_timedelta(self.start)
else:
start = timedelta(0)
duration = self.string_to_timedelta(value)
self.end = start + duration
@property
def crop_coordinates(self):
"""Returns a string with coordinate information presented with a format
usable by the crop filter in ffmpeg.
"""
width = self.coordinates[2] - self.coordinates[0]
height = self.coordinates[3] - self.coordinates[1]
return ('{w}:{h}:{c[0]}:{c[1]}'
.format(w=width, h=height, c=self.coordinates))
def encode(self):
"""Performs a two-pass encode. If the class has a specified target
filesize, performs the encode until either the target filesize has
been reached or bandwidth changes do not affect filesize.
"""
kwargs = {
'cwd': self._temp_dir.name,
'stderr': subprocess.DEVNULL if self.quiet else None
}
old_bitrate = 0
old_filesize = 0
temporary_file = os.path.join(self._temp_dir.name, self.filename)
if not self.bandwidth:
self.bandwidth = self.default_bitrate()
iteration = 0
while True:
iteration += 1
click.echo("Encoding video at {}M."
.format(ceil(self.bandwidth * 100) / 100))
args = self.arguments()
process = subprocess.Popen(args, **kwargs)
process.wait()
args = self.arguments(encode_pass=2)
process = subprocess.Popen(args, **kwargs)
process.wait()
filesize = os.stat(temporary_file).st_size
click.echo("Encoded video is {} kB."
.format(ceil(filesize / 1024)))
if not self.target_filesize:
# Stop encoding: bitrate mode used.
break
if fabs(self.target_filesize - filesize) < 10240:
# Stop encoding: File within 10 kB.
break
if fabs(filesize - old_filesize) < 8 * 1024:
click.echo('Bitrate maxed. Stopping.')
break
if self.iterations and iteration >= self.iterations:
# Stop encoding: reached maximum iterations.
break
if old_bitrate and old_filesize:
delta_filesize = filesize - old_filesize
delta_bitrate = self.bandwidth - old_bitrate
d = delta_filesize / delta_bitrate
add_bitrate = -3 * pow(min(d / 300000, 1), 0.25) + 3
else:
add_bitrate = 0
old_bitrate = self.bandwidth
old_filesize = filesize
self.bandwidth *= self.target_filesize / filesize + add_bitrate
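            # e.g. (hypothetical) a 4 MiB result against a 3 MiB target scales
            # the bitrate by 3/4, plus whatever extra add_bitrate grants based
            # on how weakly the last bitrate change moved the filesize.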
shutil.move(temporary_file, self.out_filename)
@property
def extension(self):
if self.h264:
return '.mp4'
else:
return '.webm'
@property
def filename(self):
return 'output{}'.format(self.extension)
def generate_screenshot(self):
"""Generates a screenshot of the video at the start position."""
kwargs = {
'cwd': self._temp_dir.name,
'stderr': subprocess.DEVNULL if self.quiet else None
}
outname = os.path.join(self._temp_dir.name, 'output.jpg')
args = ['ffmpeg', '-ss', self.start, '-i', self.file,
'-vframes', '1', '-q:v', '2', outname]
process = subprocess.Popen(args, **kwargs)
process.wait()
return outname
@property
def out_filename(self):
if self.output:
name = self.output + self.extension
else:
name = self.original_filename + self.extension
return os.path.join(os.getcwd(), name)
def split_start_time(self):
original = self.string_to_timedelta(self.start)
start1 = max(original - timedelta(seconds=10), timedelta(0))
start2 = original - start1
return start1, start2
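        # e.g. a start of '00:01:30' splits into a coarse pre-input seek of
        # 0:01:20 and an accurate post-input seek of 0:00:10; arguments()
        # places the first before -i and the second after it.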
def string_to_timedelta(self, time):
"""Converts a timestamp used by FFmpeg to a Python timedelta object."""
parts = time.split(':')
try:
seconds = int(parts[-1].split('.')[0])
except (IndexError, ValueError):
seconds, milliseconds = 0, 0
try:
milliseconds = int(parts[-1].split('.')[1])
except (IndexError, ValueError):
milliseconds = 0
try:
minutes = int(parts[-2])
except (IndexError, ValueError):
minutes = 0
try:
hours = int(parts[-3])
except (IndexError, ValueError):
hours = 0
return timedelta(hours=hours, minutes=minutes,
seconds=seconds, milliseconds=milliseconds)
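        # e.g. '1:02:03.500' -> timedelta(hours=1, minutes=2, seconds=3,
        # milliseconds=500), while a bare '45' parses as 45 seconds.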
def timedelta_to_string(self, delta):
"""Converts a timedelta object to a FFmpeg compatible string."""
hours = delta.seconds // 3600
minutes = delta.seconds % 3600 // 60
seconds = delta.seconds % 60
milliseconds = delta.microseconds // 1000
return '{}:{}:{}.{}'.format(hours, minutes, seconds, milliseconds)
@property
def video_duration(self):
args = ['ffmpeg', '-i', self.file]
process = subprocess.Popen(args, stderr=subprocess.PIPE)
process.wait()
for line in process.stderr:
linestr = str(line)
if ' Duration: ' in linestr:
return re.search(FFmpeg.duration_re, linestr).group(1)
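# A minimal usage sketch (hypothetical file names; assumes ffmpeg with libvpx
# and libvorbis is installed and on PATH):
#
#   enc = FFmpeg('input.mkv')
#   enc.start = '00:01:30'
#   enc.end = '00:01:45'
#   enc.target_filesize = 3 * 1024 * 1024  # target size in bytes
#   enc.encode()  # writes input.webm into the current working directory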
| apache-2.0 | 7,365,530,233,980,663,000 | 35.164706 | 82 | 0.552809 | false |
tic-ull/portal-del-investigador-cvn | views.py | 1 | 10168 |
# -*- encoding: UTF-8 -*-
#
# Copyright 2014-2015
#
# STIC-Investigación - Universidad de La Laguna (ULL) <[email protected]>
#
# This file is part of CVN.
#
# CVN is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# CVN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with CVN. If not, see
# <http://www.gnu.org/licenses/>.
#
import datetime
from django.conf import settings as st
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, Http404
from django.shortcuts import render
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView, View
from django.utils.decorators import method_decorator
from cvn import settings as st_cvn
from . import signals
from .forms import UploadCVNForm, GetDataCVNULL, DownloadReportForm
from .models import CVN
from .utils import (scientific_production_to_context, cvn_to_context,
stats_to_context)
from .reports import DBDeptReport, DBAreaReport, WSAreaReport, WSDeptReport
from .reports.shortcuts import (get_report_path, ReportDoesNotExist,
get_report_instance)
from .decorators import user_can_view_reports
from statistics.models import Area, Department
from core.routers import in_database
@login_required
def index(request):
context = {}
user = request.user
try:
cvn = CVN.objects.get(user_profile__user=user)
except ObjectDoesNotExist:
cvn = None
form = UploadCVNForm()
if request.method == 'POST':
form = UploadCVNForm(request.POST, request.FILES,
user=user, instance=cvn)
if form.is_valid():
cvn = form.save()
context['message'] = _("CVN updated successfully.")
signals.cvn_uploaded.send(sender=None, cvn=cvn)
context['form'] = form
stats_to_context(request, context)
cvn_to_context(user.profile, context)
context['CVN'] = scientific_production_to_context(user.profile, context)
context['TIME_WAITING'] = st_cvn.TIME_WAITING
context['MESSAGES_WAITING'] = st_cvn.MESSAGES_WAITING
return render(request, 'cvn/index.html', context)
@login_required
def download_cvn(request):
cvn = request.user.profile.cvn
try:
pdf = open(cvn.cvn_file.path)
except IOError:
raise Http404
response = HttpResponse(pdf, content_type=st.MIMETYPES['pdf'])
response['Content-Disposition'] = 'inline; filename=%s' % (
cvn.cvn_file.name.split('/')[-1])
signals.cvn_downloaded.send(sender=None)
return response
@login_required
@permission_required('cvn.view_university_report')
def university_report(request, year):
with in_database(year):
if year is None or year not in st.HISTORICAL:
raise Http404
context = {}
user = User.objects.get(username='GesInv-ULL')
scientific_production_to_context(user.profile, context)
try:
context['report_date'] = unicode(year)
except ObjectDoesNotExist:
context['report_date'] = _("Not Available")
return render(request, 'cvn/ull_report.html', context)
@login_required
def export_data_ull(request):
if not request.user.profile.rrhh_code:
raise Http404
context = dict()
context['form'] = GetDataCVNULL()
if request.method == 'POST':
form = GetDataCVNULL(request.POST)
if form.is_valid():
start_year = None
end_year = None
if 'select_year' in form.data:
form_year = int(form.data['year'])
                start_year = datetime.date(form_year, 1, 1)
end_year = datetime.date(form_year, 12, 31)
if 'range_years' in form.data:
form_start_year = int(form.data['start_year'])
                start_year = datetime.date(form_start_year, 1, 1)
end_year = datetime.date(int(form.data['end_year']), 12, 31)
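                # e.g. (hypothetical input) a single selected year 2014 yields
                # the range 2014-01-01 .. 2014-12-31, while a 2012-2014 range
                # yields 2012-01-01 .. 2014-12-31.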
pdf = CVN.get_user_pdf_ull(request.user, start_year, end_year)
if not pdf:
form._errors['__all__'] = _(
u'No information in this period')
context['form'] = form
return render(request, 'cvn/export_data_ull.html', context)
response = HttpResponse(pdf, content_type='application/pdf')
response['Content-Disposition'] = (
'attachment;' 'filename="CVN-EXPORT-%s.pdf"' % (
request.user.profile.documento))
signals.pdf_exported.send(sender=None)
return response
context['form'] = form
return render(request, 'cvn/export_data_ull.html', context)
class AdminReportsView(TemplateView):
template_name = "cvn/reports.html"
@method_decorator(login_required)
@method_decorator(user_can_view_reports)
def dispatch(self, *args, **kwargs):
return super(AdminReportsView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(AdminReportsView, self).get_context_data(**kwargs)
years = st.HISTORICAL
context['depts'] = []
context['areas'] = []
current_year = datetime.date.today().year
context['depts'].append({
'year': current_year,
'units': WSDeptReport.get_all_units_names(year=current_year)
})
context['areas'].append({
'year': current_year,
'units': WSAreaReport.get_all_units_names(year=current_year)
})
for year in years:
context['depts'].append({
'year': year,
'units': DBDeptReport.get_all_units_names(year=year)
})
context['areas'].append({
'year': year,
'units': DBAreaReport.get_all_units_names(year=year)
})
return context
class ReportsView(TemplateView):
template_name = "cvn/reports.html"
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ReportsView, self).dispatch(*args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ReportsView, self).get_context_data(**kwargs)
years = st.HISTORICAL
context['depts'] = []
context['areas'] = []
try:
dc = self.request.session['dept_code']
except KeyError:
dc = None
dept = ''
else:
dept = Department.objects.get(code=dc).name
try:
ac = self.request.session['area_code']
except KeyError:
ac = None
area = ''
else:
area = Area.objects.get(code=ac).name
current_year = datetime.date.today().year
context['depts'].append({
'year': current_year,
'units': [{'code': dc, 'name': dept}]
})
context['areas'].append({
'year': current_year,
'units': [{'code': ac, 'name': area}]
})
for year in years:
if dc is not None:
try:
get_report_path('dept', 'ipdf', year, dc)
except ReportDoesNotExist:
# The report does not exist. Probably the user's department
# didn't exist this year.
pass
else:
context['depts'].append({
'year': year,
'units': [{'code': dc, 'name': dept}]
})
if ac is not None:
try:
get_report_path('area', 'ipdf', year, ac)
except ReportDoesNotExist:
# The report does not exist. Probably the user's department
# didn't exist this year.
pass
else:
context['areas'].append({
'year': year,
'units': [{'code': ac, 'name': area}]
})
context['show_rcsv'] = False
return context
class DownloadReportView(View):
form_class = DownloadReportForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(DownloadReportView, self).dispatch(*args, **kwargs)
def create_response(self, path):
try:
pdf = open(path, 'r')
except (IOError, TypeError):
raise Http404
response = HttpResponse(
pdf, content_type=st.MIMETYPES[path.split('.')[-1]])
response['Content-Disposition'] = 'inline; filename=%s' % (
path.split('/')[-1])
return response
def get(self, request, *args, **kwargs):
# Form validation
form = self.form_class(kwargs)
if not form.is_valid():
raise Http404
# Get form fields
params = form.cleaned_data
unit_type = params['unit_type']
report_type = params['type']
year = int(params['year'])
code = params['code'] if report_type != 'rcsv' else None
# Check user permissions
user_unit = self.request.session.get(unit_type + '_code', '')
if (not user_can_view_reports(user=self.request.user)
and user_unit != code):
raise Http404
if year == datetime.date.today().year:
unit_type = 'ws_' + unit_type
path = get_report_path(unit_type, report_type, year, code)
response = self.create_response(path)
        return response
| agpl-3.0 | -2,380,956,261,017,551,400 | 34.552448 | 79 | 0.58503 | false |
tsl143/addons-server | src/olympia/api/tests/test_pagination.py | 1 | 4198 |
import mock
from rest_framework import generics
from rest_framework import serializers
from rest_framework import status
from rest_framework.test import APIRequestFactory
from olympia.amo.tests import TestCase
from olympia.api.pagination import (
CustomPageNumberPagination, OneOrZeroPageNumberPagination,
ESPageNumberPagination)
class PassThroughSerializer(serializers.BaseSerializer):
def to_representation(self, item):
return item
class TestCustomPageNumberPagination(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.view = generics.ListAPIView.as_view(
serializer_class=PassThroughSerializer,
queryset=range(1, 101),
pagination_class=CustomPageNumberPagination
)
def test_metadata_with_page_size(self):
request = self.factory.get('/', {'page_size': 10, 'page': 2})
response = self.view(request)
assert response.status_code == status.HTTP_200_OK
assert response.data == {
'page_size': 10,
'page_count': 10,
'results': range(11, 21),
'previous': 'http://testserver/?page_size=10',
'next': 'http://testserver/?page=3&page_size=10',
'count': 100
}
def test_metadata_with_default_page_size(self):
request = self.factory.get('/')
response = self.view(request)
assert response.status_code == status.HTTP_200_OK
assert response.data == {
'page_size': 25,
'page_count': 4,
'results': range(1, 26),
'previous': None,
'next': 'http://testserver/?page=2',
'count': 100
}
class TestESPageNumberPagination(TestCustomPageNumberPagination):
    def test_next_page_never_exceeds_max_result_window(self):
mocked_qs = mock.MagicMock()
mocked_qs.__getitem__().execute().hits.total = 30000
view = generics.ListAPIView.as_view(
serializer_class=PassThroughSerializer,
queryset=mocked_qs,
pagination_class=ESPageNumberPagination
)
request = self.factory.get('/', {'page_size': 5, 'page': 4999})
response = view(request)
assert response.data == {
'page_size': 5,
'page_count': 5000,
'results': mock.ANY,
'previous': 'http://testserver/?page=4998&page_size=5',
'next': 'http://testserver/?page=5000&page_size=5',
'count': 30000
}
request = self.factory.get('/', {'page_size': 5, 'page': 5000})
response = view(request)
assert response.data == {
'page_size': 5,
'page_count': 5000,
'results': mock.ANY,
'previous': 'http://testserver/?page=4999&page_size=5',
'next': None,
# We don't lie about the total count
'count': 30000
}
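    # Note: page 5000 * page_size 5 = 25,000 retrievable hits, so next is None
    # here even though count is 30,000 -- the page count is clamped to the
    # (assumed) Elasticsearch max_result_window rather than the raw total.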
class TestOneOrZeroPageNumberPagination(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.view = generics.ListAPIView.as_view(
serializer_class=PassThroughSerializer,
queryset=range(1, 101),
pagination_class=OneOrZeroPageNumberPagination
)
def test_response(self):
# page size and page should be ignored.
request = self.factory.get('/', {'page_size': 10, 'page': 2})
response = self.view(request)
assert response.data == {
'page_size': 1,
'page_count': 1,
'results': range(1, 2),
'previous': None,
'next': None,
'count': 1
}
def test_response_with_empty_queryset(self):
self.view = generics.ListAPIView.as_view(
serializer_class=PassThroughSerializer,
queryset=[],
pagination_class=OneOrZeroPageNumberPagination
)
request = self.factory.get('/')
response = self.view(request)
assert response.data == {
'page_size': 1,
'page_count': 1,
'results': [],
'previous': None,
'next': None,
'count': 0
}
        }
| bsd-3-clause | 2,561,878,797,920,512,500 | 31.796875 | 71 | 0.572415 | false |
MeanEYE/Sunflower | sunflower/tools/find_files.py | 1 | 11142 |
from __future__ import absolute_import
import os
from gi.repository import Gtk, Gdk, Pango, GObject
from threading import Thread, Event
class Column:
ICON = 0
NAME = 1
DIRECTORY = 2
class FindFiles(GObject.GObject):
"""Find files tool"""
__gtype_name__ = 'Sunflower_FindFiles'
__gsignals__ = {
'notify-start': (GObject.SignalFlags.RUN_LAST, None, ()),
'notify-stop': (GObject.SignalFlags.RUN_LAST, None, ())
}
def __init__(self, parent, application):
GObject.GObject.__init__(self)
# store parameters
self._parent = parent
self._application = application
self._path = self._parent.path
self._provider = None
self._running = False
# thread control object
self._abort = Event()
if hasattr(self._parent, 'get_provider'):
self._provider = self._parent.get_provider()
# configure window
self.window = Gtk.Window.new(Gtk.WindowType.TOPLEVEL)
self.window.set_title(_('Find files'))
self.window.set_default_size(550, 400)
self.window.set_position(Gtk.WindowPosition.CENTER_ON_PARENT)
self.window.set_modal(True)
self.window.set_transient_for(application)
self.window.set_wmclass('Sunflower', 'Sunflower')
self.window.connect('key-press-event', self._handle_key_press)
# create header
self.header_bar = Gtk.HeaderBar.new()
self.header_bar.set_show_close_button(True)
self.window.set_titlebar(self.header_bar)
self.stack_switcher = Gtk.StackSwitcher.new()
self.header_bar.set_custom_title(self.stack_switcher)
self.stack = Gtk.Stack.new()
self.stack_switcher.set_stack(self.stack)
self.window.add(self.stack)
# busy indicator
self.spinner = Gtk.Spinner.new()
self.spinner.set_margin_left(10)
self.header_bar.pack_start(self.spinner)
# create configuration interface
vbox = Gtk.VBox.new(False, 0)
self.stack.add_titled(vbox, 'criteria', _('Criteria'))
search_bar = Gtk.SearchBar.new()
search_bar.set_search_mode(True)
vbox.pack_start(search_bar, False, False, 0)
# create path and basic options
vbox_search = Gtk.VBox.new(False, 5)
search_bar.add(vbox_search)
hbox = Gtk.HBox.new(False, 5)
vbox_search.pack_start(hbox, True, False, 0)
self._entry_path = Gtk.Entry()
self._entry_path.set_size_request(300, -1)
self._entry_path.set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, 'folder-symbolic')
self._entry_path.connect('icon-release', self._browse_directory)
self._entry_path.connect('activate', self.find_files)
path = self._parent.path if hasattr(self._parent, 'path') else os.path.expanduser('~')
self._entry_path.set_text(path)
hbox.pack_start(self._entry_path, False, False, 0)
self.button_start = Gtk.Button.new_with_label(_('Start'))
self.button_start.connect('clicked', self.find_files)
hbox.pack_start(self.button_start, False, False, 0)
self.button_stop = Gtk.Button.new_from_icon_name('media-playback-stop-symbolic', Gtk.IconSize.BUTTON)
self.button_stop.connect('clicked', self.stop_search)
self.button_stop.set_sensitive(False)
self.header_bar.pack_end(self.button_stop)
self._checkbox_recursive = Gtk.CheckButton.new_with_label(_('Search recursively'))
self._checkbox_recursive.set_active(True)
vbox_search.pack_start(self._checkbox_recursive, False, False, 0)
# create extensions container
hbox = Gtk.HBox.new(False, 0)
vbox.pack_start(hbox, True, True, 0)
self.extensions_list = Gtk.ListBox.new()
self.extensions_list.set_size_request(200, -1)
self.extensions_list.connect('row-selected', self.__handle_extension_click)
self.extensions_container = Gtk.Stack.new()
hbox.pack_start(self.extensions_list, False, False, 0)
hbox.pack_start(Gtk.Separator.new(Gtk.Orientation.VERTICAL), False, False, 0)
hbox.pack_start(self.extensions_container, False, False, 0)
# create list
results_container = Gtk.ScrolledWindow.new()
results_container.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.ALWAYS)
self.stack.add_titled(results_container, 'results', _('Results'))
self._list = Gtk.ListStore.new((str, str, str))
self._names = Gtk.TreeView.new_with_model(self._list)
results_container.add(self._names)
cell_icon = Gtk.CellRendererPixbuf.new()
cell_name = Gtk.CellRendererText.new()
cell_directory = Gtk.CellRendererText.new()
col_name = Gtk.TreeViewColumn.new()
col_name.set_title(_('Name'))
col_name.set_expand(True)
col_directory = Gtk.TreeViewColumn.new()
col_directory.set_title(_('Location'))
col_directory.set_expand(True)
# pack renderer
col_name.pack_start(cell_icon, False)
col_name.pack_start(cell_name, True)
col_directory.pack_start(cell_directory, True)
# connect renderer attributes
col_name.add_attribute(cell_icon, 'icon-name', Column.ICON)
col_name.add_attribute(cell_name, 'text', Column.NAME)
col_directory.add_attribute(cell_directory, 'text', Column.DIRECTORY)
self._names.append_column(col_name)
self._names.append_column(col_directory)
self._names.connect('row-activated', self.__handle_row_activated)
self.__create_extensions()
self.window.show_all()
def __handle_extension_click(self, widget, title, data=None):
"""Handle clicking on extension's title widget."""
container = title.get_extension().get_container()
self.extensions_container.set_visible_child(container)
def __handle_row_activated(self, treeview, path, view_column, data=None):
"""Handle actions on list"""
# get list selection
selection = treeview.get_selection()
list_, iter_ = selection.get_selected()
# we need selection for this
if iter_ is None: return
name = list_.get_value(iter_, Column.NAME)
path = list_.get_value(iter_, Column.DIRECTORY)
# get active object
active_object = self._application.get_active_object()
if hasattr(active_object, 'change_path'):
# change path
active_object.change_path(path, name)
# close window
self._close_window()
else:
# notify user about active object
dialog = Gtk.MessageDialog(
self.window,
Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.INFO,
Gtk.ButtonsType.OK,
_(
'Active object doesn\'t support changing '
'path. Set focus on a different object, '
					'preferably a file list, and try again.'
)
)
dialog.run()
dialog.destroy()
def __create_extensions(self):
"""Create rename extensions"""
for ExtensionClass in self._application.find_extension_classes.values():
extension = ExtensionClass(self)
title = extension.get_title()
list_row = extension.get_title_widget()
self.extensions_list.add(list_row)
self.extensions_container.add_named(extension.get_container(), title)
def __update_status(self, running=True):
"""Update button status"""
self._running = running
if running:
self.stack.set_visible_child_name('results')
self.button_start.set_sensitive(False)
self.button_stop.set_sensitive(True)
self.spinner.start()
else:
self.button_start.set_sensitive(True)
self.button_stop.set_sensitive(False)
self.spinner.stop()
def __find_files(self, path, extensions, scan_recursively):
"""Threaded find files method."""
scan_queue = []
extension_list = list(map(lambda child: child.extension, extensions))
self.emit('notify-start')
GObject.idle_add(self.__update_status, True)
# add current path to scan queue
try:
item_list = self._provider.list_dir(path)
item_list = map(lambda new_item: os.path.join(path, new_item), item_list)
scan_queue.extend(item_list)
except:
pass
# traverse through directories
while not self._abort.is_set() and len(scan_queue) > 0:
item = scan_queue.pop(0)
# extend scan queue with directory content
if self._provider.is_dir(item) and scan_recursively:
try:
item_list = self._provider.list_dir(item)
item_list = map(lambda new_item: os.path.join(item, new_item), item_list)
scan_queue.extend(item_list)
except:
pass
match = True
for extension in extension_list:
match &= extension.is_path_ok(self._provider, item)
if not match: break # no point in testing other extensions
if match:
name = os.path.basename(item)
path = os.path.dirname(item)
icon = self._application.icon_manager.get_icon_for_file(item)
self._list.append((icon, name, path))
# update thread status
GObject.idle_add(self.__update_status, False)
# tell extensions search has been stopped
self.emit('notify-stop')
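	# Note: each find-files extension is assumed to expose an
	# is_path_ok(provider, path) predicate (used above), an `active` flag and
	# get_container()/get_title() accessors; see __create_extensions().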
def _close_window(self, widget=None, data=None):
"""Close window"""
self._abort.set() # notify search thread we are terminating
self.window.destroy()
def _browse_directory(self, widget=None, icon_position=None, event=None, data=None):
"""Prompt user for directory selection."""
dialog = Gtk.FileChooserDialog(
title=_('Find files'),
parent=self.window,
action=Gtk.FileChooserAction.SELECT_FOLDER,
buttons=(
_('Cancel'), Gtk.ResponseType.REJECT,
_('Select'), Gtk.ResponseType.ACCEPT
)
)
dialog.set_filename(self._entry_path.get_text())
response = dialog.run()
if response == Gtk.ResponseType.ACCEPT:
self._entry_path.set_text(dialog.get_filename())
dialog.destroy()
def _handle_key_press(self, widget, event, data=None):
"""Handle pressing keys"""
if event.keyval == Gdk.KEY_Escape:
self._close_window()
def stop_search(self, widget=None, data=None):
"""Stop searching for files"""
self._abort.set()
def find_files(self, widget=None, data=None):
"""Start searching for files"""
if self._running:
return
# thread is not running, start it
path = self._entry_path.get_text()
# make sure we have a valid provider
if self._provider is None:
ProviderClass = self._application.get_provider_by_protocol('file')
self._provider = ProviderClass(self._parent)
# check if specified path exists
if not self._provider.is_dir(path):
dialog = Gtk.MessageDialog(
self.window,
Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.ERROR,
Gtk.ButtonsType.OK,
_(
'Specified path is not valid or doesn\'t '
'exist anymore. Please check your selection '
'and try again.'
)
)
dialog.run()
dialog.destroy()
return
# get list of active extensions
extension_containers = self.extensions_container.get_children()
active_extensions = list(filter(lambda cont: cont.extension.active, extension_containers))
if len(active_extensions) == 0:
dialog = Gtk.MessageDialog(
self.window,
Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.WARNING,
Gtk.ButtonsType.OK,
_(
'You need to enable at least one extension '
'in order to find files and directories!'
)
)
dialog.run()
dialog.destroy()
return
# set thread control objects
self._abort.clear()
# clear existing list
self._list.clear()
# start the thread
params = {
'path': path,
'extensions': active_extensions,
'scan_recursively': self._checkbox_recursive.get_active()
}
thread = Thread(target=self.__find_files, kwargs=params)
thread.start()
| gpl-3.0 | -3,849,536,758,069,162,500 | 29.032345 | 103 | 0.691438 | false |
Som-Energia/somenergia-generationkwh | generationkwh/usagetracker_test.py | 1 | 6349 | # -*- coding: utf-8 -*-
from .usagetracker import UsageTracker
from .isodates import isodate
import unittest
class CurveProvider_MockUp(object):
def __init__(self, data):
self._data = data
def usage(self, member, start, end):
return self._data[:]
def updateUsage(self, member, start, data):
self._data[:] = data
def rights_kwh(self, member, start, end):
return self._data
def periodMask(self, fare, period, start, end):
return self._data
# Readable verbose testcase listing
unittest.TestCase.__str__ = unittest.TestCase.id
class UsageTracker_Test(unittest.TestCase):
def setupUsageTracker(self, rights, usage, periodMask):
self.today = isodate('2015-01-02')
return UsageTracker(
rights=CurveProvider_MockUp(rights),
usage=CurveProvider_MockUp(usage),
periodMask=CurveProvider_MockUp(periodMask),
)
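    # In these fixtures rights/usage/periodMask are parallel per-bin curves:
    # rights[i] is the kWh granted in bin i, usage[i] the kWh already spent
    # there, and periodMask[i] flags whether bin i belongs to the fare period.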
def test_available_noProduction(self):
t = self.setupUsageTracker(
rights=[0,0],
usage=[0,0],
periodMask=[1,1],
)
kwh = t.available_kwh('soci', self.today, self.today, '2.0A', 'P1')
self.assertEqual(kwh, 0)
def test_available_singleBinProduction(self):
t = self.setupUsageTracker(
rights=[2,0],
usage=[0,0],
periodMask=[1,1],
)
kwh = t.available_kwh('soci', self.today, self.today, '2.0A', 'P1')
self.assertEqual(kwh, 2)
def test_available_manyBinsProduction_getAdded(self):
t = self.setupUsageTracker(
rights=[2,3],
usage=[0,0],
periodMask=[1,1],
)
kwh = t.available_kwh('soci', self.today, self.today, '2.0A', 'P1')
self.assertEqual(kwh, 5)
def test_available_masked(self):
t = self.setupUsageTracker(
rights=[2,3],
usage=[0,0],
periodMask=[0,1],
)
kwh = t.available_kwh('soci', self.today, self.today, '2.0A', 'P1')
self.assertEqual(kwh, 3)
def test_available_manyBinsProduction_used(self):
t = self.setupUsageTracker(
rights=[2,3],
usage=[1,0],
periodMask=[1,1],
)
kwh = t.available_kwh('soci', self.today, self.today, '2.0A', 'P1')
self.assertEqual(kwh, 4)
def test_available_manyBinsProduction_usedMasked(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[2,1],
periodMask=[0,1],
)
kwh = t.available_kwh('soci', self.today, self.today, '2.0A', 'P1')
self.assertEqual(kwh, 2)
def test_use_halfBin(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[0,0],
periodMask=[1,1],
)
real = t.use_kwh('soci', self.today, self.today, '2.0A', 'P1', 4)
self.assertEqual(
[4,0], t.usage('soci', self.today, self.today))
def test_use_fullBin(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[0,0],
periodMask=[1,1],
)
real = t.use_kwh('soci', self.today, self.today, '2.0A', 'P1', 5)
self.assertEqual(
[5,0], t.usage('soci', self.today, self.today))
self.assertEqual(5, real)
def test_use_pastBin(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[0,0],
periodMask=[1,1],
)
real = t.use_kwh('soci', self.today, self.today, '2.0A', 'P1', 6)
self.assertEqual(
[5,1], t.usage('soci', self.today, self.today))
self.assertEqual(6, real)
def test_use_beyondAvailable(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[0,0],
periodMask=[1,1],
)
real = t.use_kwh('soci', self.today, self.today, '2.0A', 'P1', 9)
self.assertEqual(
[5,3], t.usage('soci', self.today, self.today))
self.assertEqual(8, real)
def test_use_previouslyUsed(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[1,0],
periodMask=[1,1],
)
real = t.use_kwh('soci', self.today, self.today, '2.0A', 'P1', 2)
self.assertEqual(
[3,0], t.usage('soci', self.today, self.today))
self.assertEqual(2, real)
    def test_use_previouslyUsed_masked(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[1,0],
periodMask=[0,1],
)
real = t.use_kwh('soci', self.today, self.today, '2.0A', 'P1', 2)
self.assertEqual(
[1,2], t.usage('soci', self.today, self.today))
self.assertEqual(2, real)
def test_refund_singleBin(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[3,0],
periodMask=[1,1],
)
real = t.refund_kwh('soci', self.today, self.today, '2.0A', 'P1', 2)
self.assertEqual(
[1,0], t.usage('soci', self.today, self.today))
self.assertEqual(2, real)
def test_refund_severalBins_refundsBackward(self):
t = self.setupUsageTracker(
rights=[3,5],
usage=[2,2],
periodMask=[1,1],
)
real = t.refund_kwh('soci', self.today, self.today, '2.0A', 'P1', 3)
self.assertEqual(
[1,0], t.usage('soci', self.today, self.today))
self.assertEqual(3, real)
def test_refund_beyondUsed(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[2,2],
periodMask=[1,1],
)
real = t.refund_kwh('soci', self.today, self.today, '2.0A', 'P1', 5)
self.assertEqual(
[0,0], t.usage('soci', self.today, self.today))
self.assertEqual(4, real)
def test_refund_masked(self):
t = self.setupUsageTracker(
rights=[5,3],
usage=[2,2],
periodMask=[0,1],
)
real = t.refund_kwh('soci', self.today, self.today, '2.0A', 'P1', 2)
self.assertEqual(
[2,0], t.usage('soci', self.today, self.today))
self.assertEqual(2, real)
# vim: ts=4 sw=4 et
| agpl-3.0 | 4,403,524,168,502,628,000 | 27.728507 | 76 | 0.520869 | false |
FabriceSalvaire/Musica | sphinx-extension/musica-figure/setup.py | 1 | 2088 |
####################################################################################################
#
# Musica - A Music Theory package for Python
# Copyright (C) 2017 Salvaire Fabrice
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
from setuptools import setup, find_packages
####################################################################################################
setup(
name='sphinxcontrib-musica',
version='0.1',
author='Fabrice Salvaire',
author_email='[email protected]',
    description='Sphinx musica extension',
license='GPLv3',
keywords= 'sphinx extension musica',
url='https://musica.fabrice-salvaire.fr',
long_description='',
zip_safe=False,
packages=find_packages(),
namespace_packages=['sphinxcontrib'],
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
install_requires=[
'Sphinx>=0.6',
],
)
| gpl-3.0 | 4,596,779,369,161,779,000 | 36.285714 | 100 | 0.547414 | false |
makuto/redditLikedSavedImageDownloader | downloaders/redditUserImageScraper.py | 1 | 14172 |
# -*- coding: utf-8 -*-
import time
import os
# local imports
import settings
import submission
import LikedSavedDatabase
from utils import logger, utilities
from downloaders import imageSaver
from downloaders import redditScraper
from downloaders import tumblrScraper
from downloaders import pinterestScraper
from downloaders import pixivScraper
from downloaders import imgurDownloader
scriptFinishedSentinel = '>>> runLikedSavedDownloader() Process Finished <<<'
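# The sentinel above is written through the pipe once a worker is done so the
# parent process can detect completion; see runLikedSavedDownloader() and the
# saveRequested*() entry points below.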
def initialize():
settings.getSettings()
if not settings.settings['Database']:
logger.log('Please provide a location for the Database')
return
# Do this early so we can use it anywhere
LikedSavedDatabase.initializeFromSettings(settings.settings)
def runLikedSavedDownloader(pipeConnection):
if pipeConnection:
logger.setPipe(pipeConnection)
initialize()
if (not settings.settings['Use_cached_submissions']
and not settings.hasTumblrSettings()
and not settings.hasRedditSettings()
and not settings.hasPixivSettings()
and not settings.hasPinterestSettings()):
        logger.log('Please provide Tumblr, Pixiv, Pinterest, or Reddit account details'
                   ' via the Settings page provided by the Content Collector server')
return
if not settings.settings['Gfycat_Client_id']:
logger.log('No Gfycat Client ID and/or Gfycat Client Secret was provided. '
'This is required to download Gfycat media reliably.')
logger.log('Output: ' + settings.settings['Output_dir'])
utilities.makeDirIfNonexistant(settings.settings['Output_dir'])
utilities.makeDirIfNonexistant(settings.settings['Metadata_output_dir'])
submissions = getSubmissionsToSave()
logger.log('Saving images. This will take several minutes...')
unsupportedSubmissions = imageSaver.saveAllImages(settings.settings['Output_dir'], submissions,
imgur_auth = imgurDownloader.getImgurAuth(),
only_download_albums = settings.settings['Only_download_albums'],
skip_n_percent_submissions = settings.settings['Skip_n_percent_submissions'],
soft_retrieve_imgs = settings.settings['Should_soft_retrieve'],
only_important_messages = settings.settings['Only_important_messages'])
# Write out a .json file listing all of the submissions the script failed to download
if unsupportedSubmissions:
submission.saveSubmissionsAsJson(unsupportedSubmissions, settings.settings['Metadata_output_dir'] + u'/'
+ 'UnsupportedSubmissions_' + time.strftime("%Y%m%d-%H%M%S") + '.json')
if settings.settings['Should_soft_retrieve']:
logger.log('\nYou have run the script in Soft Retrieve mode - if you actually\n'
'want to download images now, you should change SHOULD_SOFT_RETRIEVE\n'
'to False in settings.txt')
if pipeConnection:
logger.log(scriptFinishedSentinel)
pipeConnection.close()
def getSubmissionsToSave():
# TODO: Only save one post for early out. Only save once all downloading is done
redditRequestOnlyNewSavedCache = None
redditRequestOnlyNewLikedCache = None
if settings.settings['Reddit_Try_Request_Only_New']:
redditRequestOnlyNewSavedCache = submission.readCacheSubmissions(
settings.settings['Reddit_Try_Request_Only_New_Saved_Cache_File'])
redditRequestOnlyNewLikedCache = submission.readCacheSubmissions(
settings.settings['Reddit_Try_Request_Only_New_Liked_Cache_File'])
tumblrRequestOnlyNewCache = None
if settings.settings['Tumblr_Try_Request_Only_New']:
tumblrRequestOnlyNewCache = submission.readCacheSubmissions(
settings.settings['Tumblr_Try_Request_Only_New_Cache_File'])
pixivRequestOnlyNewCache = None
pixivRequestOnlyNewPrivateCache = None
if settings.settings['Pixiv_Try_Request_Only_New']:
pixivRequestOnlyNewCache = submission.readCacheSubmissions(
settings.settings['Pixiv_Try_Request_Only_New_Cache_File'])
pixivRequestOnlyNewPrivateCache = submission.readCacheSubmissions(
settings.settings['Pixiv_Try_Request_Only_New_Private_Cache_File'])
submissions = []
if settings.settings['Use_cached_submissions']:
logger.log('Using cached submissions')
if settings.settings['Reddit_Enabled']:
submissions += submission.readCacheSubmissions(settings.settings['Reddit_cache_file'])
if settings.settings['Tumblr_Enabled']:
submissions += submission.readCacheSubmissions(settings.settings['Tumblr_cache_file'])
if settings.settings['Pixiv_Enabled']:
submissions += submission.readCacheSubmissions(settings.settings['Pixiv_cache_file'])
else:
if settings.hasRedditSettings():
redditSubmissions, redditComments, earlyOutPoints = redditScraper.getRedditUserLikedSavedSubmissions(
settings.settings['Username'], settings.settings['Password'],
settings.settings['Client_id'], settings.settings['Client_secret'],
request_limit = settings.settings['Reddit_Total_requests'],
saveLiked = settings.settings['Reddit_Save_Liked'],
saveSaved = settings.settings['Reddit_Save_Saved'],
earlyOutPointSaved = redditRequestOnlyNewSavedCache,
earlyOutPointLiked = redditRequestOnlyNewLikedCache,
unlikeLiked = settings.settings['Reddit_Unlike_Liked'],
unsaveSaved = settings.settings['Reddit_Unsave_Saved'])
# Cache them in case it's needed later
submission.writeCacheSubmissions(redditSubmissions, settings.settings['Reddit_cache_file'])
# Set new early out points
submission.writeCacheSubmissions([earlyOutPoints[0]],
settings.settings['Reddit_Try_Request_Only_New_Saved_Cache_File'])
submission.writeCacheSubmissions([earlyOutPoints[1]],
settings.settings['Reddit_Try_Request_Only_New_Liked_Cache_File'])
submissions += redditSubmissions
# For reddit only: write out comments to separate json file
if settings.settings['Reddit_Save_Comments']:
submission.saveSubmissionsAsJson(redditComments, settings.settings['Metadata_output_dir'] + u'/'
+ 'Reddit_SavedComment_Submissions_' + time.strftime("%Y%m%d-%H%M%S") + '.json')
# Output to HTML so the user can look at them easily
submission.saveSubmissionsAsHtml(redditComments, settings.settings['Output_dir'] + u'/'
+ 'Reddit_SavedComment_Submissions_' + time.strftime("%Y%m%d-%H%M%S") + '.html')
logger.log('Saved ' + str(len(redditComments)) + ' reddit comments')
if settings.hasTumblrSettings():
tumblrSubmissions, earlyOutPoint = tumblrScraper.getTumblrUserLikedSubmissions(
settings.settings['Tumblr_Client_id'], settings.settings['Tumblr_Client_secret'],
settings.settings['Tumblr_Client_token'], settings.settings['Tumblr_Client_token_secret'],
likeRequestLimit = settings.settings['Tumblr_Total_requests'],
requestOnlyNewCache = tumblrRequestOnlyNewCache)
# Cache them in case it's needed later
submission.writeCacheSubmissions(tumblrSubmissions, settings.settings['Tumblr_cache_file'])
# Set new early out point
submission.writeCacheSubmissions([earlyOutPoint],
settings.settings['Tumblr_Try_Request_Only_New_Cache_File'])
submissions += tumblrSubmissions
if settings.hasPixivSettings():
pixivSubmissions, nextEarlyOutPair = pixivScraper.getPixivUserBookmarkedSubmissions(settings.settings['Pixiv_username'],
settings.settings['Pixiv_password'],
requestOnlyNewCache = pixivRequestOnlyNewCache,
requestOnlyNewPrivateCache = pixivRequestOnlyNewPrivateCache)
# Cache them in case it's needed later
submission.writeCacheSubmissions(pixivSubmissions, settings.settings['Pixiv_cache_file'])
# Set new early out point
if nextEarlyOutPair[0]:
submission.writeCacheSubmissions([nextEarlyOutPair[0]],
settings.settings['Pixiv_Try_Request_Only_New_Cache_File'])
if nextEarlyOutPair[1]:
submission.writeCacheSubmissions([nextEarlyOutPair[1]],
settings.settings['Pixiv_Try_Request_Only_New_Private_Cache_File'])
submissions += pixivSubmissions
if settings.hasPinterestSettings():
pinterestCacheFile = (settings.settings['Pinterest_Try_Request_Only_New_Cache_File']
if settings.settings['Pinterest_Try_Request_Only_New'] else None)
pinterestSubmissions = pinterestScraper.getPinterestUserPinnedSubmissions(settings.settings['Pinterest_email'],
settings.settings['Pinterest_username'],
settings.settings['Pinterest_password'],
pinterestCacheFile)
submissions += pinterestSubmissions
# Write out a .json file with all of the submissions in case the user wants the data
submission.saveSubmissionsAsJson(submissions, settings.settings['Metadata_output_dir'] + u'/'
+ 'AllSubmissions_' + time.strftime("%Y%m%d-%H%M%S") + '.json')
LikedSavedDatabase.db.addSubmissions(submissions)
return submissions
def saveRequestedSubmissions(pipeConnection, submissionIds):
if pipeConnection:
logger.setPipe(pipeConnection)
initialize()
logger.log('Attempting to save {} requested submissions. This will take several minutes...'
.format(len(submissionIds)))
dbSubmissions = LikedSavedDatabase.db.getSubmissionsByIds(submissionIds)
submissions = []
# Convert from database submissions to Submission
for dbSubmission in dbSubmissions:
convertedSubmission = submission.Submission()
convertedSubmission.initFromDict(dbSubmission)
submissions.append(convertedSubmission)
if len(submissions) != len(submissionIds):
logger.log('Could not find {} submissions in database!'.format(len(submissionIds) - len(submissions)))
unsupportedSubmissions = imageSaver.saveAllImages(settings.settings['Output_dir'], submissions,
imgur_auth = imgurDownloader.getImgurAuth(),
only_download_albums = settings.settings['Only_download_albums'],
skip_n_percent_submissions = settings.settings['Skip_n_percent_submissions'],
soft_retrieve_imgs = settings.settings['Should_soft_retrieve'],
only_important_messages = settings.settings['Only_important_messages'])
logger.log('Download finished. Please refresh the page to see updated entries')
if pipeConnection:
logger.log(scriptFinishedSentinel)
pipeConnection.close()
def saveRequestedUrls(pipeConnection, urls):
if pipeConnection:
logger.setPipe(pipeConnection)
initialize()
logger.log('Attempting to save {} requested urls. This may take several minutes...'
.format(len(urls)))
submissions = []
# Create Submission for each URL
for url in urls:
convertedSubmission = submission.Submission()
convertedSubmission.source = "UserRequested"
convertedSubmission.title = "UserRequested"
convertedSubmission.author = "(Requested by user)"
convertedSubmission.subreddit = "Requested_Downloads"
convertedSubmission.subredditTitle = "Requested Downloads"
convertedSubmission.body = "(Requested by user)"
convertedSubmission.bodyUrl= url
convertedSubmission.postUrl= url
submissions.append(convertedSubmission)
if len(submissions) != len(urls):
logger.log('Could not parse {} URLs!'.format(len(urls) - len(submissions)))
unsupportedSubmissions = imageSaver.saveAllImages(settings.settings['Output_dir'], submissions,
imgur_auth = imgurDownloader.getImgurAuth(),
only_download_albums = settings.settings['Only_download_albums'],
skip_n_percent_submissions = settings.settings['Skip_n_percent_submissions'],
soft_retrieve_imgs = settings.settings['Should_soft_retrieve'],
only_important_messages = settings.settings['Only_important_messages'])
logger.log('Download finished. Output to \'Requested Downloads\' directory')
if pipeConnection:
logger.log(scriptFinishedSentinel)
pipeConnection.close()
if __name__ == '__main__':
runLikedSavedDownloader(None)
| mit | 8,327,915,107,425,567,000 | 51.880597 | 157 | 0.620449 | false |
kionetworks/openstack-dashboard-essex | openstack_dashboard/overrides/images/images/views.py | 1 | 6703 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Nova images.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import api
from horizon import exceptions
from horizon import forms
from horizon import tabs
from .forms import UpdateImageForm, LaunchForm
from .tabs import ImageDetailTabs
LOG = logging.getLogger(__name__)
class LaunchView(forms.ModalFormView):
form_class = LaunchForm
template_name = 'nova/images_and_snapshots/images/launch.html'
context_object_name = 'image'
def get_form_kwargs(self):
kwargs = super(LaunchView, self).get_form_kwargs()
kwargs['flavor_list'] = self.flavor_list()
kwargs['keypair_list'] = self.keypair_list()
kwargs['security_group_list'] = self.security_group_list()
kwargs['volume_list'] = self.volume_list()
return kwargs
def get_object(self, *args, **kwargs):
image_id = self.kwargs["image_id"]
try:
self.object = api.image_get_meta(self.request, image_id)
except:
msg = _('Unable to retrieve image "%s".') % image_id
redirect = reverse('horizon:nova:images:index')
exceptions.handle(self.request, msg, redirect=redirect)
return self.object
def get_context_data(self, **kwargs):
context = super(LaunchView, self).get_context_data(**kwargs)
try:
context['usages'] = api.tenant_quota_usages(self.request)
except:
exceptions.handle(self.request)
return context
def get_initial(self):
return {'image_id': self.kwargs["image_id"],
'tenant_id': self.request.user.tenant_id}
def flavor_list(self):
display = '%(name)s (%(vcpus)sVCPU / %(disk)sGB Disk / %(ram)sMB Ram )'
try:
flavors = api.flavor_list(self.request)
flavor_list = [(flavor.id, display % {"name": flavor.name,
"vcpus": flavor.vcpus,
"disk": flavor.disk,
"ram": flavor.ram})
for flavor in flavors]
except:
flavor_list = []
exceptions.handle(self.request,
_('Unable to retrieve instance flavors.'))
return sorted(flavor_list)
def keypair_list(self):
try:
keypairs = api.keypair_list(self.request)
keypair_list = [(kp.name, kp.name) for kp in keypairs]
except:
keypair_list = []
exceptions.handle(self.request,
_('Unable to retrieve keypairs.'))
return keypair_list
def security_group_list(self):
try:
groups = api.security_group_list(self.request)
security_group_list = [(sg.name, sg.name) for sg in groups]
except:
exceptions.handle(self.request,
_('Unable to retrieve list of security groups'))
security_group_list = []
return security_group_list
def volume_list(self):
volume_options = [("", _("Select Volume"))]
def _get_volume_select_item(volume):
if hasattr(volume, "volume_id"):
vol_type = "snap"
visible_label = _("Snapshot")
else:
vol_type = "vol"
visible_label = _("Volume")
return (("%s:%s" % (volume.id, vol_type)),
("%s - %s GB (%s)" % (volume.display_name,
volume.size,
visible_label)))
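            # e.g. a 10 GB volume named "data" becomes the choice tuple
            # ("<volume id>:vol", "data - 10 GB (Volume)").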
# First add volumes to the list
try:
volumes = [v for v in api.volume_list(self.request) \
if v.status == api.VOLUME_STATE_AVAILABLE]
volume_options.extend(
[_get_volume_select_item(vol) for vol in volumes])
except:
exceptions.handle(self.request,
_('Unable to retrieve list of volumes'))
# Next add volume snapshots to the list
try:
snapshots = api.volume_snapshot_list(self.request)
snapshots = [s for s in snapshots \
if s.status == api.VOLUME_STATE_AVAILABLE]
volume_options.extend(
[_get_volume_select_item(snap) for snap in snapshots])
except:
exceptions.handle(self.request,
_('Unable to retrieve list of volumes'))
return volume_options
class UpdateView(forms.ModalFormView):
form_class = UpdateImageForm
template_name = 'nova/images_and_snapshots/images/update.html'
context_object_name = 'image'
def get_object(self, *args, **kwargs):
try:
self.object = api.image_get_meta(self.request, kwargs['image_id'])
except:
msg = _('Unable to retrieve image "%s".') % kwargs['image_id']
redirect = reverse('horizon:nova:images:index')
exceptions.handle(self.request, msg, redirect=redirect)
return self.object
def get_initial(self):
properties = self.object['properties']
return {'image_id': self.kwargs['image_id'],
'name': self.object.get('name', ''),
'kernel': properties.get('kernel_id', ''),
'ramdisk': properties.get('ramdisk_id', ''),
'architecture': properties.get('architecture', ''),
'container_format': self.object.get('container_format', ''),
'disk_format': self.object.get('disk_format', ''), }
class DetailView(tabs.TabView):
tab_group_class = ImageDetailTabs
template_name = 'nova/images_and_snapshots/images/detail.html'
| apache-2.0 | -1,298,537,609,217,687,600 | 37.085227 | 79 | 0.570789 | false |
pbs/django-cms | cms/admin/pageadmin.py | 1 | 70397 |
# -*- coding: utf-8 -*-
from cms.admin.change_list import CMSChangeList
from cms.admin.dialog.views import get_copy_dialog
from cms.admin.forms import PageForm, PageAddForm
from cms.admin.permissionadmin import (PAGE_ADMIN_INLINES,
PagePermissionInlineAdmin, ViewRestrictionInlineAdmin)
from cms.admin.views import revert_plugins
from cms.apphook_pool import apphook_pool
from cms.exceptions import NoPermissionsException
from cms.forms.widgets import PluginEditor
from cms.models import (Page, Title, CMSPlugin, PagePermission,
PageModeratorState, EmptyTitle, GlobalPagePermission, titlemodels)
from cms.models.managers import PagePermissionsPermissionManager
from cms.models.placeholdermodel import Placeholder
from cms.plugin_pool import plugin_pool
from cms.templatetags.cms_admin import admin_static_url
from cms.utils import (copy_plugins, helpers, moderator, permissions, plugins,
get_template_from_request, get_language_from_request,
placeholder as placeholder_utils, admin as admin_utils, cms_static_url)
from cms.utils.page_resolver import is_valid_url
from cms.utils.admin import jsonify_request
from cms.utils.permissions import has_plugin_permission
from cms.utils import request_item
from copy import deepcopy
from distutils.version import LooseVersion
from django import template
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.utils import get_deleted_objects
from urllib2 import unquote
from django.contrib import messages
from django.contrib.sites.models import Site
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist, ValidationError
from django.core.urlresolvers import reverse
from django.db import router, transaction, models
from django.forms import CharField
from django.http import (HttpResponseRedirect, HttpResponse, Http404,
HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotAllowed, HttpResponseServerError,
HttpRequest)
from django.shortcuts import render_to_response, get_object_or_404
from django.template.context import RequestContext
from django.template.defaultfilters import (title, escape, force_escape,
escapejs)
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext, ugettext_lazy as _
from menus.menu_pool import menu_pool
import django
import functools
DJANGO_1_3 = LooseVersion(django.get_version()) < LooseVersion('1.4')
if 'reversion' in settings.INSTALLED_APPS:
import reversion
from django.contrib.admin import ModelAdmin
create_on_success = lambda x: x
if DJANGO_1_3:
"""
Backwards compatibility for Django < 1.4 and django-reversion 1.6
"""
class ModelAdmin(ModelAdmin):
def get_inline_instances(self, request):
return self.inline_instances
def get_prepopulated_fields(self, request):
return self.prepopulated_fields
def contribute_fieldsets(cls):
if settings.CMS_MENU_TITLE_OVERWRITE:
general_fields = [('title', 'menu_title')]
else:
general_fields = ['title']
general_fields += ['slug', ('published', 'in_navigation')]
additional_hidden_fields = []
advanced_fields = ['reverse_id', 'overwrite_url', 'redirect', 'login_required', 'limit_visibility_in_menu']
template_fields = ['template']
hidden_fields = ['site', 'parent']
seo_fields = []
if settings.CMS_SOFTROOT:
advanced_fields.append('soft_root')
if settings.CMS_SHOW_START_DATE and settings.CMS_SHOW_END_DATE:
general_fields.append(('publication_date', 'publication_end_date'))
elif settings.CMS_SHOW_START_DATE:
general_fields.append('publication_date')
elif settings.CMS_SHOW_END_DATE:
general_fields.append( 'publication_end_date')
if settings.CMS_MODERATOR:
additional_hidden_fields += ['moderator_state', 'moderator_message']
if settings.CMS_SEO_FIELDS:
seo_fields = ['page_title', 'meta_description', 'meta_keywords']
if not settings.CMS_URL_OVERWRITE:
advanced_fields.remove("overwrite_url")
if not settings.CMS_REDIRECTS:
advanced_fields.remove('redirect')
if menu_pool.get_menus_by_attribute("cms_enabled", True):
advanced_fields.append("navigation_extenders")
if apphook_pool.get_apphooks():
advanced_fields.append("application_urls")
fieldsets = [
(None, {
'fields': general_fields,
'classes': ('general',),
}),
(_('Basic Settings'), {
'fields': template_fields,
'classes': ('low',),
}),
(_('Hidden'), {
'fields': hidden_fields + additional_hidden_fields,
'classes': ('hidden',),
}),
(_('Advanced Settings'), {
'fields': advanced_fields,
'classes': ('collapse',),
}),
]
if settings.CMS_SEO_FIELDS:
fieldsets.append((_("SEO Settings"), {
'fields': seo_fields,
'classes': ('collapse',),
}))
setattr(cls, 'fieldsets', fieldsets)
setattr(cls, 'advanced_fields', advanced_fields)
setattr(cls, 'hidden_fields', hidden_fields)
setattr(cls, 'general_fields', general_fields)
setattr(cls, 'template_fields', template_fields)
setattr(cls, 'additional_hidden_fields', additional_hidden_fields)
setattr(cls, 'seo_fields', seo_fields)
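# For illustration, with CMS_MENU_TITLE_OVERWRITE and both publication date
# settings enabled (a sketch of one possible configuration), the first
# fieldset built above looks like:
#
#     (None, {
#         'fields': [('title', 'menu_title'), 'slug',
#                    ('published', 'in_navigation'),
#                    ('publication_date', 'publication_end_date')],
#         'classes': ('general',),
#     })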
def contribute_list_filter(cls):
list_filter = ['published', 'in_navigation', 'template', 'changed_by']
if settings.CMS_MODERATOR:
list_filter.append('moderator_state')
if settings.CMS_SOFTROOT:
list_filter.append('soft_root')
setattr(cls, 'list_filter', list_filter)
def _get_request_from_varargs(args, kwargs):
for arg in args:
if isinstance(arg, HttpRequest):
return arg
for k, v in kwargs.iteritems():
if k == 'request' and isinstance(v, HttpRequest):
return v
raise ValueError('This decorator should be used on views (=> at least one request obj)')
def mutually_exclusive_on_post(func):
@transaction.atomic
def wrap(*args, **kwargs):
        request = _get_request_from_varargs(args, kwargs)
if request.method == 'POST':
Page.objects.select_for_update().using(router.db_for_write(Page))\
.all().exists()
return func(*args, **kwargs)
functools.update_wrapper(wrap, func)
return wrap
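# A minimal usage sketch (hypothetical view, not part of this module): two
# concurrent POSTs to decorated views serialize on the Page table, since each
# issues the SELECT ... FOR UPDATE above inside transaction.atomic and blocks
# until the other transaction commits or rolls back.
#
#     @mutually_exclusive_on_post
#     def move(request, page_id):
#         ...  # safe to re-read and mutate the page tree here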
class PageAdmin(ModelAdmin):
form = PageForm
    # TODO: add the new equivalent of 'cmsplugin__text__body' to search_fields
search_fields = ('title_set__slug', 'title_set__title', 'reverse_id')
revision_form_template = "admin/cms/page/revision_form.html"
recover_form_template = "admin/cms/page/recover_form.html"
exclude = []
mandatory_placeholders = ('title', 'slug', 'parent', 'site', 'meta_description', 'meta_keywords', 'page_title', 'menu_title')
add_general_fields = ['title', 'slug', 'language', 'template']
change_list_template = "admin/cms/page/change_list.html"
# take care with changing fieldsets, get_fieldsets() method removes some
# fields depending on permissions, but its very static!!
add_fieldsets = [
(None, {
'fields': add_general_fields,
'classes': ('general',),
}),
(_('Hidden'), {
'fields': ['site', 'parent'],
'classes': ('hidden',),
}),
]
inlines = PAGE_ADMIN_INLINES
class Media:
css = {
'all': [cms_static_url(path) for path in (
'css/rte.css',
'css/pages.css',
'css/change_form.css',
'css/jquery.dialog.css',
)]
}
js = ['%sjs/jquery.min.js' % admin_static_url()] + [cms_static_url(path) for path in [
'js/plugins/admincompat.js',
'js/libs/jquery.query.js',
'js/libs/jquery.ui.core.js',
'js/libs/jquery.ui.dialog.js',
]
]
def get_urls(self):
"""Get the admin urls
"""
from django.conf.urls import patterns, url
info = "%s_%s" % (self.model._meta.app_label, self.model._meta.model_name)
pat = lambda regex, fn: url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
url_patterns = patterns('',
pat(r'copy-plugins/$', self.copy_plugins),
pat(r'add-plugin/$', self.add_plugin),
pat(r'^(?P<current_page>\d+)/add-plugin/$', self.add_plugin),
pat(r'edit-plugin/([0-9]+)/$', self.edit_plugin),
pat(r'^(?P<current_page>\d+)/edit-plugin/([0-9]+)/$', self.edit_plugin),
pat(r'remove-plugin/$', self.remove_plugin),
pat(r'^(?P<current_page>\d+)/remove-plugin/$', self.remove_plugin),
pat(r'move-plugin/$', self.move_plugin),
pat(r'^(?P<current_page>\d+)/move-plugin/$', self.move_plugin),
pat(r'^([0-9]+)/delete-translation/$', self.delete_translation),
pat(r'^([0-9]+)/move-page/$', self.move_page),
pat(r'^([0-9]+)/copy-page/$', self.copy_page),
pat(r'^([0-9]+)/change-status/$', self.change_status),
pat(r'^([0-9]+)/change-navigation/$', self.change_innavigation),
pat(r'^([0-9]+)/jsi18n/$', self.redirect_jsi18n),
pat(r'^([0-9]+)/permissions/$', self.get_permissions),
pat(r'^([0-9]+)/moderation-states/$', self.get_moderation_states),
pat(r'^([0-9]+)/change-moderation/$', self.change_moderation),
pat(r'^([0-9]+)/approve/$', self.approve_page), # approve page
pat(r'^([0-9]+)/publish/$', self.publish_page), # publish page
pat(r'^([0-9]+)/remove-delete-state/$', self.remove_delete_state),
pat(r'^([0-9]+)/dialog/copy/$', get_copy_dialog), # copy dialog
pat(r'^([0-9]+)/preview/$', self.preview_page), # copy dialog
pat(r'^([0-9]+)/descendants/$', self.descendants), # menu html for page descendants
pat(r'^(?P<object_id>\d+)/change_template/$', self.change_template), # copy dialog
)
url_patterns = url_patterns + super(PageAdmin, self).get_urls()
return url_patterns
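    # Each pattern above is reversible by its generated name; for example
    # (assuming the default 'admin' namespace) the move view resolves as:
    #
    #     reverse('admin:cms_page_move_page', args=[page.pk])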
def redirect_jsi18n(self, request):
return HttpResponseRedirect(reverse('admin:jsi18n'))
def save_model(self, request, obj, form, change):
"""
        Move the page in the tree if necessary and save every placeholder
        Content object.
"""
target = request.GET.get('target', None)
position = request.GET.get('position', None)
if 'recover' in request.path:
pk = obj.pk
if obj.parent_id:
parent = Page.objects.get(pk=obj.parent_id)
else:
parent = None
obj.lft = 0
obj.rght = 0
obj.tree_id = 0
obj.level = 0
obj.pk = None
obj.insert_at(parent, save=False)
obj.pk = pk
obj.save(no_signals=True)
obj.save()
else:
if 'history' in request.path:
old_obj = Page.objects.get(pk=obj.pk)
obj.level = old_obj.level
obj.parent_id = old_obj.parent_id
obj.rght = old_obj.rght
obj.lft = old_obj.lft
obj.tree_id = old_obj.tree_id
force_with_moderation = target is not None and position is not None and \
moderator.will_require_moderation(target, position)
obj.save(force_with_moderation=force_with_moderation)
if 'recover' in request.path or 'history' in request.path:
obj.pagemoderatorstate_set.all().delete()
if settings.CMS_MODERATOR:
from cms.utils.moderator import page_changed
page_changed(obj, force_moderation_action=PageModeratorState.ACTION_CHANGED)
revert_plugins(request, obj.version.pk, obj)
language = form.cleaned_data['language']
if target is not None and position is not None:
try:
target = self.model.objects.get(pk=target)
except self.model.DoesNotExist:
pass
else:
obj.move_to(target, position)
Title.objects.set_or_create(
request,
obj,
form,
language,
)
# is there any moderation message? save/update state
if settings.CMS_MODERATOR and 'moderator_message' in form.cleaned_data and \
form.cleaned_data['moderator_message']:
moderator.update_moderation_message(obj, form.cleaned_data['moderator_message'])
if obj and "reversion" in settings.INSTALLED_APPS:
helpers.make_revision_with_plugins(obj)
@create_on_success
def change_template(self, request, object_id):
page = get_object_or_404(Page, pk=object_id)
if page.has_change_permission(request):
to_template = request.POST.get("template", None)
if to_template in dict(settings.CMS_TEMPLATES):
page.template = to_template
page.save()
if "reversion" in settings.INSTALLED_APPS:
helpers.make_revision_with_plugins(page)
return HttpResponse(str("ok"))
else:
return HttpResponseBadRequest("template not valid")
else:
return HttpResponseForbidden(_("You have no permission to change the template"))
def get_fieldsets(self, request, obj=None):
"""
Add fieldsets of placeholders to the list of already existing
fieldsets.
"""
if obj: # edit
given_fieldsets = deepcopy(self.fieldsets)
if not obj.has_publish_permission(request):
fields = list(given_fieldsets[0][1]['fields'][2])
fields.remove('published')
given_fieldsets[0][1]['fields'][2] = tuple(fields)
hidden_fields = given_fieldsets[2][1]['fields']
hidden_fields.append('published')
# delete publication_date publication_end_date
del given_fieldsets[0][1]['fields'][3]
if not obj.has_set_navigation_permission(request):
fields = list(given_fieldsets[0][1]['fields'][2])
fields.remove('in_navigation')
given_fieldsets[0][1]['fields'][2] = tuple(fields)
hidden_fields = given_fieldsets[2][1]['fields']
hidden_fields.append('in_navigation')
placeholders_template = get_template_from_request(request, obj)
for placeholder_name in self.get_fieldset_placeholders(placeholders_template):
name = placeholder_utils.get_placeholder_conf("name", placeholder_name, obj.template, placeholder_name)
name = _(name)
given_fieldsets += [(title(name), {'fields':[placeholder_name], 'classes':['plugin-holder']})]
advanced = given_fieldsets.pop(3)
if obj.has_advanced_settings_permission(request):
given_fieldsets.append(advanced)
if settings.CMS_SEO_FIELDS:
seo = given_fieldsets.pop(3)
given_fieldsets.append(seo)
else: # new page
given_fieldsets = deepcopy(self.add_fieldsets)
return given_fieldsets
def get_fieldset_placeholders(self, template):
return plugins.get_placeholders(template)
def get_placeholders_formfields(
self, template, obj, language, version_id, versioned):
formfields = {}
placeholders = self.get_fieldset_placeholders(template)
for placeholder_name in placeholders:
plugin_list = []
show_copy = False
copy_languages = {}
if versioned:
from reversion.models import Version
version = get_object_or_404(Version, pk=version_id)
installed_plugins = plugin_pool.get_all_plugins()
plugin_list = []
actual_plugins = []
bases = {}
revs = []
for related_version in version.revision.version_set.all():
try:
rev = related_version.object_version
except models.FieldDoesNotExist:
# in case the model has changed in the meantime
continue
else:
revs.append(rev)
for rev in revs:
pobj = rev.object
if pobj.__class__ == Placeholder:
if pobj.slot == placeholder_name:
placeholder = pobj
break
for rev in revs:
pobj = rev.object
if pobj.__class__ == CMSPlugin:
if pobj.language == language and pobj.placeholder_id == placeholder.id and not pobj.parent_id:
if pobj.get_plugin_class() == CMSPlugin:
plugin_list.append(pobj)
else:
bases[int(pobj.pk)] = pobj
if hasattr(pobj, "cmsplugin_ptr_id"):
actual_plugins.append(pobj)
for plugin in actual_plugins:
if int(plugin.cmsplugin_ptr_id) in bases:
bases[int(plugin.cmsplugin_ptr_id)].placeholder = placeholder
bases[int(plugin.cmsplugin_ptr_id)].set_base_attr(plugin)
plugin_list.append(plugin)
else:
placeholder, created = obj.placeholders.get_or_create(slot=placeholder_name)
installed_plugins = plugin_pool.get_all_plugins(placeholder_name, obj)
plugin_list = CMSPlugin.objects.filter(language=language, placeholder=placeholder, parent=None).order_by('position')
other_plugins = CMSPlugin.objects.filter(placeholder=placeholder, parent=None).exclude(language=language)
dict_cms_languages = dict(settings.CMS_LANGUAGES)
for plugin in other_plugins:
if (not plugin.language in copy_languages) and (plugin.language in dict_cms_languages):
copy_languages[plugin.language] = dict_cms_languages[plugin.language]
if copy_languages and len(settings.CMS_LANGUAGES) > 1:
show_copy = True
widget = PluginEditor(attrs={
'installed': installed_plugins,
'list': plugin_list,
'copy_languages': copy_languages.items(),
'show_copy': show_copy,
'language': language,
'placeholder': placeholder
})
formfields[placeholder.slot] = CharField(widget=widget, required=False)
return formfields
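    # For illustration, given a template with a single 'content' placeholder
    # (hypothetical slot name), the mapping returned above looks roughly like:
    #
    #     {'content': CharField(widget=PluginEditor(...), required=False)}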
def get_form(self, request, obj=None, **kwargs):
"""
Get PageForm for the Page model and modify its fields depending on
the request.
"""
language = get_language_from_request(request, obj)
if obj:
self.inlines = PAGE_ADMIN_INLINES
if not settings.CMS_SOFTROOT and 'soft_root' in self.exclude:
self.exclude.remove('soft_root')
version_id = None
versioned = False
if "history" in request.path or 'recover' in request.path:
versioned = True
version_id = request.path.split("/")[-2]
parent = (object,)
if hasattr(self.form, 'Meta'):
parent = (self.form.Meta, object)
attrs = {}
accepted_attrs = [
'fields', 'exclude', 'widgets', 'localized_fields',
'labels', 'help_texts', 'error_messages', 'model'
]
        for attr_name in accepted_attrs:
            for base in parent:
                val = getattr(base, attr_name, None)
                if val is not None:
                    attrs[attr_name] = val
                    break
form_class_attrs = {
'Meta': type(str('Meta'), parent, attrs),
'formfield_callback': functools.partial(
self.formfield_for_dbfield, request=request
)
}
if settings.CMS_TEMPLATES:
selected_template = get_template_from_request(request, obj)
form_class_attrs.update(self.get_placeholders_formfields(
selected_template, obj, language, version_id, versioned))
form = type('PageForm', (self.form, ), form_class_attrs)
else:
self.inlines = []
form = PageAddForm
if obj:
try:
title_obj = obj.get_title_obj(language=language, fallback=False, version_id=version_id, force_reload=True)
except titlemodels.Title.DoesNotExist:
title_obj = EmptyTitle()
if form.base_fields['site'].initial is None:
form.base_fields['site'].initial = obj.site
for name in ['slug',
'title',
'application_urls',
'redirect',
'meta_description',
'meta_keywords',
'menu_title',
'page_title']:
form.base_fields[name].initial = getattr(title_obj, name)
if title_obj.overwrite_url:
form.base_fields['overwrite_url'].initial = title_obj.path
else:
form.base_fields['overwrite_url'].initial = ""
if settings.CMS_TEMPLATES:
template_choices = list(settings.CMS_TEMPLATES)
form.base_fields['template'].choices = template_choices
form.base_fields['template'].initial = force_unicode(selected_template)
else:
for name in ['slug','title']:
form.base_fields[name].initial = u''
form.base_fields['parent'].initial = request.GET.get('target', None)
form.base_fields['site'].initial = request.session.get('cms_admin_site', None)
form.base_fields['template'].initial = settings.CMS_TEMPLATES[0][0]
if obj and not obj.has_advanced_settings_permission(request):
for field in self.advanced_fields:
del form.base_fields[field]
return form
def get_inline_instances(self, request, obj=None):
inlines = super(PageAdmin, self).get_inline_instances(request, obj)
if settings.CMS_PERMISSION and obj:
filtered_inlines = []
for inline in inlines:
if isinstance(inline, PagePermissionInlineAdmin)\
and not isinstance(inline, ViewRestrictionInlineAdmin):
if "recover" in request.path or "history" in request.path:
# do not display permissions in recover mode
continue
if not obj.has_change_permissions_permission(request):
continue
filtered_inlines.append(inline)
inlines = filtered_inlines
return inlines
@mutually_exclusive_on_post
def add_view(self, request, form_url='', extra_context=None):
extra_context = extra_context or {}
if settings.CMS_MODERATOR and 'target' in request.GET and 'position' in request.GET:
moderation_required = moderator.will_require_moderation(
request.GET['target'], request.GET['position']
)
extra_context.update({
'moderation_required': moderation_required,
'moderation_level': _('higher'),
'show_save_and_continue':True,
})
language = get_language_from_request(request)
extra_context.update({
'language': language,
})
return super(PageAdmin, self).add_view(request, form_url, extra_context=extra_context)
@mutually_exclusive_on_post
def change_view(self, request, object_id, form_url='', extra_context=None):
"""
The 'change' admin view for the Page model.
"""
try:
obj = self.model.objects.get(pk=object_id)
except (self.model.DoesNotExist, ValueError):
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
else:
selected_template = get_template_from_request(request, obj)
moderation_level, moderation_required = moderator.get_test_moderation_level(obj, request.user)
# if there is a delete request for this page
moderation_delete_request = (settings.CMS_MODERATOR and
obj.pagemoderatorstate_set.get_delete_actions(
).count())
if request.method == 'POST':
try:
self._assert_user_rights_allow_operations(request, obj)
except PermissionDenied as exc:
return HttpResponseForbidden(exc.message)
#activate(user_lang_set)
extra_context = {
'placeholders': self.get_fieldset_placeholders(selected_template),
'page': obj,
'CMS_PERMISSION': settings.CMS_PERMISSION,
'CMS_MODERATOR': settings.CMS_MODERATOR,
'ADMIN_MEDIA_URL': settings.STATIC_URL,
'has_change_permissions_permission': obj.has_change_permissions_permission(request),
'has_moderate_permission': obj.has_moderate_permission(request),
'moderation_level': moderation_level,
'moderation_required': moderation_required,
'moderator_should_approve': moderator.moderator_should_approve(request, obj),
'moderation_delete_request': moderation_delete_request,
'show_delete_translation': len(obj.get_languages()) > 1,
'current_site_id': settings.SITE_ID,
}
extra_context = self.update_language_tab_context(request, obj, extra_context)
tab_language = request.GET.get("language", None)
response = super(PageAdmin, self).change_view(request, object_id, form_url=form_url, extra_context=extra_context)
if tab_language and response.status_code == 302 and response._headers['location'][1] == request.path :
location = response._headers['location']
response._headers['location'] = (location[0], "%s?language=%s" % (location[1], tab_language))
return response
def _assert_user_rights_allow_operations(self, request, page):
"""
Some page attributes (like published state) can be changed as a result of the request.
Check that the user has the special rights for these operations.
"""
new_published_state = 'published' in request.POST
if new_published_state != page.published and not page.has_publish_permission(request):
raise PermissionDenied(_("You have no permission to publish/unpublish the page."))
new_navigation_state = 'in_navigation' in request.POST
if new_navigation_state != page.in_navigation and \
not page.has_set_navigation_permission(request):
raise PermissionDenied(_("You have no permission to set if the page "\
"should be visible or not in navigation."))
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
# add context variables
filled_languages = []
if obj:
filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')]
allowed_languages = [lang[0] for lang in self._get_site_languages(obj)]
context.update({
'filled_languages': [lang for lang in filled_languages if lang in allowed_languages],
})
return super(PageAdmin, self).render_change_form(request, context, add, change, form_url, obj)
def _get_site_languages(self, obj):
site_id = None
if obj:
site_id = obj.site_id
languages = []
if site_id and site_id in settings.CMS_SITE_LANGUAGES:
for lang in settings.CMS_SITE_LANGUAGES[site_id]:
lang_label = dict(settings.CMS_LANGUAGES).get(lang, dict(settings.LANGUAGES).get(lang, lang))
languages.append((lang, lang_label))
else:
languages = settings.CMS_LANGUAGES
return languages
def update_language_tab_context(self, request, obj, context=None):
if not context:
context = {}
language = get_language_from_request(request, obj)
languages = self._get_site_languages(obj)
context.update({
'language': language,
'language_tabs': languages,
'show_language_tabs': len(languages) > 1,
})
return context
def response_change(self, request, obj):
"""Called always when page gets changed, call save on page, there may be
some new stuff, which should be published after all other objects on page
are collected.
"""
if settings.CMS_MODERATOR:
# save the object again, so all the related changes to page model
# can be published if required
obj.save()
return super(PageAdmin, self).response_change(request, obj)
def has_add_permission(self, request):
"""
Return true if the current user has permission to add a new page.
"""
if settings.CMS_PERMISSION:
return permissions.has_page_add_permission(
request, on_any_sites=True)
return super(PageAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
"""
Return true if the current user has permission on the page.
Return the string 'All' if the user has all rights.
"""
if settings.CMS_PERMISSION:
if obj:
return obj.has_change_permission(request)
else:
return permissions.has_page_change_permission(
request, on_any_sites=True)
return super(PageAdmin, self).has_change_permission(request, obj)
def has_delete_permission(self, request, obj=None):
"""
        Returns True if the given request has permission to delete the given
        Django model instance. If CMS_PERMISSION is in use, object permissions
        are taken into account as well.
"""
if settings.CMS_PERMISSION and obj is not None:
return obj.has_delete_permission(request)
return super(PageAdmin, self).has_delete_permission(request, obj)
def has_recover_permission(self, request):
"""
        Returns True if the user has the right to recover pages
"""
if not "reversion" in settings.INSTALLED_APPS:
return False
user = request.user
if user.is_superuser:
return True
        for perm in GlobalPagePermission.objects.filter(user=user):
            if perm.can_recover:
                return True
        return False
def changelist_view(self, request, extra_context=None):
"The 'change list' admin view for this model."
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
raise PermissionDenied
try:
if DJANGO_1_3:
cl = CMSChangeList(request, self.model, self.list_display, self.list_display_links, self.list_filter,
self.date_hierarchy, self.search_fields, self.list_select_related, self.list_per_page, self.list_editable, self)
else:
cl = CMSChangeList(request, self.model, self.list_display, self.list_display_links, self.list_filter,
self.date_hierarchy, self.search_fields, self.list_select_related, self.list_per_page, self.list_max_show_all, self.list_editable, self)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given and
# the 'invalid=1' parameter was already in the query string, something
# is screwed up with the database, so display an error page.
if ERROR_FLAG in request.GET.keys():
return render_to_response('admin/invalid_setup.html', {'title': _('Database error')})
return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')
cl.set_items(request)
site_id = request.GET.get('site__exact', None)
if site_id is None:
site_id = Site.objects.get_current().pk
site_id = int(site_id)
# languages
languages = []
if site_id and site_id in settings.CMS_SITE_LANGUAGES:
languages = settings.CMS_SITE_LANGUAGES[site_id]
else:
languages = [lang[0] for lang in settings.CMS_LANGUAGES]
# parse the cookie that saves which page trees have
# been opened already and extracts the page ID
djangocms_nodes_open = request.COOKIES.get('djangocms_nodes_open', '')
raw_nodes = unquote(djangocms_nodes_open).split(',')
try:
open_menu_trees = [int(c.split('page_', 1)[1]) for c in raw_nodes]
        except (IndexError, ValueError):
open_menu_trees = []
context = {
'title': cl.title,
'is_popup': cl.is_popup,
'cl': cl,
'opts':opts,
'has_add_permission': self.has_add_permission(request),
'root_path': reverse('admin:index'),
'app_label': app_label,
'CMS_MEDIA_URL': settings.CMS_MEDIA_URL,
'softroot': settings.CMS_SOFTROOT,
'CMS_PERMISSION': settings.CMS_PERMISSION,
'CMS_MODERATOR': settings.CMS_MODERATOR,
'has_recover_permission': 'reversion' in settings.INSTALLED_APPS and self.has_recover_permission(request),
'DEBUG': settings.DEBUG,
'site_languages': languages,
'open_menu_trees': open_menu_trees,
}
if 'reversion' in settings.INSTALLED_APPS:
context['has_change_permission'] = self.has_change_permission(request)
context.update(extra_context or {})
context.update(self.admin_site.each_context(request))
return render_to_response(self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.object_name.lower()),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context, context_instance=RequestContext(request))
def recoverlist_view(self, request, extra_context=None):
if not self.has_recover_permission(request):
raise PermissionDenied
return super(PageAdmin, self).recoverlist_view(request, extra_context)
def recover_view(self, request, version_id, extra_context=None):
if not self.has_recover_permission(request):
raise PermissionDenied
extra_context = self.update_language_tab_context(request, None, extra_context)
return super(PageAdmin, self).recover_view(request, version_id, extra_context)
def revision_view(self, request, object_id, version_id, extra_context=None):
if not self.has_change_permission(request, Page.objects.get(pk=object_id)):
raise PermissionDenied
extra_context = self.update_language_tab_context(request, None, extra_context)
response = super(PageAdmin, self).revision_view(request, object_id, version_id, extra_context)
return response
def history_view(self, request, object_id, extra_context=None):
if not self.has_change_permission(request, Page.objects.get(pk=object_id)):
raise PermissionDenied
extra_context = self.update_language_tab_context(request, None, extra_context)
return super(PageAdmin, self).history_view(request, object_id, extra_context)
def render_revision_form(self, request, obj, version, context, revert=False, recover=False):
# reset parent to null if parent is not found
if version.field_dict['parent']:
try:
Page.objects.get(pk=version.field_dict['parent'])
            except (ObjectDoesNotExist, ValueError):
if revert and obj.parent_id != int(version.field_dict['parent']):
version.field_dict['parent'] = obj.parent_id
if recover:
obj.parent = None
obj.parent_id = None
version.field_dict['parent'] = None
obj.version = version
return super(PageAdmin, self).render_revision_form(request, obj, version, context, revert, recover)
@mutually_exclusive_on_post
def move_page(self, request, page_id, extra_context=None):
"""
Move the page to the requested target, at the given position
"""
target = request.POST.get('target', None)
position = request.POST.get('position', None)
if target is None or position is None:
return HttpResponseRedirect('../../')
try:
page = self.model.objects.get(pk=page_id)
target = self.model.objects.get(pk=target)
except self.model.DoesNotExist:
return jsonify_request(HttpResponseBadRequest("error"))
        # does the user have permission to do this?
if not page.has_move_page_permission(request) or \
not target.has_add_permission(request):
return jsonify_request(HttpResponseForbidden(_("Error! You don't have permissions to move this page. Please reload the page")))
# move page
page.move_page(target, position)
if "reversion" in settings.INSTALLED_APPS:
helpers.make_revision_with_plugins(page)
return jsonify_request(HttpResponse(admin_utils.render_admin_menu_item(request, page).content))
def get_permissions(self, request, page_id):
page = get_object_or_404(Page, id=page_id)
can_change_list = Page.permissions.get_change_id_list(request.user, page.site_id)
global_page_permissions = GlobalPagePermission.objects.filter(sites__in=[page.site_id])
page_permissions = PagePermission.objects.for_page(page)
all_permissions = list(global_page_permissions) + list(page_permissions)
        # can the user change global permissions?
has_global = permissions.has_global_change_permissions_permission(request.user)
permission_set = []
for permission in all_permissions:
if isinstance(permission, GlobalPagePermission):
if has_global:
permission_set.append([(True, True), permission])
else:
permission_set.append([(True, False), permission])
else:
if can_change_list == PagePermissionsPermissionManager.GRANT_ALL:
can_change = True
else:
can_change = permission.page_id in can_change_list
permission_set.append([(False, can_change), permission])
context = {
'page': page,
'permission_set': permission_set,
}
return render_to_response('admin/cms/page/permissions.html', context)
@mutually_exclusive_on_post
def copy_page(self, request, page_id, extra_context=None):
"""
Copy the page and all its plugins and descendants to the requested target, at the given position
"""
context = {}
page = Page.objects.get(pk=page_id)
target = request.POST.get('target', None)
position = request.POST.get('position', None)
site = request.POST.get('site', None)
if target is not None and position is not None and site is not None:
try:
target = self.model.objects.get(pk=target)
                # does the user have permission to copy this page under target?
assert target.has_add_permission(request)
site = Site.objects.get(pk=site)
except (ObjectDoesNotExist, AssertionError):
return HttpResponse("error")
#context.update({'error': _('Page could not been moved.')})
else:
try:
kwargs = {
'copy_permissions': request_item(
request, 'copy_permissions', False),
'copy_moderation': request_item(
request, 'copy_moderation', False),
}
page.copy_page(target, site, position, **kwargs)
return jsonify_request(HttpResponse("ok"))
            except ValidationError as e:
return jsonify_request(HttpResponseBadRequest(e.messages))
context.update(extra_context or {})
return HttpResponseRedirect('../../')
def get_moderation_states(self, request, page_id):
"""Returns moderation messsages. Is loaded over ajax to inline-group
element in change form view.
"""
page = get_object_or_404(Page, id=page_id)
if not page.has_moderate_permission(request):
raise Http404()
context = {
'page': page,
}
return render_to_response('admin/cms/page/moderation_messages.html', context)
@transaction.atomic
def approve_page(self, request, page_id):
"""Approve changes on current page by user from request.
"""
#TODO: change to POST method !! get is not safe
page = get_object_or_404(Page, id=page_id)
if not page.has_moderate_permission(request):
raise Http404()
moderator.approve_page(request, page)
        # Django SQLite bug: lazy translation instances are not converted to strings
from django.utils.translation import ugettext as _
self.message_user(request, _('Page was successfully approved.'))
if request_item(request, 'node'):
# if request comes from tree..
return admin_utils.render_admin_menu_item(request, page)
referer = request.META.get('HTTP_REFERER', reverse('admin:cms_page_changelist'))
path = '../../'
if 'admin' not in referer:
path = '%s?edit-off' % referer.split('?')[0]
return HttpResponseRedirect( path )
@transaction.atomic
def publish_page(self, request, page_id):
page = get_object_or_404(Page, id=page_id)
# ensure user has permissions to publish this page
if not page.has_moderate_permission(request):
return HttpResponseForbidden("Denied")
page.publish()
referer = request.META.get('HTTP_REFERER', '')
path = '../../'
# TODO: use admin base here!
if 'admin' not in referer:
path = '%s?edit-off' % referer.split('?')[0]
return HttpResponseRedirect( path )
@mutually_exclusive_on_post
def delete_view(self, request, object_id, *args, **kwargs):
"""If page is under modaretion, just mark this page for deletion = add
delete action to page states.
"""
page = get_object_or_404(Page, id=object_id)
if not self.has_delete_permission(request, page):
raise PermissionDenied
if settings.CMS_MODERATOR and page.is_under_moderation():
# don't perform a delete action, just mark page for deletion
page.force_moderation_action = PageModeratorState.ACTION_DELETE
page.moderator_state = Page.MODERATOR_NEED_DELETE_APPROVEMENT
page.save()
if not self.has_change_permission(request, None):
return HttpResponseRedirect("../../../../")
return HttpResponseRedirect("../../")
max_pages_to_delete = settings.CMS_MAX_PAGE_COUNT_FOR_DELETION
if page.get_descendant_count() > max_pages_to_delete:
messages.error(request, "You can't delete more than %d pages at once" %
max_pages_to_delete)
return HttpResponseRedirect(reverse('admin:cms_page_changelist'))
response = super(PageAdmin, self).delete_view(request, object_id, *args, **kwargs)
return response
@create_on_success
def delete_translation(self, request, object_id, extra_context=None):
language = get_language_from_request(request)
opts = Page._meta
titleopts = Title._meta
app_label = titleopts.app_label
pluginopts = CMSPlugin._meta
try:
obj = self.get_queryset(request).get(pk=unquote(object_id))
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
if not self.has_delete_permission(request, obj):
raise PermissionDenied
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_unicode(opts.verbose_name),
'key': escape(object_id)
})
if not len(obj.get_languages()) > 1:
raise Http404(_('There only exists one translation for this page'))
titleobj = get_object_or_404(Title, page__id=object_id, language=language)
saved_plugins = CMSPlugin.objects.filter(placeholder__page__id=object_id, language=language)
using = router.db_for_read(self.model)
kwargs = {
'admin_site': self.admin_site,
'user': request.user,
'using': using
}
deleted_objects, __, perms_needed = get_deleted_objects(
[titleobj],
titleopts,
**kwargs
)[:3]
to_delete_plugins, __, perms_needed_plugins = get_deleted_objects(
saved_plugins,
pluginopts,
**kwargs
)[:3]
deleted_objects.append(to_delete_plugins)
perms_needed = set( list(perms_needed) + list(perms_needed_plugins) )
if request.method == 'POST':
if perms_needed:
raise PermissionDenied
            message = _('Title and plugins with language %(language)s were deleted') % {
'language': [name for code, name in settings.CMS_LANGUAGES if code == language][0]
}
self.log_change(request, titleobj, message)
self.message_user(request, message)
titleobj.delete()
for p in saved_plugins:
p.delete()
public = obj.publisher_public
if public:
public.save()
if "reversion" in settings.INSTALLED_APPS:
helpers.make_revision_with_plugins(obj)
if not self.has_change_permission(request, None):
return HttpResponseRedirect("../../../../")
return HttpResponseRedirect("../../")
context = {
"title": _("Are you sure?"),
"object_name": force_unicode(titleopts.verbose_name),
"object": titleobj,
"deleted_objects": deleted_objects,
"perms_lacking": perms_needed,
"opts": opts,
"root_path": reverse('admin:index'),
"app_label": app_label,
}
context.update(extra_context or {})
context_instance = RequestContext(request, current_app=self.admin_site.name)
return render_to_response(self.delete_confirmation_template or [
"admin/%s/%s/delete_confirmation.html" % (app_label, titleopts.object_name.lower()),
"admin/%s/delete_confirmation.html" % app_label,
"admin/delete_confirmation.html"
], context, context_instance=context_instance)
def remove_delete_state(self, request, object_id):
"""Remove all delete action from page states, requires change permission
"""
page = get_object_or_404(Page, id=object_id)
if not self.has_change_permission(request, page):
raise PermissionDenied
page.pagemoderatorstate_set.get_delete_actions().delete()
page.moderator_state = Page.MODERATOR_NEED_APPROVEMENT
page.save()
return HttpResponseRedirect("../../%d/" % page.id)
def preview_page(self, request, object_id):
"""Redirecting preview function based on draft_id
"""
page = get_object_or_404(Page, id=object_id)
attrs = "?preview=1"
if request_item(request, 'public', None):
if not page.publisher_public_id:
raise Http404
page = page.publisher_public
else:
attrs += "&draft=1"
url = page.get_absolute_url() + attrs
site = Site.objects.get_current()
if not site == page.site:
url = "http%s://%s%s" % ('s' if request.is_secure() else '',
page.site.domain, url)
return HttpResponseRedirect(url)
@mutually_exclusive_on_post
def change_status(self, request, page_id):
"""
Switch the status of a page
"""
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
page = get_object_or_404(Page, pk=page_id)
if page.has_publish_permission(request):
try:
                if page.published or is_valid_url(page.get_absolute_url(), page, False):
page.published = not page.published
page.save()
if page.publisher_public:
page.publisher_public.published = page.published
page.publisher_public.save()
return jsonify_request(HttpResponse(admin_utils.render_admin_menu_item(request, page).content))
            except ValidationError as e:
return jsonify_request(HttpResponseBadRequest(e.messages))
else:
return HttpResponseForbidden(unicode(_("You do not have permission to publish this page")))
@mutually_exclusive_on_post
def change_innavigation(self, request, page_id):
"""
Switch the in_navigation of a page
"""
# why require post and still have page id in the URL???
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
page = get_object_or_404(Page, pk=page_id)
if page.has_change_permission(request):
page.in_navigation = not page.in_navigation
page.save(force_state=Page.MODERATOR_NEED_APPROVEMENT)
return admin_utils.render_admin_menu_item(request, page)
return HttpResponseForbidden(_("You do not have permission to change this page's in_navigation status"))
def descendants(self, request, page_id):
"""
Get html for descendants of given page
Used for lazy loading pages in change_list.js
        Permission checking is done in admin_utils.get_admin_menu_item_context
which is called by admin_utils.render_admin_menu_item.
"""
page = get_object_or_404(Page, pk=page_id)
return admin_utils.render_admin_menu_item(request, page,
template="admin/cms/page/lazy_menu.html")
@create_on_success
def add_plugin(self, request):
'''
Could be either a page or a parent - if it's a parent we get the page via parent.
'''
if 'history' in request.path or 'recover' in request.path:
return HttpResponse(str("error"))
if request.method != "POST":
raise Http404
plugin_type = request.POST['plugin_type']
if not permissions.has_plugin_permission(request.user, plugin_type, "add"):
return HttpResponseForbidden(ugettext('You have no permission to add a plugin'))
placeholder_id = request.POST.get('placeholder', None)
parent_id = request.POST.get('parent_id', None)
if placeholder_id:
placeholder = get_object_or_404(Placeholder, pk=placeholder_id)
page = placeholder.page
else:
placeholder = None
page = None
parent = None
# page add-plugin
if page:
language = request.POST['language'] or get_language_from_request(request)
position = CMSPlugin.objects.filter(language=language, placeholder=placeholder).count()
limits = placeholder_utils.get_placeholder_conf("limits", placeholder.slot, page.get_template())
if limits:
global_limit = limits.get("global")
type_limit = limits.get(plugin_type)
if global_limit and position >= global_limit:
return HttpResponseBadRequest("This placeholder already has the maximum number of plugins")
elif type_limit:
type_count = CMSPlugin.objects.filter(language=language, placeholder=placeholder, plugin_type=plugin_type).count()
if type_count >= type_limit:
plugin_name = unicode(plugin_pool.get_plugin(plugin_type).name)
return HttpResponseBadRequest("This placeholder already has the maximum number allowed of %s plugins." % plugin_name)
# in-plugin add-plugin
elif parent_id:
parent = get_object_or_404(CMSPlugin, pk=parent_id)
placeholder = parent.placeholder
page = placeholder.page if placeholder else None
if not page: # Make sure we do have a page
raise Http404
language = parent.language
position = None
# placeholder (non-page) add-plugin
else:
# do NOT allow non-page placeholders to use this method, they
# should use their respective admin!
raise Http404
if not page.has_change_permission(request):
# we raise a 404 instead of 403 for a slightly improved security
# and to be consistent with placeholder admin
raise Http404
# Sanity check to make sure we're not getting bogus values from JavaScript:
        if not language or language not in [lang[0] for lang in settings.LANGUAGES]:
return HttpResponseBadRequest(ugettext("Language must be set to a supported language!"))
plugin = CMSPlugin(language=language, plugin_type=plugin_type, position=position, placeholder=placeholder)
if parent:
plugin.parent = parent
plugin.save()
if 'reversion' in settings.INSTALLED_APPS and page:
helpers.make_revision_with_plugins(page)
reversion.revision.user = request.user
plugin_name = unicode(plugin_pool.get_plugin(plugin_type).name)
reversion.revision.comment = unicode(_(u"%(plugin_name)s plugin added to %(placeholder)s") % {'plugin_name':plugin_name, 'placeholder':placeholder})
return HttpResponse(str(plugin.pk))
@create_on_success
@transaction.atomic
def copy_plugins(self, request):
if 'history' in request.path or 'recover' in request.path:
return HttpResponse(str("error"))
if request.method != "POST":
raise Http404
copy_from = request.POST['copy_from']
placeholder_id = request.POST['placeholder']
placeholder = get_object_or_404(Placeholder, pk=placeholder_id)
page = placeholder.page
language = request.POST['language'] or get_language_from_request(request)
if not page.has_change_permission(request):
return HttpResponseForbidden(ugettext("You do not have permission to change this page"))
        if not language or language not in [lang[0] for lang in settings.CMS_LANGUAGES]:
return HttpResponseBadRequest(ugettext("Language must be set to a supported language!"))
if language == copy_from:
return HttpResponseBadRequest(ugettext("Language must be different than the copied language!"))
plugins = list(placeholder.cmsplugin_set.filter(language=copy_from).order_by('tree_id', '-rght'))
# check permissions before copy the plugins:
for plugin in plugins:
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "add"):
return HttpResponseForbidden(ugettext("You do not have permission to add plugins"))
copy_plugins.copy_plugins_to(plugins, placeholder, language)
if page and "reversion" in settings.INSTALLED_APPS:
helpers.make_revision_with_plugins(page)
reversion.revision.user = request.user
reversion.revision.comment = _(u"Copied %(language)s plugins to %(placeholder)s") % {'language': ugettext(dict(settings.LANGUAGES)[language]),
'placeholder': placeholder}
plugin_list = CMSPlugin.objects.filter(language=language, placeholder=placeholder, parent=None).order_by('position')
return render_to_response('admin/cms/page/widgets/plugin_item.html', {'plugin_list':plugin_list}, RequestContext(request))
@create_on_success
def edit_plugin(self, request, plugin_id):
plugin_id = int(plugin_id)
if not 'history' in request.path and not 'recover' in request.path:
cms_plugin = get_object_or_404(CMSPlugin.objects.select_related('placeholder'), pk=plugin_id)
page = cms_plugin.placeholder.page if cms_plugin.placeholder else None
instance, plugin_admin = cms_plugin.get_plugin_instance(self.admin_site)
if page and not page.has_change_permission(request):
return HttpResponseForbidden(ugettext("You have no permission to change this page"))
else:
# history view with reversion
from reversion.models import Version
pre_edit = request.path.split("/edit-plugin/")[0]
version_id = pre_edit.split("/")[-1]
version = get_object_or_404(Version, pk=version_id)
rev_objs = []
for related_version in version.revision.version_set.all():
try:
rev = related_version.object_version
except models.FieldDoesNotExist:
continue
else:
rev_objs.append(rev.object)
# TODO: check permissions
for obj in rev_objs:
if obj.__class__ == CMSPlugin and obj.pk == plugin_id:
cms_plugin = obj
break
inst, plugin_admin = cms_plugin.get_plugin_instance(self.admin_site)
instance = None
if cms_plugin.get_plugin_class().model == CMSPlugin:
instance = cms_plugin
else:
for obj in rev_objs:
if hasattr(obj, "cmsplugin_ptr_id") and int(obj.cmsplugin_ptr_id) == int(cms_plugin.pk):
instance = obj
break
if not instance:
raise Http404("This plugin is not saved in a revision")
if not permissions.has_plugin_permission(request.user, cms_plugin.plugin_type, "change"):
return HttpResponseForbidden(ugettext("You have no permission to edit a plugin"))
plugin_admin.cms_plugin_instance = cms_plugin
try:
plugin_admin.placeholder = cms_plugin.placeholder # TODO: what for reversion..? should it be inst ...?
except Placeholder.DoesNotExist:
pass
if request.method == "POST":
            # set the continue flag, otherwise plugin_admin would redirect to the
            # list view, which doesn't exist here
post_request = request.POST.copy()
post_request['_continue'] = True
request.POST = post_request
if 'reversion' in settings.INSTALLED_APPS and ('history' in request.path or 'recover' in request.path):
            # when viewing history, just render the plugin content
context = RequestContext(request)
return render_to_response(plugin_admin.render_template, plugin_admin.render(context, instance, plugin_admin.placeholder))
if request.POST.get("_cancel", False):
# cancel button was clicked
context = {
'CMS_MEDIA_URL': settings.CMS_MEDIA_URL,
'plugin': cms_plugin,
'is_popup': True,
'name': unicode(cms_plugin),
"type": cms_plugin.get_plugin_name(),
'plugin_id': plugin_id,
'icon': force_escape(cms_plugin.get_instance_icon_src()),
'alt': force_escape(cms_plugin.get_instance_icon_alt()),
'cancel': True,
}
instance = cms_plugin.get_plugin_instance()[0]
if not instance:
# cancelled before any content was added to plugin
cms_plugin.delete()
context.update({
"deleted":True,
})
return render_to_response('admin/cms/page/plugin_forms_ok.html', context, RequestContext(request))
if not instance:
# instance doesn't exist, call add view
response = plugin_admin.add_view(request)
else:
# already saved before, call change view
            # we already have the instance here, but since change_view is not
            # overridden, it is better to let it load the object again, so
            # just pass the id to plugin_admin
response = plugin_admin.change_view(request, str(plugin_id))
if request.method == "POST" and plugin_admin.object_successfully_changed:
# if reversion is installed, save version of the page plugins
if 'reversion' in settings.INSTALLED_APPS and page:
helpers.make_revision_with_plugins(page)
reversion.revision.user = request.user
plugin_name = unicode(plugin_pool.get_plugin(cms_plugin.plugin_type).name)
reversion.revision.comment = ugettext(u"%(plugin_name)s plugin edited at position %(position)s in %(placeholder)s") % {
'plugin_name': plugin_name,
'position': cms_plugin.position,
'placeholder': cms_plugin.placeholder.slot
}
# read the saved object from plugin_admin - ugly but works
saved_object = plugin_admin.saved_object
context = {
'CMS_MEDIA_URL': settings.CMS_MEDIA_URL,
'plugin': saved_object,
'is_popup': True,
'name': unicode(saved_object),
"type": saved_object.get_plugin_name(),
'plugin_id': plugin_id,
'icon': force_escape(saved_object.get_instance_icon_src()),
'alt': force_escape(saved_object.get_instance_icon_alt()),
}
return render_to_response('admin/cms/page/plugin_forms_ok.html', context, RequestContext(request))
return response
@create_on_success
def move_plugin(self, request):
if request.method != "POST":
return HttpResponse(str("error"))
if 'history' in request.path:
return HttpResponse(str("error"))
pos = 0
page = None
success = False
if 'plugin_id' in request.POST:
plugin = CMSPlugin.objects.get(pk=int(request.POST['plugin_id']))
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"):
return HttpResponseForbidden()
page = plugins.get_page_from_plugin_or_404(plugin)
if not page.has_change_permission(request):
return HttpResponseForbidden(ugettext("You have no permission to change this page"))
placeholder_slot = request.POST['placeholder']
placeholders = self.get_fieldset_placeholders(page.get_template())
if not placeholder_slot in placeholders:
return HttpResponse(str("error"))
placeholder = page.placeholders.get(slot=placeholder_slot)
plugin.placeholder = placeholder
# plugin positions are 0 based, so just using count here should give us 'last_position + 1'
position = CMSPlugin.objects.filter(placeholder=placeholder).count()
plugin.position = position
# update the placeholder on all descendant plugins as well
for child in plugin.get_descendants():
child.placeholder = placeholder
child.save()
plugin.save()
success = True
if 'ids' in request.POST:
for plugin_id in request.POST['ids'].split("_"):
plugin = CMSPlugin.objects.select_related('placeholder').get(pk=plugin_id)
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"):
return HttpResponseForbidden(ugettext("You have no permission to move a plugin"))
page = plugin.placeholder.page if plugin.placeholder else None
if not page: # use placeholderadmin instead!
raise Http404
if not page.has_change_permission(request):
return HttpResponseForbidden(ugettext("You have no permission to change this page"))
if plugin.position != pos:
plugin.position = pos
plugin.save()
pos += 1
success = True
if not success:
return HttpResponse(str("error"))
if page and 'reversion' in settings.INSTALLED_APPS:
helpers.make_revision_with_plugins(page)
reversion.revision.user = request.user
            reversion.revision.comment = ugettext(u"Plugins were moved")
return HttpResponse(str("ok"))
@create_on_success
def remove_plugin(self, request):
if request.method != "POST":
raise Http404
if 'history' in request.path:
raise Http404
plugin_id = request.POST['plugin_id']
plugin = get_object_or_404(CMSPlugin.objects.select_related('placeholder'), pk=plugin_id)
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "delete"):
return HttpResponseForbidden(ugettext("You have no permission to remove a plugin"))
placeholder = plugin.placeholder
page = placeholder.page if placeholder else None
if page and not page.has_change_permission(request):
raise Http404
if page and settings.CMS_MODERATOR and page.is_under_moderation():
# delete the draft version of the plugin
plugin.delete()
# set the page to require approval and save
page.moderator_state = Page.MODERATOR_NEED_APPROVEMENT
page.save()
else:
plugin.delete_with_public()
plugin_name = unicode(plugin_pool.get_plugin(plugin.plugin_type).name)
comment = ugettext(u"%(plugin_name)s plugin at position %(position)s in %(placeholder)s was deleted.") % {
'plugin_name': plugin_name,
'position': plugin.position,
'placeholder': plugin.placeholder,
}
if page and 'reversion' in settings.INSTALLED_APPS:
helpers.make_revision_with_plugins(page)
reversion.revision.user = request.user
reversion.revision.comment = comment
return HttpResponse("%s,%s" % (plugin_id, comment))
def change_moderation(self, request, page_id):
"""Called when user clicks on a moderation checkbox in tree vies, so if he
wants to add/remove/change moderation required by him. Moderate is sum of
mask values.
"""
from cms.models.moderatormodels import MASK_PAGE, MASK_CHILDREN, MASK_DESCENDANTS
if request.method != 'POST':
return HttpResponseNotAllowed(['POST'])
page = get_object_or_404(Page, id=page_id)
moderate = request.POST.get('moderate', None)
if moderate is not None and page.has_moderate_permission(request):
try:
moderate = int(moderate)
            except (TypeError, ValueError):
moderate = 0
if moderate == 0:
                # remove the user's moderation record when moderate equals zero
try:
page.pagemoderator_set.get(user=request.user).delete()
except ObjectDoesNotExist:
pass
return admin_utils.render_admin_menu_item(request, page)
elif moderate <= MASK_PAGE + MASK_CHILDREN + MASK_DESCENDANTS:
page_moderator, created = page.pagemoderator_set.get_or_create(user=request.user)
# split value to attributes
page_moderator.set_decimal(moderate)
page_moderator.save()
return admin_utils.render_admin_menu_item(request, page)
raise Http404
def lookup_allowed(self, key, *args, **kwargs):
if key == 'site__exact':
return True
return super(PageAdmin, self).lookup_allowed(key, *args, **kwargs)
contribute_fieldsets(PageAdmin)
contribute_list_filter(PageAdmin)
admin.site.register(Page, PageAdmin)
| bsd-3-clause | -2,424,291,827,764,147,000 | 43.896046 | 160 | 0.594201 | false |
ptthiem/nose2 | nose2/tests/functional/test_session.py | 1 | 1916 | import sys
from nose2 import session
from nose2.tests._common import support_file, FunctionalTestCase
class SessionFunctionalTests(FunctionalTestCase):
def setUp(self):
self.s = session.Session()
self.s.loadConfigFiles(support_file('cfg', 'a.cfg'),
support_file('cfg', 'b.cfg'))
sys.path.insert(0, support_file('lib'))
def test_session_can_load_config_files(self):
assert self.s.config.has_section('a')
assert self.s.config.has_section('b')
def test_session_holds_plugin_config(self):
plug_config = self.s.get('a')
assert plug_config
def test_session_can_load_plugins_from_modules(self):
self.s.loadPlugins()
assert self.s.plugins
plug = self.s.plugins[0]
self.assertEqual(plug.a, 1)
def test_session_config_cacheing(self):
"""Test cacheing of config sections works"""
# Create new session (generic one likely already cached
# depending on test order)
cache_sess = session.Session()
cache_sess.loadConfigFiles(support_file('cfg', 'a.cfg'))
# First access to given section, should read from config file
firstaccess = cache_sess.get('a')
assert firstaccess.as_int("a") == 1
# Hack cached Config object internals to make the stored value
# something different
cache_sess.configCache["a"]._mvd["a"] = "0"
newitems = []
for item in cache_sess.configCache["a"]._items:
if item != ("a", "1"):
newitems.append(item)
else:
newitems.append(("a", "0"))
cache_sess.configCache["a"]._items = newitems
# Second access to given section, confirm returns cached value
# rather than parsing config file again
secondaccess = cache_sess.get("a")
assert secondaccess.as_int("a") == 0
| bsd-2-clause | 3,197,995,564,410,282,500 | 32.614035 | 70 | 0.608559 | false |
BackupTheBerlios/espressopp | src/bc/OrthorhombicBC.py | 1 | 2332 | # Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
************************************
**OrthorhombicBC** - Object
************************************
Like all boundary condition objects, this class implements
all the methods of the base class **BC** , which are described in detail
in the documentation of the abstract class **BC**.
The OrthorhombicBC class is responsible for the orthorhombic boundary condition.
Currently only periodic boundary conditions are supported.
Example:
>>> boxsize = (Lx, Ly, Lz)
>>> bc = espresso.bc.OrthorhombicBC(rng, boxsize)
"""
from espresso.esutil import cxxinit
from espresso import pmi
from espresso import toReal3D
from espresso.bc.BC import *
from _espresso import bc_OrthorhombicBC
class OrthorhombicBCLocal(BCLocal, bc_OrthorhombicBC):
def __init__(self, rng, boxL=1.0):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup() or pmi.isController:
cxxinit(self, bc_OrthorhombicBC, rng, toReal3D(boxL))
    # override the boxL property setter so box-size assignments are propagated via PMI
def setBoxL(self, boxL):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
self.cxxclass.boxL.fset(self, toReal3D(boxL))
boxL = property(bc_OrthorhombicBC.boxL.fget, setBoxL)
if pmi.isController :
class OrthorhombicBC(BC):
pmiproxydefs = dict(
cls = 'espresso.bc.OrthorhombicBCLocal',
pmiproperty = [ 'boxL' ]
)
| gpl-3.0 | 1,707,733,058,514,005,000 | 34.876923 | 132 | 0.689966 | false |
tneumann/splocs | view_splocs.py | 1 | 3302 | import argparse
from itertools import count
import numpy as np
import h5py
from traits.api import HasTraits, Range, Instance, Bool, Int, on_trait_change
from traitsui.api import View, Item, HGroup, RangeEditor
from tvtk.api import tvtk
from tvtk.pyface.scene_editor import SceneEditor
from tvtk.common import configure_input, configure_input_data
from mayavi.tools.mlab_scene_model import MlabSceneModel
from mayavi.core.ui.mayavi_scene import MayaviScene
from pyface.timer.api import Timer
from util import veclen
from inout import load_splocs
class Visualization(HasTraits):
component = Int(0)
_max_component_index = Int()
activation = Range(-1., 1.)
oscillate = Bool(True)
allow_negative = Bool(False)
pd = Instance(tvtk.PolyData)
normals = Instance(tvtk.PolyDataNormals)
actor = Instance(tvtk.Actor)
scene = Instance(MlabSceneModel, (), kw=dict(background=(1,1,1)))
timer = Instance(Timer)
def __init__(self, Xmean, tris, components):
HasTraits.__init__(self)
self._components = components
self._max_component_index = len(components)
self._Xmean = Xmean
self.pd = tvtk.PolyData(points=Xmean, polys=tris)
self.normals = tvtk.PolyDataNormals(splitting=False)
configure_input_data(self.normals, self.pd)
mapper = tvtk.PolyDataMapper(immediate_mode_rendering=True)
self.actor = tvtk.Actor(mapper=mapper)
configure_input(self.actor.mapper, self.normals)
self.actor.mapper.lookup_table = tvtk.LookupTable(
hue_range = (0.45, 0.6),
saturation_range = (0., 0.8),
value_range = (.6, 1.),
)
self.scene.add_actor(self.actor)
self.timer = Timer(40, self.animate().next)
def animate(self):
for i in count():
if self.oscillate:
frame = i % 30
alpha = np.sin(frame/30. * np.pi*2)
if not self.allow_negative:
alpha = np.abs(alpha)
self.activation = alpha
yield
@on_trait_change('activation, component')
def update_plot(self):
c = self._components[self.component]
self.pd.points = self._Xmean + self.activation * c
magnitude = veclen(c)
self.pd.point_data.scalars = magnitude
self.actor.mapper.scalar_range = (0, magnitude.max())
self.scene.render()
view = View(
Item('scene', editor=SceneEditor(scene_class=MayaviScene),
height=600, width=800, show_label=False),
HGroup(
Item('component', editor=RangeEditor(
is_float=False, low=0, high_name='_max_component_index', mode='spinner')),
'activation',
'oscillate',
'allow_negative',
),
resizable=True, title="View SPLOC's",
)
def main(component_hdf5_file):
Xmean, tris, components, names = load_splocs(component_hdf5_file)
visualization = Visualization(Xmean, tris, components)
visualization.configure_traits()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Viewer for sparse localized deformation components')
parser.add_argument('input_sploc_file')
args = parser.parse_args()
main(args.input_sploc_file)
| mit | 4,805,950,588,558,271,000 | 34.12766 | 90 | 0.635978 | false |
DarkFenX/Pyfa | gui/builtinStatsViews/capacitorViewFull.py | 1 | 7227 | # =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
# noinspection PyPackageRequirements
import wx
from gui.statsView import StatsView
from gui.bitmap_loader import BitmapLoader
from gui.utils.numberFormatter import formatAmount, roundToPrec
class CapacitorViewFull(StatsView):
name = "capacitorViewFull"
def __init__(self, parent):
StatsView.__init__(self)
self.parent = parent
def getHeaderText(self, fit):
return "Capacitor"
def getTextExtentW(self, text):
width, height = self.parent.GetTextExtent(text)
return width
def populatePanel(self, contentPanel, headerPanel):
contentSizer = contentPanel.GetSizer()
parent = self.panel = contentPanel
self.headerPanel = headerPanel
panel = "full"
sizerCapacitor = wx.GridSizer(1, 2, 0, 0)
contentSizer.Add(sizerCapacitor, 0, wx.EXPAND, 0)
# Capacitor capacity and time
baseBox = wx.BoxSizer(wx.HORIZONTAL)
sizerCapacitor.Add(baseBox, 0, wx.ALIGN_LEFT)
bitmap = BitmapLoader.getStaticBitmap("capacitorInfo_big", parent, "gui")
tooltip = wx.ToolTip("Capacitor stability")
bitmap.SetToolTip(tooltip)
baseBox.Add(bitmap, 0, wx.ALIGN_CENTER)
box = wx.BoxSizer(wx.VERTICAL)
baseBox.Add(box, 0, wx.ALIGN_LEFT)
hbox = wx.BoxSizer(wx.HORIZONTAL)
box.Add(hbox, 0, wx.ALIGN_LEFT)
hbox.Add(wx.StaticText(parent, wx.ID_ANY, "Total: "), 0, wx.ALIGN_LEFT | wx.LEFT, 3)
lbl = wx.StaticText(parent, wx.ID_ANY, "0.0")
setattr(self, "label%sCapacitorCapacity" % panel.capitalize(), lbl)
hbox.Add(lbl, 0, wx.ALIGN_LEFT)
hbox.Add(wx.StaticText(parent, wx.ID_ANY, " GJ"), 0, wx.ALIGN_LEFT)
hbox = wx.BoxSizer(wx.HORIZONTAL)
box.Add(hbox, 0, wx.ALIGN_LEFT)
lbl = wx.StaticText(parent, wx.ID_ANY, "Lasts ")
hbox.Add(lbl, 0, wx.ALIGN_LEFT | wx.LEFT, 3)
setattr(self, "label%sCapacitorState" % panel.capitalize(), lbl)
lbl = wx.StaticText(parent, wx.ID_ANY, "0s")
setattr(self, "label%sCapacitorTime" % panel.capitalize(), lbl)
hbox.Add(lbl, 0, wx.ALIGN_LEFT)
# Capacitor balance
baseBox = wx.BoxSizer(wx.HORIZONTAL)
sizerCapacitor.Add(baseBox, 0, wx.ALIGN_CENTER_HORIZONTAL)
tooltip = wx.ToolTip("Extra stats")
bitmap = BitmapLoader.getStaticBitmap("capacitorRecharge_big", parent, "gui")
bitmap.SetToolTip(tooltip)
baseBox.Add(bitmap, 0, wx.ALIGN_CENTER)
# Delta
chargeSizer = wx.BoxSizer(wx.VERTICAL)
baseBox.Add(chargeSizer, 0, wx.ALIGN_CENTER)
lbl = wx.StaticText(parent, wx.ID_ANY, "0 GJ/s")
setattr(self, "label%sCapacitorDelta" % panel.capitalize(), lbl)
chargeSizer.Add(lbl, 0, wx.ALIGN_CENTER)
# Resists
lbl = wx.StaticText(parent, wx.ID_ANY, "0%")
setattr(self, "label%sCapacitorResist" % panel.capitalize(), lbl)
chargeSizer.Add(lbl, 0, wx.ALIGN_CENTER)
def refreshPanel(self, fit):
        # Update our labels to reflect the new fit's stats.
stats = (
("label%sCapacitorCapacity", lambda: fit.ship.getModifiedItemAttr("capacitorCapacity"), 3, 0, 9, False, ''),
("label%sCapacitorDelta", lambda: fit.capDelta, 3, 0, 0, True, ' GJ/s'),
("label%sCapacitorResist", lambda: (1 - fit.ship.getModifiedItemAttr("energyWarfareResistance", 1)) * 100, 3, 0, 0, False, '%'),
)
if fit is not None:
cap_amount = fit.ship.getModifiedItemAttr("capacitorCapacity")
cap_recharge = fit.capRecharge
cap_use = fit.capUsed
neut_res = fit.ship.getModifiedItemAttr("energyWarfareResistance", 1)
else:
cap_amount = 0
cap_recharge = 0
cap_use = 0
neut_res = 1
panel = "Full"
for labelName, value, prec, lowest, highest, forceSign, unit in stats:
label = getattr(self, labelName % panel)
value = value() if fit is not None else 0
value = value if value is not None else 0
if isinstance(value, str):
label.SetLabel(value)
label.SetToolTip(wx.ToolTip(value))
else:
label.SetLabel('{}{}'.format(formatAmount(value, prec, lowest, highest, forceSign=forceSign), unit))
label.SetToolTip(wx.ToolTip("%.1f" % value))
if labelName == 'label%sCapacitorDelta' and (cap_recharge or cap_use):
lines = []
lines.append('Capacitor delta:')
lines.append(' +{} GJ/s'.format(formatAmount(cap_recharge, 3, 0, 3)))
lines.append(' -{} GJ/s'.format(formatAmount(cap_use, 3, 0, 3)))
delta = round(cap_recharge - cap_use, 3)
if delta > 0 and 0 < round(neut_res, 4) < 1:
lines.append('')
lines.append('Effective excessive gain:')
lines.append(' +{} GJ/s'.format(formatAmount(delta / neut_res, 3, 0, 3)))
label.SetToolTip(wx.ToolTip('\n'.join(lines)))
if labelName == 'label%sCapacitorResist':
texts = ['Neutralizer resistance']
if cap_amount > 0 and 0 < round(neut_res, 4) < 1:
texts.append('Effective capacity: {} GJ'.format(formatAmount(cap_amount / neut_res, 3, 0, 9)))
label.SetToolTip(wx.ToolTip('\n'.join(texts)))
capState = fit.capState if fit is not None else 0
capStable = fit.capStable if fit is not None else False
lblNameTime = "label%sCapacitorTime"
lblNameState = "label%sCapacitorState"
if isinstance(capState, tuple) and len(capState) >= 2:
t = ("{0}%-{1}%", capState[0], capState[1])
s = ""
else:
if capStable:
t = "%.1f%%" % capState
else:
if capState > 60:
t = "%dm%ds" % divmod(capState, 60)
else:
t = "%ds" % capState
s = "Stable: " if capStable else "Lasts "
getattr(self, lblNameTime % panel).SetLabel(t)
getattr(self, lblNameState % panel).SetLabel(s)
self.panel.Layout()
self.headerPanel.Layout()
CapacitorViewFull.register()
| gpl-3.0 | -9,062,078,565,782,485,000 | 39.601124 | 140 | 0.588764 | false |
losonczylab/Zaremba_NatNeurosci_2017 | losonczy_analysis_bundle/lab/misc/data_dumper.py | 1 | 7175 | """README for structure of the output data & how to load.
The .csv output files are tab-delimited
The first 3 rows contain the following information:
* ROW 1: ROI ID's (there is one unique ID for each field-of-view)
* ROW 2: ROI tags (e.g. 'r' indicates that the ROI was tdTomato positive)
* ROW 3: Column headers:
** Time (s)
** Running (boolean for whether the animal was classified as running)
** Velocity (cm/s)
** Position (normalized)
** Licking (boolean for whether a lick was detected)
** ...ROI ID... (df/f data)
** ...ROI ID... (is active data -- boolean on whether the ROI was
firing a significant transient at that time point)
Here is some sample Python code for pulling out the data:
>>> import csv
>>> import numpy as np
>>> time_s = []; running = []; velocity = []; position = []; licking = [];
>>> dff_data = []; is_active = [];
>>> with open('path_to_csv', 'rb') as f:
>>> reader = csv.reader(f, delimiter='\t')
>>> roi_ids = reader.next()[5:] # First row is ROI IDs
>>> tags = reader.next()[5:] # Second row is ROI tags
>>> reader.next() # Pop off third row (column headers)
>>> for row in reader:
>>> time_s.append(row[0])
>>> running.append(row[1])
>>> velocity.append(row[2])
>>> position.append(row[3])
>>> licking.append(row[4])
>>> dff_data.append(row[5:5 + len(roi_ids)])
>>> is_active.append(row[5 + len(roi_ids):])
>>> time_s = np.array(time_s).astype(float)
>>> running = np.array(running).astype(int)
>>> velocity = np.array(velocity).astype(float)
>>> position = np.array(position).astype(float)
>>> licking = np.array(licking).astype(int)
>>> dff_data = np.array(dff_data).astype(float).T # rois x time
>>> is_active = np.array(is_active).astype(int).T # rois x time
"""
import argparse
import csv
import os
import itertools as it
from lab.classes import ExperimentSet, ExperimentGroup
import lab.analysis.behavior_analysis as ba
import lab.analysis.imaging_analysis as ia
channel = 'Ch2'
label = None
argParser = argparse.ArgumentParser()
argParser.add_argument(
"xml", action='store', type=str, default='behavior.xml',
help="name of xml file to parse")
argParser.add_argument(
"path", action="store", type=str,
help="Path to store the dumped data")
argParser.add_argument(
"-m", "--mouse", type=str,
help="enter a single mouseID string to data dump")
argParser.add_argument(
"-d", "--directory", action="store", type=str, default='',
help="All data found below this directory will be dumped")
args = argParser.parse_args()
experimentSet = ExperimentSet(
'/analysis/experimentSummaries/.clean-code/experiments/' + args.xml,
'/data/BehaviorData')
if args.mouse is None and args.directory == '':
raise Exception("Must pass in either a mouse or directory to dump")
if args.mouse:
miceToAnalyze = [experimentSet.grabMouse(args.mouse)]
else:
miceToAnalyze = experimentSet.root.findall('mouse')
for mouse in miceToAnalyze:
exptGrp = ExperimentGroup(
[expt for expt in mouse.findall('experiment')
if expt.get('tSeriesDirectory', '') and args.directory
in expt.get('tSeriesDirectory', '')])
exptGrp.removeDatalessTrials()
for expt in exptGrp:
print('Dumping data for {}: {}'.format(
expt.parent.get('mouseID'),
expt.sima_path().split('/')[-1].split('.')[0]))
imaging_data = expt.imagingData(dFOverF='from_file')
trans_data = ia.isActive(expt)
imaging_times = expt.imagingTimes()
for trial_idx, trial in enumerate(expt.findall('trial')):
filename = '{}_{}_{}.csv'.format(
expt.parent.get('mouseID'),
expt.sima_path().split('/')[-1].split('.')[0],
trial_idx)
labels = ['Time(s)']
data = [imaging_times]
try:
running = ba.runningIntervals(
trial, imageSync=True, returnBoolList=True)
            except Exception:
pass
else:
labels.append('run')
data.append([int(run) for run in running])
try:
velocity = ba.velocity(trial, imageSync=True)
            except Exception:
pass
else:
labels.append('velocity')
data.append(velocity)
try:
position = trial.behaviorData(
imageSync=True)['treadmillPosition']
            except Exception:
pass
else:
labels.append('position')
data.append(position)
try:
licking = ba.lickingIntervals(
trial, imageSync=True, returnBoolList=True)
            except Exception:
pass
else:
labels.append('licking')
data.append([int(lick) for lick in licking])
data.extend(
[imag.tolist() for imag in imaging_data[..., trial_idx]])
data.extend(
[trans.astype('int').tolist()
for trans in trans_data[..., trial_idx]])
with open(os.path.join(args.path, filename), 'wb') as csvfile:
writer = csv.writer(csvfile, delimiter='\t')
writer.writerow(
[''] * (len(labels) - 1) + ['id'] +
[roi.id for roi in expt.rois()])
writer.writerow(
[''] * (len(labels) - 1) + ['tags'] +
[''.join(t + ',' for t in sorted(roi.tags))[:-1]
for roi in expt.rois()])
writer.writerow(
labels + [roi.label for roi in expt.rois()] + [
roi.label for roi in expt.rois()])
for row in it.izip(*data):
writer.writerow(row)
# writer.writerow(
# ['Time(s)', 'run', 'velocity', 'position', 'licking']
# + [roi.label for roi in expt.rois()]
# + [roi.label for roi in expt.rois()])
# writer.writerow(
# ['', '', '', '', 'id']
# + [roi.id for roi in expt.rois()])
# writer.writerow(
# ['', '', '', '', 'tags']
# + [''.join(t + ',' for t in sorted(roi.tags))[:-1]
# for roi in expt.rois()])
# for time, run, vel, pos, lick, imag, tran in it.izip(
# imaging_times, running, velocity,
# position, licking, imaging_data[..., trial_idx].T,
# trans_data[..., trial_idx].T):
# writer.writerow([time, int(run), vel, pos, int(lick)]
# + imag.tolist()
# + tran.astype('int').tolist())
| mit | -8,581,814,794,429,062,000 | 37.423077 | 77 | 0.514146 | false |
vorburger/mcedit2 | src/mceditlib/worldeditor.py | 1 | 31141 | from __future__ import absolute_import
import collections
import logging
import time
import weakref
import itertools
import numpy
import re
from mceditlib import cachefunc
from mceditlib.block_copy import copyBlocksIter
from mceditlib.blocktypes import BlockType
from mceditlib.nbtattr import NBTListProxy
from mceditlib.operations.block_fill import FillBlocksOperation
from mceditlib.operations.analyze import AnalyzeOperation
from mceditlib.selection import BoundingBox
from mceditlib.findadapter import findAdapter
from mceditlib.multi_block import getBlocks, setBlocks
from mceditlib.schematic import createSchematic
from mceditlib.util import displayName, chunk_pos, exhaust, matchEntityTags
from mceditlib.util.lazyprop import weakrefprop
from mceditlib.blocktypes import BlockType
log = logging.getLogger(__name__)
DIM_NETHER = -1
DIM_END = 1
_zeros = {}
def string_func(array):
numpy.set_string_function(None)
string = repr(array)
string = string[:-1] + ", shape=%s)" % (array.shape,)
numpy.set_string_function(string_func)
return string
numpy.set_string_function(string_func)
class EntityListProxy(collections.MutableSequence):
"""
A proxy for the Entities and TileEntities lists of a WorldEditorChunk. Accessing an element returns an EntityRef
or TileEntityRef wrapping the element of the underlying NBT compound, with a reference to the WorldEditorChunk.
These Refs cannot be created at load time as they hold a reference to the chunk, preventing the chunk from being
unloaded when its refcount reaches zero.
"""
chunk = weakrefprop()
def __init__(self, chunk, attrName, refClass):
self.attrName = attrName
self.refClass = refClass
self.chunk = chunk
def __getitem__(self, key):
return self.refClass(getattr(self.chunk.chunkData, self.attrName)[key], self.chunk)
def __setitem__(self, key, value):
tagList = getattr(self.chunk.chunkData, self.attrName)
if isinstance(key, slice):
tagList[key] = [v.rootTag for v in value]
else:
tagList[key] = value.rootTag
def __delitem__(self, key):
del getattr(self.chunk.chunkData, self.attrName)[key]
def __len__(self):
return len(getattr(self.chunk.chunkData, self.attrName))
def insert(self, index, value):
getattr(self.chunk.chunkData, self.attrName).insert(index, value.rootTag)
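# Usage sketch (editor's note): given a WorldEditorChunk `chunk`,
# `chunk.Entities[0]` builds an EntityRef on the fly around the stored NBT
# tag, and `chunk.Entities[0] = ref` writes `ref.rootTag` back into the
# underlying chunk data.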
class WorldEditorChunk(object):
"""
This is a 16x16xH chunk in a format-independent world.
The Blocks, Data, SkyLight, and BlockLight arrays are divided into
vertical sections of 16x16x16, accessed using the `getSection` method.
"""
def __init__(self, chunkData, editor):
self.worldEditor = editor
self.chunkData = chunkData
self.cx, self.cz = chunkData.cx, chunkData.cz
self.dimName = chunkData.dimName
self.dimension = editor.getDimension(self.dimName)
self.Entities = EntityListProxy(self, "Entities", editor.adapter.EntityRef)
self.TileEntities = EntityListProxy(self, "TileEntities", editor.adapter.TileEntityRef)
#self.Entities = [editor.adapter.EntityRef(tag, self) for tag in chunkData.Entities]
#self.TileEntities = [editor.adapter.TileEntityRef(tag, self) for tag in chunkData.TileEntities]
def buildNBTTag(self):
return self.chunkData.buildNBTTag()
def __str__(self):
return u"WorldEditorChunk, coords:{0}, world: {1}, dim: {2} D:{3}".format(
(self.cx, self.cz),
self.worldEditor.displayName,
self.dimName, self.dirty)
# --- WorldEditorChunkData accessors ---
@property
def bounds(self):
return self.chunkData.bounds
@property
def chunkPosition(self):
return self.cx, self.cz
@property
def rootTag(self):
return self.chunkData.rootTag
@property
def dirty(self):
return self.chunkData.dirty
@dirty.setter
def dirty(self, val):
self.chunkData.dirty = val
# --- Chunk attributes ---
def sectionPositions(self):
return self.chunkData.sectionPositions()
def getSection(self, cy, create=False):
return self.chunkData.getSection(cy, create)
@property
def blocktypes(self):
return self.dimension.blocktypes
@property
def Biomes(self):
return self.chunkData.Biomes
@property
def HeightMap(self):
return self.chunkData.HeightMap
@property
def TerrainPopulated(self):
return self.chunkData.TerrainPopulated
@TerrainPopulated.setter
def TerrainPopulated(self, val):
self.chunkData.TerrainPopulated = val
def addEntity(self, ref):
if ref.chunk is self:
return
self.chunkData.Entities.append(ref.rootTag)
ref.chunk = self
self.dirty = True
def removeEntity(self, ref):
self.chunkData.Entities.remove(ref.rootTag)
ref.chunk = None
self.dirty = True
def removeEntities(self, entities):
for ref in entities: # xxx O(n*m)
self.removeEntity(ref)
def addTileEntity(self, ref):
if ref.chunk is self:
return
self.chunkData.TileEntities.append(ref.rootTag)
ref.chunk = self
self.dirty = True
def removeTileEntity(self, ref):
if ref.chunk is not self:
return
self.chunkData.TileEntities.remove(ref.rootTag)
ref.chunk = None
ref.rootTag = None
self.dirty = True
@property
def TileTicks(self):
"""
Directly accesses the TAG_List of TAG_Compounds. Not protected by Refs like Entities and TileEntities are.
:return:
:rtype:
"""
return self.chunkData.TileTicks
class WorldEditor(object):
def __init__(self, filename=None, create=False, readonly=False, adapterClass=None, adapter=None, resume=None):
"""
Load a Minecraft level of any format from the given filename.
If you try to create an existing world, IOError will be raised.
        :type filename: str or unicode
:type create: bool
:type readonly: bool
        :type adapter: mceditlib.anvil.adapter.AnvilWorldAdapter or mceditlib.schematic.SchematicFileAdapter
:type adapterClass: class
:type resume: None or bool
:return:
:rtype: WorldEditor
"""
self.playerCache = {}
assert not (create and readonly)
assert not create or adapterClass, "create=True requires an adapterClass"
if adapter:
self.adapter = adapter
elif adapterClass:
self.adapter = adapterClass(filename, create, readonly, resume=resume)
else:
self.adapter = findAdapter(filename, readonly, resume=resume)
self.filename = filename
self.readonly = readonly
# maps (cx, cz, dimName) tuples to WorldEditorChunk
self._loadedChunks = weakref.WeakValueDictionary()
# caches ChunkData from adapter
self._chunkDataCache = cachefunc.lru_cache_object(self._getChunkDataRaw, 1000)
self._chunkDataCache.should_decache = self._shouldUnloadChunkData
self._chunkDataCache.will_decache = self._willUnloadChunkData
# caches recently used WorldEditorChunks
self.recentChunks = collections.deque(maxlen=100)
self._allChunks = None
self.dimensions = {}
self.currentRevision = 0
def __repr__(self):
return "WorldEditor(adapter=%r)" % self.adapter
# --- Summary Info ---
@classmethod
def getWorldInfo(cls, filename):
worldInfo = findAdapter(filename, readonly=True, getInfo=True)
return worldInfo
# --- Debug ---
def setCacheLimit(self, size):
self._chunkDataCache.setCacheLimit(size)
# --- Undo/redo ---
def requireRevisions(self):
self.adapter.requireRevisions()
def undo(self):
self.gotoRevision(self.currentRevision - 1)
def redo(self):
self.gotoRevision(self.currentRevision + 1)
def beginUndo(self):
"""
Begin a new undo revision, creating a new revision in the underlying storage chain if an editable
revision is not selected.
:return:
:rtype:
"""
self.adapter.createRevision()
self.currentRevision += 1
log.info("Opened revision %d", self.currentRevision)
def commitUndo(self, revisionInfo=None):
exhaust(self.commitUndoIter(revisionInfo))
def commitUndoIter(self, revisionInfo=None):
"""
Record all changes since the last call to beginUndo into the adapter's current revision. The revision is closed
and beginUndo must be called to open the next revision.
:param revisionInfo: May be supplied to record metadata for this undo
:type revisionInfo: object | None
:return:
:rtype:
"""
self.adapter.setRevisionInfo(revisionInfo)
for status in self.syncToDiskIter():
yield status
self.adapter.closeRevision()
log.info("Closed revision %d", self.currentRevision)
def undoRevisions(self):
"""
Iterate through all revisions and return (index, revisionInfo) tuples. revisionInfo is the info stored with
commitUndo for each revision. Call selectUndoRevision with the index of the desired revision to rewind time.
:return:
:rtype:
"""
for index, revision in self.adapter.listRevisions():
yield index, revision.revisionInfo()
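    # Revision workflow sketch (editor's note, using only methods defined in
    # this class):
    #   editor.beginUndo()                  # open a fresh revision
    #   ...mutate chunks/players...
    #   editor.commitUndo(info)             # record changes and close it
    #   for index, info in editor.undoRevisions():
    #       print(index, info)
    #   editor.gotoRevision(index)          # rewind or replay to any revision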
def gotoRevision(self, index):
"""
:param index:
:type index:
:return:
:rtype:
"""
assert index is not None, "None is not a revision index!"
self.syncToDisk()
self.playerCache.clear()
changes = self.adapter.selectRevision(index)
self.currentRevision = index
if changes is None:
return
log.info("Going to revision %d", index)
log.debug("Changes: %s", changes)
self.recentChunks.clear()
for dimName, chunkPositions in changes.chunks.iteritems():
for cx, cz in chunkPositions:
self._chunkDataCache.decache(cx, cz, dimName)
self._loadedChunks.pop((cx, cz, dimName), None)
# xxx slow, scan changes for chunks and check if they are added/removed
self._allChunks = None
def getRevisionChanges(self, oldIndex, newIndex):
return self.adapter.getRevisionChanges(oldIndex, newIndex)
# --- Save ---
def syncToDisk(self):
exhaust(self.syncToDiskIter())
def syncToDiskIter(self):
"""
Write all loaded chunks, player files, etc to disk.
:return:
:rtype:
"""
dirtyPlayers = 0
for player in self.playerCache.itervalues():
# xxx should be in adapter?
if player.dirty:
dirtyPlayers += 1
player.save()
dirtyChunkCount = 0
for i, (cx, cz, dimName) in enumerate(self._chunkDataCache):
yield i, len(self._chunkDataCache), "Writing modified chunks"
chunkData = self._chunkDataCache(cx, cz, dimName)
if chunkData.dirty:
dirtyChunkCount += 1
self.adapter.writeChunk(chunkData)
chunkData.dirty = False
self.adapter.syncToDisk()
log.info(u"Saved %d chunks and %d players", dirtyChunkCount, dirtyPlayers)
def saveChanges(self):
exhaust(self.saveChangesIter())
def saveChangesIter(self):
if self.readonly:
raise IOError("World is opened read only.")
self.syncToDisk()
self.playerCache.clear()
for status in self.adapter.saveChangesIter():
yield status
def saveToFile(self, filename):
# XXXX only works with .schematics!!!
self.adapter.saveToFile(filename)
def close(self):
"""
Unload all chunks and close all open filehandles.
"""
self.adapter.close()
self.recentChunks.clear()
self._allChunks = None
self._loadedChunks.clear()
self._chunkDataCache.clear()
# --- World limits ---
@property
def maxHeight(self):
return self.adapter.maxHeight
# --- World info ---
@property
def displayName(self):
return displayName(self.filename)
@property
def blocktypes(self):
return self.adapter.blocktypes
# --- Chunk I/O ---
def preloadChunkPositions(self):
log.info(u"Scanning for regions in %s...", self.adapter.filename)
self._allChunks = collections.defaultdict(set)
for dimName in self.adapter.listDimensions():
start = time.time()
chunkPositions = set(self.adapter.chunkPositions(dimName))
chunkPositions.update((cx, cz) for cx, cz, cDimName in self._chunkDataCache if cDimName == dimName)
log.info("Dim %s: Found %d chunks in %0.2f seconds.",
dimName,
len(chunkPositions),
time.time() - start)
self._allChunks[dimName] = chunkPositions
def chunkCount(self, dimName):
return self.adapter.chunkCount(dimName)
def chunkPositions(self, dimName):
"""
Iterates over (xPos, zPos) tuples, one for each chunk in the given dimension.
May initiate a costly chunk scan.
:param dimName: Name of dimension
:type dimName: str
:return:
:rtype:
"""
if self._allChunks is None:
self.preloadChunkPositions()
return self._allChunks[dimName].__iter__()
def _getChunkDataRaw(self, cx, cz, dimName):
"""
Wrapped by cachefunc.lru_cache in __init__
"""
return self.adapter.readChunk(cx, cz, dimName)
def _shouldUnloadChunkData(self, key):
return key not in self._loadedChunks
def _willUnloadChunkData(self, chunkData):
if chunkData.dirty and not self.readonly:
self.adapter.writeChunk(chunkData)
def getChunk(self, cx, cz, dimName, create=False):
"""
:return: Chunk at the given position.
:rtype: WorldEditorChunk
"""
if create and not self.containsChunk(cx, cz, dimName):
self.createChunk(cx, cz, dimName)
chunk = self._loadedChunks.get((cx, cz, dimName))
if chunk is not None:
return chunk
startTime = time.time()
chunkData = self._chunkDataCache(cx, cz, dimName)
chunk = WorldEditorChunk(chunkData, self)
duration = time.time() - startTime
if duration > 1:
log.warn("Chunk %s took %0.2f seconds to load! entities=%s tileentities=%s tileticks=%s",
(cx, cz), duration, len(chunk.Entities), len(chunk.TileEntities),
len(chunk.rootTag.get("TileTicks", ())))
self._loadedChunks[cx, cz, dimName] = chunk
self.recentChunks.append(chunk)
return chunk
# --- Chunk dirty bit ---
def listDirtyChunks(self):
for cx, cz, dimName in self._chunkDataCache:
chunkData = self._chunkDataCache(cx, cz, dimName)
if chunkData.dirty:
yield cx, cz, dimName
# --- HeightMaps ---
def heightMapAt(self, x, z, dimName):
zc = z >> 4
xc = x >> 4
xInChunk = x & 0xf
zInChunk = z & 0xf
ch = self.getChunk(xc, zc, dimName)
heightMap = ch.HeightMap
return heightMap[zInChunk, xInChunk] # HeightMap indices are backwards
# --- Chunk manipulation ---
def containsChunk(self, cx, cz, dimName):
if self._allChunks is not None:
return (cx, cz) in self._allChunks[dimName]
if (cx, cz, dimName) in self._chunkDataCache:
return True
return self.adapter.containsChunk(cx, cz, dimName)
def containsPoint(self, x, y, z, dimName):
if y < 0 or y > 127:
return False
return self.containsChunk(x >> 4, z >> 4, dimName)
def createChunk(self, cx, cz, dimName):
if self.containsChunk(cx, cz, dimName):
raise ValueError("%r:Chunk %s already present in %s!".format(self, (cx, cz), dimName))
if hasattr(self.adapter, 'createChunk'):
if self._allChunks is not None:
self._allChunks[dimName].add((cx, cz))
chunk = self.adapter.createChunk(cx, cz, dimName)
self._chunkDataCache.store(chunk, cx, cz, dimName)
def deleteChunk(self, cx, cz, dimName):
self.adapter.deleteChunk(cx, cz, dimName)
if self._allChunks is not None:
self._allChunks[dimName].discard((cx, cz))
# --- World metadata ---
def getWorldMetadata(self):
"""
Return an object containing global info about the world.
Different level formats can return different objects for the world metadata.
At the very least, you can expect the object to have Spawn and Seed attributes.
Currently, only AnvilWorldMetadata is ever returned.
:return:
"""
return self.adapter.metadata
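    # e.g. (sketch): meta = editor.getWorldMetadata(); meta.Spawn and
    # meta.Seed are available regardless of the underlying level format.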
# --- Maps ---
def listMaps(self):
"""
Return a list of map IDs for this world's map items.
:return:
"""
return self.adapter.listMaps()
def getMap(self, mapID):
"""
Return a map object for the given map ID
:param mapID: Map ID returned by listMaps
:return:
"""
return self.adapter.getMap(mapID)
def createMap(self):
return self.adapter.createMap()
# --- Players ---
def listPlayers(self):
return self.adapter.listPlayers()
def getPlayer(self, playerUUID=""):
player = self.playerCache.get(playerUUID)
if player is None:
player = self.adapter.getPlayer(playerUUID)
self.playerCache[playerUUID] = player
return player
def createPlayer(self, playerName):
return self.adapter.createPlayer(playerName)
# --- Dimensions ---
def listDimensions(self):
return self.adapter.listDimensions()
def getDimension(self, dimName=""):
"""
:type dimName: str
:return:
:rtype: WorldEditorDimension
"""
dim = self.dimensions.get(dimName)
if dim is None:
dim = WorldEditorDimension(self, dimName)
self.dimensions[dimName] = dim
return dim
def dimNameFromNumber(self, dimNo):
"""
Return the dimension name for the given number, as would be stored in the player's "dimension" tag.
Handles "DIM1" and "DIM-1" for vanilla dimensions. Most mods add more dimensions similar to "DIM-42", "DIM-100"
but some mods like Galacticraft use "DIM_SPACESTATION3" so make an educated guess about the dimension's name
ending with its number.
:param dimNo:
:type dimNo:
:return:
:rtype:
"""
dimNoStr = str(dimNo)
for name in self.listDimensions():
if name.endswith(dimNoStr):
return name
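    # e.g. (sketch): dimNo=-1 finds the vanilla "DIM-1" (Nether), and a
    # Galacticraft-style "DIM_SPACESTATION3" would match dimNo=3, since names
    # are matched purely by their numeric suffix.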
def dimNumberFromName(self, dimName):
matches = re.findall(r'-[0-9]+', dimName)
if not len(matches):
raise ValueError("Could not parse a dimension number from %s", dimName)
return int(matches[-1])
# --- Entity Creation ---
def createEntity(self, entityID):
"""
Create a new EntityRef subclass matching the given entity ID.
If no subclass matches, return None.
Does not add the EntityRef to this world.
:param entityID:
:return:
"""
ref = self.adapter.EntityRef.create(entityID)
ref.parent = self # make blockTypes available for item IDs
return ref
class WorldEditorDimension(object):
def __init__(self, worldEditor, dimName):
self.worldEditor = worldEditor
self.adapter = worldEditor.adapter
self.dimName = dimName
def __repr__(self):
return "WorldEditorDimension(dimName=%r, adapter=%r)" % (self.dimName, self.adapter)
# --- Bounds ---
_bounds = None
@property
def bounds(self):
"""
:return:
:rtype: BoundingBox
"""
if self._bounds is None:
if hasattr(self.adapter, "getDimensionBounds"):
self._bounds = self.adapter.getDimensionBounds(self.dimName)
else:
self._bounds = self.getWorldBounds()
return self._bounds
def getWorldBounds(self):
chunkPositions = list(self.chunkPositions())
if len(chunkPositions) == 0:
return BoundingBox((0, 0, 0), (0, 0, 0))
chunkPositions = numpy.array(chunkPositions)
mincx = (chunkPositions[:, 0]).min()
maxcx = (chunkPositions[:, 0]).max()
mincz = (chunkPositions[:, 1]).min()
maxcz = (chunkPositions[:, 1]).max()
origin = (mincx << 4, 0, mincz << 4)
size = ((maxcx - mincx + 1) << 4, self.worldEditor.maxHeight, (maxcz - mincz + 1) << 4)
return BoundingBox(origin, size)
@property
def size(self):
return self.bounds.size
@property
def blocktypes(self):
return self.worldEditor.blocktypes
# --- Chunks ---
def chunkCount(self):
return self.worldEditor.chunkCount(self.dimName)
def chunkPositions(self):
return self.worldEditor.chunkPositions(self.dimName)
def containsChunk(self, cx, cz):
return self.worldEditor.containsChunk(cx, cz, self.dimName)
def getChunk(self, cx, cz, create=False):
"""
:type cx: int or dtype
:type cz: int or dtype
:type create: bool
:return:
:rtype: WorldEditorChunk
"""
return self.worldEditor.getChunk(cx, cz, self.dimName, create)
def getChunks(self, chunkPositions=None):
"""
        :type chunkPositions: iterator
:rtype: iterator
"""
if chunkPositions is None:
chunkPositions = self.chunkPositions()
for cx, cz in chunkPositions:
if self.containsChunk(cx, cz):
yield self.getChunk(cx, cz)
def createChunk(self, cx, cz):
return self.worldEditor.createChunk(cx, cz, self.dimName)
def deleteChunk(self, cx, cz):
self.worldEditor.deleteChunk(cx, cz, self.dimName)
# --- Entities and TileEntities ---
def getEntities(self, selection, **kw):
for chunk in self.getChunks(selection.chunkPositions()):
for ref in chunk.Entities:
if ref.Position in selection:
if matchEntityTags(ref, kw):
yield ref
def getTileEntities(self, selection, **kw):
for chunk in self.getChunks(selection.chunkPositions()):
for ref in chunk.TileEntities:
if ref.Position in selection:
if matchEntityTags(ref, kw):
yield ref
def getTileEntity(self, pos, **kw):
cx = pos[0] >> 4
cz = pos[2] >> 4
chunk = self.getChunk(cx, cz)
for ref in chunk.TileEntities:
if ref.Position == pos:
if matchEntityTags(ref, kw):
return ref
def addEntity(self, ref):
x, y, z = ref.Position
cx, cz = chunk_pos(x, z)
chunk = self.getChunk(cx, cz, create=True)
chunk.addEntity(ref.copy())
def addTileEntity(self, ref):
x, y, z = ref.Position
cx, cz = chunk_pos(x, z)
chunk = self.getChunk(cx, cz, create=True)
existing = [old for old in chunk.TileEntities
if old.Position == (x, y, z)]
for e in existing:
chunk.removeTileEntity(e)
chunk.addTileEntity(ref.copy())
# --- Import/Export ---
def copyBlocksIter(self, sourceLevel, sourceSelection, destinationPoint, blocksToCopy=None, entities=True, create=False, biomes=False, updateLights=False):
return copyBlocksIter(self, sourceLevel, sourceSelection, destinationPoint, blocksToCopy, entities, create,
biomes, updateLights)
def copyBlocks(self, sourceLevel, sourceSelection, destinationPoint, blocksToCopy=None, entities=True, create=False, biomes=False, updateLights=False):
return exhaust(self.copyBlocksIter(sourceLevel, sourceSelection, destinationPoint, blocksToCopy,
entities, create, biomes, updateLights))
def exportSchematicIter(self, selection):
schematic = createSchematic(shape=selection.size, blocktypes=self.blocktypes)
return itertools.chain(copyBlocksIter(schematic.getDimension(), self, selection, (0, 0, 0)), [schematic])
def exportSchematic(self, selection):
"""
:type selection: mceditlib.box.BoundingBox
:return:
:rtype: WorldEditor
"""
return exhaust(self.exportSchematicIter(selection))
def importSchematicIter(self, schematic, destPoint):
dim = schematic.getDimension()
return copyBlocksIter(self, dim, dim.bounds, destPoint, biomes=True, create=True)
def importSchematic(self, schematic, destPoint):
return self.importSchematicIter(schematic, destPoint)
# --- Fill/Replace ---
def fillBlocksIter(self, box, block, blocksToReplace=(), updateLights=True):
return FillBlocksOperation(self, box, block, blocksToReplace, updateLights)
def fillBlocks(self, box, block, blocksToReplace=(), updateLights=True):
return exhaust(self.fillBlocksIter(box, block, blocksToReplace, updateLights))
# --- Analyze ---
def analyzeIter(self, selection):
return AnalyzeOperation(self, selection)
# --- Blocks by single coordinate ---
def getBlock(self, x, y, z):
ID = self.getBlockID(x, y, z)
meta = self.getBlockData(x, y, z)
return self.blocktypes[ID, meta]
def setBlock(self, x, y, z, blocktype):
if not isinstance(blocktype, BlockType):
blocktype = self.blocktypes[blocktype]
self.setBlockID(x, y, z, blocktype.ID)
self.setBlockData(x, y, z, blocktype.meta)
def getBlockID(self, x, y, z, default=0):
cx = x >> 4
cy = y >> 4
cz = z >> 4
if self.containsChunk(cx, cz):
chunk = self.getChunk(cx, cz)
sec = chunk.getSection(cy)
if sec:
array = sec.Blocks
if array is not None:
return array[y & 0xf, z & 0xf, x & 0xf]
return default
def setBlockID(self, x, y, z, value):
cx = x >> 4
cy = y >> 4
cz = z >> 4
if self.containsChunk(cx, cz):
chunk = self.getChunk(cx, cz)
sec = chunk.getSection(cy, create=True)
if sec:
array = sec.Blocks
assert array is not None
if array is not None:
array[y & 0xf, z & 0xf, x & 0xf] = value
chunk.dirty = True
def getBlockData(self, x, y, z, default=0):
cx = x >> 4
cy = y >> 4
cz = z >> 4
if self.containsChunk(cx, cz):
chunk = self.getChunk(cx, cz)
sec = chunk.getSection(cy)
if sec:
array = sec.Data
if array is not None:
return array[y & 0xf, z & 0xf, x & 0xf]
return default
def setBlockData(self, x, y, z, value):
cx = x >> 4
cy = y >> 4
cz = z >> 4
if self.containsChunk(cx, cz):
chunk = self.getChunk(cx, cz)
sec = chunk.getSection(cy, create=True)
if sec:
array = sec.Data
assert array is not None
if array is not None:
array[y & 0xf, z & 0xf, x & 0xf] = value
chunk.dirty = True
def getLight(self, arrayName, x, y, z, default=0):
cx = x >> 4
cy = y >> 4
cz = z >> 4
if self.containsChunk(cx, cz):
chunk = self.getChunk(cx, cz)
sec = chunk.getSection(cy)
if sec:
array = getattr(sec, arrayName)
if array is not None:
return array[y & 0xf, z & 0xf, x & 0xf]
return default
def setLight(self, arrayName, x, y, z, value):
cx = x >> 4
cy = y >> 4
cz = z >> 4
if self.containsChunk(cx, cz):
chunk = self.getChunk(cx, cz)
sec = chunk.getSection(cy, create=True)
if sec:
array = getattr(sec, arrayName)
if array is not None:
array[y & 0xf, z & 0xf, x & 0xf] = value
chunk.dirty = True
def getBlockLight(self, x, y, z, default=0):
return self.getLight("BlockLight", x, y, z, default)
def setBlockLight(self, x, y, z, value):
return self.setLight("BlockLight", x, y, z, value)
def getSkyLight(self, x, y, z, default=0):
return self.getLight("SkyLight", x, y, z, default)
def setSkyLight(self, x, y, z, value):
return self.setLight("SkyLight", x, y, z, value)
def getBiomeID(self, x, z, default=0):
cx = x >> 4
cz = z >> 4
if self.containsChunk(cx, cz):
chunk = self.getChunk(cx, cz)
array = chunk.Biomes
if array is not None:
return array[z & 0xf, x & 0xf]
return default
def setBiomeID(self, x, z, value):
cx = x >> 4
cz = z >> 4
if self.containsChunk(cx, cz):
chunk = self.getChunk(cx, cz)
array = chunk.Biomes
assert array is not None
if array is not None:
array[z & 0xf, x & 0xf] = value
chunk.dirty = True
# --- Blocks by coordinate arrays ---
def getBlocks(self, x, y, z,
return_Blocks=True,
return_Data=False,
return_BlockLight=False,
return_SkyLight=False,
return_Biomes=False):
return getBlocks(self, x, y, z,
return_Blocks,
return_Data,
return_BlockLight,
return_SkyLight,
return_Biomes)
def setBlocks(self, x, y, z,
Blocks=None,
Data=None,
BlockLight=None,
SkyLight=None,
Biomes=None,
updateLights=True):
return setBlocks(self, x, y, z,
Blocks,
Data,
BlockLight,
SkyLight,
Biomes,
updateLights and self.adapter.hasLights)
| bsd-3-clause | 7,376,605,027,935,685,000 | 30.647358 | 159 | 0.594104 | false |
devlights/try-python | trypython/stdlib/dataclasses_/dataclasses01.py | 1 | 1767 | """
Sample code for the dataclasses package.
Covers its basic usage.
REFERENCES:: http://bit.ly/2KTZynw
http://bit.ly/2KJCnwk
http://bit.ly/2KHeNA9
http://bit.ly/2KFLGxc
"""
import dataclasses as dc
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr
@dc.dataclass
class Data1:
"""
    The dataclasses.dataclass decorator makes it easy to define a data type.
    The specification is laid out in PEP 557.
    A plain class definition can of course produce the equivalent, but the
    decorator is convenient: the constructor and the other dunder methods are
    generated automatically and sensibly.
    The following are defined automatically:
- __init__
- __repr__
- __eq__
"""
name: str
unit_price: float
quantity: int = 0
def total_cost(self) -> float:
return self.unit_price * self.quantity
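# A quick sketch (not part of the original sample) of the generated dunders:
#   Data1('x', 1.0, 2) == Data1('x', 1.0, 2)  # True, via the generated __eq__
#   repr(Data1('x', 1.0))  # "Data1(name='x', unit_price=1.0, quantity=0)"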
class Sample(SampleBase):
def exec(self):
        # Fields declared without a default must be supplied at construction
        # time; quantity has a default, so it can be omitted and set later.
obj = Data1(name='test', unit_price=300.5)
obj.quantity = 5
        # The auto-generated __repr__ yields a readable string representation.
pr('obj', obj)
        # User-defined methods work just like on a regular class.
pr('obj.total_cost', obj.total_cost())
def go():
obj = Sample()
obj.exec()
| mit | -441,771,589,765,826,600 | 21.12963 | 65 | 0.650209 | false |
open-machine-learning/mldata | repository/challenge_urls.py | 1 | 1959 | """
URL patterns for Repository
"""
from django.conf.urls.defaults import *
import repository.views as views
import repository.views.challenge
urlpatterns = patterns('',
url(r'^$', views.challenge.index, name='challenge_index'),
url(r'^by_pub_date/$', views.challenge.index, {'order_by' : '-pub_date'}, name='challenge_index_by_pub_date'),
url(r'^by_name/$', views.challenge.index, {'order_by' : 'name'}, name='challenge_index_by_name'),
url(r'^by_rating/$', views.challenge.index, {'order_by' : '-rating_avg'}, name='challenge_index_by_rating'),
url(r'^by_submitter/$', views.challenge.index, {'order_by' : 'user__username'}, name='challenge_index_by_submitter'),
url(r'^by_downloads/$', views.challenge.index, {'order_by' : '-downloads'}, name='challenge_index_by_downloads'),
url(r'^by_views/$', views.challenge.index, {'order_by' : '-hits'}, name='challenge_index_by_views'),
url(r'^my/$', views.challenge.my, name='challenge_my'),
url(r'^view/(?P<id>\d+)/$', views.challenge.view, name='challenge_view'),
url(r'^viewslug/(?P<slug_challenge>[A-Za-z0-9-_]+)/$', views.challenge.view_slug, name='challenge_view_slug'),
url(r'^viewslug/(?P<slug_challenge>[A-Za-z0-9-_]+)/(?P<version>\d+)/$', views.challenge.view_slug, name='challenge_view_slug_ver'),
url(r'^new/$', views.challenge.new, name='challenge_new'),
url(r'^edit/(?P<id>\d+)/$', views.challenge.edit, name='challenge_edit'),
url(r'^delete/(?P<id>\d+)/$', views.challenge.delete, name='challenge_delete'),
url(r'^activate/(?P<id>\d+)/$', views.challenge.activate, name='challenge_activate'),
url(r'^fork/(?P<id>\d+)/$', views.challenge.fork, name='challenge_fork'),
url(r'^rate/(?P<id>\d+)/$', views.challenge.rate, name='challenge_rate'),
url(r'^score/download/(?P<id>\d+)/$', views.challenge.score_download, name='challenge_download'),
url(r'^tasks/(?P<id>\d+)/$', views.challenge.get_tasks, name='challenge_tasks'),
)
| gpl-3.0 | -869,931,197,066,796,000 | 66.551724 | 135 | 0.649311 | false |
ibaidev/bolib | bolib/objective_functions/branin.py | 1 | 1899 | # -*- coding: utf-8 -*-
#
# Copyright 2018 Ibai Roman
#
# This file is part of BOlib.
#
# BOlib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BOlib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BOlib. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from .objective_function import ObjectiveFunction
class Branin(ObjectiveFunction):
"""
"""
def __init__(self):
"""
"""
super(Branin, self).__init__(
d=2,
gaussian_noise=0,
f_bias=0.0,
max_eval=200,
lower=[-5, 0],
upper=[10, 15],
objective=[9.42478, 2.475],
objective_val=0.397887,
params=['x', 'y'],
types=[float, float]
)
def batch_evaluate(self, points):
"""
Branin test function
The number of variables n = 2.
constraints:
-5 <= x <= 10, 0 <= y <= 15
three global optima: (-pi, 12.275), (pi, 2.275), (9.42478, 2.475),
where branin = 0.397887
:param points:
:type points:
:return:
:rtype:
"""
x = points[:, 0][:, None]
y = points[:, 1][:, None]
result = np.power((y-(5.1/(4 * np.power(np.pi, 2))) *
np.power(x, 2)+5 * x/np.pi-6), 2)
result += 10*(1-1/(8*np.pi))*np.cos(x)+10
return result + self.f_bias
| gpl-3.0 | 809,592,971,549,454 | 28.215385 | 75 | 0.546077 | false |
hedgerwang/simple_react_py_app | lib/py/webserver.py | 1 | 2678 | #!/usr/bin/env python
# System
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from os import curdir, sep
import BaseHTTPServer
import CGIHTTPServer
import os
import socket
import string, cgi, time
import sys
import urlparse
# Lib
import app_config
import webserver_util
app_config_path = (
os.path.dirname(app_config.__file__) + '/app_config.py'
)
webserver_util_path = (
os.path.dirname(webserver_util.__file__) + '/webserver_util.py'
)
app_config_update_time = os.path.getmtime(app_config_path)
webserver_util_update_time = os.path.getmtime(webserver_util_path)
def get_local_ip_address(target):
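    # A connected UDP socket sends no packets; connect() merely selects the
    # outbound interface, whose address getsockname() then reports.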
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect((target,8000))
ipaddr = s.getsockname()[0]
s.close()
return ipaddr
def should_refresh():
global app_config_update_time
app_config_update_time_2 = os.path.getmtime(app_config_path)
if app_config_update_time != app_config_update_time_2:
# app_config_update_time = app_config_update_time_2
return True
global webserver_util_update_time
webserver_util_update_time_2 = os.path.getmtime(webserver_util_path)
if webserver_util_update_time != webserver_util_update_time_2:
# webserver_util_update_time = webserver_util_update_time_2
return True
print 'app_config_update_time_2=' + str(app_config_update_time_2)
    print 'webserver_util_update_time_2=' + str(webserver_util_update_time_2)
return False
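# Editor's note on the reload scheme above: do_GET calls should_refresh()
# after serving each request and reload()s app_config/webserver_util whenever
# their mtimes change -- a crude hot reload that avoids restarting the server.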
class WebHandler(BaseHTTPRequestHandler) :
def do_GET(self) :
        mime = 'text/plain'
content = ''
try:
parsed_url = urlparse.urlparse(self.path)
query_params = urlparse.parse_qs(parsed_url.query)
data = webserver_util.handle_get(self.path, query_params)
            mime = data.get('mime')
content = data.get('content')
        except Exception as error :
            self.send_error(404, 'File Not Found: "%s"' % str(error))
            return
self.send_response(200)
        self.send_header('Content-type', mime)
self.end_headers()
self.wfile.write(content)
if should_refresh():
print '-' * 80
print 'Soft Refresh Web Server from version "%s"' % app_config.VERSION
print '-' * 80
reload(app_config)
reload(webserver_util)
def do_POST(self) :
raise Exception('NO POST FOR NOW')
def main() :
    server = None
    try :
        # port must not be smaller than 1024
        server = HTTPServer(('', app_config.PORT), WebHandler)
        print 'started httpserver...\n\nhttp://%s:%s' % (
            get_local_ip_address('www.google.com'),
            app_config.PORT
        )
        server.serve_forever()
    except KeyboardInterrupt :
        print '^C received, shutting down server'
        if server is not None :
            server.socket.close()
if __name__ == '__main__' :
main()
| mit | 4,164,137,172,991,248,400 | 27.189474 | 76 | 0.685213 | false |
CityGenerator/Megacosm-Generator | fixtures/__init__.py | 1 | 1077 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Fixtures package
"""
import fixtures.artwork
import fixtures.bond
import fixtures.business
import fixtures.city
import fixtures.continent
import fixtures.country
import fixtures.cuisine
import fixtures.currency
import fixtures.curse
import fixtures.deity
import fixtures.drink
import fixtures.dungeon
import fixtures.event
import fixtures.flag
import fixtures.flaw
import fixtures.gem
import fixtures.generator
import fixtures.geomorphdungeon
import fixtures.govt
import fixtures.grafitti
import fixtures.jobposting
import fixtures.leader
import fixtures.legend
import fixtures.magicitem
import fixtures.misfire
import fixtures.moon
import fixtures.mundaneitem
import fixtures.npc
import fixtures.organization
import fixtures.motivation
import fixtures.planet
import fixtures.phobia
import fixtures.region
import fixtures.resource
import fixtures.roguedungeon
import fixtures.rumor
import fixtures.sect
import fixtures.star
import fixtures.starsystem
import fixtures.street
import fixtures.wanted
import fixtures.weather
| gpl-2.0 | 4,420,723,242,532,615,000 | 20.979592 | 31 | 0.848654 | false |
tsmt/j34Bot | telebot34.py | 1 | 4185 | import urllib.request
import json
class TeleBot34:
    def __init__(self, token):
        self.token = token
        self.botinfo = None
        self.curroffset = 0
        # per-instance state: class-level mutable defaults would be shared
        # across every TeleBot34 instance
        self.updateQueue = []
        self.regClients = []
        self.getBotInfo()
def getBotInfo(self):
# make request
f = urllib.request.urlopen("https://api.telegram.org/bot" + self.token +
"/getMe")
if f.status == 200:
self.botinfo = json.loads(f.read().decode("utf-8"))
def getBotUpdates(self):
# make request
f = urllib.request.urlopen("https://api.telegram.org/bot" + self.token +
"/getUpdates")
if f.status == 200:
update = json.loads(f.read().decode("utf-8"))
self.updateQueue.extend(update['result'])
# search for biggest offset
for e in self.updateQueue:
if e['update_id'] > self.curroffset:
self.curroffset = e['update_id']
        # acknowledge the fetched updates: a request with offset+1 marks them as read
f = urllib.request.urlopen("https://api.telegram.org/bot" + self.token +
"/getUpdates?offset=" + str(self.curroffset+1))
if f.status != 200:
print("Error deleting updates")
def sendMessage(self, message, client):
postParams = {
'chat_id': client['id'],
'text': message
}
postParams = json.dumps(postParams).encode('utf-8')
req = urllib.request.Request("https://api.telegram.org/bot" + self.token +
"/sendMessage", data=postParams,
headers={'content-type': 'application/json'})
response = urllib.request.urlopen(req)
        if response.status == 200:
print("send ok")
def sendMessageToAllRegistered(self, message):
for u in self.regClients:
self.sendMessage(message, u)
def handleBotUpdates(self):
#iterate items
for e in self.updateQueue:
# get username
if 'username' in e['message']['from']:
username = e['message']['from']['username']
elif 'first_name' in e['message']['from']:
username = e['message']['from']['first_name']
else:
                username = e['message']['from']['id']
# check for texts
if e['message']['text'] == '/start':
print("user registers " + username, end='')
# search for id, dont register if registered
isRegistered = False
for u in self.regClients:
if u['id'] == e['message']['from']['id']:
isRegistered = True
# TODO: send telegram answers
if not isRegistered:
self.regClients.append(e['message']['from'])
self.sendMessage("Du wurdest registriert!", e['message']['from'])
print("... registered")
else:
print("... not registered")
self.sendMessage("Du warst schon registriert!", e['message']['from'])
pass
elif e['message']['text'] == '/stop':
print("user unregisters " + username, end='')
# search for element
isRemoved = False
for u in self.regClients:
if e['message']['from']['id'] == u['id']:
print("... removed")
self.regClients.remove(u)
self.sendMessage("Du wurdest entfernt!", e['message']['from'])
isRemoved = True
if not isRemoved:
print("... not removed")
self.sendMessage("Du warst nicht auf der Liste :/!", e['message']['from'])
self.updateQueue.clear()
def loop(self):
self.getBotUpdates()
self.handleBotUpdates()
if __name__ == "__main__":
t = TeleBot34('306535330:AAGMiYkaXuTNyXK_qUDKKnH_bCslZbQ2oqE')
t.getBotUpdates()
t.handleBotUpdates()
t.sendMessageToAllRegistered("Hallo du Hurensohn")
| unlicense | -2,532,760,651,653,362,000 | 35.710526 | 94 | 0.49773 | false |
xiaohuanshu/Video-Share-site | douban.py | 1 | 2672 | # coding: UTF-8
import urllib
import urllib2
import re
from pyquery import PyQuery as pq
from lxml import etree
import json
import sys
import string
reload(sys)
sys.setdefaultencoding("utf-8")
# urllib helper for issuing HTTP requests (GET, or POST when data is supplied)
def open(aurl,post='',Referer=''):
#proxy = 'http://127.0.0.1:8088'
#opener = urllib2.build_opener( urllib2.ProxyHandler({'http':proxy}) )
#urllib2.install_opener(opener)
if post!='':
test_data_urlencode = urllib.urlencode(post)
req = urllib2.Request(url=aurl,data = test_data_urlencode)
else:
req = urllib2.Request(url=aurl)
if Referer!='':
req.add_header('Referer',Referer)
res_data = urllib2.urlopen(req)
return res_data
def timedeal(t):
t=string.atoi(t)
h=t/60
m=t-(h*60)
return "%02d:%2d"%(h,m)
# program entry point
if __name__ == '__main__':
try:
moviename=sys.argv[1].decode('utf-8')
url="http://movie.douban.com/subject_search?search_text="+urllib.quote(moviename.encode("utf8"))
res = open(url).read()#.decode('utf8')
d = pq(res)
item = d(".item").eq(0)
title = item(".nbg").attr('title')
href=item(".nbg").attr('href')
#print title
res = open(href).read()#.decode('utf8')
d = pq(res)
info = d('#info').html()
#info = info.replace("<br/>","\n")
info = re.sub('<[^>]+>','',info).strip()
info = info.replace(" ","")
info = info.replace("\n\n","\n")
#print info
indent = d('#link-report')
intro=indent("span").eq(0).text()
if u"... (展开全部)" in intro:
intro=indent(".hidden").eq(0).text()
        try:
            # runtime: text between u"片长:" ("duration:") and u"分钟" ("minutes")
            time = timedeal(re.findall(u"(?<=片长:).*?(?=分钟)", info, re.DOTALL)[0])
        except (IndexError, ValueError):
            time = ''
        # genres: the slash-separated text after u"类型:" ("genre:")
        type = re.findall(u"(?<=类型:).*?(?=\n)", info, re.DOTALL)[0].split("/")
#print intro
res = open(href+"/photos?type=R").read()#.decode('utf8')
d = pq(res)
poster = d('.poster-col4')
posterurl = poster('li').eq(0)('div')('a').attr('href')
        try:
            posterurl = re.findall(r"(?<=photos/photo/).*?(?=/)", posterurl, re.DOTALL)[0]
        except (IndexError, TypeError):
            posterurl = ''
#posterurl = "http://img5.douban.com/view/photo/raw/public/"+posterurl+".jpg"
#print posterurl
ele={"title":title,"info":info,"intro":intro,"posterurl":posterurl,"time":time,"type":type}
ele.update({"status":"ok"})
print json.dumps(ele,ensure_ascii=False,indent=2)
    except Exception:
ele={}
ele.update({"status":"error"})
print json.dumps(ele,ensure_ascii=False,indent=2) | apache-2.0 | 3,239,341,484,663,931,000 | 33.565789 | 104 | 0.55179 | false |
CenterForOpenScience/lookit-api | accounts/migrations/0048_add_otp_model.py | 1 | 1099 | # Generated by Django 3.0.7 on 2020-07-21 15:51
import django.db.models.deletion
import pyotp
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("accounts", "0047_remove_user_organization"),
]
operations = [
migrations.CreateModel(
name="GoogleAuthenticatorTOTP",
fields=[
(
"user",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE,
primary_key=True,
related_name="_otp",
serialize=False,
to=settings.AUTH_USER_MODEL,
),
),
(
"secret",
models.CharField(
db_index=True, default=pyotp.random_base32, max_length=16
),
),
("activated", models.BooleanField(default=False)),
],
),
]
| apache-2.0 | -7,623,430,741,125,163,000 | 27.921053 | 81 | 0.463148 | false |
NixaSoftware/CVis | venv/lib/python2.7/site-packages/pandas/tests/frame/test_indexing.py | 1 | 106857 | # -*- coding: utf-8 -*-
from __future__ import print_function
from warnings import catch_warnings
from datetime import datetime, date, timedelta, time
from pandas.compat import map, zip, range, lrange, lzip, long
from pandas import compat
from numpy import nan
from numpy.random import randn
import pytest
import numpy as np
import pandas.core.common as com
from pandas import (DataFrame, Index, Series, notna, isna,
MultiIndex, DatetimeIndex, Timestamp,
date_range)
import pandas as pd
from pandas._libs.tslib import iNaT
from pandas.tseries.offsets import BDay
from pandas.core.dtypes.common import (
is_float_dtype,
is_integer,
is_scalar)
from pandas.util.testing import (assert_almost_equal,
assert_series_equal,
assert_frame_equal)
from pandas.core.indexing import IndexingError
import pandas.util.testing as tm
from pandas.tests.frame.common import TestData
class TestDataFrameIndexing(TestData):
def test_getitem(self):
# Slicing
sl = self.frame[:20]
assert len(sl.index) == 20
# Column access
for _, series in compat.iteritems(sl):
assert len(series.index) == 20
assert tm.equalContents(series.index, sl.index)
for key, _ in compat.iteritems(self.frame._series):
assert self.frame[key] is not None
assert 'random' not in self.frame
with tm.assert_raises_regex(KeyError, 'random'):
self.frame['random']
df = self.frame.copy()
df['$10'] = randn(len(df))
ad = randn(len(df))
df['@awesome_domain'] = ad
with pytest.raises(KeyError):
df.__getitem__('df["$10"]')
res = df['@awesome_domain']
tm.assert_numpy_array_equal(ad, res.values)
def test_getitem_dupe_cols(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=['a', 'a', 'b'])
        with pytest.raises(KeyError):
            df[['baf']]
def test_get(self):
b = self.frame.get('B')
assert_series_equal(b, self.frame['B'])
assert self.frame.get('foo') is None
assert_series_equal(self.frame.get('foo', self.frame['B']),
self.frame['B'])
# None
# GH 5652
for df in [DataFrame(), DataFrame(columns=list('AB')),
DataFrame(columns=list('AB'), index=range(3))]:
result = df.get(None)
assert result is None
def test_getitem_iterator(self):
idx = iter(['A', 'B', 'C'])
result = self.frame.loc[:, idx]
expected = self.frame.loc[:, ['A', 'B', 'C']]
assert_frame_equal(result, expected)
idx = iter(['A', 'B', 'C'])
result = self.frame.loc[:, idx]
expected = self.frame.loc[:, ['A', 'B', 'C']]
assert_frame_equal(result, expected)
def test_getitem_list(self):
self.frame.columns.name = 'foo'
result = self.frame[['B', 'A']]
result2 = self.frame[Index(['B', 'A'])]
expected = self.frame.loc[:, ['B', 'A']]
expected.columns.name = 'foo'
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
assert result.columns.name == 'foo'
with tm.assert_raises_regex(KeyError, 'not in index'):
self.frame[['B', 'A', 'food']]
with tm.assert_raises_regex(KeyError, 'not in index'):
self.frame[Index(['B', 'A', 'foo'])]
# tuples
df = DataFrame(randn(8, 3),
columns=Index([('foo', 'bar'), ('baz', 'qux'),
('peek', 'aboo')], name=['sth', 'sth2']))
result = df[[('foo', 'bar'), ('baz', 'qux')]]
expected = df.iloc[:, :2]
assert_frame_equal(result, expected)
assert result.columns.names == ['sth', 'sth2']
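        # note: the tuple keys live in a flat Index (not a MultiIndex),
        # so df[[tup, ...]] selects whole columns by their tuple label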
def test_getitem_callable(self):
# GH 12533
result = self.frame[lambda x: 'A']
tm.assert_series_equal(result, self.frame.loc[:, 'A'])
result = self.frame[lambda x: ['A', 'B']]
tm.assert_frame_equal(result, self.frame.loc[:, ['A', 'B']])
df = self.frame[:3]
result = df[lambda x: [True, False, True]]
tm.assert_frame_equal(result, self.frame.iloc[[0, 2], :])
def test_setitem_list(self):
self.frame['E'] = 'foo'
data = self.frame[['A', 'B']]
self.frame[['B', 'A']] = data
assert_series_equal(self.frame['B'], data['A'], check_names=False)
assert_series_equal(self.frame['A'], data['B'], check_names=False)
with tm.assert_raises_regex(ValueError,
'Columns must be same length as key'):
data[['A']] = self.frame[['A', 'B']]
with tm.assert_raises_regex(ValueError, 'Length of values '
'does not match '
'length of index'):
data['A'] = range(len(data.index) - 1)
df = DataFrame(0, lrange(3), ['tt1', 'tt2'], dtype=np.int_)
df.loc[1, ['tt1', 'tt2']] = [1, 2]
result = df.loc[df.index[1], ['tt1', 'tt2']]
expected = Series([1, 2], df.columns, dtype=np.int_, name=1)
assert_series_equal(result, expected)
df['tt1'] = df['tt2'] = '0'
df.loc[df.index[1], ['tt1', 'tt2']] = ['1', '2']
result = df.loc[df.index[1], ['tt1', 'tt2']]
expected = Series(['1', '2'], df.columns, name=1)
assert_series_equal(result, expected)
def test_setitem_list_not_dataframe(self):
data = np.random.randn(len(self.frame), 2)
self.frame[['A', 'B']] = data
assert_almost_equal(self.frame[['A', 'B']].values, data)
def test_setitem_list_of_tuples(self):
tuples = lzip(self.frame['A'], self.frame['B'])
self.frame['tuples'] = tuples
result = self.frame['tuples']
expected = Series(tuples, index=self.frame.index, name='tuples')
assert_series_equal(result, expected)
    def test_setitem_multi_index(self):
# GH7655, test that assigning to a sub-frame of a frame
# with multi-index columns aligns both rows and columns
        it = (['jim', 'joe', 'jolie'], ['first', 'last'],
              ['left', 'center', 'right'])
cols = MultiIndex.from_product(it)
index = pd.date_range('20141006', periods=20)
vals = np.random.randint(1, 1000, (len(index), len(cols)))
df = pd.DataFrame(vals, columns=cols, index=index)
i, j = df.index.values.copy(), it[-1][:]
np.random.shuffle(i)
df['jim'] = df['jolie'].loc[i, ::-1]
assert_frame_equal(df['jim'], df['jolie'])
np.random.shuffle(j)
df[('joe', 'first')] = df[('jolie', 'last')].loc[i, j]
assert_frame_equal(df[('joe', 'first')], df[('jolie', 'last')])
np.random.shuffle(j)
df[('joe', 'last')] = df[('jolie', 'first')].loc[i, j]
assert_frame_equal(df[('joe', 'last')], df[('jolie', 'first')])
def test_setitem_callable(self):
# GH 12533
df = pd.DataFrame({'A': [1, 2, 3, 4], 'B': [5, 6, 7, 8]})
df[lambda x: 'A'] = [11, 12, 13, 14]
exp = pd.DataFrame({'A': [11, 12, 13, 14], 'B': [5, 6, 7, 8]})
tm.assert_frame_equal(df, exp)
def test_setitem_other_callable(self):
# GH 13299
inc = lambda x: x + 1
df = pd.DataFrame([[-1, 1], [1, -1]])
df[df > 0] = inc
expected = pd.DataFrame([[-1, inc], [inc, -1]])
tm.assert_frame_equal(df, expected)
def test_getitem_boolean(self):
# boolean indexing
d = self.tsframe.index[10]
indexer = self.tsframe.index > d
indexer_obj = indexer.astype(object)
subindex = self.tsframe.index[indexer]
subframe = self.tsframe[indexer]
tm.assert_index_equal(subindex, subframe.index)
with tm.assert_raises_regex(ValueError, 'Item wrong length'):
self.tsframe[indexer[:-1]]
subframe_obj = self.tsframe[indexer_obj]
assert_frame_equal(subframe_obj, subframe)
with tm.assert_raises_regex(ValueError, 'boolean values only'):
self.tsframe[self.tsframe]
# test that Series work
indexer_obj = Series(indexer_obj, self.tsframe.index)
subframe_obj = self.tsframe[indexer_obj]
assert_frame_equal(subframe_obj, subframe)
# test that Series indexers reindex
        # we are producing a warning that the passed boolean key is not
        # the same as the given index, so we will reindex; not sure this
        # is really necessary
with tm.assert_produces_warning(UserWarning, check_stacklevel=False):
indexer_obj = indexer_obj.reindex(self.tsframe.index[::-1])
subframe_obj = self.tsframe[indexer_obj]
assert_frame_equal(subframe_obj, subframe)
# test df[df > 0]
for df in [self.tsframe, self.mixed_frame,
self.mixed_float, self.mixed_int]:
data = df._get_numeric_data()
bif = df[df > 0]
bifw = DataFrame(dict([(c, np.where(data[c] > 0, data[c], np.nan))
for c in data.columns]),
index=data.index, columns=data.columns)
# add back other columns to compare
for c in df.columns:
if c not in bifw:
bifw[c] = df[c]
bifw = bifw.reindex(columns=df.columns)
assert_frame_equal(bif, bifw, check_dtype=False)
for c in df.columns:
if bif[c].dtype != bifw[c].dtype:
assert bif[c].dtype == df[c].dtype
def test_getitem_boolean_casting(self):
# don't upcast if we don't need to
df = self.tsframe.copy()
df['E'] = 1
df['E'] = df['E'].astype('int32')
df['E1'] = df['E'].copy()
df['F'] = 1
df['F'] = df['F'].astype('int64')
df['F1'] = df['F'].copy()
casted = df[df > 0]
result = casted.get_dtype_counts()
expected = Series({'float64': 4, 'int32': 2, 'int64': 2})
assert_series_equal(result, expected)
# int block splitting
df.loc[df.index[1:3], ['E1', 'F1']] = 0
casted = df[df > 0]
result = casted.get_dtype_counts()
expected = Series({'float64': 6, 'int32': 1, 'int64': 1})
assert_series_equal(result, expected)
# where dtype conversions
# GH 3733
df = DataFrame(data=np.random.randn(100, 50))
df = df.where(df > 0) # create nans
bools = df > 0
mask = isna(df)
expected = bools.astype(float).mask(mask)
result = bools.mask(mask)
assert_frame_equal(result, expected)
def test_getitem_boolean_list(self):
df = DataFrame(np.arange(12).reshape(3, 4))
def _checkit(lst):
result = df[lst]
expected = df.loc[df.index[lst]]
assert_frame_equal(result, expected)
_checkit([True, False, True])
_checkit([True, True, True])
_checkit([False, False, False])
def test_getitem_boolean_iadd(self):
arr = randn(5, 5)
df = DataFrame(arr.copy(), columns=['A', 'B', 'C', 'D', 'E'])
df[df < 0] += 1
arr[arr < 0] += 1
assert_almost_equal(df.values, arr)
def test_boolean_index_empty_corner(self):
# #2096
blah = DataFrame(np.empty([0, 1]), columns=['A'],
index=DatetimeIndex([]))
# both of these should succeed trivially
k = np.array([], bool)
blah[k]
blah[k] = 0
def test_getitem_ix_mixed_integer(self):
df = DataFrame(np.random.randn(4, 3),
index=[1, 10, 'C', 'E'], columns=[1, 2, 3])
result = df.iloc[:-1]
expected = df.loc[df.index[:-1]]
assert_frame_equal(result, expected)
with catch_warnings(record=True):
result = df.ix[[1, 10]]
expected = df.ix[Index([1, 10], dtype=object)]
assert_frame_equal(result, expected)
# 11320
df = pd.DataFrame({"rna": (1.5, 2.2, 3.2, 4.5),
-1000: [11, 21, 36, 40],
0: [10, 22, 43, 34],
1000: [0, 10, 20, 30]},
columns=['rna', -1000, 0, 1000])
result = df[[1000]]
expected = df.iloc[:, [3]]
assert_frame_equal(result, expected)
result = df[[-1000]]
expected = df.iloc[:, [1]]
assert_frame_equal(result, expected)
def test_getitem_setitem_ix_negative_integers(self):
with catch_warnings(record=True):
result = self.frame.ix[:, -1]
assert_series_equal(result, self.frame['D'])
with catch_warnings(record=True):
result = self.frame.ix[:, [-1]]
assert_frame_equal(result, self.frame[['D']])
with catch_warnings(record=True):
result = self.frame.ix[:, [-1, -2]]
assert_frame_equal(result, self.frame[['D', 'C']])
with catch_warnings(record=True):
self.frame.ix[:, [-1]] = 0
assert (self.frame['D'] == 0).all()
df = DataFrame(np.random.randn(8, 4))
with catch_warnings(record=True):
assert isna(df.ix[:, [-1]].values).all()
# #1942
a = DataFrame(randn(20, 2), index=[chr(x + 65) for x in range(20)])
with catch_warnings(record=True):
a.ix[-1] = a.ix[-2]
with catch_warnings(record=True):
assert_series_equal(a.ix[-1], a.ix[-2], check_names=False)
assert a.ix[-1].name == 'T'
assert a.ix[-2].name == 'S'
def test_getattr(self):
assert_series_equal(self.frame.A, self.frame['A'])
pytest.raises(AttributeError, getattr, self.frame,
'NONEXISTENT_NAME')
def test_setattr_column(self):
df = DataFrame({'foobar': 1}, index=lrange(10))
df.foobar = 5
assert (df.foobar == 5).all()
def test_setitem(self):
# not sure what else to do here
series = self.frame['A'][::2]
self.frame['col5'] = series
assert 'col5' in self.frame
assert len(series) == 15
assert len(self.frame) == 30
exp = np.ravel(np.column_stack((series.values, [np.nan] * 15)))
exp = Series(exp, index=self.frame.index, name='col5')
tm.assert_series_equal(self.frame['col5'], exp)
series = self.frame['A']
self.frame['col6'] = series
tm.assert_series_equal(series, self.frame['col6'], check_names=False)
with pytest.raises(KeyError):
self.frame[randn(len(self.frame) + 1)] = 1
# set ndarray
arr = randn(len(self.frame))
self.frame['col9'] = arr
assert (self.frame['col9'] == arr).all()
        self.frame['col7'] = 5
        assert (self.frame['col7'] == 5).all()
        self.frame['col0'] = 3.14
        assert (self.frame['col0'] == 3.14).all()
        self.frame['col8'] = 'foo'
        assert (self.frame['col8'] == 'foo').all()
# this is partially a view (e.g. some blocks are view)
# so raise/warn
smaller = self.frame[:2]
def f():
smaller['col10'] = ['1', '2']
pytest.raises(com.SettingWithCopyError, f)
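        # note the assignment still landed on the slice even though the
        # chained-assignment error was raised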
assert smaller['col10'].dtype == np.object_
assert (smaller['col10'] == ['1', '2']).all()
# with a dtype
for dtype in ['int32', 'int64', 'float32', 'float64']:
self.frame[dtype] = np.array(arr, dtype=dtype)
assert self.frame[dtype].dtype.name == dtype
# dtype changing GH4204
df = DataFrame([[0, 0]])
df.iloc[0] = np.nan
expected = DataFrame([[np.nan, np.nan]])
assert_frame_equal(df, expected)
df = DataFrame([[0, 0]])
df.loc[0] = np.nan
assert_frame_equal(df, expected)
def test_setitem_tuple(self):
self.frame['A', 'B'] = self.frame['A']
        assert_series_equal(self.frame['A', 'B'], self.frame['A'],
                            check_names=False)
def test_setitem_always_copy(self):
s = self.frame['A'].copy()
self.frame['E'] = s
self.frame['E'][5:10] = nan
assert notna(s[5:10]).all()
def test_setitem_boolean(self):
df = self.frame.copy()
values = self.frame.values
df[df['A'] > 0] = 4
values[values[:, 0] > 0] = 4
assert_almost_equal(df.values, values)
# test that column reindexing works
series = df['A'] == 4
series = series.reindex(df.index[::-1])
df[series] = 1
values[values[:, 0] == 4] = 1
assert_almost_equal(df.values, values)
df[df > 0] = 5
values[values > 0] = 5
assert_almost_equal(df.values, values)
df[df == 5] = 0
values[values == 5] = 0
assert_almost_equal(df.values, values)
# a df that needs alignment first
df[df[:-1] < 0] = 2
np.putmask(values[:-1], values[:-1] < 0, 2)
assert_almost_equal(df.values, values)
# indexed with same shape but rows-reversed df
df[df[::-1] == 2] = 3
values[values == 2] = 3
assert_almost_equal(df.values, values)
with tm.assert_raises_regex(TypeError, 'Must pass '
'DataFrame with '
'boolean values only'):
df[df * 0] = 2
# index with DataFrame
mask = df > np.abs(df)
expected = df.copy()
df[df > np.abs(df)] = nan
expected.values[mask.values] = nan
assert_frame_equal(df, expected)
# set from DataFrame
expected = df.copy()
df[df > np.abs(df)] = df * 2
np.putmask(expected.values, mask.values, df.values * 2)
assert_frame_equal(df, expected)
def test_setitem_cast(self):
self.frame['D'] = self.frame['D'].astype('i8')
assert self.frame['D'].dtype == np.int64
# #669, should not cast?
# this is now set to int64, which means a replacement of the column to
# the value dtype (and nothing to do with the existing dtype)
self.frame['B'] = 0
assert self.frame['B'].dtype == np.int64
# cast if pass array of course
self.frame['B'] = np.arange(len(self.frame))
assert issubclass(self.frame['B'].dtype.type, np.integer)
self.frame['foo'] = 'bar'
self.frame['foo'] = 0
assert self.frame['foo'].dtype == np.int64
self.frame['foo'] = 'bar'
self.frame['foo'] = 2.5
assert self.frame['foo'].dtype == np.float64
self.frame['something'] = 0
assert self.frame['something'].dtype == np.int64
self.frame['something'] = 2
assert self.frame['something'].dtype == np.int64
self.frame['something'] = 2.5
assert self.frame['something'].dtype == np.float64
# GH 7704
# dtype conversion on setting
df = DataFrame(np.random.rand(30, 3), columns=tuple('ABC'))
df['event'] = np.nan
df.loc[10, 'event'] = 'foo'
result = df.get_dtype_counts().sort_values()
expected = Series({'float64': 3, 'object': 1}).sort_values()
assert_series_equal(result, expected)
# Test that data type is preserved . #5782
df = DataFrame({'one': np.arange(6, dtype=np.int8)})
df.loc[1, 'one'] = 6
assert df.dtypes.one == np.dtype(np.int8)
df.one = np.int8(7)
assert df.dtypes.one == np.dtype(np.int8)
def test_setitem_boolean_column(self):
expected = self.frame.copy()
mask = self.frame['A'] > 0
self.frame.loc[mask, 'B'] = 0
expected.values[mask.values, 1] = 0
assert_frame_equal(self.frame, expected)
def test_setitem_corner(self):
# corner case
df = DataFrame({'B': [1., 2., 3.],
'C': ['a', 'b', 'c']},
index=np.arange(3))
del df['B']
df['B'] = [1., 2., 3.]
assert 'B' in df
assert len(df.columns) == 2
df['A'] = 'beginning'
df['E'] = 'foo'
df['D'] = 'bar'
df[datetime.now()] = 'date'
df[datetime.now()] = 5.
# what to do when empty frame with index
dm = DataFrame(index=self.frame.index)
dm['A'] = 'foo'
dm['B'] = 'bar'
assert len(dm.columns) == 2
assert dm.values.dtype == np.object_
# upcast
dm['C'] = 1
assert dm['C'].dtype == np.int64
dm['E'] = 1.
assert dm['E'].dtype == np.float64
# set existing column
dm['A'] = 'bar'
assert 'bar' == dm['A'][0]
dm = DataFrame(index=np.arange(3))
dm['A'] = 1
dm['foo'] = 'bar'
del dm['foo']
dm['foo'] = 'bar'
assert dm['foo'].dtype == np.object_
        dm['coercible'] = ['1', '2', '3']
        assert dm['coercible'].dtype == np.object_
def test_setitem_corner2(self):
data = {"title": ['foobar', 'bar', 'foobar'] + ['foobar'] * 17,
"cruft": np.random.random(20)}
df = DataFrame(data)
ix = df[df['title'] == 'bar'].index
df.loc[ix, ['title']] = 'foobar'
df.loc[ix, ['cruft']] = 0
assert df.loc[1, 'title'] == 'foobar'
assert df.loc[1, 'cruft'] == 0
def test_setitem_ambig(self):
# Difficulties with mixed-type data
from decimal import Decimal
# Created as float type
dm = DataFrame(index=lrange(3), columns=lrange(3))
        coercible_series = Series([Decimal(1) for _ in range(3)],
                                  index=lrange(3))
        uncoercible_series = Series(['foo', 'bzr', 'baz'], index=lrange(3))
        dm[0] = np.ones(3)
        assert len(dm.columns) == 3
        dm[1] = coercible_series
        assert len(dm.columns) == 3
        dm[2] = uncoercible_series
assert len(dm.columns) == 3
assert dm[2].dtype == np.object_
def test_setitem_clear_caches(self):
# see gh-304
df = DataFrame({'x': [1.1, 2.1, 3.1, 4.1], 'y': [5.1, 6.1, 7.1, 8.1]},
index=[0, 1, 2, 3])
df.insert(2, 'z', np.nan)
# cache it
foo = df['z']
df.loc[df.index[2:], 'z'] = 42
expected = Series([np.nan, np.nan, 42, 42], index=df.index, name='z')
assert df['z'] is not foo
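        # i.e. the .loc write replaced the underlying block, so the stale
        # cached Series was dropped from the item cache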
tm.assert_series_equal(df['z'], expected)
def test_setitem_None(self):
# GH #766
self.frame[None] = self.frame['A']
        assert_series_equal(self.frame.iloc[:, -1], self.frame['A'],
                            check_names=False)
        assert_series_equal(self.frame.loc[:, None], self.frame['A'],
                            check_names=False)
        assert_series_equal(self.frame[None], self.frame['A'],
                            check_names=False)
repr(self.frame)
def test_setitem_empty(self):
# GH 9596
df = pd.DataFrame({'a': ['1', '2', '3'],
'b': ['11', '22', '33'],
'c': ['111', '222', '333']})
result = df.copy()
result.loc[result.b.isna(), 'a'] = result.a
assert_frame_equal(result, df)
def test_setitem_empty_frame_with_boolean(self):
# Test for issue #10126
for dtype in ('float', 'int64'):
for df in [
pd.DataFrame(dtype=dtype),
pd.DataFrame(dtype=dtype, index=[1]),
pd.DataFrame(dtype=dtype, columns=['A']),
]:
df2 = df.copy()
df[df > df2] = 47
assert_frame_equal(df, df2)
def test_setitem_scalars_no_index(self):
# GH16823 / 17894
df = DataFrame()
df['foo'] = 1
expected = DataFrame(columns=['foo']).astype(np.int64)
assert_frame_equal(df, expected)
def test_getitem_empty_frame_with_boolean(self):
# Test for issue #11859
df = pd.DataFrame()
df2 = df[df > 0]
assert_frame_equal(df, df2)
def test_delitem_corner(self):
f = self.frame.copy()
del f['D']
assert len(f.columns) == 3
pytest.raises(KeyError, f.__delitem__, 'D')
del f['B']
assert len(f.columns) == 2
def test_getitem_fancy_2d(self):
f = self.frame
with catch_warnings(record=True):
assert_frame_equal(f.ix[:, ['B', 'A']],
f.reindex(columns=['B', 'A']))
subidx = self.frame.index[[5, 4, 1]]
with catch_warnings(record=True):
assert_frame_equal(f.ix[subidx, ['B', 'A']],
f.reindex(index=subidx, columns=['B', 'A']))
# slicing rows, etc.
with catch_warnings(record=True):
assert_frame_equal(f.ix[5:10], f[5:10])
assert_frame_equal(f.ix[5:10, :], f[5:10])
assert_frame_equal(f.ix[:5, ['A', 'B']],
f.reindex(index=f.index[:5],
columns=['A', 'B']))
# slice rows with labels, inclusive!
with catch_warnings(record=True):
expected = f.ix[5:11]
result = f.ix[f.index[5]:f.index[10]]
assert_frame_equal(expected, result)
# slice columns
with catch_warnings(record=True):
assert_frame_equal(f.ix[:, :2], f.reindex(columns=['A', 'B']))
# get view
with catch_warnings(record=True):
exp = f.copy()
f.ix[5:10].values[:] = 5
exp.values[5:10] = 5
assert_frame_equal(f, exp)
with catch_warnings(record=True):
pytest.raises(ValueError, f.ix.__getitem__, f > 0.5)
def test_slice_floats(self):
index = [52195.504153, 52196.303147, 52198.369883]
df = DataFrame(np.random.rand(3, 2), index=index)
s1 = df.loc[52195.1:52196.5]
assert len(s1) == 2
s1 = df.loc[52195.1:52196.6]
assert len(s1) == 2
s1 = df.loc[52195.1:52198.9]
assert len(s1) == 3
def test_getitem_fancy_slice_integers_step(self):
df = DataFrame(np.random.randn(10, 5))
# this is OK
result = df.iloc[:8:2] # noqa
df.iloc[:8:2] = np.nan
assert isna(df.iloc[:8:2]).values.all()
def test_getitem_setitem_integer_slice_keyerrors(self):
df = DataFrame(np.random.randn(10, 5), index=lrange(0, 20, 2))
# this is OK
cp = df.copy()
cp.iloc[4:10] = 0
assert (cp.iloc[4:10] == 0).values.all()
# so is this
cp = df.copy()
cp.iloc[3:11] = 0
assert (cp.iloc[3:11] == 0).values.all()
result = df.iloc[2:6]
result2 = df.loc[3:11]
expected = df.reindex([4, 6, 8, 10])
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
# non-monotonic, raise KeyError
df2 = df.iloc[lrange(5) + lrange(5, 10)[::-1]]
pytest.raises(KeyError, df2.loc.__getitem__, slice(3, 11))
pytest.raises(KeyError, df2.loc.__setitem__, slice(3, 11), 0)
def test_setitem_fancy_2d(self):
# case 1
frame = self.frame.copy()
expected = frame.copy()
with catch_warnings(record=True):
frame.ix[:, ['B', 'A']] = 1
expected['B'] = 1.
expected['A'] = 1.
assert_frame_equal(frame, expected)
# case 2
frame = self.frame.copy()
frame2 = self.frame.copy()
expected = frame.copy()
subidx = self.frame.index[[5, 4, 1]]
values = randn(3, 2)
with catch_warnings(record=True):
frame.ix[subidx, ['B', 'A']] = values
frame2.ix[[5, 4, 1], ['B', 'A']] = values
expected['B'].ix[subidx] = values[:, 0]
expected['A'].ix[subidx] = values[:, 1]
assert_frame_equal(frame, expected)
assert_frame_equal(frame2, expected)
# case 3: slicing rows, etc.
frame = self.frame.copy()
with catch_warnings(record=True):
expected1 = self.frame.copy()
frame.ix[5:10] = 1.
expected1.values[5:10] = 1.
assert_frame_equal(frame, expected1)
with catch_warnings(record=True):
expected2 = self.frame.copy()
arr = randn(5, len(frame.columns))
frame.ix[5:10] = arr
expected2.values[5:10] = arr
assert_frame_equal(frame, expected2)
# case 4
with catch_warnings(record=True):
frame = self.frame.copy()
frame.ix[5:10, :] = 1.
assert_frame_equal(frame, expected1)
frame.ix[5:10, :] = arr
assert_frame_equal(frame, expected2)
# case 5
with catch_warnings(record=True):
frame = self.frame.copy()
frame2 = self.frame.copy()
expected = self.frame.copy()
values = randn(5, 2)
frame.ix[:5, ['A', 'B']] = values
expected['A'][:5] = values[:, 0]
expected['B'][:5] = values[:, 1]
assert_frame_equal(frame, expected)
with catch_warnings(record=True):
frame2.ix[:5, [0, 1]] = values
assert_frame_equal(frame2, expected)
# case 6: slice rows with labels, inclusive!
with catch_warnings(record=True):
frame = self.frame.copy()
expected = self.frame.copy()
frame.ix[frame.index[5]:frame.index[10]] = 5.
expected.values[5:11] = 5
assert_frame_equal(frame, expected)
# case 7: slice columns
with catch_warnings(record=True):
frame = self.frame.copy()
frame2 = self.frame.copy()
expected = self.frame.copy()
# slice indices
frame.ix[:, 1:3] = 4.
expected.values[:, 1:3] = 4.
assert_frame_equal(frame, expected)
# slice with labels
frame.ix[:, 'B':'C'] = 4.
assert_frame_equal(frame, expected)
# new corner case of boolean slicing / setting
frame = DataFrame(lzip([2, 3, 9, 6, 7], [np.nan] * 5),
columns=['a', 'b'])
lst = [100]
lst.extend([np.nan] * 4)
expected = DataFrame(lzip([100, 3, 9, 6, 7], lst),
columns=['a', 'b'])
frame[frame['a'] == 2] = 100
assert_frame_equal(frame, expected)
def test_fancy_getitem_slice_mixed(self):
sliced = self.mixed_frame.iloc[:, -3:]
assert sliced['D'].dtype == np.float64
# get view with single block
# setting it triggers setting with copy
sliced = self.frame.iloc[:, -3:]
def f():
sliced['C'] = 4.
pytest.raises(com.SettingWithCopyError, f)
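        # the single-block slice is a view, so the write still reached the
        # parent frame despite the error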
assert (self.frame['C'] == 4).all()
def test_fancy_setitem_int_labels(self):
# integer index defers to label-based indexing
df = DataFrame(np.random.randn(10, 5), index=np.arange(0, 20, 2))
with catch_warnings(record=True):
tmp = df.copy()
exp = df.copy()
tmp.ix[[0, 2, 4]] = 5
exp.values[:3] = 5
assert_frame_equal(tmp, exp)
with catch_warnings(record=True):
tmp = df.copy()
exp = df.copy()
tmp.ix[6] = 5
exp.values[3] = 5
assert_frame_equal(tmp, exp)
with catch_warnings(record=True):
tmp = df.copy()
exp = df.copy()
tmp.ix[:, 2] = 5
# tmp correctly sets the dtype
# so match the exp way
exp[2] = 5
assert_frame_equal(tmp, exp)
def test_fancy_getitem_int_labels(self):
df = DataFrame(np.random.randn(10, 5), index=np.arange(0, 20, 2))
with catch_warnings(record=True):
result = df.ix[[4, 2, 0], [2, 0]]
expected = df.reindex(index=[4, 2, 0], columns=[2, 0])
assert_frame_equal(result, expected)
with catch_warnings(record=True):
result = df.ix[[4, 2, 0]]
expected = df.reindex(index=[4, 2, 0])
assert_frame_equal(result, expected)
with catch_warnings(record=True):
result = df.ix[4]
expected = df.xs(4)
assert_series_equal(result, expected)
with catch_warnings(record=True):
result = df.ix[:, 3]
expected = df[3]
assert_series_equal(result, expected)
def test_fancy_index_int_labels_exceptions(self):
df = DataFrame(np.random.randn(10, 5), index=np.arange(0, 20, 2))
with catch_warnings(record=True):
# labels that aren't contained
pytest.raises(KeyError, df.ix.__setitem__,
([0, 1, 2], [2, 3, 4]), 5)
# try to set indices not contained in frame
pytest.raises(KeyError, self.frame.ix.__setitem__,
['foo', 'bar', 'baz'], 1)
pytest.raises(KeyError, self.frame.ix.__setitem__,
(slice(None, None), ['E']), 1)
# partial setting now allows this GH2578
# pytest.raises(KeyError, self.frame.ix.__setitem__,
# (slice(None, None), 'E'), 1)
def test_setitem_fancy_mixed_2d(self):
with catch_warnings(record=True):
self.mixed_frame.ix[:5, ['C', 'B', 'A']] = 5
result = self.mixed_frame.ix[:5, ['C', 'B', 'A']]
assert (result.values == 5).all()
self.mixed_frame.ix[5] = np.nan
assert isna(self.mixed_frame.ix[5]).all()
self.mixed_frame.ix[5] = self.mixed_frame.ix[6]
assert_series_equal(self.mixed_frame.ix[5], self.mixed_frame.ix[6],
check_names=False)
# #1432
with catch_warnings(record=True):
df = DataFrame({1: [1., 2., 3.],
2: [3, 4, 5]})
assert df._is_mixed_type
df.ix[1] = [5, 10]
expected = DataFrame({1: [1., 5., 3.],
2: [3, 10, 5]})
assert_frame_equal(df, expected)
def test_ix_align(self):
b = Series(randn(10), name=0).sort_values()
df_orig = DataFrame(randn(10, 4))
df = df_orig.copy()
with catch_warnings(record=True):
df.ix[:, 0] = b
assert_series_equal(df.ix[:, 0].reindex(b.index), b)
with catch_warnings(record=True):
dft = df_orig.T
dft.ix[0, :] = b
assert_series_equal(dft.ix[0, :].reindex(b.index), b)
with catch_warnings(record=True):
df = df_orig.copy()
df.ix[:5, 0] = b
s = df.ix[:5, 0]
assert_series_equal(s, b.reindex(s.index))
with catch_warnings(record=True):
dft = df_orig.T
dft.ix[0, :5] = b
s = dft.ix[0, :5]
assert_series_equal(s, b.reindex(s.index))
with catch_warnings(record=True):
df = df_orig.copy()
idx = [0, 1, 3, 5]
df.ix[idx, 0] = b
s = df.ix[idx, 0]
assert_series_equal(s, b.reindex(s.index))
with catch_warnings(record=True):
dft = df_orig.T
dft.ix[0, idx] = b
s = dft.ix[0, idx]
assert_series_equal(s, b.reindex(s.index))
def test_ix_frame_align(self):
b = DataFrame(np.random.randn(3, 4))
df_orig = DataFrame(randn(10, 4))
df = df_orig.copy()
with catch_warnings(record=True):
df.ix[:3] = b
out = b.ix[:3]
assert_frame_equal(out, b)
b.sort_index(inplace=True)
with catch_warnings(record=True):
df = df_orig.copy()
df.ix[[0, 1, 2]] = b
out = df.ix[[0, 1, 2]].reindex(b.index)
assert_frame_equal(out, b)
with catch_warnings(record=True):
df = df_orig.copy()
df.ix[:3] = b
out = df.ix[:3]
assert_frame_equal(out, b.reindex(out.index))
def test_getitem_setitem_non_ix_labels(self):
df = tm.makeTimeDataFrame()
start, end = df.index[[5, 10]]
result = df.loc[start:end]
result2 = df[start:end]
expected = df[5:11]
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
result = df.copy()
result.loc[start:end] = 0
result2 = df.copy()
result2[start:end] = 0
expected = df.copy()
expected[5:11] = 0
assert_frame_equal(result, expected)
assert_frame_equal(result2, expected)
def test_ix_multi_take(self):
df = DataFrame(np.random.randn(3, 2))
rs = df.loc[df.index == 0, :]
xp = df.reindex([0])
assert_frame_equal(rs, xp)
""" #1321
df = DataFrame(np.random.randn(3, 2))
rs = df.loc[df.index==0, df.columns==1]
xp = df.reindex([0], [1])
assert_frame_equal(rs, xp)
"""
def test_ix_multi_take_nonint_index(self):
df = DataFrame(np.random.randn(3, 2), index=['x', 'y', 'z'],
columns=['a', 'b'])
with catch_warnings(record=True):
rs = df.ix[[0], [0]]
xp = df.reindex(['x'], columns=['a'])
assert_frame_equal(rs, xp)
def test_ix_multi_take_multiindex(self):
df = DataFrame(np.random.randn(3, 2), index=['x', 'y', 'z'],
columns=[['a', 'b'], ['1', '2']])
with catch_warnings(record=True):
rs = df.ix[[0], [0]]
xp = df.reindex(['x'], columns=[('a', '1')])
assert_frame_equal(rs, xp)
def test_ix_dup(self):
idx = Index(['a', 'a', 'b', 'c', 'd', 'd'])
df = DataFrame(np.random.randn(len(idx), 3), idx)
with catch_warnings(record=True):
sub = df.ix[:'d']
assert_frame_equal(sub, df)
with catch_warnings(record=True):
sub = df.ix['a':'c']
assert_frame_equal(sub, df.ix[0:4])
with catch_warnings(record=True):
sub = df.ix['b':'d']
assert_frame_equal(sub, df.ix[2:])
def test_getitem_fancy_1d(self):
f = self.frame
# return self if no slicing...for now
with catch_warnings(record=True):
assert f.ix[:, :] is f
# low dimensional slice
with catch_warnings(record=True):
xs1 = f.ix[2, ['C', 'B', 'A']]
xs2 = f.xs(f.index[2]).reindex(['C', 'B', 'A'])
tm.assert_series_equal(xs1, xs2)
with catch_warnings(record=True):
ts1 = f.ix[5:10, 2]
ts2 = f[f.columns[2]][5:10]
tm.assert_series_equal(ts1, ts2)
# positional xs
with catch_warnings(record=True):
xs1 = f.ix[0]
xs2 = f.xs(f.index[0])
tm.assert_series_equal(xs1, xs2)
with catch_warnings(record=True):
xs1 = f.ix[f.index[5]]
xs2 = f.xs(f.index[5])
tm.assert_series_equal(xs1, xs2)
# single column
with catch_warnings(record=True):
assert_series_equal(f.ix[:, 'A'], f['A'])
# return view
with catch_warnings(record=True):
exp = f.copy()
exp.values[5] = 4
f.ix[5][:] = 4
tm.assert_frame_equal(exp, f)
with catch_warnings(record=True):
exp.values[:, 1] = 6
f.ix[:, 1][:] = 6
tm.assert_frame_equal(exp, f)
# slice of mixed-frame
with catch_warnings(record=True):
xs = self.mixed_frame.ix[5]
exp = self.mixed_frame.xs(self.mixed_frame.index[5])
tm.assert_series_equal(xs, exp)
def test_setitem_fancy_1d(self):
# case 1: set cross-section for indices
frame = self.frame.copy()
expected = self.frame.copy()
with catch_warnings(record=True):
frame.ix[2, ['C', 'B', 'A']] = [1., 2., 3.]
expected['C'][2] = 1.
expected['B'][2] = 2.
expected['A'][2] = 3.
assert_frame_equal(frame, expected)
with catch_warnings(record=True):
frame2 = self.frame.copy()
frame2.ix[2, [3, 2, 1]] = [1., 2., 3.]
assert_frame_equal(frame, expected)
# case 2, set a section of a column
frame = self.frame.copy()
expected = self.frame.copy()
with catch_warnings(record=True):
vals = randn(5)
expected.values[5:10, 2] = vals
frame.ix[5:10, 2] = vals
assert_frame_equal(frame, expected)
with catch_warnings(record=True):
frame2 = self.frame.copy()
frame2.ix[5:10, 'B'] = vals
assert_frame_equal(frame, expected)
# case 3: full xs
frame = self.frame.copy()
expected = self.frame.copy()
with catch_warnings(record=True):
frame.ix[4] = 5.
expected.values[4] = 5.
assert_frame_equal(frame, expected)
with catch_warnings(record=True):
frame.ix[frame.index[4]] = 6.
expected.values[4] = 6.
assert_frame_equal(frame, expected)
# single column
frame = self.frame.copy()
expected = self.frame.copy()
with catch_warnings(record=True):
frame.ix[:, 'A'] = 7.
expected['A'] = 7.
assert_frame_equal(frame, expected)
def test_getitem_fancy_scalar(self):
f = self.frame
ix = f.loc
# individual value
for col in f.columns:
ts = f[col]
for idx in f.index[::5]:
assert ix[idx, col] == ts[idx]
def test_setitem_fancy_scalar(self):
f = self.frame
expected = self.frame.copy()
ix = f.loc
# individual value
for j, col in enumerate(f.columns):
ts = f[col] # noqa
for idx in f.index[::5]:
i = f.index.get_loc(idx)
val = randn()
expected.values[i, j] = val
ix[idx, col] = val
assert_frame_equal(f, expected)
def test_getitem_fancy_boolean(self):
f = self.frame
ix = f.loc
expected = f.reindex(columns=['B', 'D'])
result = ix[:, [False, True, False, True]]
assert_frame_equal(result, expected)
expected = f.reindex(index=f.index[5:10], columns=['B', 'D'])
result = ix[f.index[5:10], [False, True, False, True]]
assert_frame_equal(result, expected)
boolvec = f.index > f.index[7]
expected = f.reindex(index=f.index[boolvec])
result = ix[boolvec]
assert_frame_equal(result, expected)
result = ix[boolvec, :]
assert_frame_equal(result, expected)
result = ix[boolvec, f.columns[2:]]
expected = f.reindex(index=f.index[boolvec],
columns=['C', 'D'])
assert_frame_equal(result, expected)
def test_setitem_fancy_boolean(self):
# from 2d, set with booleans
frame = self.frame.copy()
expected = self.frame.copy()
mask = frame['A'] > 0
frame.loc[mask] = 0.
expected.values[mask.values] = 0.
assert_frame_equal(frame, expected)
frame = self.frame.copy()
expected = self.frame.copy()
frame.loc[mask, ['A', 'B']] = 0.
expected.values[mask.values, :2] = 0.
assert_frame_equal(frame, expected)
def test_getitem_fancy_ints(self):
result = self.frame.iloc[[1, 4, 7]]
expected = self.frame.loc[self.frame.index[[1, 4, 7]]]
assert_frame_equal(result, expected)
result = self.frame.iloc[:, [2, 0, 1]]
expected = self.frame.loc[:, self.frame.columns[[2, 0, 1]]]
assert_frame_equal(result, expected)
def test_getitem_setitem_fancy_exceptions(self):
ix = self.frame.iloc
with tm.assert_raises_regex(IndexingError, 'Too many indexers'):
ix[:, :, :]
with pytest.raises(IndexingError):
ix[:, :, :] = 1
def test_getitem_setitem_boolean_misaligned(self):
# boolean index misaligned labels
mask = self.frame['A'][::-1] > 1
result = self.frame.loc[mask]
expected = self.frame.loc[mask[::-1]]
assert_frame_equal(result, expected)
cp = self.frame.copy()
expected = self.frame.copy()
cp.loc[mask] = 0
expected.loc[mask] = 0
assert_frame_equal(cp, expected)
def test_getitem_setitem_boolean_multi(self):
df = DataFrame(np.random.randn(3, 2))
# get
k1 = np.array([True, False, True])
k2 = np.array([False, True])
result = df.loc[k1, k2]
expected = df.loc[[0, 2], [1]]
assert_frame_equal(result, expected)
expected = df.copy()
df.loc[np.array([True, False, True]),
np.array([False, True])] = 5
expected.loc[[0, 2], [1]] = 5
assert_frame_equal(df, expected)
def test_getitem_setitem_float_labels(self):
index = Index([1.5, 2, 3, 4, 5])
df = DataFrame(np.random.randn(5, 5), index=index)
result = df.loc[1.5:4]
expected = df.reindex([1.5, 2, 3, 4])
assert_frame_equal(result, expected)
assert len(result) == 4
result = df.loc[4:5]
expected = df.reindex([4, 5]) # reindex with int
assert_frame_equal(result, expected, check_index_type=False)
assert len(result) == 2
result = df.loc[4:5]
expected = df.reindex([4.0, 5.0]) # reindex with float
assert_frame_equal(result, expected)
assert len(result) == 2
# loc_float changes this to work properly
result = df.loc[1:2]
expected = df.iloc[0:2]
assert_frame_equal(result, expected)
df.loc[1:2] = 0
result = df[1:2]
assert (result == 0).all().all()
# #2727
index = Index([1.0, 2.5, 3.5, 4.5, 5.0])
df = DataFrame(np.random.randn(5, 5), index=index)
# positional slicing only via iloc!
pytest.raises(TypeError, lambda: df.iloc[1.0:5])
result = df.iloc[4:5]
expected = df.reindex([5.0])
assert_frame_equal(result, expected)
assert len(result) == 1
cp = df.copy()
def f():
cp.iloc[1.0:5] = 0
pytest.raises(TypeError, f)
def f():
result = cp.iloc[1.0:5] == 0 # noqa
pytest.raises(TypeError, f)
assert result.values.all()
assert (cp.iloc[0:1] == df.iloc[0:1]).values.all()
cp = df.copy()
cp.iloc[4:5] = 0
assert (cp.iloc[4:5] == 0).values.all()
assert (cp.iloc[0:4] == df.iloc[0:4]).values.all()
# float slicing
result = df.loc[1.0:5]
expected = df
assert_frame_equal(result, expected)
assert len(result) == 5
result = df.loc[1.1:5]
expected = df.reindex([2.5, 3.5, 4.5, 5.0])
assert_frame_equal(result, expected)
assert len(result) == 4
result = df.loc[4.51:5]
expected = df.reindex([5.0])
assert_frame_equal(result, expected)
assert len(result) == 1
result = df.loc[1.0:5.0]
expected = df.reindex([1.0, 2.5, 3.5, 4.5, 5.0])
assert_frame_equal(result, expected)
assert len(result) == 5
cp = df.copy()
cp.loc[1.0:5.0] = 0
result = cp.loc[1.0:5.0]
assert (result == 0).values.all()
def test_setitem_single_column_mixed(self):
df = DataFrame(randn(5, 3), index=['a', 'b', 'c', 'd', 'e'],
columns=['foo', 'bar', 'baz'])
df['str'] = 'qux'
df.loc[df.index[::2], 'str'] = nan
expected = np.array([nan, 'qux', nan, 'qux', nan], dtype=object)
assert_almost_equal(df['str'].values, expected)
def test_setitem_single_column_mixed_datetime(self):
df = DataFrame(randn(5, 3), index=['a', 'b', 'c', 'd', 'e'],
columns=['foo', 'bar', 'baz'])
df['timestamp'] = Timestamp('20010102')
# check our dtypes
result = df.get_dtype_counts()
expected = Series({'float64': 3, 'datetime64[ns]': 1})
assert_series_equal(result, expected)
# set an allowable datetime64 type
df.loc['b', 'timestamp'] = iNaT
assert isna(df.loc['b', 'timestamp'])
# allow this syntax
df.loc['c', 'timestamp'] = nan
assert isna(df.loc['c', 'timestamp'])
# allow this syntax
df.loc['d', :] = nan
assert not isna(df.loc['c', :]).all()
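        # i.e. writing NaN across row 'd' must not disturb row 'c'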
# as of GH 3216 this will now work!
# try to set with a list like item
# pytest.raises(
# Exception, df.loc.__setitem__, ('d', 'timestamp'), [nan])
def test_setitem_frame(self):
piece = self.frame.loc[self.frame.index[:2], ['A', 'B']]
self.frame.loc[self.frame.index[-2]:, ['A', 'B']] = piece.values
result = self.frame.loc[self.frame.index[-2:], ['A', 'B']].values
expected = piece.values
assert_almost_equal(result, expected)
# GH 3216
# already aligned
f = self.mixed_frame.copy()
piece = DataFrame([[1., 2.], [3., 4.]],
index=f.index[0:2], columns=['A', 'B'])
key = (slice(None, 2), ['A', 'B'])
f.loc[key] = piece
assert_almost_equal(f.loc[f.index[0:2], ['A', 'B']].values,
piece.values)
# rows unaligned
f = self.mixed_frame.copy()
piece = DataFrame([[1., 2.], [3., 4.], [5., 6.], [7., 8.]],
index=list(f.index[0:2]) + ['foo', 'bar'],
columns=['A', 'B'])
key = (slice(None, 2), ['A', 'B'])
f.loc[key] = piece
assert_almost_equal(f.loc[f.index[0:2:], ['A', 'B']].values,
piece.values[0:2])
# key is unaligned with values
f = self.mixed_frame.copy()
piece = f.loc[f.index[:2], ['A']]
piece.index = f.index[-2:]
key = (slice(-2, None), ['A', 'B'])
f.loc[key] = piece
piece['B'] = np.nan
assert_almost_equal(f.loc[f.index[-2:], ['A', 'B']].values,
piece.values)
# ndarray
f = self.mixed_frame.copy()
piece = self.mixed_frame.loc[f.index[:2], ['A', 'B']]
key = (slice(-2, None), ['A', 'B'])
f.loc[key] = piece.values
assert_almost_equal(f.loc[f.index[-2:], ['A', 'B']].values,
piece.values)
# needs upcasting
df = DataFrame([[1, 2, 'foo'], [3, 4, 'bar']], columns=['A', 'B', 'C'])
df2 = df.copy()
df2.loc[:, ['A', 'B']] = df.loc[:, ['A', 'B']] + 0.5
expected = df.reindex(columns=['A', 'B'])
expected += 0.5
expected['C'] = df['C']
assert_frame_equal(df2, expected)
def test_setitem_frame_align(self):
piece = self.frame.loc[self.frame.index[:2], ['A', 'B']]
piece.index = self.frame.index[-2:]
piece.columns = ['A', 'B']
self.frame.loc[self.frame.index[-2:], ['A', 'B']] = piece
result = self.frame.loc[self.frame.index[-2:], ['A', 'B']].values
expected = piece.values
assert_almost_equal(result, expected)
def test_getitem_setitem_ix_duplicates(self):
# #1201
df = DataFrame(np.random.randn(5, 3),
index=['foo', 'foo', 'bar', 'baz', 'bar'])
result = df.loc['foo']
expected = df[:2]
assert_frame_equal(result, expected)
result = df.loc['bar']
expected = df.iloc[[2, 4]]
assert_frame_equal(result, expected)
result = df.loc['baz']
expected = df.iloc[3]
assert_series_equal(result, expected)
def test_getitem_ix_boolean_duplicates_multiple(self):
# #1201
df = DataFrame(np.random.randn(5, 3),
index=['foo', 'foo', 'bar', 'baz', 'bar'])
result = df.loc[['bar']]
exp = df.iloc[[2, 4]]
assert_frame_equal(result, exp)
result = df.loc[df[1] > 0]
exp = df[df[1] > 0]
assert_frame_equal(result, exp)
result = df.loc[df[0] > 0]
exp = df[df[0] > 0]
assert_frame_equal(result, exp)
def test_getitem_setitem_ix_bool_keyerror(self):
# #2199
df = DataFrame({'a': [1, 2, 3]})
pytest.raises(KeyError, df.loc.__getitem__, False)
pytest.raises(KeyError, df.loc.__getitem__, True)
pytest.raises(KeyError, df.loc.__setitem__, False, 0)
pytest.raises(KeyError, df.loc.__setitem__, True, 0)
def test_getitem_list_duplicates(self):
# #1943
df = DataFrame(np.random.randn(4, 4), columns=list('AABC'))
df.columns.name = 'foo'
result = df[['B', 'C']]
assert result.columns.name == 'foo'
expected = df.iloc[:, 2:]
assert_frame_equal(result, expected)
def test_get_value(self):
for idx in self.frame.index:
for col in self.frame.columns:
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result = self.frame.get_value(idx, col)
expected = self.frame[col][idx]
assert result == expected
def test_lookup(self):
def alt(df, rows, cols, dtype):
result = []
for r, c in zip(rows, cols):
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
result.append(df.get_value(r, c))
return np.array(result, dtype=dtype)
def testit(df):
rows = list(df.index) * len(df.columns)
cols = list(df.columns) * len(df.index)
result = df.lookup(rows, cols)
expected = alt(df, rows, cols, dtype=np.object_)
tm.assert_almost_equal(result, expected, check_dtype=False)
testit(self.mixed_frame)
testit(self.frame)
df = DataFrame({'label': ['a', 'b', 'a', 'c'],
'mask_a': [True, True, False, True],
'mask_b': [True, False, False, False],
'mask_c': [False, True, False, True]})
df['mask'] = df.lookup(df.index, 'mask_' + df['label'])
exp_mask = alt(df, df.index, 'mask_' + df['label'], dtype=np.bool_)
tm.assert_series_equal(df['mask'], pd.Series(exp_mask, name='mask'))
assert df['mask'].dtype == np.bool_
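        # lookup(rows, cols) is effectively a vectorized
        # [df.at[r, c] for r, c in zip(rows, cols)]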
with pytest.raises(KeyError):
self.frame.lookup(['xyz'], ['A'])
with pytest.raises(KeyError):
self.frame.lookup([self.frame.index[0]], ['xyz'])
with tm.assert_raises_regex(ValueError, 'same size'):
self.frame.lookup(['a', 'b', 'c'], ['a'])
def test_set_value(self):
for idx in self.frame.index:
for col in self.frame.columns:
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
self.frame.set_value(idx, col, 1)
assert self.frame[col][idx] == 1
def test_set_value_resize(self):
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
res = self.frame.set_value('foobar', 'B', 0)
assert res is self.frame
assert res.index[-1] == 'foobar'
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
assert res.get_value('foobar', 'B') == 0
self.frame.loc['foobar', 'qux'] = 0
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
assert self.frame.get_value('foobar', 'qux') == 0
res = self.frame.copy()
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
res3 = res.set_value('foobar', 'baz', 'sam')
assert res3['baz'].dtype == np.object_
res = self.frame.copy()
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
res3 = res.set_value('foobar', 'baz', True)
assert res3['baz'].dtype == np.object_
res = self.frame.copy()
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
res3 = res.set_value('foobar', 'baz', 5)
assert is_float_dtype(res3['baz'])
assert isna(res3['baz'].drop(['foobar'])).all()
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
pytest.raises(ValueError, res3.set_value, 'foobar', 'baz', 'sam')
def test_set_value_with_index_dtype_change(self):
df_orig = DataFrame(randn(3, 3), index=lrange(3), columns=list('ABC'))
        # this is actually ambiguous, as the 2 is interpreted as a
        # positional index, so the column is not created
df = df_orig.copy()
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
df.set_value('C', 2, 1.0)
assert list(df.index) == list(df_orig.index) + ['C']
# assert list(df.columns) == list(df_orig.columns) + [2]
df = df_orig.copy()
df.loc['C', 2] = 1.0
assert list(df.index) == list(df_orig.index) + ['C']
# assert list(df.columns) == list(df_orig.columns) + [2]
# create both new
df = df_orig.copy()
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
df.set_value('C', 'D', 1.0)
assert list(df.index) == list(df_orig.index) + ['C']
assert list(df.columns) == list(df_orig.columns) + ['D']
df = df_orig.copy()
df.loc['C', 'D'] = 1.0
assert list(df.index) == list(df_orig.index) + ['C']
assert list(df.columns) == list(df_orig.columns) + ['D']
def test_get_set_value_no_partial_indexing(self):
# partial w/ MultiIndex raise exception
index = MultiIndex.from_tuples([(0, 1), (0, 2), (1, 1), (1, 2)])
df = DataFrame(index=index, columns=lrange(4))
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
pytest.raises(KeyError, df.get_value, 0, 1)
def test_single_element_ix_dont_upcast(self):
self.frame['E'] = 1
assert issubclass(self.frame['E'].dtype.type, (int, np.integer))
with catch_warnings(record=True):
result = self.frame.ix[self.frame.index[5], 'E']
assert is_integer(result)
result = self.frame.loc[self.frame.index[5], 'E']
assert is_integer(result)
# GH 11617
df = pd.DataFrame(dict(a=[1.23]))
df["b"] = 666
with catch_warnings(record=True):
result = df.ix[0, "b"]
assert is_integer(result)
result = df.loc[0, "b"]
assert is_integer(result)
expected = Series([666], [0], name='b')
with catch_warnings(record=True):
result = df.ix[[0], "b"]
assert_series_equal(result, expected)
result = df.loc[[0], "b"]
assert_series_equal(result, expected)
def test_iloc_row(self):
df = DataFrame(np.random.randn(10, 4), index=lrange(0, 20, 2))
result = df.iloc[1]
exp = df.loc[2]
assert_series_equal(result, exp)
result = df.iloc[2]
exp = df.loc[4]
assert_series_equal(result, exp)
# slice
result = df.iloc[slice(4, 8)]
expected = df.loc[8:14]
assert_frame_equal(result, expected)
# verify slice is view
# setting it makes it raise/warn
def f():
result[2] = 0.
pytest.raises(com.SettingWithCopyError, f)
exp_col = df[2].copy()
exp_col[4:8] = 0.
assert_series_equal(df[2], exp_col)
# list of integers
result = df.iloc[[1, 2, 4, 6]]
expected = df.reindex(df.index[[1, 2, 4, 6]])
assert_frame_equal(result, expected)
def test_iloc_col(self):
df = DataFrame(np.random.randn(4, 10), columns=lrange(0, 20, 2))
result = df.iloc[:, 1]
exp = df.loc[:, 2]
assert_series_equal(result, exp)
result = df.iloc[:, 2]
exp = df.loc[:, 4]
assert_series_equal(result, exp)
# slice
result = df.iloc[:, slice(4, 8)]
expected = df.loc[:, 8:14]
assert_frame_equal(result, expected)
# verify slice is view
# and that we are setting a copy
def f():
result[8] = 0.
pytest.raises(com.SettingWithCopyError, f)
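        # result is a view here, so the write propagated back to df even
        # though SettingWithCopyError was raised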
assert (df[8] == 0).all()
# list of integers
result = df.iloc[:, [1, 2, 4, 6]]
expected = df.reindex(columns=df.columns[[1, 2, 4, 6]])
assert_frame_equal(result, expected)
def test_iloc_duplicates(self):
df = DataFrame(np.random.rand(3, 3), columns=list('ABC'),
index=list('aab'))
result = df.iloc[0]
with catch_warnings(record=True):
result2 = df.ix[0]
assert isinstance(result, Series)
assert_almost_equal(result.values, df.values[0])
assert_series_equal(result, result2)
with catch_warnings(record=True):
result = df.T.iloc[:, 0]
result2 = df.T.ix[:, 0]
assert isinstance(result, Series)
assert_almost_equal(result.values, df.values[0])
assert_series_equal(result, result2)
# multiindex
df = DataFrame(np.random.randn(3, 3),
columns=[['i', 'i', 'j'], ['A', 'A', 'B']],
index=[['i', 'i', 'j'], ['X', 'X', 'Y']])
with catch_warnings(record=True):
rs = df.iloc[0]
xp = df.ix[0]
assert_series_equal(rs, xp)
with catch_warnings(record=True):
rs = df.iloc[:, 0]
xp = df.T.ix[0]
assert_series_equal(rs, xp)
with catch_warnings(record=True):
rs = df.iloc[:, [0]]
xp = df.ix[:, [0]]
assert_frame_equal(rs, xp)
# #2259
df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=[1, 1, 2])
result = df.iloc[:, [0]]
expected = df.take([0], axis=1)
assert_frame_equal(result, expected)
    def test_iloc_sparse_propagate_fill_value(self):
from pandas.core.sparse.api import SparseDataFrame
df = SparseDataFrame({'A': [999, 1]}, default_fill_value=999)
assert len(df['A'].sp_values) == len(df.iloc[:, 0].sp_values)
def test_iat(self):
for i, row in enumerate(self.frame.index):
for j, col in enumerate(self.frame.columns):
result = self.frame.iat[i, j]
expected = self.frame.at[row, col]
assert result == expected
def test_nested_exception(self):
        # Ignore the strange way of triggering the problem
        # (which may get fixed); it's just a way to trigger
        # the issue of reraising an outer exception without
        # a named argument
df = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6],
"c": [7, 8, 9]}).set_index(["a", "b"])
        lst = list(df.index)
        lst[0] = ["a", "b"]
        df.index = lst
try:
repr(df)
except Exception as e:
assert type(e) != UnboundLocalError
def test_reindex_methods(self):
df = pd.DataFrame({'x': list(range(5))})
target = np.array([-0.1, 0.9, 1.1, 1.5])
for method, expected_values in [('nearest', [0, 1, 1, 2]),
('pad', [np.nan, 0, 1, 1]),
('backfill', [0, 1, 2, 2])]:
expected = pd.DataFrame({'x': expected_values}, index=target)
actual = df.reindex(target, method=method)
assert_frame_equal(expected, actual)
actual = df.reindex_like(df, method=method, tolerance=0)
assert_frame_equal(df, actual)
actual = df.reindex_like(df, method=method, tolerance=[0, 0, 0, 0])
assert_frame_equal(df, actual)
actual = df.reindex(target, method=method, tolerance=1)
assert_frame_equal(expected, actual)
actual = df.reindex(target, method=method, tolerance=[1, 1, 1, 1])
assert_frame_equal(expected, actual)
e2 = expected[::-1]
actual = df.reindex(target[::-1], method=method)
assert_frame_equal(e2, actual)
new_order = [3, 0, 2, 1]
e2 = expected.iloc[new_order]
actual = df.reindex(target[new_order], method=method)
assert_frame_equal(e2, actual)
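            # reversing the frame flips the fill direction, so pad on the
            # reversed frame should match backfill on the original (and
            # vice versa)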
switched_method = ('pad' if method == 'backfill'
else 'backfill' if method == 'pad'
else method)
actual = df[::-1].reindex(target, method=switched_method)
assert_frame_equal(expected, actual)
expected = pd.DataFrame({'x': [0, 1, 1, np.nan]}, index=target)
actual = df.reindex(target, method='nearest', tolerance=0.2)
assert_frame_equal(expected, actual)
expected = pd.DataFrame({'x': [0, np.nan, 1, np.nan]}, index=target)
actual = df.reindex(target, method='nearest',
tolerance=[0.5, 0.01, 0.4, 0.1])
assert_frame_equal(expected, actual)
def test_reindex_frame_add_nat(self):
rng = date_range('1/1/2000 00:00:00', periods=10, freq='10s')
df = DataFrame({'A': np.random.randn(len(rng)), 'B': rng})
result = df.reindex(lrange(15))
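        # rows 10-14 do not exist in df, so the datetime column is padded
        # with NaT rather than upcast to object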
assert np.issubdtype(result['B'].dtype, np.dtype('M8[ns]'))
mask = com.isna(result)['B']
assert mask[-5:].all()
assert not mask[:-5].any()
def test_set_dataframe_column_ns_dtype(self):
x = DataFrame([datetime.now(), datetime.now()])
assert x[0].dtype == np.dtype('M8[ns]')
def test_non_monotonic_reindex_methods(self):
dr = pd.date_range('2013-08-01', periods=6, freq='B')
data = np.random.randn(6, 1)
df = pd.DataFrame(data, index=dr, columns=list('A'))
df_rev = pd.DataFrame(data, index=dr[[3, 4, 5] + [0, 1, 2]],
columns=list('A'))
# index is not monotonic increasing or decreasing
pytest.raises(ValueError, df_rev.reindex, df.index, method='pad')
pytest.raises(ValueError, df_rev.reindex, df.index, method='ffill')
pytest.raises(ValueError, df_rev.reindex, df.index, method='bfill')
pytest.raises(ValueError, df_rev.reindex, df.index, method='nearest')
def test_reindex_level(self):
from itertools import permutations
icol = ['jim', 'joe', 'jolie']
def verify_first_level(df, level, idx, check_index_type=True):
f = lambda val: np.nonzero(df[level] == val)[0]
i = np.concatenate(list(map(f, idx)))
left = df.set_index(icol).reindex(idx, level=level)
right = df.iloc[i].set_index(icol)
assert_frame_equal(left, right, check_index_type=check_index_type)
def verify(df, level, idx, indexer, check_index_type=True):
left = df.set_index(icol).reindex(idx, level=level)
right = df.iloc[indexer].set_index(icol)
assert_frame_equal(left, right, check_index_type=check_index_type)
df = pd.DataFrame({'jim': list('B' * 4 + 'A' * 2 + 'C' * 3),
'joe': list('abcdeabcd')[::-1],
'jolie': [10, 20, 30] * 3,
'joline': np.random.randint(0, 1000, 9)})
target = [['C', 'B', 'A'], ['F', 'C', 'A', 'D'], ['A'],
['A', 'B', 'C'], ['C', 'A', 'B'], ['C', 'B'], ['C', 'A'],
['A', 'B'], ['B', 'A', 'C']]
for idx in target:
verify_first_level(df, 'jim', idx)
# reindex by these causes different MultiIndex levels
for idx in [['D', 'F'], ['A', 'C', 'B']]:
verify_first_level(df, 'jim', idx, check_index_type=False)
verify(df, 'joe', list('abcde'), [3, 2, 1, 0, 5, 4, 8, 7, 6])
verify(df, 'joe', list('abcd'), [3, 2, 1, 0, 5, 8, 7, 6])
verify(df, 'joe', list('abc'), [3, 2, 1, 8, 7, 6])
verify(df, 'joe', list('eca'), [1, 3, 4, 6, 8])
verify(df, 'joe', list('edc'), [0, 1, 4, 5, 6])
verify(df, 'joe', list('eadbc'), [3, 0, 2, 1, 4, 5, 8, 7, 6])
verify(df, 'joe', list('edwq'), [0, 4, 5])
verify(df, 'joe', list('wq'), [], check_index_type=False)
df = DataFrame({'jim': ['mid'] * 5 + ['btm'] * 8 + ['top'] * 7,
'joe': ['3rd'] * 2 + ['1st'] * 3 + ['2nd'] * 3 +
['1st'] * 2 + ['3rd'] * 3 + ['1st'] * 2 +
['3rd'] * 3 + ['2nd'] * 2,
# this needs to be jointly unique with jim and joe or
# reindexing will fail ~1.5% of the time, this works
# out to needing unique groups of same size as joe
'jolie': np.concatenate([
np.random.choice(1000, x, replace=False)
for x in [2, 3, 3, 2, 3, 2, 3, 2]]),
'joline': np.random.randn(20).round(3) * 10})
for idx in permutations(df['jim'].unique()):
for i in range(3):
verify_first_level(df, 'jim', idx[:i + 1])
i = [2, 3, 4, 0, 1, 8, 9, 5, 6, 7, 10,
11, 12, 13, 14, 18, 19, 15, 16, 17]
verify(df, 'joe', ['1st', '2nd', '3rd'], i)
i = [0, 1, 2, 3, 4, 10, 11, 12, 5, 6,
7, 8, 9, 15, 16, 17, 18, 19, 13, 14]
verify(df, 'joe', ['3rd', '2nd', '1st'], i)
i = [0, 1, 5, 6, 7, 10, 11, 12, 18, 19, 15, 16, 17]
verify(df, 'joe', ['2nd', '3rd'], i)
i = [0, 1, 2, 3, 4, 10, 11, 12, 8, 9, 15, 16, 17, 13, 14]
verify(df, 'joe', ['3rd', '1st'], i)
def test_getitem_ix_float_duplicates(self):
df = pd.DataFrame(np.random.randn(3, 3),
index=[0.1, 0.2, 0.2], columns=list('abc'))
expect = df.iloc[1:]
assert_frame_equal(df.loc[0.2], expect)
with catch_warnings(record=True):
assert_frame_equal(df.ix[0.2], expect)
expect = df.iloc[1:, 0]
assert_series_equal(df.loc[0.2, 'a'], expect)
df.index = [1, 0.2, 0.2]
expect = df.iloc[1:]
assert_frame_equal(df.loc[0.2], expect)
with catch_warnings(record=True):
assert_frame_equal(df.ix[0.2], expect)
expect = df.iloc[1:, 0]
assert_series_equal(df.loc[0.2, 'a'], expect)
df = pd.DataFrame(np.random.randn(4, 3),
index=[1, 0.2, 0.2, 1], columns=list('abc'))
expect = df.iloc[1:-1]
assert_frame_equal(df.loc[0.2], expect)
with catch_warnings(record=True):
assert_frame_equal(df.ix[0.2], expect)
expect = df.iloc[1:-1, 0]
assert_series_equal(df.loc[0.2, 'a'], expect)
df.index = [0.1, 0.2, 2, 0.2]
expect = df.iloc[[1, -1]]
assert_frame_equal(df.loc[0.2], expect)
with catch_warnings(record=True):
assert_frame_equal(df.ix[0.2], expect)
expect = df.iloc[[1, -1], 0]
assert_series_equal(df.loc[0.2, 'a'], expect)
def test_setitem_with_sparse_value(self):
# GH8131
df = pd.DataFrame({'c_1': ['a', 'b', 'c'], 'n_1': [1., 2., 3.]})
sp_series = pd.Series([0, 0, 1]).to_sparse(fill_value=0)
df['new_column'] = sp_series
assert_series_equal(df['new_column'], sp_series, check_names=False)
def test_setitem_with_unaligned_sparse_value(self):
df = pd.DataFrame({'c_1': ['a', 'b', 'c'], 'n_1': [1., 2., 3.]})
sp_series = (pd.Series([0, 0, 1], index=[2, 1, 0])
.to_sparse(fill_value=0))
df['new_column'] = sp_series
exp = pd.Series([1, 0, 0], name='new_column')
assert_series_equal(df['new_column'], exp)
def test_setitem_with_unaligned_tz_aware_datetime_column(self):
# GH 12981
# Assignment of unaligned offset-aware datetime series.
# Make sure timezone isn't lost
column = pd.Series(pd.date_range('2015-01-01', periods=3, tz='utc'),
name='dates')
df = pd.DataFrame({'dates': column})
df['dates'] = column[[1, 0, 2]]
assert_series_equal(df['dates'], column)
df = pd.DataFrame({'dates': column})
df.loc[[0, 1, 2], 'dates'] = column[[1, 0, 2]]
assert_series_equal(df['dates'], column)
def test_setitem_datetime_coercion(self):
# gh-1048
df = pd.DataFrame({'c': [pd.Timestamp('2010-10-01')] * 3})
df.loc[0:1, 'c'] = np.datetime64('2008-08-08')
assert pd.Timestamp('2008-08-08') == df.loc[0, 'c']
assert pd.Timestamp('2008-08-08') == df.loc[1, 'c']
df.loc[2, 'c'] = date(2005, 5, 5)
assert pd.Timestamp('2005-05-05') == df.loc[2, 'c']
def test_setitem_datetimelike_with_inference(self):
# GH 7592
# assignment of timedeltas with NaT
one_hour = timedelta(hours=1)
df = DataFrame(index=date_range('20130101', periods=4))
df['A'] = np.array([1 * one_hour] * 4, dtype='m8[ns]')
df.loc[:, 'B'] = np.array([2 * one_hour] * 4, dtype='m8[ns]')
df.loc[:3, 'C'] = np.array([3 * one_hour] * 3, dtype='m8[ns]')
df.loc[:, 'D'] = np.array([4 * one_hour] * 4, dtype='m8[ns]')
df.loc[df.index[:3], 'E'] = np.array([5 * one_hour] * 3,
dtype='m8[ns]')
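        # the partial-column assignments above still infer full
        # timedelta64 columns, with unset rows padded by NaT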
df['F'] = np.timedelta64('NaT')
df.loc[df.index[:-1], 'F'] = np.array([6 * one_hour] * 3,
dtype='m8[ns]')
df.loc[df.index[-3]:, 'G'] = date_range('20130101', periods=3)
df['H'] = np.datetime64('NaT')
result = df.dtypes
expected = Series([np.dtype('timedelta64[ns]')] * 6 +
[np.dtype('datetime64[ns]')] * 2,
index=list('ABCDEFGH'))
assert_series_equal(result, expected)
def test_at_time_between_time_datetimeindex(self):
index = date_range("2012-01-01", "2012-01-05", freq='30min')
df = DataFrame(randn(len(index), 5), index=index)
akey = time(12, 0, 0)
bkey = slice(time(13, 0, 0), time(14, 0, 0))
ainds = [24, 72, 120, 168]
binds = [26, 27, 28, 74, 75, 76, 122, 123, 124, 170, 171, 172]
result = df.at_time(akey)
expected = df.loc[akey]
expected2 = df.iloc[ainds]
assert_frame_equal(result, expected)
assert_frame_equal(result, expected2)
assert len(result) == 4
result = df.between_time(bkey.start, bkey.stop)
expected = df.loc[bkey]
expected2 = df.iloc[binds]
assert_frame_equal(result, expected)
assert_frame_equal(result, expected2)
assert len(result) == 12
result = df.copy()
result.loc[akey] = 0
result = result.loc[akey]
expected = df.loc[akey].copy()
expected.loc[:] = 0
assert_frame_equal(result, expected)
result = df.copy()
result.loc[akey] = 0
result.loc[akey] = df.iloc[ainds]
assert_frame_equal(result, df)
result = df.copy()
result.loc[bkey] = 0
result = result.loc[bkey]
expected = df.loc[bkey].copy()
expected.loc[:] = 0
assert_frame_equal(result, expected)
result = df.copy()
result.loc[bkey] = 0
result.loc[bkey] = df.iloc[binds]
assert_frame_equal(result, df)
def test_xs(self):
idx = self.frame.index[5]
xs = self.frame.xs(idx)
for item, value in compat.iteritems(xs):
if np.isnan(value):
assert np.isnan(self.frame[item][idx])
else:
assert value == self.frame[item][idx]
# mixed-type xs
test_data = {
'A': {'1': 1, '2': 2},
'B': {'1': '1', '2': '2', '3': '3'},
}
frame = DataFrame(test_data)
xs = frame.xs('1')
assert xs.dtype == np.object_
assert xs['A'] == 1
assert xs['B'] == '1'
with pytest.raises(KeyError):
self.tsframe.xs(self.tsframe.index[0] - BDay())
# xs get column
series = self.frame.xs('A', axis=1)
expected = self.frame['A']
assert_series_equal(series, expected)
# view is returned if possible
series = self.frame.xs('A', axis=1)
series[:] = 5
assert (expected == 5).all()
def test_xs_corner(self):
# pathological mixed-type reordering case
df = DataFrame(index=[0])
df['A'] = 1.
df['B'] = 'foo'
df['C'] = 2.
df['D'] = 'bar'
df['E'] = 3.
xs = df.xs(0)
exp = pd.Series([1., 'foo', 2., 'bar', 3.],
index=list('ABCDE'), name=0)
tm.assert_series_equal(xs, exp)
# no columns but Index(dtype=object)
df = DataFrame(index=['a', 'b', 'c'])
result = df.xs('a')
expected = Series([], name='a', index=pd.Index([], dtype=object))
assert_series_equal(result, expected)
def test_xs_duplicates(self):
df = DataFrame(randn(5, 2), index=['b', 'b', 'c', 'b', 'a'])
cross = df.xs('c')
exp = df.iloc[2]
assert_series_equal(cross, exp)
def test_xs_keep_level(self):
df = (DataFrame({'day': {0: 'sat', 1: 'sun'},
'flavour': {0: 'strawberry', 1: 'strawberry'},
'sales': {0: 10, 1: 12},
'year': {0: 2008, 1: 2008}})
.set_index(['year', 'flavour', 'day']))
result = df.xs('sat', level='day', drop_level=False)
expected = df[:1]
assert_frame_equal(result, expected)
result = df.xs([2008, 'sat'], level=['year', 'day'], drop_level=False)
assert_frame_equal(result, expected)
def test_xs_view(self):
        # in 0.14 this will return a view if possible, and a copy otherwise,
        # but this is numpy-dependent
dm = DataFrame(np.arange(20.).reshape(4, 5),
index=lrange(4), columns=lrange(5))
dm.xs(2)[:] = 10
assert (dm.xs(2) == 10).all()
def test_index_namedtuple(self):
from collections import namedtuple
IndexType = namedtuple("IndexType", ["a", "b"])
idx1 = IndexType("foo", "bar")
idx2 = IndexType("baz", "bof")
index = Index([idx1, idx2],
name="composite_index", tupleize_cols=False)
df = DataFrame([(1, 2), (3, 4)], index=index, columns=["A", "B"])
with catch_warnings(record=True):
result = df.ix[IndexType("foo", "bar")]["A"]
assert result == 1
result = df.loc[IndexType("foo", "bar")]["A"]
assert result == 1
def test_boolean_indexing(self):
idx = lrange(3)
cols = ['A', 'B', 'C']
df1 = DataFrame(index=idx, columns=cols,
data=np.array([[0.0, 0.5, 1.0],
[1.5, 2.0, 2.5],
[3.0, 3.5, 4.0]],
dtype=float))
df2 = DataFrame(index=idx, columns=cols,
data=np.ones((len(idx), len(cols))))
expected = DataFrame(index=idx, columns=cols,
data=np.array([[0.0, 0.5, 1.0],
[1.5, 2.0, -1],
[-1, -1, -1]], dtype=float))
df1[df1 > 2.0 * df2] = -1
assert_frame_equal(df1, expected)
with tm.assert_raises_regex(ValueError, 'Item wrong length'):
df1[df1.index[:-1] > 2] = -1
def test_boolean_indexing_mixed(self):
df = DataFrame({
long(0): {35: np.nan, 40: np.nan, 43: np.nan,
49: np.nan, 50: np.nan},
long(1): {35: np.nan,
40: 0.32632316859446198,
43: np.nan,
49: 0.32632316859446198,
50: 0.39114724480578139},
long(2): {35: np.nan, 40: np.nan, 43: 0.29012581014105987,
49: np.nan, 50: np.nan},
long(3): {35: np.nan, 40: np.nan, 43: np.nan, 49: np.nan,
50: np.nan},
long(4): {35: 0.34215328467153283, 40: np.nan, 43: np.nan,
49: np.nan, 50: np.nan},
'y': {35: 0, 40: 0, 43: 0, 49: 0, 50: 1}})
# mixed int/float ok
df2 = df.copy()
df2[df2 > 0.3] = 1
expected = df.copy()
expected.loc[40, 1] = 1
expected.loc[49, 1] = 1
expected.loc[50, 1] = 1
expected.loc[35, 4] = 1
assert_frame_equal(df2, expected)
df['foo'] = 'test'
with tm.assert_raises_regex(TypeError, 'boolean setting '
'on mixed-type'):
df[df > 0.3] = 1
def test_where(self):
default_frame = DataFrame(np.random.randn(5, 3),
columns=['A', 'B', 'C'])
def _safe_add(df):
# only add to the numeric items
def is_ok(s):
return (issubclass(s.dtype.type, (np.integer, np.floating)) and
s.dtype != 'uint8')
return DataFrame(dict([(c, s + 1) if is_ok(s) else (c, s)
for c, s in compat.iteritems(df)]))
def _check_get(df, cond, check_dtypes=True):
other1 = _safe_add(df)
rs = df.where(cond, other1)
rs2 = df.where(cond.values, other1)
for k, v in rs.iteritems():
exp = Series(
np.where(cond[k], df[k], other1[k]), index=v.index)
assert_series_equal(v, exp, check_names=False)
assert_frame_equal(rs, rs2)
# dtypes
if check_dtypes:
assert (rs.dtypes == df.dtypes).all()
# check getting
for df in [default_frame, self.mixed_frame,
self.mixed_float, self.mixed_int]:
cond = df > 0
_check_get(df, cond)
# upcasting case (GH # 2794)
df = DataFrame(dict([(c, Series([1] * 3, dtype=c))
for c in ['int64', 'int32',
'float32', 'float64']]))
df.iloc[1, :] = 0
result = df.where(df >= 0).get_dtype_counts()
# when we don't preserve boolean casts
#
# expected = Series({ 'float32' : 1, 'float64' : 3 })
expected = Series({'float32': 1, 'float64': 1, 'int32': 1, 'int64': 1})
assert_series_equal(result, expected)
# aligning
def _check_align(df, cond, other, check_dtypes=True):
rs = df.where(cond, other)
for i, k in enumerate(rs.columns):
result = rs[k]
d = df[k].values
c = cond[k].reindex(df[k].index).fillna(False).values
if is_scalar(other):
o = other
else:
if isinstance(other, np.ndarray):
o = Series(other[:, i], index=result.index).values
else:
o = other[k].values
new_values = d if c.all() else np.where(c, d, o)
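                # Editorial note: when the reindexed condition is all True the
                # original values pass through untouched, which also preserves
                # their dtype.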
expected = Series(new_values, index=result.index, name=k)
# since we can't always have the correct numpy dtype
# as numpy doesn't know how to downcast, don't check
assert_series_equal(result, expected, check_dtype=False)
# dtypes
# can't check dtype when other is an ndarray
if check_dtypes and not isinstance(other, np.ndarray):
assert (rs.dtypes == df.dtypes).all()
for df in [self.mixed_frame, self.mixed_float, self.mixed_int]:
# other is a frame
cond = (df > 0)[1:]
_check_align(df, cond, _safe_add(df))
# check other is ndarray
cond = df > 0
_check_align(df, cond, (_safe_add(df).values))
# integers are upcast, so don't check the dtypes
cond = df > 0
check_dtypes = all([not issubclass(s.type, np.integer)
for s in df.dtypes])
_check_align(df, cond, np.nan, check_dtypes=check_dtypes)
# invalid conditions
df = default_frame
err1 = (df + 1).values[0:2, :]
pytest.raises(ValueError, df.where, cond, err1)
err2 = cond.iloc[:2, :].values
other1 = _safe_add(df)
pytest.raises(ValueError, df.where, err2, other1)
pytest.raises(ValueError, df.mask, True)
pytest.raises(ValueError, df.mask, 0)
# where inplace
def _check_set(df, cond, check_dtypes=True):
dfi = df.copy()
econd = cond.reindex_like(df).fillna(True)
expected = dfi.mask(~econd)
dfi.where(cond, np.nan, inplace=True)
assert_frame_equal(dfi, expected)
            # dtypes (and confirm upcasts)
if check_dtypes:
for k, v in compat.iteritems(df.dtypes):
if issubclass(v.type, np.integer) and not cond[k].all():
v = np.dtype('float64')
assert dfi[k].dtype == v
for df in [default_frame, self.mixed_frame, self.mixed_float,
self.mixed_int]:
cond = df > 0
_check_set(df, cond)
cond = df >= 0
_check_set(df, cond)
        # aligning
cond = (df >= 0)[1:]
_check_set(df, cond)
# GH 10218
# test DataFrame.where with Series slicing
df = DataFrame({'a': range(3), 'b': range(4, 7)})
result = df.where(df['a'] == 1)
expected = df[df['a'] == 1].reindex(df.index)
assert_frame_equal(result, expected)
def test_where_array_like(self):
# see gh-15414
klasses = [list, tuple, np.array]
df = DataFrame({'a': [1, 2, 3]})
cond = [[False], [True], [True]]
expected = DataFrame({'a': [np.nan, 2, 3]})
for klass in klasses:
result = df.where(klass(cond))
assert_frame_equal(result, expected)
df['b'] = 2
expected['b'] = [2, np.nan, 2]
cond = [[False, True], [True, False], [True, True]]
for klass in klasses:
result = df.where(klass(cond))
assert_frame_equal(result, expected)
def test_where_invalid_input(self):
# see gh-15414: only boolean arrays accepted
df = DataFrame({'a': [1, 2, 3]})
msg = "Boolean array expected for the condition"
conds = [
[[1], [0], [1]],
Series([[2], [5], [7]]),
DataFrame({'a': [2, 5, 7]}),
[["True"], ["False"], ["True"]],
[[Timestamp("2017-01-01")],
[pd.NaT], [Timestamp("2017-01-02")]]
]
for cond in conds:
with tm.assert_raises_regex(ValueError, msg):
df.where(cond)
df['b'] = 2
conds = [
[[0, 1], [1, 0], [1, 1]],
Series([[0, 2], [5, 0], [4, 7]]),
[["False", "True"], ["True", "False"],
["True", "True"]],
DataFrame({'a': [2, 5, 7], 'b': [4, 8, 9]}),
[[pd.NaT, Timestamp("2017-01-01")],
[Timestamp("2017-01-02"), pd.NaT],
[Timestamp("2017-01-03"), Timestamp("2017-01-03")]]
]
for cond in conds:
with tm.assert_raises_regex(ValueError, msg):
df.where(cond)
def test_where_dataframe_col_match(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]])
cond = DataFrame([[True, False, True], [False, False, True]])
result = df.where(cond)
expected = DataFrame([[1.0, np.nan, 3], [np.nan, np.nan, 6]])
tm.assert_frame_equal(result, expected)
# this *does* align, though has no matching columns
cond.columns = ["a", "b", "c"]
result = df.where(cond)
expected = DataFrame(np.nan, index=df.index, columns=df.columns)
tm.assert_frame_equal(result, expected)
def test_where_ndframe_align(self):
msg = "Array conditional must be same shape as self"
df = DataFrame([[1, 2, 3], [4, 5, 6]])
cond = [True]
with tm.assert_raises_regex(ValueError, msg):
df.where(cond)
expected = DataFrame([[1, 2, 3], [np.nan, np.nan, np.nan]])
out = df.where(Series(cond))
tm.assert_frame_equal(out, expected)
cond = np.array([False, True, False, True])
with tm.assert_raises_regex(ValueError, msg):
df.where(cond)
expected = DataFrame([[np.nan, np.nan, np.nan], [4, 5, 6]])
out = df.where(Series(cond))
tm.assert_frame_equal(out, expected)
def test_where_bug(self):
# GH 2793
df = DataFrame({'a': [1.0, 2.0, 3.0, 4.0], 'b': [
4.0, 3.0, 2.0, 1.0]}, dtype='float64')
expected = DataFrame({'a': [np.nan, np.nan, 3.0, 4.0], 'b': [
4.0, 3.0, np.nan, np.nan]}, dtype='float64')
result = df.where(df > 2, np.nan)
assert_frame_equal(result, expected)
result = df.copy()
result.where(result > 2, np.nan, inplace=True)
assert_frame_equal(result, expected)
# mixed
for dtype in ['int16', 'int8', 'int32', 'int64']:
df = DataFrame({'a': np.array([1, 2, 3, 4], dtype=dtype),
'b': np.array([4.0, 3.0, 2.0, 1.0],
dtype='float64')})
expected = DataFrame({'a': [np.nan, np.nan, 3.0, 4.0],
'b': [4.0, 3.0, np.nan, np.nan]},
dtype='float64')
result = df.where(df > 2, np.nan)
assert_frame_equal(result, expected)
result = df.copy()
result.where(result > 2, np.nan, inplace=True)
assert_frame_equal(result, expected)
# transpositional issue
# GH7506
a = DataFrame({0: [1, 2], 1: [3, 4], 2: [5, 6]})
b = DataFrame({0: [np.nan, 8], 1: [9, np.nan], 2: [np.nan, np.nan]})
do_not_replace = b.isna() | (a > b)
expected = a.copy()
expected[~do_not_replace] = b
result = a.where(do_not_replace, b)
assert_frame_equal(result, expected)
a = DataFrame({0: [4, 6], 1: [1, 0]})
b = DataFrame({0: [np.nan, 3], 1: [3, np.nan]})
do_not_replace = b.isna() | (a > b)
expected = a.copy()
expected[~do_not_replace] = b
result = a.where(do_not_replace, b)
assert_frame_equal(result, expected)
def test_where_datetime(self):
# GH 3311
df = DataFrame(dict(A=date_range('20130102', periods=5),
B=date_range('20130104', periods=5),
C=np.random.randn(5)))
stamp = datetime(2013, 1, 3)
result = df[df > stamp]
expected = df.copy()
expected.loc[[0, 1], 'A'] = np.nan
assert_frame_equal(result, expected)
def test_where_none(self):
# GH 4667
# setting with None changes dtype
df = DataFrame({'series': Series(range(10))}).astype(float)
df[df > 7] = None
expected = DataFrame(
{'series': Series([0, 1, 2, 3, 4, 5, 6, 7, np.nan, np.nan])})
assert_frame_equal(df, expected)
# GH 7656
df = DataFrame([{'A': 1, 'B': np.nan, 'C': 'Test'}, {
'A': np.nan, 'B': 'Test', 'C': np.nan}])
expected = df.where(~isna(df), None)
with tm.assert_raises_regex(TypeError, 'boolean setting '
'on mixed-type'):
df.where(~isna(df), None, inplace=True)
def test_where_align(self):
def create():
df = DataFrame(np.random.randn(10, 3))
df.iloc[3:5, 0] = np.nan
df.iloc[4:6, 1] = np.nan
df.iloc[5:8, 2] = np.nan
return df
# series
df = create()
expected = df.fillna(df.mean())
result = df.where(pd.notna(df), df.mean(), axis='columns')
assert_frame_equal(result, expected)
df.where(pd.notna(df), df.mean(), inplace=True, axis='columns')
assert_frame_equal(df, expected)
df = create().fillna(0)
expected = df.apply(lambda x, y: x.where(x > 0, y), y=df[0])
result = df.where(df > 0, df[0], axis='index')
assert_frame_equal(result, expected)
result = df.where(df > 0, df[0], axis='rows')
assert_frame_equal(result, expected)
# frame
df = create()
expected = df.fillna(1)
result = df.where(pd.notna(df), DataFrame(
1, index=df.index, columns=df.columns))
assert_frame_equal(result, expected)
def test_where_complex(self):
# GH 6345
expected = DataFrame(
[[1 + 1j, 2], [np.nan, 4 + 1j]], columns=['a', 'b'])
df = DataFrame([[1 + 1j, 2], [5 + 1j, 4 + 1j]], columns=['a', 'b'])
df[df.abs() >= 5] = np.nan
assert_frame_equal(df, expected)
def test_where_axis(self):
# GH 9736
df = DataFrame(np.random.randn(2, 2))
mask = DataFrame([[False, False], [False, False]])
s = Series([0, 1])
expected = DataFrame([[0, 0], [1, 1]], dtype='float64')
result = df.where(mask, s, axis='index')
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, s, axis='index', inplace=True)
assert_frame_equal(result, expected)
expected = DataFrame([[0, 1], [0, 1]], dtype='float64')
result = df.where(mask, s, axis='columns')
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, s, axis='columns', inplace=True)
assert_frame_equal(result, expected)
# Upcast needed
df = DataFrame([[1, 2], [3, 4]], dtype='int64')
mask = DataFrame([[False, False], [False, False]])
s = Series([0, np.nan])
expected = DataFrame([[0, 0], [np.nan, np.nan]], dtype='float64')
result = df.where(mask, s, axis='index')
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, s, axis='index', inplace=True)
assert_frame_equal(result, expected)
expected = DataFrame([[0, np.nan], [0, np.nan]])
result = df.where(mask, s, axis='columns')
assert_frame_equal(result, expected)
expected = DataFrame({0: np.array([0, 0], dtype='int64'),
1: np.array([np.nan, np.nan], dtype='float64')})
result = df.copy()
result.where(mask, s, axis='columns', inplace=True)
assert_frame_equal(result, expected)
# Multiple dtypes (=> multiple Blocks)
df = pd.concat([
DataFrame(np.random.randn(10, 2)),
DataFrame(np.random.randint(0, 10, size=(10, 2)), dtype='int64')],
ignore_index=True, axis=1)
mask = DataFrame(False, columns=df.columns, index=df.index)
s1 = Series(1, index=df.columns)
s2 = Series(2, index=df.index)
result = df.where(mask, s1, axis='columns')
expected = DataFrame(1.0, columns=df.columns, index=df.index)
expected[2] = expected[2].astype('int64')
expected[3] = expected[3].astype('int64')
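        # Editorial note: columns 2 and 3 come from the int64 block; filling
        # with the integer 1 keeps them int64 while the float block becomes 1.0.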
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, s1, axis='columns', inplace=True)
assert_frame_equal(result, expected)
result = df.where(mask, s2, axis='index')
expected = DataFrame(2.0, columns=df.columns, index=df.index)
expected[2] = expected[2].astype('int64')
expected[3] = expected[3].astype('int64')
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, s2, axis='index', inplace=True)
assert_frame_equal(result, expected)
# DataFrame vs DataFrame
d1 = df.copy().drop(1, axis=0)
expected = df.copy()
expected.loc[1, :] = np.nan
result = df.where(mask, d1)
assert_frame_equal(result, expected)
result = df.where(mask, d1, axis='index')
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, d1, inplace=True)
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, d1, inplace=True, axis='index')
assert_frame_equal(result, expected)
d2 = df.copy().drop(1, axis=1)
expected = df.copy()
expected.loc[:, 1] = np.nan
result = df.where(mask, d2)
assert_frame_equal(result, expected)
result = df.where(mask, d2, axis='columns')
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, d2, inplace=True)
assert_frame_equal(result, expected)
result = df.copy()
result.where(mask, d2, inplace=True, axis='columns')
assert_frame_equal(result, expected)
def test_where_callable(self):
# GH 12533
df = DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
result = df.where(lambda x: x > 4, lambda x: x + 1)
exp = DataFrame([[2, 3, 4], [5, 5, 6], [7, 8, 9]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result, df.where(df > 4, df + 1))
# return ndarray and scalar
result = df.where(lambda x: (x % 2 == 0).values, lambda x: 99)
exp = DataFrame([[99, 2, 99], [4, 99, 6], [99, 8, 99]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result, df.where(df % 2 == 0, 99))
# chain
result = (df + 2).where(lambda x: x > 8, lambda x: x + 10)
exp = DataFrame([[13, 14, 15], [16, 17, 18], [9, 10, 11]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result,
(df + 2).where((df + 2) > 8, (df + 2) + 10))
def test_mask(self):
df = DataFrame(np.random.randn(5, 3))
cond = df > 0
rs = df.where(cond, np.nan)
assert_frame_equal(rs, df.mask(df <= 0))
assert_frame_equal(rs, df.mask(~cond))
other = DataFrame(np.random.randn(5, 3))
rs = df.where(cond, other)
assert_frame_equal(rs, df.mask(df <= 0, other))
assert_frame_equal(rs, df.mask(~cond, other))
def test_mask_inplace(self):
# GH8801
df = DataFrame(np.random.randn(5, 3))
cond = df > 0
rdf = df.copy()
rdf.where(cond, inplace=True)
assert_frame_equal(rdf, df.where(cond))
assert_frame_equal(rdf, df.mask(~cond))
rdf = df.copy()
rdf.where(cond, -df, inplace=True)
assert_frame_equal(rdf, df.where(cond, -df))
assert_frame_equal(rdf, df.mask(~cond, -df))
def test_mask_edge_case_1xN_frame(self):
# GH4071
df = DataFrame([[1, 2]])
res = df.mask(DataFrame([[True, False]]))
expec = DataFrame([[nan, 2]])
assert_frame_equal(res, expec)
def test_mask_callable(self):
# GH 12533
df = DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
result = df.mask(lambda x: x > 4, lambda x: x + 1)
exp = DataFrame([[1, 2, 3], [4, 6, 7], [8, 9, 10]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result, df.mask(df > 4, df + 1))
# return ndarray and scalar
result = df.mask(lambda x: (x % 2 == 0).values, lambda x: 99)
exp = DataFrame([[1, 99, 3], [99, 5, 99], [7, 99, 9]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result, df.mask(df % 2 == 0, 99))
# chain
result = (df + 2).mask(lambda x: x > 8, lambda x: x + 10)
exp = DataFrame([[3, 4, 5], [6, 7, 8], [19, 20, 21]])
tm.assert_frame_equal(result, exp)
tm.assert_frame_equal(result,
(df + 2).mask((df + 2) > 8, (df + 2) + 10))
def test_head_tail(self):
assert_frame_equal(self.frame.head(), self.frame[:5])
assert_frame_equal(self.frame.tail(), self.frame[-5:])
assert_frame_equal(self.frame.head(0), self.frame[0:0])
assert_frame_equal(self.frame.tail(0), self.frame[0:0])
assert_frame_equal(self.frame.head(-1), self.frame[:-1])
assert_frame_equal(self.frame.tail(-1), self.frame[1:])
assert_frame_equal(self.frame.head(1), self.frame[:1])
assert_frame_equal(self.frame.tail(1), self.frame[-1:])
# with a float index
df = self.frame.copy()
df.index = np.arange(len(self.frame)) + 0.1
assert_frame_equal(df.head(), df.iloc[:5])
assert_frame_equal(df.tail(), df.iloc[-5:])
assert_frame_equal(df.head(0), df[0:0])
assert_frame_equal(df.tail(0), df[0:0])
assert_frame_equal(df.head(-1), df.iloc[:-1])
assert_frame_equal(df.tail(-1), df.iloc[1:])
# test empty dataframe
empty_df = DataFrame()
assert_frame_equal(empty_df.tail(), empty_df)
assert_frame_equal(empty_df.head(), empty_df)
def test_type_error_multiindex(self):
# See gh-12218
df = DataFrame(columns=['i', 'c', 'x', 'y'],
data=[[0, 0, 1, 2], [1, 0, 3, 4],
[0, 1, 1, 2], [1, 1, 3, 4]])
dg = df.pivot_table(index='i', columns='c',
values=['x', 'y'])
with tm.assert_raises_regex(TypeError, "is an invalid key"):
str(dg[:, 0])
index = Index(range(2), name='i')
columns = MultiIndex(levels=[['x', 'y'], [0, 1]],
labels=[[0, 1], [0, 0]],
names=[None, 'c'])
expected = DataFrame([[1, 2], [3, 4]], columns=columns, index=index)
result = dg.loc[:, (slice(None), 0)]
assert_frame_equal(result, expected)
name = ('x', 0)
index = Index(range(2), name='i')
expected = Series([1, 3], index=index, name=name)
result = dg['x', 0]
assert_series_equal(result, expected)
class TestDataFrameIndexingDatetimeWithTZ(TestData):
def setup_method(self, method):
self.idx = Index(date_range('20130101', periods=3, tz='US/Eastern'),
name='foo')
self.dr = date_range('20130110', periods=3)
self.df = DataFrame({'A': self.idx, 'B': self.dr})
def test_setitem(self):
df = self.df
idx = self.idx
# setitem
df['C'] = idx
assert_series_equal(df['C'], Series(idx, name='C'))
df['D'] = 'foo'
df['D'] = idx
assert_series_equal(df['D'], Series(idx, name='D'))
del df['D']
# assert that A & C are not sharing the same base (e.g. they
# are copies)
b1 = df._data.blocks[1]
b2 = df._data.blocks[2]
assert b1.values.equals(b2.values)
assert id(b1.values.values.base) != id(b2.values.values.base)
# with nan
df2 = df.copy()
df2.iloc[1, 1] = pd.NaT
df2.iloc[1, 2] = pd.NaT
result = df2['B']
assert_series_equal(notna(result), Series(
[True, False, True], name='B'))
assert_series_equal(df2.dtypes, df.dtypes)
def test_set_reset(self):
idx = self.idx
# set/reset
df = DataFrame({'A': [0, 1, 2]}, index=idx)
result = df.reset_index()
        # the old ``assert x, 'msg'`` form always passed; compare explicitly
        assert result['foo'].dtype == 'datetime64[ns, US/Eastern]'
df = result.set_index('foo')
tm.assert_index_equal(df.index, idx)
def test_transpose(self):
result = self.df.T
expected = DataFrame(self.df.values.T)
expected.index = ['A', 'B']
assert_frame_equal(result, expected)
class TestDataFrameIndexingUInt64(TestData):
def setup_method(self, method):
self.ir = Index(np.arange(3), dtype=np.uint64)
self.idx = Index([2**63, 2**63 + 5, 2**63 + 10], name='foo')
self.df = DataFrame({'A': self.idx, 'B': self.ir})
def test_setitem(self):
df = self.df
idx = self.idx
# setitem
df['C'] = idx
assert_series_equal(df['C'], Series(idx, name='C'))
df['D'] = 'foo'
df['D'] = idx
assert_series_equal(df['D'], Series(idx, name='D'))
del df['D']
# With NaN: because uint64 has no NaN element,
# the column should be cast to object.
df2 = df.copy()
df2.iloc[1, 1] = pd.NaT
df2.iloc[1, 2] = pd.NaT
result = df2['B']
assert_series_equal(notna(result), Series(
[True, False, True], name='B'))
assert_series_equal(df2.dtypes, Series([np.dtype('uint64'),
np.dtype('O'), np.dtype('O')],
index=['A', 'B', 'C']))
def test_set_reset(self):
idx = self.idx
# set/reset
df = DataFrame({'A': [0, 1, 2]}, index=idx)
result = df.reset_index()
assert result['foo'].dtype == np.dtype('uint64')
df = result.set_index('foo')
tm.assert_index_equal(df.index, idx)
def test_transpose(self):
result = self.df.T
expected = DataFrame(self.df.values.T)
expected.index = ['A', 'B']
assert_frame_equal(result, expected)
| apache-2.0 | 3,149,025,408,694,788,000 | 33.840887 | 79 | 0.509569 | false |
acabey/acabey.github.io | projects/demos/engineering.purdue.edu/scriptingwithobjects/swocode/chap4/SearchAndReplace.py | 1 | 1410 | #!/usr/bin/env python
## SearchAndReplace.py
import re
input_string = "one hello is like any other hello" #(A)
input_string = re.sub( 'hello', 'armadello', input_string ) #(B)
print input_string #(C)
# one armadello is like any other armadello
input_string = "apples oranges\nbananas\t pears" #(D)
input_string = re.sub( r'\s+', ' ', input_string ) #(E)
print input_string #(F)
# apples oranges bananas pears
input_string = "apples oranges, bananas; pears\n" #(G)
output_string = re.sub( r'\b(\w+)[.,;:]\s+', r'\1 ', input_string ) #(H)
print output_string #(I)
# apples oranges bananas pears
input_string = "http://programming_with_objects.com" #(J)
print re.sub( r'//', r'//www.', input_string ) #(K)
# http://www.programming_with_objects.com
input_string = "apples oranges bananas pears" #(L)
print re.sub( r'(\ba\w+)', r'\1'.upper(), input_string ) #(M)
# apples oranges bananas pears
print re.sub( r'(\ba\w+)', r'hello'.upper(), input_string ) #(N)
# HELLO oranges bananas pears
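# Editorial addition, not in the original chapter: re.sub also accepts a
# callable replacement, which is passed each match object in turn.
print re.sub( r'\ba\w+', lambda m: m.group(0).upper(), input_string )   #(O)
# APPLES oranges bananas pears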
| gpl-3.0 | -4,574,443,717,160,498,000 | 44.483871 | 72 | 0.465957 | false |
ella/mypage | mypage/widgets/migrations/0002_add_site_fk_to_renderedwidget.py | 1 | 2250 |
from south.db import db
from django.db import models
from mypage.widgets.models import *
import datetime
class Migration:
def forwards(self, orm):
# Adding field 'RenderedWidget.site'
db.add_column('widgets_renderedwidget', 'site', models.ForeignKey(orm['sites.Site'], default=1), keep_default=False)
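        # keep_default=False: South uses default=1 only to backfill existing
        # rows during this migration; the default is not kept on the column.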
def backwards(self, orm):
# Deleting field 'RenderedWidget.site'
db.delete_column('widgets_renderedwidget', 'site_id')
models = {
'sites.site': {
'Meta': {'ordering': "('domain',)", 'db_table': "'django_site'"},
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'widgets.renderedwidget': {
'Meta': {'unique_together': "(('widget','state',),)"},
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'rendered_html': ('models.TextField', [], {}),
'site': ('models.ForeignKey', ["orm['sites.Site']"], {}),
'state': ('models.SmallIntegerField', [], {'default': '0'}),
'widget': ('models.ForeignKey', ["orm['widgets.Widget']"], {'verbose_name': "_('Widget')"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label','model'),)", 'db_table': "'django_content_type'"},
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'widgets.widget': {
'content_type': ('ContentTypeField', [], {'editable': 'False'}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'last_downloaded': ('models.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'next_download': ('models.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'shared': ('models.BooleanField', ["_('Shared')"], {'default': 'True'}),
'slug': ('models.SlugField', [], {'max_length': '100'}),
'title': ('models.CharField', ["_('Title')"], {'max_length': '100'}),
'url': ('models.URLField', ["_('Header link URL')"], {'blank': 'True'})
}
}
complete_apps = ['widgets']
| bsd-3-clause | 7,095,117,171,365,600,000 | 40.666667 | 130 | 0.503556 | false |
francois-vincent/navitia | source/jormungandr/jormungandr/interfaces/v1/Coverage.py | 1 | 2566 | # coding=utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from flask.ext.restful import Resource, fields, marshal_with
from jormungandr import i_manager
from jormungandr.interfaces.v1.StatedResource import StatedResource
from make_links import add_coverage_link, add_coverage_link, add_collection_links, clean_links
from converters_collection_type import collections_to_resource_type
from collections import OrderedDict
from fields import NonNullNested, FieldDateTime
region_fields = {
"id": fields.String(attribute="region_id"),
"start_production_date": fields.String,
"end_production_date": fields.String,
"last_load_at": FieldDateTime(),
"name": fields.String,
"status": fields.String,
"shape": fields.String,
"error": NonNullNested({
"code": fields.String,
"value": fields.String
})
}
regions_fields = OrderedDict([
("regions", fields.List(fields.Nested(region_fields)))
])
collections = collections_to_resource_type.keys()
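# These collection names feed the add_collection_links decorator on
# Coverage.get below, which generates one navigational link per collection.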
class Coverage(StatedResource):
@clean_links()
@add_coverage_link()
@add_collection_links(collections)
@marshal_with(regions_fields)
def get(self, region=None, lon=None, lat=None):
resp = i_manager.regions(region, lon, lat)
        if "regions" in resp:
resp["regions"] = sorted(resp["regions"], cmp=lambda reg1, reg2: cmp(reg1['name'], reg2['name']))
return resp, 200
| agpl-3.0 | 36,332,661,314,822,630 | 35.140845 | 109 | 0.724084 | false |
aerler/GeoPy | src/plotting/archive/mapplots.py | 1 | 3238 | '''
Created on 2014-03-19
some useful functions to make map and surface plots that take advantage of variable meta data
@author: Andre R. Erler, GPL v3
'''
# external imports
import matplotlib.pylab as pyl
import matplotlib as mpl
#from mpl_toolkits.axes_grid1 import ImageGrid
linewidth = .75
mpl.rc('lines', linewidth=linewidth)
if linewidth == 1.5: mpl.rc('font', size=12)
elif linewidth == .75: mpl.rc('font', size=8)
else: mpl.rc('font', size=10)
# prevent figures from closing: don't run in interactive mode, or plt.show() will not block
pyl.ioff()
# internal imports
from plotting.misc import expandLevelList
# function to plot
def srfcPlot():
raise NotImplementedError
return
# function to place (shared) colorbars at specified figure margins
def sharedColorbar(fig, cf, clevs, colorbar, cbls, subplot, margins):
loc = colorbar.pop('location','bottom')
# determine size and spacing
if loc=='top' or loc=='bottom':
orient = colorbar.pop('orientation','horizontal') # colorbar orientation
je = subplot[1] # number of colorbars: number of rows
ie = subplot[0] # number of plots per colorbar: number of columns
cbwd = colorbar.pop('cbwd',0.025) # colorbar height
sp = margins['wspace']
wd = (margins['right']-margins['left'] - sp*(je-1))/je # width of each colorbar axis
else:
orient = colorbar.pop('orientation','vertical') # colorbar orientation
je = subplot[0] # number of colorbars: number of columns
ie = subplot[1] # number of plots per colorbar: number of rows
cbwd = colorbar.pop('cbwd',0.025) # colorbar width
sp = margins['hspace']
wd = (margins['top']-margins['bottom'] - sp*(je-1))/je # width of each colorbar axis
shrink = colorbar.pop('shrinkFactor',1)
# shift existing subplots
if loc=='top': newMargin = margins['top']-margins['hspace'] -cbwd
elif loc=='right': newMargin = margins['right']-margins['left']/2 -cbwd
else: newMargin = 2*margins[loc] + cbwd
fig.subplots_adjust(**{loc:newMargin})
# loop over variables (one colorbar for each)
for i in range(je):
    if orient=='vertical': ii = je-i-1  # was 'dir' (the builtin), so this branch never ran
else: ii = i
offset = (wd+sp)*float(ii) + wd*(1-shrink)/2 # offset due to previous colorbars
# horizontal colorbar(s) at the top
if loc == 'top': ci = i; cax = [margins['left']+offset, newMargin+margins['hspace'], shrink*wd, cbwd]
# horizontal colorbar(s) at the bottom
elif loc == 'bottom': ci = i; cax = [margins['left']+offset, margins[loc], shrink*wd, cbwd]
# vertical colorbar(s) to the left (get axes reference right!)
elif loc == 'left': ci = i*ie; cax = [margins[loc], margins['bottom']+offset, cbwd, shrink*wd]
# vertical colorbar(s) to the right (get axes reference right!)
elif loc == 'right': ci = i*ie; cax = [newMargin+margins['wspace'], margins['bottom']+offset, cbwd, shrink*wd]
# make colorbar
fig.colorbar(mappable=cf[ci],cax=fig.add_axes(cax),ticks=expandLevelList(cbls[i],clevs[i]),
orientation=orient,**colorbar)
# return figure with colorbar (just for the sake of returning something)
return fig
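# Editorial sketch, not part of the original module -- a typical call, assuming
# cf/clevs/cbls hold one contour set, level list and label spec per subplot:
#   fig = sharedColorbar(fig, cf, clevs, dict(location='bottom'), cbls, (2, 2),
#                        dict(left=0.1, right=0.9, bottom=0.1, top=0.9,
#                             wspace=0.05, hspace=0.05))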
if __name__ == '__main__':
pass | gpl-3.0 | -7,852,964,616,939,725,000 | 42.383562 | 118 | 0.656578 | false |
warenlg/shell-complete | shcomplete/tests/test_main.py | 1 | 2059 | import argparse
import sys
import unittest
import shcomplete.__main__ as main
class MainTests(unittest.TestCase):
def test_handlers(self):
action2handler = {
"repos": "fetch_repos",
"filtering": "filter",
"tfdf": "filter_prediction_set",
"corpus": "write_corpus",
"model2predict": "train_predict",
"model2correct": "train_correct"
}
parser = main.get_parser()
subcommands = set([x.dest for x in parser._subparsers._actions[2]._choices_actions])
set_action2handler = set(action2handler)
        self.assertFalse(len(subcommands - set_action2handler),
                         "You forgot to add check(s) for subcommand(s) {} "
                         "to this test".format(subcommands - set_action2handler))
self.assertFalse(len(set_action2handler - subcommands),
"You cover unexpected subcommand(s) {}".format(
set_action2handler - subcommands))
called_actions = []
args_save = sys.argv
error_save = argparse.ArgumentParser.error
try:
argparse.ArgumentParser.error = lambda self, message: None
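            # Stubbing ArgumentParser.error keeps argparse from raising
            # SystemExit when a subcommand is invoked without its required
            # arguments during this smoke test.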
for action, handler in action2handler.items():
def handler_append(*args, **kwargs):
called_actions.append(action)
handler_save = getattr(main, handler)
try:
setattr(main, handler, handler_append)
sys.argv = [main.__file__, action]
main.main()
finally:
setattr(main, handler, handler_save)
finally:
sys.argv = args_save
argparse.ArgumentParser.error = error_save
set_called_actions = set(called_actions)
set_actions = set(action2handler)
self.assertEqual(set_called_actions, set_actions)
self.assertEqual(len(set_called_actions), len(called_actions))
if __name__ == "__main__":
unittest.main()
| apache-2.0 | -5,797,258,398,379,277,000 | 35.122807 | 92 | 0.559495 | false |
sarthfrey/Texty | lib/tests/ip_messaging/test_channels.py | 1 | 1934 | import unittest
from mock import patch, Mock
from twilio.rest.resources.ip_messaging import Channels, Channel
from tests.tools import create_mock_json
BASE_URI = "https://ip-messaging.twilio.com/v1/Services/ISxxx"
ACCOUNT_SID = "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
AUTH = (ACCOUNT_SID, "token")
CHANNEL_SID = "CHaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
list_resource = Channels(BASE_URI, AUTH)
class ChannelTest(unittest.TestCase):
@patch("twilio.rest.resources.base.make_twilio_request")
def test_create_channel(self, mock):
resp = create_mock_json("tests/resources/ip_messaging/channel_instance.json")
resp.status_code = 201
mock.return_value = resp
uri = "%s/Channels" % (BASE_URI)
list_resource.create(friendly_name='TestChannel', unique_name='Unique')
exp_params = {
'FriendlyName': "TestChannel",
'UniqueName': 'Unique'
}
mock.assert_called_with("POST", uri, data=exp_params, auth=AUTH,
use_json_extension=False)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_get(self, mock):
resp = create_mock_json("tests/resources/ip_messaging/channel_instance.json")
mock.return_value = resp
uri = "%s/Channels/%s" % (BASE_URI, CHANNEL_SID)
list_resource.get(CHANNEL_SID)
mock.assert_called_with("GET", uri, auth=AUTH,
use_json_extension=False)
@patch("twilio.rest.resources.base.Resource.request")
def test_delete(self, req):
""" Deleting a call should work """
resp = Mock()
resp.content = ""
resp.status_code = 204
req.return_value = resp, {}
app = Channel(list_resource, "CH123")
app.delete()
uri = "%s/Channels/CH123" % (BASE_URI)
req.assert_called_with("DELETE", uri)
| apache-2.0 | 7,620,991,426,081,053,000 | 33.814815 | 85 | 0.614788 | false |
bvasilis/contextual-cnn-tf | text_cnn.py | 1 | 5498 | import tensorflow as tf
class TextCNN(object):
"""
A CNN for text classification.
Uses an embedding layer, followed by a convolutional, kmax-pooling, convolutional, maxpooling and softmax layer.
"""
def __init__(
self, sequence_length, num_classes, vocab_size,
embedding_size, filter_sizes, num_filters, l2_reg_lambda=0.0):
# Placeholders for input, output and dropout
self.input_x = tf.placeholder(tf.int32, [None, sequence_length], name="input_x")
self.input_y = tf.placeholder(tf.float32, [None, num_classes], name="input_y")
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
# Variables from paper
assumed_value_K = 15
num_filters_2 = num_filters
filter_size_2 = 4
# Keeping track of l2 regularization loss (optional)
l2_loss = tf.constant(l2_reg_lambda)
# Embedding layer
with tf.device('/gpu:0'), tf.name_scope("embedding"):
W_emb = tf.Variable(
tf.random_uniform([vocab_size, embedding_size], -1.0, 1.0),
name="W_emb")
self.embedded_chars = tf.nn.embedding_lookup(W_emb, self.input_x)
self.embedded_chars_expanded = tf.expand_dims(self.embedded_chars, -1)
# Create a convolution + kmaxpool layer for each filter size
pooled_outputs = []
for i, filter_size in enumerate(filter_sizes):
with tf.name_scope("conv-kmaxpool-%s" % filter_size):
# Convolution Layer
filter_shape = [filter_size, embedding_size, 1, num_filters]
W1 = tf.Variable(tf.truncated_normal(filter_shape, stddev=0.1), name="W1")
b1 = tf.Variable(tf.constant(0.1, shape=[num_filters]), name="b1")
conv = tf.nn.conv2d(
self.embedded_chars_expanded,
W1,
strides=[1, 1, 1, 1],
padding="VALID",
name="conv")
# Apply nonlinearity
h = tf.nn.relu(tf.nn.bias_add(conv, b1), name="relu")
                # The main addition to the original implementation is k-max pooling.
                # Ideally this would be a custom op for efficiency.
t = tf.transpose(h, perm=[0, 3, 2, 1])
d = tf.shape(t)[-1]
_, indices = tf.nn.top_k(t, k=assumed_value_K, sorted=False, name="k_max")
                # Create a one-hot boolean tensor
one_hot = tf.one_hot(indices, d, on_value=True, off_value=False, dtype=tf.bool)
                # Reduce it to the shape of t
temporary = tf.reduce_sum(tf.cast(one_hot, tf.float32), 3)
result_flat = tf.boolean_mask(t, tf.not_equal(tf.cast(temporary, tf.bool), False))
result = tf.reshape(result_flat, tf.shape(indices))
kmax_pooled = tf.transpose(result, perm=[0, 3, 2, 1])
pooled_outputs.append(kmax_pooled)
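                # Editorial note on the block above: top_k(sorted=False) gives
                # no ordering guarantee, so a boolean membership mask is built
                # (one_hot + reduce_sum) and boolean_mask re-reads the k
                # strongest activations while walking t in its natural order,
                # keeping the selected features in their original temporal
                # order for the second convolution.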
# Combine all the pooled features
self.h_pool = tf.concat(pooled_outputs, 3)
# Add dropout
with tf.name_scope("dropout1"):
self.h1_drop = tf.nn.dropout(self.h_pool, self.dropout_keep_prob)
# Convolutional layer capturing sequential features
with tf.name_scope("conv-maxpool"):
num_filters_total = num_filters * len(filter_sizes)
filter_shape = [filter_size_2, 1, num_filters_total, num_filters_2]
W2 = tf.Variable(tf.truncated_normal(filter_shape, stddev=0.1), name='W2')
b2 = tf.Variable(tf.constant(0.1, shape=[num_filters_2]), name="b2")
conv2 = tf.nn.conv2d(
self.h1_drop,
W2,
strides=[1, 1, 1, 1],
padding='VALID',
name='conv2')
self.h2 = tf.nn.relu(tf.nn.bias_add(conv2, b2), name="relu")
max_pool = tf.nn.max_pool(
self.h2,
ksize=[1, assumed_value_K - filter_size_2 + 1, 1, 1],
strides=[1, 1, 1, 1],
padding='VALID',
name='max-pool')
# Add dropout
with tf.name_scope("dropout2"):
self.h2_drop = tf.nn.dropout(max_pool, self.dropout_keep_prob)
# Add last layer
self.h2_flat = tf.reshape(self.h2_drop, [-1, num_filters_2])
# Final (unnormalized) scores and predictions
with tf.name_scope("output"):
W = tf.get_variable(
"W",
shape=[num_filters_2, num_classes], # to fix
initializer=tf.contrib.layers.xavier_initializer())
b = tf.Variable(tf.constant(0.1, shape=[num_classes]), name="b1")
l2_loss += tf.nn.l2_loss(W_emb)
l2_loss += tf.nn.l2_loss(b)
self.scores = tf.nn.xw_plus_b(self.h2_flat, W, b, name="scores")
self.predictions = tf.argmax(self.scores, 1, name="predictions")
# CalculateMean cross-entropy loss
with tf.name_scope("loss"):
losses = tf.nn.softmax_cross_entropy_with_logits(logits=self.scores, labels=self.input_y)
self.loss = tf.reduce_mean(losses) + l2_reg_lambda * l2_loss
# Accuracy
with tf.name_scope("accuracy"):
correct_predictions = tf.equal(self.predictions, tf.argmax(self.input_y, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
| gpl-3.0 | 5,591,796,612,416,837,000 | 45.201681 | 116 | 0.557112 | false |
tensorflow/autograph | reference_tests/while_loop_function_call_mix_test.py | 1 | 1861 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""While loops mixed with function calls."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import reference_test_base
import tensorflow.compat.v1 as tf
def basic_fn(x):
return x * 2
def function_call_inside_cond(n):
i = 0
s = 0
while i < basic_fn(n):
s += i
i += 1
return s
def function_call_inside_body(n):
i = 0
s = 0
while i < n:
s += basic_fn(i)
i += 1
return s
def print_inside_body(n):
i = 0
s = 0
while i < n:
s += i
print(s)
i += 1
return s
class ReferenceTest(reference_test_base.TestCase):
"""Base class for the reference tests."""
def setUp(self):
super(ReferenceTest, self).setUp()
self.convert = reference_test_base.tf_function_custom(
tf.autograph.experimental.Feature.all_but(
tf.autograph.experimental.Feature.AUTO_CONTROL_DEPS))
def test_basic(self):
self.assertNativeMatchesCompiled(function_call_inside_cond, 3)
self.assertNativeMatchesCompiled(function_call_inside_body, 3)
self.assertNativeMatchesCompiled(print_inside_body, 3)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | 200,832,401,768,934,300 | 24.493151 | 80 | 0.663622 | false |
KoffeinFlummi/pymoji | test.py | 1 | 1157 | #!/usr/bin/env python3
import sys
import pymoji
exitcode = 0
# This fails for every emoji not supported by program/font/OS
for e, a in pymoji.codes.items():
print("# Testing: {} ( {} ) => {}".format(e.ljust(5), pymoji.Emoji(e).char, a).ljust(60), end="")
if not pymoji.Emoji(e).is_supported:
print(" ... not supported")
continue
try:
assert(pymoji.Emoji(e).aliases == a)
assert(pymoji.Emoji(e).alias == a[0])
assert(pymoji.Emoji(e).char == bytes("\\u"+e, "ascii").decode("unicode-escape"))
assert(pymoji.Emoji(pymoji.Emoji(e).char).escape == e)
except:
exitcode += 1
print(" ... FAILED")
else:
print(" ... done")
print("")
print("# Testing replacement functions", end="")
try:
text = "I :heart: Python :bangbang:"
textnew = pymoji.replaceAliases(text)
assert(pymoji.replaceEmoji(textnew) == text)
textnew = pymoji.replaceAliases(text, 1)
assert(pymoji.replaceEmoji(textnew, 1) == text)
except:
exitcode += 1
print(" ... FAILED")
raise
else:
print(" ... done")
print("")
if exitcode == 0:
print("No failed tests.")
else:
print("{} failed test(s).".format(exitcode))
sys.exit(exitcode)
| mit | 6,177,820,783,708,286,000 | 22.612245 | 99 | 0.634399 | false |
boundlessgeo/qgis-geogig-plugin | geogig/gui/dialogs/configdialog.py | 1 | 6170 | # -*- coding: utf-8 -*-
#
# (c) 2016 Boundless, http://boundlessgeo.com
# This code is licensed under the GPL 2.0 license.
#
import os
from PyQt4 import QtGui, QtCore
from qgis.gui import QgsFilterLineEdit
from geogig import config
class ConfigDialog(QtGui.QDialog):
versioIcon = QtGui.QIcon(os.path.dirname(__file__) + "/../../ui/resources/geogig-16.png")
def __init__(self):
QtGui.QDialog.__init__(self)
self.setupUi()
if hasattr(self.searchBox, 'setPlaceholderText'):
self.searchBox.setPlaceholderText(self.tr("Search..."))
self.searchBox.textChanged.connect(self.filterTree)
self.fillTree()
def setupUi(self):
self.resize(640, 450)
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setSpacing(2)
self.verticalLayout.setMargin(0)
self.searchBox = QgsFilterLineEdit(self)
self.verticalLayout.addWidget(self.searchBox)
self.tree = QtGui.QTreeWidget(self)
self.tree.setAlternatingRowColors(True)
self.verticalLayout.addWidget(self.tree)
self.buttonBox = QtGui.QDialogButtonBox(self)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel | QtGui.QDialogButtonBox.Ok)
self.verticalLayout.addWidget(self.buttonBox)
self.setWindowTitle("Configuration options")
self.searchBox.setToolTip("Enter setting name to filter list")
self.tree.headerItem().setText(0, "Setting")
self.tree.headerItem().setText(1, "Value")
self.buttonBox.accepted.connect(self.accept)
self.buttonBox.rejected.connect(self.reject)
self.setLayout(self.verticalLayout)
def filterTree(self):
text = unicode(self.searchBox.text())
for i in range(self.tree.topLevelItemCount()):
item = self.tree.topLevelItem(i)
visible = False
for j in range(item.childCount()):
subitem = item.child(j)
itemText = subitem.text(0)
if (text.strip() == ""):
subitem.setHidden(False)
visible = True
else:
hidden = text not in itemText
                    subitem.setHidden(hidden)  # was item.setHidden(hidden), which hid the group instead of the child
visible = visible or not hidden
item.setHidden(not visible)
item.setExpanded(visible and text.strip() != "")
def fillTree(self):
self.items = {}
self.tree.clear()
generalItem = self._getItem(config.GENERAL, self.versioIcon, config.generalParams)
self.tree.addTopLevelItem(generalItem)
self.tree.setColumnWidth(0, 400)
def _getItem(self, name, icon, params):
item = QtGui.QTreeWidgetItem()
item.setText(0, name)
item.setIcon(0, icon)
for param in params:
paramName = "/GeoGig/Settings/" + name + "/" + param[0]
subItem = TreeSettingItem(self.tree, item, paramName, *param[1:])
item.addChild(subItem)
return item
def accept(self):
iterator = QtGui.QTreeWidgetItemIterator(self.tree)
value = iterator.value()
while value:
if hasattr(value, 'checkValue'):
if value.checkValue():
value.setBackgroundColor(1, QtCore.Qt.white)
else:
value.setBackgroundColor(1, QtCore.Qt.yellow)
return
iterator += 1
value = iterator.value()
iterator = QtGui.QTreeWidgetItemIterator(self.tree)
value = iterator.value()
while value:
if hasattr(value, 'saveValue'):
value.saveValue()
iterator += 1
value = iterator.value()
QtGui.QDialog.accept(self)
class TreeSettingItem(QtGui.QTreeWidgetItem):
def __init__(self, tree, parent, name, description, defaultValue, paramType, check):
QtGui.QTreeWidgetItem.__init__(self, parent)
self.parent = parent
self.name = name
self.check = check
self.paramType = paramType
self.setText(0, description)
self.tree = tree
if paramType == config.TYPE_FOLDER:
self.value = QtCore.QSettings().value(name, defaultValue = defaultValue)
layout = QtGui.QHBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
self.lineEdit = QtGui.QLineEdit()
self.lineEdit.setText(self.value)
self.label = QtGui.QLabel()
self.label.setText("<a href='#'> Browse</a>")
self.lineEdit.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
layout.addWidget(self.lineEdit)
layout.addWidget(self.label)
def edit():
folder = QtGui.QFileDialog.getExistingDirectory(tree, description, self.value)
if folder:
self.lineEdit.setText(folder)
self.label.linkActivated.connect(edit)
w = QtGui.QWidget()
w.setLayout(layout)
self.tree.setItemWidget(self, 1, w)
elif isinstance(defaultValue, bool):
self.value = QtCore.QSettings().value(name, defaultValue = defaultValue, type = bool)
if self.value:
self.setCheckState(1, QtCore.Qt.Checked)
else:
self.setCheckState(1, QtCore.Qt.Unchecked)
else:
self.value = QtCore.QSettings().value(name, defaultValue = defaultValue)
self.setFlags(self.flags() | QtCore.Qt.ItemIsEditable)
self.setText(1, unicode(self.value))
def getValue(self):
if self.paramType == config.TYPE_FOLDER:
return self.lineEdit.text()
elif isinstance(self.value, bool):
return self.checkState(1) == QtCore.Qt.Checked
else:
return self.text(1)
def saveValue(self):
self.value = self.getValue()
QtCore.QSettings().setValue(self.name, self.value)
def checkValue(self):
try:
return self.check(self.getValue())
except:
return False
| gpl-2.0 | 1,236,078,800,478,527,500 | 36.393939 | 100 | 0.601945 | false |
AugurProject/augur-core | tests/test_controller.py | 1 | 3747 | #!/usr/bin/env python
from ethereum.tools import tester
from ethereum.tools.tester import TransactionFailed
from pytest import raises, fixture
from utils import AssertLog, longToHexString, bytesToHexString, stringToBytes, longTo32Bytes, garbageAddress, garbageBytes20, garbageBytes32, twentyZeros, thirtyTwoZeros
from struct import pack
def test_whitelists(localFixture, controller):
with raises(TransactionFailed): controller.addToWhitelist(tester.a1, sender = tester.k1)
with raises(TransactionFailed): controller.addToWhitelist(tester.a1, sender = tester.k2)
assert controller.addToWhitelist(tester.a1, sender = tester.k0)
assert controller.assertIsWhitelisted(tester.a1, sender = tester.k2)
with raises(TransactionFailed): controller.assertIsWhitelisted(tester.a2, sender = tester.k2)
with raises(TransactionFailed): controller.removeFromWhitelist(tester.a1, sender = tester.k2)
assert controller.removeFromWhitelist(tester.a1, sender = tester.k0)
with raises(TransactionFailed): controller.assertIsWhitelisted(tester.a1, sender = tester.k0)
def test_registry(localFixture, controller):
key1 = 'abc'.ljust(32, '\x00')
key2 = 'foo'.ljust(32, '\x00')
with raises(TransactionFailed): controller.registerContract(key1, 123, garbageBytes20, garbageBytes32, sender = tester.k2)
assert controller.lookup(key1, sender = tester.k2) == longToHexString(0)
assert controller.addToWhitelist(tester.a1, sender = tester.k0)
assert controller.registerContract(key1, 123, garbageBytes20, garbageBytes32, sender = tester.k0)
assert controller.lookup(key1, sender = tester.k2) == longToHexString(123)
# We can't re-upload a contract under the same registry key
with raises(TransactionFailed): controller.registerContract(key1, 123, garbageBytes20, garbageBytes32, sender = tester.k0)
def test_transferOwnership(controller):
with raises(TransactionFailed): controller.transferOwnership(tester.a1, sender = tester.k2)
assert controller.transferOwnership(tester.a1, sender = tester.k0)
assert controller.owner() == bytesToHexString(tester.a1)
def test_emergencyStop(controller):
with raises(TransactionFailed): controller.emergencyStop(sender = tester.k2)
assert controller.stopInEmergency(sender = tester.k2)
with raises(TransactionFailed): controller.onlyInEmergency(sender = tester.k2)
assert controller.emergencyStop(sender = tester.k0)
assert controller.onlyInEmergency(sender = tester.k2)
with raises(TransactionFailed): controller.stopInEmergency(sender = tester.k2)
def test_getContractDetails(controller):
key = stringToBytes('lookup key')
address = garbageAddress
commitHash = garbageBytes20
fileHash = garbageBytes32
assert controller.getContractDetails(key, sender = tester.k2) == [ longToHexString(0), twentyZeros, thirtyTwoZeros ]
assert controller.registerContract(key, address, commitHash, fileHash, sender = tester.k0)
assert controller.getContractDetails(key, sender = tester.k2) == [ address, commitHash, fileHash ]
@fixture(scope='session')
def localSnapshot(fixture, baseSnapshot):
fixture.resetToSnapshot(baseSnapshot)
controller = fixture.upload('../source/contracts/Controller.sol')
assert fixture.contracts['Controller'].owner() == bytesToHexString(tester.a0)
fixture.upload('solidity_test_helpers/ControllerUser.sol')
fixture.uploadAugur()
return fixture.createSnapshot()
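# The session-scoped snapshot above is built once (Controller plus test
# helpers uploaded); localFixture restores it before each test instead of
# re-uploading the contracts.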
@fixture
def localFixture(fixture, localSnapshot):
fixture.resetToSnapshot(localSnapshot)
return fixture
@fixture
def controller(localFixture):
return localFixture.contracts['Controller']
@fixture
def controllerUser(localFixture):
return localFixture.contracts['ControllerUser']
| gpl-3.0 | -4,410,435,504,343,010,000 | 47.038462 | 169 | 0.776088 | false |
Br3nda/witmproxy | libmproxy/console/help.py | 1 | 6497 | # Copyright (C) 2012 Aldo Cortesi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import urwid
import common
from .. import filt, version
footer = [
("heading", 'mitmproxy v%s '%version.VERSION),
('heading_key', "q"), ":back ",
]
class HelpView(urwid.ListBox):
def __init__(self, master, help_context, state):
self.master, self.state = master, state
self.help_context = help_context or []
urwid.ListBox.__init__(
self,
self.helptext()
)
def helptext(self):
text = []
text.append(urwid.Text([("head", "Keys for this view:\n")]))
text.extend(self.help_context)
text.append(urwid.Text([("head", "\n\nMovement:\n")]))
keys = [
("j, k", "up, down"),
("h, l", "left, right (in some contexts)"),
("space", "page down"),
("pg up/down", "page up/down"),
("arrows", "up, down, left, right"),
]
text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
text.append(urwid.Text([("head", "\n\nGlobal keys:\n")]))
keys = [
("c", "client replay"),
("i", "set interception pattern"),
("M", "change global default display mode"),
(None,
common.highlight_key("automatic", "a") +
[("text", ": automatic detection")]
),
(None,
common.highlight_key("hex", "h") +
[("text", ": Hex")]
),
(None,
common.highlight_key("image", "i") +
[("text", ": Image")]
),
(None,
common.highlight_key("javascript", "j") +
[("text", ": JavaScript")]
),
(None,
common.highlight_key("json", "s") +
[("text", ": JSON")]
),
(None,
common.highlight_key("urlencoded", "u") +
[("text", ": URL-encoded data")]
),
(None,
common.highlight_key("raw", "r") +
[("text", ": raw data")]
),
(None,
common.highlight_key("xml", "x") +
[("text", ": XML")]
),
("o", "toggle options:"),
(None,
common.highlight_key("anticache", "a") +
[("text", ": prevent cached responses")]
),
(None,
common.highlight_key("anticomp", "c") +
[("text", ": prevent compressed responses")]
),
(None,
common.highlight_key("killextra", "k") +
[("text", ": kill requests not part of server replay")]
),
(None,
common.highlight_key("norefresh", "n") +
[("text", ": disable server replay response refresh")]
),
(None,
common.highlight_key("upstream certs", "u") +
[("text", ": sniff cert info from upstream server")]
),
("q", "quit / return to flow list"),
("Q", "quit without confirm prompt"),
("P", "set reverse proxy mode"),
("R", "edit replacement patterns"),
("s", "set/unset script"),
("S", "server replay"),
("t", "set sticky cookie expression"),
("u", "set sticky auth expression"),
]
text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
text.append(urwid.Text([("head", "\n\nFilter expressions:\n")]))
f = []
for i in filt.filt_unary:
f.append(
("~%s"%i.code, i.help)
)
for i in filt.filt_rex:
f.append(
("~%s regex"%i.code, i.help)
)
for i in filt.filt_int:
f.append(
("~%s int"%i.code, i.help)
)
f.sort()
f.extend(
[
("!", "unary not"),
("&", "and"),
("|", "or"),
("(...)", "grouping"),
]
)
text.extend(common.format_keyvals(f, key="key", val="text", indent=4))
text.append(
urwid.Text(
[
"\n",
("text", " Regexes are Python-style.\n"),
("text", " Regexes can be specified as quoted strings.\n"),
("text", " Header matching (~h, ~hq, ~hs) is against a string of the form \"name: value\".\n"),
("text", " Expressions with no operators are regex matches against URL.\n"),
("text", " Default binary operator is &.\n"),
("head", "\n Examples:\n"),
]
)
)
examples = [
("google\.com", "Url containing \"google.com"),
("~q ~b test", "Requests where body contains \"test\""),
("!(~q & ~t \"text/html\")", "Anything but requests with a text/html content type."),
]
text.extend(common.format_keyvals(examples, key="key", val="text", indent=4))
return text
def keypress(self, size, key):
key = common.shortcuts(key)
if key == "q":
self.master.statusbar = self.state[0]
self.master.body = self.state[1]
self.master.header = self.state[2]
self.master.make_view()
return None
elif key == "?":
key = None
return urwid.ListBox.keypress(self, size, key)
| gpl-3.0 | 8,788,021,373,939,527,000 | 35.706215 | 118 | 0.446206 | false |
EthanChappel/Solar-System-Sequencer | solarsystemsequencer/equipment/ascom.py | 1 | 4458 | import os
import json
from typing import List, Union
import numpy as np
import appglobals
import clr
clr.AddReference("lib/ASCOM.DriverAccess")
clr.AddReference("lib/ASCOM.Utilities")
import ASCOM.DriverAccess
import ASCOM.Utilities
class Device:
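    """Base wrapper around an ASCOM driver selected with the ASCOM Chooser dialog."""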
def __init__(self, device):
self.device = None
self.choose_dialog = ASCOM.Utilities.Chooser()
self.choose_dialog.DeviceType = device
self.choose = self.choose_dialog.Choose()
def name_(self) -> str:
return self.device.Name
def connect(self):
self.device.Connected = True
def connected(self) -> bool:
return self.device.Connected
def disconnect(self):
self.device.Connected = False
def dispose(self):
self.device.Dispose()
del self.device
def setup_dialog(self):
self.device.SetupDialog()
class Telescope(Device):
def __init__(self):
super().__init__("Telescope")
self.device = ASCOM.DriverAccess.Telescope(self.choose)
def can_slew_eq(self) -> bool:
return self.device.CanSlew
def can_slew_alt_az(self) -> bool:
return self.device.CanSlewAltAz
def home(self):
self.device.Unpark()
self.device.FindHome()
def park(self):
self.device.Park()
def can_slew(self) -> bool:
return self.device.CanSlew
def stop_tracking(self):
self.device.Tracking = False
def goto(self, ra, dec):
self.device.Tracking = True
self.device.SlewToCoordinates(ra, dec)
def move_axis(self, axis, rate):
self.device.Tracking = True
self.device.MoveAxis(axis, rate)
class Camera(Device):
def __init__(self):
super().__init__("Camera")
self.device = ASCOM.DriverAccess.Camera(self.choose)
self.connect()
def gain_min(self) -> int:
return self.device.GainMin
def gain_max(self) -> int:
return self.device.GainMax
def gain(self) -> int:
return self.device.Gain
def exposure_min(self) -> float:
return self.device.ExposureMin
def exposure_max(self) -> float:
return self.device.ExposureMax
def num_x(self) -> int:
return self.device.NumX
def num_y(self) -> int:
return self.device.NumY
def image_ready(self) -> bool:
return self.device.ImageReady
def capture(self, exposure: float, light: bool) -> np.ndarray:
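        """Expose for `exposure` seconds (light frame if `light` is True) and
        return the image as a 2-D uint8 array, rotated and flipped to match
        the display orientation."""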
self.device.StartExposure(exposure, light)
while not self.device.ImageReady:
pass
image = self.device.ImageArray
width, height = self.device.NumX, self.device.NumY
image = np.asarray(list(image), dtype=np.uint8).reshape(width, height) # list(image) is slow
image = np.rot90(image, 1)
image = np.flipud(image)
return image
def stop_exposure(self):
self.device.StopExposure()
def percent_completed(self) -> int:
return self.device.PercentCompleted
def image_array(self) -> List[int]:
image = list(self.device.ImageArray)
return image
class FilterWheel(Device):
def __init__(self):
super().__init__("FilterWheel")
self.device = ASCOM.DriverAccess.FilterWheel(self.choose)
self.connect()
def wheel_position(self, pos: int):
self.device.Position = pos
def rotate_wheel(self, text: Union[str, int]):
try:
self.device.Position = text
except Exception:
for f in appglobals.filters:
if f["Name"] == text:
wheel_pos = f["Wheel Position"]
try:
self.device.Position = wheel_pos
break
except Exception:
pass
class Focuser(Device):
def __init__(self):
super().__init__("Focuser")
self.device = ASCOM.DriverAccess.Focuser(self.choose)
self.connect()
def move(self, pos: int):
self.device.Move(pos)
def position(self) -> int:
return self.device.Position
def absolute(self) -> bool:
return self.device.Absolute
def max_step(self) -> int:
return self.device.MaxStep
def is_temp_comp(self) -> bool:
return self.device.TempComp
def temp_comp_available(self) -> bool:
return self.device.TempCompAvailable
def temp_comp(self, val: bool):
self.device.TempComp = val
| gpl-3.0 | -4,064,876,173,912,140,000 | 24.768786 | 101 | 0.603185 | false |
etalab/udata | udata/core/activity/models.py | 2 | 2070 | from datetime import datetime
from blinker import Signal
from mongoengine.signals import post_save
from udata.models import db
from udata.auth import current_user
from .signals import new_activity
__all__ = ('Activity', )
_registered_activities = {}
class EmitNewActivityMetaClass(db.BaseDocumentMetaclass):
'''Ensure any child class dispatches the on_new signal'''
def __new__(cls, name, bases, attrs):
new_class = super(EmitNewActivityMetaClass, cls).__new__(
cls, name, bases, attrs)
if new_class.key:
post_save.connect(cls.post_save, sender=new_class)
_registered_activities[new_class.key] = new_class
return new_class
@classmethod
def post_save(cls, sender, document, **kwargs):
sender.on_new.send(sender, activity=document)
class Activity(db.Document, metaclass=EmitNewActivityMetaClass):
'''Store the activity entries for a single related object'''
actor = db.ReferenceField('User', required=True)
organization = db.ReferenceField('Organization')
related_to = db.ReferenceField(db.DomainModel, required=True)
created_at = db.DateTimeField(default=datetime.now, required=True)
kwargs = db.DictField()
on_new = Signal()
meta = {
'indexes': [
'actor',
'organization',
'related_to',
'-created_at',
('actor', '-created_at'),
('organization', '-created_at'),
('related_to', '-created_at'),
],
'allow_inheritance': True,
}
key = None
label = None
badge_type = 'primary'
icon = 'fa fa-info-circle'
template = 'activity/base.html'
@classmethod
def connect(cls, func):
return cls.on_new.connect(func, sender=cls)
@classmethod
def emit(cls, related_to, organization=None, **kwargs):
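        '''Dispatch the new_activity signal on behalf of the current user.'''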
new_activity.send(cls,
related_to=related_to,
actor=current_user._get_current_object(),
organization=organization)
| agpl-3.0 | -777,017,478,074,924,400 | 27.75 | 70 | 0.614976 | false |
navcoindev/navcoin-core | qa/rpc-tests/getstakereport.py | 1 | 11101 | #!/usr/bin/env python3
# Copyright (c) 2018 The Navcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import NavCoinTestFramework
from test_framework.staticr_util import *
import time
class GetStakeReport(NavCoinTestFramework):
"""Tests getstakereport accounting."""
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self, split=False):
self.nodes = self.setup_nodes()
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 2)
connect_nodes(self.nodes[2], 0)
self.is_network_split = False
def run_test(self):
# Turn off staking until we need it
self.nodes[0].staking(False)
self.nodes[1].staking(False)
self.nodes[2].staking(False)
# Make it to the static rewards fork!
activate_staticr(self.nodes[0])
self.sync_all()
# Use THE spending address
spending_address_public_key = self.nodes[1].getnewaddress()
spending_address_private_key = self.nodes[1].dumpprivkey(spending_address_public_key)
# Create a staking address
staking_address_public_key = self.nodes[2].getnewaddress()
staking_address_private_key = self.nodes[2].dumpprivkey(staking_address_public_key)
# Import the 2 keys into a third wallet
self.nodes[0].importprivkey(spending_address_private_key)
self.nodes[0].importprivkey(staking_address_private_key)
# Create the cold address
coldstaking_address_staking = self.nodes[1].getcoldstakingaddress(staking_address_public_key, spending_address_public_key)
# Send funds to the spending address (leave me NAV for fees)
self.nodes[0].sendtoaddress(spending_address_public_key, self.nodes[0].getbalance() - 1)
self.nodes[0].generate(1)
self.sync_all()
# Stake a block
self.stake_block(self.nodes[1])
# Load the last 24h stake amount for the wallets/nodes
merged_address_last_24h = self.nodes[0].getstakereport()['Last 24H']
spending_address_last_24h = self.nodes[1].getstakereport()['Last 24H']
staking_address_last_24h = self.nodes[2].getstakereport()['Last 24H']
# print('spending', spending_address_last_24h)
# print('staking', staking_address_last_24h)
# print('merged', merged_address_last_24h)
# Make sure we have staked 2 NAV to the spending address
# So that means spending last 24h == 2
# And staking last 24h == 0 We have not sent any coins yet
# And merged will have the total of the spending + staking
assert_equal('2.00', merged_address_last_24h)
assert_equal('2.00', spending_address_last_24h)
assert_equal('0.00', staking_address_last_24h)
# Send funds to the cold staking address (leave some NAV for fees)
self.nodes[1].sendtoaddress(coldstaking_address_staking, self.nodes[1].getbalance() - 1)
self.nodes[1].generate(1)
self.sync_all()
# Stake a block
self.stake_block(self.nodes[2])
# Load the last 24h stake amount for the wallets/nodes
merged_address_last_24h = self.nodes[0].getstakereport()['Last 24H']
spending_address_last_24h = self.nodes[1].getstakereport()['Last 24H']
staking_address_last_24h = self.nodes[2].getstakereport()['Last 24H']
# print('spending', spending_address_last_24h)
# print('staking', staking_address_last_24h)
# print('merged', merged_address_last_24h)
# Make sure we staked 4 NAV in spending address (2 NAV via COLD Stake)
# So that means spending last 24h == 4
# And staking last 24h == 2 We stake 2 NAV via COLD already
# And merged will have the total of the spending + staking
assert_equal('4.00', merged_address_last_24h)
assert_equal('4.00', spending_address_last_24h)
assert_equal('2.00', staking_address_last_24h)
# Time travel 2 days in the future
cur_time = int(time.time())
self.nodes[0].setmocktime(cur_time + 172800)
self.nodes[1].setmocktime(cur_time + 172800)
self.nodes[2].setmocktime(cur_time + 172800)
# Stake a block
self.stake_block(self.nodes[2])
# Load the last 24h stake amount for the wallets/nodes
merged_address_last_24h = self.nodes[0].getstakereport()['Last 24H']
spending_address_last_24h = self.nodes[1].getstakereport()['Last 24H']
staking_address_last_24h = self.nodes[2].getstakereport()['Last 24H']
# Check the amounts
assert_equal('2.00', merged_address_last_24h)
assert_equal('2.00', spending_address_last_24h)
assert_equal('2.00', staking_address_last_24h)
# Load the last 7 days stake amount for the wallets/nodes
merged_address_last_7d = self.nodes[0].getstakereport()['Last 7 Days']
spending_address_last_7d = self.nodes[1].getstakereport()['Last 7 Days']
staking_address_last_7d = self.nodes[2].getstakereport()['Last 7 Days']
# Check the amounts
assert_equal('6.00', merged_address_last_7d)
assert_equal('6.00', spending_address_last_7d)
assert_equal('4.00', staking_address_last_7d)
# Load the averages for stake amounts
avg_last7d = self.nodes[0].getstakereport()['Last 7 Days Avg']
avg_last30d = self.nodes[0].getstakereport()['Last 30 Days Avg']
avg_last365d = self.nodes[0].getstakereport()['Last 365 Days Avg']
# Check the amounts
assert_equal('3.00', avg_last7d)
assert_equal('3.00', avg_last30d)
assert_equal('3.00', avg_last365d)
# Time travel 8 days in the future
cur_time = int(time.time())
self.nodes[0].setmocktime(cur_time + 691200)
self.nodes[1].setmocktime(cur_time + 691200)
self.nodes[2].setmocktime(cur_time + 691200)
# Load the last 24h stake amount for the wallets/nodes
merged_address_last_24h = self.nodes[0].getstakereport()['Last 24H']
spending_address_last_24h = self.nodes[1].getstakereport()['Last 24H']
staking_address_last_24h = self.nodes[2].getstakereport()['Last 24H']
# Check the amounts
assert_equal('0.00', merged_address_last_24h)
assert_equal('0.00', spending_address_last_24h)
assert_equal('0.00', staking_address_last_24h)
# Load the last 7 days stake amount for the wallets/nodes
merged_address_last_7d = self.nodes[0].getstakereport()['Last 7 Days']
spending_address_last_7d = self.nodes[1].getstakereport()['Last 7 Days']
staking_address_last_7d = self.nodes[2].getstakereport()['Last 7 Days']
# Check the amounts
assert_equal('2.00', merged_address_last_7d)
assert_equal('2.00', spending_address_last_7d)
assert_equal('2.00', staking_address_last_7d)
# Load the averages for stake amounts
avg_last7d = self.nodes[0].getstakereport()['Last 7 Days Avg']
avg_last30d = self.nodes[0].getstakereport()['Last 30 Days Avg']
avg_last365d = self.nodes[0].getstakereport()['Last 365 Days Avg']
# Check the amounts
assert_equal('0.28571428', avg_last7d)
assert_equal('0.75', avg_last30d)
assert_equal('0.75', avg_last365d)
# Time travel 31 days in the future
cur_time = int(time.time())
self.nodes[0].setmocktime(cur_time + 2678400)
self.nodes[1].setmocktime(cur_time + 2678400)
self.nodes[2].setmocktime(cur_time + 2678400)
# Load the last 24h stake amount for the wallets/nodes
merged_address_last_24h = self.nodes[0].getstakereport()['Last 24H']
spending_address_last_24h = self.nodes[1].getstakereport()['Last 24H']
staking_address_last_24h = self.nodes[2].getstakereport()['Last 24H']
# Check the amounts
assert_equal('0.00', merged_address_last_24h)
assert_equal('0.00', spending_address_last_24h)
assert_equal('0.00', staking_address_last_24h)
# Load the last 7 days stake amount for the wallets/nodes
merged_address_last_7d = self.nodes[0].getstakereport()['Last 7 Days']
spending_address_last_7d = self.nodes[1].getstakereport()['Last 7 Days']
staking_address_last_7d = self.nodes[2].getstakereport()['Last 7 Days']
# Check the amounts
assert_equal('0.00', merged_address_last_7d)
assert_equal('0.00', spending_address_last_7d)
assert_equal('0.00', staking_address_last_7d)
# Load the averages for stake amounts
avg_last7d = self.nodes[0].getstakereport()['Last 7 Days Avg']
avg_last30d = self.nodes[0].getstakereport()['Last 30 Days Avg']
avg_last365d = self.nodes[0].getstakereport()['Last 365 Days Avg']
# Check the amounts
assert_equal('0.00', avg_last7d)
assert_equal('0.06666666', avg_last30d)
assert_equal('0.19354838', avg_last365d)
# Disconnect the nodes
for node in self.nodes[0].getpeerinfo():
self.nodes[0].disconnectnode(node['addr'])
time.sleep(2) #disconnecting a node needs a little bit of time
assert(self.nodes[0].getpeerinfo() == [])
# Stake a block on node 0
orphaned_block_hash = self.stake_block(self.nodes[0], False)
# Generate some blocks on node 1
self.nodes[1].generate(100)
# Reconnect the nodes
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 2)
connect_nodes(self.nodes[2], 0)
# Wait for blocks to sync
self.sync_all()
# Make sure the block was orphaned
assert(self.nodes[0].getblock(orphaned_block_hash)['confirmations'] == -1)
# Check the staked amount
# Should be 0 (Zero) as the last staked block is orphaned
assert_equal('0.00', self.nodes[0].getstakereport()['Last 7 Days'])
def stake_block(self, node, mature = True):
# Get the current block count to check against while we wait for a stake
blockcount = node.getblockcount()
# Turn staking on
node.staking(True)
# wait for a new block to be mined
while node.getblockcount() == blockcount:
# print("waiting for a new block...")
time.sleep(1)
# We got one
# print("found a new block...")
# Turn staking off
node.staking(False)
# Get the staked block
block_hash = node.getbestblockhash()
# Only mature the blocks if we asked for it
if (mature):
# Make sure the blocks are mature before we check the report
slow_gen(node, 5, 0.5)
self.sync_all()
# return the block hash to the function caller
return block_hash
if __name__ == '__main__':
GetStakeReport().main()
| mit | -693,122,512,786,400,400 | 40.267658 | 130 | 0.631745 | false |
skyler/snap | lib/util.py | 1 | 1435 | import os
import errno
import subprocess
def mkdir_p(path):
'''Recursively make all directories in a path'''
try:
os.makedirs(path)
except OSError as exc:
        if exc.errno == errno.EEXIST:
            pass
        else:
            raise
def dict_sorted(d):
'''Returns dict sorted by key as a list of tuples'''
return sorted(d.items(), key=lambda x: x[0])
def command_check_error(command,cwd=".",fail_on_stderr=True):
'''Perform a command, piping out both stdout and stderr.
An exception will be raised if command returns a nonzero exit code.
    An exception will also be raised if command writes anything to stderr.
This behavior can be disabled by passing False as the argument to
fail_on_stderr.
'''
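    # Example (hypothetical command and working directory):
    #   command_check_error(["git", "status"], cwd="/tmp/repo", fail_on_stderr=False)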
proc = subprocess.Popen(command, cwd=cwd,
stdout=None, # print to terminal
stderr=subprocess.PIPE)
dup = subprocess.Popen(["tee","/dev/stderr"], stdin=proc.stderr,
stdout=subprocess.PIPE, # catch errors from first
stderr=None) # also print them to terminal
errors = str(dup.stdout.read(),'utf8')
proc.communicate()
if proc.returncode != 0:
raise Exception("{0} returned exit code {1}".format(command,proc.returncode))
elif fail_on_stderr and errors:
raise Exception("There was error output running {0}:\n{1}".format(command,errors))
| mit | -2,426,133,641,585,020,000 | 36.763158 | 90 | 0.627875 | false |
ruipgpinheiro/flac_batch_reencode | reencode.py | 1 | 12245 | #!/usr/bin/python
########################################################################
####################### FLAC Batch Re-encode #########################
# A Python 2.7 script for batch parallel re-encoding many FLAC files. #
# This is useful to make sure that your whole FLAC library is using #
# the latest version of the FLAC encoder, with maximum compression. #
# Files can be skipped if the encoder matches a user-defined vendor #
# string (i.e., they were already encoded using the latest FLAC #
# encoder). #
# #
# Version 1.1 - 9 May 2016 #
# Author: Rui Pinheiro #
########################################################################
########################################################################
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>.#
########################################################################
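# Example invocation (hypothetical paths; vendor string as documented below):
#   python reencode.py -f /music/flac -p 4 -v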
import sys, getopt, logging, os, fnmatch, subprocess, time, multiprocessing
######################################
# Logging
def init_logging():
# Configure root logger
logging.root.setLevel(logging.INFO)
# create console handler
ch = logging.StreamHandler(sys.stdout)
ch_formatter = logging.Formatter('%(asctime)s [%(name)s:%(levelname)s] %(message)s')
ch.setFormatter(ch_formatter)
logging.root.addHandler(ch)
# Constants
DEFAULT_VENDOR_STRING = 'reference libFLAC 1.3.1 20141125' # for '--vendor'
METAFLAC_EXECUTABLE = './metaflac'
FLAC_EXECUTABLE = './flac'
# Debug constants
SILENT_FLAC = True
######################################
# Main Implementation
def usage(argv_0, exit_val):
print "FLAC Batch Reencode"
print "A Python script for batch re-encoding many *.flac files recursively. This is useful to make sure that your whole FLAC library is using the latest version of the FLAC encoder, with maximum compression. Files can be skipped if the encoder matches a user-defined vendor string (i.e., they were already encoded using the latest FLAC encoder).\n"
print "Usage: %s [-h] [-f <folder>] [-m <mask>] [-p <n_parallel>] [-v [--vendor-string <vendor>]] [--no-verify] [--flac <flac-path>] [--metaflac <metaflac-path>]" % argv_0
print "\t-h / --help : Show this help."
print "\t-f / --folder : Root folder path for recursive search (default: '.')."
print "\t-m / --mask : File mask (default: '*.flac')."
print "\t-p / --parallel : Maximum simultaneous encoder processes (default: max([CPU count]-1,1) = %d)." % max(multiprocessing.cpu_count()-1,1)
print "\t-v / --vendor : Skip file if vendor string matches '<vendor>' (requires metaflac)."
print "\t--vendor-string : Desired vendor string for '-v' (default: '%s')." % DEFAULT_VENDOR_STRING
print "\t--no-verify : Do not verify output for encoding errors before overwriting original files. Faster, but in rare cases could result in corrupt files."
print "\t--flac : Path to the 'flac' executable (default: 'flac')."
print "\t--metaflac : Path to the 'metaflac' executable (only required if using '-v', default: 'metaflac')."
sys.exit(exit_val)
def main(argv):
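    """Parse command-line options, build the file list, and start re-encoding."""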
init_logging()
# Parse opts
global root_folder, file_mask, check_vendor, verify_output, vendor_string, flac_path, metaflac_path, n_parallel
root_folder = '.'
file_mask = '*.flac'
check_vendor = False
verify_output = True
vendor_string = DEFAULT_VENDOR_STRING
flac_path = FLAC_EXECUTABLE
metaflac_path = METAFLAC_EXECUTABLE
n_parallel = max(multiprocessing.cpu_count()-1,1)
logging.debug('Argument List: %s', str(argv))
try:
opts, args = getopt.getopt(argv[1:],'hf:m:vp:',['help','folder=','mask=','vendor','vendor-string=','no-verify','flac=','metaflac=','parallel='])
except getopt.GetoptError:
usage(argv[0], 2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage(argv[0], 0)
elif opt in ("-f", "--folder"):
root_folder = arg
elif opt in ("-m", "--mask"):
file_mask = arg
elif opt in ("-v", "--vendor"):
check_vendor = True
elif opt == "--vendor-string":
vendor_string = arg
elif opt == "--no-verify":
verify_output = False
elif opt == "--flac":
flac_path = arg
elif opt == "--metaflac":
metaflac_path = arg
elif opt in ("-p", "--parallel"):
try:
n_parallel = int(arg)
except:
logging.critical("'%s <n_parallel>' must have a positive integer", opt)
sys.exit(-4)
if n_parallel <= 0:
logging.critical("'%s <n_parallel>' must have a positive integer", opt)
sys.exit(-4)
# Start main process
files = get_file_list(root_folder, file_mask)
if len(files) > 0:
reencode_files(files)
logging.info('Finished.')
def compare_vendor_string(path):
"""Compares the vendor string of a certain file with the desired vendor string.
Uses 'metaflac --show-vendor-tag'
Args:
path (str): Path of file to check.
Returns:
bool: True if vendor string matches, False otherwise.
"""
logger = logging.getLogger('compare_vendor_string')
logger.setLevel(logging.INFO)
logger.debug("Obtaining vendor string of file '%s'...", path)
cmd = [metaflac_path, '--show-vendor-tag', path]
cmd_out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
vendor = cmd_out.strip()
res = (vendor == vendor_string)
logger.debug("Vendor: '%s' %s", vendor, 'matches desired' if res else 'differs from desired')
return res
def get_file_list(root_folder, file_mask):
"""Recursively searches a folder for a specific file mask, and creates a list of all such files.
Args:
root_folder (str): Root folder for recursive search.
file_mask (str): File mask using linux patterns (ex: '*.flac').
Returns:
List[str]: Paths of all files inside 'folder' matching 'mask', with matching vendor string (if 'check_vendor' is true).
"""
logger = logging.getLogger('get_file_list')
logger.setLevel(logging.INFO)
out_files = []
logger.info("Searching '%s' recursively for files matching mask '%s'...", root_folder, file_mask)
if check_vendor:
logger.info("Will skip files that match vendor string '%s'.", vendor_string)
for root, dirs, files in os.walk(root_folder, followlinks=True):
logger.debug("Found file(s) in '%s': %s", root, str(files))
for name in files:
if fnmatch.fnmatch(name, file_mask):
path = os.path.join(root, name)
logger.debug("File '%s' matches mask", path)
if check_vendor and not compare_vendor_string(path):
logger.debug("Skipped '%s': Matches desired vendor string.", name)
continue
out_files.append(path)
logger.info("Found %d file(s).", len(out_files))
logger.debug("Found file(s): %s", str(out_files))
return out_files
def start_reencode_file(file):
"""Starts the re-encoding process for a file using 'flac <file> -V -s --force --best'
Args:
file (str): Path of file to re-encode.
Returns:
Tuple[str, Popen]: File name and corresponding Popen object
"""
logger = logging.getLogger('start_reencode_file')
logger.setLevel(logging.INFO)
cmd = [flac_path, file, '--force', '--best']
if verify_output:
cmd.append('-V')
if SILENT_FLAC:
cmd.append('-s')
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return (file, proc)
def finish_reencode_file(file, proc):
"""Finishes the re-encoding process for a file. To be exact, checks whether an error occurred.
Args:
file (str): Path of re-encoded file.
proc (Popen): Popen object of the re-encoding subprocess for 'file'.
Returns:
bool: Whether 'proc' terminated successfuly.
"""
logger = logging.getLogger('finish_reencode_file')
logger.setLevel(logging.INFO)
(cmd_out, cmd_err) = proc.communicate()
if proc.returncode != 0:
logger.critical("File '%s' exited with error code: %d\nSTDOUT:\n%s\nSTDERR: %s", file, proc.returncode, cmd_out, cmd_err)
return False
cmd_out = cmd_out.strip()
cmd_err = cmd_err.strip()
logger.debug("File '%s' STDOUT:\n%s\nSTDERR: %s", file, cmd_out, cmd_err)
if SILENT_FLAC and (cmd_out or cmd_err):
if "Compression failed (ratio" not in cmd_err:
logger.warning("File '%s' - output was not empty:\nSTDOUT: %s\nSTDERR: %s", file, cmd_out, cmd_err)
else:
logger.warning("File '%s': Could not compress further", file)
return True
def wait_for_terminate(procs):
"""Wait for processes in 'procs' to terminate, and if necessary removes the temporary files created by the processes.
Args:
procs (list[tuple[string, Popen]]): File names and corresponding Popen objects
"""
for (file, proc) in procs:
proc.wait()
tmp_filename = file + ".tmp,fl-ac+en'c"
if os.path.exists(tmp_filename):
os.remove(tmp_filename)
def check_proc_success(procs, proc_tuple):
"""Check if a finished process was successful, or exit the application with an error code.
Args:
procs (list[tuple[string, Popen]]): File names and corresponding Popen objects
proc_typle (tuple[string, Popen]): File name and Popen object to check (must be a member of 'procs')
"""
logger = logging.getLogger('check_proc_success')
logger.setLevel(logging.INFO)
(file, proc) = proc_tuple
success = finish_reencode_file(file, proc)
procs.remove(proc_tuple)
if not success:
# Ask user what they want to do
has_input = False
user_input = ""
while not has_input:
user_input = raw_input("Encoding failed. Do you wish to [r]etry, [s]kip or [a]bort? ").lower()
if len(user_input) != 1 or user_input not in ('r', 's', 'a'):
print "Invalid answer '%s'." % (user_input)
else:
has_input = True
# abort
if user_input == 'a':
wait_for_terminate(procs)
logger.critical("Exiting.")
sys.exit(-6)
# retry
elif user_input == 'r':
procs.append(start_reencode_file(file))
def reencode_files(files):
"""Re-encodes a list of files.
Args:
files (list[str]): List of file paths to re-encode.
"""
logger = logging.getLogger('reencode_files')
logger.setLevel(logging.INFO)
total = len(files)
total_len = len(str(total))
i = 0
procs = []
logger.info("Starting re-encode process using %d thread(s)...", n_parallel)
try:
for file in files:
i += 1
i_padded = str(i).rjust(total_len, ' ')
i_pct = float(i) / total * 100
rel_path = os.path.relpath(file, root_folder)
print "%s/%d (%d%%): Re-encoding '%s'..." % (i_padded, total, i_pct, rel_path)
procs.append(start_reencode_file(file))
if n_parallel == 1: # Avoid busy loop logic
cur_tuple = procs[0]
(file, proc) = cur_tuple
proc.wait()
check_proc_success(procs, cur_tuple)
else:
# Limit number of processes to n_parallel
# If limit is reached, wait until at least one finishes
while len(procs) >= n_parallel:
found = False
                    # iterate over a snapshot: check_proc_success mutates procs
                    for cur_tuple in list(procs):
                        (file, proc) = cur_tuple
                        returncode = proc.poll()
                        if returncode is not None:
check_proc_success(procs, cur_tuple)
found = True
if not found:
time.sleep(1)
except KeyboardInterrupt as e: # subprocesses also receive the signal
logger.critical("Keyboard Interrupt (Ctrl-C) detected. Waiting for encoder(s) to cancel...")
wait_for_terminate(procs)
logger.critical("Exiting.")
sys.exit(-3)
# Make sure all sub-processes exit before terminating
wait_for_terminate(procs)
if __name__ == "__main__":
main(sys.argv) | gpl-3.0 | 6,929,940,522,282,492,000 | 33.988571 | 349 | 0.628175 | false |
mir-group/flare | flare/kernels/three_body_mc_simple.py | 1 | 65505 | import numpy as np
from flare.kernels.kernels import (
force_helper,
force_energy_helper,
grad_helper,
three_body_fe_perm,
three_body_ee_perm,
three_body_se_perm,
three_body_ff_perm,
three_body_sf_perm,
three_body_ss_perm,
three_body_grad_perm,
grad_constants,
)
from numba import njit
from flare.env import AtomicEnvironment
from typing import Callable
import flare.kernels.cutoffs as cf
from math import exp
class ThreeBodyKernel:
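    """3-body multi-element kernel acting on pairs of local atomic
    environments. Each public method evaluates one block of the Gaussian
    process kernel matrix (energy/energy, force/force, stress/force, ...)
    by unpacking the two environments and dispatching to the
    Numba-compiled helpers below.

    Args:
        hyperparameters (np.ndarray): [signal variance, length scale].
        cutoff (float): 3-body cutoff radius.
        cutoff_func (Callable): Cutoff function (defaults to the quadratic
            cutoff in flare.kernels.cutoffs).

    Usage sketch (values illustrative; env1/env2 are AtomicEnvironment
    objects built elsewhere):

        kernel = ThreeBodyKernel(np.array([0.1, 1.0]), 4.0)
        k_ff = kernel.force_force(env1, env2)  # 3x3 force/force block
    """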
def __init__(
self,
hyperparameters: "ndarray",
cutoff: float,
cutoff_func: Callable = cf.quadratic_cutoff,
):
self.hyperparameters = hyperparameters
self.signal_variance = hyperparameters[0]
self.length_scale = hyperparameters[1]
self.cutoff = cutoff
self.cutoff_func = cutoff_func
def energy_energy(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return energy_energy(*args)
def force_energy(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return force_energy(*args)
def stress_energy(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return stress_energy(*args)
def force_force(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return force_force(*args)
def stress_force(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return stress_force(*args)
def stress_stress(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return stress_stress(*args)
def force_force_gradient(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return force_force_gradient(*args)
def efs_energy(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return efs_energy(*args)
def efs_force(self, env1: AtomicEnvironment, env2: AtomicEnvironment):
args = self.get_args(env1, env2)
return efs_force(*args)
def efs_self(self, env1: AtomicEnvironment):
return efs_self(
env1.bond_array_3,
env1.ctype,
env1.etypes,
env1.cross_bond_inds,
env1.cross_bond_dists,
env1.triplet_counts,
self.signal_variance,
self.length_scale,
self.cutoff,
self.cutoff_func,
)
def get_args(self, env1, env2):
return (
env1.bond_array_3,
env1.ctype,
env1.etypes,
env2.bond_array_3,
env2.ctype,
env2.etypes,
env1.cross_bond_inds,
env2.cross_bond_inds,
env1.cross_bond_dists,
env2.cross_bond_dists,
env1.triplet_counts,
env2.triplet_counts,
self.signal_variance,
self.length_scale,
self.cutoff,
self.cutoff_func,
)
@njit
def energy_energy(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
"""3-body multi-element kernel between two local energies accelerated
with Numba.
Args:
bond_array_1 (np.ndarray): 3-body bond array of the first local
environment.
c1 (int): Species of the central atom of the first local environment.
etypes1 (np.ndarray): Species of atoms in the first local
environment.
bond_array_2 (np.ndarray): 3-body bond array of the second local
environment.
c2 (int): Species of the central atom of the second local environment.
etypes2 (np.ndarray): Species of atoms in the second local
environment.
cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the first local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_inds_2 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the second local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the first
local environment that are within a distance r_cut of both atom
n and the central atom.
cross_bond_dists_2 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the second
local environment that are within a distance r_cut of both atom
n and the central atom.
triplets_1 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the first local environment that are
within a distance r_cut of atom m.
triplets_2 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the second local environment that are
within a distance r_cut of atom m.
sig (float): 3-body signal variance hyperparameter.
ls (float): 3-body length scale hyperparameter.
r_cut (float): 3-body cutoff radius.
cutoff_func (Callable): Cutoff function.
Returns:
float:
Value of the 3-body local energy kernel.
"""
kern = 0
sig2 = sig * sig
ls2 = 1 / (2 * ls * ls)
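    # first loop over the first 3-body environment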
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
fi1, _ = cutoff_func(r_cut, ri1, 0)
ei1 = etypes1[m]
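        # second loop over the first 3-body environment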
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
fi2, _ = cutoff_func(r_cut, ri2, 0)
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
fi = fi1 * fi2 * fi3
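            # first loop over the second 3-body environment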
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
fj1, _ = cutoff_func(r_cut, rj1, 0)
ej1 = etypes2[p]
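                # second loop over the second 3-body environment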
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + q + 1]
rj2 = bond_array_2[ind2, 0]
fj2, _ = cutoff_func(r_cut, rj2, 0)
ej2 = etypes2[ind2]
rj3 = cross_bond_dists_2[p, p + q + 1]
fj3, _ = cutoff_func(r_cut, rj3, 0)
fj = fj1 * fj2 * fj3
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
kern += three_body_ee_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
ls2,
sig2,
)
return kern / 9
@njit
def force_energy(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
"""3-body multi-element kernel between a force component and a local
energy accelerated with Numba.
Args:
bond_array_1 (np.ndarray): 3-body bond array of the first local
environment.
c1 (int): Species of the central atom of the first local environment.
etypes1 (np.ndarray): Species of atoms in the first local
environment.
bond_array_2 (np.ndarray): 3-body bond array of the second local
environment.
c2 (int): Species of the central atom of the second local environment.
etypes2 (np.ndarray): Species of atoms in the second local
environment.
cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the first local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_inds_2 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the second local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the first
local environment that are within a distance r_cut of both atom
n and the central atom.
cross_bond_dists_2 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the second
local environment that are within a distance r_cut of both atom
n and the central atom.
triplets_1 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the first local environment that are
within a distance r_cut of atom m.
triplets_2 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the second local environment that are
within a distance r_cut of atom m.
sig (float): 3-body signal variance hyperparameter.
ls (float): 3-body length scale hyperparameter.
r_cut (float): 3-body cutoff radius.
cutoff_func (Callable): Cutoff function.
    Returns:
        np.ndarray:
            3-vector of force/energy kernel values, one for each Cartesian
            component of the force on the first environment.
"""
kern = np.zeros(3)
sig2 = sig * sig
ls1 = 1 / (2 * ls * ls)
ls2 = 1 / (ls * ls)
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
ei1 = etypes1[m]
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
fj1, _ = cutoff_func(r_cut, rj1, 0)
ej1 = etypes2[p]
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + q + 1]
rj2 = bond_array_2[ind2, 0]
fj2, _ = cutoff_func(r_cut, rj2, 0)
ej2 = etypes2[ind2]
rj3 = cross_bond_dists_2[p, p + q + 1]
fj3, _ = cutoff_func(r_cut, rj3, 0)
fj = fj1 * fj2 * fj3
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
for d1 in range(3):
ci1 = bond_array_1[m, d1 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d1 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fi = fi1 * fi2 * fi3
fdi = fdi1 * fi2 * fi3 + fi1 * fdi2 * fi3
kern[d1] += three_body_fe_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
ls1,
ls2,
sig2,
)
return kern / 3
@njit
def stress_energy(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
"""3-body multi-element kernel between a force component and a local
energy accelerated with Numba.
Args:
bond_array_1 (np.ndarray): 3-body bond array of the first local
environment.
c1 (int): Species of the central atom of the first local environment.
etypes1 (np.ndarray): Species of atoms in the first local
environment.
bond_array_2 (np.ndarray): 3-body bond array of the second local
environment.
c2 (int): Species of the central atom of the second local environment.
etypes2 (np.ndarray): Species of atoms in the second local
environment.
cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the first local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_inds_2 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the second local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the first
local environment that are within a distance r_cut of both atom
n and the central atom.
cross_bond_dists_2 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the second
local environment that are within a distance r_cut of both atom
n and the central atom.
triplets_1 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the first local environment that are
within a distance r_cut of atom m.
triplets_2 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the second local environment that are
within a distance r_cut of atom m.
sig (float): 3-body signal variance hyperparameter.
ls (float): 3-body length scale hyperparameter.
r_cut (float): 3-body cutoff radius.
cutoff_func (Callable): Cutoff function.
    Returns:
        np.ndarray:
            6-vector of stress/energy kernel values for the independent
            stress components, ordered xx, xy, xz, yy, yz, zz.
"""
kern = np.zeros(6)
sig2 = sig * sig
ls1 = 1 / (2 * ls * ls)
ls2 = 1 / (ls * ls)
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
fi1, _ = cutoff_func(r_cut, ri1, 0)
ei1 = etypes1[m]
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
fi2, _ = cutoff_func(r_cut, ri2, 0)
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
fi = fi1 * fi2 * fi3
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
fj1, _ = cutoff_func(r_cut, rj1, 0)
ej1 = etypes2[p]
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + q + 1]
rj2 = bond_array_2[ind2, 0]
fj2, _ = cutoff_func(r_cut, rj2, 0)
ej2 = etypes2[ind2]
rj3 = cross_bond_dists_2[p, p + q + 1]
fj3, _ = cutoff_func(r_cut, rj3, 0)
fj = fj1 * fj2 * fj3
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
stress_count = 0
for d1 in range(3):
ci1 = bond_array_1[m, d1 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d1 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fdi_p1 = fdi1 * fi2 * fi3
fdi_p2 = fi1 * fdi2 * fi3
fdi = fdi_p1 + fdi_p2
for d2 in range(d1, 3):
coord1 = bond_array_1[m, d2 + 1] * ri1
coord2 = bond_array_1[ind1, d2 + 1] * ri2
kern[stress_count] += three_body_se_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
ls1,
ls2,
sig2,
coord1,
coord2,
fdi_p1,
fdi_p2,
)
stress_count += 1
return kern / 6
@njit
def force_force(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
"""3-body multi-element kernel between two force components accelerated
with Numba.
Args:
bond_array_1 (np.ndarray): 3-body bond array of the first local
environment.
c1 (int): Species of the central atom of the first local environment.
etypes1 (np.ndarray): Species of atoms in the first local
environment.
bond_array_2 (np.ndarray): 3-body bond array of the second local
environment.
c2 (int): Species of the central atom of the second local environment.
etypes2 (np.ndarray): Species of atoms in the second local
environment.
cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the first local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_inds_2 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the second local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the first
local environment that are within a distance r_cut of both atom
n and the central atom.
cross_bond_dists_2 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the second
local environment that are within a distance r_cut of both atom
n and the central atom.
triplets_1 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the first local environment that are
within a distance r_cut of atom m.
triplets_2 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the second local environment that are
within a distance r_cut of atom m.
sig (float): 3-body signal variance hyperparameter.
ls (float): 3-body length scale hyperparameter.
r_cut (float): 3-body cutoff radius.
cutoff_func (Callable): Cutoff function.
    Return:
        np.ndarray: 3x3 matrix of force/force kernel values.
"""
kern = np.zeros((3, 3))
# pre-compute constants that appear in the inner loop
sig2 = sig * sig
ls1 = 1 / (2 * ls * ls)
ls2 = 1 / (ls * ls)
ls3 = ls2 * ls2
# first loop over the first 3-body environment
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
ei1 = etypes1[m]
# second loop over the first 3-body environment
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
# first loop over the second 3-body environment
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
ej1 = etypes2[p]
# second loop over the second 3-body environment
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + 1 + q]
rj2 = bond_array_2[ind2, 0]
rj3 = cross_bond_dists_2[p, p + 1 + q]
fj3, _ = cutoff_func(r_cut, rj3, 0)
ej2 = etypes2[ind2]
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
for d1 in range(3):
ci1 = bond_array_1[m, d1 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d1 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fi = fi1 * fi2 * fi3
fdi = fdi1 * fi2 * fi3 + fi1 * fdi2 * fi3
for d2 in range(3):
cj1 = bond_array_2[p, d2 + 1]
fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
cj2 = bond_array_2[ind2, d2 + 1]
fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
fj = fj1 * fj2 * fj3
fdj = fdj1 * fj2 * fj3 + fj1 * fdj2 * fj3
kern[d1, d2] += three_body_ff_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
cj1,
cj2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
fdj,
ls1,
ls2,
ls3,
sig2,
)
return kern
@njit
def stress_force(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
"""3-body multi-element kernel between two force components accelerated
with Numba.
Args:
bond_array_1 (np.ndarray): 3-body bond array of the first local
environment.
c1 (int): Species of the central atom of the first local environment.
etypes1 (np.ndarray): Species of atoms in the first local
environment.
bond_array_2 (np.ndarray): 3-body bond array of the second local
environment.
c2 (int): Species of the central atom of the second local environment.
etypes2 (np.ndarray): Species of atoms in the second local
environment.
cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the first local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_inds_2 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the second local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the first
local environment that are within a distance r_cut of both atom
n and the central atom.
cross_bond_dists_2 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the second
local environment that are within a distance r_cut of both atom
n and the central atom.
triplets_1 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the first local environment that are
within a distance r_cut of atom m.
triplets_2 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the second local environment that are
within a distance r_cut of atom m.
sig (float): 3-body signal variance hyperparameter.
ls (float): 3-body length scale hyperparameter.
r_cut (float): 3-body cutoff radius.
cutoff_func (Callable): Cutoff function.
    Return:
        np.ndarray: 6x3 matrix of stress/force kernel values.
"""
kern = np.zeros((6, 3))
# pre-compute constants that appear in the inner loop
sig2 = sig * sig
ls1 = 1 / (2 * ls * ls)
ls2 = 1 / (ls * ls)
ls3 = ls2 * ls2
# first loop over the first 3-body environment
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
ei1 = etypes1[m]
# second loop over the first 3-body environment
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
# first loop over the second 3-body environment
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
ej1 = etypes2[p]
# second loop over the second 3-body environment
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + 1 + q]
rj2 = bond_array_2[ind2, 0]
rj3 = cross_bond_dists_2[p, p + 1 + q]
fj3, _ = cutoff_func(r_cut, rj3, 0)
ej2 = etypes2[ind2]
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
stress_count = 0
for d1 in range(3):
ci1 = bond_array_1[m, d1 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d1 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fi = fi1 * fi2 * fi3
fdi_p1 = fdi1 * fi2 * fi3
fdi_p2 = fi1 * fdi2 * fi3
fdi = fdi_p1 + fdi_p2
for d2 in range(d1, 3):
coord1 = bond_array_1[m, d2 + 1] * ri1
coord2 = bond_array_1[ind1, d2 + 1] * ri2
for d3 in range(3):
cj1 = bond_array_2[p, d3 + 1]
fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
cj2 = bond_array_2[ind2, d3 + 1]
fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
fj = fj1 * fj2 * fj3
fdj = fdj1 * fj2 * fj3 + fj1 * fdj2 * fj3
kern[stress_count, d3] += three_body_sf_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
cj1,
cj2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
fdj,
ls1,
ls2,
ls3,
sig2,
coord1,
coord2,
fdi_p1,
fdi_p2,
)
stress_count += 1
return kern / 2
@njit
def stress_stress(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
"""3-body multi-element kernel between two force components accelerated
with Numba.
Args:
bond_array_1 (np.ndarray): 3-body bond array of the first local
environment.
c1 (int): Species of the central atom of the first local environment.
etypes1 (np.ndarray): Species of atoms in the first local
environment.
bond_array_2 (np.ndarray): 3-body bond array of the second local
environment.
c2 (int): Species of the central atom of the second local environment.
etypes2 (np.ndarray): Species of atoms in the second local
environment.
cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the first local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_inds_2 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the second local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the first
local environment that are within a distance r_cut of both atom
n and the central atom.
cross_bond_dists_2 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the second
local environment that are within a distance r_cut of both atom
n and the central atom.
triplets_1 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the first local environment that are
within a distance r_cut of atom m.
triplets_2 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the second local environment that are
within a distance r_cut of atom m.
sig (float): 3-body signal variance hyperparameter.
ls (float): 3-body length scale hyperparameter.
r_cut (float): 3-body cutoff radius.
cutoff_func (Callable): Cutoff function.
    Return:
        np.ndarray: 6x6 matrix of stress/stress kernel values.
"""
kern = np.zeros((6, 6))
# pre-compute constants that appear in the inner loop
sig2 = sig * sig
ls1 = 1 / (2 * ls * ls)
ls2 = 1 / (ls * ls)
ls3 = ls2 * ls2
# first loop over the first 3-body environment
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
ei1 = etypes1[m]
# second loop over the first 3-body environment
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
# first loop over the second 3-body environment
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
ej1 = etypes2[p]
# second loop over the second 3-body environment
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + 1 + q]
rj2 = bond_array_2[ind2, 0]
rj3 = cross_bond_dists_2[p, p + 1 + q]
fj3, _ = cutoff_func(r_cut, rj3, 0)
ej2 = etypes2[ind2]
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
stress_count_1 = 0
for d1 in range(3):
ci1 = bond_array_1[m, d1 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d1 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fi = fi1 * fi2 * fi3
fdi_p1 = fdi1 * fi2 * fi3
fdi_p2 = fi1 * fdi2 * fi3
fdi = fdi_p1 + fdi_p2
for d2 in range(d1, 3):
coord1 = bond_array_1[m, d2 + 1] * ri1
coord2 = bond_array_1[ind1, d2 + 1] * ri2
stress_count_2 = 0
for d3 in range(3):
cj1 = bond_array_2[p, d3 + 1]
fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
cj2 = bond_array_2[ind2, d3 + 1]
fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
fj = fj1 * fj2 * fj3
fdj_p1 = fdj1 * fj2 * fj3
fdj_p2 = fj1 * fdj2 * fj3
fdj = fdj_p1 + fdj_p2
for d4 in range(d3, 3):
coord3 = bond_array_2[p, d4 + 1] * rj1
coord4 = bond_array_2[ind2, d4 + 1] * rj2
kern[
stress_count_1, stress_count_2
] += three_body_ss_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
cj1,
cj2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
fdj,
ls1,
ls2,
ls3,
sig2,
coord1,
coord2,
coord3,
coord4,
fdi_p1,
fdi_p2,
fdj_p1,
fdj_p2,
)
stress_count_2 += 1
stress_count_1 += 1
return kern / 4
@njit
def force_force_gradient(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
"""3-body multi-element kernel between two force components and its
gradient with respect to the hyperparameters.
Args:
bond_array_1 (np.ndarray): 3-body bond array of the first local
environment.
c1 (int): Species of the central atom of the first local environment.
etypes1 (np.ndarray): Species of atoms in the first local
environment.
bond_array_2 (np.ndarray): 3-body bond array of the second local
environment.
c2 (int): Species of the central atom of the second local environment.
etypes2 (np.ndarray): Species of atoms in the second local
environment.
cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the first local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_inds_2 (np.ndarray): Two dimensional array whose row m
contains the indices of atoms n > m in the second local
environment that are within a distance r_cut of both atom n and
the central atom.
cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the first
local environment that are within a distance r_cut of both atom
n and the central atom.
cross_bond_dists_2 (np.ndarray): Two dimensional array whose row m
contains the distances from atom m of atoms n > m in the second
local environment that are within a distance r_cut of both atom
n and the central atom.
triplets_1 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the first local environment that are
within a distance r_cut of atom m.
triplets_2 (np.ndarray): One dimensional array of integers whose entry
m is the number of atoms in the second local environment that are
within a distance r_cut of atom m.
sig (float): 3-body signal variance hyperparameter.
ls (float): 3-body length scale hyperparameter.
r_cut (float): 3-body cutoff radius.
cutoff_func (Callable): Cutoff function.
    Returns:
        (np.ndarray, np.ndarray):
            3x3 matrix of force/force kernel values and its gradient with
            respect to the signal variance and length scale, stored in a
            2x3x3 array.
"""
kernel_matrix = np.zeros((3, 3))
kernel_grad = np.zeros((2, 3, 3))
# pre-compute constants that appear in the inner loop
sig2, sig3, ls1, ls2, ls3, ls4, ls5, ls6 = grad_constants(sig, ls)
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
ei1 = etypes1[m]
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri3 = cross_bond_dists_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
ei2 = etypes1[ind1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
ej1 = etypes2[p]
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + q + 1]
rj3 = cross_bond_dists_2[p, p + q + 1]
rj2 = bond_array_2[ind2, 0]
ej2 = etypes2[ind2]
fj3, _ = cutoff_func(r_cut, rj3, 0)
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
for d1 in range(3):
ci1 = bond_array_1[m, d1 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d1 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fdi = fdi1 * fi2 * fi3 + fi1 * fdi2 * fi3
fi = fi1 * fi2 * fi3
for d2 in range(3):
cj1 = bond_array_2[p, d2 + 1]
fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
cj2 = bond_array_2[ind2, d2 + 1]
fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
fdj = fdj1 * fj2 * fj3 + fj1 * fdj2 * fj3
fj = fj1 * fj2 * fj3
kern_term, sig_term, ls_term = three_body_grad_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
cj1,
cj2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
fdj,
ls1,
ls2,
ls3,
ls4,
ls5,
ls6,
sig2,
sig3,
)
kernel_matrix[d1, d2] += kern_term
kernel_grad[0, d1, d2] += sig_term
kernel_grad[1, d1, d2] += ls_term
return kernel_matrix, kernel_grad
@njit
def efs_energy(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
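# Accumulates the energy/energy kernel (scalar), the three force/energy
# kernels and the six independent stress/energy components between the
# two local environments.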
energy_kernel = 0
force_kernels = np.zeros(3)
stress_kernels = np.zeros(6)
sig2 = sig * sig
ls1 = 1 / (2 * ls * ls)
ls2 = 1 / (ls * ls)
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
fi1, _ = cutoff_func(r_cut, ri1, 0)
ei1 = etypes1[m]
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
fi2, _ = cutoff_func(r_cut, ri2, 0)
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
fi = fi1 * fi2 * fi3
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
fj1, _ = cutoff_func(r_cut, rj1, 0)
ej1 = etypes2[p]
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + q + 1]
rj2 = bond_array_2[ind2, 0]
fj2, _ = cutoff_func(r_cut, rj2, 0)
ej2 = etypes2[ind2]
rj3 = cross_bond_dists_2[p, p + q + 1]
fj3, _ = cutoff_func(r_cut, rj3, 0)
fj = fj1 * fj2 * fj3
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
energy_kernel += (
three_body_ee_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
ls1,
sig2,
)
/ 9
)
stress_count = 0
for d1 in range(3):
ci1 = bond_array_1[m, d1 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d1 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fi = fi1 * fi2 * fi3
fdi_p1 = fdi1 * fi2 * fi3
fdi_p2 = fi1 * fdi2 * fi3
fdi = fdi_p1 + fdi_p2
force_kernels[d1] += (
three_body_fe_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
ls1,
ls2,
sig2,
)
/ 3
)
for d2 in range(d1, 3):
coord1 = bond_array_1[m, d2 + 1] * ri1
coord2 = bond_array_1[ind1, d2 + 1] * ri2
stress_kernels[stress_count] += (
three_body_se_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
ls1,
ls2,
sig2,
coord1,
coord2,
fdi_p1,
fdi_p2,
)
/ 6
)
stress_count += 1
return energy_kernel, force_kernels, stress_kernels
@njit
def efs_force(
bond_array_1,
c1,
etypes1,
bond_array_2,
c2,
etypes2,
cross_bond_inds_1,
cross_bond_inds_2,
cross_bond_dists_1,
cross_bond_dists_2,
triplets_1,
triplets_2,
sig,
ls,
r_cut,
cutoff_func,
):
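# Accumulates, against each force component d3 of the second environment,
# the energy kernels (shape (3,)), the force/force kernels (3, 3) and the
# stress/force kernels (6, 3).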
energy_kernels = np.zeros(3)
force_kernels = np.zeros((3, 3))
stress_kernels = np.zeros((6, 3))
# pre-compute constants that appear in the inner loop
sig2 = sig * sig
ls1 = 1 / (2 * ls * ls)
ls2 = 1 / (ls * ls)
ls3 = ls2 * ls2
# first loop over the first 3-body environment
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
fi1, _ = cutoff_func(r_cut, ri1, 0)
ei1 = etypes1[m]
# second loop over the first 3-body environment
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
fi2, _ = cutoff_func(r_cut, ri2, 0)
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
fi = fi1 * fi2 * fi3
# first loop over the second 3-body environment
for p in range(bond_array_2.shape[0]):
rj1 = bond_array_2[p, 0]
fj1, _ = cutoff_func(r_cut, rj1, 0)
ej1 = etypes2[p]
# second loop over the second 3-body environment
for q in range(triplets_2[p]):
ind2 = cross_bond_inds_2[p, p + 1 + q]
rj2 = bond_array_2[ind2, 0]
fj2, _ = cutoff_func(r_cut, rj2, 0)
rj3 = cross_bond_dists_2[p, p + 1 + q]
fj3, _ = cutoff_func(r_cut, rj3, 0)
ej2 = etypes2[ind2]
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
for d3 in range(3):
cj1 = bond_array_2[p, d3 + 1]
fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
cj2 = bond_array_2[ind2, d3 + 1]
fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
fj = fj1 * fj2 * fj3
fdj = fdj1 * fj2 * fj3 + fj1 * fdj2 * fj3
energy_kernels[d3] += (
three_body_fe_perm(
r11,
r21,
r31,
r12,
r22,
r32,
r13,
r23,
r33,
c2,
c1,
-cj1,
-cj2,
ej1,
ej2,
ei1,
ei2,
fj,
fi,
fdj,
ls1,
ls2,
sig2,
)
/ 3
)
stress_count = 0
for d1 in range(3):
ci1 = bond_array_1[m, d1 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d1 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fi = fi1 * fi2 * fi3
fdi_p1 = fdi1 * fi2 * fi3
fdi_p2 = fi1 * fdi2 * fi3
fdi = fdi_p1 + fdi_p2
force_kernels[d1, d3] += three_body_ff_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
cj1,
cj2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
fdj,
ls1,
ls2,
ls3,
sig2,
)
for d2 in range(d1, 3):
coord1 = bond_array_1[m, d2 + 1] * ri1
coord2 = bond_array_1[ind1, d2 + 1] * ri2
stress_kernels[stress_count, d3] += (
three_body_sf_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c2,
ci1,
ci2,
cj1,
cj2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
fdj,
ls1,
ls2,
ls3,
sig2,
coord1,
coord2,
fdi_p1,
fdi_p2,
)
/ 2
)
stress_count += 1
return energy_kernels, force_kernels, stress_kernels
@njit
def efs_self(
bond_array_1,
c1,
etypes1,
cross_bond_inds_1,
cross_bond_dists_1,
triplets_1,
sig,
ls,
r_cut,
cutoff_func,
):
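# Self-kernel variant: both environments are the same, so a single scalar
# energy kernel, three diagonal force kernels and six stress kernels are
# accumulated.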
energy_kernel = 0
force_kernels = np.zeros(3)
stress_kernels = np.zeros(6)
# pre-compute constants that appear in the inner loop
sig2 = sig * sig
ls1 = 1 / (2 * ls * ls)
ls2 = 1 / (ls * ls)
ls3 = ls2 * ls2
for m in range(bond_array_1.shape[0]):
ri1 = bond_array_1[m, 0]
fi1, _ = cutoff_func(r_cut, ri1, 0)
ei1 = etypes1[m]
for n in range(triplets_1[m]):
ind1 = cross_bond_inds_1[m, m + n + 1]
ri2 = bond_array_1[ind1, 0]
fi2, _ = cutoff_func(r_cut, ri2, 0)
ei2 = etypes1[ind1]
ri3 = cross_bond_dists_1[m, m + n + 1]
fi3, _ = cutoff_func(r_cut, ri3, 0)
fi = fi1 * fi2 * fi3
for p in range(bond_array_1.shape[0]):
rj1 = bond_array_1[p, 0]
fj1, _ = cutoff_func(r_cut, rj1, 0)
ej1 = etypes1[p]
for q in range(triplets_1[p]):
ind2 = cross_bond_inds_1[p, p + 1 + q]
rj2 = bond_array_1[ind2, 0]
fj2, _ = cutoff_func(r_cut, rj2, 0)
rj3 = cross_bond_dists_1[p, p + 1 + q]
fj3, _ = cutoff_func(r_cut, rj3, 0)
fj = fj1 * fj2 * fj3
ej2 = etypes1[ind2]
r11 = ri1 - rj1
r12 = ri1 - rj2
r13 = ri1 - rj3
r21 = ri2 - rj1
r22 = ri2 - rj2
r23 = ri2 - rj3
r31 = ri3 - rj1
r32 = ri3 - rj2
r33 = ri3 - rj3
energy_kernel += (
three_body_ee_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c1,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
ls1,
sig2,
)
/ 9
)
stress_count = 0
for d3 in range(3):
cj1 = bond_array_1[p, d3 + 1]
fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
cj2 = bond_array_1[ind2, d3 + 1]
fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
fdj_p1 = fdj1 * fj2 * fj3
fdj_p2 = fj1 * fdj2 * fj3
fdj = fdj_p1 + fdj_p2
ci1 = bond_array_1[m, d3 + 1]
fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
ci2 = bond_array_1[ind1, d3 + 1]
fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
fi = fi1 * fi2 * fi3
fdi_p1 = fdi1 * fi2 * fi3
fdi_p2 = fi1 * fdi2 * fi3
fdi = fdi_p1 + fdi_p2
force_kernels[d3] += three_body_ff_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c1,
ci1,
ci2,
cj1,
cj2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
fdj,
ls1,
ls2,
ls3,
sig2,
)
for d2 in range(d3, 3):
coord1 = bond_array_1[m, d2 + 1] * ri1
coord2 = bond_array_1[ind1, d2 + 1] * ri2
coord3 = bond_array_1[p, d2 + 1] * rj1
coord4 = bond_array_1[ind2, d2 + 1] * rj2
stress_kernels[stress_count] += (
three_body_ss_perm(
r11,
r12,
r13,
r21,
r22,
r23,
r31,
r32,
r33,
c1,
c1,
ci1,
ci2,
cj1,
cj2,
ei1,
ei2,
ej1,
ej2,
fi,
fj,
fdi,
fdj,
ls1,
ls2,
ls3,
sig2,
coord1,
coord2,
coord3,
coord4,
fdi_p1,
fdi_p2,
fdj_p1,
fdj_p2,
)
/ 4
)
stress_count += 1
return energy_kernel, force_kernels, stress_kernels
| mit | -7,268,425,266,241,828,000 | 36.113314 | 85 | 0.386948 | false |
abn/python-cafe | cafeteria/datastructs/units/__init__.py | 1 | 1183 | from re import match
try:
long
except NameError:
# noinspection PyShadowingBuiltins
long = int
class BaseUnitClass(float):
UNITS = {}
# noinspection PyInitNewSignature
def __new__(cls, x, unit=None):
if isinstance(x, str):
units_regex = "|".join(cls.UNITS.keys())
m = match(r"^(\d+(\.\d+)?) ?({})$".format(units_regex), x)
if m is None:
raise ValueError(
'{} requires number or a string in the format "<value> '
'({})"'.format(cls.__name__, units_regex)
)
x = float(m.group(1)) * cls.UNITS.get(m.group(3))
elif unit is None:
raise ValueError("No unit provided.")
else:
x = x * cls.UNITS[unit]
return super(BaseUnitClass, cls).__new__(cls, x)
def __getattr__(self, item):
if item in self.UNITS:
# if unit is known convert to unit
result = self * 1.0 / self.UNITS[item]
rounded = long(result)
return result if result != rounded else rounded
raise AttributeError("{} is not a valid conversion unit".format(item))
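# A minimal usage sketch (the Memory subclass and its factors below are
# hypothetical, not part of this package):
#
#     class Memory(BaseUnitClass):
#         UNITS = {'B': 1, 'KB': 1024, 'MB': 1024 ** 2}
#
#     Memory('2 MB').KB  # -> 2048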
| apache-2.0 | 5,395,334,718,409,300,000 | 30.972973 | 78 | 0.530854 | false |
ajdiaz/mico | mico/lib/aws/ec2/__init__.py | 1 | 12467 | #! /usr/bin/env python
# -*- encoding: utf-8 -*-
# vim:fenc=utf-8:
"""The EC2 librart provide a number of functions and modules to
handle EC2 cloud resources, like instance, volumes and so on.
Example of usage in host definitions::
from mico.lib.aws.ec2 import *
ec2_run('ami-12345')
"""
import time
from os import environ as os_environ
from fnmatch import fnmatch
from boto.ec2 import get_region
from boto.ec2.connection import EC2Connection
import mico.output
from mico.util.dicts import AttrDict
class EC2LibraryError(Exception):
"""Model an exception related with EC2 API."""
def ec2_connect(region=None):
"""Helper to connect to Amazon Web Services EC2, using identify provided
by environment, as also optional region in arguments.
"""
if not os_environ.get("AWS_ACCESS_KEY_ID", None):
raise EC2LibraryError("Environment variable AWS_ACCESS_KEY_ID is not set.")
if not os_environ.get("AWS_SECRET_ACCESS_KEY", None):
raise EC2LibraryError("Environment variable AWS_SECRET_ACCESS_KEY is not set.")
if not region:
region = env.get("ec2_region")
region = get_region(region,
aws_access_key_id=os_environ.get("AWS_ACCESS_KEY_ID"),
aws_secret_access_key=os_environ.get("AWS_ACCESS_SECRET_KEY")
)
connection = EC2Connection(
os_environ.get("AWS_ACCESS_KEY_ID"),
os_environ.get("AWS_SECRET_ACCESS_KEY"),
region=region
)
return connection
def ec2_tag(resource, **kwargs):
"""Tag a resource with specified tags.
Example::
tag(instance, Name='example.host')
"""
connection = ec2_connect()
connection.create_tags([resource.id], kwargs)
mico.output.debug("tag instance %s:" % (
resource.id,
",".join(map(lambda x: "%s=%s" % x, [x for x in kwargs.iteritems()]))
))
def ec2_tag_volumes(instance):
"""Tag volumes in the instance following a basic notation Name as
hostname of the instance host and Device to the properly device in
the system.
"""
connection = ec2_connect()
_obj = instance.get_attribute("blockDeviceMapping")
if u"blockDeviceMapping" in _obj:
_obj = _obj[u"blockDeviceMapping"]
for device, obj in _obj.items():
if obj.volume_id is not None:
_d = {"Device": device}
if "Name" in instance.tags:
_d["Name"] = instance.tags["Name"]
connection.create_tags([obj.volume_id], _d)
def ec2_ensure(ami, name=None, address=None, wait_until_running=True,
tags={}, force=False, region=None,
termination_protection=True, volumes={}, **kwargs):
"""Create a new EC2 instance according with parameters passed as
arguments.
:type ami: string
:param ami: A string containing the AMI identifier for the
instance.
:type name: string
:param name: a descriptive name for the host, this field will be used as
Name tag for the host. If not present host will be no tagged for
Name. Also you can override this tag using tag parameter.
:type image_id: string
:param image_id: The ID of the image to run.
:type min_count: int
:param min_count: The minimum number of instances to launch.
:type max_count: int
:param max_count: The maximum number of instances to launch.
:type key_name: string
:param key_name: The name of the key pair with which to
launch instances.
:type security_groups: list of strings
:param security_groups: The names of the security groups with which to
associate instances
:type user_data: string
:param user_data: The user data passed to the launched instances
:type instance_type: string
:param instance_type: The type of instance to run:
* t1.micro
* m1.small
* m1.medium
* m1.large
* m1.xlarge
* c1.medium
* c1.xlarge
* m2.xlarge
* m2.2xlarge
* m2.4xlarge
* cc1.4xlarge
* cg1.4xlarge
* cc2.8xlarge
:type placement: string
:param placement: The availability zone in which to launch
the instances.
:type address: string
:param address: the public IP address to associate with the instance.
:type kernel_id: string
:param kernel_id: The ID of the kernel with which to launch the
instances.
:type ramdisk_id: string
:param ramdisk_id: The ID of the RAM disk with which to launch the
instances.
:type monitoring_enabled: bool
:type region: string
:param region: the region name where instance will live.
:type wait_until_running: bool
:param wait_until_running: when setting to True (the default), thread
will be blocked until the instance status will be 'running', if
false, function returns without check the instance status.
:type tags: dict
:param tags: a dictionary which contains tags for this instance.
:type termination_protection: bool
:param termination_protection: set the termination protection of the
instance, by default all nodes are stated with termination
protection set to true.
:type force: bool
:param force: if set to True force the creation tough the instance
already exists (i.e. some other instance has the same tags.)
:type volumes: dict
:param volumes: a dictionary in the form {device: ebs_volume}, where
device is a string which identifies the AWS device for the volume (e.g.
/dev/sdf), and ebs_volume is a volume object created by ebs_ensure.
"""
if not force:
_obj = ec2_exists({"Name": name})
if _obj:
status = _obj.update()
if status != "terminated":
mico.output.info("use existent instance: %s [%s]" % (_obj.id, _obj.ip_address or 'no ip found'))
if getattr(_obj, "ip_address", None) and _obj.ip_address:
if 'mico' in env.roledefs:
env.roledefs['mico'].append(_obj.ip_address)
else:
env.roledefs['mico'] = [_obj.ip_address]
if 'mico' not in env.roles:
env.roles.append('mico')
return _obj
kwargs["disable_api_termination"] = termination_protection
connection = ec2_connect()
reservation = connection.run_instances(ami, **kwargs)
instance = reservation.instances[0]
status = instance.update()
if name is not None:
connection.create_tags([instance.id], {"Name": name})
if tags:
connection.create_tags([instance.id], tags)
if not wait_until_running:
return instance
while status == 'pending':
mico.output.debug("waiting 10 secs for instance initialiation...")
time.sleep(10)
status = instance.update()
time.sleep(2) # yes... amazon weird behaviour :/
for device, volume in volumes.items():
connection.attach_volume(volume.id, instance.id, device)
ec2_tag_volumes(instance)
mico.output.info("attach volume %s as device %s at instance %s" % (
volume.id,
device,
instance.id
))
if not volumes:
# tag only root device
ec2_tag_volumes(instance)
if address:
mico.output.info("associated address %s at instance %s" % (
address,
instance.id
))
connection.associate_address(instance.id, address)
time.sleep(2) # amazon needs time to think about how to associate an address.
if getattr(instance, "ip_address", None) and instance.ip_address:
mico.output.info("created instance: %s as %s [%s]" % (instance.id, instance.instance_type, instance.ip_address))
if 'mico' in env.roledefs:
env.roledefs['mico'].append(instance.ip_address)
else:
env.roledefs['mico'] = [instance.ip_address]
if 'mico' not in env.roles:
env.roles.append('mico')
else:
mico.output.info("created instance: %s [<unassigned address>]" % (instance.id,))
time.sleep(2) # yes... another amazon weird behaviour :/
return instance
def ec2_exists(tags={}):
"""Returns if tagged instance already exists, if exists return the object,
otherwise returns None.
"""
connection = ec2_connect()
ret = []
for reservation in connection.get_all_instances(None, dict(map(lambda (x, y): ("tag:%s" % x, y), tags.items()))):
for instance in reservation.instances:
if instance.update() != "terminated":
ret.append(instance)
if len(ret) == 1:
return ret[0]
else:
return ret
def ec2_list(*args):
"""List instances filtering with tag name, provided in arguments. Glob
expressions are allowed in filters as multiple filters too, for
example::
ec2_list('host-*', '*database*')
"""
conn = ec2_connect()
args = args or ('*',)
for reservation in conn.get_all_instances():
for instance in reservation.instances:
if instance.state == "terminated":
continue
instance.name = instance.ip_address or "pending"
for arg in args:
if arg.startswith("ip:"):
arg = arg[3:]
if instance.ip_address and fnmatch(instance.ip_address, arg):
yield instance
elif arg.startswith("sec:"):
arg = arg[4:]
for group in map(lambda x: x.name, instance.groups):
if fnmatch(group, arg):
if "Name" in instance.tags:
instance.name = instance.tags["Name"]
yield instance
elif "Name" in instance.tags:
if arg.startswith("tag:"):
arg = arg[4:]
if fnmatch(instance.tags["Name"], arg):
instance.name = instance.tags["Name"]
yield instance
def ec2_events():
"""Return pending events in EC2"""
conn = ec2_connect()
stats = conn.get_all_instance_status()
l = []
for stat in stats:
if stat.events:
for ev in stat.events:
if "Completed" not in ev.description:
ev.name = conn.get_all_instances([stat.id])[0].instances[0].tags.get("Name", "%s" % stat.id)
ev.id = stat.id
ev.zone = stat.zone
ev.status = stat.state_name
ev.begin = ev.not_before
ev.end = ev.not_after
l.append(ev)
return l
ec2_launch = ec2_create = ec2_run = ec2_ensure
from mico.lib.aws.ec2.sg import *
from mico.lib.aws.ec2.cw import *
from mico.lib.aws.ec2.eip import *
from mico.lib.aws.ec2.ebs import *
from mico.lib.aws.ec2.elb import *
from mico.lib.aws.ec2.autoscale import *
from mico.environ import environ
@environ('ec2_ami')
def ec2_get_ami():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "ami-id")
@environ('ec2_hostname')
def ec2_get_hostname():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "hostname")
@environ('ec2_instance_action')
def ec2_get_instance_action():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "instance_action")
@environ('ec2_instance_type')
def ec2_get_instance_type():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "instance_type")
@environ('ec2_aki')
def ec2_get_aki():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "kernel-id")
@environ('ec2_local_hostname')
def ec2_get_local_hostname():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "local-hostname")
@environ('ec2_public_hostname')
def ec2_get_public_hostname():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "public-hostname")
@environ('ec2_local_ipv4')
def ec2_get_local_ipv4():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "local-ipv4")
@environ('ec2_public_ipv4')
def ec2_get_public_ipv4():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "public-ipv4")
@environ('ec2_mac')
def ec2_get_mac():
return run("curl http://169.254.169.254/latest/meta-data/%s" % "mac")
| gpl-2.0 | 5,037,979,986,210,122,000 | 30.966667 | 120 | 0.608567 | false |
lowks/pymr | pymr/tests/test_run.py | 1 | 1427 | import os
import unittest
from click.testing import CliRunner
from pymr import run
class TestRun(unittest.TestCase):
def test_run_command_finds_default(self):
runner = CliRunner()
with runner.isolated_filesystem():
with open('.pymr', 'w') as f:
f.write('[tags]\ntags = default\n\n')
expected = 'calling : in ./.pymr\n'
result = runner.invoke(run.run, args=[':'])
self.assertFalse(result.exception)
self.assertEqual(result.output, expected)
def test_run_command_finds_tags(self):
runner = CliRunner()
with runner.isolated_filesystem():
with open('.pymr', 'w') as f:
f.write('[tags]\ntags = test\n\n')
expected = 'calling : in ./.pymr\n'
result = runner.invoke(run.run, args=['-ttest', ':'])
self.assertFalse(result.exception)
self.assertEqual(result.output, expected)
def test_run_command_finds_tags_when_multiple_tags_exist(self):
runner = CliRunner()
with runner.isolated_filesystem():
with open('.pymr', 'w') as f:
f.write('[tags]\ntags = test,test2\n\n')
expected = 'calling : in ./.pymr\n'
result = runner.invoke(run.run, args=['-ttest2', ':'])
self.assertFalse(result.exception)
self.assertEqual(result.output, expected)
| gpl-3.0 | -921,202,194,637,591,300 | 30.711111 | 67 | 0.571829 | false |
ECP-CANDLE/Supervisor | workflows/async-horovod/Task.py | 1 | 1829 |
# TASK
# This should be a user plug-in
from __future__ import print_function
import os
class Task:
def __init__(self, logger, output, script, parallelism, number, params):
self.logger = logger
self.process = None
self.fd = None
self.output = output
self.script = script
self.parallelism = parallelism
self.number = number
self.params = params
def go(self):
import json, subprocess
J = json.loads(self.params)
learning_rate = J["learning_rate"]
self.open_output()
try:
args = [ self.script, self.output, "%04i"%self.number,
str(self.parallelism),
"adam", str(learning_rate) ]
self.logger.debug("task: " + " ".join(args))
self.process = subprocess.Popen(args=args,
stdin=None,
stdout=self.fd,
stderr=subprocess.STDOUT)
print("started: ", self.process.pid)
except Exception as e:
import traceback
traceback.print_exc()
print("")
print("error while attempting to run: " + " ".join(args))
print(e)
return False
return True
def open_output(self):
try:
output_file = self.output + ("/out-%04i.txt" % self.number)
self.fd = open(output_file, "w")
except Exception as e:
print("")
from utils import fail
fail("Could not open task output file: " +
output_file + "\n" + str(e))
def __del__(self):
if self.fd is not None:
print("closing: " + str(self.number))
self.fd.close()
| mit | 2,287,826,957,714,892,000 | 29.483333 | 76 | 0.492619 | false |
shiquanwang/numba | numba/exttypes/tests/test_type_recognition.py | 1 | 1461 | """
>>> test_typeof()
"""
import numba
from numba import *
def make_base(compiler):
@compiler
class Base(object):
value1 = double
value2 = int_
@void(int_, double)
def __init__(self, value1, value2):
self.value1 = value1
self.value2 = value2
return Base
Base = make_base(jit)
@jit
class Derived(Base):
value3 = float_
@void(int_)
def setvalue(self, value):
self.value3 = value
@autojit
def base_typeof():
obj1 = Base(10, 11.0)
return numba.typeof(obj1.value1), numba.typeof(obj1.value2)
@autojit
def derived_typeof():
obj = Derived(10, 11.0)
return (numba.typeof(obj.value1),
numba.typeof(obj.value2),
numba.typeof(obj.value3))
def test_typeof():
pass
# TODO: type recognition of extension object instantiation
# assert base_typeof() == (double, int_), base_typeof()
# assert derived_typeof() == (double, int_, float_), derived_typeof()
#------------------------------------------------------------------------
# Test Specialized autojit typeof
#------------------------------------------------------------------------
AutoBase = make_base(autojit)
@autojit
def attrtypes(obj):
return numba.typeof(obj.value1), numba.typeof(obj.value2)
def test_autobase():
obj = AutoBase(10, 11.0)
assert attrtypes(obj) == (double, int_)
if __name__ == '__main__':
test_typeof()
test_autobase()
| bsd-2-clause | 5,335,723,778,501,530,000 | 20.485294 | 73 | 0.550992 | false |
stdweird/vsc-utils | test/nagios.py | 1 | 5073 | ##
#
# Copyright 2012-2013 Ghent University
#
# This file is part of vsc-utils,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/vsc-utils
#
# vsc-utils is free software: you can redistribute it and/or modify
# it under the terms of the GNU Library General Public License as
# published by the Free Software Foundation, either version 2 of
# the License, or (at your option) any later version.
#
# vsc-utils is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with vsc-utils. If not, see <http://www.gnu.org/licenses/>.
##
"""
Tests for the vsc.utils.nagios module.
@author: Andy Georges (Ghent University)
"""
import os
import tempfile
import time
import sys
import random
import string
import StringIO
from unittest import TestCase, TestLoader
from vsc.utils.nagios import NagiosReporter, NAGIOS_EXIT_OK, NAGIOS_EXIT_WARNING, NAGIOS_EXIT_CRITICAL, NAGIOS_EXIT_UNKNOWN
from pwd import getpwuid
class TestNagios(TestCase):
"""Test for the nagios reporter class."""
def setUp(self):
user = getpwuid(os.getuid())
self.nagios_user = user.pw_name
def test_cache(self):
"""Test the caching mechanism in the reporter."""
length = random.randint(1, 30)
exit_code = random.randint(0, 3)
threshold = random.randint(0, 10)
message = ''.join(random.choice(string.printable) for x in range(length))
message = message.rstrip()
(handle, filename) = tempfile.mkstemp()
os.unlink(filename)
os.close(handle)
reporter = NagiosReporter('test_cache', filename, threshold, self.nagios_user)
nagios_exit = [NAGIOS_EXIT_OK, NAGIOS_EXIT_WARNING, NAGIOS_EXIT_CRITICAL, NAGIOS_EXIT_UNKNOWN][exit_code]
reporter.cache(nagios_exit, message)
(handle, output_filename) = tempfile.mkstemp()
os.close(handle)
try:
old_stdout = sys.stdout
buffer = StringIO.StringIO()
sys.stdout = buffer
reporter_test = NagiosReporter('test_cache', filename, threshold, self.nagios_user)
reporter_test.report_and_exit()
except SystemExit, err:
line = buffer.getvalue().rstrip()
sys.stdout = old_stdout
buffer.close()
self.assertTrue(err.code == nagios_exit[0])
self.assertTrue(line == "%s %s" % (nagios_exit[1], message))
os.unlink(filename)
def test_threshold(self, message="Hello"):
"""Test the threshold borking mechanism in the reporter."""
message = message.rstrip()
threshold = 1
if message == '':
return
(handle, filename) = tempfile.mkstemp()
os.unlink(filename)
reporter = NagiosReporter('test_cache', filename, threshold, self.nagios_user)
# redirect stdout
old_stdout = sys.stdout
buff = StringIO.StringIO()
sys.stdout = buff
nagios_exit = NAGIOS_EXIT_OK
reporter.cache(nagios_exit, message)
os.close(handle)
try:
reporter_test = NagiosReporter('test_cache', filename, threshold, self.nagios_user)
reporter_test.report_and_exit()
except SystemExit, err:
pass
self.assertEqual(err.code, NAGIOS_EXIT_OK[0],
"Exit with status when the cached data is recent")
# restore stdout
buff.close()
sys.stdout = old_stdout
reporter = NagiosReporter('test_cache', filename, threshold, self.nagios_user)
reporter.cache(nagios_exit, message)
time.sleep(threshold + 1)
# redirect stdout
old_stdout = sys.stdout
buff = StringIO.StringIO()
sys.stdout = buff
try:
reporter_test = NagiosReporter('test_cache', filename, threshold, self.nagios_user)
reporter_test.report_and_exit()
except SystemExit, err:
pass
line = buff.getvalue().rstrip()
# restore stdout
buff.close()
sys.stdout = old_stdout
self.assertEqual(err.code, NAGIOS_EXIT_UNKNOWN[0],
"Too old caches lead to unknown status")
self.assertTrue(line.startswith("%s test_cache gzipped JSON file too old (timestamp =" %
(NAGIOS_EXIT_UNKNOWN[1])))
os.unlink(filename)
def suite():
""" return all the tests"""
return TestLoader().loadTestsFromTestCase(TestNagios)
| gpl-2.0 | 8,120,531,783,130,919,000 | 33.277027 | 123 | 0.640647 | false |
JNRowe/jnrbase | jnrbase/pip_support.py | 1 | 3011 | #
"""pip_support - pip workarounds support."""
# Copyright © 2014-2020 James Rowe <[email protected]>
# Nathan McGregor <[email protected]>
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of jnrbase.
#
# jnrbase is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# jnrbase is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# jnrbase. If not, see <http://www.gnu.org/licenses/>.
# pip, both as a tool and a package manager, are not available on many of the
# systems I use. However, lots of Python users like to use it so we’ll need to
# support the workflow to some extent…
import re
from pathlib import Path
from sys import version_info
from typing import List
__eval_env = {
'__builtins__': {},
'python_version': '{0.major}.{0.minor}'.format(version_info)
}
def parse_requires(__fname: Path) -> List[str]:
"""Parse ``pip``-style requirements files.
This is a *very* naïve parser, but very few packages make use of the more
advanced features. Support for other features will be added only when
packages in the wild depend on them.
Args:
__fname: Base file to pass
Returns:
Parsed dependencies
"""
deps = []
with __fname.open() as req_file:
entries = [s.split('#')[0].strip() for s in req_file.readlines()]
for dep in entries:
if not dep:
continue
elif dep.startswith('-r '):
include = dep.split()[1]
if '/' not in include:
include = __fname.parent / include
else:
include = Path(include)
deps.extend(parse_requires(include))
continue
elif ';' in dep:
dep, marker = [s.strip() for s in dep.split(';')]
# Support for other markers will be added when they’re actually
# found in the wild
match = re.fullmatch(
r"""
(?:python_version) # Supported markers
\s*
(?:<=?|==|>=?) # Supported comparisons
\s*
(?P<quote>(?:'|"))(?:[\d\.]+)(?P=quote) # Test
""", marker, re.VERBOSE)
if not match:
raise ValueError(f'Invalid marker {marker!r}')
if not eval(marker, __eval_env): # pylint: disable=eval-used
continue
deps.append(dep)
return deps
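# A usage sketch (the requirements file content is illustrative):
#
#     # requirements.txt contains: requests>=2.0  # HTTP client
#     parse_requires(Path('requirements.txt'))  # -> ['requests>=2.0']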
| gpl-3.0 | 1,351,354,287,192,426,500 | 36.5375 | 79 | 0.574759 | false |
googleads/google-ads-python | google/ads/googleads/v6/enums/types/change_event_resource_type.py | 1 | 1494 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v6.enums",
marshal="google.ads.googleads.v6",
manifest={"ChangeEventResourceTypeEnum",},
)
class ChangeEventResourceTypeEnum(proto.Message):
r"""Container for enum describing supported resource types for
the ChangeEvent resource.
"""
class ChangeEventResourceType(proto.Enum):
r"""Enum listing the resource types support by the ChangeEvent
resource.
"""
UNSPECIFIED = 0
UNKNOWN = 1
AD = 2
AD_GROUP = 3
AD_GROUP_CRITERION = 4
CAMPAIGN = 5
CAMPAIGN_BUDGET = 6
AD_GROUP_BID_MODIFIER = 7
CAMPAIGN_CRITERION = 8
FEED = 9
FEED_ITEM = 10
CAMPAIGN_FEED = 11
AD_GROUP_FEED = 12
AD_GROUP_AD = 13
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 517,260,311,993,465,660 | 27.188679 | 74 | 0.661312 | false |
datastax/python-driver | tests/unit/cqlengine/test_connection.py | 1 | 2275 | # Copyright DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from cassandra.cluster import _ConfigMode
from cassandra.cqlengine import connection
from cassandra.query import dict_factory
from mock import Mock
class ConnectionTest(unittest.TestCase):
no_registered_connection_msg = "doesn't exist in the registry"
def setUp(self):
super(ConnectionTest, self).setUp()
self.assertFalse(
connection._connections,
'Test precondition not met: connections are registered: {cs}'.format(cs=connection._connections)
)
def test_set_session_without_existing_connection(self):
"""
Users can set the default session without having a default connection set.
"""
mock_cluster = Mock(
_config_mode=_ConfigMode.LEGACY,
)
mock_session = Mock(
row_factory=dict_factory,
encoder=Mock(mapping={}),
cluster=mock_cluster,
)
connection.set_session(mock_session)
def test_get_session_fails_without_existing_connection(self):
"""
Users can't get the default session without having a default connection set.
"""
with self.assertRaisesRegexp(connection.CQLEngineException, self.no_registered_connection_msg):
connection.get_session(connection=None)
def test_get_cluster_fails_without_existing_connection(self):
"""
Users can't get the default cluster without having a default connection set.
"""
with self.assertRaisesRegexp(connection.CQLEngineException, self.no_registered_connection_msg):
connection.get_cluster(connection=None)
| apache-2.0 | -5,416,232,443,682,471,000 | 34.546875 | 108 | 0.693187 | false |
dunn/gammu | tests/gen_sms_tests.py | 1 | 3021 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# vim: expandtab sw=4 ts=4 sts=4:
'''
Gammu SMS backup generator.
'''
__author__ = 'Michal Čihař'
__email__ = '[email protected]'
__license__ = '''
Copyright © 2003 - 2015 Michal Čihař
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License version 2 as published by
the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
'''
import os
# Work in both common location when this can be executed:
try:
os.chdir('tests/at-sms-encode/')
except OSError:
os.chdir('at-sms-encode/')
# Numbers we're going to test
NUMBERS = [
'1234',
'800123456',
'+420800123456',
'+41761234567',
]
# Text parts we're going to test
TEXTS = [
'123456',
'Zkouška sirén',
'This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation.',
]
TEMPLATE = '''
[SMSBackup000]
SMSC = "%s"
State = %s
Number = "%s"
Coding = %s
Folder = %d
'''
STATES = [
'Read',
'Read',
'Sent',
]
CODINGS = [
'Default',
'Unicode',
]
def write_text(f, text):
'''
Writes text split and encoded in the same way as Gammu does for SMS backups.
'''
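# For example, u'hi'.encode('UTF-16-BE').encode('HEX') is '00680069', so the
# first line written would be "Text00 = 00680069".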
encoded = text.encode('UTF-16-BE').encode('HEX')
line = 0
while len(encoded) > 0:
f.write('Text%02d = %s\n' % (line, encoded[:200]))
encoded = encoded[200:]
line = line + 1
def generate_message(index, folder, coding, smscnum, num, text):
'''
Generates single message file.
'''
f = file('%02d.backup' % index, 'w')
f.write(TEMPLATE % (
NUMBERS[smscnum],
STATES[folder],
NUMBERS[num],
CODINGS[coding],
folder
))
if folder > 1:
f.write('Sent = 20070605T135630\n')
write_text(f, TEXTS[text])
f.close()
def generate():
'''
Generates test data based on NUMBERS and TEXTS variables.
'''
index = 1
for smscnum in range(len(NUMBERS)):
for num in range(len(NUMBERS)):
for text in range(len(TEXTS)):
for coding in range(len(CODINGS)):
for folder in [1, 2]:
generate_message(index,
folder,
coding,
smscnum,
num,
text)
index = index + 1
if __name__ == '__main__':
generate()
| gpl-2.0 | -951,379,982,925,268,500 | 24.982759 | 184 | 0.582283 | false |
MikeWoodward/UT330B | UT330BUI/model/UT330.py | 1 | 24526 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provides a cross-platform Python interface for the UNI-T 330A/B/C temperature,
humidity, and pressure data loggers.
This code controls a UNI-T 330 A/B/C device via a cross-platform Python script.
The device accepts commands and provides responses. I’ve decoded all the
commands and responses and put them into this script. There are a few bytes in
both the commands and responses that I couldn’t figure out. If you know what
they are, please add them.
My device is a UNI-T 330B which only has temperature and humidity. The commands
for this device have placeholders for pressure, which I’ve added to my code,
but of course, I can’t check the pressure readings are correct.
Created on Wed Mar 2 18:10:21 2016
@author: Mike Woodward
"""
# =============================================================================
# Imports
# =============================================================================
import datetime
import time
import serial.tools.list_ports
# =============================================================================
# Module info
# =============================================================================
__author__ = "Mike Woodward"
__copyright__ = "2016 Michael Vincent Woodward"
__credits__ = "Philip Gladstone"
__license__ = "MIT"
# =============================================================================
# Function decorators
# =============================================================================
def buffer_safety(func):
"""There can be timing errors where a read takes place when the buffer
is either partially written or not written at all. These errors can be
removed by a short pause of 10ms. This function decorator makes sure
there's at least 10ms between calls."""
def buffer_protection(self, argument=None):
# If we're less than 10ms since the last call, wait 10ms
if datetime.datetime.now() - self.last_op_time \
< datetime.timedelta(0, 0, 10000):
time.sleep(0.01)
# Read functions have no arguments, write functions have one
if argument is None:
data = func(self)
else:
data = func(self, argument)
# We don't know how long the operation took, so use the current time
# as the last op time
self.last_op_time = datetime.datetime.now()
return data
return buffer_protection
# =============================================================================
# Functions
# =============================================================================
TABLE = (
0x0000, 0xC0C1, 0xC181, 0x0140, 0xC301, 0x03C0, 0x0280, 0xC241,
0xC601, 0x06C0, 0x0780, 0xC741, 0x0500, 0xC5C1, 0xC481, 0x0440,
0xCC01, 0x0CC0, 0x0D80, 0xCD41, 0x0F00, 0xCFC1, 0xCE81, 0x0E40,
0x0A00, 0xCAC1, 0xCB81, 0x0B40, 0xC901, 0x09C0, 0x0880, 0xC841,
0xD801, 0x18C0, 0x1980, 0xD941, 0x1B00, 0xDBC1, 0xDA81, 0x1A40,
0x1E00, 0xDEC1, 0xDF81, 0x1F40, 0xDD01, 0x1DC0, 0x1C80, 0xDC41,
0x1400, 0xD4C1, 0xD581, 0x1540, 0xD701, 0x17C0, 0x1680, 0xD641,
0xD201, 0x12C0, 0x1380, 0xD341, 0x1100, 0xD1C1, 0xD081, 0x1040,
0xF001, 0x30C0, 0x3180, 0xF141, 0x3300, 0xF3C1, 0xF281, 0x3240,
0x3600, 0xF6C1, 0xF781, 0x3740, 0xF501, 0x35C0, 0x3480, 0xF441,
0x3C00, 0xFCC1, 0xFD81, 0x3D40, 0xFF01, 0x3FC0, 0x3E80, 0xFE41,
0xFA01, 0x3AC0, 0x3B80, 0xFB41, 0x3900, 0xF9C1, 0xF881, 0x3840,
0x2800, 0xE8C1, 0xE981, 0x2940, 0xEB01, 0x2BC0, 0x2A80, 0xEA41,
0xEE01, 0x2EC0, 0x2F80, 0xEF41, 0x2D00, 0xEDC1, 0xEC81, 0x2C40,
0xE401, 0x24C0, 0x2580, 0xE541, 0x2700, 0xE7C1, 0xE681, 0x2640,
0x2200, 0xE2C1, 0xE381, 0x2340, 0xE101, 0x21C0, 0x2080, 0xE041,
0xA001, 0x60C0, 0x6180, 0xA141, 0x6300, 0xA3C1, 0xA281, 0x6240,
0x6600, 0xA6C1, 0xA781, 0x6740, 0xA501, 0x65C0, 0x6480, 0xA441,
0x6C00, 0xACC1, 0xAD81, 0x6D40, 0xAF01, 0x6FC0, 0x6E80, 0xAE41,
0xAA01, 0x6AC0, 0x6B80, 0xAB41, 0x6900, 0xA9C1, 0xA881, 0x6840,
0x7800, 0xB8C1, 0xB981, 0x7940, 0xBB01, 0x7BC0, 0x7A80, 0xBA41,
0xBE01, 0x7EC0, 0x7F80, 0xBF41, 0x7D00, 0xBDC1, 0xBC81, 0x7C40,
0xB401, 0x74C0, 0x7580, 0xB541, 0x7700, 0xB7C1, 0xB681, 0x7640,
0x7200, 0xB2C1, 0xB381, 0x7340, 0xB101, 0x71C0, 0x7080, 0xB041,
0x5000, 0x90C1, 0x9181, 0x5140, 0x9301, 0x53C0, 0x5280, 0x9241,
0x9601, 0x56C0, 0x5780, 0x9741, 0x5500, 0x95C1, 0x9481, 0x5440,
0x9C01, 0x5CC0, 0x5D80, 0x9D41, 0x5F00, 0x9FC1, 0x9E81, 0x5E40,
0x5A00, 0x9AC1, 0x9B81, 0x5B40, 0x9901, 0x59C0, 0x5880, 0x9841,
0x8801, 0x48C0, 0x4980, 0x8941, 0x4B00, 0x8BC1, 0x8A81, 0x4A40,
0x4E00, 0x8EC1, 0x8F81, 0x4F40, 0x8D01, 0x4DC0, 0x4C80, 0x8C41,
0x4400, 0x84C1, 0x8581, 0x4540, 0x8701, 0x47C0, 0x4680, 0x8641,
0x8201, 0x42C0, 0x4380, 0x8341, 0x4100, 0x81C1, 0x8081, 0x4040)
def modbusCRC(data):
"""Returns the Modbus CRC as two bytes. Be careful of the order."""
# If the contents of the data list are not all integers, this function
# will have problems. A future improvement is to check all elements are ints
crc = 0xFFFF
for number in data:
crc = (crc >> 8) ^ TABLE[(crc ^ number) & 0xFF]
MSB = crc >> 8 # Most Significant Byte
LSB = crc & 255 # Least Significant Byte
return MSB, LSB
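# Note the byte order: commands in this module store the CRC low byte first,
# e.g.
#     msb, lsb = modbusCRC(cmd[0:4])
#     cmd[4], cmd[5] = lsb, msb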
# =============================================================================
# class UT330
# =============================================================================
class UT330():
"""Provides an object-based interface to the UT330.
Here are the commands I know about
# 0x10 - set configuration info
# 0x11 - read configuration info
# 0x12 - synch time
# 0x16 - set offsets
# 0x17 - read offsets
# 0x18 - delete data
# 0x19 - read data
# 0x20 - factory reset
# 0x51 - get device name
"""
# %%
def __init__(self):
# The time of the last function call. Set to min initially because
# there hasn't been a last call when the software starts.
self.last_op_time = datetime.datetime.min
# The PySerial object
self._ut330 = None
# Input and output buffer object
self._buffer = None
# Index to the position of the current element being processed
self._index = 0
# Time to wait before timing out
self._read_timeout = 5
self._write_timeout = 5
# %%
def __del__(self):
self.disconnect()
# %%
def connect(self):
"""Connects to the device or raises an error"""
# Get the port the device is connected to
# ---------------------------------------
port = None
# Get all the serial ports
port_list = serial.tools.list_ports.comports()
serial.tools.list_ports.comports
# Now find which port has our device
for trial in port_list:
# I'm not sure this is specific enough for general use. It may
# give a false report if another device using the same controller
# is connected. However, I can't find a more specific check.
if trial.vid == 4292 and trial.pid == 60000:
port = trial
if port is None:
raise IOError('Error! The UT330 device was not detected on any '
'USB port.')
# Attempt a connection to the port
# --------------------------------
self._ut330 = serial.Serial(port=port.device,
baudrate=115200,
timeout=self._read_timeout,
write_timeout=self._write_timeout)
# Check that the serial port is open
# ----------------------------------
if not self._ut330.isOpen():
raise IOError('Error! The UT330 is not open on the serial port.')
# %%
def __enter__(self):
"""Function to make this class work with Python's with statement"""
self.connect()
return self
# %%
def __exit__(self, type_ex, value_ex, traceback_ex):
"""Function to make this class work with Python's with statement"""
self.disconnect()
# %%
def _read_buffer(self, byte_count):
"""Reads the contents of the buffer and returns it as an integer list.
"""
# If the page_size is set much larger than this number we tend
# to get problems with partially filled buffers
page_size = 32768
self._buffer = []
# Read in data in as large chunks as possible to speed up reading.
# Read in the largest possible chunks first.
for i in range(int(byte_count/page_size)):
self._buffer += self._ut330.read(page_size)
# Now read in the smallest chunk.
self._buffer += self._ut330.read(byte_count % page_size)
# %%
def _write_buffer(self):
"""Writes the command string to the buffer"""
bytes_written = self._ut330.write(bytearray(self._buffer))
if bytes_written != len(self._buffer):
raise ValueError('Error! _write_buffer: not all command bytes '
'written')
# %%
def _get_datetime(self):
"""Returns the date and time as a timestamp"""
timestamp = datetime.datetime(2000 + self._buffer[self._index],
self._buffer[self._index + 1],
self._buffer[self._index + 2],
self._buffer[self._index + 3],
self._buffer[self._index + 4],
self._buffer[self._index + 5])
return timestamp
# %%
def _get_temperature(self):
"""Returns the temperature from the device buffer data - including
negative temperatures"""
# Look to see if the temperature's negative - using two's complement
# to represent negative numbers
if self._buffer[self._index + 1] >= 128:
temperature = -float(256*(self._buffer[self._index + 1] ^ 0xff) +
(self._buffer[self._index] ^ 0xff) + 1)/10
# Temperature is positive
else:
temperature = float(256*self._buffer[self._index + 1] +
self._buffer[self._index])/10
return temperature
# %%
def _get_name(self):
"""Retrieves the device name from the buffer data"""
temp = self._buffer[self._index: self._index + 10]
return ''.join(chr(entry) for entry in temp).strip()
# %%
def disconnect(self):
"""Disconnect the device"""
if self._ut330 is not None:
self._ut330.close()
# %%
@buffer_safety
def read_data(self):
"""Downloads the device buffer data (temperature, humidity, pressure),
and decodes it"""
# We split this function into a header and data part to speed up
# reading. Reading the header tells us how much data there is in the
# data part
# The read data command
self._buffer = [0xab, 0xcd, 0x03, 0x19, 0x70, 0xc5]
# Write the command
self._write_buffer()
# Read the header
# ---------------
# Now get the header data from the buffer
self._read_buffer(8)
# Check that some data has actually been returned
if len(self._buffer) == 0:
print("Warning! Empty buffer returned by device")
return []
# Get the length of data in the buffer
length = (self._buffer[4] + 256*self._buffer[5] +
256*256*self._buffer[6] + 256*256*256*self._buffer[7])
# Check that there's actually some data on the device - 22 is the
# minimum buffer length if there's actually data
if length < 22:
# Need to read the CRC code and so clear the buffer before
# returning - gives an error later if this isn't done.
self._read_buffer(2)
print("Warning! No temperature/humidity/pressure data on the " \
"device")
return []
# Now get the data
# ----------------
self._read_buffer(length)
self._index = 0 # This is the offset of the first data item
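# Each reading occupies 12 bytes: 6 for the timestamp, 2 for the
# temperature (tenths of a degree, two's complement) and 2 each for
# relative humidity and pressure (both stored as value x 10).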
# The output data structure
data = []
# Loop over every set of readings
while self._index < length - 2:
timestamp = self._get_datetime()
self._index += 6
temperature = self._get_temperature()
self._index += 2
humidity = float(self._buffer[self._index] +
256*self._buffer[self._index + 1])/10
pressure = float(self._buffer[self._index + 2] +
256*self._buffer[self._index + 3])/10
self._index += 4
data.append({'Timestamp': timestamp,
'Temperature (C)': temperature,
'Relative humidity (%)': humidity,
'Pressure (hPa)': pressure})
return data
# %%
@buffer_safety
def delete_data(self):
"""Deletes the temperature, humidity, and pressure data from the
device"""
# The delete command
self._buffer = [0xab, 0xcd, 0x03, 0x18, 0xb1, 0x05]
self._buffer[5], self._buffer[4] = modbusCRC(self._buffer[0:4])
# Write the command
self._write_buffer()
# Now get the response data from the buffer
self._read_buffer(7)
# Check the return code shows the data was correctly deleted
if [171, 205, 4, 24, 0, 116, 181] != self._buffer:
raise IOError("Error! Delete data returned error code.")
# %%
@buffer_safety
def read_config(self):
"""Read the configuration data from the device, saves it to disk"""
# Send the read info command to the device
self._buffer = [0xab, 0xcd, 0x03, 0x11, 0x71, 0x03]
# Write the command
self._write_buffer()
# Now get the data from the buffer. We know the returned length will
# be 46.
self._read_buffer(46)
# Now, interpret the data in the buffer
config = {}
# Get the device name
self._index = 4
config['device name'] = self._get_name()
# I don't know what bytes 15 to 19 are
config['sampling interval'] = (256*256*self._buffer[22] +
256*self._buffer[21] +
self._buffer[20])
config['readings count'] = 256*self._buffer[24] + self._buffer[23]
config['readings limit'] = 256*self._buffer[26] + self._buffer[25]
config['battery power'] = self._buffer[27]
config['overwrite records'] = bool(self._buffer[28])
config['delay start'] = bool(self._buffer[29])
config['delay timing'] = (256*256*self._buffer[32] +
256*self._buffer[31] +
self._buffer[30])
# I don't know what byte 33 is
# It's possible the high temp alarm could be negative
if self._buffer[34] < 128:
config['high temperature alarm'] = self._buffer[34]
else:
config['high temperature alarm'] = -256 + self._buffer[34]
# It's possible the low temperature alarm could be positive
if self._buffer[35] >= 128:
config['low temperature alarm'] = -256 + self._buffer[35]
else:
config['low temperature alarm'] = self._buffer[35]
config['high humidity alarm'] = self._buffer[36]
config['low humidity alarm'] = self._buffer[37]
self._index = 38
config['timestamp'] = self._get_datetime()
return config
# %%
@buffer_safety
def write_config(self, config):
"""Sets the configuration information on the device"""
# The command to send, note we'll be overriding some bytes
self._buffer = [0xab, 0xcd, 0x1a, 0x10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
# Check config parameters
# -----------------------
if len(config['device name']) > 10:
raise ValueError('Error! device name {0} is {1} characters when '
'it can be a maximum of 10'.
format(config['device name'],
len(config['device name'])))
if len(config['device name']) == 0:
raise ValueError('Error! device name is length zero, it needs '
'to be more than zero characters')
if config['sampling interval'] < 0 or \
config['sampling interval'] > 86400:
raise ValueError('Error! sampling interval is {0} but it must be '
'between 0 and 86400'.
format(config['sampling interval']))
if config['delay timing'] < 0 or config['delay timing'] > 604800:
raise ValueError('Error! delay timing is {0} but it must be '
'between 0 and 604800'.
format(config['delay timing']))
# Prepare the data for writing
# ----------------------------
# Add the device name - pad to 10 characters with spaces
for idx, val in enumerate(config['device name'][0:10].rjust(10, ' ')):
self._buffer[4 + idx] = ord(val)
# Add the sampling interval
self._buffer[14] = config['sampling interval'] & 0xff
self._buffer[15] = (config['sampling interval'] & 0x00ff00) >> 8
self._buffer[16] = (config['sampling interval'] & 0xff0000) >> 16
self._buffer[17] = int(config['overwrite records'])
self._buffer[18] = int(config['delay start'])
# Delay timing
self._buffer[19] = config['delay timing'] & 0xff
self._buffer[20] = (config['delay timing'] & 0x00ff00) >> 8
self._buffer[21] = (config['delay timing'] & 0xff0000) >> 16
self._buffer[22] = 0 # I don't know what this byte is
if config['high temperature alarm'] >= 0:
self._buffer[23] = config['high temperature alarm']
else:
self._buffer[23] = 256 + config['high temperature alarm']
if config['low temperature alarm'] < 0:
self._buffer[24] = 256 + config['low temperature alarm']
else:
self._buffer[24] = config['low temperature alarm']
self._buffer[25] = config['high humidity alarm']
self._buffer[26] = config['low humidity alarm']
# Add the CRC bytes
self._buffer[28], self._buffer[27] = modbusCRC(self._buffer[0:27])
# Write the buffer
self._write_buffer()
# Now get the response data from the buffer
self._read_buffer(7)
# Check the return code shows the data was correctly written
if [171, 205, 4, 16, 0, 115, 117] != self._buffer:
raise IOError("Error! Config writing returned error code.")
# %%
@buffer_safety
def write_datetime(self, timestamp):
"""Syncs the time to the timestamp"""
# The command to send, note we'll be overriding some bytes
self._buffer = [0xab, 0xcd, 0x09, 0x12, 0, 0, 0, 0, 0, 0, 0, 0]
self._buffer[4] = timestamp.year - 2000
self._buffer[5] = timestamp.month
self._buffer[6] = timestamp.day
self._buffer[7] = timestamp.hour
self._buffer[8] = timestamp.minute
self._buffer[9] = timestamp.second
# Add the CRC bytes
self._buffer[11], self._buffer[10] = modbusCRC(self._buffer[0:10])
self._write_buffer()
# Now get the response data from the buffer
self._read_buffer(7)
# Check the return code shows the data was correctly written
if [171, 205, 4, 18, 0, 114, 21] != self._buffer:
raise IOError("Error! Writing datetime returned error code.")
# %%
@buffer_safety
def read_offsets(self):
"""Reads the temperature, humidity, pressure offset"""
self._buffer = [0xab, 0xcd, 0x03, 0x17, 0xF1, 0x01]
self._write_buffer()
# Now get the response data from the buffer. The returned buffer length
# is known to be 18.
self._read_buffer(18)
# Decode the data
offsets = {}
self._index = 4
offsets['temperature'] = self._get_temperature()
if self._buffer[6] < 128:
offsets['temperature offset'] = float(self._buffer[6]) / 10
else:
offsets['temperature offset'] = float(self._buffer[6] - 256) / 10
offsets['humidity'] = float(256*self._buffer[8] + self._buffer[7]) / 10
if self._buffer[9] < 128:
offsets['humidity offset'] = float(self._buffer[9]) / 10
else:
offsets['humidity offset'] = float(self._buffer[9] - 256) / 10
offsets['pressure'] = float(256*self._buffer[11] + self._buffer[10])/10
if self._buffer[12] < 128:
offsets['pressure offset'] = float(self._buffer[12]) / 10
else:
offsets['pressure offset'] = float(self._buffer[12] - 256) / 10
# I don't know what bytes 13, 14, and 15 are
return offsets
# %%
@buffer_safety
def write_offsets(self, offsets):
"""Set the device offsets for temperature, humidity, pressure"""
# Check for errors in parameters
if offsets['temperature offset'] > 6.1 or \
offsets['temperature offset'] < -6:
raise ValueError('Error! The temperature offset is {0} when it '
'must be between -6 and 6.1 C'.
format(offsets['temperature offset']))
if offsets['humidity offset'] > 6.1 or offsets['humidity offset'] < -6:
raise ValueError('Error! The humidity offset is {0} when it must '
'be between -6% and 6.1%'.
format(offsets['humidity offset']))
if offsets['pressure offset'] > 6.1 or offsets['pressure offset'] < -6:
raise ValueError('Error! The pressure offset is {0} when it must '
'be between -6hpa and 6.1hpa'.
format(offsets['pressure offset']))
# The command to send, note we'll be overriding some bytes
self._buffer = [0xab, 0xcd, 0x06, 0x16, 0, 0, 0, 0, 0]
if offsets['temperature offset'] < 0:
self._buffer[4] = 256 - int(offsets['temperature offset']*10)
else:
self._buffer[4] = int(offsets['temperature offset']*10)
if offsets['humidity offset'] < 0:
self._buffer[5] = 256 - int(offsets['humidity offset']*10)
else:
self._buffer[5] = int(offsets['humidity offset']*10)
if offsets['pressure offset'] < 0:
self._buffer[6] = 256 - int(offsets['pressure offset']*10)
else:
self._buffer[6] = int(offsets['pressure offset']*10)
# Add the CRC bytes
self._buffer[8], self._buffer[7] = modbusCRC(self._buffer[0:7])
self._write_buffer()
# Now get the response data from the buffer
self._read_buffer(7)
# Check the return code shows the data was correctly written
if [171, 205, 4, 22, 0, 112, 213] != self._buffer:
raise IOError("Error! Offset writing returned error code.")
# %%
@buffer_safety
def restore_factory(self):
"""This command is given as a factory reset in the Windows software"""
self._buffer = [0xab, 0xcd, 0x03, 0x20, 0xb0, 0xd7]
self._write_buffer()
# Now get the data from the buffer
self._read_buffer(7)
# Check the return code shows the data was correctly written
if [171, 205, 4, 32, 0, 103, 117] != self._buffer:
raise IOError("Error! Restore factory returned an error code.")
# %%
@buffer_safety
def read_device_name(self):
"""Returns the device name"""
self._buffer = [0xab, 0xcd, 0x03, 0x51, 0x70, 0xF3]
self._write_buffer()
# Now get the response data from the buffer, we know the length is
# fixed to 16 bytes
self._read_buffer(16)
self._index = 4
return self._get_name()
| mit | 6,224,445,459,551,490,000 | 33.728045 | 79 | 0.551962 | false |
poppogbr/genropy | resources/common/gnrcomponents/batch_handler/batch_handler.py | 1 | 11655 | # -*- coding: UTF-8 -*-
# chat_component.py
# Created by Francesco Porcari on 2010-09-08.
# Copyright (c) 2010 Softwell. All rights reserved.
from gnr.web.gnrwebpage import BaseComponent
from gnr.core.gnrlang import gnrImport, objectExtract
from gnr.core.gnrbag import Bag
class BatchMonitor(BaseComponent):
js_requires = 'gnrcomponents/batch_handler/batch_handler'
css_requires = 'gnrcomponents/batch_handler/batch_handler'
def mainLeft_batch_monitor(self, pane):
"""!!Batch"""
self.bm_monitor_pane(pane)
def bm_monitor_pane(self, pane):
pane.dataController("batch_monitor.on_datachange(_triggerpars.kw);", _fired="^gnr.batch")
        # TODO: the client should be modified so that it grabs the visible
        # element with class bm_rootnode and attaches the thermometers to it,
        # instead of relying on the node id
pane.div(nodeId='bm_rootnode', _class='bm_rootnode')
pane.dataRpc('dummy', 'setStoreSubscription', subscribe_batch_monitor_on=True,
storename='user', client_path='gnr.batch', active=True,
_onResult='genro.rpc.setPolling(1,1);')
pane.dataRpc('dummy', 'setStoreSubscription', active=False, subscribe_batch_monitor_off=True,
_onCalling='genro.rpc.setPolling();', storename='user')
class TableScriptRunner(BaseComponent):
py_requires = 'foundation/dialogs,gnrcomponents/printer_option_dialog:PrinterOption'
js_requires = 'gnrcomponents/batch_handler/batch_handler'
def onMain_table_script_runner(self):
page = self.pageSource()
plugin_main = page.div(datapath='gnr.plugin.table_script_runner', nodeId='table_script_runner')
plugin_main.dataController(""" var params = table_script_run[0];
SET .res_type= params['res_type'];
SET .table = params['table'];
SET .resource = params['resource'];
SET .selectionName = params['selectionName'];
SET .publishOnResult = params['publishOnResult'];
SET .selectionFilterCb = params['selectionFilterCb'];
SET .gridId = params['gridId'];
SET .selectedRowidx = params['selectedRowidx'];
SET .paramspath = params['paramspath'];
SET .onCalling = params['onCalling'];
FIRE .build_pars_dialog;
FIRE #table_script_dlg_parameters.open;
""", subscribe_table_script_run=True)
rpc = plugin_main.dataRpc('dummy', 'table_script_run',
_fired='^.run',
_onCalling='=.onCalling',
_onResult='if(kwargs._publishOnResult){genro.publish(kwargs._publishOnResult);}',
parameters='=.parameters',
resource='=.resource',
res_type='=.res_type',
table='=.table',
gridId='=.gridId',
_publishOnResult='=.publishOnResult',
selectionName='=.selectionName',
printerOptions='==this.getRelativeData("gnr.server_print.printers."+resource);',
selectionFilterCb='=.selectionFilterCb',
selectedRowidx="=.selectedRowidx", _POST=True, timeout=0)
plugin_main.div().remote('table_script_parameters',
resource='=.resource',
res_type='=.res_type',
title='=.title',
table='=.table',
_fired='^.build_pars_dialog')
def table_script_resource_tree(self, pane, table=None, res_type=None, selectionName=None, gridId=None, _class=None,
**kwargs):
pane.dataRemote('.tree.store', 'table_script_resource_tree_data', table=table, cacheTime=10, res_type=res_type)
pane.tree(storepath='.tree.store', persist=False,
labelAttribute='caption', hideValues=True,
_class=_class,
selected_resource='.resource',
connect_ondblclick='FIRE .run_table_script',
tooltip_callback="return sourceNode.attr.description || sourceNode.label;",
**kwargs)
pane.dataController("""
var selectedRowidx = gridId?genro.wdgById(gridId).getSelectedRowidx():null;
var pars = {table:table,res_type:res_type,selectionName:selectionName,selectedRowidx:selectedRowidx,resource:resource,gridId:gridId}
console.log(pars);
PUBLISH table_script_run=pars;""",
_fired="^.run_table_script", selectionName=selectionName, table=table,
gridId=gridId, res_type=res_type, resource='=.resource')
def table_script_dialog_center(self, parentBc, hasParameters=None, resource=None, **kwargs):
if hasattr(self, 'table_script_option_pane'):
paramsBc = parentBc.borderContainer(pageName='params', datapath='.data', **kwargs)
if hasParameters:
parameters_pane = paramsBc.contentPane(region='top', _class='ts_parametersPane')
parameters_pane.mainStack = parentBc.mainStack
self.table_script_parameters_pane(parameters_pane)
self.table_script_option_pane(paramsBc.contentPane(region='bottom', datapath='.batch_options',
_class='ts_optionsPane'), resource=resource)
elif hasParameters:
parameters_pane = parentBc.contentPane(pageName='params', datapath='.data', **kwargs)
parameters_pane.mainStack = parentBc.mainStack
self.table_script_parameters_pane(parameters_pane)
def remote_table_script_parameters(self, pane, table=None, res_type=None, resource='', title=None, **kwargs):
pkgname, tblname = table.split('.')
if not resource:
return
resource = resource.replace('.py', '')
        #cl=self.site.loadResource(pkgname,'tables',tblname,res_type,"%s:Main" %resource)  # do the mixin with a prefix
res_obj = self.site.loadTableScript(self, table, '%s/%s' % (res_type, resource), class_name='Main')
self.current_batch = res_obj
self.mixin(res_obj, methods='table_script_*,rpc_table_script_*')
batch_dict = objectExtract(res_obj, 'batch_')
batch_dict['resource_name'] = resource
batch_dict['res_type'] = res_type
pane.data('.batch', batch_dict)
hasParameters = hasattr(self, 'table_script_parameters_pane')
dlg_dict = objectExtract(res_obj, 'dialog_')
dialog_height_no_par = dlg_dict.pop('height_no_par', dlg_dict.get('height'))
if not hasParameters:
dlg_dict['height'] = dialog_height_no_par
dlg_dict['title'] = dlg_dict.get('title', batch_dict.get('title'))
pane.data('.dialog', dlg_dict)
dlg = self.simpleDialog(pane, datapath='.dialog', title='^.title', height='^.height', width='^.width',
cb_center=self.table_script_dialog_center, dlgId='table_script_dlg_parameters',
hasParameters=hasParameters, dialog_height_no_par=dialog_height_no_par,
resource=resource)
dlg.dataController("""
var modifier = _node.attr.modifier;
if (modifier=='Shift' || immediate){
FIRE .close;
SET #table_script_runner.parameters=pars;
FIRE #table_script_runner.run;
}else{
FIRE .close;
batch_monitor.create_local_root();
SET #table_script_runner.parameters=pars;
PUBLISH batch_monitor_on;
FIRE #table_script_runner.run;
}
""",
_fired="^.save", pars='=.data',immediate=batch_dict.get('immediate',False))
def rpc_table_script_run(self, table=None, resource=None, res_type=None, selectionName=None, selectionFilterCb=None,
selectedRowidx=None,
parameters=None, printerOptions=None, **kwargs):
tblobj = self.tblobj or self.db.table(table)
res_obj = self.site.loadTableScript(self, tblobj, '%s/%s' % (res_type, resource), class_name='Main')
res_obj.defineSelection(selectionName=selectionName, selectedRowidx=selectedRowidx,
selectionFilterCb=selectionFilterCb)
parameters = parameters or {}
parameters['_printerOptions'] = printerOptions
res_obj(parameters=parameters, **kwargs)
def rpc_table_script_resource_tree_data(self, table=None, res_type=None):
#pkg,tblname = table.split('.')
tblobj = self.db.table(table)
pkg = tblobj.pkg.name
tblname = tblobj.name
result = Bag()
resources = self.site.resource_loader.resourcesAtPath(pkg, 'tables/%s/%s' % (tblname, res_type), 'py')
forbiddenNodes = []
def cb(node, _pathlist=None):
has_parameters = False
if node.attr['file_ext'] == 'py':
resmodule = gnrImport(node.attr['abs_path'])
tags = getattr(resmodule, 'tags', '')
if tags and not self.application.checkResourcePermission(tags, self.userTags):
if node.label == '_doc':
forbiddenNodes.append('.'.join(_pathlist))
return
caption = getattr(resmodule, 'caption', node.label)
description = getattr(resmodule, 'description', '')
if node.label == '_doc':
result.setAttr('.'.join(_pathlist), dict(caption=caption, description=description, tags=tags,
has_parameters=has_parameters))
else:
mainclass = getattr(resmodule, 'Main', None)
assert mainclass, 'Main class is mandatory in tablescript resource'
has_parameters = hasattr(mainclass, 'parameters_pane')
result.setItem('.'.join(_pathlist + [node.label]), None, caption=caption, description=description,
resource=node.attr['rel_path'][:-3], has_parameters=has_parameters)
resources.walk(cb, _pathlist=[])
for forbidden in forbiddenNodes:
result.pop(forbidden)
return result
def rpc_table_script_renderTemplate(self, doctemplate=None, record_id=None, templates=None, **kwargs):
doctemplate_tbl = self.db.table('adm.doctemplate')
tplbuilder = doctemplate_tbl.getTemplateBuilder(doctemplate=doctemplate, templates=templates)
return doctemplate_tbl.renderTemplate(tplbuilder, record_id=record_id,
extraData=Bag(dict(host=self.request.host))) | lgpl-2.1 | 4,025,233,142,404,140,000 | 56.418719 | 160 | 0.54955 | false |
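    # Note: any component can trigger this runner by publishing the
    # 'table_script_run' topic with a parameter dict carrying table,
    # res_type, resource, selectionName, selectedRowidx and gridId --
    # which is exactly what the resource-tree dataController above does
    # via "PUBLISH table_script_run=pars;".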
pombredanne/spaCy | spacy/tests/tokens/test_tokens_api.py | 1 | 4658 | from __future__ import unicode_literals
from spacy.tokens import Doc
import pytest
@pytest.mark.models
def test_getitem(EN):
tokens = EN(u'Give it back! He pleaded.')
assert tokens[0].orth_ == 'Give'
assert tokens[-1].orth_ == '.'
with pytest.raises(IndexError):
tokens[len(tokens)]
def to_str(span):
return '/'.join(token.orth_ for token in span)
span = tokens[1:1]
assert not to_str(span)
span = tokens[1:4]
assert to_str(span) == 'it/back/!'
span = tokens[1:4:1]
assert to_str(span) == 'it/back/!'
with pytest.raises(ValueError):
tokens[1:4:2]
with pytest.raises(ValueError):
tokens[1:4:-1]
span = tokens[-3:6]
assert to_str(span) == 'He/pleaded'
span = tokens[4:-1]
assert to_str(span) == 'He/pleaded'
span = tokens[-5:-3]
assert to_str(span) == 'back/!'
span = tokens[5:4]
assert span.start == span.end == 5 and not to_str(span)
span = tokens[4:-3]
assert span.start == span.end == 4 and not to_str(span)
span = tokens[:]
assert to_str(span) == 'Give/it/back/!/He/pleaded/.'
span = tokens[4:]
assert to_str(span) == 'He/pleaded/.'
span = tokens[:4]
assert to_str(span) == 'Give/it/back/!'
span = tokens[:-3]
assert to_str(span) == 'Give/it/back/!'
span = tokens[-3:]
assert to_str(span) == 'He/pleaded/.'
span = tokens[4:50]
assert to_str(span) == 'He/pleaded/.'
span = tokens[-50:4]
assert to_str(span) == 'Give/it/back/!'
span = tokens[-50:-40]
assert span.start == span.end == 0 and not to_str(span)
span = tokens[40:50]
assert span.start == span.end == 7 and not to_str(span)
span = tokens[1:4]
assert span[0].orth_ == 'it'
subspan = span[:]
assert to_str(subspan) == 'it/back/!'
subspan = span[:2]
assert to_str(subspan) == 'it/back'
subspan = span[1:]
assert to_str(subspan) == 'back/!'
subspan = span[:-1]
assert to_str(subspan) == 'it/back'
subspan = span[-2:]
assert to_str(subspan) == 'back/!'
subspan = span[1:2]
assert to_str(subspan) == 'back'
subspan = span[-2:-1]
assert to_str(subspan) == 'back'
subspan = span[-50:50]
assert to_str(subspan) == 'it/back/!'
subspan = span[50:-50]
assert subspan.start == subspan.end == 4 and not to_str(subspan)
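    # Taken together, the assertions above pin down the slicing contract:
    # out-of-range start/stop values are clamped to the Doc/Span boundaries,
    # negative indices count from the end, and only a step of 1 (or an
    # implicit step) is accepted -- anything else raises ValueError.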
@pytest.mark.models
def test_serialize(EN):
tokens = EN(u'Give it back! He pleaded.')
packed = tokens.to_bytes()
new_tokens = Doc(EN.vocab).from_bytes(packed)
assert tokens.string == new_tokens.string
assert [t.orth_ for t in tokens] == [t.orth_ for t in new_tokens]
assert [t.orth for t in tokens] == [t.orth for t in new_tokens]
@pytest.mark.models
def test_serialize_whitespace(EN):
tokens = EN(u' Give it back! He pleaded. ')
packed = tokens.to_bytes()
new_tokens = Doc(EN.vocab).from_bytes(packed)
assert tokens.string == new_tokens.string
assert [t.orth_ for t in tokens] == [t.orth_ for t in new_tokens]
assert [t.orth for t in tokens] == [t.orth for t in new_tokens]
def test_set_ents(EN):
tokens = EN.tokenizer(u'I use goggle chrone to surf the web')
assert len(tokens.ents) == 0
tokens.ents = [(EN.vocab.strings['PRODUCT'], 2, 4)]
assert len(list(tokens.ents)) == 1
assert [t.ent_iob for t in tokens] == [0, 0, 3, 1, 0, 0, 0, 0]
ent = tokens.ents[0]
assert ent.label_ == 'PRODUCT'
assert ent.start == 2
assert ent.end == 4
def test_merge(EN):
doc = EN('WKRO played songs by the beach boys all night')
assert len(doc) == 9
# merge 'The Beach Boys'
doc.merge(doc[4].idx, doc[6].idx + len(doc[6]), 'NAMED', 'LEMMA', 'TYPE')
assert len(doc) == 7
assert doc[4].text == 'the beach boys'
assert doc[4].text_with_ws == 'the beach boys '
assert doc[4].tag_ == 'NAMED'
def test_merge_end_string(EN):
doc = EN('WKRO played songs by the beach boys all night')
assert len(doc) == 9
# merge 'The Beach Boys'
doc.merge(doc[7].idx, doc[8].idx + len(doc[8]), 'NAMED', 'LEMMA', 'TYPE')
assert len(doc) == 8
assert doc[7].text == 'all night'
assert doc[7].text_with_ws == 'all night'
@pytest.mark.models
def test_merge_children(EN):
"""Test that attachments work correctly after merging."""
doc = EN('WKRO played songs by the beach boys all night')
# merge 'The Beach Boys'
doc.merge(doc[4].idx, doc[6].idx + len(doc[6]), 'NAMED', 'LEMMA', 'TYPE')
for word in doc:
if word.i < word.head.i:
assert word in list(word.head.lefts)
elif word.i > word.head.i:
assert word in list(word.head.rights)
| mit | 1,432,587,112,072,753,400 | 30.053333 | 77 | 0.597252 | false |
wieden-kennedy/django-haikus | django_haikus/migrations/0005_auto__add_field_haikumodel_full_text.py | 1 | 3967 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'HaikuModel.full_text'
db.add_column('django_haikus_haikumodel', 'full_text', self.gf('django.db.models.fields.TextField')(unique=True, null=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'HaikuModel.full_text'
db.delete_column('django_haikus_haikumodel', 'full_text')
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'django_haikus.haikuline': {
'Meta': {'ordering': "('line_number',)", 'object_name': 'HaikuLine'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_number': ('django.db.models.fields.IntegerField', [], {}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'quality': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'django_haikus.haikumodel': {
'Meta': {'object_name': 'HaikuModel'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'haiku_source'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'full_text': ('django.db.models.fields.TextField', [], {'unique': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_composite': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['django_haikus.HaikuLine']", 'symmetrical': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'quality': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'django_haikus.haikurating': {
'Meta': {'object_name': 'HaikuRating'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'django_haikus.simpletext': {
'Meta': {'object_name': 'SimpleText'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_haiku': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'syllables': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'unique': 'True'})
}
}
complete_apps = ['django_haikus']
| bsd-3-clause | -2,514,587,213,929,043,500 | 58.208955 | 189 | 0.56365 | false |
macarthur-lab/xbrowse | seqr/management/commands/add_project_tag.py | 1 | 1307 | from django.core.management.base import BaseCommand, CommandError
from django.db.models.query_utils import Q
from seqr.models import Project, VariantTagType
from seqr.model_utils import create_seqr_model
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--project', help='Project for tag.', required=True)
parser.add_argument('--name', help='Tag name', required=True)
parser.add_argument('--order', help='Order in project tag list', required=True)
parser.add_argument('--category', help='Category (optional)')
parser.add_argument('--description', help='Description (optional)')
parser.add_argument('--color', help='Color (optional)')
def handle(self, *args, **options):
project_name = options['project']
tag_options = {k: options[k] or '' for k in ['name', 'order', 'category', 'description', 'color']}
project = Project.objects.get(Q(name=project_name) | Q(guid=project_name))
if VariantTagType.objects.filter(name__iexact=options['name']).filter(Q(project=project) | Q(project__isnull=True)):
raise CommandError('Tag "{}" already exists for project {}'.format(options['name'], project_name))
create_seqr_model(VariantTagType, project=project, **tag_options)
| agpl-3.0 | 3,725,878,423,760,104,000 | 49.269231 | 124 | 0.680184 | false |
tectronics/nyctos | src/data.res/scripts/armor/prefixes/reinforced.py | 1 | 1483 | import sim, sim_items, main
from util import *
import random
class ReinforcedPrefix(sim_items.Prefix):
"""
"reinforced" prefix boosts armor by increasing its PV or EV (whichever would
normally be greater) by 1-3 points.
"""
def __init__(self):
super(ReinforcedPrefix, self).__init__(
"reinforced", # name
"Whoever crafted this %(wearable)s evidently used above-average "
"material and paid great attention to reinforcing standard "
"weak points."
)
def initialize(self, wearable):
if not isinstance(wearable, sim_items.Armor):
raise ValueError('Tried to apply %r to non-armor item: %r' % (self,
wearable))
super(ReinforcedPrefix, self).initialize(wearable)
self.bonus = random.randint(1,3)
if self.wearable.bonuses['evMod'] > self.wearable.bonuses['pvMod']:
self.affectedMod = 'evMod'
else:
self.affectedMod = 'pvMod'
self.wearable.bonuses.update({
self.affectedMod: \
self.wearable.bonuses[self.affectedMod] + self.bonus
})
def remove(self, wearable):
super(ReinforcedPrefix, self).remove(wearable)
wearable.bonuses.update({
self.affectedMod: \
self.wearable.bonuses[self.affectedMod] - self.bonus
})
#========================================
thingClass = ReinforcedPrefix
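# Illustrative effect (hypothetical numbers, not from the original source):
# applied to an armor with pvMod 3 and evMod 1, the prefix rolls a bonus of
# 1-3 and raises pvMod to 4-6; remove() subtracts the same bonus again.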
| gpl-2.0 | -9,204,112,638,648,562,000 | 32.704545 | 80 | 0.581254 | false |
jni/skan | skan/vendored/thresholding.py | 1 | 7886 | import itertools
import numpy as np
from skimage.transform import integral_image
from skimage.util import dtype_limits
import numba
def broadcast_mgrid(arrays):
shape = tuple(map(len, arrays))
ndim = len(shape)
result = []
for i, arr in enumerate(arrays, start=1):
reshaped = np.broadcast_to(arr[(...,) + (np.newaxis,) * (ndim - i)],
shape)
result.append(reshaped)
return result
@numba.jit(nopython=True, cache=True, nogil=True)
def _correlate_sparse_offsets(input, indices, offsets, values, output):
for off, val in zip(offsets, values):
# this loop order optimises cache access, gives 10x speedup
for i, j in enumerate(indices):
output[i] += input[j + off] * val
def correlate_sparse(image, kernel, mode='reflect'):
"""Compute valid cross-correlation of `padded_array` and `kernel`.
This function is *fast* when `kernel` is large with many zeros.
See ``scipy.ndimage.correlate`` for a description of cross-correlation.
Parameters
----------
image : array of float, shape (M, N,[ ...,] P)
        The input array. With mode 'valid' it should already be padded,
        since a margin of the kernel shape minus one is stripped off;
        for any other mode the padding is applied internally.
kernel : array of float, shape (Q, R,[ ...,] S)
The kernel to be correlated. Must have the same number of
dimensions as `padded_array`. For high performance, it should
be sparse (few nonzero entries).
mode : string, optional
See `np.pad` for valid modes. Additionally, mode 'valid' is
accepted, in which case no padding is applied and the result is
the result for the smaller image for which the kernel is entirely
inside the original data.
Returns
-------
result : array of float, shape (M, N,[ ...,] P)
The result of cross-correlating `image` with `kernel`. If mode
'valid' is used, the resulting shape is (M-Q+1, N-R+1,[ ...,] P-S+1).
"""
if mode == 'valid':
padded_image = image
else:
w = kernel.shape[0] // 2
padded_image = np.pad(image, (w, w-1), mode=mode)
indices = np.nonzero(kernel)
offsets = np.ravel_multi_index(indices, padded_image.shape)
values = kernel[indices]
result = np.zeros([a - b + 1
for a, b in zip(padded_image.shape, kernel.shape)])
corner_multi_indices = broadcast_mgrid([np.arange(i)
for i in result.shape])
corner_indices = np.ravel_multi_index(corner_multi_indices,
padded_image.shape).ravel()
_correlate_sparse_offsets(padded_image.ravel(), corner_indices,
offsets, values, result.ravel())
return result
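# Illustrative use of correlate_sparse (assumption; not part of the original
# module). A kernel that is nonzero only at its four corners is "sparse", so
# only four multiply-adds happen per output pixel:
#
#     image = np.random.rand(64, 64)
#     kernel = np.zeros((4, 4))
#     kernel[0, 0] = kernel[0, -1] = kernel[-1, 0] = kernel[-1, -1] = 1
#     result = correlate_sparse(image, kernel, mode='reflect')
#     assert result.shape == image.shape   # holds for even-sized kernels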
def _mean_std(image, w):
"""Return local mean and standard deviation of each pixel using a
neighborhood defined by a rectangular window with size w times w.
The algorithm uses integral images to speedup computation. This is
used by threshold_niblack and threshold_sauvola.
Parameters
----------
image : ndarray
Input image.
w : int
Odd window size (e.g. 3, 5, 7, ..., 21, ...).
Returns
-------
m : 2-D array of same size of image with local mean values.
s : 2-D array of same size of image with local standard
deviation values.
References
----------
.. [1] F. Shafait, D. Keysers, and T. M. Breuel, "Efficient
implementation of local adaptive thresholding techniques
using integral images." in Document Recognition and
Retrieval XV, (San Jose, USA), Jan. 2008.
DOI:10.1117/12.767755
"""
if w == 1 or w % 2 == 0:
raise ValueError(
"Window size w = %s must be odd and greater than 1." % w)
left_pad = w // 2 + 1
right_pad = w // 2
padded = np.pad(image.astype('float'), (left_pad, right_pad),
mode='reflect')
padded_sq = padded * padded
integral = integral_image(padded)
integral_sq = integral_image(padded_sq)
kern = np.zeros((w + 1,) * image.ndim)
for indices in itertools.product(*([[0, -1]] * image.ndim)):
kern[indices] = (-1) ** (image.ndim % 2 != np.sum(indices) % 2)
sum_full = correlate_sparse(integral, kern, mode='valid')
m = sum_full / (w ** image.ndim)
sum_sq_full = correlate_sparse(integral_sq, kern, mode='valid')
g2 = sum_sq_full / (w ** image.ndim)
s = np.sqrt(g2 - m * m)
return m, s
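# Illustrative sanity check (assumption; not part of the original module):
# on a constant image the local mean equals the constant and the local
# standard deviation is zero, for any odd window size:
#
#     img = np.full((32, 32), 7.0)
#     m, s = _mean_std(img, 15)
#     assert m.shape == s.shape == img.shape
#     assert np.allclose(m, 7.0) and np.allclose(s, 0.0)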
def threshold_niblack(image, window_size=15, k=0.2):
"""Apply Niblack local threshold to an array. [1]_
A threshold T is calculated for every pixel in the image using the
following formula:
T = m(x,y) - k * s(x,y)
where m(x,y) and s(x,y) are the mean and standard deviation of
pixel (x,y) neighborhood defined by a rectangular window with size w
times w centered around the pixel. k is a configurable parameter
that weights the effect of standard deviation.
Parameters
----------
image: (N, M) ndarray
Grayscale input image.
window_size : int, optional
Odd size of pixel neighborhood window (e.g. 3, 5, 7...).
k : float, optional
Value of parameter k in threshold formula.
Returns
-------
threshold : (N, M) ndarray
Threshold mask. All pixels with an intensity higher than
this value are assumed to be foreground.
Notes
-----
This algorithm is originally designed for text recognition.
References
----------
.. [1] Niblack, W (1986), An introduction to Digital Image
Processing, Prentice-Hall.
Examples
--------
>>> from skimage import data
>>> image = data.page()
>>> binary_image = threshold_niblack(image, window_size=7, k=0.1)
"""
m, s = _mean_std(image, window_size)
return m - k * s
def threshold_sauvola(image, window_size=15, k=0.2, r=None):
"""Apply Sauvola local threshold to an array. [2]_
In the original method a threshold T is calculated for every pixel
in the image using the following formula:
T = m(x,y) * (1 + k * ((s(x,y) / R) - 1))
where m(x,y) and s(x,y) are the mean and standard deviation of
pixel (x,y) neighborhood defined by a rectangular window with size w
times w centered around the pixel. k is a configurable parameter
that weights the effect of standard deviation.
R is the maximum standard deviation of a greyscale image.
Parameters
----------
image: (N, M) ndarray
Grayscale input image.
window_size : int, optional
Odd size of pixel neighborhood window (e.g. 3, 5, 7...).
k : float, optional
Value of the positive parameter k.
r : float, optional
Value of R, the dynamic range of standard deviation.
If None, set to the half of the image dtype range.
offset : float, optional
Constant subtracted from obtained local thresholds.
Returns
-------
threshold : (N, M) ndarray
Threshold mask. All pixels with an intensity higher than
this value are assumed to be foreground.
Notes
-----
This algorithm is originally designed for text recognition.
References
----------
.. [2] J. Sauvola and M. Pietikainen, "Adaptive document image
binarization," Pattern Recognition 33(2),
pp. 225-236, 2000.
DOI:10.1016/S0031-3203(99)00055-2
Examples
--------
>>> from skimage import data
>>> image = data.page()
>>> binary_sauvola = threshold_sauvola(image,
... window_size=15, k=0.2)
"""
if r is None:
imin, imax = dtype_limits(image, clip_negative=False)
r = 0.5 * (imax - imin)
m, s = _mean_std(image, window_size)
return m * (1 + k * ((s / r) - 1))
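# Minimal end-to-end sketch (assumption; not part of the original module).
# Both functions return a per-pixel threshold surface, so binarization is a
# simple comparison:
#
#     from skimage import data
#     page = data.page()
#     binary_niblack = page > threshold_niblack(page, window_size=25, k=0.8)
#     binary_sauvola = page > threshold_sauvola(page, window_size=25)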
| bsd-3-clause | -7,008,705,273,790,303,000 | 33.436681 | 77 | 0.604109 | false |
ScreamingUdder/mantid | Framework/PythonInterface/test/python/plugins/functions/MsdPetersTest.py | 1 | 1296 | from __future__ import (absolute_import, division, print_function)
import unittest
import numpy as np
from MsdTestHelper import is_registered, check_output, create_model, create_test_workspace, create_function_string
from mantid.simpleapi import Fit
class MsdPetersTest(unittest.TestCase):
def test_function_has_been_registered(self):
status, msg = is_registered("MsdPeters")
if not status:
self.fail(msg)
def test_function_output(self):
input = np.array([[0, 1], [2, 3]])
expected = np.array([[1., 0.99173554], [0.96774194, 0.93023256]])
tolerance = 0.000001
status, output = check_output("MsdPeters", input, expected, tolerance, Height=1.0, MSD=0.05, Beta=1.0)
if not status:
self.fail("Computed output " + str(output) + " from input " + str(input) +
" is not equal to the expected output: " + str(expected))
def test_use_in_fit(self):
workspace = create_test_workspace(create_model("MsdPeters", Height=1.0, MSD=0.05, Beta=1.0), 1000)
function_string = create_function_string("MsdPeters", Height=1.0, MSD=0.05, Beta=1.0)
Fit(Function=function_string, InputWorkspace=workspace, StartX=1.2, EndX=1200)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -4,871,528,488,213,749,000 | 38.272727 | 114 | 0.647377 | false |
ampyche/ampyche | setup/defaultpaths.py | 1 | 1620 | #!/usr/bin/python3
import os
import subprocess
class SetupPaths():
def __init__(self):
wlan0 = subprocess.getoutput('/sbin/ifconfig wlan0')
h = "http://"
m = "/ampyche/music"
error = "wlan0: error fetching interface information: Device not found"
self.wlan0 = wlan0
self.h = h
self.m = m
self.error = error
def _get_json_paths(self, ppath):
json = {}
json['artistjson'] = ppath + "/json/artist"
json['albumjson'] = ppath + "/json/album"
json['songjson'] = ppath + "/json/song"
json['artistalbumsongjson'] = ppath + "/json/artalbsong"
json['jsonsoup'] = ppath + "/json/soup/"
json['programpath'] = ppath
return json
def _get_db_paths(self, ppath):
db = {}
db['ampyche'] = ppath + "/db/ampyche.db"
db['ampychePlaylist'] = ppath + "/db/ampychePlaylist.db"
db['settings'] = ppath + "/db/settings.db"
db['catalog'] = ppath + "/db/catalog.db"
db['programpath'] = ppath
return db
def _get_http_addr(self):
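		# Extract the IPv4 address from ifconfig output: the "[5:]" slice
		# strips the leading "addr:" from an "inet addr:x.x.x.x" field, and
		# the hard-coded line index depends on whether wlan0 is present.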
if self.wlan0 == self.error:
ipa = subprocess.getoutput('/sbin/ifconfig').split('\n')[1].split()[1][5:]
else:
ipa = subprocess.getoutput('/sbin/ifconfig').split('\n')[17].split()[1][5:]
return self.h + ipa + self.m
def _get_other_paths(self, ppath):
other = {}
httpaddr = self._get_http_addr()
other['httpmusicpath'] = httpaddr + "/music"
other['logfile'] = ppath + "/log/log.txt"
return other
def _get_all_paths(self, ppath):
json = self._get_json_paths(ppath)
db = self._get_db_paths(ppath)
other = self._get_other_paths(ppath)
return json, db, other | mit | 1,901,301,579,964,812,300 | 28.472727 | 78 | 0.608025 | false |
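# Illustrative usage sketch (the install path below is hypothetical):
#
#   paths = SetupPaths()
#   json_paths, db_paths, other_paths = paths._get_all_paths("/opt/ampyche")
#   print(db_paths['ampyche'])      # -> /opt/ampyche/db/ampyche.db
#   print(other_paths['logfile'])   # -> /opt/ampyche/log/log.txt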
eLvErDe/nicotine-plus | pynicotine/gtkgui/downloads.py | 1 | 19108 | # -*- coding: utf-8 -*-
#
# COPYRIGHT (C) 2016-2017 Michael Labouebe <[email protected]>
# COPYRIGHT (C) 2016-2018 Mutnick <[email protected]>
# COPYRIGHT (C) 2013 eL_vErDe <[email protected]>
# COPYRIGHT (C) 2008-2012 Quinox <[email protected]>
# COPYRIGHT (C) 2009 Hedonist <[email protected]>
# COPYRIGHT (C) 2006-2009 Daelstorm <[email protected]>
# COPYRIGHT (C) 2003-2004 Hyriand <[email protected]>
#
# GNU GENERAL PUBLIC LICENSE
# Version 3, 29 June 2007
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from gettext import gettext as _
import gi
from gi.repository import Gdk
from gi.repository import Gtk as gtk
from _thread import start_new_thread
from pynicotine import slskmessages
from pynicotine.gtkgui.entrydialog import MetaDialog
from pynicotine.gtkgui.entrydialog import OptionDialog
from pynicotine.gtkgui.transferlist import TransferList
from pynicotine.gtkgui.utils import HumanSize
from pynicotine.gtkgui.utils import PopupMenu
from pynicotine.gtkgui.utils import PressHeader
from pynicotine.utils import executeCommand
gi.require_version('Gtk', '3.0')
gi.require_version('Gdk', '3.0')
class Downloads(TransferList):
def __init__(self, frame):
TransferList.__init__(self, frame, frame.DownloadList, type='downloads')
self.myvbox = self.frame.downloadsvbox
self.frame.DownloadList.set_property("rules-hint", True)
self.accel_group = gtk.AccelGroup()
self.popup_menu_users = PopupMenu(self.frame, False)
self.popup_menu_clear = popup2 = PopupMenu(self.frame, False)
popup2.setup(
("#" + _("Clear finished/aborted"), self.OnClearFinishedAborted),
("#" + _("Clear finished"), self.OnClearFinished),
("#" + _("Clear aborted"), self.OnClearAborted),
("#" + _("Clear paused"), self.OnClearPaused),
("#" + _("Clear filtered"), self.OnClearFiltered),
("#" + _("Clear queued"), self.OnClearQueued)
)
self.popup_menu = popup = PopupMenu(frame)
popup.setup(
("#" + _("Get place in _queue"), self.OnGetPlaceInQueue),
("", None),
("#" + _("Copy _URL"), self.OnCopyURL),
("#" + _("Copy folder URL"), self.OnCopyDirURL),
("#" + _("Send to _player"), self.OnPlayFiles),
("#" + _("View Metadata of file(s)"), self.OnDownloadMeta),
("#" + _("Open Directory"), self.OnOpenDirectory),
("#" + _("Search"), self.OnFileSearch),
(1, _("User(s)"), self.popup_menu_users, self.OnPopupMenuUsers),
("", None),
("#" + _("_Retry"), self.OnRetryTransfer),
("", None),
("#" + _("Abor_t"), self.OnAbortTransfer),
("#" + _("Abort & Delete"), self.OnAbortRemoveTransfer),
("#" + _("_Clear"), self.OnClearTransfer),
("", None),
(1, _("Clear Groups"), self.popup_menu_clear, None)
)
frame.DownloadList.connect("button_press_event", self.OnPopupMenu, "mouse")
frame.DownloadList.connect("key-press-event", self.on_key_press_event)
cols = frame.DownloadList.get_columns()
try:
print(len(cols))
for i in range(len(cols)):
parent = cols[i].get_widget().get_ancestor(gtk.Button)
if parent:
parent.connect("button_press_event", PressHeader)
# Read Show / Hide column settings from last session
cols[i].set_visible(self.frame.np.config.sections["columns"]["downloads_columns"][i])
except IndexError:
# Column count in config is probably incorrect (outdated?), don't crash
pass
frame.clearFinishedAbortedButton.connect("clicked", self.OnClearFinishedAborted)
frame.clearQueuedButton.connect("clicked", self.OnTryClearQueued)
frame.retryTransferButton.connect("clicked", self.OnRetryTransfer)
frame.abortTransferButton.connect("clicked", self.OnSelectAbortTransfer)
frame.deleteTransferButton.connect("clicked", self.OnAbortRemoveTransfer)
frame.banDownloadButton.connect("clicked", self.OnBan)
frame.DownloadList.expand_all()
self.frame.ToggleAutoRetry.set_active(self.frame.np.config.sections["transfers"]["autoretry_downloads"])
frame.ToggleAutoRetry.connect("toggled", self.OnToggleAutoRetry)
self.frame.ToggleTreeDownloads.set_active(self.frame.np.config.sections["transfers"]["groupdownloads"])
frame.ToggleTreeDownloads.connect("toggled", self.OnToggleTree)
self.OnToggleTree(None)
self.frame.ExpandDownloads.set_active(self.frame.np.config.sections["transfers"]["downloadsexpanded"])
frame.ExpandDownloads.connect("toggled", self.OnExpandDownloads)
self.OnExpandDownloads(None)
def saveColumns(self):
columns = []
widths = []
for column in self.frame.DownloadList.get_columns():
columns.append(column.get_visible())
widths.append(column.get_width())
self.frame.np.config.sections["columns"]["downloads_columns"] = columns
self.frame.np.config.sections["columns"]["downloads_widths"] = widths
def OnToggleAutoRetry(self, widget):
self.frame.np.config.sections["transfers"]["autoretry_downloads"] = self.frame.ToggleAutoRetry.get_active()
def OnTryClearQueued(self, widget):
direction = "down"
win = OptionDialog(self.frame, _("Clear All Queued Downloads?"), modal=True, status=None, option=False, third="")
win.connect("response", self.frame.on_clear_response, direction)
win.set_title(_("Nicotine+") + ": " + _("Clear Queued Transfers"))
win.set_icon(self.frame.images["n"])
win.show()
def expandcollapse(self, path):
if self.frame.ExpandDownloads.get_active():
self.frame.DownloadList.expand_row(path, True)
else:
self.frame.DownloadList.collapse_row(path)
def OnExpandDownloads(self, widget):
expanded = self.frame.ExpandDownloads.get_active()
if expanded:
self.frame.DownloadList.expand_all()
self.frame.ExpandDownloadsImage.set_from_stock(gtk.STOCK_REMOVE, 4)
else:
self.frame.DownloadList.collapse_all()
self.frame.ExpandDownloadsImage.set_from_stock(gtk.STOCK_ADD, 4)
self.frame.np.config.sections["transfers"]["downloadsexpanded"] = expanded
self.frame.np.config.writeConfiguration()
def OnToggleTree(self, widget):
self.TreeUsers = self.frame.ToggleTreeDownloads.get_active()
self.frame.np.config.sections["transfers"]["groupdownloads"] = self.TreeUsers
if not self.TreeUsers:
self.frame.ExpandDownloads.hide()
else:
self.frame.ExpandDownloads.show()
self.RebuildTransfers()
def MetaBox(self, title="Meta Data", message="", data=None, modal=True, Search=False):
win = MetaDialog(self.frame, message, data, modal, Search=Search)
win.set_title(title)
win.set_icon(self.frame.images["n"])
win.show()
gtk.main()
return win.ret
def SelectedResultsAllData(self, model, path, iter, data):
if iter in self.selected_users:
return
user = model.get_value(iter, 0)
filename = model.get_value(iter, 1)
fullname = model.get_value(iter, 10)
size = speed = "0"
length = bitrate = None # noqa: F841
queue = immediate = num = country = bitratestr = ""
for transfer in self.frame.np.transfers.downloads:
if transfer.user == user and fullname == transfer.filename:
size = HumanSize(transfer.size)
try:
speed = str(int(transfer.speed))
speed += _(" KB/s")
except Exception:
pass
bitratestr = str(transfer.bitrate)
length = str(transfer.length)
directory = fullname.rsplit("\\", 1)[0]
data[len(data)] = {
"user": user,
"fn": fullname,
"position": num,
"filename": filename,
"directory": directory,
"size": size,
"speed": speed,
"queue": queue,
"immediate": immediate,
"bitrate": bitratestr,
"length": length,
"country": country
}
def OnDownloadMeta(self, widget):
if not self.frame.np.transfers:
return
data = {}
self.widget.get_selection().selected_foreach(self.SelectedResultsAllData, data)
if data != {}:
self.MetaBox(title=_("Nicotine+:") + " " + _("Downloads Metadata"), message=_("<b>Metadata</b> for Downloads"), data=data, modal=True, Search=False)
def OnOpenDirectory(self, widget):
downloaddir = self.frame.np.config.sections["transfers"]["downloaddir"]
incompletedir = self.frame.np.config.sections["transfers"]["incompletedir"]
if incompletedir == "":
incompletedir = downloaddir
filemanager = self.frame.np.config.sections["ui"]["filemanager"]
transfer = self.selected_transfers[0]
complete_path = os.path.join(downloaddir, transfer.path)
if transfer.path == "":
if transfer.status == "Finished":
executeCommand(filemanager, downloaddir)
else:
executeCommand(filemanager, incompletedir)
        elif os.path.exists(complete_path): # and transfer.status is "Finished"
executeCommand(filemanager, complete_path)
else:
executeCommand(filemanager, incompletedir)
def RebuildTransfers(self):
if self.frame.np.transfers is None:
return
self.Clear()
self.update()
def select_transfers(self):
self.selected_transfers = []
self.selected_users = []
self.widget.get_selection().selected_foreach(self.SelectedTransfersCallback)
def OnBan(self, widgets):
self.select_transfers()
for user in self.selected_users:
self.frame.BanUser(user)
def OnSelectAbortTransfer(self, widget):
self.select_transfers()
self.OnAbortTransfer(widget, False)
def OnSelectUserTransfer(self, widget):
if len(self.selected_users) == 0:
return
selected_user = widget.get_parent().user
sel = self.frame.DownloadList.get_selection()
fmodel = self.frame.DownloadList.get_model()
sel.unselect_all()
for item in self.transfers:
user_file, iter, transfer = item
user, filepath = user_file
if selected_user == user:
ix = fmodel.get_path(iter)
sel.select_path(ix,)
self.select_transfers()
def on_key_press_event(self, widget, event):
key = Gdk.keyval_name(event.keyval)
if key in ("P", "p"):
self.OnPopupMenu(widget, event, "keyboard")
else:
self.select_transfers()
if key in ("T", "t"):
self.OnAbortTransfer(widget)
elif key in ("R", "r"):
self.OnRetryTransfer(widget)
elif key == "Delete":
self.OnAbortTransfer(widget, True, True)
def OnPlayFiles(self, widget, prefix=""):
start_new_thread(self._OnPlayFiles, (widget, prefix))
def _OnPlayFiles(self, widget, prefix=""):
executable = self.frame.np.config.sections["players"]["default"]
downloaddir = self.frame.np.config.sections["transfers"]["downloaddir"]
if "$" not in executable:
return
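        # The configured player command must contain a "$" placeholder for
        # the file path (e.g. "mpv $" -- an illustrative value, not a
        # shipped default); executeCommand substitutes it below.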
for fn in self.selected_transfers:
if fn.file is None:
continue
playfile = None
if os.path.exists(fn.file.name):
playfile = fn.file.name
else:
# If this file doesn't exist anymore, it may have finished downloading and have been renamed
# try looking in the download directory and match the original filename.
basename = str.split(fn.filename, '\\')[-1]
path = os.sep.join([downloaddir, basename])
if os.path.exists(path):
playfile = path
if playfile:
executeCommand(executable, playfile, background=False)
def OnPopupMenuUsers(self, widget):
self.selected_transfers = []
self.selected_users = []
self.widget.get_selection().selected_foreach(self.SelectedTransfersCallback)
self.popup_menu_users.clear()
if len(self.selected_users) > 0:
items = []
self.selected_users.sort(key=str.lower)
for user in self.selected_users:
popup = PopupMenu(self.frame, False)
popup.setup(
("#" + _("Send _message"), popup.OnSendMessage),
("#" + _("Show IP a_ddress"), popup.OnShowIPaddress),
("#" + _("Get user i_nfo"), popup.OnGetUserInfo),
("#" + _("Brow_se files"), popup.OnBrowseUser),
("#" + _("Gi_ve privileges"), popup.OnGivePrivileges),
("", None),
("$" + _("_Add user to list"), popup.OnAddToList),
("$" + _("_Ban this user"), popup.OnBanUser),
("$" + _("_Ignore this user"), popup.OnIgnoreUser),
("#" + _("Select User's Transfers"), self.OnSelectUserTransfer)
)
popup.set_user(user)
items.append((1, user, popup, self.OnPopupMenuUser, popup))
self.popup_menu_users.setup(*items)
return True
def OnPopupMenuUser(self, widget, popup=None):
if popup is None:
return
menu = popup
user = menu.user
items = menu.get_children()
act = False
if len(self.selected_users) >= 1:
act = True
items[0].set_sensitive(act)
items[1].set_sensitive(act)
items[2].set_sensitive(act)
items[3].set_sensitive(act)
items[6].set_active(user in [i[0] for i in self.frame.np.config.sections["server"]["userlist"]])
items[7].set_active(user in self.frame.np.config.sections["server"]["banlist"])
items[8].set_active(user in self.frame.np.config.sections["server"]["ignorelist"])
for i in range(4, 9):
items[i].set_sensitive(act)
return True
def DoubleClick(self, event):
self.select_transfers()
dc = self.frame.np.config.sections["transfers"]["download_doubleclick"]
if dc == 1: # Send to player
self.OnPlayFiles(None)
elif dc == 2: # File manager
self.OnOpenDirectory(None)
elif dc == 3: # Search
self.OnFileSearch(None)
elif dc == 4: # Abort
self.OnAbortTransfer(None, False)
elif dc == 5: # Clear
self.OnClearTransfer(None)
elif dc == 6: # Retry
self.OnRetryTransfer(None)
def OnPopupMenu(self, widget, event, kind):
if kind == "mouse":
if event.button != 3:
if event.button == 1 and event.type == Gdk.EventType._2BUTTON_PRESS:
self.DoubleClick(event)
return False
self.selected_transfers = []
self.selected_users = []
self.widget.get_selection().selected_foreach(self.SelectedTransfersCallback)
users = len(self.selected_users) > 0
multi_users = len(self.selected_users) > 1 # noqa: F841
files = len(self.selected_transfers) > 0
multi_files = len(self.selected_transfers) > 1
self.SelectCurrentRow(event, kind)
items = self.popup_menu.get_children()
if users:
items[7].set_sensitive(True) # Users Menu
else:
items[7].set_sensitive(False) # Users Menu
if files:
act = True
else:
act = False
items[0].set_sensitive(act) # Place
items[4].set_sensitive(act) # Send to player
items[5].set_sensitive(act) # View Meta
items[6].set_sensitive(act) # File manager
items[8].set_sensitive(act) # Search filename
act = False
if not multi_files and files:
act = True
items[2].set_sensitive(act) # Copy URL
items[3].set_sensitive(act) # Copy Folder URL
if not users or not files:
# Disable options
# Abort, Abort and Remove, retry, clear
act = False
else:
act = True
for i in range(10, 15):
items[i].set_sensitive(act)
self.popup_menu.popup(None, None, None, None, 3, event.time)
if kind == "keyboard":
widget.emit_stop_by_name("key_press_event")
elif kind == "mouse":
widget.emit_stop_by_name("button_press_event")
return True
def update(self, transfer=None, forced=False):
TransferList.update(self, transfer, forced)
if transfer is None and self.frame.np.transfers is not None:
self.frame.np.transfers.SaveDownloads()
def OnGetPlaceInQueue(self, widget):
self.select_transfers()
for i in self.selected_transfers:
if i.status != "Queued":
continue
self.frame.np.ProcessRequestToPeer(i.user, slskmessages.PlaceInQueueRequest(None, i.filename))
def OnFileSearch(self, widget):
self.select_transfers()
for transfer in self.selected_transfers:
self.frame.SearchEntry.set_text(transfer.filename.rsplit("\\", 1)[1])
self.frame.ChangeMainPage(None, "search")
break
def OnRetryTransfer(self, widget):
self.select_transfers()
for transfer in self.selected_transfers:
if transfer.status in ["Finished", "Old"]:
continue
self.frame.np.transfers.AbortTransfer(transfer)
transfer.req = None
self.frame.np.transfers.getFile(transfer.user, transfer.filename, transfer.path, transfer)
self.frame.np.transfers.SaveDownloads()
def OnAbortRemoveTransfer(self, widget):
self.select_transfers()
self.OnClearTransfer(widget)
| gpl-3.0 | 5,727,615,859,211,155,000 | 34.782772 | 160 | 0.5953 | false |
binarytemple/haproxy-timeplotters-parser | haproxy-parser.py | 1 | 2276 | #!/usr/bin/env python
import sys
from dateutil import parser
file=sys.argv[1:]
if not file:
print "please specify input file as argument to this script"
exit(1)
count = 0
with open(file[0], 'r') as f:
for line in f:
try:
fields = line.split()
start=parser.parse("%s %s %s" % (fields[0],fields[1],fields[2]))
if fields[8].find("UP") > -1:
dhost=fields[6].split("/")[1]
print "%s !ha_host_up.%s " % (start,dhost)
continue
if fields[8].find("DOWN") > -1:
dhost=fields[6].split("/")[1]
print "%s !ha_host_down.%s " % (start,dhost)
continue
end=parser.parse(fields[6][1:-1].replace("/","-").replace(":"," ",1))
measure=fields[10]
dhost=fields[8].split("/")[1]
shost=fields[5].split(":")[0]
sport=fields[5].split(":")[1]
queue_time=fields[9].split("/")[0]
connect_time=fields[9].split("/")[1]
total_time=fields[9].split("/")[2]
if dhost == "<NOSRV>":
print "%s =ha_cnt_nosrv.%s 1" % (start,shost)
continue
if len(fields) > 16:
method=fields[17].replace('"','')
if method == "PUT" or method == "GET" or method == "DELETE":
print "%s =ha_cnt_%s.%s 1" % (start,method,dhost)
req_thread="ha_chat_%s.%s" % ( shost.replace(".","_"),dhost.replace(".","_") )
print "%s >%s" % (end,req_thread)
print "%s <%s" % (start,req_thread )
print "%s =ha_cnt_size.%s %s" % (start,dhost,measure)
print "%s =ha_cnt_life.%s %s" % (end,dhost,(start - end).total_seconds())
print "%s =ha_cnt_queue_time.%s %s" % (end,dhost,queue_time)
print "%s =ha_cnt_connect_time.%s %s" % (end,dhost,connect_time)
print "%s =ha_cnt_connection.%s 1" % (start,dhost)
print "%s =ha_cnt_total_time.%s %s" % (end,dhost,total_time)
except Exception,e:
sys.stderr.write("%s - %s - %s" % (e, line, sys.exc_info()[0] ))
continue
| apache-2.0 | 5,239,927,052,813,543,000 | 35.709677 | 91 | 0.459578 | false |
puttarajubr/commcare-hq | custom/ewsghana/alerts/alerts.py | 1 | 16788 | from celery.schedules import crontab
from celery.task import periodic_task
import datetime
from casexml.apps.stock.models import StockTransaction
from corehq.apps.commtrack.models import SupplyPointCase, StockState
from corehq.apps.locations.dbaccessors import get_users_by_location_id
from corehq.apps.locations.models import SQLLocation
from corehq.apps.products.models import SQLProduct
from corehq.apps.sms.api import send_sms_to_verified_number
from corehq.apps.users.models import CommCareUser
from custom.ewsghana.alerts import ONGOING_NON_REPORTING, ONGOING_STOCKOUT_AT_SDP, ONGOING_STOCKOUT_AT_RMS, \
REPORT_REMINDER, WEB_REMINDER, URGENT_NON_REPORTING, URGENT_STOCKOUT, COMPLETE_REPORT, INCOMPLETE_REPORT, \
STOCKOUTS_MESSAGE, LOW_SUPPLY_MESSAGE, OVERSTOCKED_MESSAGE, RECEIPT_MESSAGE
from django.core.mail import send_mail
from custom.ewsghana.utils import ProductsReportHelper
from custom.ewsghana.utils import send_test_message, can_receive_email
import settings
from custom.ewsghana.models import EWSGhanaConfig
from django.utils.translation import ugettext as _
def send_alert(transactions, sp, user, message):
sp_ids = set()
if sp and not transactions and user.get_verified_number():
send_sms_to_verified_number(user.get_verified_number(), message)
sp_ids.add(sp._id)
# Alert when facilities have not reported continuously for 3 weeks
@periodic_task(run_every=crontab(hour=10, minute=00),
queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def on_going_non_reporting():
domains = EWSGhanaConfig.get_all_enabled_domains()
for domain in domains:
for user in CommCareUser.by_domain(domain):
on_going_process_user(user)
def on_going_process_user(user, test=False):
now = datetime.datetime.utcnow()
date = now - datetime.timedelta(days=21)
user_location = user.sql_location
if not user_location:
return
facilities = []
if user_location.location_type.name == 'district':
facilities = user_location.get_children()
elif user_location.location_type.name == 'region':
facilities = SQLLocation.objects.filter(domain=user.domain,
parent__parent__location_id=user.location._id)
fac = set()
for facility in facilities:
sp = facility.supply_point_id
if not sp:
continue
transactions_exist = StockTransaction.objects.filter(
case_id=sp,
type="stockonhand",
report__date__gte=date
).exists()
if not transactions_exist:
fac.add(unicode(facility.name))
verified_number = user.get_verified_number()
if fac and verified_number:
message = ONGOING_NON_REPORTING % " \n".join(fac)
if not test:
send_sms_to_verified_number(verified_number, message)
else:
send_test_message(verified_number, message)
if can_receive_email(user, verified_number):
email = str(user.email)
send_mail('ONGOING NON REPORTING', message, '[email protected]', [email])
# Ongoing STOCKOUTS at SDP and RMS
@periodic_task(run_every=crontab(hour=10, minute=25),
queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def on_going_stockout():
domains = EWSGhanaConfig.get_all_enabled_domains()
for domain in domains:
for user in CommCareUser.by_domain(domain):
on_going_stockout_process_user(user)
def on_going_stockout_process_user(user, test=False):
now = datetime.datetime.utcnow()
date = now - datetime.timedelta(days=21)
user_location = user.sql_location
if not user_location:
return
facilities = []
if user_location.location_type.name == 'district':
facilities = user_location.get_children()
elif user_location.location_type.name == 'region':
facilities = SQLLocation.objects.filter(domain=user.domain,
parent__parent__location_id=user.location._id)
fac = set()
for facility in facilities:
sp = facility.supply_point_id
if not sp:
continue
stockouts = StockTransaction.objects.filter(
case_id=sp,
type="stockonhand",
report__date__gte=date
)
if stockouts.exists() and not stockouts.exclude(stock_on_hand=0).exists():
fac.add(unicode(facility.name))
if fac and user.get_verified_number():
if user_location.location_type.name == 'district':
message = ONGOING_STOCKOUT_AT_SDP % " \n".join(fac)
verified_number = user.get_verified_number()
send_sms_to_verified_number(verified_number, message)
if can_receive_email(user, verified_number):
email = str(user.email)
send_mail('ONGOING STOCKOUT AT SDP', message, '[email protected]', [email])
elif user_location.location_type.name == 'region':
message = ONGOING_STOCKOUT_AT_RMS % " \n".join(fac)
verified_number = user.get_verified_number()
if not test:
send_sms_to_verified_number(verified_number, message)
else:
send_test_message(verified_number, message)
if can_receive_email(user, verified_number):
email = str(user.email)
send_mail('ONGOING STOCKOUT AT RMS', message, '[email protected]', [email])
# Urgent Non-Reporting
@periodic_task(run_every=crontab(day_of_week=1, hour=8, minute=20),
queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def urgent_non_reporting():
domains = EWSGhanaConfig.get_all_enabled_domains()
for domain in domains:
for user in CommCareUser.by_domain(domain):
urgent_non_reporting_process_user(user)
def urgent_non_reporting_process_user(user, test=False):
now = datetime.datetime.utcnow()
date = now - datetime.timedelta(days=30)
user_location = user.sql_location
if not user_location:
return
facilities = []
if user_location.location_type.name == 'district':
facilities = user_location.get_children()
elif user_location.location_type.name == 'region':
facilities = SQLLocation.objects.filter(domain=user.domain,
parent__parent__location_id=user.location._id)
elif user_location.location_type.name == 'country':
facilities = SQLLocation.objects.filter(domain=user.domain,
parent__parent__parent__location_id=user.location._id)
fac = set()
no_rep = 0
for facility in facilities:
sp = facility.supply_point_id
transaction_exists = StockTransaction.objects.filter(
case_id=sp,
type="stockonhand",
report__date__gte=date
).exists()
if sp and not transaction_exists:
fac.add(unicode(facility.name))
no_rep += 1
if fac and no_rep >= len(facilities) / 2 and user.get_verified_number():
message = URGENT_NON_REPORTING % user.location.name
verified_number = user.get_verified_number()
if not test:
send_sms_to_verified_number(verified_number, message)
else:
send_test_message(verified_number, message)
if can_receive_email(user, verified_number):
email = str(user.email)
send_mail('URGENT NON REPORTING', message, '[email protected]', [email])
# Urgent Stockout
@periodic_task(run_every=crontab(day_of_week=1, hour=8, minute=20),
queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def urgent_stockout():
domains = EWSGhanaConfig.get_all_enabled_domains()
for domain in domains:
for user in CommCareUser.by_domain(domain):
urgent_stockout_process_user(user)
def urgent_stockout_process_user(user, test=False):
user_location = user.sql_location
if not user_location:
return
facilities = []
if user_location.location_type.name == 'district':
facilities = user_location.get_children()
elif user_location.location_type.name == 'region':
facilities = SQLLocation.objects.filter(domain=user.domain,
parent__parent__location_id=user.location._id)
elif user_location.location_type.name == 'country':
facilities = SQLLocation.objects.filter(domain=user.domain,
parent__parent__parent__location_id=user.location._id)
stocked_out_products = set()
fac = set()
no_rep = 0
for facility in facilities:
sp = facility.supply_point_id
if sp:
stocked_out = StockState.objects.filter(
case_id=sp, section_id="stockonhand", stock_on_hand=0
)
if stocked_out.exists():
no_rep += 1
fac.add(unicode(facility))
for product in stocked_out:
sql_product = SQLProduct.objects.get(product_id=product.product_id)
stocked_out_products.add(sql_product.name)
if fac and no_rep >= (len(facilities) / 2) and user.get_verified_number():
stockout_str = ", ".join(sorted(
[unicode(product) for product in stocked_out_products]
))
message = URGENT_STOCKOUT % (user_location.name, stockout_str)
verified_number = user.get_verified_number()
if not test:
send_sms_to_verified_number(verified_number, message)
else:
send_test_message(verified_number, message)
if can_receive_email(user, verified_number):
email = str(user.email)
send_mail('URGENT STOCKOUT', message, '[email protected]', [email])
# Web reminder, once every 3 months
@periodic_task(run_every=crontab(day_of_month=1, hour=10, minute=3),
queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def reminder_to_visit_website():
domains = EWSGhanaConfig.get_all_enabled_domains()
for domain in domains:
for user in CommCareUser.by_domain(domain):
thirteen_days_ago = datetime.datetime.utcnow() - datetime.timedelta(weeks=13)
if user.location and user.last_login < thirteen_days_ago and user.get_verified_number()\
and user.location.location_type.name in ['district', 'region', 'country']:
message = WEB_REMINDER % user.name
verified_number = user.get_verified_number()
send_sms_to_verified_number(verified_number, message)
if can_receive_email(user, verified_number):
email = str(user.email)
send_mail('REMINDER TO VISIT WEBSITE', message, '[email protected]', [email])
# One week reminder when facility does not report to EWS
@periodic_task(run_every=crontab(day_of_week=1, hour=11, minute=11),
queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery'))
def report_reminder():
domains = EWSGhanaConfig.get_all_enabled_domains()
for domain in domains:
for user in CommCareUser.by_domain(domain):
report_reminder_process_user(user)
def report_reminder_process_user(user, test=False):
now = datetime.datetime.utcnow()
date = now - datetime.timedelta(days=7)
if not user.location or user.location.location_type.administrative:
return
sp = SupplyPointCase.get_by_location(user.location)
if not sp:
return
transaction_exists = StockTransaction.objects.filter(
case_id=sp._id,
type="stockonhand",
report__date__gte=date
).exists()
if sp and not transaction_exists and user.get_verified_number():
message = REPORT_REMINDER % (user.name, user.location.name)
verified_number = user.get_verified_number()
if not test:
send_sms_to_verified_number(verified_number, message)
else:
send_test_message(verified_number, message)
if can_receive_email(user, verified_number):
email = str(user.email)
send_mail('REPORT REMINDER', message, '[email protected]', [email])
# Check whether the report was complete or not
def report_completion_check(user):
sp_id = SQLLocation.objects.get(domain=user.domain, location_id=user.location._id).supply_point_id
now = datetime.datetime.utcnow()
reported_products = set(StockTransaction.objects.filter(case_id=sp_id, type='stockonhand',
report__date=now).values_list('sql_product__name',
flat=True))
expected_products = set(SQLProduct.objects.filter(domain=user.domain).values_list('name', flat=True))
    # products that were expected for this domain but not reported
    # (note the operand order: expected minus reported)
    missing_products = set.difference(expected_products, reported_products)
if not missing_products:
message = COMPLETE_REPORT
send_sms_to_verified_number(user.get_verified_number(), message % user.username)
elif missing_products:
message = INCOMPLETE_REPORT % (user.name, user.location.name, ", ".join(sorted(missing_products)))
send_sms_to_verified_number(user.get_verified_number(), message)
# Sends overstock, understock, or SOH-without-receipts alerts
def stock_alerts(transactions, user):
report_helper = ProductsReportHelper(user.sql_location, transactions)
products_below = report_helper.low_supply()
stockouts = report_helper.stockouts()
overstocked = report_helper.overstocked()
receipts = report_helper.receipts()
missings = report_helper.missing_products()
message = ""
super_message = ""
if missings:
products_codes_str = ' '.join(sorted([missing.code for missing in missings]))
message += " still missing %s. " % products_codes_str
if stockouts:
products_codes_str = ' '.join([stockout.sql_product.code for stockout in stockouts])
products_names_str = ' '.join([stockout.sql_product.name for stockout in stockouts])
message += " " + STOCKOUTS_MESSAGE % {'products': products_codes_str}
super_message = _("stockouts %s; ") % products_names_str
if products_below:
products_codes_str = ' '.join([product.sql_product.code for product in products_below])
products_names_str = ' '.join([product.sql_product.name for product in products_below])
message += " " + LOW_SUPPLY_MESSAGE % {'low_supply': products_codes_str}
super_message += _("below reorder level %s; ") % products_names_str
if stockouts or products_below:
reorders = [
u'%s %s' % (code, amount)
for (code, amount) in report_helper.reorders()
if amount
]
if reorders:
message += " Please order %s." % ' '.join(reorders)
if overstocked:
if not message:
products_codes_str = ' '.join([overstock.sql_product.code for overstock in overstocked])
message += " " + OVERSTOCKED_MESSAGE % {'username': user.username, 'overstocked': products_codes_str}
products_names_str = ' '.join([overstock.sql_product.name for overstock in overstocked])
super_message += _("overstocked %s; ") % products_names_str
if not message:
if not receipts:
message = COMPLETE_REPORT % user.username
else:
products_str = ' '.join(
[
"%s %s" % (SQLProduct.objects.get(product_id=receipt.product_id).code, receipt.quantity)
for receipt in receipts
]
)
message = RECEIPT_MESSAGE % {'username': user.username, 'received': products_str}
else:
message = (_('Dear %s,') % user.username) + message
if super_message:
stripped_message = super_message.strip().strip(';')
super_message = _('Dear %s, %s is experiencing the following problems: ') + stripped_message
send_message_to_admins(user, super_message.rstrip())
send_sms_to_verified_number(user.get_verified_number(), message.rstrip())
def send_message_to_admins(user, message):
users = get_users_by_location_id(user.location.get_id)
in_charge_users = [
u
for u in users
if u.get_verified_number() and u.user_data.get('role') == "In Charge"
]
for in_charge_user in in_charge_users:
send_sms_to_verified_number(in_charge_user.get_verified_number(),
message % (in_charge_user.username, in_charge_user.location.name))
| bsd-3-clause | -586,680,483,213,745,700 | 42.832898 | 113 | 0.632535 | false |
openstack/heat | heat/tests/test_properties_group.py | 1 | 4005 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception
from heat.engine import properties_group as pg
from heat.tests import common
class TestSchemaSimpleValidation(common.HeatTestCase):
scenarios = [
('correct schema', dict(
schema={pg.AND: [['a'], ['b']]},
message=None,
)),
('invalid type schema', dict(
schema=[{pg.OR: [['a'], ['b']]}],
message="Properties group schema incorrectly specified. "
"Schema should be a mapping, "
"found %s instead." % list,
)),
('invalid type subschema', dict(
schema={pg.OR: [['a'], ['b'], [{pg.XOR: [['c'], ['d']]}]]},
message='Properties group schema incorrectly specified. List '
'items should be properties list-type names with format '
'"[prop, prop_child, prop_sub_child, ...]" or nested '
'properties group schemas.',
)),
('several keys schema', dict(
schema={pg.OR: [['a'], ['b']],
pg.XOR: [['v', 'g']]},
message='Properties group schema incorrectly specified. Schema '
'should be one-key dict.',
)),
('several keys subschema', dict(
schema={pg.OR: [['a'], ['b'], {pg.XOR: [['c']], pg.OR: ['d']}]},
message='Properties group schema incorrectly specified. '
'Schema should be one-key dict.',
)),
('invalid key schema', dict(
schema={'NOT KEY': [['a'], ['b']]},
message='Properties group schema incorrectly specified. '
'Properties group schema key should be one of the '
'operators: AND, OR, XOR.',
)),
('invalid key subschema', dict(
schema={pg.AND: [['a'], {'NOT KEY': [['b']]}]},
message='Properties group schema incorrectly specified. '
'Properties group schema key should be one of the '
'operators: AND, OR, XOR.',
)),
('invalid value type schema', dict(
schema={pg.OR: 'a'},
message="Properties group schema incorrectly specified. "
"Schemas' values should be lists of properties names "
"or nested schemas.",
)),
('invalid value type subschema', dict(
schema={pg.OR: [{pg.XOR: 'a'}]},
message="Properties group schema incorrectly specified. "
"Schemas' values should be lists of properties names "
"or nested schemas.",
)),
('invalid prop name schema', dict(
schema={pg.OR: ['a', 'b']},
message='Properties group schema incorrectly specified. List '
'items should be properties list-type names with format '
'"[prop, prop_child, prop_sub_child, ...]" or nested '
'properties group schemas.',
)),
]
def test_properties_group_schema_validate(self):
if self.message is not None:
ex = self.assertRaises(exception.InvalidSchemaError,
pg.PropertiesGroup, self.schema)
self.assertEqual(self.message, str(ex))
else:
self.assertIsInstance(pg.PropertiesGroup(self.schema),
pg.PropertiesGroup)
| apache-2.0 | -1,333,225,698,116,854,500 | 43.010989 | 78 | 0.538826 | false |
kgibm/problemdetermination | scripts/was/tpvlogging.py | 1 | 8271 | # Start, stop, query, or configure TPV logging on a set of servers
# Example: wsadmin -username wsadmin -password wsadmin -lang jython -f tpvlogging.py -userprefs wsadmin -action start -server server1
def usage():
print "usage: wsadmin -lang jython -f tpvlogging.py -action [start|stop|list|setlevel] -userprefs USER [-node NODE] [-server SERVER] [-pmilevel none|basic|extended|all]"
print " -userprefs is required and you can just pass in the same user as -username for wsadmin, or any name otherwise"
print " -pmilevel is only used with -action setlevel. Valid values are none, basic, extended, all"
print " If neither -node nor -server are specified, then all application servers on all nodes will be executed"
print " If -node is specified but -server isn't, then all application servers on the node will be executed"
print " This script does not yet support a custom statistics set for -action setlevel"
sys.exit()
import sys
import com.ibm.ws.tpv.engine.UserPreferences as UserPreferences
import com.ibm.ws.tpv.engine.utils.ServerBean as ServerBean
import jarray
import javax.management as mgmt
sType = "APPLICATION_SERVER"
action = "start"
targetNode = ""
targetApplicationServer = ""
user = ""
filename = "tpv"
duration = 999999
fileSize = 52428800
numFiles = 5
outputType = "xml" # or "bin"
bufferSize = 40
pmilevel = "extended" # only if -action setlevel
help = 0
refreshRate = 30
affectedCount = 0
verbose = 0
l = len(sys.argv)
i = 0
while i < l:
arg = sys.argv[i]
if arg == "-help" or arg == "-h" or arg == "-usage" or arg == "-?":
help = 1
if arg == "-action":
action = sys.argv[i + 1]
if arg == "-node":
targetNode = sys.argv[i + 1]
if arg == "-server":
targetApplicationServer = sys.argv[i + 1]
if arg == "-userprefs":
user = sys.argv[i + 1]
if arg == "-filename":
filename = sys.argv[i + 1]
if arg == "-duration":
duration = int(sys.argv[i + 1])
if arg == "-filesize":
fileSize = int(sys.argv[i + 1])
if arg == "-numfiles":
numFiles = int(sys.argv[i + 1])
if arg == "-buffersize":
bufferSize = int(sys.argv[i + 1])
if arg == "-refreshrate":
refreshRate = int(sys.argv[i + 1])
if arg == "-outputtype":
outputType = sys.argv[i + 1]
if arg == "-pmilevel":
pmilevel = sys.argv[i + 1]
if arg == "-verbose":
verbose = 1
i = i + 1
if help == 1:
usage()
if len(user) == 0:
print ""
print "ERROR: -userprefs must be specified (see usage below)"
print ""
usage()
def getExceptionText(typ, value, tb):
value = `value`
sd = `tb.dumpStack()`
sd = sd.replace("\\\\","/")
i = sd.rfind(" File ")
j = sd.rfind(", line ")
k = sd.rfind(", in ")
locn = ""
if(i>0 and j>0 and k>0):
file = sd[i+7:j]
line = sd[j+7:k]
func = sd[k+4:-3]
locn = "Function="+func+" Line="+line+" File="+file
return value+" "+locn
def convertToList( inlist ):
outlist = []
clist = None
if (len(inlist) > 0):
if (inlist[0] == '[' and inlist[len(inlist) - 1] == ']'):
if (inlist[1] == "\"" and inlist[len(inlist)-2] == "\""):
clist = inlist[1:len(inlist) -1].split(")\" ")
else:
clist = inlist[1:len(inlist) - 1].split(" ")
else:
clist = inlist.split(java.lang.System.getProperty("line.separator"))
if clist != None:
for elem in clist:
elem = elem.rstrip();
if (len(elem) > 0):
if (elem[0] == "\"" and elem[len(elem) -1] != "\""):
elem = elem+")\""
outlist.append(elem)
return outlist
def listNodes():
nodes = AdminConfig.list("Node")
nodeList = convertToList(nodes)
return nodeList
def listServers(serverType="", nodeName=""):
optionalParamList = []
if (len(serverType) > 0):
optionalParamList = ['-serverType', serverType]
if (len(nodeName) > 0):
node = AdminConfig.getid("/Node:" +nodeName+"/")
optionalParamList = optionalParamList + ['-nodeName', nodeName]
servers = AdminTask.listServers(optionalParamList)
servers = convertToList(servers)
newservers = []
for aServer in servers:
sname = aServer[0:aServer.find("(")]
nname = aServer[aServer.find("nodes/")+6:aServer.find("servers/")-1]
sid = AdminConfig.getid("/Node:"+nname+"/Server:"+sname)
if (newservers.count(sid) <= 0):
newservers.append(sid)
return newservers
print "Action: " + action
print "User: " + user
print "Node: " + targetNode
print "Server: " + targetApplicationServer
print "File name: " + filename
print "Duration: " + str(duration)
print "File Size: " + str(fileSize)
print "Historical Files: " + str(numFiles)
print "Output type: " + outputType
print "Refresh Rate: " + str(refreshRate)
nodeList = listNodes()
for nodeObject in nodeList:
nodeName = nodeObject.split("(")[0]
if len(targetNode) > 0 and targetNode.lower() != nodeName.lower():
print "Skipping node " + nodeName + " because it did not match targetNode"
continue
print ""
print "Processing node: " + nodeName
try:
# build list of Application Servers in the Node
serverList = listServers(sType,nodeName)
except:
typ, val, tb = sys.exc_info()
value = `val`
sd = `tb.dumpStack()`
sd = sd.replace("\\\\","/")
print "Could not process node. Probably the DMGR (which is ok to skip)? Continuing with the other nodes... " + value + " " + sd
continue
if verbose:
print "Number of servers: " + str(len(serverList))
for serverObject in serverList:
serverName = serverObject.split("(")[0]
if len(targetApplicationServer) > 0 and targetApplicationServer.lower() != serverName.lower():
if verbose:
print "Skipping server " + serverName + " (node " + nodeName + ")"
continue
prefs = UserPreferences()
prefs.setServerName(serverName)
prefs.setNodeName(nodeName)
prefs.setLoggingDuration(duration)
prefs.setLogFileSize(fileSize)
prefs.setNumLogFiles(numFiles)
prefs.setTpvLogFormat(outputType)
prefs.setLogFileName(filename)
prefs.setBufferSize(bufferSize)
prefs.setUserId(user)
prefs.setRefreshRate(refreshRate)
params = [prefs]
sig = ["com.ibm.ws.tpv.engine.UserPreferences"]
target = "node=" + nodeName
name = AdminControl.completeObjectName("type=TivoliPerfEngine," + target + ",*")
mbeanObjectName = mgmt.ObjectName(name)
display = nodeName + "\\" + serverName
if action == "start":
print "Calling TivoliPerfEngine.monitorServer on " + display
AdminControl.invoke_jmx(mbeanObjectName, "monitorServer", params, sig)
print "Calling TivoliPerfEngine.startLogging on " + display
AdminControl.invoke_jmx(mbeanObjectName, "startLogging", params, sig)
affectedCount = affectedCount + 1
elif action == "stop":
print "Calling TivoliPerfEngine.stopLogging on " + display
AdminControl.invoke_jmx(mbeanObjectName, "stopLogging", params, sig)
print "Calling TivoliPerfEngine.disableServer on " + display
AdminControl.invoke_jmx(mbeanObjectName, "disableServer", params, sig)
affectedCount = affectedCount + 1
elif action == "list":
print "Monitored Servers (by " + user + ")"
print "======================"
servers = AdminControl.invoke(name, "getMonitoredServers", user)
if len(servers) > 0:
isLoggingSig = ["com.ibm.ws.tpv.engine.utils.ServerBean"]
for server in servers.split("\n"):
pieces = server.split(".")
bean = ServerBean(pieces[0], pieces[1])
isLoggingParams = [bean]
res = AdminControl.invoke_jmx(mbeanObjectName, "isServerLogging", isLoggingParams, isLoggingSig)
perftarget = "node=" + nodeName + ",process=" + pieces[1]
perfname = AdminControl.completeObjectName("type=Perf," + perftarget + ",*")
print server + " ; Logging=" + str(res) + " ; Level=" + AdminControl.invoke(perfname, "getStatisticSet")
break # otherwise we'll do the list for each server in the node -- TODO break outter loop too?
elif action == "setlevel":
target = target + ",process=" + serverName
perfname = AdminControl.completeObjectName("type=Perf," + target + ",*")
# none, basic, extended, all, custom
print "Setting PMI level to " + pmilevel + " on " + serverName
AdminControl.invoke(perfname, "setStatisticSet", pmilevel)
AdminControl.invoke(perfname, "savePMIConfiguration")
affectedCount = affectedCount + 1
elif action == "debug":
print "Debug"
else:
print "Unknown action " + action
print ""
print "Script finished. " + str(affectedCount) + " servers affected."
| apache-2.0 | -5,027,780,480,985,339,000 | 31.182879 | 170 | 0.671019 | false |
AlexStarov/Shop | applications/damon/utils.py | 1 | 3715 | import asyncio
import logging
import multiprocessing
from django.conf import settings
import asterisk.manager
from redis_collections import Dict
logger = multiprocessing.log_to_stderr(logging.INFO)
def _connect_to_ami() -> asterisk.manager.Manager:
"""
Connecting to asterisk(AMI)
"""
while True:
manager = asterisk.manager.Manager()
try:
manager.connect(settings.AS_MANAGER_IP)
manager.login(settings.AS_MANAGER_LOGIN,
settings.AS_MANAGER_PASSWORD)
except asterisk.manager.ManagerException as e:
logger.error("connect to ami Error: %s" % e)
if manager.connected():
return manager
def _do_command_safety(manager: asterisk.manager.Manager,
command_to_asterisk: str) -> tuple:
"""
Trying send command to an asterisk and receiving an answer,
if got error will create new connection to the asterisk and do same
:return: tuple : where
0 - asterisk.manager.Manager;
1 - result of executing command by asterisk
"""
while True:
try:
if manager.connected():
response = manager.command(command_to_asterisk)
return manager, response
except asterisk.manager.ManagerException as e:
logger.error("command safety Error: %s" % e)
try:
manager.logoff()
except asterisk.manager.ManagerException as e:
logger.error("logoff Error: %s" % e)
finally:
manager.close()
logger.info('manager.close()')
manager = _connect_to_ami()
logger.info('new manager')
@asyncio.coroutine
def _get_response_to_command(manager: asterisk.manager.Manager,
command_to_manager: str,
key: str,
loop: asyncio.events.BaseDefaultEventLoopPolicy):
"""
Getting an answer from an asterisk, and put it to a redis storage
"""
asyncio.sleep(1, loop=loop)
manager, response = _do_command_safety(manager, command_to_manager)
monitor = Dict(key=settings.MONITORING_KEY)
monitor[key] = response.data
asyncio.async(_get_response_to_command(manager, command_to_manager,
response, loop), loop=loop)
def _send_command_to_asterisk(command_to_asterisk: str, key: str):
"""
Sending command to asterisk through AMI in an async loop
"""
manager = _connect_to_ami()
loop = asyncio.new_event_loop()
asyncio.async(
_get_response_to_command(manager, command_to_asterisk, key, loop),
loop=loop
)
try:
logger.info('loop.run_forever with command - "%s"' %
command_to_asterisk)
loop.run_forever()
finally:
logger.info('loop.close()')
loop.close()
def create_daemons(keys_with_command: list):
"""
Creating processes daemons and asked in each of them an asterisk
:param keys_with_command: list of tuples, where each tuple consist with
0 - str: command;
1 - str: key in redis dict.
:return: list of multiprocessing.Process
"""
processes = []
for command, key in keys_with_command:
process = multiprocessing.Process(
target=_send_command_to_asterisk,
kwargs={'command_to_asterisk': command, 'key': key}
)
process.daemon = True
processes.append(
process
)
process.start()
for process in processes:
logger.info('join %s' % process.name)
process.join(timeout=1)
| apache-2.0 | 1,162,429,027,114,803,000 | 30.218487 | 78 | 0.592194 | false |
XENON1T/pax | pax/core.py | 1 | 19071 | """The backbone of pax - the Processor class
"""
import glob
import logging
import six
import itertools
import os
import time
from prettytable import PrettyTable # Timing report
from tqdm import tqdm # Progress bar
import pax # Needed for pax.__version__
from pax.configuration import load_configuration
from pax.exceptions import InvalidConfigurationError
from pax import simulation, utils
if six.PY2:
import imp
else:
import importlib
# For diagnosing suspected memory leaks, uncomment this code
# and similar code in process_event
# import gc
# import objgraph
class Processor:
def __init__(self, config_names=(), config_paths=(), config_string=None, config_dict=None, just_testing=False):
"""Setup pax using configuration data from three sources:
- config_names: List of named configurations to load (sequentially)
- config_paths: List of config file paths to load (sequentially)
- config_string: A config string (overrides all config from files)
- config_dict: A final config ovveride dict: {'section' : {'setting' : 17, ...
Files from config_paths will be loaded after config_names, and can thus override their settings.
Configuration files can inherit from others using parent_configuration and parent_configuration_files.
Each value in the ini files (and the config_string) is eval()'ed in a context with physical unit variables:
4 * 2 -> 8
4 * cm**(2)/s -> correctly interpreted as a physical value with units of cm^2/s
The config_dict's settings are not evaluated.
If config['pax']['look_for_config_in_runs_db'], will try to connect to the runs db and fetch configuration
for this particular run. The run id is fetched either by number (config['DEFAULT']['run_number']
Setting just_testing disables some warnings about not specifying any plugins or plugin groups in the config.
Use only if, for some reason, you don't want to load a full configuration file.
.. note::
Although the log level can be specified in the configuration, it is an application wide
setting that will not be modified once set. New instances of the Processor class will have
the same log level as the first, regardless of their configuration. See #78.
"""
self.config = load_configuration(config_names, config_paths, config_string, config_dict, maybe_call_mongo=True)
# Check for outdated n_cpus option
if self.config.get('pax', {}).get('n_cpus', 1) != 1:
raise RuntimeError("The n_cpus option is no longer supported. Start a MultiProcessor instead.")
pc = self.config['pax']
self.log = setup_logging(pc.get('logging_level'))
self.log.info("This is PAX version %s, running with configuration for %s." % (
pax.__version__, self.config['DEFAULT'].get('tpc_name', 'UNSPECIFIED TPC NAME')))
# Start up the simulator
# Must be done explicitly here, as plugins can rely on its presence in startup
if 'WaveformSimulator' in self.config:
wvsim_config = {}
wvsim_config.update(self.config['DEFAULT'])
wvsim_config.update(self.config['WaveformSimulator'])
self.simulator = simulation.Simulator(wvsim_config)
elif not just_testing:
self.log.warning('You did not specify any configuration for the waveform simulator!\n' +
'If you attempt to load the waveform simulator, pax will crash!')
# Get the list of plugins from the configuration
# plugin_names is a dict with group names as keys, and the plugins we have to initialize per group as values
plugin_names = {}
if 'plugin_group_names' not in pc:
if not just_testing:
self.log.warning('You did not specify any plugin groups to load: are you testing me?')
pc['plugin_group_names'] = []
# Make plugin group names for the encoder and decoder plugins
# By having this code here, we ensure they are always just after/before input/output,
# no matter what plugin group names the user is using
if pc.get('decoder_plugin') is not None:
decoder_pos = 0
if len(pc['plugin_group_names']) and pc['plugin_group_names'][0] == 'input':
decoder_pos += 1
pc['plugin_group_names'].insert(decoder_pos, 'decoder_plugin')
if pc.get('encoder_plugin') is not None:
encoder_pos = len(pc['plugin_group_names'])
if len(pc['plugin_group_names']) and pc['plugin_group_names'][-1] == 'output':
encoder_pos -= 1
pc['plugin_group_names'].insert(encoder_pos, 'encoder_plugin')
for plugin_group_name in pc['plugin_group_names']:
if plugin_group_name not in pc:
raise InvalidConfigurationError('Plugin group list %s missing' % plugin_group_name)
plugin_names[plugin_group_name] = pc[plugin_group_name]
if not isinstance(plugin_names[plugin_group_name], (str, list)):
raise InvalidConfigurationError("Plugin group list %s should be a string, not %s" % (
plugin_group_name, type(plugin_names)))
if not isinstance(plugin_names[plugin_group_name], list):
plugin_names[plugin_group_name] = [plugin_names[plugin_group_name]]
# Ensure each plugin has a configuration
for plugin_name in plugin_names[plugin_group_name]:
self.config[plugin_name] = self.config.get(plugin_name, {})
# Separate input and actions (which for now includes output).
# For the plugin groups which are action plugins, get all names, flatten them
action_plugin_names = list(itertools.chain(*[plugin_names[g]
for g in pc['plugin_group_names']
if g != 'input']))
# Hand out input & output override instructions
if 'input_name' in pc and 'input' in pc['plugin_group_names']:
self.log.debug('User-defined input override: %s' % pc['input_name'])
self.config[plugin_names['input'][0]]['input_name'] = pc['input_name']
if 'output_name' in pc and 'output' in pc['plugin_group_names']:
self.log.debug('User-defined output override: %s' % pc['output_name'])
for o in plugin_names['output']:
self.config[o]['output_name'] = pc['output_name']
self.plugin_search_paths = self.get_plugin_search_paths(pc.get('plugin_paths', None))
self.log.debug("Search path for plugins is %s" % str(self.plugin_search_paths))
# Load input plugin & setup the get_events generator
if 'input' in pc['plugin_group_names']:
if len(plugin_names['input']) != 1:
raise InvalidConfigurationError("There should be one input plugin listed, not %s" %
len(plugin_names['input']))
self.input_plugin = self.instantiate_plugin(plugin_names['input'][0])
self.number_of_events = self.input_plugin.number_of_events
self.stop_after = pc.get('stop_after', float('inf'))
# Parse the event numbers file, if one is given
if pc.get('event_numbers_file', None) is not None:
with open(pc['event_numbers_file'], mode='r') as f:
pc['events_to_process'] = [int(line.rstrip()) for line in f]
if pc.get('events_to_process', None) is not None:
# The user specified which events to process:
self.number_of_events = len(pc['events_to_process'])
def get_events():
for event_number in pc['events_to_process']:
yield self.input_plugin.get_single_event(event_number)
self.get_events = get_events
else:
# Let the input plugin decide which events to process:
self.get_events = self.input_plugin.get_events
self.number_of_events = min(self.number_of_events, self.stop_after)
else:
# During multiprocessing or testing there is often no input plugin events are added manually
self.input_plugin = None
self.log.debug("No input plugin specified: how are you planning to get any events?")
# Load the action plugins
if len(action_plugin_names) > 0:
self.action_plugins = [self.instantiate_plugin(x) for x in action_plugin_names]
# During tests of input plugins there is often no action plugin
else:
self.action_plugins = []
self.log.debug("No action plugins specified: this will be a pretty boring processing run...")
self.timer = utils.Timer()
# Sometimes the config tells us to start running immediately (e.g. if fetching from a queue
if pc.get('autorun', False):
self.run()
@staticmethod
def get_plugin_search_paths(extra_paths=None):
"""Returns paths where we should search for plugins
Search for plugins in ., ./plugins, utils.PAX_DIR/plugins, any directories in config['plugin_paths']
Search in all subdirs of the above, except for __pycache__ dirs
"""
plugin_search_paths = ['./plugins', os.path.join(utils.PAX_DIR, 'plugins')]
if extra_paths is not None:
plugin_search_paths += extra_paths
# Look in all subdirectories
for entry in plugin_search_paths:
plugin_search_paths.extend(glob.glob(os.path.join(entry, '*/')))
# Don't look in __pychache__ folders
plugin_search_paths = [path for path in plugin_search_paths if '__pycache__' not in path]
return plugin_search_paths
def instantiate_plugin(self, name):
"""Take plugin class name and build class from it
The python default module locations are also searched... I think.. so don't name your module 'glob'...
"""
self.log.debug('Instantiating %s' % name)
name_module, name_class = name.split('.')
# Find and load the module which includes the plugin
if six.PY2:
file, pathname, description = imp.find_module(name_module, self.plugin_search_paths)
if file is None:
raise InvalidConfigurationError('Plugin %s not found.' % name)
plugin_module = imp.load_module(name_module, file, pathname, description)
else:
# imp has been deprecated in favor of importlib.
# Moreover, the above code gives non-closed file warnings in py3, so although it works,
# we really don't want to use it.
spec = importlib.machinery.PathFinder.find_spec(name_module, self.plugin_search_paths)
if spec is None:
raise InvalidConfigurationError('Plugin %s not found.' % name)
plugin_module = spec.loader.load_module()
this_plugin_config = {}
this_plugin_config.update(self.config['DEFAULT']) # First load the default settings
if name_module in self.config:
this_plugin_config.update(self.config[name_module]) # Then override with module-level settings
if name in self.config:
this_plugin_config.update(self.config[name]) # Then override with plugin-level settings
# Let each plugin access its own config, and the processor instance as well
# -- needed to e.g. access self.simulator in the simulator plugins or self.config for dumping the config file
# TODO: Is this wise? If s there another way?
instance = getattr(plugin_module, name_class)(this_plugin_config, processor=self)
self.log.debug('Instantiated %s succesfully' % name)
return instance
def get_plugin_by_name(self, name):
"""Return plugin by class name. Use for testing."""
plugins_by_name = {p.__class__.__name__: p for p in self.action_plugins}
if self.input_plugin is not None:
plugins_by_name[self.input_plugin.__class__.__name__] = self.input_plugin
if name in plugins_by_name:
return plugins_by_name[name]
else:
raise ValueError("No plugin named %s has been initialized." % name)
def get_metadata(self):
# Remove any 'queue' arguments from the "config",
# they are used to pass queue objects (which we certainly can't serialize!
cleaned_conf = {sn: {k: v
for k, v in section.items() if k != 'queue'}
for sn, section in self.config.items()}
return dict(run_number=self.config['DEFAULT']['run_number'],
tpc=self.config['DEFAULT']['tpc_name'],
file_builder_name='pax',
file_builder_version=pax.__version__,
timestamp=time.time(),
configuration=cleaned_conf)
def process_event(self, event):
"""Process one event with all action plugins. Returns processed event."""
total_plugins = len(self.action_plugins)
for j, plugin in enumerate(self.action_plugins):
self.log.debug("%s (step %d/%d)" % (plugin.__class__.__name__, j, total_plugins))
event = plugin.process_event(event)
plugin.total_time_taken += self.timer.punch()
# Uncomment to diagnose memory leaks
# gc.collect() # don't care about stuff that would be garbage collected properly
# objgraph.show_growth(limit=5)
return event
def run(self, clean_shutdown=True):
"""Run the processor over all events, then shuts down the plugins (unless clean_shutdown=False)
If clean_shutdown=False, will not shutdown plugin classes
(they still shut down if the Processor class is deleted)
Use only if for some arcane reason you want to run a single instance more than once.
If you do, you get in trouble if you start a new Processor instance that tries to write to the same files.
"""
if self.input_plugin is None:
# You're allowed to specify no input plugin, which is useful for testing. (You may want to feed events
# in by hand). If you do this, you can't use the run method. In case somebody ever tries:
raise InvalidConfigurationError("You just tried to run a Processor without specifying input plugin.")
if self.input_plugin.has_shut_down:
raise RuntimeError("Attempt to run a Processor twice!")
i = 0 # in case loop does not run
self.timer.punch()
if self.config['pax'].get('show_progress_bar', True):
wrapper = tqdm
else:
def wrapper(x, **kwargs):
return x
for i, event in enumerate(wrapper(self.get_events(),
desc='Event',
total=self.number_of_events)):
self.input_plugin.total_time_taken += self.timer.punch()
if i >= self.stop_after:
self.log.info("User-defined limit of %d events reached." % i)
break
self.process_event(event)
self.log.debug("Event %d (%d processed)" % (event.event_number, i))
else: # If no break occurred:
self.log.info("All events from input source have been processed.")
if self.config['pax']['print_timing_report']:
self.make_timing_report(i + 1)
# Shutdown all plugins now -- don't wait until this Processor instance gets deleted
if clean_shutdown:
self.shutdown()
def make_timing_report(self, events_actually_processed):
all_plugins = [self.input_plugin] + self.action_plugins
timing_report = PrettyTable(['Plugin',
'%',
'/event (ms)',
'#/s',
'Total (s)'])
timing_report.align = "r"
timing_report.align["Plugin"] = "l"
total_time = sum([plugin.total_time_taken for plugin in all_plugins])
for plugin in all_plugins:
t = plugin.total_time_taken
if t > 0:
time_per_event_ms = round(t / events_actually_processed, 1)
event_rate_hz = round(1000 * events_actually_processed / t, 1)
if event_rate_hz > 100:
event_rate_hz = ''
timing_report.add_row([plugin.__class__.__name__,
round(100 * t / total_time, 1),
time_per_event_ms,
event_rate_hz,
round(t / 1000, 1)])
else:
timing_report.add_row([plugin.__class__.__name__,
0,
0,
'n/a',
round(t / 1000, 1)])
if total_time > 0:
timing_report.add_row(['TOTAL',
round(100., 1),
round(total_time / events_actually_processed, 1),
round(1000 * events_actually_processed / total_time, 1),
round(total_time / 1000, 1)])
else:
timing_report.add_row(['TOTAL',
round(100., 1),
0,
'n/a',
round(total_time / 1000, 1)])
self.log.info("Timing report:\n" + str(timing_report))
def shutdown(self):
"""Call shutdown on all plugins"""
self.log.debug("Shutting down all plugins...")
if self.input_plugin is not None:
self.log.debug("Shutting down %s..." % self.input_plugin.name)
self.input_plugin.shutdown()
self.input_plugin.has_shut_down = True
for ap in self.action_plugins:
self.log.debug("Shutting down %s..." % ap.name)
ap.shutdown()
ap.has_shut_down = True
def setup_logging(level_str, name='processor'):
if level_str is None:
level_str = 'INFO'
log_spec = level_str.upper()
numeric_level = getattr(logging, log_spec, None)
if not isinstance(numeric_level, int):
raise InvalidConfigurationError('Invalid log level: %s' % log_spec)
logging.basicConfig(level=numeric_level,
format='%(name)s %(processName)-10s L%(lineno)s %(levelname)s %(message)s')
logger = logging.getLogger(name)
logger.debug('Logging initialized with level %s' % log_spec)
return logger
| bsd-3-clause | 5,229,035,555,853,781,000 | 46.558603 | 119 | 0.58623 | false |
msyriac/orphics | orphics/maps.py | 1 | 63177 | from __future__ import print_function
from pixell import enmap, utils, resample, curvedsky as cs, reproject
import numpy as np
from pixell.fft import fft,ifft
from scipy.interpolate import interp1d
import yaml,six
from orphics import io,cosmology,stats
import math
from scipy.interpolate import RectBivariateSpline,interp2d,interp1d
import warnings
import healpy as hp
def mask_srcs(shape,wcs,srcs_deg,width_arcmin):
r = np.deg2rad(width_arcmin/60.)
return enmap.distance_from(shape,wcs,np.deg2rad(srcs_deg), rmax=r) >= r
def grow_mask(mask,width_deg):
r = width_deg * np.pi / 180.
return mask.distance_transform(rmax=r)>=r
def cosine_apodize(bmask,width_deg):
r = width_deg * np.pi / 180.
return 0.5*(1-np.cos(bmask.distance_transform(rmax=r)*(np.pi/r)))
def kspace_coadd(kcoadds,kbeams,kncovs,fkbeam=1):
kcoadds = np.asarray(kcoadds)
kbeams = np.asarray(kbeams)
kncovs = np.asarray(kncovs)
numer = np.sum(kcoadds * kbeams * fkbeam / kncovs,axis=0)
numer[~np.isfinite(numer)] = 0
denom = np.sum(kbeams**2 / kncovs,axis=0)
f = numer/denom
f[~np.isfinite(f)] = 0
return f
def atm_factor(ells,lknee,alpha):
with np.errstate(divide='ignore', invalid='ignore',over='ignore'):
ret = (lknee*np.nan_to_num(1./ells))**(-alpha) if lknee>1.e-3 else 0.*ells
return ret
def rednoise(ells,rms_noise,lknee=0.,alpha=1.):
"""Atmospheric noise model
rms_noise in muK-arcmin
[(lknee/ells)^(-alpha) + 1] * rms_noise**2
"""
rms = rms_noise * (1./60.)*(np.pi/180.)
wnoise = ells*0.+rms**2.
return (atm_factor(ells,lknee,alpha)+1.)*wnoise
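# Usage sketch (illustrative, not part of the original API). The lknee/alpha
# values below are hypothetical; with this convention, alpha ~ -4 gives noise
# that rises steeply below lknee, per the formula in the docstring above.
#   ells = np.arange(2, 8000)
#   nl = rednoise(ells, rms_noise=20., lknee=3000., alpha=-4.)  # muK^2-sr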
def modulated_noise_map(ivar,lknee=None,alpha=None,lmax=None,
N_ell_standard=None,parea=None,cylindrical=False,
seed=None,lmin=None):
"""
    Produces a simulated noise map (using SHTs) corresponding
    to a Gaussian map which, after its white noise standard
    deviation has been divided out, has the power spectrum
    N_ell_standard (which should asymptote to 1 at high ell).
    Instead of specifying N_ell_standard directly, one can
    specify lknee and alpha, from which the following
    standardized N_ell is generated:
    N_ell_standard = [(lknee/ells)^(-alpha) + 1]
"""
if (N_ell_standard is None) and not(lknee is None):
ells = np.arange(lmax)
N_ell_standard = atm_factor(ells,lknee,alpha) + 1.
N_ell_standard[~np.isfinite(N_ell_standard)] = 0
if lmin is not None: N_ell_standard[ells<lmin] = 0
shape,wcs = ivar.shape[-2:],ivar.wcs
if N_ell_standard is None and (lknee is None):
if seed is not None: np.random.seed(seed)
return np.random.standard_normal(shape) / np.sqrt(ivar)
else:
smap = cs.rand_map((1,)+shape,wcs,ps=N_ell_standard[None,None],seed=seed)[0]
return rms_from_ivar(ivar,parea=parea,cylindrical=cylindrical) * smap *np.pi / 180./ 60.
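# Usage sketch (illustrative; assumes `ivar_map` is an inverse-variance enmap
# in 1/muK^2, e.g. built with the ivar() helper further below):
#   nmap = modulated_noise_map(ivar_map, lknee=3000., alpha=-4., lmax=6000, seed=0)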
def galactic_mask(shape,wcs,nside,theta1,theta2):
npix = hp.nside2npix(nside)
orig = np.ones(npix)
orig[hp.query_strip(nside,theta1,theta2)] = 0
return reproject.ivar_hp_to_cyl(orig, shape, wcs, rot=True,do_mask=False,extensive=False)
def north_galactic_mask(shape,wcs,nside):
return galactic_mask(shape,wcs,nside,0,np.deg2rad(90))
def south_galactic_mask(shape,wcs,nside):
return galactic_mask(shape,wcs,nside,np.deg2rad(90),np.deg2rad(180))
def rms_from_ivar(ivar,parea=None,cylindrical=True):
"""
Return rms noise for each pixel in a map in physical units
(uK-arcmin) given a map of the inverse variance per pixel.
Optionally, provide a map of the pixel area.
"""
if parea is None:
shape,wcs = ivar.shape, ivar.wcs
parea = psizemap(shape,wcs) if cylindrical else enmap.pixsizemap(shape,wcs)
with np.errstate(divide='ignore', invalid='ignore',over='ignore'):
var = (1./ivar)
var[ivar<=0] = 0
assert np.all(np.isfinite(var))
return np.sqrt(var*parea)*180*60./np.pi
def psizemap(shape,wcs):
"""
Return map of pixel areas in radians for a cylindrical map.
Contrast with enmap.pixsizemap which is not specific to cylindrical
maps but is not accurate near the poles at the time of this writing.
"""
dra, ddec = wcs.wcs.cdelt*utils.degree
dec = enmap.posmap([shape[-2],1],wcs)[0,:,0]
area = np.abs(dra*(np.sin(np.minimum(np.pi/2.,dec+ddec/2))-np.sin(np.maximum(-np.pi/2.,dec-ddec/2))))
Nx = shape[-1]
return enmap.ndmap(area[...,None].repeat(Nx,axis=-1),wcs)
def ivar(shape,wcs,noise_muK_arcmin,ipsizemap=None):
if ipsizemap is None: ipsizemap = psizemap(shape,wcs)
pmap = ipsizemap*((180.*60./np.pi)**2.)
return pmap/noise_muK_arcmin**2.
def white_noise(shape,wcs,noise_muK_arcmin=None,seed=None,ipsizemap=None,div=None):
"""
Generate a non-band-limited white noise map.
"""
if div is None: div = ivar(shape,wcs,noise_muK_arcmin,ipsizemap=ipsizemap)
if seed is not None: np.random.seed(seed)
return np.random.standard_normal(shape) / np.sqrt(div)
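# Usage sketch (illustrative; assumes a cylindrical geometry (shape,wcs), e.g.
# from rect_geometry further below):
#   div = ivar(shape, wcs, noise_muK_arcmin=10.)
#   nmap = white_noise(shape, wcs, seed=0, div=div)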
def get_ecc(img):
"""Returns eccentricity from central moments of image
"""
from skimage import measure
M = measure.moments_central(np.asarray(img),order=2)
Cov = np.array([[M[2,0],M[1,1]],
[M[1,1],M[0,2]]])/M[0,0]
mu20 = M[2,0]/M[0,0]
mu11 = M[1,1]/M[0,0]
mu02 = M[0,2]/M[0,0]
l1 = (mu20+mu02)/2. + np.sqrt(4.*mu11**2.+(mu20-mu02)**2.)/2.
l2 = (mu20+mu02)/2. - np.sqrt(4.*mu11**2.+(mu20-mu02)**2.)/2.
e = np.sqrt(1.-l2/l1)
return e
def filter_alms(alms,lmin,lmax):
import healpy as hp
ells = np.arange(0,lmax+20,1)
fs = np.ones(ells.shape)
fs[ells<lmin] = 0.
fs[ells>lmax] = 0.
return hp.almxfl(alms,fs)
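# Usage sketch (illustrative; assumes a healpy map `hmap`):
#   alms = hp.map2alm(hmap, lmax=3000)
#   falms = filter_alms(alms, lmin=100, lmax=3000)  # keep 100 <= ell <= 3000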
def rotate_pol_power(shape,wcs,cov,iau=True,inverse=False):
"""Rotate a 2D power spectrum from TQU to TEB (inverse=False) or
back (inverse=True). cov is a (3,3,Ny,Nx) 2D power spectrum.
WARNING: This function is duplicated in orphics.pixcov to make
that module independent. Ideally, it should be implemented in
enlib.enmap.
"""
assert np.all(np.isfinite(cov))
rot = np.zeros((3,3,cov.shape[-2],cov.shape[-1]))
rot[0,0,:,:] = 1
prot = enmap.queb_rotmat(enmap.lmap(shape,wcs), inverse=inverse, iau=iau)
rot[1:,1:,:,:] = prot
Rt = np.transpose(rot, (1,0,2,3))
tmp = np.einsum("ab...,bc...->ac...",rot,cov)
rp2d = np.einsum("ab...,bc...->ac...",tmp,Rt)
return rp2d
def binary_mask(mask,threshold=0.5):
m = np.abs(mask)
m[m<=threshold] = 0
m[m>threshold] = 1
return m
def area_from_mask(mask):
m = binary_mask(mask)
frac = m.sum()*1./np.prod(m.shape[-2:])
return frac*mask.area()*(180./np.pi)**2., frac
def get_central(img,fracy,fracx=None):
if fracy is None and fracx is None: return img
fracx = fracy if fracx is None else fracx
Ny,Nx = img.shape[-2:]
cropy = int(fracy*Ny)
cropx = int(fracx*Nx)
if cropy%2==0 and Ny%2==1:
cropy -= 1
else:
if cropy%2==1 and Ny%2==0: cropy -= 1
if cropx%2==0 and Nx%2==1:
cropx -= 1
else:
if cropx%2==1 and Nx%2==0: cropx -= 1
return crop_center(img,cropy,cropx)
def crop_center(img,cropy,cropx=None,sel=False):
cropx = cropy if cropx is None else cropx
y,x = img.shape[-2:]
startx = x//2-(cropx//2)
starty = y//2-(cropy//2)
selection = np.s_[...,starty:starty+cropy,startx:startx+cropx]
if sel:
ret = selection
else:
ret = img[selection]
return ret
def binned_power(imap,bin_edges=None,binner=None,fc=None,modlmap=None,imap2=None,mask=1):
"""Get the binned power spectrum of a map in one line of code.
(At the cost of flexibility and reusability of expensive parts)"""
from orphics import stats
shape,wcs = imap.shape,imap.wcs
modlmap = enmap.modlmap(shape,wcs) if modlmap is None else modlmap
fc = FourierCalc(shape,wcs) if fc is None else fc
binner = stats.bin2D(modlmap,bin_edges) if binner is None else binner
p2d,_,_ = fc.power2d(imap*mask,imap2*mask if imap2 is not None else None)
cents,p1d = binner.bin(p2d)
return cents,p1d/np.mean(mask**2.)
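# Usage sketch (illustrative; assumes an enmap `imap` and an apodization window
# `taper`, e.g. from get_taper further below; binned_power divides out the
# mean squared window internally):
#   bin_edges = np.arange(100, 4000, 40)
#   cents, p1d = binned_power(imap, bin_edges=bin_edges, mask=taper)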
def interp(x,y,bounds_error=False,fill_value=0.,**kwargs):
return interp1d(x,y,bounds_error=bounds_error,fill_value=fill_value,**kwargs)
def flat_sim(deg,px,lmax=6000,lensed=True,pol=False):
"""
Get some commonly used objects for flat-sky sims.
Not very flexible but is a one-line replacement for
a large fraction of use cases.
"""
from orphics import cosmology
shape,wcs = rect_geometry(width_deg=deg,px_res_arcmin=px,pol=pol)
modlmap = enmap.modlmap(shape,wcs)
cc = cosmology.Cosmology(lmax=lmax,pickling=True,dimensionless=False)
Lmax = modlmap.max()
ells = np.arange(0,Lmax,1)
ps = cosmology.power_from_theory(ells,cc.theory,lensed=lensed,pol=pol)
mgen = MapGen(shape,wcs,ps)
return shape,wcs,modlmap,cc,mgen
def resample_fft(imap,res):
"""
Wrapper around enlib.resample.resample_fft.
Accepts a target map resolution instead of target shape.
Returns an enmap instead of an array.
imap must be periodic/windowed
"""
shape,wcs = imap.shape,imap.wcs
inres = resolution(shape,wcs)
scale = inres/res
oshape,owcs = enmap.scale_geometry(shape, wcs, scale)
return enmap.enmap(resample.resample_fft(imap,oshape[-2:]),owcs)
def resampled_geometry(shape,wcs,res):
inres = resolution(shape,wcs)
scale = inres/res
oshape,owcs = enmap.scale_geometry(shape, wcs, scale)
return oshape,owcs
def split_sky(dec_width,num_decs,ra_width,dec_start=0.,ra_start=0.,ra_extent=90.):
ny = num_decs
wy = dec_width
xw = ra_width
boxes = []
for yindex in range(ny):
y0 = dec_start+yindex*wy
y1 = dec_start+(yindex+1)*wy
ymean = (y0+y1)/2.
cosfact = np.cos(ymean*np.pi/180.)
xfw = ra_extent*cosfact
nx = int(xfw/xw)
for xindex in range(nx):
x0 = ra_start+xindex*xw/cosfact
x1 = ra_start+(xindex+1)*xw/cosfact
box = np.array([[y0,x0],[y1,x1]])
boxes.append(box.copy())
return boxes
def slice_from_box(shape, wcs, box, inclusive=False):
"""slice_from_box(shape, wcs, box, inclusive=False)
Extract the part of the map inside the given box as a selection
without returning the data. Does not work for boxes
that straddle boundaries of maps. Use enmap.submap instead.
Parameters
----------
box : array_like
The [[fromy,fromx],[toy,tox]] bounding box to select.
The resulting map will have a bounding box as close
as possible to this, but will differ slightly due to
the finite pixel size.
inclusive : boolean
Whether to include pixels that are only partially
inside the bounding box. Default: False."""
ibox = enmap.subinds(shape, wcs, box, inclusive)
print(shape,ibox)
islice = utils.sbox2slice(ibox.T)
return islice
def cutup(shape,numy,numx,pad=0):
Ny,Nx = shape
pixs_y = np.linspace(0,shape[-2],num=numy+1,endpoint=True)
pixs_x = np.linspace(0,shape[-1],num=numx+1,endpoint=True)
num_boxes = numy*numx
boxes = np.zeros((num_boxes,2,2))
boxes[:,0,0] = np.tile(pixs_y[:-1],numx) - pad
boxes[:,0,0][boxes[:,0,0]<0] = 0
boxes[:,1,0] = np.tile(pixs_y[1:],numx) + pad
boxes[:,1,0][boxes[:,1,0]>(Ny-1)] = Ny-1
boxes[:,0,1] = np.repeat(pixs_x[:-1],numy) - pad
boxes[:,0,1][boxes[:,0,1]<0] = 0
boxes[:,1,1] = np.repeat(pixs_x[1:],numy) + pad
boxes[:,1,1][boxes[:,1,1]>(Nx-1)] = Nx-1
    boxes = boxes.astype(int)
return boxes
def bounds_from_list(blist):
"""Given blist = [dec0,ra0,dec1,ra1] in degrees
return ndarray([[dec0,ra0],[dec1,ra1]]) in radians
"""
return np.array(blist).reshape((2,2))*np.pi/180.
def rect_geometry(width_arcmin=None,width_deg=None,px_res_arcmin=0.5,proj="car",pol=False,height_deg=None,height_arcmin=None,xoffset_degree=0.,yoffset_degree=0.,extra=False,**kwargs):
"""
Get shape and wcs for a rectangular patch of specified size and coordinate center
"""
if width_deg is not None:
width_arcmin = 60.*width_deg
if height_deg is not None:
height_arcmin = 60.*height_deg
hwidth = width_arcmin/2.
if height_arcmin is None:
vwidth = hwidth
else:
vwidth = height_arcmin/2.
arcmin = utils.arcmin
degree = utils.degree
pos = [[-vwidth*arcmin+yoffset_degree*degree,-hwidth*arcmin+xoffset_degree*degree],[vwidth*arcmin+yoffset_degree*degree,hwidth*arcmin+xoffset_degree*degree]]
shape, wcs = enmap.geometry(pos=pos, res=px_res_arcmin*arcmin, proj=proj,**kwargs)
if pol: shape = (3,)+shape
if extra:
modlmap = enmap.modlmap(shape,wcs)
lmax = modlmap.max()
ells = np.arange(0,lmax,1.)
return shape,wcs,modlmap,ells
else:
return shape, wcs
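# Usage sketch (illustrative): a 10 deg x 10 deg CAR patch at 2 arcmin
# resolution, optionally also returning the 2D multipole map and ell range.
#   shape, wcs = rect_geometry(width_deg=10., px_res_arcmin=2.0)
#   shape, wcs, modlmap, ells = rect_geometry(width_deg=10., px_res_arcmin=2.0, extra=True)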
def downsample_power(shape,wcs,cov,ndown=16,order=0,exp=None,fftshift=True,fft=False,logfunc=lambda x: x,ilogfunc=lambda x: x,fft_up=False):
"""
Smooth a power spectrum by averaging. This can be used to, for example:
1. calculate a PS for use in a noise model
2. calculate an ILC covariance empirically in Fourier-Cartesian domains
    shape -- tuple specifying the shape (Ny,Nx) of the map geometry the power spectrum corresponds to
"""
if ndown<1: return cov
ndown = np.array(ndown).ravel()
if ndown.size==1:
Ny,Nx = shape[-2:]
nmax = max(Ny,Nx)
nmin = min(Ny,Nx)
ndown1 = ndown[0]
ndown2 = int(ndown*nmax*1./nmin)
ndown = np.array((ndown2,ndown1)) if Ny>Nx else np.array((ndown1,ndown2))
else:
assert ndown.size==2
ndown = np.array((ndown[0],ndown[1]))
print("Downsampling power spectrum by factor ", ndown)
cov = logfunc(cov)
afftshift = np.fft.fftshift if fftshift else lambda x: x
aifftshift = np.fft.ifftshift if fftshift else lambda x: x
if fft:
dshape = np.array(cov.shape)
        dshape[-2] //= ndown[0]
        dshape[-1] //= ndown[1]
        cov_low = resample.resample_fft(afftshift(cov), dshape.astype(int))
else:
cov_low = enmap.downgrade(afftshift(cov), ndown)
if not(fft_up):
pix_high = enmap.pixmap(shape[-2:],wcs)
pix_low = pix_high/ndown.reshape((2,1,1))
if exp is not None:
covexp = enmap.enmap(enmap.multi_pow(cov_low,exp),wcs)
else:
covexp = enmap.enmap(cov_low,wcs)
if fft_up:
retcov = resample.resample_fft(covexp, shape)
else:
retcov = covexp.at(pix_low, order=order, mask_nan=False, unit="pix")
return ilogfunc(aifftshift(retcov))
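# Usage sketch (illustrative; assumes `n2d` is a noisy empirical 2D power
# spectrum on the geometry (shape,wcs)):
#   n2d_smooth = downsample_power(shape, wcs, n2d, ndown=16, order=1)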
class MapGen(object):
"""
Once you know the shape and wcs of an ndmap and the input power spectra, you can
pre-calculate some things to speed up random map generation.
"""
def __init__(self,shape,wcs,cov=None,covsqrt=None,pixel_units=False,smooth="auto",ndown=None,order=1):
self.shape = shape
self.wcs = wcs
        if covsqrt is not None:
            self.covsqrt = covsqrt
        else:
            assert cov.ndim>=3 , "Power spectra have to be of shape (ncomp,ncomp,lmax) or (ncomp,ncomp,Ny,Nx)."
if cov.ndim==4:
if not(pixel_units): cov = cov * np.prod(shape[-2:])/enmap.area(shape,wcs )
if ndown:
self.covsqrt = downsample_power(shape,wcs,cov,ndown,order,exp=0.5)
else:
self.covsqrt = enmap.multi_pow(cov, 0.5)
else:
self.covsqrt = enmap.spec2flat(shape, wcs, cov, 0.5, mode="constant",smooth=smooth)
def get_map(self,seed=None,scalar=False,iau=True,real=False,harm=False):
if seed is not None: np.random.seed(seed)
rand = enmap.fft(enmap.rand_gauss(self.shape, self.wcs)) if real else enmap.rand_gauss_harm(self.shape, self.wcs)
data = enmap.map_mul(self.covsqrt, rand)
kmap = enmap.ndmap(data, self.wcs)
if harm:
return kmap
else:
if scalar:
return enmap.ifft(kmap).real
else:
return enmap.harm2map(kmap,iau=iau)
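# Usage sketch (illustrative; assumes `cltt` is a 1D array of C_ell starting
# at ell=0; MapGen expects a (ncomp,ncomp,nells) or (ncomp,ncomp,Ny,Nx) cov):
#   ps = cltt[None, None]              # shape (1,1,nells)
#   mgen = MapGen(shape, wcs, cov=ps)
#   imap = mgen.get_map(seed=1)        # a new realization per seed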
def spec1d_to_2d(shape,wcs,ps):
return enmap.spec2flat(shape,wcs,ps)/(np.prod(shape[-2:])/enmap.area(shape,wcs ))
class FourierCalc(object):
"""
Once you know the shape and wcs of an ndmap, you can pre-calculate some things
to speed up fourier transforms and power spectra.
"""
def __init__(self,shape,wcs,iau=True):
"""Initialize with a geometry shape and wcs."""
self.shape = shape
self.wcs = wcs
self.normfact = enmap.area(self.shape,self.wcs )/ np.prod(self.shape[-2:])**2.
if len(shape) > 2 and shape[-3] > 1:
self.rot = enmap.queb_rotmat(enmap.lmap(shape,wcs),iau=iau)
def iqu2teb(self,emap, nthread=0, normalize=True, rot=True):
"""Performs the 2d FFT of the enmap pixels, returning a complex enmap.
Similar to harm2map, but uses a pre-calculated self.rot matrix.
"""
emap = enmap.samewcs(enmap.fft(emap,nthread=nthread,normalize=normalize), emap)
if emap.ndim > 2 and emap.shape[-3] > 1 and rot:
emap[...,-2:,:,:] = enmap.map_mul(self.rot, emap[...,-2:,:,:])
return emap
def f2power(self,kmap1,kmap2,pixel_units=False):
"""Similar to power2d, but assumes both maps are already FFTed """
norm = 1. if pixel_units else self.normfact
res = np.real(np.conjugate(kmap1)*kmap2)*norm
return res
def f1power(self,map1,kmap2,pixel_units=False,nthread=0):
"""Similar to power2d, but assumes map2 is already FFTed """
kmap1 = self.iqu2teb(map1,nthread,normalize=False)
norm = 1. if pixel_units else self.normfact
return np.real(np.conjugate(kmap1)*kmap2)*norm,kmap1
def ifft(self,kmap):
return enmap.enmap(ifft(kmap,axes=[-2,-1],normalize=True),self.wcs)
def fft(self,emap):
return enmap.samewcs(enmap.fft(emap,normalize=False), emap)
def power2d(self,emap=None, emap2=None,nthread=0,pixel_units=False,skip_cross=False,rot=True, kmap=None, kmap2=None, dtype=None):
"""
Calculate the power spectrum of emap crossed with emap2 (=emap if None)
        Returns in radians^2 by default unless pixel_units is specified
"""
wcs = emap.wcs if emap is not None else kmap.wcs
if kmap is not None:
lteb1 = kmap
ndim = kmap.ndim
if ndim>2 : ncomp = kmap.shape[-3]
else:
lteb1 = self.iqu2teb(emap,nthread,normalize=False,rot=rot)
ndim = emap.ndim
if ndim>2 : ncomp = emap.shape[-3]
if kmap2 is not None:
lteb2 = kmap2
else:
lteb2 = self.iqu2teb(emap2,nthread,normalize=False,rot=rot) if emap2 is not None else lteb1
assert lteb1.shape==lteb2.shape
if ndim > 2 and ncomp > 1:
retpow = np.zeros((ncomp,ncomp,lteb1.shape[-2],lteb1.shape[-1]),dtype=dtype)
for i in range(ncomp):
retpow[i,i] = self.f2power(lteb1[i],lteb2[i],pixel_units)
if not(skip_cross):
for i in range(ncomp):
for j in range(i+1,ncomp):
retpow[i,j] = self.f2power(lteb1[i],lteb2[j],pixel_units)
retpow[j,i] = retpow[i,j]
return retpow,lteb1,lteb2
else:
if lteb1.ndim>2:
lteb1 = lteb1[0]
if lteb2.ndim>2:
lteb2 = lteb2[0]
p2d = self.f2power(lteb1,lteb2,pixel_units)
return enmap.enmap(p2d,wcs),enmap.enmap(lteb1,wcs),enmap.enmap(lteb2,wcs)
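# Usage sketch (illustrative): reuse one FourierCalc for many spectra on a
# fixed geometry, e.g. across simulations.
#   fc = FourierCalc(shape, wcs)
#   p2d, kmap1, _ = fc.power2d(imap)           # auto-spectrum
#   p2d_x, _, _ = fc.power2d(imap, imap2)      # cross-spectrum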
class MapRotator(object):
def __init__(self,shape_source,wcs_source,shape_target,wcs_target):
self.pix_target = get_rotated_pixels(shape_source,wcs_source,shape_target,wcs_target)
def rotate(self,imap,**kwargs):
return rotate_map(imap,pix_target=self.pix_target,**kwargs)
class MapRotatorEquator(MapRotator):
def __init__(self,shape_source,wcs_source,patch_width,patch_height,width_multiplier=1.,
height_multiplier=1.5,pix_target_override_arcmin=None,proj="car",verbose=False,
downsample=True,downsample_pix_arcmin=None):
self.source_pix = np.min(enmap.extent(shape_source,wcs_source)/shape_source[-2:])*60.*180./np.pi
if pix_target_override_arcmin is None:
input_dec = enmap.posmap(shape_source,wcs_source)[0]
max_dec = np.max(np.abs(input_dec))
del input_dec
recommended_pix = self.source_pix*np.cos(max_dec)
if verbose:
print("INFO: Maximum declination in southern patch : ",max_dec*180./np.pi, " deg.")
print("INFO: Recommended pixel size for northern patch : ",recommended_pix, " arcmin")
else:
recommended_pix = pix_target_override_arcmin
shape_target,wcs_target = rect_geometry(width_arcmin=width_multiplier*patch_width*60.,
height_arcmin=height_multiplier*patch_height*60.,
px_res_arcmin=recommended_pix,yoffset_degree=0.,proj=proj)
self.target_pix = recommended_pix
self.wcs_target = wcs_target
if verbose:
print("INFO: Source pixel : ",self.source_pix, " arcmin")
if downsample:
dpix = downsample_pix_arcmin if downsample_pix_arcmin is not None else self.source_pix
self.shape_final,self.wcs_final = rect_geometry(width_arcmin=width_multiplier*patch_width*60.,
height_arcmin=height_multiplier*patch_height*60.,
px_res_arcmin=dpix,yoffset_degree=0.,proj=proj)
else:
self.shape_final = shape_target
self.wcs_final = wcs_target
self.downsample = downsample
MapRotator.__init__(self,shape_source,wcs_source,shape_target,wcs_target)
def rotate(self,imap,**kwargs):
rotated = MapRotator.rotate(self,imap,**kwargs)
if self.downsample:
from pixell import resample
return enmap.ndmap(resample.resample_fft(rotated,self.shape_final),self.wcs_final)
else:
return rotated
def get_rotated_pixels(shape_source,wcs_source,shape_target,wcs_target,inverse=False,pos_target=None,center_target=None,center_source=None):
""" Given a source geometry (shape_source,wcs_source)
return the pixel positions in the target geometry (shape_target,wcs_target)
if the source geometry were rotated such that its center lies on the center
of the target geometry.
WARNING: Only currently tested for a rotation along declination from one CAR
geometry to another CAR geometry.
"""
from pixell import coordinates
# what are the center coordinates of each geometries
if center_source is None: center_source = enmap.pix2sky(shape_source,wcs_source,(shape_source[0]/2.,shape_source[1]/2.))
if center_target is None: center_target = enmap.pix2sky(shape_target,wcs_target,(shape_target[0]/2.,shape_target[1]/2.))
decs,ras = center_source
dect,rat = center_target
# what are the angle coordinates of each pixel in the target geometry
if pos_target is None: pos_target = enmap.posmap(shape_target,wcs_target)
lra = pos_target[1,:,:].ravel()
ldec = pos_target[0,:,:].ravel()
del pos_target
# recenter the angle coordinates of the target from the target center to the source center
if inverse:
newcoord = coordinates.decenter((lra,ldec),(rat,dect,ras,decs))
else:
newcoord = coordinates.recenter((lra,ldec),(rat,dect,ras,decs))
del lra
del ldec
# reshape these new coordinates into enmap-friendly form
new_pos = np.empty((2,shape_target[0],shape_target[1]))
new_pos[0,:,:] = newcoord[1,:].reshape(shape_target)
new_pos[1,:,:] = newcoord[0,:].reshape(shape_target)
del newcoord
# translate these new coordinates to pixel positions in the target geometry based on the source's wcs
pix_new = enmap.sky2pix(shape_source,wcs_source,new_pos)
return pix_new
def rotate_map(imap,shape_target=None,wcs_target=None,pix_target=None,**kwargs):
if pix_target is None:
        pix_target = get_rotated_pixels(imap.shape,imap.wcs,shape_target,wcs_target)
else:
assert (shape_target is None) and (wcs_target is None), "Both pix_target and shape_target,wcs_target must not be specified."
rotmap = enmap.at(imap,pix_target,unit="pix",**kwargs)
return rotmap
## MAXLIKE
def diagonal_cov(power2d):
ny,nx = power2d.shape[-2:]
assert power2d.ndim==2 or power2d.ndim==4
if power2d.ndim == 2: power2d = power2d[None,None]
ncomp = len(power2d)
Cflat = np.zeros((ncomp,ncomp,nx*ny,nx*ny))
# ncomp=3 at most so let's just for loop it without trying to be clever
# Sigurd suggests
# np.einsum("...ii->...i", Cflat)[:] = power2d.reshape(Cflat.shape[:-1])
# but warns it might break with older numpy versions
for i in range(ncomp):
for j in range(ncomp):
np.fill_diagonal(Cflat[i,j],power2d[i,j].reshape(-1))
return Cflat.reshape((ncomp,ncomp,ny,nx,ny,nx))
def ncov(shape,wcs,noise_uk_arcmin):
noise_uK_rad = noise_uk_arcmin*np.pi/180./60.
normfact = np.sqrt(np.prod(enmap.pixsize(shape[-2:],wcs)))
noise_uK_pixel = noise_uK_rad/normfact
return np.diag([(noise_uK_pixel)**2.]*np.prod(shape[-2:]))
def pixcov(shape,wcs,fourier_cov):
    #fourier_cov = fourier_cov.astype(np.float32, copy=False) # float32 input fails with pixell.fft:
    # ValueError: Invalid scheme: The output array and input array dtypes do not correspond to a valid fftw scheme.
    fourier_cov = fourier_cov.astype(np.complex64, copy=False)
    bny,bnx = shape[-2:]
    #from numpy.fft import fft2 as hfft,ifft2 as hifft # TODO: update to fast fft
    from pixell.fft import fft as hfft,ifft as hifft
    #pcov = hfft((hifft(fourier_cov,axes=(-4,-3))),axes=(-2,-1)).real
    pcov = hfft((hifft(fourier_cov,axes=(-4,-3),normalize=True)),axes=(-2,-1)).real # have to normalize when using pixell.fft
return pcov*bnx*bny/enmap.area(shape,wcs)
def get_lnlike(covinv,instamp):
Npix = instamp.size
assert covinv.size==Npix**2
vec = instamp.reshape((Npix,1))
ans = np.dot(np.dot(vec.T,covinv),vec)
assert ans.size==1
return ans[0,0]
def pixcov_sim(shape,wcs,ps,Nsims,seed=None,mean_sub=True,pad=0):
if pad>0:
retmap = enmap.pad(enmap.zeros(shape,wcs), pad, return_slice=False, wrap=False)
oshape,owcs = retmap.shape,retmap.wcs
else:
oshape,owcs = shape,wcs
mg = MapGen(oshape,owcs,ps)
np.random.seed(seed)
umaps = []
for i in range(Nsims):
cmb = mg.get_map()
if mean_sub: cmb -= cmb.mean()
if pad>0:
ocmb = enmap.extract(cmb, shape, wcs)
else:
ocmb = cmb
umaps.append(ocmb.ravel())
pixcov = np.cov(np.array(umaps).T)
return pixcov
## MAP OPERATIONS
def butterworth(ells,ell0,n):
return 1./(1.+(ells*1./ell0)**(2.*n))
def get_taper(shape,wcs,taper_percent = 12.0,pad_percent = 3.0,weight=None):
Ny,Nx = shape[-2:]
if weight is None: weight = np.ones(shape[-2:])
taper = cosine_window(Ny,Nx,lenApodY=int(taper_percent*min(Ny,Nx)/100.),lenApodX=int(taper_percent*min(Ny,Nx)/100.),padY=int(pad_percent*min(Ny,Nx)/100.),padX=int(pad_percent*min(Ny,Nx)/100.))*weight
w2 = np.mean(taper**2.)
return enmap.enmap(taper,wcs),w2
def get_taper_deg(shape,wcs,taper_width_degrees = 1.0,pad_width_degrees = 0.,weight=None,only_y = False):
Ny,Nx = shape[-2:]
if weight is None: weight = np.ones(shape[-2:])
res = resolution(shape,wcs)
pix_apod = int(taper_width_degrees*np.pi/180./res)
pix_pad = int(pad_width_degrees*np.pi/180./res)
taper = enmap.enmap(cosine_window(Ny,Nx,lenApodY=pix_apod,lenApodX=pix_apod if not(only_y) else 0,padY=pix_pad,padX=pix_pad if not(only_y) else 0)*weight,wcs)
w2 = np.mean(taper**2.)
return taper,w2
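# Usage sketch (illustrative): apodize a patch before FFT-based power spectra;
# the w2 = <taper^2> factor debiases the spectrum (binned_power above already
# divides by np.mean(mask**2) when passed mask=taper):
#   taper, w2 = get_taper_deg(shape, wcs, taper_width_degrees=1.0)
#   cents, p1d = binned_power(imap, bin_edges=bin_edges, mask=taper)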
def cosine_window(Ny,Nx,lenApodY=30,lenApodX=30,padY=0,padX=0):
# Based on a routine by Thibaut Louis
win=np.ones((Ny,Nx))
i = np.arange(Nx)
j = np.arange(Ny)
ii,jj = np.meshgrid(i,j)
# ii is array of x indices
# jj is array of y indices
# numpy indexes (j,i)
# xdirection
if lenApodX>0:
r=ii.astype(float)-padX
sel = np.where(ii<=(lenApodX+padX))
win[sel] = 1./2*(1-np.cos(-np.pi*r[sel]/lenApodX))
sel = np.where(ii>=((Nx-1)-lenApodX-padX))
r=((Nx-1)-ii-padX).astype(float)
win[sel] = 1./2*(1-np.cos(-np.pi*r[sel]/lenApodX))
# ydirection
if lenApodY>0:
r=jj.astype(float)-padY
sel = np.where(jj<=(lenApodY+padY))
win[sel] *= 1./2*(1-np.cos(-np.pi*r[sel]/lenApodY))
sel = np.where(jj>=((Ny-1)-lenApodY-padY))
r=((Ny-1)-jj-padY).astype(float)
win[sel] *= 1./2*(1-np.cos(-np.pi*r[sel]/lenApodY))
win[0:padY,:]=0
win[:,0:padX]=0
win[Ny-padY:,:]=0
win[:,Nx-padX:]=0
return win
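# Minimal sketch of the apodization helper above; the 100x100 grid, 10-pixel
# taper and 2-pixel pad are illustrative choices, not defaults used elsewhere
# in this module.
def _example_cosine_window():
    win = cosine_window(100, 100, lenApodY=10, lenApodX=10, padY=2, padX=2)
    w2 = np.mean(win ** 2.)  # mean-square window, used to correct power spectra
    return win, w2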
def filter_map(imap,kfilter):
return enmap.enmap(np.real(ifft(fft(imap,axes=[-2,-1])*kfilter,axes=[-2,-1],normalize=True)) ,imap.wcs)
def gauss_beam(ell,fwhm):
tht_fwhm = np.deg2rad(fwhm / 60.)
return np.exp(-(tht_fwhm**2.)*(ell**2.) / (16.*np.log(2.)))
def sigma_from_fwhm(fwhm):
return fwhm/2./np.sqrt(2.*np.log(2.))
def gauss_beam_real(rs,fwhm):
"""rs in radians ; fwhm in arcmin"""
tht_fwhm = np.deg2rad(fwhm / 60.)
sigma = sigma_from_fwhm(tht_fwhm)
return np.exp(-(rs**2.) / 2./sigma**2.)
def mask_kspace(shape,wcs, lxcut = None, lycut = None, lmin = None, lmax = None):
    output = enmap.ones(shape[-2:],wcs, dtype=int)  # the deprecated np.int alias is replaced by the builtin int
if (lmin is not None) or (lmax is not None): modlmap = enmap.modlmap(shape, wcs)
if (lxcut is not None) or (lycut is not None): ly, lx = enmap.laxes(shape, wcs, oversample=1)
if lmin is not None:
output[np.where(modlmap <= lmin)] = 0
if lmax is not None:
output[np.where(modlmap >= lmax)] = 0
if lxcut is not None:
output[:,np.where(np.abs(lx) < lxcut)] = 0
if lycut is not None:
output[np.where(np.abs(ly) < lycut),:] = 0
return output
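# Sketch of a band-limited Fourier-space mask; the geometry and the multipole
# cuts below are assumptions chosen only to exercise the keyword arguments.
def _example_mask_kspace():
    shape, wcs = enmap.geometry(pos=(0, 0), shape=(128, 128), res=np.deg2rad(2. / 60.))
    kmask = mask_kspace(shape, wcs, lmin=100, lmax=3000, lxcut=20)
    return kmask  # 1 in the retained annulus (minus the |lx|<20 strip), else 0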
## ILC (MAP-LEVEL AND SPEC-LEVEL)
def silc(kmaps,cinv,response=None):
"""Make a simple internal linear combination (ILC) of given fourier space maps at different frequencies
and an inverse covariance matrix for its variance.
From Eq 4 of arXiv:1006.5599
Accepts
-------
kmaps -- (nfreq,Ny,Nx) array of beam-deconvolved fourier transforms at each frequency
cinv -- (nfreq,nfreq,Ny,Nx) array of the inverted covariance matrix
response -- (nfreq,) array of f_nu response factors. Defaults to unity for CMB estimate.
Returns
-------
Fourier transform of ILC estimate, (Ny,Nx) array
"""
response = ilc_def_response(response,cinv)
# Get response^T Cinv kmaps
weighted = ilc_map_term(kmaps,cinv,response)
return weighted * silc_noise(cinv,response)
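# Synthetic illustration of the simple ILC above; the diagonal
# (channel-uncorrelated) covariance and the per-channel noise powers are
# assumptions for the sketch, not a realistic experiment model.
def _example_silc():
    nfreq, ny, nx = 3, 8, 8
    kmaps = np.random.randn(nfreq, ny, nx) + 1j * np.random.randn(nfreq, ny, nx)
    noise_power = np.array([1., 2., 4.])            # assumed per-channel powers
    cinv = np.zeros((nfreq, nfreq, ny, nx))
    for i in range(nfreq):
        cinv[i, i] = 1. / noise_power[i]            # diagonal inverse covariance
    return silc(kmaps, cinv)                        # (ny, nx) CMB estimate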
def cilc(kmaps,cinv,response_a,response_b):
"""Constrained ILC -- Make a constrained internal linear combination (ILC) of given fourier space maps at different frequencies
and an inverse covariance matrix for its variance. The component of interest is specified through its f_nu response vector
response_a. The component to explicitly project out is specified through response_b.
Derived from Eq 18 of arXiv:1006.5599
Accepts
-------
kmaps -- (nfreq,Ny,Nx) array of beam-deconvolved fourier transforms at each frequency
cinv -- (nfreq,nfreq,Ny,Nx) array of the inverted covariance matrix
response_a -- (nfreq,) array of f_nu response factors for component of interest.
response_b -- (nfreq,) array of f_nu response factors for component to project out.
Returns
-------
Fourier transform of ILC estimate, (Ny,Nx) array
"""
brb = ilc_comb_a_b(response_b,response_b,cinv)
arb = ilc_comb_a_b(response_a,response_b,cinv)
arM = ilc_map_term(kmaps,cinv,response_a)
brM = ilc_map_term(kmaps,cinv,response_b)
ara = ilc_comb_a_b(response_a,response_a,cinv)
numer = brb * arM - arb*brM
norm = (ara*brb-arb**2.)
return np.nan_to_num(numer/norm)
def ilc_def_response(response,cinv):
"""Default CMB response -- vector of ones"""
if response is None:
# assume CMB
nfreq = cinv.shape[0]
response = np.ones((nfreq,))
return response
def ilc_index(ndim):
"""Returns einsum indexing given ndim of cinv.
If covmat of 1d powers, return single index, else
return 2 indices for 2D kspace matrix."""
if ndim==3:
return "p"
elif ndim==4:
return "ij"
else:
raise ValueError
def silc_noise(cinv,response=None):
""" Derived from Eq 4 of arXiv:1006.5599"""
response = ilc_def_response(response,cinv)
return np.nan_to_num(1./ilc_comb_a_b(response,response,cinv))
def cilc_noise(cinv,response_a,response_b):
""" Derived from Eq 18 of arXiv:1006.5599 """
brb = ilc_comb_a_b(response_b,response_b,cinv)
ara = ilc_comb_a_b(response_a,response_a,cinv)
arb = ilc_comb_a_b(response_a,response_b,cinv)
bra = ilc_comb_a_b(response_b,response_a,cinv)
numer = (brb)**2. * ara + (arb)**2.*brb - brb*arb*arb - arb*brb*bra
denom = (ara*brb-arb**2.)**2.
return np.nan_to_num(numer/denom)
def ilc_map_term(kmaps,cinv,response):
"""response^T . Cinv . kmaps """
return np.einsum('k,k...->...',response,np.einsum('kl...,l...->k...',cinv,kmaps))
def ilc_comb_a_b(response_a,response_b,cinv):
"""Return a^T cinv b"""
pind = ilc_index(cinv.ndim) # either "p" or "ij" depending on whether we are dealing with 1d or 2d power
return np.nan_to_num(np.einsum('l,l...->...',response_a,np.einsum('k,kl...->l...',response_b,cinv)))
def ilc_empirical_cov(kmaps,bin_edges=None,ndown=16,order=1,fftshift=True,method="isotropic"):
assert method in ['isotropic','downsample']
assert kmaps.ndim==3
ncomp = kmaps.shape[0]
if method=='isotropic':
modlmap = enmap.modlmap(kmaps[0].shape,kmaps.wcs)
binner = stats.bin2D(modlmap,bin_edges)
from scipy.interpolate import interp1d
retpow = np.zeros((ncomp,ncomp,kmaps.shape[-2],kmaps.shape[-1]))
for i in range(ncomp):
        for j in range(i,ncomp):  # include j==i so the diagonal auto-powers are filled
retpow[i,j] = np.real(kmaps[i]*kmaps[j].conj())
if method=='isotropic':
cents,p1d = binner.bin(retpow[i,j])
retpow[i,j] = interp1d(cents,p1d,fill_value="extrapolate",bounds_error=False)(modlmap)
retpow[j,i] = retpow[i,j].copy()
if method=='isotropic':
return retpow
elif method=='downsample':
return downsample_power(retpow.shape,kmaps[0].wcs,retpow,ndown=ndown,order=order,exp=None,fftshift=fftshift,fft=False,logfunc=lambda x: x,ilogfunc=lambda x: x,fft_up=False)
def ilc_cov(ells,cmb_ps,kbeams,freqs,noises,components,fnoise=None,plot=False,
plot_save=None,ellmaxes=None,data=True,fgmax=None,
narray=None,fdict=None,verbose=True,analysis_beam=1.,lmins=None,lmaxs=None):
"""
ells -- either 1D or 2D fourier wavenumbers
cmb_ps -- Theory C_ell_TT in 1D or 2D fourier space
kbeams -- 1d or 2d beam transforms
freqs -- array of floats with frequency bandpasses
noises -- 1d, 2d or float noise powers (in uK^2-radian^2)
components -- list of strings representing foreground components recognized by fgGenerator
fnoise -- A szar.foregrounds.fgNoises object (or derivative) containing foreground power definitions
Returns beam-deconvolved covariance matrix
"""
nfreqs = len(freqs)
if cmb_ps.ndim==2:
cshape = (nfreqs,nfreqs,1,1)
elif cmb_ps.ndim==1:
cshape = (nfreqs,nfreqs,1)
else:
raise ValueError
Covmat = np.tile(cmb_ps,cshape)*analysis_beam**2.
for i,freq1 in enumerate(freqs):
for j,freq2 in enumerate(freqs):
if verbose: print("Populating covariance for ",freq1,"x",freq2)
if narray is not None:
Covmat[i,j,...] += narray[i,j,...]
else:
if i==j:
kbeam1 = kbeams[i]
noise1 = noises[i]
instnoise = np.nan_to_num(noise1*analysis_beam**2./kbeam1**2.)
Covmat[i,j,...] += instnoise
for component in components:
if fdict is None:
fgnoise = fnoise.get_noise(component,freq1,freq2,ells)
else:
fgnoise = np.nan_to_num(fdict[component](ells,freq1,freq2))
fgnoise[np.abs(fgnoise)>1e90] = 0
if (fgmax is not None) and component=='tsz':
fgnoise[ells>fgmax] = fgnoise[fgmax]
fgnoise = fgnoise * analysis_beam**2.
Covmat[i,j,...] += fgnoise
if data:
Covmat[i,j][ells>ellmaxes[i]] = 1e90 # !!!
Covmat[i,j][ells>ellmaxes[j]] = 1e90 # !!!
#if i>=j:
# io.plot_img(np.fft.fftshift(np.log10(Covmat[i,j,:])),lim=[-10,3])
if i==j:
if lmins is not None: Covmat[i,j][ells<lmins[i]] = np.inf
if lmaxs is not None: Covmat[i,j][ells>lmaxs[i]] = np.inf
return Covmat
def ilc_cinv(ells,cmb_ps,kbeams,freqs,noises,components,fnoise,plot=False,plot_save=None,eigpow=True,ellmaxes=None,data=True,fgmax=None,narray=None):
"""
ells -- either 1D or 2D fourier wavenumbers
cmb_ps -- Theory C_ell_TT in 1D or 2D fourier space
kbeams -- 1d or 2d beam transforms
freqs -- array of floats with frequency bandpasses
noises -- 1d, 2d or float noise powers (in uK^2-radian^2)
components -- list of strings representing foreground components recognized by fgGenerator
fnoise -- A szar.foregrounds.fgNoises object (or derivative) containing foreground power definitions
Returns beam-deconvolved inv covariance matrix
"""
Covmat = np.nan_to_num(ilc_cov(ells,cmb_ps,kbeams,freqs,noises,components,fnoise,plot,plot_save,ellmaxes=ellmaxes,data=data,fgmax=fgmax,narray=narray))
print("Inverting covariance...")
if eigpow:
from pixell import utils
cinv = utils.eigpow(Covmat, -1.,axes=[0,1])
return cinv,Covmat
else:
cinv = np.linalg.inv(Covmat.T).T
return cinv
def minimum_ell(shape,wcs):
"""
Returns the lowest angular wavenumber of an ndmap
rounded down to the nearest integer.
"""
modlmap = enmap.modlmap(shape,wcs)
min_ell = modlmap[modlmap>0].min()
return int(min_ell)
def resolution(shape,wcs):
return np.abs(wcs.wcs.cdelt[1])*utils.degree
def inpaint_cg(imap,rand_map,mask,power2d,eps=1.e-8):
"""
by Thibaut Louis
imap -- masked map
rand_map -- random map with same power
mask -- mask
power2d -- 2d S+N power : IMPORTANT, this must be non-zero up to pixel scale
eps
"""
assert imap.ndim==2
nyside,nxside = imap.shape
def apply_px_c_inv_px(my_map):
my_map.shape=(nyside,nxside)
#apply x_proj
my_new_map=(1-mask)*my_map
        # change to Fourier representation
a_l = fft(my_new_map,axes=[-2,-1])
# apply inverse power spectrum
a_l=a_l*1/power2d
# change back to pixel representation
my_new_map = ifft(a_l,normalize=True,axes=[-2,-1])
#Remove the imaginary part
my_new_map=my_new_map.real
# apply x_proj
my_new_map=(1-mask)*my_new_map
#Array to vector
my_new_map.shape=(nxside*nyside,)
my_map.shape=(nxside*nyside,)
return(my_new_map)
def apply_px_c_inv_py(my_map):
# apply y_proj
my_map.shape=(nyside,nxside)
my_new_map=mask*my_map
        # change to Fourier representation
a_l = fft(my_new_map,axes=[-2,-1])
# apply inverse power spectrum
a_l=a_l*1/power2d
# change back to pixel representation
my_new_map = ifft(a_l,normalize=True,axes=[-2,-1])
#Remove the imaginary part
my_new_map=my_new_map.real
# apply x_proj
my_new_map=(1-mask)*my_new_map
#Array to vector
my_new_map.shape=(nxside*nyside,)
return(my_new_map)
b=-apply_px_c_inv_py(imap-rand_map)
#Number of iterations
i_max=2000
#initial value of x
x=b
i=0
r=b-apply_px_c_inv_px(x)
d=r
delta_new=np.inner(r,r)
delta_o=delta_new
delta_array=np.zeros(shape=(i_max))
while i<i_max and delta_new > eps**2*delta_o:
# print ("")
# print ("number of iterations:", i)
# print ("")
# print ("eps**2*delta_o=",eps**2*delta_o)
# print ("")
# print ("delta new=",delta_new)
q=apply_px_c_inv_px(d)
alpha=delta_new/(np.inner(d,q))
x=x+alpha*d
        if (i+1) % 50 == 0:  # periodically recompute the exact residual to curb round-off drift
r=b-apply_px_c_inv_px(x)
else:
r=r-alpha*q
delta_old=delta_new
delta_new=np.inner(r,r)
beta=delta_new/delta_old
d=r+beta*d
i=i+1
#print "delta_o=", delta_o
#print "delta_new=", delta_new
x.shape=(nyside,nxside)
x_old=x
x=x+rand_map*(1-mask)
complete=imap*mask
rebuild_map=complete+x
print("Num iterations : ",i)
return rebuild_map
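# Hedged demo of the CG inpainter above on a tiny white-noise example; the
# flat S+N power (non-zero at every Fourier mode, as required) and the hole
# location are assumptions for illustration only.
def _example_inpaint_cg():
    ny = nx = 32
    power2d = np.ones((ny, nx))              # flat 2D power, illustration only
    imap = np.random.randn(ny, nx)
    rand_map = np.random.randn(ny, nx)       # independent realization, same power
    mask = np.ones((ny, nx))
    mask[12:20, 12:20] = 0.                  # 0 marks the pixels to fill in
    return inpaint_cg(imap, rand_map, mask, power2d, eps=1.e-6)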
## WORKING WITH DATA
def split_calc(isplits,jsplits,icoadd,jcoadd,fourier_calc=None,alt=True):
"""
Calculate the best estimate of the signal (from mean of crosses)
and of the noise (total - mean crosses) power.
isplits (and jsplits) are (nsplits,Ny,Nx) fourier transforms of
windowed maps. No window correction is applied to the result.
No polarization rotation is done either.
"""
shape,wcs = isplits.shape,isplits.wcs
assert isplits.ndim==3
fc = fourier_calc if fourier_calc is not None else maps.FourierCalc(shape[-2:],wcs)
total = fc.f2power(icoadd,jcoadd)
insplits = isplits.shape[0]
jnsplits = jsplits.shape[0]
if alt:
assert insplits==jnsplits
noise = 0.
for i in range(insplits):
diff1 = isplits[i] - icoadd
diff2 = jsplits[i] - jcoadd
noise = noise + fc.f2power(diff1,diff2)
noise = noise / ((1.-1./insplits)*insplits**2)
crosses = total - noise
else:
ncrosses = 0.
totcross = 0.
for i in range(insplits):
for j in range(jnsplits):
if i==j: continue # FIXME: REALLY?! What about for independent experiments?
totcross += fc.f2power(isplits[i],jsplits[j])
ncrosses += 1.
crosses = totcross / ncrosses
noise = total - crosses
return total,crosses,noise
def noise_from_splits(splits,fourier_calc=None,nthread=0,do_cross=True):
"""
Calculate noise power spectra by subtracting cross power of splits
from autopower of splits. Optionally calculate cross power spectra
of T,E,B from I,Q,U.
splits -- (nsplits,ncomp,Ny,Nx) arrays
ncomp can be 1 for T only, or 3 for I,Q,U
ncomp could be > 3 for e.g. I1,Q1,U1,I2,Q2,U2 for 2 arrays
"""
try:
wcs = splits.wcs
    except AttributeError:
wcs = splits[0].wcs
splits = enmap.enmap(np.asarray(splits),wcs).astype(np.float32)
assert splits.ndim==3 or splits.ndim==4
if splits.ndim == 3: splits = splits[:,None,:,:]
ncomp = splits.shape[1]
ndim = splits.ndim
if fourier_calc is None:
shape = splits.shape[-3:] if do_cross else splits.shape[-2:]
fourier_calc = FourierCalc(shape,wcs)
Nsplits = splits.shape[0]
if do_cross: assert ncomp==3 or ncomp==1
# Get fourier transforms of I,Q,U
ksplits = [fourier_calc.iqu2teb(split, nthread=nthread, normalize=False, rot=False) for split in splits]
del splits
if do_cross:
kteb_splits = []
        # Rotate I,Q,U to T,E,B for cross power (not necessary for noise)
for ksplit in ksplits:
kteb_splits.append( ksplit.copy())
if (ndim==3 and ncomp==3):
kteb_splits[-1][...,-2:,:,:] = enmap.map_mul(fourier_calc.rot, kteb_splits[-1][...,-2:,:,:])
# get auto power of I,Q,U
auto = 0.
for ksplit in ksplits:
auto += fourier_calc.power2d(kmap=ksplit)[0]
auto /= Nsplits
# do cross powers of I,Q,U
Ncrosses = (Nsplits*(Nsplits-1)/2)
cross = 0.
for i in range(len(ksplits)):
for j in range(i+1,len(ksplits)):
cross += fourier_calc.power2d(kmap=ksplits[i],kmap2=ksplits[j])[0]
cross /= Ncrosses
if do_cross:
# do cross powers of T,E,B
cross_teb = 0.
for i in range(len(ksplits)):
for j in range(i+1,len(ksplits)):
cross_teb += fourier_calc.power2d(kmap=kteb_splits[i],kmap2=kteb_splits[j])[0]
cross_teb /= Ncrosses
else:
cross_teb = None
del ksplits
# get noise model for I,Q,U
noise = (auto-cross)/Nsplits
# return I,Q,U noise model and T,E,B cross-power
return noise,cross_teb
### FULL SKY
def get_planck_cutout(imap,ra,dec,arcmin,px=2.0,arcmin_y=None):
if arcmin_y is None: arcmin_y = arcmin
xsize = int(arcmin/px)
ysize = int(arcmin_y/px)
shape,wcs = enmap.geometry(pos=(0,0),shape=(ysize,xsize),res=np.deg2rad(px/60.))
return enmap.enmap(cutout_gnomonic(imap,rot=(ra,dec),coord=['G','C'],
xsize=xsize,ysize=ysize,reso=px,gal_cut=0,flip='geo'),wcs)
def cutout_gnomonic(map,rot=None,coord=None,
xsize=200,ysize=None,reso=1.5,
nest=False,remove_dip=False,
remove_mono=False,gal_cut=0,
flip='astro'):
"""Obtain a cutout from a healpix map (given as an array) in Gnomonic projection.
Derivative of healpy.visufunc.gnomonic
Parameters
----------
map : array-like
The map to project, supports masked maps, see the `ma` function.
rot : scalar or sequence, optional
Describe the rotation to apply.
In the form (lon, lat, psi) (unit: degrees) : the point at
longitude *lon* and latitude *lat* will be at the center. An additional rotation
of angle *psi* around this direction is applied.
coord : sequence of character, optional
Either one of 'G', 'E' or 'C' to describe the coordinate
system of the map, or a sequence of 2 of these to rotate
the map from the first to the second coordinate system.
xsize : int, optional
The size of the image. Default: 200
ysize : None or int, optional
The size of the image. Default: None= xsize
reso : float, optional
Resolution (in arcmin). Default: 1.5 arcmin
nest : bool, optional
If True, ordering scheme is NESTED. Default: False (RING)
flip : {'astro', 'geo'}, optional
Defines the convention of projection : 'astro' (default, east towards left, west towards right)
      or 'geo' (east towards right, west towards left)
remove_dip : bool, optional
If :const:`True`, remove the dipole+monopole
remove_mono : bool, optional
If :const:`True`, remove the monopole
gal_cut : float, scalar, optional
Symmetric galactic cut for the dipole/monopole fit.
Removes points in latitude range [-gal_cut, +gal_cut]
See Also
--------
gnomview, mollview, cartview, orthview, azeqview
"""
import pylab
import healpy as hp
import healpy.projaxes as PA
margins = (0.075,0.05,0.075,0.05)
extent = (0.0,0.0,1.0,1.0)
extent = (extent[0]+margins[0],
extent[1]+margins[1],
extent[2]-margins[2]-margins[0],
extent[3]-margins[3]-margins[1])
f=pylab.figure(0,figsize=(5.5,6))
map = hp.pixelfunc.ma_to_array(map)
ax=PA.HpxGnomonicAxes(f,extent,coord=coord,rot=rot,
format="%.3g",flipconv=flip)
if remove_dip:
map=hp.pixelfunc.remove_dipole(map,gal_cut=gal_cut,nest=nest,copy=True)
elif remove_mono:
map=hp.pixelfunc.remove_monopole(map,gal_cut=gal_cut,nest=nest,copy=True)
img = ax.projmap(map,nest=nest,coord=coord,
xsize=xsize,ysize=ysize,reso=reso)
pylab.close(f)
return img
### STACKING
def aperture_photometry(instamp,aperture_radius,annulus_width,modrmap=None):
# inputs in radians, outputs in arcmin^2
stamp = instamp.copy()
if modrmap is None: modrmap = stamp.modrmap()
mean = stamp[np.logical_and(modrmap>aperture_radius,modrmap<(aperture_radius+annulus_width))].mean()
stamp -= mean
pix_scale=resolution(stamp.shape,stamp.wcs)*(180*60)/np.pi
flux = stamp[modrmap<aperture_radius].sum()*pix_scale**2
return flux #* enmap.area(stamp.shape,stamp.wcs )/ np.prod(stamp.shape[-2:])**2. *((180*60)/np.pi)**2.
def aperture_photometry2(instamp,aperture_radius,modrmap=None):
# inputs in radians, outputs in arcmin^2
stamp = instamp.copy()
if modrmap is None: modrmap = stamp.modrmap()
annulus_out = np.sqrt(2.) * aperture_radius
flux = stamp[modrmap<aperture_radius].mean() - stamp[np.logical_and(modrmap>aperture_radius,modrmap<(annulus_out))].mean()
return flux
def interpolate_grid(inGrid,inY,inX,outY=None,outX=None,regular=True,kind="cubic",kx=3,ky=3,**kwargs):
'''
if inGrid is [j,i]
Assumes inY is along j axis
Assumes inX is along i axis
Similarly for outY/X
'''
if regular:
interp_spline = RectBivariateSpline(inY,inX,inGrid,kx=kx,ky=ky,**kwargs)
if (outY is None) and (outX is None): return interp_spline
outGrid = interp_spline(outY,outX)
else:
interp_spline = interp2d(inX,inY,inGrid,kind=kind,**kwargs)
if (outY is None) and (outX is None): return lambda y,x: interp_spline(x,y)
outGrid = interp_spline(outX,outY)
return outGrid
class MatchedFilter(object):
def __init__(self,shape,wcs,template=None,noise_power=None):
shape = shape[-2:]
area = enmap.area(shape,wcs)
self.normfact = area / (np.prod(shape))**2
if noise_power is not None: self.n2d = noise_power
if template is not None: self.ktemp = enmap.fft(template,normalize=False)
def apply(self,imap=None,kmap=None,template=None,ktemplate=None,noise_power=None,kmask=None):
if kmap is None:
kmap = enmap.fft(imap,normalize=False)
else:
assert imap is None
if kmask is None: kmask = kmap.copy()*0.+1.
n2d = self.n2d if noise_power is None else noise_power
if ktemplate is None:
ktemp = self.ktemp if template is None else enmap.fft(template,normalize=False)
else:
ktemp = ktemplate
in2d = 1./n2d
in2d[~np.isfinite(in2d)] = 0
phi_un = np.sum(ktemp.conj()*kmap*self.normfact*kmask*in2d).real
phi_var = 1./np.sum(ktemp.conj()*ktemp*self.normfact*kmask*in2d).real
return phi_un*phi_var, phi_var
def mask_center(inmap):
imap = inmap.copy()
Ny,Nx = imap.shape
assert Ny==Nx
N = Ny
if N%2==1:
imap[N//2,N//2] = np.nan
else:
imap[N//2,N//2] = np.nan
imap[N//2-1,N//2] = np.nan
imap[N//2,N//2-1] = np.nan
imap[N//2-1,N//2-1] = np.nan
return imap
class Purify(object):
def __init__(self,shape,wcs,window):
px = resolution(shape,wcs)
self.windict = init_deriv_window(window,px)
lxMap,lyMap,self.modlmap,self.angLMap,lx,ly = get_ft_attributes(shape,wcs)
def lteb_from_iqu(self,imap,method='pure',flip_q=True,iau=True):
"""
maps must have window applied!
"""
sgnq = -1 if flip_q else 1
fT, fE, fB = iqu_to_pure_lteb(imap[0],sgnq*imap[1],imap[2],self.modlmap,self.angLMap,windowDict=self.windict,method=method,iau=iau)
return fT,-fE,-fB
def init_deriv_window(window,px):
"""
px is in radians
"""
def matrixShift(l,row_shift,column_shift):
m1=np.hstack((l[:,row_shift:],l[:,:row_shift]))
m2=np.vstack((m1[column_shift:],m1[:column_shift]))
return m2
delta=px
Win=window[:]
dWin_dx=(-matrixShift(Win,-2,0)+8*matrixShift(Win,-1,0)-8*matrixShift(Win,1,0)+matrixShift(Win,2,0))/(12*delta)
dWin_dy=(-matrixShift(Win,0,-2)+8*matrixShift(Win,0,-1)-8*matrixShift(Win,0,1)+matrixShift(Win,0,2))/(12*delta)
d2Win_dx2=(-matrixShift(dWin_dx,-2,0)+8*matrixShift(dWin_dx,-1,0)-8*matrixShift(dWin_dx,1,0)+matrixShift(dWin_dx,2,0))/(12*delta)
d2Win_dy2=(-matrixShift(dWin_dy,0,-2)+8*matrixShift(dWin_dy,0,-1)-8*matrixShift(dWin_dy,0,1)+matrixShift(dWin_dy,0,2))/(12*delta)
d2Win_dxdy=(-matrixShift(dWin_dy,-2,0)+8*matrixShift(dWin_dy,-1,0)-8*matrixShift(dWin_dy,1,0)+matrixShift(dWin_dy,2,0))/(12*delta)
    #In the returned dictionary we flip the sign of the first derivatives to agree with the numpy axis convention
return {'Win':Win, 'dWin_dx':-dWin_dx,'dWin_dy':-dWin_dy, 'd2Win_dx2':d2Win_dx2, 'd2Win_dy2':d2Win_dy2,'d2Win_dxdy':d2Win_dxdy}
def iqu_to_pure_lteb(T_map,Q_map,U_map,modLMap,angLMap,windowDict,method='pure',iau=True):
"""
maps must have window applied!
"""
if iau: angLMap = -angLMap
window = windowDict
win =window['Win']
dWin_dx=window['dWin_dx']
dWin_dy=window['dWin_dy']
d2Win_dx2=window['d2Win_dx2']
d2Win_dy2=window['d2Win_dy2']
d2Win_dxdy=window['d2Win_dxdy']
T_temp=T_map.copy() #*win
fT=fft(T_temp,axes=[-2,-1])
Q_temp=Q_map.copy() #*win
fQ=fft(Q_temp,axes=[-2,-1])
U_temp=U_map.copy() #*win
fU=fft(U_temp,axes=[-2,-1])
fE=fT.copy()
fB=fT.copy()
fE=fQ[:]*np.cos(2.*angLMap)+fU[:]*np.sin(2.*angLMap)
fB=-fQ[:]*np.sin(2.*angLMap)+fU[:]*np.cos(2.*angLMap)
if method=='standard':
return fT, fE, fB
Q_temp=Q_map.copy()*dWin_dx
QWx=fft(Q_temp,axes=[-2,-1])
Q_temp=Q_map.copy()*dWin_dy
QWy=fft(Q_temp,axes=[-2,-1])
U_temp=U_map.copy()*dWin_dx
UWx=fft(U_temp,axes=[-2,-1])
U_temp=U_map.copy()*dWin_dy
UWy=fft(U_temp,axes=[-2,-1])
U_temp=2.*Q_map*d2Win_dxdy-U_map*(d2Win_dx2-d2Win_dy2)
QU_B=fft(U_temp,axes=[-2,-1])
U_temp=-Q_map*(d2Win_dx2-d2Win_dy2)-2.*U_map*d2Win_dxdy
QU_E=fft(U_temp,axes=[-2,-1])
modLMap=modLMap+2
fB[:] += QU_B[:]*(1./modLMap)**2
fB[:]-= (2.*1j)/modLMap*(np.sin(angLMap)*(QWx[:]+UWy[:])+np.cos(angLMap)*(QWy[:]-UWx[:]))
if method=='hybrid':
return fT, fE, fB
fE[:]+= QU_E[:]*(1./modLMap)**2
fE[:]-= (2.*1j)/modLMap*(np.sin(angLMap)*(QWy[:]-UWx[:])-np.cos(angLMap)*(QWx[:]+UWy[:]))
if method=='pure':
return fT, fE, fB
def gauss_kern(sigmaY,sigmaX,nsigma=5.0):
"""
@ brief Returns a normalized 2D gauss kernel array for convolutions
^
| Y
|
------>
X
"""
sizeY = int(nsigma*sigmaY)
sizeX = int(nsigma*sigmaX)
y, x = np.mgrid[-sizeY:sizeY+1, -sizeX:sizeX+1]
g = np.exp(-(x**2/(2.*sigmaX**2)+y**2/(2.*sigmaY**2)))
return g / g.sum()
def gkern_interp(shape,wcs,rs,bprof,fwhm_guess,nsigma=20.0):
"""
@ brief Returns a normalized 2D kernel array for convolutions
given a 1D profile shape.
rs in radians
bprof is profile
fwhm_guess is in arcmin
"""
fwhm_guess *= np.pi/(180.*60.)
# Approximate pixel size
py,px = enmap.pixshape(shape, wcs, signed=False)
sigma = fwhm_guess/(np.sqrt(8.*np.log(2.)))
modrmap = enmap.modrmap(shape,wcs)
ny,nx = shape
sy = int(nsigma*sigma/py)
sx = int(nsigma*sigma/px)
if ((ny%2==0) and (sy%2==1)) or ((ny%2==1) and (sy%2==0)): sy+=1
if ((nx%2==0) and (sx%2==1)) or ((nx%2==1) and (sx%2==0)): sx+=1
rmap = crop_center(modrmap,sy,sx)
g = interp(rs,bprof)(rmap)
return g / g.sum()
def convolve_profile(imap,rs,bprof,fwhm_guess,nsigma=20.0):
"""
rs in radians
bprof is profile
fwhm_guess is in arcmin
"""
g = gkern_interp(imap.shape,imap.wcs,rs,bprof,fwhm_guess,nsigma=nsigma)
print(g.shape)
return convolve(imap,g)
def convolve(imap,kernel):
from scipy import signal
g = kernel
ncomps = imap.shape[0] if imap.ndim>2 else 1
imaps = imap.reshape((ncomps,imap.shape[-2],imap.shape[-1]))
data = []
for i in range(imaps.shape[0]):
omap = signal.convolve(imaps[i],g, mode='same')
data.append(omap)
if ncomps==1:
data = np.array(data).reshape((imap.shape[-2],imap.shape[-1]))
else:
data = np.array(data).reshape((ncomps,imap.shape[-2],imap.shape[-1]))
return enmap.enmap(data,imap.wcs)
def convolve_gaussian(imap,fwhm=None,nsigma=5.0):
"""
@brief convolve a map with a Gaussian beam (real space operation)
@param kernel real-space 2D kernel
@param fwhm Full Width Half Max in arcmin
@param nsigma Number of sigmas the Gaussian kernel is defined out to.
@param sigmaY standard deviation of Gaussian in pixel units in the Y direction
@param sigmaX standard deviation of Gaussian in pixel units in the X direction
"""
fwhm *= np.pi/(180.*60.)
py,px = enmap.pixshape(imap.shape, imap.wcs)
sigmaY = fwhm/(np.sqrt(8.*np.log(2.))*py)
sigmaX = fwhm/(np.sqrt(8.*np.log(2.))*px)
g = gauss_kern(sigmaY, sigmaX,nsigma=nsigma)
return convolve(imap,g)
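# Sketch of real-space Gaussian smoothing with the helper above; the 64x64
# half-arcminute geometry and the 2-arcmin FWHM are illustrative assumptions.
def _example_convolve_gaussian():
    shape, wcs = enmap.geometry(pos=(0, 0), shape=(64, 64), res=np.deg2rad(0.5 / 60.))
    imap = enmap.enmap(np.random.randn(*shape), wcs)
    return convolve_gaussian(imap, fwhm=2.0, nsigma=5.0)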
def get_grf_cmb(shape,wcs,theory,spec,seed=None):
modlmap = enmap.modlmap(shape,wcs)
lmax = modlmap.max()
ells = np.arange(0,lmax,1)
Ny,Nx = shape[-2:]
    return get_grf_realization(shape,wcs,interp(ells,theory.gCl(spec,ells))(modlmap).reshape((1,1,Ny,Nx)),seed=seed)
def get_grf_realization(shape,wcs,power2d,seed=None):
mg = MapGen(shape,wcs,power2d)
return mg.get_map(seed=seed)
def ftrans(p2d,tfunc=np.log10):
wcs = None
    try: wcs = p2d.wcs
    except AttributeError: pass
t2d = tfunc(np.fft.fftshift(p2d))
if wcs is None:
return t2d
else:
return enmap.enmap(t2d,wcs)
def real_space_filter(kfilter):
return np.fft.ifftshift(ifft(kfilter+0j,normalize=True,axes=[-2,-1]).real)
def rfilter(imap,kfilter=None,rfilter=None,mode='same',boundary='wrap',**kwargs):
"""
Filter a real-space map imap with a k-space filter kfilter
but using a real-space convolution.
"""
if rfilter is None: rfilter = real_space_filter(kfilter)
from scipy.signal import convolve2d
return enmap.samewcs(convolve2d(imap,rfilter,mode=mode,boundary=boundary,**kwargs),imap)
def rgeo(degrees,pixarcmin,**kwargs):
"""
Return shape,wcs geometry pair for patch of width degrees and
resolution pixarcmin.
"""
return rect_geometry(width_deg=degrees,px_res_arcmin=pixarcmin,**kwargs)
class SymMat(object):
"""
A memory efficient but not very flexible symmetric matrix.
If a matrix (e.g. covariance) is large but symmetric,
    this lets you reduce the memory footprint by nearly 50% by
only storing the upper right triangle.
e.g.:
>>> a = SymMat(3,(100,100))
>>> a[0,1] = np.ones((100,100))
    After this, a[0,1] and a[1,0] will return the same
matrix.
However, only two leading indices are supported (hence, a matrix)
and the usual numpy slicing on these doesn't work. a[0][1] doesn't
    work either. The trailing dimensions can be of arbitrary shape.
e.g.
>>> a = SymMat(3,(2,100,100))
is also valid.
You can convert the symmetric matrix to a full footprint good old
numpy array with:
>>> array = a.to_array()
However, you usually don't want to do this on the full array, since
the whole point of using this was to never have the full matrix
in memory. Instead, you are allowed to specify a slice of the
trailing dimensions:
>>> array = a.to_array(np.s_[:10,:10])
allowing you to loop over slices as you please.
"""
def __init__(self,ncomp,shape,data=None):
self.ncomp = ncomp
self.shape = shape
ndat = ncomp*(ncomp+1)//2
self.data = data if data is not None else np.empty((ndat,)+shape)
def yx_to_k(self,y,x):
if y>x: return self.yx_to_k(x,y)
return y*self.ncomp+x - y*(y+1)//2
def __getitem__(self, tup):
y, x = tup
return self.data[self.yx_to_k(y,x)]
def __setitem__(self, tup, data):
y, x = tup
self.data[self.yx_to_k(y,x)] = data
def to_array(self,sel=np.s_[...],flatten=False):
"""
Convert the SymMat object to a numpy array, optionally selecting a
slice of the data.
Args:
sel: a numpy slice allowing for selection of the projected array.
Use np.s_ to construct this.
flatten: whether to flatten the array before selecting with sel
"""
oshape = self.data[0].reshape(-1)[sel].shape if flatten else self.data[0][sel].shape
out = np.empty((self.ncomp,self.ncomp,)+oshape)
for y in range(self.ncomp):
for x in range(y,self.ncomp):
kindex = self.yx_to_k(y,x)
data = self.data[kindex].reshape(-1) if flatten else self.data[kindex]
out[y,x] = data[sel].copy()
if x!=y: out[x,y] = out[y,x].copy()
return out
def symmat_from_data(data):
ndat = data.shape[0]
shape = data.shape[1:]
ncomp = int(0.5*(np.sqrt(8*ndat+1)-1))
return SymMat(ncomp,shape,data=data)
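# Round-trip sketch for the helpers above: fill an upper triangle, rebuild a
# SymMat from its packed buffer, and expand a trailing-dimension slice. The
# 3-component, (4,4)-trailing-shape sizes are assumptions for the demo.
def _example_symmat_roundtrip():
    a = SymMat(3, (4, 4))
    for y in range(3):
        for x in range(y, 3):
            a[y, x] = np.full((4, 4), y + x)
    b = symmat_from_data(a.data)        # ncomp is recovered from the buffer size
    return b.to_array(np.s_[:2, :2])    # (3, 3, 2, 2), symmetric in the leading axes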
def change_alm_lmax(alms, lmax, dtype=np.complex128):
ilmax = hp.Alm.getlmax(alms.shape[-1])
olmax = lmax
oshape = list(alms.shape)
oshape[-1] = hp.Alm.getsize(olmax)
oshape = tuple(oshape)
alms_out = np.zeros(oshape, dtype = dtype)
flmax = min(ilmax, olmax)
for m in range(flmax+1):
lminc = m
lmaxc = flmax
idx_isidx = hp.Alm.getidx(ilmax, lminc, m)
idx_ieidx = hp.Alm.getidx(ilmax, lmaxc, m)
idx_osidx = hp.Alm.getidx(olmax, lminc, m)
idx_oeidx = hp.Alm.getidx(olmax, lmaxc, m)
alms_out[..., idx_osidx:idx_oeidx+1] = alms[..., idx_isidx:idx_ieidx+1].copy()
return alms_out
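# Quick hedged illustration of the alm truncation above; the flat input
# spectrum is a placeholder assumption, not a physical C_ell.
def _example_change_alm_lmax():
    cl = np.ones(301)                        # C_ell = 1 up to lmax = 300
    alms = hp.synalm(cl, lmax=300, new=True)
    return change_alm_lmax(alms, 100)        # same alms, truncated to lmax = 100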
| bsd-2-clause | -5,363,611,843,508,144,000 | 33.541826 | 203 | 0.614543 | false |
TomekProdevelopers/e24cli | src/func.py | 1 | 2587 | import boto3
class Cfg:
ec2 = None
cfg = Cfg()
class check_profile(object):
def __init__(self, f):
self.f = f
def __call__(self, *args):
if cfg.ec2 is None:
print("Please set up profile: e24 or aws")
return
self.f(*args)
def set_default_profile(params_args):
profile = params_args.profile[0]
boto3.setup_default_session(profile_name= profile)
if profile == 'e24':
ec2 = boto3.resource('ec2', endpoint_url ='https://eu-poland-1poznan.api.e24cloud.com')
else:
ec2 = boto3.resource('ec2')
cfg.ec2 = ec2
#print("EC2 set_default_profile: " + str(ec2))
#print("EC2 cfg set_default_profile: " + str(cfg.ec2 ))
# def print_ec2():
# print("EC2: " + str(cfg.ec2))
@check_profile
def get_images_list(pattern):
ids = pattern.filter
print("id " + str(ids) )
if ids == None or len(ids) == 0:
images = cfg.ec2.images.all()
else:
images = cfg.ec2.images.filter( ImageIds=[ids])
for img in images:
print('Image: id:{0}, architecture:{1}, description:{2}, platform:{3}'.format(img.id,img.architecture,img.description, img.platform ))
@check_profile
def get_instances_list(pattern):
intsances = cfg.ec2.instances.all()
for ins in intsances:
print("Instance: Id: {0} state: {1} type:{1} image id:{2} image_id:{3}".format(ins.id, ins.state, ins.instance_type, ins.image_id))
@check_profile
def create_instance(pattern):
image_id = pattern.image_id[0]
instance_type = pattern.instance_type[0]
instances = cfg.ec2.create_instances(ImageId=image_id, MinCount = 1, MaxCount = 1, InstanceType=instance_type)#m1.small
for ins in instances:
print("Instance: Id: {0} state: {1} type:{2} image id:{3}".format(ins.id, ins.state, ins.instance_type, ins.image_id))
def terminate_instance(pattern):
id=pattern.id[0]
ints = cfg.ec2.Instance(id)
ints.terminate()
print("Instance has been terminated: Id: {0} state: {1} type:{2} image id:{3}".format(ints.id, ints.state, ints.instance_type,ints.image_id))
def stop_instance(pattern):
id=pattern.id[0]
ints = cfg.ec2.Instance(id)
ints.stop()
print("Instance has been stoped: Id: {0} state: {1} type:{2} image id:{3}".format(ints.id, ints.state, ints.instance_type,ints.image_id))
def start_instance(pattern):
id=pattern.id[0]
ints = cfg.ec2.Instance(id)
ints.start()
print("Instance has been started: Id: {0} state: {1} type:{2} image id:{3}".format(ints.id, ints.state, ints.instance_type,ints.image_id)) | mit | -4,472,386,874,889,252,000 | 34.452055 | 146 | 0.634712 | false |
matus-stehlik/glowing-batman | competitions/migrations/0004_auto__add_field_competition_organizer_group.py | 1 | 20964 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Competition.organizer_group'
db.add_column(u'competitions_competition', 'organizer_group',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.Group'], null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Competition.organizer_group'
db.delete_column(u'competitions_competition', 'organizer_group_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'competitions.competition': {
'Meta': {'ordering': "['name']", 'object_name': 'Competition'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'competitions_created'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'competitions_modified'", 'null': 'True', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'organizer_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'})
},
u'competitions.competitionorgregistration': {
'Meta': {'ordering': "['added_at']", 'object_name': 'CompetitionOrgRegistration'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'organizer registration_created'", 'null': 'True', 'to': u"orm['auth.User']"}),
'approved': ('django.db.models.fields.BooleanField', [], {}),
'competition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['competitions.Competition']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'organizer registration_modified'", 'null': 'True', 'to': u"orm['auth.User']"}),
'organizer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.UserProfile']"})
},
u'competitions.competitionuserregistration': {
'Meta': {'ordering': "['added_at']", 'object_name': 'CompetitionUserRegistration'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user registrations_created'", 'null': 'True', 'to': u"orm['auth.User']"}),
'competition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['competitions.Competition']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user registrations_modified'", 'null': 'True', 'to': u"orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['profiles.UserProfile']"})
},
u'competitions.season': {
'Meta': {'ordering': "['competition', 'year', 'number']", 'object_name': 'Season'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'seasons_created'", 'null': 'True', 'to': u"orm['auth.User']"}),
'competition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['competitions.Competition']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'join_deadline': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'seasons_modified'", 'null': 'True', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'number': ('django.db.models.fields.IntegerField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'competitions.series': {
'Meta': {'ordering': "['submission_deadline']", 'unique_together': "(('season', 'number'),)", 'object_name': 'Series'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'series_created'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'series_modified'", 'null': 'True', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'number': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'problemset': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['problems.ProblemSet']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'season': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['competitions.Season']"}),
'submission_deadline': ('django.db.models.fields.DateTimeField', [], {})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'events.event': {
'Meta': {'ordering': "['-start_time', 'end_time']", 'object_name': 'Event'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events_created'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'end_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events_modified'", 'null': 'True', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'registered_org': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'organized_event_set'", 'symmetrical': 'False', 'through': u"orm['events.EventOrgRegistration']", 'to': u"orm['auth.User']"}),
'registered_user': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'through': u"orm['events.EventUserRegistration']", 'symmetrical': 'False'}),
'registration_end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {})
},
u'events.eventorgregistration': {
'Meta': {'ordering': "(u'_order',)", 'unique_together': "(('event', 'organizer'),)", 'object_name': 'EventOrgRegistration'},
'_order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['events.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'organizer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'events.eventuserregistration': {
'Meta': {'ordering': "(u'_order',)", 'unique_together': "(('event', 'user'),)", 'object_name': 'EventUserRegistration'},
'_order': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['events.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'leaflets.leaflet': {
'Meta': {'ordering': "['competition', '-year', 'issue']", 'unique_together': "(('competition', 'year', 'issue'),)", 'object_name': 'Leaflet'},
'competition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['competitions.Competition']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.IntegerField', [], {}),
'leaflet': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'problems.problem': {
'Meta': {'object_name': 'Problem'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'problems_created'", 'null': 'True', 'to': u"orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['problems.ProblemCategory']"}),
'competition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['competitions.Competition']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'problems_modified'", 'null': 'True', 'to': u"orm['auth.User']"}),
'rating_score': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'rating_votes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'blank': 'True'}),
'severity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['problems.ProblemSeverity']"}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'problems.problemcategory': {
'Meta': {'ordering': "['name']", 'object_name': 'ProblemCategory'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'problems.probleminset': {
'Meta': {'ordering': "['position']", 'unique_together': "(['problem', 'problemset'],)", 'object_name': 'ProblemInSet'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'problem': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['problems.Problem']"}),
'problemset': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['problems.ProblemSet']"})
},
u'problems.problemset': {
'Meta': {'object_name': 'ProblemSet'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sets_created'", 'null': 'True', 'to': u"orm['auth.User']"}),
'competition': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['competitions.Competition']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['events.Event']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'leaflet': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['leaflets.Leaflet']", 'null': 'True', 'blank': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sets_modified'", 'null': 'True', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'problems': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['problems.Problem']", 'through': u"orm['problems.ProblemInSet']", 'symmetrical': 'False'})
},
u'problems.problemseverity': {
'Meta': {'ordering': "['level']", 'object_name': 'ProblemSeverity'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.IntegerField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'profiles.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['schools.Address']", 'null': 'True', 'blank': 'True'}),
'classlevel': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'competes': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['competitions.Competition']", 'symmetrical': 'False', 'through': u"orm['competitions.CompetitionUserRegistration']", 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent_phone_number': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'school': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['schools.School']", 'null': 'True', 'blank': 'True'}),
'school_class': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'sex': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'social_security_number': ('django.db.models.fields.CharField', [], {'max_length': '11', 'null': 'True', 'blank': 'True'}),
'state_id_number': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'schools.address': {
'Meta': {'object_name': 'Address'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'postal_number': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
u'schools.school': {
'Meta': {'object_name': 'School'},
'address': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['schools.Address']", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'})
}
}
complete_apps = ['competitions'] | mit | -2,907,174,913,752,145,400 | 87.459916 | 230 | 0.560819 | false |
google-research/google-research | sparse_data/data/sim_test.py | 1 | 7867 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for sim.py in the exp_framework module."""
from absl.testing import absltest
import numpy as np
from sparse_data.data import sim
NUM_DATASET = 5
NUM_REPLICATE = 5
SEED = 2462723
def setUpModule():
np.random.seed(SEED)
class TestSim(absltest.TestCase):
def setUp(self):
super(TestSim, self).setUp()
self.init_method = None # children should define this
  def test_reproducibility(self):
if self.init_method is None:
return
# pylint: disable=not-callable
datasets = [self.init_method() for _ in range(NUM_DATASET)]
    # check reproducibility of the generate() method
for _ in range(NUM_REPLICATE):
xs, ys = [], []
for d in datasets:
d.reset()
x, y = d.generate()
xs.append(x)
ys.append(y)
np.random.randn() # make calls to global np.random RNG
for i in range(NUM_DATASET - 1):
self.assertTrue(np.array_equal(xs[i], xs[i + 1]))
self.assertTrue(np.array_equal(ys[i], ys[i + 1]))
    # check reproducibility of the get() method
for _ in range(NUM_REPLICATE):
x_trains, y_trains, x_tests, y_tests = [], [], [], []
for d in datasets:
d.reset()
x_train, y_train, x_test, y_test = d.get()
x_trains.append(x_train)
y_trains.append(y_train)
x_tests.append(x_test)
y_tests.append(y_test)
np.random.randn() # make calls to global np.random RNG
for i in range(NUM_DATASET - 1):
self.assertTrue(np.array_equal(x_trains[i], x_trains[i + 1]))
self.assertTrue(np.array_equal(y_trains[i], y_trains[i + 1]))
self.assertTrue(np.array_equal(x_tests[i], x_tests[i + 1]))
self.assertTrue(np.array_equal(y_tests[i], y_tests[i + 1]))
class TestLinear(TestSim):
def setUp(self):
super(TestLinear, self).setUp()
self.init_method = sim.LinearSimulation
def test_shape(self):
num_sample = np.random.randint(10, 20)
num_feature = np.random.randint(10, 20)
problem = 'classification'
for _ in range(NUM_REPLICATE):
d = self.init_method(
num_sample=num_sample, num_feature=num_feature, problem=problem)
d.reset()
x, y = d.generate()
self.assertEqual(x.shape, (num_sample, num_feature))
self.assertEqual(y.shape, (num_sample,))
def test_sparsity(self):
num_sample = 1000
num_feature = 10
problem = 'classification'
for _ in range(NUM_REPLICATE):
prop_nonzero = np.random.uniform(0.2, 0.8)
d = self.init_method(
num_sample=num_sample,
num_feature=num_feature,
prop_nonzero=prop_nonzero,
problem=problem)
d.reset()
x, _ = d.generate()
observed_prop_nonzero = np.true_divide(np.sum(x > 0), np.size(x))
self.assertLess(
np.abs(observed_prop_nonzero - prop_nonzero), 0.1 * prop_nonzero)
class TestCardinality(TestLinear):
def setUp(self):
super(TestCardinality, self).setUp()
self.init_method = sim.CardinalitySimulation
def test_shape(self):
num_sample = np.random.randint(10, 20)
num_feature = np.random.randint(10, 20) * 2 # should be even
problem = 'classification'
for _ in range(NUM_REPLICATE):
d = self.init_method(
num_sample=num_sample, num_feature=num_feature, problem=problem)
d.reset()
x, y = d.generate()
self.assertEqual(x.shape, (num_sample, num_feature))
self.assertEqual(y.shape, (num_sample,))
def test_sparsity(self):
pass
class TestSparsity(TestCardinality):
def setUp(self):
super(TestSparsity, self).setUp()
self.init_method = sim.SparsitySimulation
def test_sparsity(self):
num_sample = 1000
num_feature = 50
problem = 'classification'
for _ in range(NUM_REPLICATE):
prop_nonzero = np.random.uniform(0.2, 0.8)
d = self.init_method(
num_sample=num_sample,
num_feature=num_feature,
prop_nonzero=prop_nonzero,
problem=problem)
d.reset()
x, _ = d.generate()
x_inf = x[:, :int(num_feature / 2)]
observed_prop_nonzero = np.true_divide(np.sum(x_inf > 0), np.size(x_inf))
self.assertLess(
np.abs(observed_prop_nonzero - prop_nonzero), 0.1 * prop_nonzero)
class TestMultiplicative(TestLinear):
def setUp(self):
super(TestMultiplicative, self).setUp()
self.init_method = sim.MultiplicativeSimulation
def test_shape(self):
orders = range(1, 10)
problem = 'classification'
for _ in range(NUM_REPLICATE):
num_sample = np.random.randint(10, 20)
num_group_per_order = np.random.randint(10, 20)
num_feature = np.sum([o * num_group_per_order for o in orders],
                           dtype=int)
d = self.init_method(
num_sample=num_sample,
num_feature=num_feature,
orders=orders,
problem=problem)
d.reset()
x, y = d.generate()
self.assertEqual(x.shape, (num_sample, num_feature))
self.assertEqual(y.shape, (num_sample,))
def test_sparsity(self):
num_sample = 1000
num_group_per_order = 10
orders = range(1, 10)
problem = 'classification'
num_feature = np.sum([o * num_group_per_order for o in orders],
dtype=np.int)
for _ in range(NUM_REPLICATE):
prop_nonzero = np.random.uniform(0.2, 0.8)
d = self.init_method(
num_sample=num_sample,
num_feature=num_feature,
orders=orders,
prop_nonzero=prop_nonzero,
problem=problem)
d.reset()
x, _ = d.generate()
observed_prop_nonzero = np.true_divide(np.sum(x > 0), np.size(x))
self.assertLess(
np.abs(observed_prop_nonzero - prop_nonzero), 0.1 * prop_nonzero)
class TestXOR(TestLinear):
def setUp(self):
super(TestXOR, self).setUp()
self.init_method = sim.XORSimulation
def test_shape(self):
problem = 'classification'
for _ in range(NUM_REPLICATE):
num_sample = np.random.randint(10, 20)
num_features = 2 * np.random.randint(10, 20)
d = self.init_method(
num_sample=num_sample, num_feature=num_features, problem=problem)
d.reset()
x, y = d.generate()
self.assertEqual(x.shape, (num_sample, num_features))
self.assertEqual(y.shape, (num_sample,))
def test_sparsity(self):
num_sample = 1000
num_pair = 10
problem = 'classification'
for _ in range(NUM_REPLICATE):
prop_nonzero = np.random.uniform(0.2, 0.8)
d = self.init_method(
num_sample=num_sample,
          num_feature=2 * num_pair,  # two features per XOR pair, matching test_shape
prop_nonzero=prop_nonzero,
problem=problem)
d.reset()
x, _ = d.generate()
observed_prop_nonzero = np.true_divide(np.sum(x > 0), np.size(x))
self.assertLess(
np.abs(observed_prop_nonzero - prop_nonzero), 0.1 * prop_nonzero)
class TestFunctions(absltest.TestCase):
def test_continuous_to_binary(self):
# TODO(jisungkim) add more tests here
y = [0, 1, 2, 3, 4, 5]
exp_y_squashed = [0, 0, 0, 1, 1, 1]
self.assertTrue(
np.array_equal(exp_y_squashed,
sim.continuous_to_binary(y, squashing='linear')))
if __name__ == '__main__':
absltest.main()
| apache-2.0 | 3,414,960,622,290,468,000 | 28.575188 | 79 | 0.620313 | false |
ReactiveX/RxPY | rx/core/operators/buffer.py | 1 | 1960 | from typing import Callable, Optional, Any
from rx import operators as ops
from rx.core import Observable, pipe
def _buffer(boundaries: Observable) -> Callable[[Observable], Observable]:
return pipe(
ops.window(boundaries),
ops.flat_map(pipe(ops.to_iterable(), ops.map(list)))
)
def _buffer_when(closing_mapper: Callable[[], Observable]) -> Callable[[Observable], Observable]:
return pipe(
ops.window_when(closing_mapper),
ops.flat_map(pipe(ops.to_iterable(), ops.map(list)))
)
def _buffer_toggle(openings: Observable,
closing_mapper: Callable[[Any], Observable]
) -> Callable[[Observable], Observable]:
return pipe(
ops.window_toggle(openings, closing_mapper),
ops.flat_map(pipe(ops.to_iterable(), ops.map(list)))
)
def _buffer_with_count(count: int, skip: Optional[int] = None) -> Callable[[Observable], Observable]:
"""Projects each element of an observable sequence into zero or more
buffers which are produced based on element count information.
Examples:
>>> res = buffer_with_count(10)(xs)
>>> res = buffer_with_count(10, 1)(xs)
Args:
count: Length of each buffer.
skip: [Optional] Number of elements to skip between
creation of consecutive buffers. If not provided, defaults to
the count.
Returns:
A function that takes an observable source and returns an
observable sequence of buffers.
"""
def buffer_with_count(source: Observable) -> Observable:
nonlocal skip
if skip is None:
skip = count
def mapper(value):
return value.pipe(ops.to_iterable(), ops.map(list))
def predicate(value):
return len(value) > 0
return source.pipe(ops.window_with_count(count, skip), ops.flat_map(mapper), ops.filter(predicate))
return buffer_with_count
| mit | -4,276,155,024,958,729,700 | 30.612903 | 107 | 0.632143 | false |
mabaer/forensic_tools | natted_ip_detection.py | 1 | 2784 | #!/usr/bin/python
# This script prints the IPs that are likely natted using the TCP timestamp option.
# The script invokes tshark and gets as parameter a pcap file.
# Copyright 2015 Marc-André Bär
import sys
import subprocess
listIPs=[]
listNattedIPs=[]
#Returns number of frames
def getFrameCount(frames):
return frames.count('\n')
#Checks an ip
def checkIP(ip, frames, count):
global listNattedIPs
avgRelation=0.0
relation=0.0
lasttime=0
lastTSval=0
c=0
for i in range(count):
frame=frames[i].split()
if frame[0] == ip:
#If it is the first frame just set values
if lasttime == 0:
lasttime=frame[1]
lastTSval=frame[2]
else:
#If it is a frame bigger 2 check if the relation fits to the average
if(c>0 and lastTSval != frame[2]):
relation = float((float(frame[1])-float(lasttime)))/(float(float(frame[2])-float(lastTSval)))
#Check if relation fits with the average
absdiff=abs(relation-avgRelation)
#More Finetuning possible if necessary.
maxdiff=avgRelation
#If the difference is bigger than the allowed max or a negativ relation is measured the ip will be marked as natted
if(absdiff > maxdiff or relation < 0):
#if the timestamp is to close to the maximumg (4294967296) we will not count the fraame because an overflow would lead to wrong calculations
if(frame[2] < 4294966796 or frame[2] > 500):
listNattedIPs.append(ip)
break
#Update average
c += 1
avgRelation=float(avgRelation*float((c-1)/float(c))+relation*float(1/float(c)))
#If it is the second frame just calculate the relation
elif lastTSval != frame[2]:
c += 1
avgRelation = float((float(frame[1])-float(lasttime)))/(float(float(frame[2])-float(lastTSval)))
#Update last values
lasttime=frame[1]
lastTSval=frame[2]
def main():
if len (sys.argv) != 2 :
print "Use as argument PCAP_File"
sys.exit (1)
filename = sys.argv[1]
#Receive necessary data from the pcap file
#tshark -r capture.pcap -e ip.src -e frame.time_relative -e tcp.options.timestamp.tsval -T fields
process = subprocess.Popen(['tshark','-r',filename, '-e', 'ip.src', '-e', 'frame.time_relative', '-e', 'tcp.options.timestamp.tsval', '-T', 'fields'],stdout=subprocess.PIPE)
output = process.communicate()[0]
frames = output.splitlines()
count=getFrameCount(output)
#Iterate over frames
for i in range(count):
frame=frames[i].split()
#If IP adress was not checked yet => Check it
if frame[0] not in listIPs:
listIPs.append(frame[0])
checkIP(frame[0], frames, count)
print("The natted IP adresses are:")
print '\n'.join(listNattedIPs)
if __name__ == '__main__':
main() | gpl-2.0 | -7,275,602,389,794,584,000 | 33.8125 | 177 | 0.663075 | false |
vlegoff/tsunami | src/primaires/salle/commandes/etendue/cote_ajouter.py | 1 | 3227 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'ajouter' de la commande 'étendue côte'."""
from primaires.interpreteur.masque.parametre import Parametre
from primaires.interpreteur.masque.exceptions.erreur_interpretation import \
ErreurInterpretation
class PrmCoteAjouter(Parametre):
"""Commande 'étendue côte ajouter'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "ajouter", "add")
self.schema = "<cle>"
self.aide_courte = "ajoute la salle comme côte de l'étendue"
self.aide_longue = \
"Cette commande permet d'ajouter la salle où vous vous " \
"trouvez comme côte de l'étendue précisée en paramètre."
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
cle = dic_masques["cle"].cle
# On vérifie que la clé est une étendue
try:
etendue = type(self).importeur.salle.etendues[cle]
except KeyError:
personnage << "|err|Cette étendue {} n'existe pas.|ff|".format(cle)
else:
salle = personnage.salle
if salle.coords.invalide:
personnage << "|err|Cette salle n'a pas de coordonnées " \
"valide.|ff|"
elif salle.coords in etendue:
personnage << "|err|Ce point existe déjà dans l'étendue.|ff|"
else:
etendue.ajouter_cote(salle)
personnage << \
"La salle {} est une côte de l'étendue {}.".format(
salle.ident, etendue.cle)
| bsd-3-clause | -3,525,610,655,706,161,000 | 44.084507 | 79 | 0.676976 | false |
shilad/geo-provenance-viz | data/scripts/create_domains.py | 1 | 4973 | __author__ = 'shilad'
import collections
import io
import json
import marshal
import os
import subprocess
import sys
import tldextract
import traceback
import urllib2
def warn(message):
sys.stderr.write(message + '\n')
def open_bz2(path):
DEVNULL = open(os.devnull, 'w')
p = subprocess.Popen(["pbzcat", path], stdout = subprocess.PIPE, stderr = DEVNULL)
return io.open(p.stdout.fileno(), 'r', encoding='utf-8')
def url2registereddomain(url):
host = url2host(url)
parts = tldextract.extract(host)
return parts.registered_domain
def url2host(url):
# if not url.startswith('http:') and not url.startswith('https:') and not url.startswith('ftp://'):
# url = 'http://' + url
return urllib2.urlparse.urlparse(url).netloc
ALT_NAMES = {
'Myanmar': 'Burma',
'French Southern Territories': 'French Southern and Antarctic Lands',
'Saint Helena': 'Saint Helena, Ascension and Tristan da Cunha',
'Pitcairn': 'Pitcairn Islands',
'Vatican': 'Vatican City',
'Micronesia': 'Federated States of Micronesia',
'Macedonia': 'Republic of Macedonia',
'Bahamas': 'The Bahamas',
'Georgia': 'Georgia (country)',
'Ireland': 'Republic of Ireland',
'Palestinian Territory': 'Palestine',
'Macao': 'Macau',
'U.S. Virgin Islands': 'United States Virgin Islands',
'Gambia': 'The Gambia'
}
TITLE_MAPPING = {}
def title2iso(title):
global TITLE_MAPPING
global ALT_NAMES
if not TITLE_MAPPING:
for line in io.open('../raw/geonames.txt', encoding='utf-8'):
tokens = line.split('\t')
iso = tokens[0].strip().lower()
t = tokens[4]
TITLE_MAPPING[t + ' (en)'] = iso
if t in ALT_NAMES:
TITLE_MAPPING[ALT_NAMES[t] + ' (en)'] = iso
return TITLE_MAPPING.get(title)
def write_top_domains():
make_key = lambda *parts: intern('@@'.join(parts).encode('ascii', 'ignore'))
counts = collections.defaultdict(collections.Counter)
inferred = read_inferred_urls()
for record in read_urls():
url = record['url']
if url not in inferred: continue
lang = record['language']
acountry = title2iso(record['countryTitle'])
if not acountry: continue
domain = record['effectiveDomain2']
scountry = inferred[url]
keys = [
make_key('all', 'all', 'all'),
make_key('all', 'all', scountry),
make_key(lang, 'all', 'all'),
make_key('all', acountry, 'all'),
make_key('all', acountry, scountry),
make_key(lang, 'all', scountry),
make_key(lang, acountry, 'all'),
make_key(lang, acountry, scountry),
]
for k in keys:
counts[k][domain] += 1
for key in counts:
(lang, acountry, scountry) = key.split('@@')
path_dir = '../results/sources/domains/%s/%s' % (lang, acountry)
path = '%s/%s.js' % (path_dir, scountry)
if not os.path.exists(path_dir):
os.makedirs(path_dir)
f = io.open(path, 'w', encoding='utf-8')
f.write(u'[\n')
top_total = 0
for (i, (domain, count)) in enumerate(counts[key].most_common(100)):
f.write(u'%s,\n' % json.dumps([domain, count], f))
top_total += count
total = sum(counts[key].values())
f.write(u'%s\n]\n' % json.dumps(['other', total - top_total], f))
def read_urls():
f = open_bz2('../raw/source_urls.tsv.bz2')
fields = None
for (i, line) in enumerate(f):
if i % 100000 == 0:
warn('processing url %d' % i)
tokens = [f.strip() for f in line.split('\t')]
if not fields:
fields = tokens
elif len(fields) != len(tokens):
warn('invalid line: %s' % `line`)
else:
yield dict(zip(fields, tokens))
def read_inferred_urls(threshold=0.85):
p = '../cache/inferred_urls.%s.bin' % threshold
if os.path.isfile(p):
f = open(p, 'rb')
result = marshal.load(f)
f.close()
warn('loaded %d from cache file %s' % (len(result), p))
return result
f = open_bz2('../raw/web-viz.results.txt.bz2')
n = 0
results = {}
for line in f:
n += 1
if n % 10000 == 0:
warn('doing line %d, size of dict is' % n)
try:
tokens = line.split('\t')
(url, conf, dist) = tokens[:3]
if not dist: continue
dist = eval(dist)
top_val = max(dist.values())
if top_val > threshold:
top_country = [c for c in dist.keys() if dist[c] == top_val][0]
results[url] = top_country
except:
warn('error processing line ' + `line`)
traceback.print_exc()
f.close()
f = open(p, 'wb')
marshal.dump(results, f)
f.close()
return results
if __name__ == '__main__':
write_top_domains()
| apache-2.0 | 7,937,624,274,821,008,000 | 30.08125 | 103 | 0.556405 | false |
anurag03/integration_tests | cfme/tests/cloud_infra_common/test_genealogy.py | 1 | 3345 | # -*- coding: utf-8 -*-
import pytest
from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.common.provider import BaseProvider
from cfme.infrastructure.provider.rhevm import RHEVMProvider
from cfme.infrastructure.provider.scvmm import SCVMMProvider
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.generators import random_vm_name
from cfme.utils.log import logger
from cfme.utils.providers import ProviderFilter
from cfme.markers.env_markers.provider import providers
pytestmark = [
pytest.mark.usefixtures('uses_infra_providers', 'uses_cloud_providers', 'provider'),
pytest.mark.tier(2),
pytest.mark.provider(
gen_func=providers,
filters=[ProviderFilter(classes=[BaseProvider]),
ProviderFilter(classes=[SCVMMProvider, RHEVMProvider], inverted=True)],
scope='module'),
]
@pytest.fixture(scope="function")
def vm_crud(provider, small_template):
collection = provider.appliance.provider_based_collection(provider)
return collection.instantiate(random_vm_name(context='genealogy'),
provider,
template_name=small_template.name)
# uncollected above in pytest_generate_tests
@pytest.mark.meta(blockers=["GH#ManageIQ/manageiq:473"])
@pytest.mark.parametrize("from_edit", [True, False], ids=["via_edit", "via_summary"])
@test_requirements.genealogy
@pytest.mark.uncollectif(
lambda provider, from_edit: provider.one_of(CloudProvider) and not from_edit)
def test_vm_genealogy_detected(
request, setup_provider, provider, small_template, soft_assert, from_edit, vm_crud):
"""Tests vm genealogy from what CFME can detect.
Prerequisities:
* A provider that is set up and having suitable templates for provisioning.
Steps:
* Provision the VM
* Then, depending on whether you want to check it via ``Genealogy`` or edit page:
* Open the edit page of the VM and you can see the parent template in the dropdown.
Assert that it corresponds with the template the VM was deployed from.
* Open VM Genealogy via details page and see the the template being an ancestor of the
VM.
Note:
The cloud providers appear to not have Genealogy option available in the details view. So
the only possibility available is to do the check via edit form.
Metadata:
test_flag: genealogy, provision
"""
vm_crud.create_on_provider(find_in_cfme=True, allow_skip="default")
request.addfinalizer(lambda: vm_crud.cleanup_on_provider())
vm_crud.mgmt.wait_for_steady_state()
if from_edit:
vm_crud.open_edit()
view = navigate_to(vm_crud, 'Edit')
opt = view.form.parent_vm.all_selected_options[0]
parent = opt.strip()
assert parent.startswith(small_template.name), "The parent template not detected!"
else:
try:
vm_crud_ancestors = vm_crud.genealogy.ancestors
except NameError:
logger.exception("The parent template not detected!")
raise pytest.fail("The parent template not detected!")
assert small_template.name in vm_crud_ancestors, \
"{} is not in {}'s ancestors".format(small_template.name, vm_crud.name)
| gpl-2.0 | 2,607,156,042,699,554,300 | 40.8125 | 98 | 0.690882 | false |
migueldiascosta/pymatgen | pymatgen/util/io_utils.py | 1 | 3178 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import unicode_literals
"""
This module provides utility classes for io operations.
"""
__author__ = "Shyue Ping Ong, Rickard Armiento, Anubhav Jain, G Matteo, Ioannis Petousis"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__status__ = "Production"
__date__ = "Sep 23, 2011"
import re
from monty.io import zopen
def prompt(question):
import six
# Fix python 2.x.
if six.PY2:
my_input = raw_input
else:
my_input = input
return my_input(question)
def ask_yesno(question, default=True):
try:
answer = prompt(question)
return answer.lower().strip() in ["y", "yes"]
except EOFError:
return default
def clean_lines(string_list, remove_empty_lines=True):
"""
Strips whitespace, carriage returns and empty lines from a list of strings.
Args:
string_list: List of strings
remove_empty_lines: Set to True to skip lines which are empty after
stripping.
Returns:
List of clean strings with no whitespaces.
"""
for s in string_list:
clean_s = s
if '#' in s:
ind = s.index('#')
clean_s = s[:ind]
clean_s = clean_s.strip()
if (not remove_empty_lines) or clean_s != '':
yield clean_s
def micro_pyawk(filename, search, results=None, debug=None, postdebug=None):
"""
Small awk-mimicking search routine.
'file' is file to search through.
'search' is the "search program", a list of lists/tuples with 3 elements;
i.e. [[regex,test,run],[regex,test,run],...]
'results' is a an object that your search program will have access to for
storing results.
Here regex is either as a Regex object, or a string that we compile into a
Regex. test and run are callable objects.
This function goes through each line in filename, and if regex matches that
line *and* test(results,line)==True (or test == None) we execute
run(results,match),where match is the match object from running
Regex.match.
The default results is an empty dictionary. Passing a results object let
you interact with it in run() and test(). Hence, in many occasions it is
thus clever to use results=self.
Author: Rickard Armiento, Ioannis Petousis
Returns:
results
"""
if results is None:
results = {}
# Compile strings into regexs
for entry in search:
entry[0] = re.compile(entry[0])
with zopen(filename, "rt") as f:
for line in f:
for entry in search:
match = re.search(entry[0], line)
if match and (entry[1] is None
or entry[1](results, line)):
if debug is not None:
debug(results, match)
entry[2](results, match)
if postdebug is not None:
postdebug(results, match)
return results
| mit | 2,114,022,318,312,449,800 | 27.630631 | 89 | 0.610132 | false |
dltn/tortellini | server-firmware/Processing/DataPusher/oauthlib/oauth2/rfc6749/clients/mobile_application.py | 1 | 9156 | # -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from __future__ import absolute_import, unicode_literals
from .base import Client
from ..parameters import prepare_grant_uri
from ..parameters import parse_implicit_response
class MobileApplicationClient(Client):
"""A public client utilizing the implicit code grant workflow.
A user-agent-based application is a public client in which the
client code is downloaded from a web server and executes within a
user-agent (e.g. web browser) on the device used by the resource
owner. Protocol data and credentials are easily accessible (and
often visible) to the resource owner. Since such applications
reside within the user-agent, they can make seamless use of the
user-agent capabilities when requesting authorization.
The implicit grant type is used to obtain access tokens (it does not
support the issuance of refresh tokens) and is optimized for public
clients known to operate a particular redirection URI. These clients
are typically implemented in a browser using a scripting language
such as JavaScript.
As a redirection-based flow, the client must be capable of
interacting with the resource owner's user-agent (typically a web
browser) and capable of receiving incoming requests (via redirection)
from the authorization server.
Unlike the authorization code grant type in which the client makes
separate requests for authorization and access token, the client
receives the access token as the result of the authorization request.
The implicit grant type does not include client authentication, and
relies on the presence of the resource owner and the registration of
the redirection URI. Because the access token is encoded into the
redirection URI, it may be exposed to the resource owner and other
applications residing on the same device.
"""
def prepare_request_uri(self, uri, redirect_uri=None, scope=None,
state=None, **kwargs):
"""Prepare the implicit grant request URI.
The client constructs the request URI by adding the following
parameters to the query component of the authorization endpoint URI
using the "application/x-www-form-urlencoded" format, per `Appendix B`_:
:param redirect_uri: OPTIONAL. The redirect URI must be an absolute URI
and it should have been registerd with the OAuth
provider prior to use. As described in `Section 3.1.2`_.
:param scope: OPTIONAL. The scope of the access request as described by
Section 3.3`_. These may be any string but are commonly
URIs or various categories such as ``videos`` or ``documents``.
:param state: RECOMMENDED. An opaque value used by the client to maintain
state between the request and callback. The authorization
server includes this value when redirecting the user-agent back
to the client. The parameter SHOULD be used for preventing
cross-site request forgery as described in `Section 10.12`_.
:param kwargs: Extra arguments to include in the request URI.
In addition to supplied parameters, OAuthLib will append the ``client_id``
that was provided in the constructor as well as the mandatory ``response_type``
argument, set to ``token``::
>>> from oauthlib.oauth2 import MobileApplicationClient
>>> client = MobileApplicationClient('your_id')
>>> client.prepare_request_uri('https://example.com')
'https://example.com?client_id=your_id&response_type=token'
>>> client.prepare_request_uri('https://example.com', redirect_uri='https://a.b/callback')
'https://example.com?client_id=your_id&response_type=token&redirect_uri=https%3A%2F%2Fa.b%2Fcallback'
>>> client.prepare_request_uri('https://example.com', scope=['profile', 'pictures'])
'https://example.com?client_id=your_id&response_type=token&scope=profile+pictures'
>>> client.prepare_request_uri('https://example.com', foo='bar')
'https://example.com?client_id=your_id&response_type=token&foo=bar'
.. _`Appendix B`: http://tools.ietf.org/html/rfc6749#appendix-B
.. _`Section 2.2`: http://tools.ietf.org/html/rfc6749#section-2.2
.. _`Section 3.1.2`: http://tools.ietf.org/html/rfc6749#section-3.1.2
.. _`Section 3.3`: http://tools.ietf.org/html/rfc6749#section-3.3
.. _`Section 10.12`: http://tools.ietf.org/html/rfc6749#section-10.12
"""
return prepare_grant_uri(uri, self.client_id, 'token',
redirect_uri=redirect_uri, state=state, scope=scope, **kwargs)
def parse_request_uri_response(self, uri, state=None, scope=None):
"""Parse the response URI fragment.
If the resource owner grants the access request, the authorization
server issues an access token and delivers it to the client by adding
the following parameters to the fragment component of the redirection
URI using the "application/x-www-form-urlencoded" format:
:param uri: The callback URI that resulted from the user being redirected
back from the provider to you, the client.
:param state: The state provided in the authorization request.
:param scope: The scopes provided in the authorization request.
:return: Dictionary of token parameters.
:raises: Warning if scope has changed. OAuth2Error if response is invalid.
A successful response should always contain
**access_token**
The access token issued by the authorization server. Often
a random string.
**token_type**
The type of the token issued as described in `Section 7.1`_.
Commonly ``Bearer``.
**state**
If you provided the state parameter in the authorization phase, then
the provider is required to include that exact state value in the
response.
While it is not mandated it is recommended that the provider include
**expires_in**
The lifetime in seconds of the access token. For
example, the value "3600" denotes that the access token will
expire in one hour from the time the response was generated.
If omitted, the authorization server SHOULD provide the
expiration time via other means or document the default value.
**scope**
Providers may supply this in all responses but are required to only
if it has changed since the authorization request.
A few example responses can be seen below::
>>> response_uri = 'https://example.com/callback#access_token=sdlfkj452&state=ss345asyht&token_type=Bearer&scope=hello+world'
>>> from oauthlib.oauth2 import MobileApplicationClient
>>> client = MobileApplicationClient('your_id')
>>> client.parse_request_uri_response(response_uri)
{
'access_token': 'sdlfkj452',
'token_type': 'Bearer',
'state': 'ss345asyht',
'scope': [u'hello', u'world']
}
>>> client.parse_request_uri_response(response_uri, state='other')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "oauthlib/oauth2/rfc6749/__init__.py", line 598, in parse_request_uri_response
**scope**
File "oauthlib/oauth2/rfc6749/parameters.py", line 197, in parse_implicit_response
raise ValueError("Mismatching or missing state in params.")
ValueError: Mismatching or missing state in params.
>>> client.parse_request_uri_response(response_uri, scope=['other'])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "oauthlib/oauth2/rfc6749/__init__.py", line 598, in parse_request_uri_response
**scope**
File "oauthlib/oauth2/rfc6749/parameters.py", line 199, in parse_implicit_response
validate_token_parameters(params, scope)
File "oauthlib/oauth2/rfc6749/parameters.py", line 285, in validate_token_parameters
raise Warning("Scope has changed to %s." % new_scope)
Warning: Scope has changed to [u'hello', u'world'].
.. _`Section 7.1`: http://tools.ietf.org/html/rfc6749#section-7.1
.. _`Section 3.3`: http://tools.ietf.org/html/rfc6749#section-3.3
"""
self.token = parse_implicit_response(uri, state=state, scope=scope)
self._populate_attributes(self.token)
return self.token
| mit | -9,126,146,475,474,549,000 | 50.728814 | 137 | 0.6481 | false |
jens-a-e/staalpiplayer | dmx/__init__.py | 1 | 2151 | import sys
import serial
import struct
import numpy
import time
import math
import random
class DMXDevice(object):
DEBUG = False
def __init__(self, start, length):
self.start, self.length = start, length
if start < 1:
print "DMX Channels must start at least at 1!"
self.start = 1
self.blackout()
def set(self, chan, value):
"""set the value of this channel to value
(Remember, that DMX channels in start at 1)"""
if chan >= 1 and chan <= self.length:
self.values[chan-1] = value
else:
if self.DEBUG is not None and self.DEBUG is True:
print "DMX Device debug: Channel "+str(chan)+" not in range!"
def blackout(self):
self.values = [0] * self.length
def pack(self, buf):
"""modify the passed buffer in place"""
for index in range(self.length):
buf[self.start+index] = self.values[index]
def __str__(self):
return "<DMXDevice start=%d, length=%d>" % (self.start, self.length)
class DMXManager(object):
def __init__(self, port, max_channels = 512):
self.MAX_CHANNELS = max_channels
self.UNIVERSE = 1
self.SEND_LABEL = 6
self.s = serial.Serial(port,57600)
self.buf = numpy.zeros((self.MAX_CHANNELS + self.UNIVERSE,), dtype='B')
self.devices = []
def append(self, device):
self.devices.append(device)
def blackout(self):
for device in self.devices:
device.blackout()
self.send()
def send(self):
for device in self.devices:
device.pack(self.buf)
l = len(self.buf)
msg = struct.pack("<BBH "+str(l)+"s B",
0x7e, self.SEND_LABEL, l,
self.buf.tostring(),
0xe7
)
self.s.write(msg)
if __name__=='__main__':
port = sys.argv[1]
manager = DMXManager(port)
light_0 = DMXDevice(start=25, length=6)
light_1 = DMXDevice(start=1, length=6)
manager.append(light_0)
manager.append(light_1)
while True:
intensity = 128*math.sin(time.time())+128
light_0.set(0, int(intensity))
light_1.set(1, int(intensity))
#for light in light_0, light_1:
# for color in range(3):
# light.set(color, random.randintil.com(0, 255))
manager.send()
| gpl-3.0 | -5,255,746,908,915,081,000 | 25.231707 | 75 | 0.630404 | false |
cool-RR/cute-wing-stuff | scripts/push_line_to_end.py | 1 | 3545 | # Copyright 2009-2014 Ram Rachum.
# This program is distributed under the MIT license.
from __future__ import with_statement
import os.path, sys
sys.path += [
os.path.dirname(__file__),
os.path.join(os.path.dirname(__file__), 'third_party.zip'),
]
from python_toolbox import string_tools
import wingapi
import shared
def _get_n_identical_edge_characters(string, character=None, head=True):
'''
Get the number of identical characters at `string`'s head.
For example, the result for 'qqqwe' would be `3`, while the result for
'meow' will be `1`.
Specify `character` to only consider that character; if a different
character is found at the head, `0` will be returned.
Specify `head=False` to search the tail instead of the head.
'''
if not string:
return 0
index = 0 if head is True else -1
direction = 1 if head is True else -1
if character is None:
character = string[index]
else:
assert isinstance(character, basestring) and len(character) == 1
for i, c in enumerate(string[::direction]):
if c != character:
return i
else:
return len(string)
def push_line_to_end(editor=wingapi.kArgEditor, line_offset=0):
'''
Push the current line to the end, aligning it to right border of editor.
This inserts or deletes as many spaces as necessary from the beginning of
the line to make the end of the line exactly coincide with the right border
of the editor. (Whose width can be configured in Wing's "Preferences" ->
"Line Wrapping" -> "Reformatting Wrap Column".)
This is useful for creating lines of this style:
if first_long_condition(foo, foobar) and \
second_long_condition(fubaz, bazbar):
Also deletes trailing spaces.
Suggested key combination: `Insert End`
'''
assert isinstance(editor, wingapi.CAPIEditor)
document = editor.GetDocument()
assert isinstance(document, wingapi.CAPIDocument)
position, _ = editor.GetSelection()
line = document.GetLineNumberFromPosition(position) + line_offset
line_start = document.GetLineStart(line)
line_end = document.GetLineEnd(line)
line_content = document.GetCharRange(line_start, line_end)
n_trailing_spaces = _get_n_identical_edge_characters(line_content,
character=' ',
head=False)
current_line_length = line_end - line_start
n_spaces_to_add = \
wingapi.gApplication.GetPreference('edit.text-wrap-column') - \
current_line_length + n_trailing_spaces
with shared.UndoableAction(document):
document.DeleteChars(line_end - n_trailing_spaces, line_end - 1)
if n_spaces_to_add == 0:
return
elif n_spaces_to_add > 0:
string_to_insert = (' ' * n_spaces_to_add)
document.InsertChars(line_start, string_to_insert)
else:
assert n_spaces_to_add < 0
n_spaces_to_delete = min(
-n_spaces_to_add,
string_tools.get_n_identical_edge_characters(
line_content,
character=' '
)
)
document.DeleteChars(
line_start,
line_start + (n_spaces_to_delete - 1)
)
| mit | 3,883,187,155,571,205,000 | 34.45 | 79 | 0.590409 | false |
amadeusproject/amadeuslms | bulletin/serializers.py | 1 | 12627 | """
Copyright 2016, 2017 UFPE - Universidade Federal de Pernambuco
Este arquivo é parte do programa Amadeus Sistema de Gestão de Aprendizagem, ou simplesmente Amadeus LMS
O Amadeus LMS é um software livre; você pode redistribui-lo e/ou modifica-lo dentro dos termos da Licença Pública Geral GNU como publicada pela Fundação do Software Livre (FSF); na versão 2 da Licença.
Este programa é distribuído na esperança que possa ser útil, mas SEM NENHUMA GARANTIA; sem uma garantia implícita de ADEQUAÇÃO a qualquer MERCADO ou APLICAÇÃO EM PARTICULAR. Veja a Licença Pública Geral GNU para maiores detalhes.
Você deve ter recebido uma cópia da Licença Pública Geral GNU, sob o título "LICENSE", junto com este programa, se não, escreva para a Fundação do Software Livre (FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
"""
import os
import zipfile
import time
from django.conf import settings
from django.core.files import File
from rest_framework import serializers
from django.shortcuts import get_object_or_404
from subjects.serializers import TagSerializer
from topics.serializers import TopicSerializer
from pendencies.serializers import PendenciesSerializer
from students_group.serializers import StudentsGroupSerializer
from users.serializers import UserBackupSerializer
from subjects.models import Tag, Subject
from topics.models import Topic, Resource
from pendencies.models import Pendencies
from students_group.models import StudentsGroup
from log.models import Log
from users.models import User
from .models import Bulletin
class SimpleBulletinSerializer(serializers.ModelSerializer):
topic = TopicSerializer('get_subject')
tags = TagSerializer(many = True)
pendencies_resource = PendenciesSerializer(many = True)
indicators = serializers.CharField(required = False, allow_blank = True, max_length = 255)
file_content = serializers.CharField(required = False, allow_blank = True, max_length = 255)
def get_subject(self, obj):
subject = self.context.get("subject", None)
return subject
def validate(self, data):
files = self.context.get('files', None)
if files:
if data["file_content"] in files.namelist():
file_path = os.path.join(settings.MEDIA_ROOT, data["file_content"])
if os.path.isfile(file_path):
dst_path = os.path.join(settings.MEDIA_ROOT, "tmp")
path = files.extract(data["file_content"], dst_path)
new_name = "goal_" + str(time.time()) + os.path.splitext(data["file_content"])[1]
new_path = os.path.join("bulletin", os.path.join("goals", new_name))
os.rename(os.path.join(dst_path, path), os.path.join(settings.MEDIA_ROOT, new_path))
data["file_content"] = new_path
else:
path = files.extract(data["file_content"], settings.MEDIA_ROOT)
else:
data["file_content"] = None
if data["indicators"] in files.namelist():
file_path = os.path.join(settings.MEDIA_ROOT, data["indicators"])
if os.path.isfile(file_path):
dst_path = os.path.join(settings.MEDIA_ROOT, "tmp")
path = files.extract(data["indicators"], dst_path)
new_name = "ind_" + str(time.time()) + os.path.splitext(data["indicators"])[1]
new_path = os.path.join("bulletin", os.path.join("indicators", new_name))
os.rename(os.path.join(dst_path, path), os.path.join(settings.MEDIA_ROOT, new_path))
data["indicators"] = new_path
else:
path = files.extract(data["indicators"], settings.MEDIA_ROOT)
else:
data["indicators"] = None
else:
data["file_content"] = None
data["indicators"] = None
return data
class Meta:
model = Bulletin
extra_kwargs = {
"tags": {
"validators": [],
},
}
exclude = ('students', 'groups',)
validators = []
def create(self, data):
topic = data['topic']
bulletin = None
if not topic["id"] is None:
if "subject" in topic:
r_exits = Resource.objects.filter(topic__subject = topic["subject"], name__unaccent__iexact = data["name"])
else:
r_exits = Resource.objects.filter(topic__subject__id = topic["subject_id"], name__unaccent__iexact = data["name"])
if not r_exits.exists():
if topic['id'] == "":
topic_exist = Topic.objects.filter(subject = topic['subject'], name__unaccent__iexact = topic["name"])
if topic_exist.exists():
topic = topic_exist[0]
else:
topic = Topic.objects.create(name = topic['name'], subject = topic['subject'], repository = topic['repository'], visible = topic['visible'], order = topic['order'], description = topic['description'])
data["topic"] = topic
else:
data["topic"] = get_object_or_404(Topic, id = topic["id"])
bulletin_data = data
pendencies = bulletin_data["pendencies_resource"]
del bulletin_data["pendencies_resource"]
bulletin = Bulletin()
bulletin.name = bulletin_data["name"]
bulletin.brief_description = bulletin_data["brief_description"]
bulletin.show_window = bulletin_data["show_window"]
bulletin.all_students = bulletin_data["all_students"]
bulletin.visible = bulletin_data["visible"]
bulletin.order = bulletin_data["order"]
bulletin.topic = bulletin_data["topic"]
bulletin.content = bulletin_data["content"]
bulletin.file_content = bulletin_data["file_content"]
bulletin.indicators = bulletin_data["indicators"]
bulletin.save()
tags = data["tags"]
for tag in tags:
if not tag["name"] == "":
if tag["id"] == "":
tag = Tag.objects.create(name = tag["name"])
else:
tag = get_object_or_404(Tag, id = tag["id"])
bulletin.tags.add(tag)
resource = get_object_or_404(Resource, id = bulletin.id)
for pend in pendencies:
Pendencies.objects.create(resource = resource, **pend)
return bulletin
def update(self, instance, data):
return instance
class CompleteBulletinSerializer(serializers.ModelSerializer):
topic = TopicSerializer('get_subject')
tags = TagSerializer(many = True)
pendencies_resource = PendenciesSerializer(many = True)
groups = StudentsGroupSerializer('get_files', many = True)
students = UserBackupSerializer('get_files', many = True)
indicators = serializers.CharField(required = False, allow_blank = True, max_length = 255)
file_content = serializers.CharField(required = False, allow_blank = True, max_length = 255)
def get_subject(self, obj):
subject = self.context.get("subject", None)
return subject
def get_files(self, obj):
files = self.context.get("files", None)
return files
def validate(self, data):
files = self.context.get('files', None)
if files:
if data["file_content"] in files.namelist():
file_path = os.path.join(settings.MEDIA_ROOT, data["file_content"])
if os.path.isfile(file_path):
dst_path = os.path.join(settings.MEDIA_ROOT, "tmp")
path = files.extract(data["file_content"], dst_path)
new_name = "goal_" + str(time.time()) + os.path.splitext(data["file_content"])[1]
new_path = os.path.join("bulletin", os.path.join("goals", new_name))
os.rename(os.path.join(dst_path, path), os.path.join(settings.MEDIA_ROOT, new_path))
data["file_content"] = new_path
else:
path = files.extract(data["file_content"], settings.MEDIA_ROOT)
else:
data["file_content"] = None
if data["indicators"] in files.namelist():
file_path = os.path.join(settings.MEDIA_ROOT, data["indicators"])
if os.path.isfile(file_path):
dst_path = os.path.join(settings.MEDIA_ROOT, "tmp")
path = files.extract(data["indicators"], dst_path)
new_name = "ind_" + str(time.time()) + os.path.splitext(data["indicators"])[1]
new_path = os.path.join("bulletin", os.path.join("indicators", new_name))
os.rename(os.path.join(dst_path, path), os.path.join(settings.MEDIA_ROOT, new_path))
data["indicators"] = new_path
else:
path = files.extract(data["indicators"], settings.MEDIA_ROOT)
else:
data["indicators"] = None
else:
data["file_content"] = None
data["indicators"] = None
return data
class Meta:
model = Bulletin
extra_kwargs = {
"tags": {
"validators": [],
},
}
fields = '__all__'
validators = []
def create(self, data):
topic = data['topic']
bulletin = None
if not topic["id"] is None:
if "subject" in topic:
r_exits = Resource.objects.filter(topic__subject = topic["subject"], name__unaccent__iexact = data["name"])
else:
r_exits = Resource.objects.filter(topic__subject__id = topic["subject_id"], name__unaccent__iexact = data["name"])
if not r_exits.exists():
if topic['id'] == "":
topic_exist = Topic.objects.filter(subject = topic['subject'], name__unaccent__iexact = topic["name"])
if topic_exist.exists():
topic = topic_exist[0]
else:
topic = Topic.objects.create(name = topic['name'], subject = topic['subject'], repository = topic['repository'], visible = topic['visible'], order = topic['order'], description = topic['description'])
data["topic"] = topic
else:
data["topic"] = get_object_or_404(Topic, id = topic["id"])
bulletin_data = data
pendencies = bulletin_data["pendencies_resource"]
del bulletin_data["pendencies_resource"]
bulletin = Bulletin()
bulletin.name = bulletin_data["name"]
bulletin.brief_description = bulletin_data["brief_description"]
bulletin.show_window = bulletin_data["show_window"]
bulletin.all_students = bulletin_data["all_students"]
bulletin.visible = bulletin_data["visible"]
bulletin.order = bulletin_data["order"]
bulletin.topic = bulletin_data["topic"]
bulletin.content = bulletin_data["content"]
bulletin.file_content = bulletin_data["file_content"]
bulletin.indicators = bulletin_data["indicators"]
bulletin.save()
tags = data["tags"]
for tag in tags:
if not tag["name"] == "":
if tag["id"] == "":
tag = Tag.objects.create(name = tag["name"])
else:
tag = get_object_or_404(Tag, id = tag["id"])
bulletin.tags.add(tag)
resource = get_object_or_404(Resource, id = bulletin.id)
students = data["students"]
subject = get_object_or_404(Subject, slug = self.context.get("subject", None))
for student_data in students:
logs = student_data["get_items"]
if student_data["id"] == "":
u_exist = User.objects.filter(email = student_data["email"])
if not u_exist.exists():
student = u_exist[0]
for log in logs:
log["user_id"] = student.id
l_exists = Log.objects.filter(user_id = log["user_id"], user = log["user"], user_email = log["user_email"], action = log["action"], resource = log["resource"], component = log["component"], context = log["context"])
if not l_exists.exists():
Log.objects.create(**log)
else:
student = User()
student.email = student_data["email"]
student.username = student_data["username"]
student.last_name = student_data["last_name"]
student.social_name = student_data["social_name"]
student.show_email = student_data["show_email"]
student.is_staff = student_data["is_staff"]
student.is_active = student_data["is_active"]
student.image = student_data["image"]
student.save()
for log in logs:
log["user_id"] = student.id
Log.objects.create(**log)
else:
student = get_object_or_404(User, id = student_data["id"])
for log in logs:
l_exists = Log.objects.filter(user_id = log["user_id"], user = log["user"], user_email = log["user_email"], action = log["action"], resource = log["resource"], component = log["component"], context = log["context"])
if not l_exists.exists():
Log.objects.create(**log)
bulletin.students.add(student)
subject.students.add(student)
groups = data["groups"]
for group_data in groups:
g_exists = StudentsGroup.objects.filter(subject = subject, slug = group_data["slug"])
if g_exists.exists():
group = g_exists[0]
else:
group = StudentsGroup()
group.name = group_data["name"]
group.description = group_data["description"]
group.subject = subject
group.save()
for participant in group_data["participants"]:
p_user = get_object_or_404(User, email = participant["email"])
group.participants.add(p_user)
bulletin.groups.add(group)
for pend in pendencies:
Pendencies.objects.create(resource = resource, **pend)
return bulletin
def update(self, instance, data):
return instance | gpl-2.0 | -7,957,297,920,854,449,000 | 32.068241 | 231 | 0.662407 | false |
readdy/readdy | wrappers/python/src/python/readdy/api/trajectory.py | 1 | 26325 | # coding=utf-8
# Copyright © 2018 Computational Molecular Biology Group,
# Freie Universität Berlin (GER)
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the
# following conditions are met:
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Created on 28.09.17
@author: clonker
"""
import os as _os
import typing as _typing
import h5py as _h5py
import numpy as _np
from readdy._internal.readdybinding.common.util import read_reaction_observable as _read_reaction_observable
from readdy._internal.readdybinding.common.util import read_trajectory as _read_trajectory
from readdy._internal.readdybinding.common.util import TrajectoryParticle
from readdy._internal.readdybinding.common.util import read_topologies_observable as _read_topologies
from readdy.util.observable_utils import calculate_pressure as _calculate_pressure
import readdy.util.io_utils as _io_utils
class ReactionInfo:
def __init__(self, name, uuid, n_educts, n_products, rate, educt_distance, product_distance, educt_types,
product_types, inverse_types_map):
self._name = name[()].decode()
self._id = uuid
self._n_educts = n_educts
self._n_products = n_products
self._rate = rate
self._educt_distance = educt_distance
self._product_distance = product_distance
self._educt_types = educt_types
self._product_types = product_types
self._inverse_types_map = inverse_types_map
@property
def name(self):
"""
Returns the name of the reaction as registered in the reaction diffusion system.
:return: the name of the reaction
"""
return self._name
@property
def n_educts(self):
"""
Returns the number of educts in this reaction.
:return: the number of educts
"""
return self._n_educts
@property
def n_products(self):
"""
Returns the number of products in this reaction.
:return: the number of products
"""
return self._n_products
@property
def educt_distance(self):
"""
        Returns the educt distance. Only meaningful if n_educts > 1.
:return: the educt distance
"""
return self._educt_distance
@property
def product_distance(self):
"""
Returns the product distance. Only meaningful if n_products > 1.
:return: the product distance
"""
return self._product_distance
@property
def educt_types(self):
"""
Returns the types that are involved as educts.
:return: list of length n_educts
"""
return [self._inverse_types_map[x] for x in self._educt_types[:self.n_educts]]
@property
def product_types(self):
"""
Returns the types that are involved as products.
:return: list of length n_products
"""
return [self._inverse_types_map[x] for x in self._product_types[:self.n_products]]
@property
def reaction_id(self):
"""
Returns the global unique id of this reaction.
:return: the id
"""
return self._id
@property
def rate(self):
"""
Returns the reaction rate.
:return: the reaction rate
"""
return self._rate
@property
def type(self):
"""
Returns the type of this reaction. Can be one of "decay", "conversion", "fission", "fusion", "enzymatic".
:return: the type of this reaction
"""
if self.n_educts == 1:
if self.n_products == 0:
return "decay"
elif self.n_products == 1:
return "conversion"
elif self.n_products == 2:
return "fission"
else:
raise ValueError("this should not happen, the number of educts was {} and the number "
"of products was {}".format(self.n_educts, self.n_products))
elif self.n_educts == 2:
if self.n_products == 1:
return "fusion"
elif self.n_products == 2:
return "enzymatic"
else:
raise ValueError("this should not happen, the number of educts was {} and the number "
"of products was {}".format(self.n_educts, self.n_products))
else:
raise ValueError("this should not happen, the number of educts was {} and the number "
"of products was {}".format(self.n_educts, self.n_products))
def __str__(self):
result = ""
result += self.type + " reaction \"{}\": ".format(self.name)
result += " + ".join(self.educt_types) + " -> "
result += " + ".join(self.product_types) + " ["
result += " rate={} ".format(self.rate)
if self.n_educts > 1:
result += " educt_distance={} ".format(self.educt_distance)
if self.n_products > 1:
result += " product_distance={} ".format(self.product_distance)
result += "]"
return result
__repr__ = __str__
class GeneralInformation:
def __init__(self, filename):
import json
dsname = "readdy/config/general"
with _h5py.File(filename, "r") as f:
            if dsname not in f:
raise ValueError("General information was not recorded in the file!")
j = json.loads(f[dsname][()])
self._kbt = j['kbt']
self._box_volume = j['box_volume']
self._box_size = _np.array(j['box_size'])
self._pbc = _np.array(j['pbc'])
@property
def kbt(self):
return self._kbt
@property
def box_volume(self):
return self._box_volume
@property
def box_size(self):
return self._box_size
@property
def periodic_boundary_conditions(self):
return self._pbc
class _CKPT(object):
TOPOLOGY_CKPT = 'topologies_ckpt'
POSITIONS_CKPT = 'trajectory_ckpt'
class Trajectory(object):
def __init__(self, filename, name=""):
"""
        Attempts to open the given trajectory file.
:param filename: the file name
:param name: the trajectory name inside the file, as given in the simulation
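
        Example (a minimal sketch; assumes a simulation output file "out.h5"
        that was recorded with the default trajectory name):
            >>> traj = Trajectory("out.h5")
            >>> print(traj.particle_types)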
"""
        assert _os.path.exists(filename), "The file '{}' does not exist!".format(filename)
self._filename = filename
self._name = name
self._diffusion_constants = _io_utils.get_diffusion_constants(filename)
self._particle_types = _io_utils.get_particle_types(filename)
self._particle_type_mapping = {k: v['type_id'] for k, v in self._particle_types.items()}
self._topology_types = _io_utils.get_topology_types(filename)
self._reactions = []
self._inverse_types_map = {v: k for k, v in self.particle_types.items()}
self._inverse_topology_types_map = {v: k for k, v in self.topology_types.items()}
self._general = GeneralInformation(filename)
for _, reaction in _io_utils.get_reactions(filename).items():
info = ReactionInfo(reaction["name"], reaction["id"], reaction["n_educts"],
reaction["n_products"], reaction["rate"], reaction["educt_distance"],
reaction["product_distance"], reaction["educt_types"], reaction["product_types"],
self._inverse_types_map)
self._reactions.append(info)
self._spatial_topology_reaction_mapping = _io_utils.get_spatial_topology_reactions(filename)
self._structural_topology_reaction_mapping = _io_utils.get_structural_topology_reactions(filename)
def species_name(self, id):
"""
Retrieves the species' name according to its id as saved in some observables.
:param id: the id
:return: the species' name
"""
return self._inverse_types_map[id]
def topology_type_name(self, type_id):
"""
Retrieves the topologies' type name according to the type id as stored in some observables
:param type_id: the type id
:return: the topologies' type name
"""
return self._inverse_topology_types_map[type_id]
def is_topology_particle_type(self, particle_type):
"""
Checks whether a particle type belongs to a topology or a freely diffusing particle.
:param particle_type: the particle type, either id (int) or name (str)
:return: true if the particle type belongs to topologies
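
        Example (a sketch; traj is a Trajectory instance and "T" a
        hypothetical particle type registered with the topology flavor):
            >>> traj.is_topology_particle_type("T")
            True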
"""
import numbers
pname = None
if isinstance(particle_type, numbers.Number):
pname = self.species_name(particle_type)
if isinstance(particle_type, str):
pname = particle_type
assert pname is not None, f"Unknown particle type: {particle_type}"
return self._particle_types[pname]['flavor'] == 'TOPOLOGY'
@property
def kbt(self):
return self._general.kbt
@property
def box_volume(self):
return self._general.box_volume
@property
def box_size(self):
return self._general.box_size
@property
def periodic_boundary_conditions(self):
return self._general.periodic_boundary_conditions
@property
def diffusion_constants(self):
"""
Returns a dictionary of particle type -> diffusion constant
:return: the diffusion constants
"""
return self._diffusion_constants
@property
def particle_types(self):
"""
Returns a dictionary of particle type -> particle type ID
:return: the particle types
"""
return self._particle_type_mapping
@property
def topology_types(self):
"""
Returns a dictionary of topology type -> topology type ID
:return: the topology types
"""
return self._topology_types
@property
def reactions(self):
"""
Returns a list of `ReactionInfo` objects containing information about each reaction.
:return: a list of `ReactionInfo` objects
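
        Example (a sketch; traj is a Trajectory instance, printing uses the
        human-readable summary provided by ReactionInfo):
            >>> for reaction in traj.reactions:
            ...     print(reaction)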
"""
return self._reactions
def convert_to_xyz(self, xyz_filename=None, generate_tcl=True, tcl_with_grid=False, particle_radii=None,
color_ids=None, draw_box=False):
"""
        Converts this trajectory to an xyz file that can be read into VMD. Assuming the TCL script was generated, the
trajectory can be visualized by `vmd -e traj.xyz.tcl`.
:param xyz_filename: the output file name, if None, the filename with an .xyz extension will be used
:param generate_tcl: generates a tcl script that can be used alongside with the xyz file
:param tcl_with_grid: enables a grid view inside VMD
:param particle_radii: map particle radii for visualization purposes, e.g., `{"A": 10., "B": .1}`
:param color_ids: map particle type names to tcl/vmd color ids, e.g., `{"A": 0, "B": 5}`, default uses
consecutive numbering
:param draw_box: if True, will use box size from trajectory file and make vmd draw the edges of the box
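
        Example (a minimal sketch; the radius value and the resulting file
        names are illustrative):
            >>> traj = Trajectory("out.h5")
            >>> traj.convert_to_xyz(particle_radii={"A": 1.}, draw_box=True)
            >>> # afterwards, e.g.: vmd -e out.h5.xyz.tcl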
"""
from readdy.api.utils import convert_trajectory_to_xyz as to_xyz
if draw_box:
bs = tuple(self._general.box_size)
else:
bs = None
to_xyz(self._filename, self._name, xyz_filename=xyz_filename, generate_tcl=generate_tcl,
tcl_with_grid=tcl_with_grid, particle_radii=particle_radii, color_ids=color_ids, box_size=bs)
    def read(self) -> _typing.List[_typing.List[TrajectoryParticle]]:
"""
Reads the trajectory into memory as a list of lists.
:return: the trajectory
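
        Example (a sketch; the attribute names are those exposed by
        TrajectoryParticle):
            >>> frames = Trajectory("out.h5").read()
            >>> for particle in frames[0]:
            ...     print(particle.type, particle.id, particle.position)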
"""
return _read_trajectory(self._filename, self._name)
def read_observable_particle_positions(self, data_set_name=""):
"""
Reads back the output of the particle_positions observable.
:param data_set_name: The data set name as given in the simulation setup
:return: a tuple of lists, where the first element contains a list of simulation times and the second element
contains a list of (N, 3)-shaped arrays, where N is the number of particles in that time step
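
        Example (a sketch; traj is a Trajectory instance and the observable
        was recorded under the default data set name):
            >>> times, positions = traj.read_observable_particle_positions()
            >>> print(len(times), len(positions[0]))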
"""
with _h5py.File(self._filename, "r") as f:
if not "readdy/observables/particle_positions/" + data_set_name in f:
raise ValueError("The particle positions observable was not recorded in the file or recorded under a "
"different name!")
group = f["readdy/observables/particle_positions/" + data_set_name]
time = group["time"][:]
data = group["data"][:]
return time, data
def read_observable_particles(self, data_set_name=""):
"""
Reads back the output of the particles observable.
:param data_set_name: The data set name as given in the simulation setup.
:return: a tuple of lists, where:
* the first element contains a list of simulation times
            * the second element contains a list of lists of type-ids, which then can be made human-readable
by calling `species_name(type_id)`
* the third element contains a list of lists of unique ids for each particle
* the fourth element contains a list of lists of particle positions
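
        Example (a sketch; resolves the type ids of the first recorded frame
        back to species names):
            >>> times, types, ids, positions = traj.read_observable_particles()
            >>> names = [traj.species_name(t) for t in types[0]]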
"""
with _h5py.File(self._filename, "r") as f:
group_path = "readdy/observables/particles/" + data_set_name
            if group_path not in f:
raise ValueError("The particles observable was not recorded in the file or recorded under a different "
"name!")
group = f[group_path]
types = group["types"][:]
ids = group["ids"][:]
positions = group["positions"][:]
time = group["time"][:]
return time, types, ids, positions
def read_observable_rdf(self, data_set_name="rdf"):
"""
Reads back the output of the rdf observable.
:param data_set_name: The data set name as given in the simulation setup.
:return: a tuple of lists containing (simulation time with shape (T,), bin centers with shape (N, ),
distribution value with shape (T, N))
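
        Example (a sketch; plots the time-averaged distribution, assuming
        matplotlib is available):
            >>> times, bin_centers, distribution = traj.read_observable_rdf()
            >>> import matplotlib.pyplot as plt
            >>> plt.plot(bin_centers, distribution.mean(axis=0))
            >>> plt.show()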
"""
with _h5py.File(self._filename, "r") as f:
group_path = "readdy/observables/" + data_set_name
            if group_path not in f:
raise ValueError("The rdf observable was not recorded in the file or recorded under a different name!")
group = f[group_path]
time = group["time"][:]
bin_centers = group["bin_centers"][:]
distribution = group["distribution"][:]
return time, bin_centers, distribution
def read_observable_number_of_particles(self, data_set_name="n_particles"):
"""
Reads back the output of the "number of particles" observable.
:param data_set_name: The data set name as given in the simulation setup.
:return: a tuple of lists containing the simulation time and a list of lists containing the counts for
each specified type
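
        Example (a sketch; prints the counts belonging to the first recorded
        time step):
            >>> times, counts = traj.read_observable_number_of_particles()
            >>> print(counts[0])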
"""
group_path = "readdy/observables/" + data_set_name
with _h5py.File(self._filename, "r") as f:
            if group_path not in f:
raise ValueError("The number of particles observable was not recorded in the file or recorded under a "
"different name!")
time = f[group_path]["time"][:]
counts = f[group_path]["data"][:]
return time, counts
def read_observable_reactions(self, data_set_name="reactions"):
"""
        Reads back the output of the "reactions" observable.
:param data_set_name: The data set name as given in the simulation setup
:return: a tuple which contains an array corresponding to the time as first entry and a list of lists containing
reaction record objects as second entry
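
        Example (a sketch; counts the reaction events per recorded time step):
            >>> times, records = traj.read_observable_reactions()
            >>> print([len(r) for r in records])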
"""
group_path = "readdy/observables/" + data_set_name
with _h5py.File(self._filename, "r") as f:
            if group_path not in f:
raise ValueError("The reactions observable was not recorded in the file or recorded under a "
"different name!")
time = f[group_path]["time"][:]
return time, _read_reaction_observable(self._filename, data_set_name)
def read_observable_reaction_counts(self, data_set_name="reaction_counts"):
"""
        Reads back the output of the "reaction_counts" observable.
:param data_set_name: The data set name as given in the simulation setup
:return: a time array and a dictionary with keys `reactions`, `spatial_topology_reactions`,
`structural_topology_reactions`
"""
group_path = "readdy/observables/" + data_set_name
with _h5py.File(self._filename, "r") as f:
            if group_path not in f:
raise ValueError("The reaction counts observable was not recorded in the file or recorded under a "
"different name!")
time = f[group_path]["time"][:]
counts_reactions = {}
counts_spatial = {}
counts_structural = {}
counts_group = f[group_path]["counts"]
for reaction in self.reactions:
if str(reaction.reaction_id) in counts_group:
counts_reactions[reaction.name] = counts_group[str(reaction.reaction_id)][:]
if "spatialCounts" in f[group_path]:
spatial_group = f[group_path]["spatialCounts"]
for reaction_id in self._spatial_topology_reaction_mapping.keys():
name = self._spatial_topology_reaction_mapping[reaction_id]
name = (name.split(':')[0]).strip()
counts_spatial[name] = spatial_group[str(reaction_id)][:]
if "structuralCounts" in f[group_path]:
structural_group = f[group_path]["structuralCounts"]
for reaction_id in self._structural_topology_reaction_mapping.keys():
name = self._structural_topology_reaction_mapping[reaction_id]
counts_structural[name] = structural_group[str(reaction_id)][:]
return time, {
'reactions': counts_reactions,
'spatial_topology_reactions': counts_spatial,
'structural_topology_reactions': counts_structural
}
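    # Illustrative sketch ("fusion" is a hypothetical reaction name):
    #   times, counts = traj.read_observable_reaction_counts()
    #   cumulative = _np.cumsum(counts["reactions"]["fusion"])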
def read_observable_energy(self, data_set_name="energy"):
"""
Reads back the output of the "energy" observable.
:param data_set_name: The data set name as given in the simulation setup
:return: a tuple which contains an array corresponding to the time as first entry and a list of scalars
representing the system's potential energy per time step
"""
group_path = "readdy/observables/" + data_set_name
with _h5py.File(self._filename, "r") as f:
            if group_path not in f:
raise ValueError("The energy observable was not recorded in the file or recorded under a "
"different name!")
time = f[group_path]["time"][:]
energy = f[group_path]["data"][:]
return time, energy
def read_observable_forces(self, data_set_name="forces"):
"""
Reads back the output of the "forces" observable.
:param data_set_name: The data set name as given in the simulation setup
:return: a tuple which contains an array corresponding to the time as first entry and a list of arrays
containing the particles' forces per time step
"""
group_path = "readdy/observables/" + data_set_name
with _h5py.File(self._filename, "r") as f:
            if group_path not in f:
raise ValueError("The forces observable was not recorded in the file or recorded under a "
"different name!")
time = f[group_path]["time"][:]
forces = f[group_path]["data"][:]
return time, forces
def read_observable_topologies(self, data_set_name="topologies", start=None, stop=None):
"""
Reads back the output of the "topologies" observable
:param data_set_name: The data set name as given in the simulation setup
:param start: start step, if None from the beginning
:param stop: stop step, if None until the end
:return: a tuple which contains an array corresponding to the time as first entry and an array containing
lists of topologies per recorded time step
"""
group_path = "readdy/observables/{}".format(data_set_name)
with _h5py.File(self._filename, "r") as f:
            if group_path not in f:
                raise ValueError("The topologies observable was not recorded in the file or recorded under a "
                                 "different name!")
if start is None:
start = 0
if stop is None:
return _read_topologies(self._filename, group_path, start)
else:
return _read_topologies(self._filename, group_path, start, stop)
def read_observable_virial(self, data_set_name="virial"):
"""
Reads back the output of the "virial" observable.
:param data_set_name: The data set name as given in the simulation setup
:return: a tuple which contains an array corresponding to the time as first entry and an array containing the
corresponding virial per time step
"""
group_path = "readdy/observables/{}".format(data_set_name)
with _h5py.File(self._filename, "r") as f:
            if group_path not in f:
raise ValueError("The virial observable was not recorded in the file or recorded under a "
"different name!")
time = f[group_path]["time"][:]
virial = f[group_path]["data"][:]
return time, virial
def read_observable_pressure(self, data_set_name="_pressure"):
"""
Reads back the output of the "pressure" observable. As the pressure can be computed from the number of particles
and the virial, this actually reads back the n_particles and virial observables. The data_set_name serves as a
postfix, where the default value corresponds to the data sets as they are created when using the default
settings of the observable.
:param data_set_name: the data set name postfix, default="_pressure",
yielding "n_particles_pressure" and "virial_pressure", respectively
:return: a tuple which contains an array corresponding to the time as first entry and an array containing the
corresponding pressure per time step
"""
time_n_particles, n_particles = self.read_observable_number_of_particles("n_particles{}".format(data_set_name))
time_virial, virial = self.read_observable_virial("virial{}".format(data_set_name))
if not _np.array_equal(time_n_particles, time_virial):
raise RuntimeError("For Pressure it is required to know the number of particles and the virial. "
"However, these two observables were recorded with different strides.")
pressure = _np.array([_calculate_pressure(self.box_volume, self.kbt, n_particles[i], virial[i])
for i in range(len(time_n_particles))])
return time_virial, pressure
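    # Illustrative sketch: both underlying observables must share one stride,
    # so a typical call is simply
    #   times, pressure = traj.read_observable_pressure()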
def list_checkpoints(self):
result = []
trajectory_group_path = 'readdy/trajectory/' + _CKPT.POSITIONS_CKPT
topology_group_path = 'readdy/observables/' + _CKPT.TOPOLOGY_CKPT
with _h5py.File(self._filename, 'r') as f:
if trajectory_group_path in f:
assert topology_group_path in f, "Corrupted checkpointing: Contains checkpoints for particles " \
"but not for topologies"
t_particles = f[trajectory_group_path]['time'][:]
t_topologies = f[topology_group_path]['time'][:]
assert _np.array_equal(t_particles, t_topologies)
for ix, t in enumerate(t_particles):
result.append({
'number': ix,
'step': t
})
return result
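    # Illustrative sketch: picking the most recent checkpoint, e.g.
    #   ckpts = traj.list_checkpoints()
    #   latest_step = ckpts[-1]["step"] if ckpts else None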
def to_numpy(self, name="", start=None, stop=None):
from readdy.api.utils import load_trajectory_to_npy
return load_trajectory_to_npy(self._filename, begin=start, end=stop, name=name)
| bsd-3-clause | -4,600,007,790,698,382,300 | 42.581126 | 120 | 0.605706 | false |
UnitedThruAction/PrecinctDB | src/Cousub.py | 1 | 4545 | """Class to represent the COUSUB (COUnty SUBdivision) level of the Census,
used in the naming convention for Election Districts in some counties.
"""
from tqdm import tqdm
from couchdb.mapping import Document, TextField, IntegerField
from Database import Database, QueryType
class Cousub(Document):
"""COUnty SUBdivisions from the 2010 Census."""
DIRNAME = "./Input"
DATABASE = Database().get_db()
doctype = TextField()
census_LOGRECNO = IntegerField()
census_STATE = IntegerField()
census_COUNTY = IntegerField()
census_COUSUB = IntegerField()
census_NAME = TextField()
@staticmethod
def load_cousubs_from_file(overwrite=True):
"""Load VTDs into database from NY BoE files.
See https://www.census.gov/prod/cen2010/doc/pl94-171.pdf, p.2-22
"""
filename = "".join([Cousub.DIRNAME, "/nygeo2010.pl"])
filehandle = open(filename, "r")
for line in tqdm(filehandle):
# State-County-Voting District/Remainder-County Subdivision
census_SUMLEV = int(line[8:11].rstrip().lstrip())
if census_SUMLEV == 60: # COUSUB
# Logical Record Number
census_LOGRECNO = int(line[18:25].rstrip().lstrip())
# FIPS State
census_STATE = int(line[27:29].rstrip().lstrip())
# FIPS County
census_COUNTY = int(line[29:32].rstrip().lstrip())
# FIPS County Subdivision
census_COUSUB = int(line[36:41].rstrip().lstrip())
# Area Name-Legal/Statistical Area Description (LSAD) Term-Part
# Indicator
census_NAME = line[226:316].rstrip().lstrip()
try:
cousubs = Cousub.load_cousubs_from_db(
QueryType.COUSUB_BY_CENSUS_LOGRECNO, census_LOGRECNO)
if overwrite:
for cousub in cousubs:
cousub.doctype = "Cousub"
cousub.census_LOGRECNO = census_LOGRECNO
cousub.census_STATE = census_STATE
cousub.census_COUNTY = census_COUNTY
cousub.census_COUSUB = census_COUSUB
cousub.census_NAME = census_NAME
cousub.store(Cousub.DATABASE)
except ValueError:
new_cousub = Cousub(doctype="Cousub",
census_LOGRECNO=census_LOGRECNO,
census_STATE=census_STATE,
census_COUNTY=census_COUNTY,
census_COUSUB=census_COUSUB,
census_NAME=census_NAME)
new_cousub.store(Cousub.DATABASE)
filehandle.close()
@staticmethod
def load_cousubs_from_db(query_type, key):
"""Get a VTD from the DB via a query."""
if not isinstance(query_type, QueryType):
raise ValueError("Must provide a QueryType Enum")
uuids = [
doc.value for doc in Cousub.DATABASE.view(
query_type.value)[key]]
if len(uuids) == 0:
raise ValueError("No docs returned: {}, key {}"
.format(query_type, key))
return [Cousub.load(Cousub.DATABASE, uuid) for uuid in uuids]
@staticmethod
def get_cousub_name(county, cousub):
cousubs = Cousub.load_cousubs_from_db(
QueryType.COUSUB_BY_COUNTY_COUSUB, [county, cousub])
if len(cousubs) > 1:
raise ValueError("More than one COUSUB returned for county {},"
" cousub {}".format(county, cousub))
else:
cousub = cousubs[0]
return cousub.census_NAME
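    # Illustrative usage sketch (the FIPS codes below are hypothetical):
    #   name = Cousub.get_cousub_name(county=61, cousub=44919)
    # This assumes the COUSUB records were loaded beforehand via
    # load_cousubs_from_file().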
@staticmethod
def main():
print("Loading basic COUSUBs from files.")
Cousub.load_cousubs_from_file()
def to_dict(self):
"""Convert VTD to dict for use with Pandas.
TODO: Make this less horrible. self.__dict__ doesn't work."""
temp_dict = {}
temp_dict['census_LOGRECNO'] = self.census_LOGRECNO
temp_dict['census_STATE'] = self.census_STATE
temp_dict['census_COUNTY'] = self.census_COUNTY
temp_dict['census_COUSUB'] = self.census_COUSUB
temp_dict['census_NAME'] = self.census_NAME
return temp_dict
if __name__ == "__main__":
Cousub.main()
| mit | -2,888,762,316,842,533,400 | 37.516949 | 79 | 0.546535 | false |
facebookresearch/fastText | python/benchmarks/get_word_vector.py | 1 | 1502 | # Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from fasttext import load_model
from fasttext import tokenize
import sys
import time
import tempfile
import argparse
def get_word_vector(data, model):
t1 = time.time()
print("Reading")
with open(data, 'r') as f:
tokens = tokenize(f.read())
t2 = time.time()
print("Read TIME: " + str(t2 - t1))
print("Read NUM : " + str(len(tokens)))
f = load_model(model)
# This is not equivalent to piping the data into
# print-word-vector, because the data is tokenized
# first.
t3 = time.time()
i = 0
for t in tokens:
f.get_word_vector(t)
i += 1
if i % 10000 == 0:
sys.stderr.write("\ri: " + str(float(i / len(tokens))))
sys.stderr.flush()
t4 = time.time()
print("\nVectoring: " + str(t4 - t3))
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Simple benchmark for get_word_vector.')
parser.add_argument('model', help='A model file to use for benchmarking.')
parser.add_argument('data', help='A data file to use for benchmarking.')
args = parser.parse_args()
get_word_vector(args.data, args.model)
| mit | 5,576,297,108,336,475,000 | 29.04 | 78 | 0.641145 | false |
dhocker/athomepowerlineserver | testclient/ahps_client.py | 1 | 24338 | #
# AtHomePowerlineServer - networked server for X10 and WiFi devices
# Copyright © 2014, 2020 Dave Hocker (email: [email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# See the LICENSE file for more details.
#
#
# Test client for AtHomePowerlineServer and ahps API module
#
# python3 ahps_client.py [-s hostname|hostaddress] [-p portnumber] [-v | -q] request [arguments]
#
import sys
import json
from optparse import OptionParser
sys.path.append("./")
sys.path.append("../")
from ahps.ahps_api import ServerRequest
# Default global settings
host = "localhost"
port = 9999
verbose = True
def _required_keys(dict_to_test, keys):
"""
Test a dict for a list of required keys. Extra keys are ignored.
:param dict_to_test:
:param keys: list of keys that must be present
:return: True if all keys are present
"""
for key in keys:
if key not in dict_to_test.keys():
raise KeyError("Required key missing: " + key)
return True
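# Illustrative sketch (hypothetical values): _required_keys returns True when
# every key is present and raises KeyError otherwise, e.g.
#   _required_keys({"device-id": 1}, ["device-id"])  # -> True
#   _required_keys({}, ["device-id"])                # -> KeyError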
def _open_request(request_args):
"""
An open server request. The argument is a JSON file
containing the entire request. This is the "raw" interface.
    :param request_args: ["request", "<request_file.json>"]
:return:
"""
try:
fh = open(args[1], "r")
dev_json = json.load(fh)
fh.close()
except Exception as ex:
print(str(ex))
return None
request = ServerRequest(host=host, port=port, verbose=verbose)
result = request.open_request(dev_json)
return result
def _device_on(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.device_on(request_args[1])
def _device_off(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.device_off(request_args[1])
def _all_devices_on(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.all_devices_on()
def _all_devices_off(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.all_devices_off()
def _device_dim(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.device_dim(request_args[1], request_args[2])
def _device_bright(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.device_bright(request_args[1], request_args[2])
def _status_request(request_args):
# This DOES NOT work. Why?
# data = "{ \"command\": \"StatusRequest\", \"args\": {\"a\": 1} }"
# This DOES work. Why?
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.status_request()
def _create_timer_program(program):
timer_program = {
"name": program["name"],
"day-mask": program["day-mask"],
"trigger-method": program["trigger-method"],
"time": program["time"],
"offset": str(program["offset"]),
"command": program["command"],
"randomize": True if program["randomize"] else False,
"randomize-amount": str(program["randomize-amount"]),
"color": str(program["color"]),
"brightness": int(program["brightness"])
}
return timer_program
def _define_program(request_args):
dd_required_keys = [
"name",
"day-mask",
"trigger-method",
"time",
"offset",
"command",
"randomize",
"randomize-amount",
"color",
"brightness"
]
try:
fh = open(args[1], "r")
program_json = json.load(fh)
fh.close()
# Test for required keys
_required_keys(program_json, dd_required_keys)
except Exception as ex:
print(str(ex))
return None
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.define_program(_create_timer_program(program_json))
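# Illustrative sketch of a <new_program.json> file consumed by _define_program
# (every value below is hypothetical):
#   {
#     "name": "porch-light-on", "day-mask": "MTWTFSS",
#     "trigger-method": "clock-time", "time": "18:30", "offset": 0,
#     "command": "on", "randomize": false, "randomize-amount": 0,
#     "color": "#FFFFFF", "brightness": 100
#   }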
def _update_program(request_args):
dd_required_keys = [
"id",
"name",
"day-mask",
"trigger-method",
"time",
"offset",
"command",
"randomize",
"color",
"brightness"
]
try:
fh = open(args[1], "r")
dev_json = json.load(fh)
fh.close()
# Test for required keys
_required_keys(dev_json, dd_required_keys)
except Exception as ex:
print(str(ex))
return None
program = _create_timer_program(dev_json)
program["id"] = dev_json["id"]
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.update_program(program)
def _delete_device_program(request_args):
"""
Delete a program from a device
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.delete_device_program(request_args[1], request_args[2])
def _define_device(request_args):
dd_required_keys = [
"device-name",
"device-location",
"device-mfg",
"device-address",
"device-channel",
"device-color",
"device-brightness"
]
try:
fh = open(args[1], "r")
dev_json = json.load(fh)
fh.close()
# Test for required keys
_required_keys(dev_json, dd_required_keys)
except Exception as ex:
print(str(ex))
return None
device_name = dev_json["device-name"]
device_location = dev_json["device-location"]
device_mfg = dev_json["device-mfg"]
device_address = dev_json["device-address"]
device_channel = dev_json["device-channel"]
device_color = dev_json["device-color"]
device_brightness = dev_json["device-brightness"]
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.define_device(device_name, device_location, device_mfg, device_address, device_channel,
device_color, device_brightness)
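# Illustrative sketch of a <new_device.json> file consumed by _define_device
# (every value below is hypothetical):
#   {
#     "device-name": "porch-light", "device-location": "front porch",
#     "device-mfg": "x10", "device-address": "A1", "device-channel": 0,
#     "device-color": "#FFFFFF", "device-brightness": 100
#   }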
def _update_device(request_args):
dd_required_keys = [
"device-id",
"device-name",
"device-location",
"device-mfg",
"device-address",
"device-channel",
"device-color",
"device-brightness"
]
try:
fh = open(args[1], "r")
dev_json = json.load(fh)
# Test for required keys
_required_keys(dev_json, dd_required_keys)
fh.close()
except Exception as ex:
print(str(ex))
return None
device_id = dev_json["device-id"]
device_name = dev_json["device-name"]
device_location = dev_json["device-location"]
device_mfg = dev_json["device-mfg"]
device_address = dev_json["device-address"]
device_channel = dev_json["device-channel"]
device_color = dev_json["device-color"]
device_brightness = dev_json["device-brightness"]
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.update_device(device_id, device_name, device_location, device_mfg, device_address,
device_channel, device_color, device_brightness)
def _delete_device(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.delete_device(request_args[1])
def _query_devices(request_args):
"""
Query for all devices
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
if len(request_args) >= 2:
return request.query_device(request_args[1])
return request.query_devices()
def _query_programs(request_args):
"""
Query for all programs
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.query_programs()
def _query_device_programs(request_args):
"""
Query for all programs for a device
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.query_programs_for_device_id(request_args[1])
def _query_device_program(request_args):
"""
Query for a device progam by its program ID
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.query_program_by_id(request_args[1])
def _assign_device(request_args):
"""
Assign a device to a group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.assign_device_to_group(request_args[1], request_args[2])
def _assign_program(request_args):
"""
Assign a program to a device
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.assign_program_to_device(request_args[1], request_args[2])
def _assign_program_to_group(request_args):
"""
Assign a program to a device group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.assign_program_to_group_devices(request_args[1], request_args[2])
def _define_group(request_args):
"""
Define a device group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.define_action_group(request_args[1])
def _update_group(request_args):
"""
Update a device group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
group = {
"group-id": request_args[1],
"group-name": request_args[2]
}
return request.update_action_group(group)
def _delete_group(request_args):
"""
Delete a device group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.delete_action_group(request_args[1])
def _delete_group_device(request_args):
"""
Delete a device from a group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.delete_action_group_device(request_args[1], request_args[2])
def _delete_program(request_args):
"""
Delete a program
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.delete_program(request_args[1])
def _group_on(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.group_on(request_args[1])
def _group_off(request_args):
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.group_off(request_args[1])
def _query_action_group(request_args):
"""
Query for a device group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.query_action_group(request_args[1])
def _query_group_devices(request_args):
"""
Query for all devices in a group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.get_action_group_devices(request_args[1])
def _query_groups(request_args):
"""
Query for all groups
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.get_action_groups()
def _query_available_devices(request_args):
"""
Query for all devices of a given manufacturer/type
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.query_available_devices(request_args[1])
def _discover_devices(request_args):
"""
Discover all devices on the local network
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.discover_devices()
def _query_available_group_devices(request_args):
"""
Query for all devices available for assignment to a group
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.query_available_devices_for_group_id(request_args[1])
def _query_available_programs(request_args):
"""
Query for all programs available for assignment to a device
:param request_args:
:return:
"""
request = ServerRequest(host=host, port=port, verbose=verbose)
return request.query_available_programs_for_device_id(request_args[1])
def _request_help(request_args):
"""
help request
:param request_args:
:return:
"""
# Check for markdown format output
md = False
if len(request_args) >= 3 and request_args[2].lower() == "md":
md = True
print("Command Line Tool")
print()
print("Help - Request List")
if md:
print()
print("Legend")
print("* All request names are case insensitive")
print("* device-id is the unique identifier for a device")
print("* program-id is the unique identifier for a timer/trigger program")
print("* group-id is the unique identifier for a device group")
print("* <file_name.json> is a JSON formatted file")
print()
if request_args[1].lower() in ["*", "all"]:
for key in sorted(request_list.keys()):
r = request_list[key]
if md:
print("##%s" % key)
print(r["description"])
print("```")
print("Syntax:", r["syntax"])
print("```")
else:
print(key)
print(" Description:", r["description"])
print(" Syntax:", r["syntax"])
elif request_args[1].lower() in request_list.keys():
r = request_list[request_args[1]]
print(request_args[1])
print(" Description:", r["description"])
print(" Syntax:", r["syntax"])
else:
print(request_args[1], "is not a valid request")
"""
List of all supported requests
handler: the function that handles the request
syntax:
[a | b] one from list is REQUIRED
{a | b} optional choice, one from list MAY be chosen
argcount: the minimum number of required request arguments including the request
"""
request_list = {
"help": {
"description": "Help for one or all requests",
"syntax": "help [requestname | all | *] {md}",
"handler": _request_help,
"argcount": 2
},
"statusrequest": {
"description": "Returns the status of the server",
"syntax": "StatusRequest",
"handler": _status_request,
"argcount": 1
},
"request": {
"description": "A raw request in JSON format",
"syntax": "request <request_file.json>",
"handler": _open_request,
"argcount": 2
},
"on": {
"description": "Turn a device on",
"syntax": "on device-id",
"handler": _device_on,
"argcount": 2
},
"deviceon": {
"description": "Turn a device on",
"syntax": "deviceon device-id",
"handler": _device_on,
"argcount": 2
},
"alldeviceson": {
"description": "Turn all devices on",
"syntax": "alldeviceson",
"handler": _all_devices_on,
"argcount": 1
},
"off": {
"description": "Turn a device off",
"syntax": "off device-id",
"handler": _device_off,
"argcount": 2
},
"deviceoff": {
"description": "Turn a device off",
"syntax": "deviceoff device-id",
"handler": _device_off,
"argcount": 2
},
"alldevicesoff": {
"description": "Turn all devices off",
"syntax": "alldevicesoff",
"handler": _all_devices_off,
"argcount": 1
},
"definedevice": {
"description": "Define a new device using a JSON formatted input file",
"syntax": "definedevice <new_device.json>",
"handler": _define_device,
"argcount": 2
},
"updatedevice": {
"description": "Update a device definition using a JSON formatted input file",
"syntax": "updatedevice <update_device.json>",
"handler": _update_device,
"argcount": 2
},
"deletedevice": {
"description": "Delete a device by ID",
"syntax": "deletedevice device-id",
"handler": _delete_device,
"argcount": 2
},
"querydevices": {
"description": "List all devices with details",
"syntax": "querydevices",
"handler": _query_devices,
"argcount": 1
},
"querydevice": {
"description": "List a device by ID",
"syntax": "querydevice device-id",
"handler": _query_devices,
"argcount": 2
},
"querydeviceprograms": {
"description": "List all programs for a device ID",
"syntax": "querydeviceprograms device-id",
"handler": _query_device_programs,
"argcount": 2
},
"assigndevice": {
"description": "Assign a device to a group",
"syntax": "assigndevice group-id device-id",
"handler": _assign_device,
"argcount": 3
},
"assignprogram": {
"description": "Assign a program to a device",
"syntax": "assignprogram device-id program-id",
"handler": _assign_program,
"argcount": 3
},
"assignprogramtogroup": {
"description": "Assign a program to a device group",
"syntax": "assignprogramtogroup group-id program-id",
"handler": _assign_program_to_group,
"argcount": 3
},
"definegroup": {
"description": "Define a new device group",
"syntax": "definegroup group-name",
"handler": _define_group,
"argcount": 2
},
"deletegroup": {
"description": "Delete a device group",
"syntax": "deletegroup group-id",
"handler": _delete_group,
"argcount": 2
},
"deletegroupdevice": {
"description": "Delete a device from a group",
"syntax": "deletegroupdevice group-id device-id",
"handler": _delete_group_device,
"argcount": 3
},
"deletedeviceprogram": {
"description": "Delete a program from a device",
"syntax": "deletedeviceprogram device-id program-id",
"handler": _delete_device_program,
"argcount": 3
},
"defineprogram": {
"description": "Define a new program",
"syntax": "defineprogram <new_program.json>",
"handler": _define_program,
"argcount": 2
},
"deleteprogram": {
"description": "Delete a program",
"syntax": "deleteprogram program-id",
"handler": _delete_program,
"argcount": 2
},
"groupon": {
"description": "Turn on all devices in a group",
"syntax": "groupon group-id",
"handler": _group_on,
"argcount": 2
},
"groupoff": {
"description": "Turn off all devices in a group",
"syntax": "deviceoff group-id",
"handler": _group_off,
"argcount": 2
},
"querygroup": {
"description": "List device group details",
"syntax": "querygroup group-id",
"handler": _query_action_group,
"argcount": 2
},
"querygroupdevices": {
"description": "List devices in a group",
"syntax": "querygroupdevices group-id",
"handler": _query_group_devices,
"argcount": 2
},
"querygroups": {
"description": "List all groups",
"syntax": "querygroups",
"handler": _query_groups,
"argcount": 1
},
"queryavailablemfgdevices": {
"description": "List all devices of a manufacturer/type",
"syntax": "queryavailablemfgdevices mfg-or-type",
"handler": _query_available_devices,
"argcount": 2
},
"discoverdevices": {
"description": "Discover all devices on the local network",
"syntax": "discoverdevices",
"handler": _discover_devices,
"argcount": 1
},
"queryavailablegroupdevices": {
"description": "List all devices available for assignment to a group",
"syntax": "queryavailablegroupdevices group-id",
"handler": _query_available_group_devices,
"argcount": 2
},
"queryavailableprograms": {
"description": "List all programs available for assignment to a device",
"syntax": "queryavailableprograms device-id",
"handler": _query_available_programs,
"argcount": 2
},
"queryprogram": {
"description": "List program details for a program ID",
"syntax": "queryprogram program-id",
"handler": _query_device_program,
"argcount": 2
},
"querydeviceprogram": {
"description": "List program details for a program ID",
"syntax": "querydeviceprogram program-id",
"handler": _query_device_program,
"argcount": 2
},
"queryprograms": {
"description": "List all programs",
"syntax": "queryprograms",
"handler": _query_programs,
"argcount": 1
},
"updategroup": {
"description": "Update a group",
"syntax": "updategroup group-id group-name",
"handler": _update_group,
"argcount": 3
},
"updateprogram": {
"description": "Update a program",
"syntax": "updateprogram <update_program.json>",
"handler": _update_program,
"argcount": 2
},
}
def _get_request_handler(request_args):
if request_args is not None and len(request_args) > 0:
request = request_args[0].lower()
if request in request_list.keys():
if len(request_args) >= request_list[request]["argcount"]:
return request_list[request]
else:
print("Wrong number of request arguments")
print(request_args)
print("%d arguments required (including request), %d provided" % (request_list[request]["argcount"], len(request_args)))
else:
print("Unknown request:", args[0])
else:
# Show minimal help
_request_help(["help", "help"])
return None
"""
AtHomePowerlineServer Client
python ahps_client.py [-s SERVER] [-p PORT] [-v | -q] request [arguments]
"""
if __name__ == "__main__":
# Show license advertisement
import disclaimer.Disclaimer
disclaimer.Disclaimer.DisplayDisclaimer()
# import pdb; pdb.set_trace()
parser = OptionParser(usage="usage: %prog [options] request [arguments]")
parser.add_option("-s", "--server", help="Server name or address")
parser.add_option("-p", "--port", type="int", help="TCP port number for server")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=True, help="Verbose logging")
parser.add_option("-q", "--quiet", action="store_false", dest="verbose", help="Quiet/minimal logging")
(options, args) = parser.parse_args()
if options.server is not None:
host = options.server
if options.port is not None:
port = int(options.port)
verbose = options.verbose
handler = _get_request_handler(args)
if handler:
handler["handler"](args)
| gpl-3.0 | -8,405,126,692,640,701,000 | 28.2699 | 136 | 0.586925 | false |
dunkhong/grr | grr/server/grr_response_server/rdfvalues/objects.py | 1 | 21984 | #!/usr/bin/env python
"""Top level datastore objects.
This package contains the rdfvalue wrappers around the top level datastore
objects defined by objects.proto.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import functools
import hashlib
import itertools
import os
import stat
from future.builtins import str
from future.utils import python_2_unicode_compatible
from typing import Text
from grr_response_core import config
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import client_network as rdf_client_network
from grr_response_core.lib.rdfvalues import cloud as rdf_cloud
from grr_response_core.lib.rdfvalues import crypto as rdf_crypto
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_core.lib.util import compatibility
from grr_response_core.lib.util import precondition
from grr_response_core.lib.util import text
from grr_response_proto import objects_pb2
_UNKNOWN_GRR_VERSION = "Unknown-GRR-version"
class ClientLabel(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.ClientLabel
class StringMapEntry(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.StringMapEntry
class ClientSnapshot(rdf_structs.RDFProtoStruct):
"""The client object.
Attributes:
timestamp: An rdfvalue.Datetime indicating when this client snapshot was
saved to the database. Should be present in every client object loaded
from the database, but is not serialized with the rdfvalue fields.
"""
protobuf = objects_pb2.ClientSnapshot
rdf_deps = [
StringMapEntry,
rdf_cloud.CloudInstance,
rdf_client_fs.Filesystem,
rdf_client.HardwareInfo,
rdf_client_network.Interface,
rdf_client.KnowledgeBase,
rdf_client.StartupInfo,
rdf_client_fs.Volume,
rdfvalue.ByteSize,
rdfvalue.RDFDatetime,
]
def __init__(self, *args, **kwargs):
super(ClientSnapshot, self).__init__(*args, **kwargs)
self.timestamp = None
def Uname(self):
"""OS summary string."""
return "%s-%s-%s" % (self.knowledge_base.os, self.os_release,
self.os_version)
def GetGRRVersionString(self):
"""Returns the client installation-name and GRR version as a string."""
client_info = self.startup_info.client_info
client_name = client_info.client_description or client_info.client_name
if client_info.client_version > 0:
client_version = str(client_info.client_version)
else:
client_version = _UNKNOWN_GRR_VERSION
return " ".join([client_name, client_version])
def GetMacAddresses(self):
"""MAC addresses from all interfaces."""
result = set()
for interface in self.interfaces:
if (interface.mac_address and
interface.mac_address != b"\x00" * len(interface.mac_address)):
result.add(Text(interface.mac_address.human_readable_address))
return sorted(result)
def GetIPAddresses(self):
"""IP addresses from all interfaces."""
result = []
filtered_ips = ["127.0.0.1", "::1", "fe80::1"]
for interface in self.interfaces:
for address in interface.addresses:
if address.human_readable_address not in filtered_ips:
result.append(Text(address.human_readable_address))
return sorted(result)
def GetSummary(self):
"""Gets a client summary object.
Returns:
rdf_client.ClientSummary
Raises:
ValueError: on bad cloud type
"""
summary = rdf_client.ClientSummary()
summary.client_id = self.client_id
summary.timestamp = self.timestamp
summary.system_info.release = self.os_release
summary.system_info.version = str(self.os_version or "")
summary.system_info.kernel = self.kernel
summary.system_info.machine = self.arch
summary.system_info.install_date = self.install_time
kb = self.knowledge_base
if kb:
summary.system_info.fqdn = kb.fqdn
summary.system_info.system = kb.os
summary.users = kb.users
summary.interfaces = self.interfaces
summary.client_info = self.startup_info.client_info
if kb.os_release:
summary.system_info.release = kb.os_release
if kb.os_major_version:
summary.system_info.version = "%d.%d" % (kb.os_major_version,
kb.os_minor_version)
hwi = self.hardware_info
if hwi:
summary.serial_number = hwi.serial_number
summary.system_manufacturer = hwi.system_manufacturer
summary.system_uuid = hwi.system_uuid
cloud_instance = self.cloud_instance
if cloud_instance:
summary.cloud_type = cloud_instance.cloud_type
if cloud_instance.cloud_type == "GOOGLE":
summary.cloud_instance_id = cloud_instance.google.unique_id
elif cloud_instance.cloud_type == "AMAZON":
summary.cloud_instance_id = cloud_instance.amazon.instance_id
else:
raise ValueError("Bad cloud type: %s" % cloud_instance.cloud_type)
return summary
class ClientMetadata(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.ClientMetadata
rdf_deps = [
rdf_client_network.NetworkAddress,
rdf_crypto.RDFX509Cert,
rdfvalue.RDFDatetime,
]
class ClientFullInfo(rdf_structs.RDFProtoStruct):
"""ClientFullInfo object."""
protobuf = objects_pb2.ClientFullInfo
rdf_deps = [
ClientMetadata,
ClientSnapshot,
ClientLabel,
rdf_client.StartupInfo,
]
def GetLabelsNames(self, owner=None):
return set(
Text(l.name) for l in self.labels if not owner or l.owner == owner)
class GRRUser(rdf_structs.RDFProtoStruct):
"""GRRUser object."""
protobuf = objects_pb2.GRRUser
rdf_deps = [
rdf_crypto.Password,
]
def GetEmail(self):
"""Returns the E-Mail address for the user."""
if config.CONFIG.Get("Email.enable_custom_email_address") and self.email:
return self.email
return "{}@{}".format(self.username, config.CONFIG.Get("Logging.domain"))
class ApprovalGrant(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.ApprovalGrant
rdf_deps = [
rdfvalue.RDFDatetime,
]
class ApprovalRequest(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.ApprovalRequest
rdf_deps = [
rdfvalue.RDFDatetime,
ApprovalGrant,
]
@property
def is_expired(self):
return self.expiration_time < rdfvalue.RDFDatetime.Now()
@python_2_unicode_compatible
@functools.total_ordering
class HashID(rdfvalue.RDFValue):
"""An unique hash identifier."""
__abstract = True # pylint: disable=g-bad-name
protobuf_type = "bytes"
hash_id_length = None
def __init__(self, initializer=None):
if self.__class__.hash_id_length is None:
raise TypeError("Trying to instantiate base HashID class. "
"hash_id_length has to be set.")
super(HashID, self).__init__()
if isinstance(initializer, HashID):
initializer = initializer._value # pylint: disable=protected-access
if initializer is None:
initializer = b"\x00" * self.__class__.hash_id_length
precondition.AssertType(initializer, (bytes, rdfvalue.RDFBytes))
if len(initializer) != self.__class__.hash_id_length:
raise ValueError(
"Expected %s bytes but got `%s` `%s` instead" %
(self.__class__.hash_id_length, len(initializer), initializer))
if isinstance(initializer, rdfvalue.RDFBytes):
self._value = initializer.SerializeToBytes()
else:
self._value = initializer
@classmethod
def FromWireFormat(cls, value):
precondition.AssertType(value, bytes)
return cls(value)
def SerializeToBytes(self):
return self.AsBytes()
@classmethod
def FromSerializedBytes(cls, value):
precondition.AssertType(value, bytes)
return cls(value)
def AsBytes(self):
return self._value
def AsHexString(self):
return text.Hexify(self._value)
def AsHashDigest(self):
return rdfvalue.HashDigest(self._value)
def __repr__(self):
cls_name = compatibility.GetName(self.__class__)
value = text.Hexify(self._value)
return "{cls_name}('{value}')".format(cls_name=cls_name, value=value)
def __str__(self):
return self.__repr__()
def __lt__(self, other):
if isinstance(other, self.__class__):
return self._value < other._value # pylint: disable=protected-access
else:
return self._value < other
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._value == other._value # pylint: disable=protected-access
else:
return self._value == other
# Required, because in Python 3 overriding `__eq__` nullifies `__hash__`.
__hash__ = rdfvalue.RDFValue.__hash__
class PathID(HashID):
"""An unique path identifier corresponding to some path.
Args:
components: A list of path components to construct the identifier from.
"""
hash_id_length = 32
@classmethod
def FromComponents(cls, components):
_ValidatePathComponents(components)
if components:
# We need a string to hash, based on components. If we simply concatenated
# them, or used a separator that could appear in some component, odd data
# could force a hash collision. So we explicitly include the lengths of
# the components.
string = "{lengths}:{path}".format(
lengths=",".join(str(len(component)) for component in components),
path="/".join(components))
result = hashlib.sha256(string.encode("utf-8")).digest()
else:
# For an empty list of components (representing `/`, i.e. the root path),
# we use special value: zero represented as a 256-bit number.
result = b"\0" * 32
return PathID(result)
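# Illustrative sketch: identifiers are deterministic, so
#   PathID.FromComponents(["usr", "bin"]) == PathID.FromComponents(["usr", "bin"])
# always holds, and the root path (an empty component list) maps to 32 zero
# bytes.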
class PathInfo(rdf_structs.RDFProtoStruct):
"""Basic metadata about a path which has been observed on a client."""
protobuf = objects_pb2.PathInfo
rdf_deps = [
rdfvalue.RDFDatetime,
rdf_client_fs.StatEntry,
rdf_crypto.Hash,
]
def __init__(self, *args, **kwargs):
super(PathInfo, self).__init__(*args, **kwargs)
_ValidatePathComponents(self.components)
  # TODO(hanuszczak): Find a reliable way to make sure that no one ends up with
# incorrect `PathInfo` (a one that is both root and non-directory). Simple
# validation in a constructor has two flaws:
#
# a) One can still screw it up by setting directory to `False` on already
# constructed value.
# b) The `Copy` method temporarily constructs an incorrect object and assigns
# all the fields afterwards.
@classmethod
def OS(cls, *args, **kwargs):
return cls(*args, path_type=cls.PathType.OS, **kwargs)
@classmethod
def TSK(cls, *args, **kwargs):
return cls(*args, path_type=cls.PathType.TSK, **kwargs)
@classmethod
def Registry(cls, *args, **kwargs):
return cls(*args, path_type=cls.PathType.REGISTRY, **kwargs)
@classmethod
def PathTypeFromPathspecPathType(cls, ps_path_type):
if ps_path_type == rdf_paths.PathSpec.PathType.OS:
return cls.PathType.OS
elif ps_path_type == rdf_paths.PathSpec.PathType.TSK:
return cls.PathType.TSK
elif ps_path_type == rdf_paths.PathSpec.PathType.REGISTRY:
return cls.PathType.REGISTRY
elif ps_path_type == rdf_paths.PathSpec.PathType.TMPFILE:
return cls.PathType.TEMP
else:
raise ValueError("Unexpected path type: %s" % ps_path_type)
@classmethod
def FromPathSpec(cls, pathspec):
# Note that since PathSpec objects may contain more information than what is
# stored in a PathInfo object, we can only create a PathInfo object from a
# PathSpec, never the other way around.
path_type = cls.PathTypeFromPathspecPathType(pathspec.last.pathtype)
components = []
for pathelem in pathspec:
path = pathelem.path
if pathelem.offset:
path += ":%s" % pathelem.offset
if pathelem.stream_name:
path += ":%s" % pathelem.stream_name
# TODO(hanuszczak): Sometimes the paths start with '/', sometimes they do
# not (even though they are all supposed to be absolute). If they do start
# with `/` we get an empty component at the beginning which needs to be
# removed.
#
# It is also possible that path is simply '/' which, if split, yields two
# empty components. To simplify things we just filter out all empty
# components. As a side effect we also support pathological cases such as
# '//foo//bar////baz'.
#
# Ideally, pathspec should only allow one format (either with or without
    # leading slash), sanitizing the input as soon as possible.
components.extend(component for component in path.split("/") if component)
return cls(path_type=path_type, components=components)
@classmethod
def FromStatEntry(cls, stat_entry):
result = cls.FromPathSpec(stat_entry.pathspec)
result.directory = stat.S_ISDIR(int(stat_entry.st_mode))
result.stat_entry = stat_entry
return result
@property
def root(self):
return not self.components
@property
def basename(self):
if self.root:
return ""
else:
return self.components[-1]
def GetPathID(self):
return PathID.FromComponents(self.components)
def GetParentPathID(self):
return PathID.FromComponents(self.components[:-1])
def GetParent(self):
"""Constructs a path info corresponding to the parent of current path.
The root path (represented by an empty list of components, corresponds to
`/` on Unix-like systems) does not have a parent.
Returns:
Instance of `rdf_objects.PathInfo` or `None` if parent does not exist.
"""
if self.root:
return None
return PathInfo(
components=self.components[:-1],
path_type=self.path_type,
directory=True)
def GetAncestors(self):
"""Yields all ancestors of a path.
The ancestors are returned in order from closest to the farthest one.
Yields:
Instances of `rdf_objects.PathInfo`.
"""
current = self
while True:
current = current.GetParent()
if current is None:
return
yield current
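  # Illustrative sketch: for components ["a", "b", "c"], GetAncestors() yields
  # the path infos for ["a", "b"], then ["a"], then the root (empty list).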
def UpdateFrom(self, src):
"""Merge path info records.
Merges src into self.
Args:
src: An rdfvalues.objects.PathInfo record, will be merged into self.
Raises:
ValueError: If src does not represent the same path.
"""
if not isinstance(src, PathInfo):
raise TypeError("expected `%s` but got `%s`" % (PathInfo, type(src)))
if self.path_type != src.path_type:
raise ValueError(
"src [%s] does not represent the same path type as self [%s]" %
(src.path_type, self.path_type))
if self.components != src.components:
raise ValueError(
"src [%s] does not represent the same path as self [%s]" %
(src.components, self.components))
if src.HasField("stat_entry"):
self.stat_entry = src.stat_entry
if src.last_stat_entry_timestamp is not None:
if self.last_stat_entry_timestamp is not None:
self.last_stat_entry_timestamp = max(self.last_stat_entry_timestamp,
src.last_stat_entry_timestamp)
else:
self.last_stat_entry_timestamp = src.last_stat_entry_timestamp
self.directory = self.directory or src.directory
def _ValidatePathComponent(component):
if not isinstance(component, Text):
raise TypeError("Non-unicode path component")
if not component:
raise ValueError("Empty path component")
if component == "." or component == "..":
raise ValueError("Incorrect path component: '%s'" % component)
def _ValidatePathComponents(components):
try:
for component in components:
_ValidatePathComponent(component)
except ValueError as error:
message = "Incorrect path component list '%s': %s"
raise ValueError(message % (components, error))
# TODO(hanuszczak): Instead of these two functions for categorized paths we
# should create an RDF value that wraps a string and provides these two as
# methods.
def ParseCategorizedPath(path):
"""Parses a categorized path string into type and list of components."""
components = tuple(component for component in path.split("/") if component)
if components[0:2] == ("fs", "os"):
return PathInfo.PathType.OS, components[2:]
elif components[0:2] == ("fs", "tsk"):
return PathInfo.PathType.TSK, components[2:]
elif components[0:1] == ("registry",):
return PathInfo.PathType.REGISTRY, components[1:]
elif components[0:1] == ("temp",):
return PathInfo.PathType.TEMP, components[1:]
else:
raise ValueError("Incorrect path: '%s'" % path)
def ToCategorizedPath(path_type, components):
"""Translates a path type and a list of components to a categorized path."""
try:
prefix = {
PathInfo.PathType.OS: ("fs", "os"),
PathInfo.PathType.TSK: ("fs", "tsk"),
PathInfo.PathType.REGISTRY: ("registry",),
PathInfo.PathType.TEMP: ("temp",),
}[path_type]
except KeyError:
raise ValueError("Unknown path type: `%s`" % path_type)
return "/".join(itertools.chain(prefix, components))
class ClientReference(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.ClientReference
rdf_deps = []
class HuntReference(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.HuntReference
rdf_deps = []
class CronJobReference(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.CronJobReference
rdf_deps = []
class FlowReference(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.FlowReference
rdf_deps = []
class VfsFileReference(rdf_structs.RDFProtoStruct):
"""Object reference pointing to a VFS file."""
protobuf = objects_pb2.VfsFileReference
rdf_deps = []
def ToPath(self):
"""Converts a reference into a VFS file path."""
if self.path_type == PathInfo.PathType.OS:
return os.path.join("fs", "os", *self.path_components)
elif self.path_type == PathInfo.PathType.TSK:
return os.path.join("fs", "tsk", *self.path_components)
elif self.path_type == PathInfo.PathType.REGISTRY:
return os.path.join("registry", *self.path_components)
elif self.path_type == PathInfo.PathType.TEMP:
return os.path.join("temp", *self.path_components)
raise ValueError("Unsupported path type: %s" % self.path_type)
class ApprovalRequestReference(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.ApprovalRequestReference
rdf_deps = []
class ObjectReference(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.ObjectReference
rdf_deps = [
ClientReference,
HuntReference,
CronJobReference,
FlowReference,
VfsFileReference,
ApprovalRequestReference,
]
class UserNotification(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.UserNotification
rdf_deps = [
rdfvalue.RDFDatetime,
ObjectReference,
]
class MessageHandlerRequest(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.MessageHandlerRequest
rdf_deps = [
rdfvalue.RDFDatetime,
rdf_protodict.EmbeddedRDFValue,
]
class SHA256HashID(HashID):
"""SHA-256 based hash id."""
hash_id_length = 32
@classmethod
def FromData(cls, data):
h = hashlib.sha256(data).digest()
return SHA256HashID(h)
class BlobID(HashID):
"""Blob identificator."""
hash_id_length = 32
@classmethod
def FromBlobData(cls, data):
h = hashlib.sha256(data).digest()
return BlobID(h)
class ClientPathID(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.ClientPathID
rdf_deps = [
PathID,
]
class BlobReference(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.BlobReference
rdf_deps = [
BlobID,
]
class BlobReferences(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.BlobReferences
rdf_deps = [
BlobReference,
]
class SerializedValueOfUnrecognizedType(rdf_structs.RDFProtoStruct):
"""Class used to represent objects that can't be deserialized properly.
When deserializing certain objects stored in the database (FlowResults, for
example), we don't want to fail hard if for some reason the type of the value
is unknown and can no longer be found in the system. When this happens,
SerializedValueOfUnrecognizedType is used as a stub. This way, affected
API calls won't simply fail and raise, but will rather return all the results
they can and the user will be able to fetch the data, albeit in serialized
form.
"""
protobuf = objects_pb2.SerializedValueOfUnrecognizedType
rdf_deps = []
class APIAuditEntry(rdf_structs.RDFProtoStruct):
"""Audit entry for API calls, persistend in the relational database."""
protobuf = objects_pb2.APIAuditEntry
rdf_deps = [rdfvalue.RDFDatetime]
# Use dictionaries instead of if-statements to look up mappings to increase
# branch coverage during testing. This way, all constants are accessed,
# without requiring a test for every single one.
_HTTP_STATUS_TO_CODE = {
200: objects_pb2.APIAuditEntry.OK,
403: objects_pb2.APIAuditEntry.FORBIDDEN,
404: objects_pb2.APIAuditEntry.NOT_FOUND,
500: objects_pb2.APIAuditEntry.ERROR,
501: objects_pb2.APIAuditEntry.NOT_IMPLEMENTED,
}
@classmethod
def FromHttpRequestResponse(cls, request, response):
response_code = APIAuditEntry._HTTP_STATUS_TO_CODE.get(
response.status_code, objects_pb2.APIAuditEntry.ERROR)
return cls(
http_request_path=request.full_path, # include query string
router_method_name=response.headers.get("X-API-Method", ""),
username=request.user,
response_code=response_code,
)
class SignedBinaryID(rdf_structs.RDFProtoStruct):
protobuf = objects_pb2.SignedBinaryID
| apache-2.0 | 6,188,827,126,327,592,000 | 29.789916 | 80 | 0.689183 | false |
IvarsKarpics/mxcube | gui/widgets/routine_dc_char_widget_layout.py | 1 | 3762 | #
# Project: MXCuBE
# https://github.com/mxcube
#
# This file is part of MXCuBE software.
#
# MXCuBE is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MXCuBE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with MXCuBE. If not, see <http://www.gnu.org/licenses/>.
from gui.utils import QtImport
__credits__ = ["MXCuBE collaboration"]
__license__ = "LGPLv3+"
class RoutineDCWidgetLayout(QtImport.QWidget):
def __init__(self, parent=None, name=None, flags=0):
QtImport.QWidget.__init__(self, parent, QtImport.Qt.WindowFlags(flags))
if not name:
self.setObjectName("RoutineDCWidgetLayout")
# Hardware objects ----------------------------------------------------
# Internal variables --------------------------------------------------
# Graphic elements ----------------------------------------------------
self.min_dose_radio = QtImport.QRadioButton(self)
self.min_time_radio = QtImport.QRadioButton(self)
self.dose_time_bgroup = QtImport.QButtonGroup(self)
self.dose_time_bgroup.addButton(self.min_dose_radio)
self.dose_time_bgroup.addButton(self.min_time_radio)
self.dose_limit_cbx = QtImport.QCheckBox(self)
self.time_limit_cbx = QtImport.QCheckBox(self)
self.dose_ledit = QtImport.QLineEdit(self)
self.dose_ledit.setMinimumSize(50, 0)
self.dose_ledit.setMaximumSize(50, 32767)
self.time_ledit = QtImport.QLineEdit(self)
self.time_ledit.setMinimumSize(50, 0)
self.time_ledit.setMaximumSize(50, 32767)
self.radiation_damage_cbx = QtImport.QCheckBox(self)
# Layout --------------------------------------------------------------
_main_gridlayout = QtImport.QGridLayout(self)
_main_gridlayout.addWidget(self.min_dose_radio, 0, 0) # , 2, 1)
_main_gridlayout.addWidget(self.min_time_radio, 1, 0)
_main_gridlayout.addWidget(self.dose_limit_cbx, 0, 1)
_main_gridlayout.addWidget(self.time_limit_cbx, 1, 1)
_main_gridlayout.addWidget(self.dose_ledit, 0, 2)
_main_gridlayout.addWidget(self.time_ledit, 1, 2)
_main_gridlayout.addWidget(self.radiation_damage_cbx, 2, 0, 1, 2)
_main_gridlayout.setColumnStretch(3, 0)
# SizePolicies --------------------------------------------------------
# Qt signal/slot connections ------------------------------------------
# Other ---------------------------------------------------------------
self.languageChange()
# self.resize(QtCore.QSize(380,114).expandedTo(self.minimumSizeHint()))
# self.setAttribute(QtCore.Qt.WA_WState_Polished)
def languageChange(self):
self.setWindowTitle(self.__tr("RoutineDCWidget"))
# self.dose_time_bgroup.setTitle(QtGui.QString.null)
self.min_dose_radio.setText(self.__tr("Use min dose"))
self.min_time_radio.setText(self.__tr("Use min time"))
self.dose_limit_cbx.setText(self.__tr("Dose limit MGy:"))
self.time_limit_cbx.setText(self.__tr("Total time limit (s):"))
self.radiation_damage_cbx.setText(self.__tr("Account for radiation damage"))
def __tr(self, s, c=None):
return QtImport.QApplication.translate("RoutineDCWidgetLayout", s, c)
| lgpl-3.0 | 3,860,248,042,025,868,300 | 43.258824 | 84 | 0.606326 | false |
evernote/pootle | pootle/apps/evernote_auth/migrations/0002_data__adjust_pks.py | 1 | 7861 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"""Replace existing User FKs with their PootleProfile counterparts.
This will create temporal DB fields to copy data over and avoid
integrity errors while we are at it.
"""
db.add_column('evernote_auth_evernoteaccount', 'new_user_id',
models.IntegerField(null=True, default=None))
db.execute('''
UPDATE evernote_auth_evernoteaccount as EA
JOIN auth_user AS U on EA.user_id = U.id
JOIN pootle_app_pootleprofile PP on U.id = PP.user_id
SET EA.new_user_id = PP.id;
''')
db.delete_unique('evernote_auth_evernoteaccount', 'user_id')
db.execute('''
UPDATE evernote_auth_evernoteaccount SET user_id = new_user_id;
''')
db.create_unique('evernote_auth_evernoteaccount', 'user_id')
db.delete_column('evernote_auth_evernoteaccount', 'new_user_id')
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'evernote_auth.evernoteaccount': {
'Meta': {'object_name': 'EvernoteAccount'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'evernote_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'evernote_account'", 'unique': 'True', 'to': u"orm['auth.User']"}),
'user_autocreated': ('django.db.models.fields.BooleanField', [], {})
},
'pootle_app.directory': {
'Meta': {'ordering': "['name']", 'object_name': 'Directory'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'child_dirs'", 'null': 'True', 'to': "orm['pootle_app.Directory']"}),
'pootle_path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
u'pootle_language.language': {
'Meta': {'ordering': "['code']", 'object_name': 'Language', 'db_table': "'pootle_app_language'"},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'directory': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pootle_app.Directory']", 'unique': 'True'}),
'fullname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nplurals': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'pluralequation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'specialchars': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
u'pootle_profile.pootleprofile': {
'Meta': {'object_name': 'PootleProfile', 'db_table': "'pootle_app_pootleprofile'"},
'alt_src_langs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'user_alt_src_langs'", 'blank': 'True', 'db_index': 'True', 'to': u"orm['pootle_language.Language']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'input_height': ('django.db.models.fields.SmallIntegerField', [], {'default': '5'}),
'rate': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'review_rate': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'ui_lang': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'unit_rows': ('django.db.models.fields.SmallIntegerField', [], {'default': '9'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['auth', 'pootle_profile', 'evernote_auth']
symmetrical = True
| gpl-2.0 | 7,213,035,376,260,031,000 | 67.356522 | 230 | 0.563033 | false |
mbedmicro/pyOCD | pyocd/target/builtin/target_MKL28Z512xxx7.py | 1 | 11291 | # pyOCD debugger
# Copyright (c) 2006-2013,2018 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..family.target_kinetis import Kinetis
from ..family.flash_kinetis import Flash_Kinetis
from ...core.memory_map import (FlashRegion, RamRegion, RomRegion, MemoryMap)
from ...coresight import ap
from ...coresight.cortex_m import CortexM
from ...debug.svd.loader import SVDFile
import logging
import os.path
from time import (time, sleep)
LOG = logging.getLogger(__name__)
SIM_SDID = 0x40075024
SIM_SDID_KEYATTR_MASK = 0x70
SIM_SDID_KEYATTR_SHIFT = 4
KEYATTR_DUAL_CORE = 1
RCM_MR = 0x4007f010
RCM_MR_BOOTROM_MASK = 0x6
SCG_CSR = 0x4007B010
SCG_RCCR = 0x4007B014
SCS_MASK = 0x0F000000
SCS_SHIFT = 24
DIVCORE_MASK = 0x000F0000
DIVCORE_SHIFT = 16
DIVSLOW_MASK = 0x0000000F
DIVSLOW_SHIFT = 0
SCG_FIRCCSR = 0x4007B300
FIRCEN_MASK = 1
SCG_FIRCCFG = 0x4007B308
RECOVER_TIMEOUT = 1.0 # 1 second
FLASH_ALGO = {
'load_address' : 0x20000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x4832b510, 0x60414930, 0x60814931, 0x22806801, 0x22204391, 0x60014311, 0x4448482e, 0xf860f000,
0xd0002800, 0xbd102001, 0x47702000, 0xb5104829, 0x44484929, 0xf922f000, 0xd1042800, 0x21004825,
0xf0004448, 0x4a25f9c3, 0x230168d1, 0x4319029b, 0xbd1060d1, 0x4c1fb570, 0x444c4605, 0x4b1e4601,
0x68e24620, 0xf89ef000, 0xd1052800, 0x46292300, 0x68e24620, 0xf916f000, 0x68ca4918, 0x029b2301,
0x60ca431a, 0xb570bd70, 0x460b460c, 0x46014606, 0xb0844810, 0x44484615, 0xf8bef000, 0xd10a2800,
0x90029001, 0x480b2101, 0x462b9100, 0x46314622, 0xf0004448, 0x4909f957, 0x230168ca, 0x431a029b,
0xb00460ca, 0x0000bd70, 0xd928c520, 0x40076000, 0x0000ffff, 0x00000004, 0x6b65666b, 0xf0003000,
0xd00a2800, 0x68c9492b, 0x0e094a2b, 0x447a0049, 0x03095a51, 0x2064d103, 0x20044770, 0xb4104770,
0x60032300, 0x21026041, 0x02896081, 0x492360c1, 0x158a7a0c, 0x610240a2, 0x61837ac9, 0xbc106141,
0x47704618, 0xd0022800, 0x20006181, 0x20044770, 0x28004770, 0x2004d101, 0xb4104770, 0x42191e5b,
0x421ad101, 0xbc10d002, 0x47702065, 0x428b6803, 0x6840d804, 0x18181889, 0xd2024288, 0x2066bc10,
0xbc104770, 0x47702000, 0x4288490d, 0x206bd001, 0x20004770, 0x28004770, 0x290fd008, 0x2a04d802,
0xe005d104, 0xd8012913, 0xd0012a08, 0x47702004, 0x47702000, 0x40075040, 0x00000512, 0x40020020,
0x6b65666b, 0x4605b5f8, 0x460c4616, 0xf7ff4618, 0x2800ffdb, 0x2308d12b, 0x46214632, 0xf7ff4628,
0x0007ffb8, 0x19a6d123, 0x1e7668e9, 0x91004630, 0xf922f000, 0xd0032900, 0x1c409e00, 0x1e764346,
0xd81342b4, 0x4478480a, 0x60046800, 0x20094909, 0xf00071c8, 0x4607f8f9, 0x280069a8, 0x4780d000,
0xd1032f00, 0x190468e8, 0xd9eb42b4, 0xbdf84638, 0x00000416, 0x40020000, 0xd1012a00, 0x47702004,
0x461cb5ff, 0x4615b081, 0x2304460e, 0x98014622, 0xff7ff7ff, 0xd11a0007, 0xd0172c00, 0x4478480d,
0x600e6801, 0x6800cd02, 0x490b6041, 0x71c82006, 0xf8caf000, 0x98014607, 0x28006980, 0x4780d000,
0xd1032f00, 0x1d361f24, 0xd1e72c00, 0xb0054638, 0x0000bdf0, 0x000003be, 0x40020000, 0x4604b510,
0xf7ff4608, 0x2800ff71, 0x2c00d106, 0x4904d005, 0x71c82044, 0xf8a8f000, 0x2004bd10, 0x0000bd10,
0x40020000, 0xb081b5ff, 0x460e4614, 0x23084605, 0xff3ff7ff, 0xd12a2800, 0x686868a9, 0xf8acf000,
0x42719000, 0x40014240, 0x42b7424f, 0x9800d101, 0x2c00183f, 0x1bbdd01a, 0xd90042a5, 0x490d4625,
0x447908e8, 0x600e6809, 0x2201490b, 0x0a0271ca, 0x728872ca, 0x72489804, 0xf876f000, 0xd1062800,
0x1b649800, 0x183f1976, 0xd1e42c00, 0xb0052000, 0x0000bdf0, 0x0000031a, 0x40020000, 0xd00c2800,
0xd00a2a00, 0xd21a2908, 0x447b000b, 0x18db791b, 0x0705449f, 0x0d0b0907, 0x2004110f, 0x68c04770,
0x6840e00a, 0x6880e008, 0x6800e006, 0x2001e004, 0x6900e002, 0x6940e000, 0x20006010, 0x206a4770,
0x00004770, 0xd1012b00, 0x47702004, 0x461cb5f8, 0x460e4615, 0x9f082304, 0xfedbf7ff, 0xd1192800,
0xd0172d00, 0x447a4a0f, 0x60066810, 0x2102480e, 0x990671c1, 0x681172c1, 0x60886820, 0xf824f000,
0xd0082800, 0x29009907, 0x600ed000, 0xd0012f00, 0x60392100, 0x1d24bdf8, 0x1d361f2d, 0xd1e12d00,
0x0000bdf8, 0x00000276, 0x40020000, 0xd1012800, 0x47702004, 0x4803b510, 0x71c22240, 0xf0007181,
0xbd10f803, 0x40020000, 0x2170480a, 0x21807001, 0x78017001, 0xd5fc0609, 0x06817800, 0x2067d501,
0x06c14770, 0x2068d501, 0x07c04770, 0x2069d0fc, 0x00004770, 0x40020000, 0x09032200, 0xd373428b,
0x428b0a03, 0x0b03d358, 0xd33c428b, 0x428b0c03, 0xe012d321, 0x430b4603, 0x2200d47f, 0x428b0843,
0x0903d374, 0xd35f428b, 0x428b0a03, 0x0b03d344, 0xd328428b, 0x428b0c03, 0x22ffd30d, 0xba120209,
0x428b0c03, 0x1212d302, 0xd0650209, 0x428b0b03, 0xe000d319, 0x0bc30a09, 0xd301428b, 0x1ac003cb,
0x0b834152, 0xd301428b, 0x1ac0038b, 0x0b434152, 0xd301428b, 0x1ac0034b, 0x0b034152, 0xd301428b,
0x1ac0030b, 0x0ac34152, 0xd301428b, 0x1ac002cb, 0x0a834152, 0xd301428b, 0x1ac0028b, 0x0a434152,
0xd301428b, 0x1ac0024b, 0x0a034152, 0xd301428b, 0x1ac0020b, 0xd2cd4152, 0x428b09c3, 0x01cbd301,
0x41521ac0, 0x428b0983, 0x018bd301, 0x41521ac0, 0x428b0943, 0x014bd301, 0x41521ac0, 0x428b0903,
0x010bd301, 0x41521ac0, 0x428b08c3, 0x00cbd301, 0x41521ac0, 0x428b0883, 0x008bd301, 0x41521ac0,
0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41, 0x41524601, 0x47704610, 0x0fcae05d, 0x4249d000,
0xd3001003, 0x40534240, 0x469c2200, 0x428b0903, 0x0a03d32d, 0xd312428b, 0x018922fc, 0x0a03ba12,
0xd30c428b, 0x11920189, 0xd308428b, 0x11920189, 0xd304428b, 0xd03a0189, 0xe0001192, 0x09c30989,
0xd301428b, 0x1ac001cb, 0x09834152, 0xd301428b, 0x1ac0018b, 0x09434152, 0xd301428b, 0x1ac0014b,
0x09034152, 0xd301428b, 0x1ac0010b, 0x08c34152, 0xd301428b, 0x1ac000cb, 0x08834152, 0xd301428b,
0x1ac0008b, 0xd2d94152, 0x428b0843, 0x004bd301, 0x41521ac0, 0xd2001a41, 0x46634601, 0x105b4152,
0xd3014610, 0x2b004240, 0x4249d500, 0x46634770, 0xd300105b, 0xb5014240, 0x46c02000, 0xbd0246c0,
0x40020004, 0x00000000, 0x00000000, 0x00100000, 0x00200000, 0x00400000, 0x00800000, 0x00000000,
0x00800000, 0x00000000,
],
'pc_init' : 0x20000021,
'pc_unInit': 0x20000049,
'pc_program_page': 0x200000A7,
'pc_erase_sector': 0x20000075,
'pc_eraseAll' : 0x2000004D,
'static_base' : 0x20000000 + 0x00000020 + 0x00000624,
'begin_stack' : 0x20000000 + 0x00000800,
'begin_data' : 0x20000000 + 0x00000A00,
'page_size' : 0x00000200,
# All keys above are auto-generated. The following are added or modified.
'analyzer_supported' : True, # [modified] default is False
'analyzer_address' : 0x1fffa000, # [modified] default is zero. Use 8K block before flash algo. Can be any unused SRAM.
'page_buffers' : [0x20000a00, 0x20001200], # [added] Use areas above algo. Note 'begin_data' is unused if double buffering. Can be any unused SRAM.
'min_program_length' : 4 # [added] See FSL_FEATURE_FLASH_PFLASH_BLOCK_WRITE_UNIT_SIZE in KSDK features header file
}
class Flash_kl28z(Flash_Kinetis):
def __init__(self, target):
super(Flash_kl28z, self).__init__(target, FLASH_ALGO)
self._saved_firccsr = 0
self._saved_rccr = 0
def prepare_target(self):
"""!
This function sets up target clocks to ensure that flash is clocked at the maximum
of 24MHz. Doing so gets the best flash programming performance. The FIRC clock source
is used so that there is no dependency on an external crystal frequency.
"""
# Enable FIRC.
value = self.target.read32(SCG_FIRCCSR)
self._saved_firccsr = value
value |= FIRCEN_MASK
self.target.write32(SCG_FIRCCSR, value)
# Switch system to FIRC, core=48MHz (/1), slow=24MHz (/2).
# Flash and the bus are clocked from the slow clock, and its max is 24MHz,
# so there is no benefit from raising the core clock further.
self._saved_rccr = self.target.read32(SCG_RCCR)
self.target.write32(SCG_RCCR, (0x3 << SCS_SHIFT) | (1 << DIVSLOW_SHIFT))
csr = self.target.read32(SCG_CSR)
LOG.debug("SCG_CSR = 0x%08x", csr)
def restore_target(self):
"""! Restore clock registers to original values."""
self.target.write32(SCG_FIRCCSR, self._saved_firccsr)
self.target.write32(SCG_RCCR, self._saved_rccr)
class KL28x(Kinetis):
SINGLE_MAP = MemoryMap(
FlashRegion(name='flash', start=0, length=0x80000, blocksize=0x800, is_boot_memory=True,
flash_class=Flash_kl28z,
algo=FLASH_ALGO),
RamRegion(name='ram', start=0x1fff8000, length=0x20000),
RamRegion(name='usb ram', start=0x40100000, length=0x800)
)
DUAL_MAP = MemoryMap(
FlashRegion(name='flash', start=0, length=0x80000, blocksize=0x800, is_boot_memory=True,
flash_class=Flash_kl28z,
algo=FLASH_ALGO),
RomRegion(name='core1 imem alias', start=0x1d200000, length=0x40000),
RamRegion(name='core0 ram', start=0x1fffa000, length=0x18000),
RomRegion(name='core1 imem', start=0x2d200000, length=0x40000),
RamRegion(name='core1 dmem', start=0x2d300000, length=0x8000),
RamRegion(name='usb ram', start=0x40100000, length=0x800)
)
def __init__(self, session):
super(KL28x, self).__init__(session, self.SINGLE_MAP)
self.is_dual_core = False
self._svd_location = SVDFile.from_builtin("MKL28T7_CORE0.svd")
def create_init_sequence(self):
seq = super(KL28x, self).create_init_sequence()
seq.wrap_task('discovery',
lambda seq: seq
# The KL28 will lock up if an invalid AP is accessed, so replace the AP scan with a
# fixed list of known APs.
.replace_task('find_aps', self.create_kl28_aps)
# Before creating cores, determine which memory map should be used.
.insert_before('create_cores',
('detect_dual_core', self.detect_dual_core)
)
)
return seq
def create_kl28_aps(self):
"""! @brief Set the fixed list of valid AP numbers for KL28."""
self.dp.valid_aps = [0, 1, 2]
def detect_dual_core(self):
# Check if this is the dual core part.
sdid = self.aps[0].read_memory(SIM_SDID)
keyattr = (sdid & SIM_SDID_KEYATTR_MASK) >> SIM_SDID_KEYATTR_SHIFT
LOG.debug("KEYATTR=0x%x SDID=0x%08x", keyattr, sdid)
self.is_dual_core = (keyattr == KEYATTR_DUAL_CORE)
if self.is_dual_core:
LOG.info("KL28 is dual core")
self.memory_map = self.DUAL_MAP
def post_connect_hook(self):
# Disable ROM vector table remapping.
self.aps[0].write32(RCM_MR, RCM_MR_BOOTROM_MASK)
| apache-2.0 | -5,302,605,662,147,541,000 | 49.182222 | 153 | 0.715703 | false |
the0forge/sp | frontend/models.py | 1 | 28529 | # -*- coding: utf-8 -*-
from django.contrib.auth.models import AbstractBaseUser
from django.core.urlresolvers import reverse
from django.db import models
from django.template.defaultfilters import slugify
from datetime import datetime
from django.conf import settings
from decimal import Decimal
from colorfield.fields import ColorField
from frontend.utils import phone_for_search
from .managers import SPUserManager
from db_settings.models import Settings
class SPUser(AbstractBaseUser):
F_WEIGHT = [
('bold', 'bold'),
('bolder', 'bolder'),
('lighter', 'lighter'),
('normal', 'normal')
]
username = models.CharField(max_length=64, unique=True)
email = models.EmailField(
verbose_name='email address',
max_length=255,
unique=True,
null=True,
blank=True
)
is_active = models.BooleanField(default=True)
is_admin = models.BooleanField(default=False)
font_size = models.IntegerField(default=12, null=True, blank=True)
font_weight = models.CharField(choices=F_WEIGHT, default='normal', max_length=16, null=True, blank=True)
bg_color = ColorField(default='#FFFFFF', null=True, blank=True)
label_bg_color = ColorField(default='#EEEEEE', null=True, blank=True)
font_color = ColorField(default='#2B2B2B', null=True, blank=True)
jodabrian_visible = models.BooleanField(default=True)
USERNAME_FIELD = 'username'
objects = SPUserManager()
def get_full_name(self):
# The user is identified by their email address
return self.email
def get_short_name(self):
# The user is identified by their email address
return self.email
def __unicode__(self):
return self.username
def has_perm(self, perm, obj=None):
"Does the user have a specific permission?"
# Simplest possible answer: Yes, always
return True
def has_module_perms(self, app_label):
"Does the user have permissions to view the app `app_label`?"
# Simplest possible answer: Yes, always
return True
@property
def is_staff(self):
"Is the user a member of staff?"
# Simplest possible answer: All admins are staff
return self.is_admin
class ImportNote(models.Model):
""" Import notes
"""
model = models.CharField(max_length=50)
model_id = models.PositiveIntegerField()
type = models.CharField(max_length=50, default=None)
text = models.TextField()
note = models.TextField(null=True, blank=True)
src_model = models.CharField(max_length=50, null=True, blank=True)
src_model_id_field = models.CharField(max_length=50, null=True, blank=True)
src_model_id_text = models.CharField(max_length=50, null=True, blank=True)
def __unicode__(self):
return self.type + ' Import Note for ' + self.model + '.id = ' + str(self.model_id)
class Customer(models.Model):
""" Customer; makes an order from SmartPractice
"""
registration = models.CharField(max_length=255)
name = models.CharField(max_length=255)
customer_type = models.CharField(max_length=255)
address_line_1 = models.CharField(max_length=255)
address_line_2 = models.CharField(max_length=255)
suburb = models.CharField(max_length=100)
state = models.CharField(max_length=100)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=100, default='Australia')
telephone = models.CharField(max_length=40)
telephone_clean = models.CharField(max_length=40) # auto-generated field
fax = models.CharField(max_length=40)
email = models.EmailField(max_length=255)
delivery_attn = models.CharField(max_length=255)
delivery_address_line_1 = models.CharField(max_length=255)
delivery_address_line_2 = models.CharField(max_length=255)
delivery_suburb = models.CharField(max_length=100)
delivery_state = models.CharField(max_length=100)
delivery_postcode = models.CharField(max_length=10)
delivery_country = models.CharField(max_length=100, default='Australia')
from_src_company_id = models.IntegerField(null=True, blank=True)
from_src_membadd_id = models.IntegerField(null=True, blank=True)
slug = models.SlugField(unique=True, max_length=150)
notes = models.ManyToManyField('Note', related_name='c_notes', blank=True)
last_read = models.DateTimeField(auto_now=True, null=True, blank=True, editable=False)
def save(self, *args, **kwargs):
self.set_slug()
self.telephone_clean = phone_for_search(self.telephone)
super(Customer, self).save(*args, **kwargs)
    def set_slug(self):
        if not self.slug:
            # Customer.objects.last() is None on an empty table, so guard it.
            last = Customer.objects.last()
            self.slug = "%i-%s" % (last.pk + 1 if last else 1, slugify(self.name))
def contacts_with_email(self):
return self.contacts.exclude(email__isnull=True).exclude(email='')
@property
def contacts_data(self):
return u' '.join([x.show_info() for x in self.contacts.all() if x])
@property
def same_delivery_address(self):
if self.address_line_1 == self.delivery_address_line_1 and \
self.address_line_2 == self.delivery_address_line_2 and \
self.delivery_suburb == self.suburb and \
self.delivery_state == self.state and \
self.delivery_postcode == self.postcode and \
self.name == self.delivery_attn:
return True
return False
@property
def parsed_name(self):
try:
force_company = bool(Settings.objects.get(key='myob_force_company').value)
except Settings.DoesNotExist:
pass
else:
if force_company:
return {
'l': self.name,
'f': ''
}
data = self.name.split()
if not len(data):
return {
'l': 'without-name-id-%s' % self.pk,
'f': ''
}
pref = ''
if data[0].upper() in ('DR', 'MR', 'MASTER', 'MRS', 'MISS', 'MS', 'SIR', 'MADAM', 'PROF'):
pref = data[0]
data = data[1:]
if len(data) != 2:
return {
'l': self.name,
'f': ''
}
return {
'f': data[0],
'l': ('%s %s' % (pref, data[1])).strip()
}
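    # Illustrative parsing (derived from the logic above): "DR John Smith"
    # yields {'f': 'John', 'l': 'DR Smith'}, while anything that is not
    # "<optional title> First Last" (e.g. a company name) comes back whole
    # in 'l' with an empty 'f'.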
def __unicode__(self):
return self.name
class CustomerContact(models.Model):
""" Contact for a Customer
"""
customer = models.ForeignKey(Customer, related_name='contacts')
first_name = models.CharField(max_length=100)
surname = models.CharField(max_length=100)
phone = models.CharField(max_length=40, blank=True, null=True)
email = models.EmailField(max_length=255, blank=True, null=True)
def __unicode__(self):
return (u'%s %s' % (self.first_name, self.surname)).strip()
def show_info(self):
if not self.phone and not self.email:
return self.__unicode__()
return u'%s (%s)' % (
self.__unicode__(),
('%s %s' % (self.email, self.phone)).strip()
)
@property
def display_email(self):
if not self.email:
return ''
return '%s %s <%s>'.strip() % (
self.first_name,
self.surname,
self.email
)
def info(self):
ret = '%s %s' % (self.first_name, self.surname)
if self.phone:
ret = '%s, %s' % (ret, self.phone)
if self.email:
ret = '%s, %s' % (ret, self.email)
return ret
class Size(models.Model):
""" Product Size/Dimensions
"""
width = models.DecimalField(max_digits=10, decimal_places=4, null=True)
height = models.DecimalField(max_digits=10, decimal_places=4, null=True)
depth = models.DecimalField(max_digits=10, decimal_places=4, null=True)
units = models.CharField(max_length=80, null=True)
notes = models.TextField(null=True)
sub_notes = models.TextField(null=True)
class Meta:
ordering = ('width', 'height', 'depth', 'units')
def __unicode__(self):
if self.width and self.height and self.depth:
return "W:%d H:%d D:%d" % (self.width, self.height, self.depth)
elif self.width and self.height:
return "W:%d H:%d" % (self.width, self.height)
else:
return self.notes
class Medium(models.Model):
""" Product Medium
"""
name = models.CharField(max_length=100)
description = models.CharField(max_length=400)
notes = models.TextField(null=True)
class Meta:
ordering = ('name',)
def __unicode__(self):
return self.description
class Supplier(models.Model):
""" Supplier of Products SP sells (SP, JH, AIO, ...)
"""
code = models.CharField(max_length=20)
name = models.CharField(max_length=150)
class Meta:
ordering = ('code', 'name')
def __unicode__(self):
return "%s : %s" % (self.code, self.name)
class Product(models.Model):
""" Products SmartPractice sells; supplied by Suppliers
"""
code = models.CharField(max_length=60)
name = models.CharField(max_length=100)
type = models.CharField(max_length=150)
description = models.CharField(max_length=255)
notes = models.ManyToManyField('Note', related_name='p_notes', blank=True)
message = models.TextField()
current_stock = models.PositiveIntegerField(default=0)
minimum_stock = models.PositiveIntegerField(default=0)
sp_cost = models.DecimalField(max_digits=12, decimal_places=2, null=True)
size = models.ForeignKey(Size, related_name='+', on_delete=models.PROTECT)
medium = models.ForeignKey(Medium, related_name='+', null=True, on_delete=models.PROTECT)
supplier = models.ForeignKey(Supplier, related_name='products', on_delete=models.PROTECT)
royalty_group = models.ForeignKey('RoyaltyGroup', null=True, on_delete=models.PROTECT)
manual_royalty = models.PositiveSmallIntegerField(help_text='[0..100]%', null=True)
last_read = models.DateTimeField(auto_now=True, null=True, blank=True, editable=False)
@property
def royalty(self):
        return self.manual_royalty or (self.royalty_group.royalty if self.royalty_group else 0)
@property
def default_price(self):
if self.price_levels.exists():
return self.price_levels.order_by('-cost_per_item')[0].cost_per_item
if not self.sp_cost:
return 0
return self.sp_cost * (1 + Decimal(self.royalty)/100)
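    # e.g. with no price levels, sp_cost=Decimal('100') and royalty=15, the
    # default price is Decimal('115.00') -- cost marked up by the royalty %.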
@property
def back_orders(self):
ret = []
for a in self.ordered_list.all():
for b in a.back_orders.filter(complete=False):
ret.append(b)
return ret
@property
def stock_out(self):
"""
@return: sum of qty from all active orders
"""
qty = 0
for o in self.ordered_list.all():
if o.order.last_status not in (OrderStatus.CANCELLED, OrderStatus.SHIPPED):
qty += o.quantity
return qty
@property
def last_order(self):
try:
return self.ordered_list.order_by('-order__order_date')[0]
except IndexError:
return None
class Meta:
ordering = ('name',)
def __unicode__(self):
return "%s (%s)" % (self.name, self.code)
class Catalog(models.Model):
""" Catalog's SmartPractice advertise products in
"""
name = models.CharField(max_length=100)
def __unicode__(self):
return self.name
class Meta:
ordering = ('name',)
class CatalogIssue(models.Model):
""" An Issue of a Catalog
"""
catalog = models.ForeignKey(Catalog, related_name='issues')
products = models.ManyToManyField(Product, related_name='catalog_issues', through='CatalogIssueProduct')
issue = models.CharField(max_length=80)
def __unicode__(self):
return '%s / %s' % (self.catalog.name, self.issue)
class CatalogIssueProduct(models.Model):
""" Product advertised in specific issue of a catalog
"""
catalog_issue = models.ForeignKey(CatalogIssue)
product = models.ForeignKey(Product, related_name='catalog_links')
page_ref = models.PositiveSmallIntegerField()
img_ref = models.PositiveSmallIntegerField()
sub_ref = models.CharField(max_length=3, null=True, blank=True)
def __unicode__(self):
return "%s features in Issue %s of Catalog %s on Page %s Reference %s, %s" % (
self.product,
self.catalog_issue,
self.catalog_issue.catalog,
self.page_ref,
self.img_ref,
self.sub_ref)
class RoyaltyGroup(models.Model):
""" Price Level Group for a PriceLevel; 'AR', 'LI', etc..
"""
name = models.CharField(max_length=64)
description = models.CharField(max_length=255, null=True, blank=True)
royalty = models.DecimalField(max_digits=12, decimal_places=2, default=0)
class Meta:
ordering = ('name',)
def __unicode__(self):
return self.name
class PriceLevel(models.Model):
""" Price Level for a Product; products can have multiple price levels
"""
product = models.ForeignKey(Product, related_name='price_levels', null=True, blank=True)
min_amount = models.PositiveIntegerField()
max_amount = models.PositiveIntegerField(blank=True, null=True)
cost_per_item = models.DecimalField(max_digits=12, decimal_places=2, null=True, blank=True)
notes = models.TextField(null=True, blank=True)
def __unicode__(self):
return 'Level #%s' % self.pk
class Meta:
ordering = ('-min_amount',)
class Order(models.Model):
""" Order placed by a Customer for Product(s) sold by SmartPractice
"""
customer = models.ForeignKey('Customer', related_name='orders')
products = models.ManyToManyField('Product', related_name='+', through='OrderProduct')
shipping_cost = models.DecimalField(max_digits=9, decimal_places=2, default=0)
total_cost = models.DecimalField(max_digits=12, decimal_places=2, default=0) # total net_cost + shipping_cost
total_price = models.DecimalField(max_digits=12, decimal_places=2, default=0) # total net_price + shipping_cost
order_date = models.DateTimeField(default=datetime.now)
wanted_by = models.DateTimeField(default=datetime.now)
invoice_company_name = models.CharField(max_length=255)
invoice_company_reg = models.CharField(max_length=120)
invoice_address_line_1 = models.CharField(max_length=255)
invoice_address_line_2 = models.CharField(max_length=255)
invoice_suburb = models.CharField(max_length=100)
invoice_state = models.CharField(max_length=100)
invoice_postcode = models.CharField(max_length=10)
invoice_country = models.CharField(max_length=100)
shipping_attn = models.CharField(max_length=255)
shipping_address_line_1 = models.CharField(max_length=255)
shipping_address_line_2 = models.CharField(max_length=255)
shipping_suburb = models.CharField(max_length=100)
shipping_state = models.CharField(max_length=100)
shipping_postcode = models.CharField(max_length=10)
shipping_country = models.CharField(max_length=100)
from_src_order_id = models.IntegerField(null=True, blank=True)
from_borders_fakeid = models.IntegerField(null=True, blank=True)
order_notes = models.CharField(max_length=510, null=True, blank=True)
notes = models.ManyToManyField('Note', related_name='o_notes')
last_read = models.DateTimeField(auto_now=True, null=True, blank=True, editable=False)
@property
def order_date_str(self):
return '%s' % self.order_date.strftime("%Y-%m-%d")
@property
def order_month_str(self):
return '%s' % self.order_date.strftime("%Y-%m")
@property
def last_invoice(self):
return self.invoices.order_by('-timestamp')[0] if self.invoices.exists() else None
@property
def last_status(self):
return self.statuses.order_by('-timestamp')[0] if self.statuses.count() else None
@property
def invoice_url(self):
return reverse('order_print_invoice', kwargs={
'pk': self.pk,
'filename': self.last_invoice.filename
})
def __unicode__(self):
return 'Order %s' % self.pk
@property
def summary(self):
data = {
'discount': Decimal(0),
'tax': Decimal(0),
'sub_cost': Decimal(0),
'sub_price': Decimal(0),
'sub_profit': Decimal(0),
'gross_cost': Decimal(0),
'gross_price': Decimal(0),
'gross_profit': Decimal(0),
'net_cost': Decimal(0),
'net_price': Decimal(0),
'net_profit': Decimal(0),
'qty': 0
}
for order_product in self.ordered_products.all():
data['discount'] += order_product.discount_sum
data['tax'] += order_product.tax_sum
data['sub_cost'] += order_product.sub_cost
data['sub_price'] += order_product.sub_price
data['sub_profit'] += order_product.sub_profit
data['gross_cost'] += order_product.gross_cost
data['gross_price'] += order_product.gross_price
data['gross_profit'] += order_product.gross_profit
data['net_cost'] += order_product.net_cost
data['net_price'] += order_product.net_price
data['net_profit'] += order_product.net_profit
data['qty'] += order_product.quantity
data['gross_price_with_shipping'] = data['gross_price'] + self.shipping_cost
return data
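    # Tier naming used by summary(): sub = price with royalty applied,
    # gross = sub minus discount, net = gross plus tax; shipping is only
    # folded into gross_price_with_shipping.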
def total_recount(self, save=False):
data = self.summary
self.total_price = float(data['net_price']) + float(self.shipping_cost)
self.total_cost = float(data['net_cost']) + float(self.shipping_cost)
if save:
self.save(total_recount=False)
def save(self, total_recount=True, *args, **kwargs):
if total_recount:
self.total_recount(save=False)
super(Order, self).save(*args, **kwargs)
class Meta:
ordering = ('-order_date',)
class OrderStatus(models.Model):
""" Status for an Order; an Order can have multiple OrderStatus's as it progresses from Processing -> Shipped etc
"""
PROCESSING = 'PS'
CONFIRMED = 'CF'
AWAITING_PAYMENT = 'AP'
AWAITING_STOCK = 'AS'
CANCELLED = 'CN'
IN_FORFILLMENT = 'IF'
SHIPPED = 'SD'
ORDER_STATUS_CHOICES = (
(PROCESSING, 'Processing'),
(CONFIRMED, 'Confirmed'),
(AWAITING_PAYMENT, 'Awaiting Payment'),
(AWAITING_STOCK, 'Awaiting Stock (Back Order)'),
(CANCELLED, 'Cancelled'),
        (IN_FORFILLMENT, 'In Fulfilment'),
(SHIPPED, 'Complete (Shipped)'),
)
STATUSES = [x[0] for x in ORDER_STATUS_CHOICES]
order = models.ForeignKey(Order, related_name='statuses')
status = models.CharField(max_length=2, choices=ORDER_STATUS_CHOICES, default=PROCESSING)
notes = models.TextField()
timestamp = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True)
def __unicode__(self):
return '%s - %s' % (self.order, self.status)
def save(self, *args, **kwargs):
super(OrderStatus, self).save(*args, **kwargs)
class Meta:
ordering = ('-timestamp',)
class OrderProduct(models.Model):
""" 'Line Item' for an order; contains Product ordered on an Order with its quantity
"""
order = models.ForeignKey(Order, related_name='ordered_products')
product = models.ForeignKey(Product, related_name='ordered_list')
quantity = models.PositiveSmallIntegerField()
last_quantity = models.PositiveSmallIntegerField(default=0)
unit_price = models.DecimalField(max_digits=9, decimal_places=2, default=0)
discount_percentage = models.DecimalField(max_digits=5, decimal_places=2, default=0)
back_order = models.BooleanField(default=False)
with_tax = models.BooleanField(default=False)
class Meta:
ordering = ('product__code',)
def __unicode__(self):
return '%s %s' % (self.order, self.product)
def stock_update(self, quantity=None):
if quantity is None:
quantity = self.quantity
if self.last_quantity != quantity:
bo_query = self.back_orders.filter(complete=False)
delta = quantity - self.last_quantity
if delta > 0: # ordered more
if delta > self.product.current_stock:
bo_amount = delta - self.product.current_stock
self.product.current_stock = 0
# Create BackOrder
if bo_query.exists():
bo_query.update(amount=models.F('amount')+bo_amount) # must be only one!
else:
self.back_orders.create(amount=bo_amount)
else:
self.product.current_stock -= delta
else: # ordered less
delta *= -1 # delta -50 => 50
if bo_query.exists():
bo = bo_query.get()
delta_with_bo = bo.amount - delta
if delta_with_bo > 0:
bo.amount = delta_with_bo
bo.save()
else:
bo.delete()
self.product.current_stock -= delta_with_bo
else:
self.product.current_stock += delta
self.product.save()
self.last_quantity = quantity
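    # Worked example of the logic above: with last_quantity=2, quantity=10
    # and current_stock=5, delta=8 exceeds stock, so stock drops to 0 and a
    # BackOrder for the remaining 3 units is created (or an open one is
    # topped up); reducing an order first shrinks any open back order, and
    # only the surplus beyond it is returned to stock.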
def save(self, *args, **kwargs):
if self.pk:
            last_status = self.order.last_status
            q = 0 if last_status and last_status.status == OrderStatus.CANCELLED else None
self.stock_update(quantity=q)
self.back_order = True if self.back_orders.exists() else False
super(OrderProduct, self).save(*args, **kwargs)
def delete(self, using=None):
self.stock_update(quantity=0)
super(OrderProduct, self).delete(using)
@property
def order_date_str(self):
return self.order.order_date_str
@property
def order_month_str(self):
return self.order.order_month_str
@property
def supplier(self):
return self.product.supplier.code
@property
def cost(self):
try:
return self.product.sp_cost * self.quantity
except TypeError:
return 0
@property
def price(self):
try:
return self.unit_price * self.quantity
except TypeError:
return 0
@property
def profit(self):
return self.price - self.cost
    # Sub: the base tier (sub_price applies the product royalty to the raw price)
@property
def sub_cost(self):
return self.cost
@property
def sub_price(self):
return self.price * (1 + Decimal(self.product.royalty)/100)
@property
def sub_profit(self):
return self.sub_price - self.sub_cost
# Gross - including discount
@property
def discount_sum(self):
return self.price * self.discount_percentage / 100
@property
def gross_cost(self):
return self.sub_cost
@property
def gross_price(self):
return self.sub_price - self.discount_sum
@property
def gross_profit(self):
return self.gross_price - self.gross_cost
# NET - including TAX
@property
def tax_sum(self):
if self.with_tax:
return self.sub_price * settings.TAX_PERCENT / 100
return Decimal(0)
@property
def net_cost(self):
return self.gross_cost + self.tax_sum
''' Stevo, maybe we should take TAX from cost (not price)?
def cost_tax_sum(self):
if self.with_tax:
return self.gross_cost * settings.TAX_PERCENT / 100
return 0.00
return self.gross_cost + self.cost_tax_sum()
'''
@property
def net_price(self):
return self.gross_price + self.tax_sum
@property
def net_profit(self):
return self.net_price - self.net_cost
class Company(models.Model):
""" The various companies SmartPractice trade as; 'CAA' 'SP' etc
"""
name = models.CharField(max_length=255)
legal_name = models.CharField(max_length=255, null=True)
phone = models.CharField(max_length=25)
fax = models.CharField(max_length=25)
registration = models.CharField(max_length=100)
logo_img = models.ImageField(upload_to='company_logos', max_length=255, height_field='logo_height', width_field='logo_width', null=True)
logo_height = models.PositiveSmallIntegerField(null=True)
logo_width = models.PositiveSmallIntegerField(null=True)
pobox = models.CharField(max_length=255, null=True)
address = models.CharField(max_length=255, null=True)
payments = models.CharField(max_length=255, null=True)
abn = models.CharField(max_length=255, null=True)
from_mail = models.EmailField(null=True)
default_invoice = models.ForeignKey('Document', null=True, blank=True, related_name='default_invoices')
default_packing_slip = models.ForeignKey('Document', null=True, blank=True, related_name='default_packing_slip')
def __unicode__(self):
return self.name
class Invoice(models.Model):
""" An Invoice for an Order issued by a particular Company that SmartPractices trades as
"""
order = models.ForeignKey(Order, related_name='invoices')
company = models.ForeignKey(Company, related_name='+')
number = models.PositiveIntegerField()
timestamp = models.DateTimeField(default=datetime.now, auto_now_add=True)
@property
def filename(self):
return 'invoice_%s_%s.pdf' % (self.number, slugify(self.order.customer.name))
def __unicode__(self):
return 'Order %s; Number: %s' % (self.order, self.number)
class Note(models.Model):
text = models.TextField()
create_dt = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return '%s...' % self.text[:30]
class BackOrder(models.Model):
order_product = models.ForeignKey('OrderProduct', related_name='back_orders')
amount = models.IntegerField()
timestamp = models.DateTimeField(auto_now_add=True)
complete = models.BooleanField(default=False)
class Meta:
ordering = ('-timestamp',)
class StockAdjust(models.Model):
R_NEW, R_ERROR, R_TAKE = range(3)
REASONS = (
(R_NEW, 'New stock'),
(R_ERROR, 'Stock Error'),
(R_TAKE, 'Stock take')
)
product = models.ForeignKey('Product', related_name='stock_adjust')
current_amount = models.IntegerField()
added_amount = models.IntegerField()
user = models.ForeignKey(settings.AUTH_USER_MODEL)
timestamp = models.DateTimeField(auto_now_add=True)
reason = models.PositiveSmallIntegerField(choices=REASONS)
def __unicode__(self):
return '%s added %s' % (self.product, self.added_amount)
def save(self, *args, **kwargs):
created = False if self.pk else True
super(StockAdjust, self).save(*args, **kwargs)
if created:
self.product.current_stock += self.added_amount
self.product.save()
class Meta:
ordering = ('-timestamp', )
class Document(models.Model):
title = models.CharField(max_length=64)
description = models.TextField(blank=True, null=True)
dt = models.DateTimeField(auto_now_add=True)
file = models.FileField(blank=True, null=True, upload_to='documents')
def __unicode__(self):
return self.title
class Meta:
ordering = ('-dt',)
| gpl-3.0 | 866,189,637,661,283,300 | 33.134236 | 140 | 0.604122 | false |
oliver-sanders/cylc | cylc/flow/scripts/cylc_broadcast.py | 1 | 13148 | #!/usr/bin/env python3
# THIS FILE IS PART OF THE CYLC SUITE ENGINE.
# Copyright (C) 2008-2019 NIWA & British Crown (Met Office) & Contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""cylc [control] broadcast|bcast [OPTIONS] REG
Override [runtime] config in targeted namespaces in a running suite.
Uses for broadcast include making temporary changes to task behaviour,
and task-to-downstream-task communication via environment variables.
A broadcast can target any [runtime] namespace for all cycles or for a
specific cycle. If a task is affected by specific-cycle and all-cycle
broadcasts at once, the specific takes precedence. If a task is affected
by broadcasts to multiple ancestor namespaces, the result is determined
by normal [runtime] inheritance. In other words, it follows this order:
all:root -> all:FAM -> all:task -> tag:root -> tag:FAM -> tag:task
Broadcasts persist, even across suite restarts, until they expire when
their target cycle point is older than the oldest current cycle point in
the suite, or until they are explicitly cancelled with this command.
All-cycle broadcasts do not expire.
For each task the final effect of all broadcasts to all namespaces is
computed on the fly just prior to job submission. The --cancel and
--clear options simply cancel (remove) active broadcasts; they do not
act directly on the final task-level result. Consequently, for example,
you cannot broadcast to "all cycles except Tn" with an all-cycle
broadcast followed by a cancel to Tn (there is no direct broadcast to Tn
to cancel); and you cannot broadcast to "all members of FAMILY except
member_n" with a general broadcast to FAMILY followed by a cancel to
member_n (there is no direct broadcast to member_n to cancel).
To broadcast a variable to all tasks (quote items with internal spaces):
% cylc broadcast -s "[environment]VERSE = the quick brown fox" REG
To do the same with a file:
% cat >'broadcast.rc' <<'__RC__'
% [environment]
% VERSE = the quick brown fox
% __RC__
% cylc broadcast -F 'broadcast.rc' REG
To cancel the same broadcast:
% cylc broadcast --cancel "[environment]VERSE" REG
If -F FILE was used, the same file can be used to cancel the broadcast:
% cylc broadcast -G 'broadcast.rc' REG
Use -d/--display to see active broadcasts. Multiple --cancel options or
multiple --set and --set-file options can be used on the same command line.
Multiple --set and --set-file options are cumulative.
The --set-file=FILE option can be used when broadcasting multiple values, or
when the value contains newline or other metacharacters. If FILE is "-", read
from standard input.
Broadcast cannot change [runtime] inheritance.
See also 'cylc reload' - reload a modified suite definition at run time."""
import sys
if '--use-ssh' in sys.argv[1:]:
sys.argv.remove('--use-ssh')
from cylc.flow.remote import remrun
if remrun():
sys.exit(0)
import re
from tempfile import NamedTemporaryFile
from ansimarkup import parse as cparse
from cylc.flow.task_id import TaskID
from cylc.flow.terminal import cli_function
from cylc.flow.exceptions import UserInputError
from cylc.flow.print_tree import print_tree
from cylc.flow.option_parsers import CylcOptionParser as COP
from cylc.flow.broadcast_report import (
get_broadcast_bad_options_report, get_broadcast_change_report)
from cylc.flow.cfgspec.suite import SPEC, upg
from cylc.flow.network.client import SuiteRuntimeClient
from cylc.flow.parsec.config import ParsecConfig
from cylc.flow.parsec.validate import cylc_config_validate
REC_ITEM = re.compile(r'^\[([^\]]*)\](.*)$')
def get_padding(settings, level=0, padding=0):
"""Return the left padding for displaying a setting."""
level += 1
for key, val in settings.items():
tmp = level * 2 + len(key)
if tmp > padding:
padding = tmp
if isinstance(val, dict):
padding = get_padding(val, level, padding)
return padding
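# For example, get_padding({'a': {'bb': 1}}) returns 6: the deepest key 'bb'
# sits at level 2, giving 2 * 2 + len('bb') = 6.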
def get_rdict(left, right=None):
"""Check+transform left=right into a nested dict.
left can be key, [key], [key1]key2, [key1][key2], [key1][key2]key3, etc.
"""
if left == "inherit":
raise UserInputError(
"Inheritance cannot be changed by broadcast")
rdict = {}
cur_dict = rdict
tail = left
while tail:
match = REC_ITEM.match(tail)
if match:
sect, tail = match.groups()
if tail:
# [sect]... = right
cur_dict[sect.strip()] = {}
cur_dict = cur_dict[sect.strip()]
else:
# [sect] = right
cur_dict[sect.strip()] = right
else:
# item = right
cur_dict[tail.strip()] = right
tail = None
upg({'runtime': {'__MANY__': rdict}}, 'test')
cylc_config_validate(rdict, SPEC['runtime']['__MANY__'])
return rdict
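# For example, get_rdict("[environment]VERSE", "the quick brown fox") returns
# {'environment': {'VERSE': 'the quick brown fox'}}, after upgrade/validation
# against the [runtime] spec.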
def files_to_settings(settings, setting_files, cancel_mode=False):
"""Parse setting files, and append to settings."""
cfg = ParsecConfig(
SPEC['runtime']['__MANY__'], validator=cylc_config_validate)
for setting_file in setting_files:
if setting_file == '-':
with NamedTemporaryFile() as handle:
handle.write(sys.stdin.read().encode())
handle.seek(0, 0)
cfg.loadcfg(handle.name)
else:
cfg.loadcfg(setting_file)
stack = [([], cfg.get(sparse=True))]
while stack:
keys, item = stack.pop()
if isinstance(item, dict):
for key, value in item.items():
stack.append((keys + [key], value))
else:
settings.append({})
cur_setting = settings[-1]
while keys:
key = keys.pop(0)
if keys:
cur_setting[key] = {}
cur_setting = cur_setting[key]
elif cancel_mode:
cur_setting[key] = None
else:
cur_setting[key] = item
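# For example, a file containing "[environment]\n    VERSE = x" appends
# {'environment': {'VERSE': 'x'}} to `settings`; with cancel_mode=True the
# leaf is recorded as None so the item is cancelled rather than set.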
def report_bad_options(bad_options, is_set=False):
bad_opts = get_broadcast_bad_options_report(bad_options, is_set=is_set)
if bad_opts is not None:
return cparse(f'<red>{bad_opts}</red>')
return bad_opts
def get_option_parser():
"""CLI for "cylc broadcast"."""
parser = COP(__doc__, comms=True)
parser.add_option(
"-p", "--point", metavar="CYCLE_POINT",
help="Target cycle point. More than one can be added. "
"Defaults to '*' with --set and --cancel, "
"and nothing with --clear.",
action="append", dest="point_strings", default=[])
parser.add_option(
"-n", "--namespace", metavar="NAME",
help="Target namespace. Defaults to 'root' with "
"--set and --cancel, and nothing with --clear.",
action="append", dest="namespaces", default=[])
parser.add_option(
"-s", "--set", metavar="[SEC]ITEM=VALUE",
help="A [runtime] config item and value to broadcast.",
action="append", dest="settings", default=[])
parser.add_option(
"-F", "--set-file", "--file", metavar="FILE",
help="File with config to broadcast. Can be used multiple times.",
action="append", dest="setting_files", default=[])
parser.add_option(
"-c", "--cancel", metavar="[SEC]ITEM",
help="An item-specific broadcast to cancel.",
action="append", dest="cancel", default=[])
parser.add_option(
"-G", "--cancel-file", metavar="FILE",
help="File with broadcasts to cancel. Can be used multiple times.",
action="append", dest="cancel_files", default=[])
parser.add_option(
"-C", "--clear",
help="Cancel all broadcasts, or with -p/--point, "
"-n/--namespace, cancel all broadcasts to targeted "
"namespaces and/or cycle points. Use \"-C -p '*'\" "
"to cancel all all-cycle broadcasts without canceling "
"all specific-cycle broadcasts.",
action="store_true", dest="clear", default=False)
parser.add_option(
"-e", "--expire", metavar="CYCLE_POINT",
help="Cancel any broadcasts that target cycle "
"points earlier than, but not inclusive of, CYCLE_POINT.",
action="store", default=None, dest="expire")
parser.add_option(
"-d", "--display",
help="Display active broadcasts.",
action="store_true", default=False, dest="show")
parser.add_option(
"-k", "--display-task", metavar="TASKID",
help="Print active broadcasts for a given task "
"(" + TaskID.SYNTAX + ").",
action="store", default=None, dest="showtask")
parser.add_option(
"-b", "--box",
help="Use unicode box characters with -d, -k.",
action="store_true", default=False, dest="unicode")
parser.add_option(
"-r", "--raw",
help="With -d/--display or -k/--display-task, write out "
"the broadcast config structure in raw Python form.",
action="store_true", default=False, dest="raw")
return parser
@cli_function(get_option_parser)
def main(_, options, suite):
"""Implement cylc broadcast."""
pclient = SuiteRuntimeClient(
suite, options.owner, options.host, options.port,
options.comms_timeout)
if options.show or options.showtask:
if options.showtask:
try:
TaskID.split(options.showtask)
except ValueError:
raise UserInputError("TASKID must be " + TaskID.SYNTAX)
settings = pclient('get_broadcast', {'task_id': options.showtask})
padding = get_padding(settings) * ' '
if options.raw:
print(str(settings))
else:
print_tree(settings, padding, options.unicode)
sys.exit(0)
if options.clear:
modified_settings, bad_options = pclient(
'clear_broadcast',
{'point_strings': options.point_strings,
'namespaces': options.namespaces}
)
if modified_settings:
print(get_broadcast_change_report(
modified_settings, is_cancel=True))
sys.exit(report_bad_options(bad_options))
if options.expire:
modified_settings, bad_options = pclient(
'expire_broadcast',
{'cutoff': options.expire}
)
if modified_settings:
print(get_broadcast_change_report(
modified_settings, is_cancel=True))
sys.exit(report_bad_options(bad_options))
# implement namespace and cycle point defaults here
namespaces = options.namespaces
if not namespaces:
namespaces = ["root"]
point_strings = options.point_strings
if not point_strings:
point_strings = ["*"]
if options.cancel or options.cancel_files:
settings = []
for option_item in options.cancel:
if "=" in option_item:
raise UserInputError(
"--cancel=[SEC]ITEM does not take a value")
option_item = option_item.strip()
setting = get_rdict(option_item)
settings.append(setting)
files_to_settings(settings, options.cancel_files, options.cancel)
modified_settings, bad_options = pclient(
'clear_broadcast',
{'point_strings': point_strings,
'namespaces': namespaces,
'cancel_settings': settings}
)
if modified_settings:
print(get_broadcast_change_report(
modified_settings, is_cancel=True))
sys.exit(report_bad_options(bad_options))
if options.settings or options.setting_files:
settings = []
for option_item in options.settings:
if "=" not in option_item:
raise UserInputError(
"--set=[SEC]ITEM=VALUE requires a value")
lhs, rhs = [s.strip() for s in option_item.split("=", 1)]
setting = get_rdict(lhs, rhs)
settings.append(setting)
files_to_settings(settings, options.setting_files)
modified_settings, bad_options = pclient(
'put_broadcast',
{'point_strings': point_strings,
'namespaces': namespaces,
'settings': settings
}
)
print(get_broadcast_change_report(modified_settings))
sys.exit(report_bad_options(bad_options, is_set=True))
if __name__ == "__main__":
main()
| gpl-3.0 | 8,232,696,319,183,442,000 | 36.141243 | 77 | 0.621768 | false |
mfigurnov/sact | fake_cifar10.py | 1 | 2418 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to generate random data of the same format as CIFAR-10.
Creates TFRecord files with the same fields as
tensorflow/models/slim/datasets/download_and_convert_cifar10.py
for use in unit tests of the code that handles this data.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import StringIO
import numpy as np
from PIL import Image
import tensorflow as tf
from tensorflow_models.slim.datasets import dataset_utils
tf.app.flags.DEFINE_string('out_directory', 'testdata/cifar10',
'Output directory for the test data.')
FLAGS = tf.app.flags.FLAGS
_IMAGE_SIZE = 32
def create_fake_data(split_name, num_examples=4):
"""Writes the fake TFRecords for one split of the dataset.
Args:
split_name: One of 'train' or 'test'.
num_examples: The number of random examples to generate and write to the
output TFRecord file.
"""
output_file = os.path.join(FLAGS.out_directory,
'cifar10_%s.tfrecord' % split_name)
writer = tf.python_io.TFRecordWriter(output_file)
for _ in range(num_examples):
image = np.random.randint(256, size=(_IMAGE_SIZE, _IMAGE_SIZE, 3),
dtype=np.uint8)
image = Image.fromarray(image)
image_buffer = StringIO.StringIO()
image.save(image_buffer, format='png')
image_buffer = image_buffer.getvalue()
label = 0
example = dataset_utils.image_to_tfexample(
image_buffer, 'png', _IMAGE_SIZE, _IMAGE_SIZE, label)
writer.write(example.SerializeToString())
writer.close()
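# For example, create_fake_data('train') writes 4 random 32x32 PNG-encoded
# examples, all labeled 0, to <out_directory>/cifar10_train.tfrecord.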
def main(_):
create_fake_data('train')
create_fake_data('test')
if __name__ == '__main__':
tf.app.run()
| apache-2.0 | -526,887,796,072,276,860 | 30.402597 | 80 | 0.672457 | false |
Tim---/scapy-tetra | scapy_tetra/mac_type.py | 1 | 2142 | #!/usr/bin/env python
# Fields differentiating between several MAC PDU types
from scapy.packet import Packet, bind_layers
from scapy.fields import BitEnumField, ConditionalField
from .gsmtap import GSMTAP
# Table 321: MAC PDU types for SCH/F, SCH/HD, STCH and BSCH
class MAC_DL(Packet):
name = 'MAC (downlink)'
fields_desc = [
BitEnumField('type', 0, 2, {
0: 'MAC-RESOURCE',
1: 'MAC-FRAG or MAC-END',
2: 'Broadcast',
3: 'MAC-U-SIGNAL'
}),
ConditionalField(BitEnumField('subtype', 0, 1, {
0: 'MAC-FRAG',
1: 'MAC-END',
}), lambda pkt: pkt.type == 1),
ConditionalField(BitEnumField('subtype', 0, 2, {
0: 'SYSINFO',
1: 'ACCESS-DEFINE',
}), lambda pkt: pkt.type == 2),
]
# Table 321: MAC PDU types for SCH/F, SCH/HD, STCH and BSCH
class MAC_UL(Packet):
name = 'MAC (uplink)'
fields_desc = [
BitEnumField('type', 0, 2, {
0: 'MAC-DATA',
1: 'MAC-FRAG or MAC-END',
3: 'MAC-U-SIGNAL',
}),
ConditionalField(BitEnumField('subtype', 0, 1, {
0: 'MAC-FRAG',
1: 'MAC-END',
}), lambda pkt: pkt.type == 1),
]
# Table 322: MAC PDU types for SCH/HU
class MAC_SCH_HU(Packet):
name = 'MAC (uplink, SCH/HU)'
fields_desc = [
BitEnumField('type', 0, 1, {
0: 'MAC-ACCESS',
1: 'MAC-END-HU',
}),
]
# SCH/F, SCH/HD, STCH, BNCH (downlink) -> MAC_DL
bind_layers(GSMTAP, MAC_DL, flag_uplink=0, sub_type=5)
bind_layers(GSMTAP, MAC_DL, flag_uplink=0, sub_type=4)
bind_layers(GSMTAP, MAC_DL, flag_uplink=0, sub_type=7)
bind_layers(GSMTAP, MAC_DL, flag_uplink=0, sub_type=6)
# SCH/F, SCH/HD, STCH, BNCH (uplink) -> MAC_Uplink
bind_layers(GSMTAP, MAC_UL, flag_uplink=1, sub_type=5)
bind_layers(GSMTAP, MAC_UL, flag_uplink=1, sub_type=4)
bind_layers(GSMTAP, MAC_UL, flag_uplink=1, sub_type=7)
bind_layers(GSMTAP, MAC_UL, flag_uplink=1, sub_type=6)
# SCH/HU (uplink) -> MAC_SCH_HU
bind_layers(GSMTAP, MAC_SCH_HU, flag_uplink=1, sub_type=3)
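# Illustrative dissection (field values assumed, not from a real capture):
# a downlink frame whose first two bits are '10' dissects as MAC_DL with
# type=2 ('Broadcast'); the conditional 2-bit subtype then selects
# SYSINFO (0) or ACCESS-DEFINE (1).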
| gpl-2.0 | -8,505,794,606,212,739,000 | 29.6 | 59 | 0.572829 | false |
inercia/evy | evy/patched/asynchat.py | 1 | 1592 | #
# Evy - a concurrent networking library for Python
#
# Unless otherwise noted, the files in Evy are under the following MIT license:
#
# Copyright (c) 2012, Alvaro Saurin
# Copyright (c) 2008-2010, Eventlet Contributors (see AUTHORS)
# Copyright (c) 2007-2010, Linden Research, Inc.
# Copyright (c) 2005-2006, Bob Ippolito
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from evy import patcher
from evy.patched import asyncore
from evy.patched import socket
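# patcher.inject() below re-executes the stdlib 'asynchat' module in this
# namespace, substituting the cooperative (green) asyncore and socket
# modules imported above.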
patcher.inject('asynchat',
globals(),
('asyncore', asyncore),
('socket', socket))
del patcher
| mit | -4,209,755,448,849,266,700 | 39.820513 | 79 | 0.758166 | false |
CoPhi/wordnet-like-searchpage | parser/hrv/manageids.py | 1 | 1702 | #! /usr/bin/env python
import sys
import collections
import shutil
import os
import os.path
import hashlib
from sets import Set
import string
# other utility functions
sep="|"
def returnWN30Id(id):
pos = id[0]
other = id[2:]
if pos == 'n':
pos = '1';
elif pos == 'v':
pos = '2';
elif pos == 'a':
pos = '3';
elif pos == 'r':
pos = '4';
elif pos == 's':
pos = '5';
id = pos + other;
return id;
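# e.g. returnWN30Id("v-00001740") -> "200001740", assuming ids of the form
# "<pos letter><separator><offset>"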
def returnWN30pos(p):
pos = ""
if p == '1':
pos = 'n';
elif p == '2':
pos = 'v';
elif p == '3':
pos = 'a';
elif p == '4':
pos = 'r';
elif p == '5':
pos = 's';
return pos;
def returnIWNId(id):
lists = id.split("-")
pos=lists[3]
id = lists[2]
return id+sep+pos
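# e.g. an ItalWordNet id like "iwn-ita-12345-n" (illustrative format) yields
# "12345|n": field 2 is the numeric id, field 3 the pos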
def decodePos(pos):
if pos == 'n':
pos = '1';
elif pos == 'v':
pos = '2';
elif pos == 'a':
pos = '3';
elif pos == 'r':
pos = '4';
elif pos == 's':
pos = '5';
else:
pos="0"
return pos
def returnHWNId(id):
lists = id.split("-")
pos="-"
if len(lists)== 3:
pos=lists[2]
id = lists[1]
pos=decodePos(pos)
else:
print "Wrong IDs "+id
return pos+id
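# e.g. an id like "hrv-12345-n" (illustrative format) yields "112345": the
# pos letter becomes a digit and is prefixed to the numeric id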
def returnHWNPos(id):
lists = id.split("-")
pos="-"
if len(lists)== 3:
pos=lists[2]
id = lists[1]
pos=decodePos(pos)
else:
print "Wrong IDs (Pos) "+id
return pos
def returnGRCId(id):
return id
def returnLATId(id):
return id
def returnARAId(id):
return id
def returnENGId(id):
return id
def returnSynsetId(lan,pos,id):
cod=""
p=""
if lan=="grc":
cod="001"
if lan=="lat":
cod="002"
if lan=="ara":
cod="003"
if lan=="ita":
cod="004"
if pos=="N":
p="1"
if pos=="V":
p="2"
if pos=="A":
p="3"
if pos=="R":
p="4"
if pos=="X":
p="5"
return p+cod+id | gpl-3.0 | 7,135,100,050,336,141,000 | 13.191667 | 32 | 0.536428 | false |
kaushik94/sympy | sympy/geometry/point.py | 1 | 36250 | """Geometrical Points.
Contains
========
Point
Point2D
Point3D
When methods of Point require 1 or more points as arguments, they
can be passed as a sequence of coordinates or Points:
>>> from sympy.geometry.point import Point
>>> Point(1, 1).is_collinear((2, 2), (3, 4))
False
>>> Point(1, 1).is_collinear(Point(2, 2), Point(3, 4))
False
"""
from __future__ import division, print_function
import warnings
from sympy.core import S, sympify, Expr
from sympy.core.compatibility import is_sequence
from sympy.core.containers import Tuple
from sympy.simplify import nsimplify, simplify
from sympy.geometry.exceptions import GeometryError
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.functions.elementary.complexes import im
from sympy.matrices import Matrix
from sympy.core.numbers import Float
from sympy.core.evaluate import global_evaluate
from sympy.core.add import Add
from sympy.utilities.iterables import uniq
from sympy.utilities.misc import filldedent, func_name, Undecidable
from .entity import GeometryEntity
class Point(GeometryEntity):
"""A point in a n-dimensional Euclidean space.
Parameters
==========
coords : sequence of n-coordinate values. In the special
case where n=2 or 3, a Point2D or Point3D will be created
as appropriate.
    evaluate : if `True` (default), all floats are turned into
exact types.
dim : number of coordinates the point should have. If coordinates
are unspecified, they are padded with zeros.
on_morph : indicates what should happen when the number of
coordinates of a point need to be changed by adding or
removing zeros. Possible values are `'warn'`, `'error'`, or
`ignore` (default). No warning or error is given when `*args`
is empty and `dim` is given. An error is always raised when
trying to remove nonzero coordinates.
Attributes
==========
length
origin: A `Point` representing the origin of the
appropriately-dimensioned space.
Raises
======
TypeError : When instantiating with anything but a Point or sequence
ValueError : when instantiating with a sequence with length < 2 or
when trying to reduce dimensions if keyword `on_morph='error'` is
set.
See Also
========
sympy.geometry.line.Segment : Connects two Points
Examples
========
>>> from sympy.geometry import Point
>>> from sympy.abc import x
>>> Point(1, 2, 3)
Point3D(1, 2, 3)
>>> Point([1, 2])
Point2D(1, 2)
>>> Point(0, x)
Point2D(0, x)
>>> Point(dim=4)
Point(0, 0, 0, 0)
Floats are automatically converted to Rational unless the
evaluate flag is False:
>>> Point(0.5, 0.25)
Point2D(1/2, 1/4)
>>> Point(0.5, 0.25, evaluate=False)
Point2D(0.5, 0.25)
"""
is_Point = True
def __new__(cls, *args, **kwargs):
evaluate = kwargs.get('evaluate', global_evaluate[0])
on_morph = kwargs.get('on_morph', 'ignore')
# unpack into coords
coords = args[0] if len(args) == 1 else args
        # check args and quickly handle Point instances
if isinstance(coords, Point):
# even if we're mutating the dimension of a point, we
# don't reevaluate its coordinates
evaluate = False
if len(coords) == kwargs.get('dim', len(coords)):
return coords
if not is_sequence(coords):
raise TypeError(filldedent('''
Expecting sequence of coordinates, not `{}`'''
.format(func_name(coords))))
# A point where only `dim` is specified is initialized
# to zeros.
if len(coords) == 0 and kwargs.get('dim', None):
coords = (S.Zero,)*kwargs.get('dim')
coords = Tuple(*coords)
dim = kwargs.get('dim', len(coords))
if len(coords) < 2:
raise ValueError(filldedent('''
Point requires 2 or more coordinates or
keyword `dim` > 1.'''))
if len(coords) != dim:
message = ("Dimension of {} needs to be changed "
"from {} to {}.").format(coords, len(coords), dim)
if on_morph == 'ignore':
pass
elif on_morph == "error":
raise ValueError(message)
elif on_morph == 'warn':
warnings.warn(message)
else:
raise ValueError(filldedent('''
on_morph value should be 'error',
'warn' or 'ignore'.'''))
if any(coords[dim:]):
raise ValueError('Nonzero coordinates cannot be removed.')
if any(a.is_number and im(a) for a in coords):
raise ValueError('Imaginary coordinates are not permitted.')
if not all(isinstance(a, Expr) for a in coords):
raise TypeError('Coordinates must be valid SymPy expressions.')
# pad with zeros appropriately
coords = coords[:dim] + (S.Zero,)*(dim - len(coords))
# Turn any Floats into rationals and simplify
# any expressions before we instantiate
if evaluate:
coords = coords.xreplace(dict(
[(f, simplify(nsimplify(f, rational=True)))
for f in coords.atoms(Float)]))
# return 2D or 3D instances
if len(coords) == 2:
kwargs['_nocheck'] = True
return Point2D(*coords, **kwargs)
elif len(coords) == 3:
kwargs['_nocheck'] = True
return Point3D(*coords, **kwargs)
# the general Point
return GeometryEntity.__new__(cls, *coords)
def __abs__(self):
"""Returns the distance between this point and the origin."""
origin = Point([0]*len(self))
return Point.distance(origin, self)
def __add__(self, other):
"""Add other to self by incrementing self's coordinates by
those of other.
Notes
=====
>>> from sympy.geometry.point import Point
When sequences of coordinates are passed to Point methods, they
are converted to a Point internally. This __add__ method does
not do that so if floating point values are used, a floating
point result (in terms of SymPy Floats) will be returned.
>>> Point(1, 2) + (.1, .2)
Point2D(1.1, 2.2)
If this is not desired, the `translate` method can be used or
another Point can be added:
>>> Point(1, 2).translate(.1, .2)
Point2D(11/10, 11/5)
>>> Point(1, 2) + Point(.1, .2)
Point2D(11/10, 11/5)
See Also
========
sympy.geometry.point.Point.translate
"""
try:
s, o = Point._normalize_dimension(self, Point(other, evaluate=False))
except TypeError:
raise GeometryError("Don't know how to add {} and a Point object".format(other))
coords = [simplify(a + b) for a, b in zip(s, o)]
return Point(coords, evaluate=False)
def __contains__(self, item):
return item in self.args
def __div__(self, divisor):
"""Divide point's coordinates by a factor."""
divisor = sympify(divisor)
coords = [simplify(x/divisor) for x in self.args]
return Point(coords, evaluate=False)
def __eq__(self, other):
if not isinstance(other, Point) or len(self.args) != len(other.args):
return False
return self.args == other.args
def __getitem__(self, key):
return self.args[key]
def __hash__(self):
return hash(self.args)
def __iter__(self):
return self.args.__iter__()
def __len__(self):
return len(self.args)
def __mul__(self, factor):
"""Multiply point's coordinates by a factor.
Notes
=====
>>> from sympy.geometry.point import Point
When multiplying a Point by a floating point number,
the coordinates of the Point will be changed to Floats:
>>> Point(1, 2)*0.1
Point2D(0.1, 0.2)
If this is not desired, the `scale` method can be used or
else only multiply or divide by integers:
>>> Point(1, 2).scale(1.1, 1.1)
Point2D(11/10, 11/5)
>>> Point(1, 2)*11/10
Point2D(11/10, 11/5)
See Also
========
sympy.geometry.point.Point.scale
"""
factor = sympify(factor)
coords = [simplify(x*factor) for x in self.args]
return Point(coords, evaluate=False)
def __rmul__(self, factor):
"""Multiply a factor by point's coordinates."""
return self.__mul__(factor)
def __neg__(self):
"""Negate the point."""
coords = [-x for x in self.args]
return Point(coords, evaluate=False)
def __sub__(self, other):
"""Subtract two points, or subtract a factor from this point's
coordinates."""
return self + [-x for x in other]
@classmethod
def _normalize_dimension(cls, *points, **kwargs):
"""Ensure that points have the same dimension.
By default `on_morph='warn'` is passed to the
`Point` constructor."""
# if we have a built-in ambient dimension, use it
dim = getattr(cls, '_ambient_dimension', None)
# override if we specified it
dim = kwargs.get('dim', dim)
# if no dim was given, use the highest dimensional point
if dim is None:
dim = max(i.ambient_dimension for i in points)
if all(i.ambient_dimension == dim for i in points):
return list(points)
kwargs['dim'] = dim
kwargs['on_morph'] = kwargs.get('on_morph', 'warn')
return [Point(i, **kwargs) for i in points]
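    # Illustrative behaviour (assumption: example values, not from the
    # original source): mixing a 2D and a 3D point pads the 2D one with
    # a zero, emitting a morph warning:
    #   Point._normalize_dimension(Point(1, 2), Point(1, 2, 3))
    #   -> [Point3D(1, 2, 0), Point3D(1, 2, 3)]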
@staticmethod
def affine_rank(*args):
"""The affine rank of a set of points is the dimension
of the smallest affine space containing all the points.
For example, if the points lie on a line (and are not all
the same) their affine rank is 1. If the points lie on a plane
but not a line, their affine rank is 2. By convention, the empty
set has affine rank -1."""
if len(args) == 0:
return -1
# make sure we're genuinely points
# and translate every point to the origin
points = Point._normalize_dimension(*[Point(i) for i in args])
origin = points[0]
points = [i - origin for i in points[1:]]
m = Matrix([i.args for i in points])
# XXX fragile -- what is a better way?
return m.rank(iszerofunc = lambda x:
abs(x.n(2)) < 1e-12 if x.is_number else x.is_zero)
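    # Illustrative values (assumption: example values, not from the
    # original source):
    #   Point.affine_rank(Point(0, 0), Point(1, 1), Point(2, 2))  -> 1
    #   Point.affine_rank(Point(0, 0), Point(1, 0), Point(0, 1))  -> 2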
@property
def ambient_dimension(self):
"""Number of components this point has."""
return getattr(self, '_ambient_dimension', len(self))
@classmethod
def are_coplanar(cls, *points):
"""Return True if there exists a plane in which all the points
lie. A trivial True value is returned if `len(points) < 3` or
all Points are 2-dimensional.
Parameters
==========
A set of points
Raises
======
ValueError : if less than 3 unique points are given
Returns
=======
boolean
Examples
========
>>> from sympy import Point3D
>>> p1 = Point3D(1, 2, 2)
>>> p2 = Point3D(2, 7, 2)
>>> p3 = Point3D(0, 0, 2)
>>> p4 = Point3D(1, 1, 2)
>>> Point3D.are_coplanar(p1, p2, p3, p4)
True
>>> p5 = Point3D(0, 1, 3)
>>> Point3D.are_coplanar(p1, p2, p3, p5)
False
"""
if len(points) <= 1:
return True
points = cls._normalize_dimension(*[Point(i) for i in points])
# quick exit if we are in 2D
if points[0].ambient_dimension == 2:
return True
points = list(uniq(points))
return Point.affine_rank(*points) <= 2
def distance(self, other):
"""The Euclidean distance between self and another GeometricEntity.
Returns
=======
distance : number or symbolic expression.
Raises
======
TypeError : if other is not recognized as a GeometricEntity or is a
GeometricEntity for which distance is not defined.
See Also
========
sympy.geometry.line.Segment.length
sympy.geometry.point.Point.taxicab_distance
Examples
========
>>> from sympy.geometry import Point, Line
>>> p1, p2 = Point(1, 1), Point(4, 5)
>>> l = Line((3, 1), (2, 2))
>>> p1.distance(p2)
5
>>> p1.distance(l)
sqrt(2)
The computed distance may be symbolic, too:
>>> from sympy.abc import x, y
>>> p3 = Point(x, y)
>>> p3.distance((0, 0))
sqrt(x**2 + y**2)
"""
if not isinstance(other, GeometryEntity):
try:
other = Point(other, dim=self.ambient_dimension)
except TypeError:
raise TypeError("not recognized as a GeometricEntity: %s" % type(other))
if isinstance(other, Point):
s, p = Point._normalize_dimension(self, Point(other))
return sqrt(Add(*((a - b)**2 for a, b in zip(s, p))))
distance = getattr(other, 'distance', None)
if distance is None:
raise TypeError("distance between Point and %s is not defined" % type(other))
return distance(self)
def dot(self, p):
"""Return dot product of self with another Point."""
if not is_sequence(p):
p = Point(p) # raise the error via Point
return Add(*(a*b for a, b in zip(self, p)))
def equals(self, other):
"""Returns whether the coordinates of self and other agree."""
# a point is equal to another point if all its components are equal
if not isinstance(other, Point) or len(self) != len(other):
return False
return all(a.equals(b) for a, b in zip(self, other))
def evalf(self, prec=None, **options):
"""Evaluate the coordinates of the point.
This method will, where possible, create and return a new Point
where the coordinates are evaluated as floating point numbers to
the precision indicated (default=15).
Parameters
==========
prec : int
Returns
=======
point : Point
Examples
========
>>> from sympy import Point, Rational
>>> p1 = Point(Rational(1, 2), Rational(3, 2))
>>> p1
Point2D(1/2, 3/2)
>>> p1.evalf()
Point2D(0.5, 1.5)
"""
coords = [x.evalf(prec, **options) for x in self.args]
return Point(*coords, evaluate=False)
def intersection(self, other):
"""The intersection between this point and another GeometryEntity.
Parameters
==========
other : GeometryEntity or sequence of coordinates
Returns
=======
intersection : list of Points
Notes
=====
The return value will either be an empty list if there is no
intersection, otherwise it will contain this point.
Examples
========
>>> from sympy import Point
>>> p1, p2, p3 = Point(0, 0), Point(1, 1), Point(0, 0)
>>> p1.intersection(p2)
[]
>>> p1.intersection(p3)
[Point2D(0, 0)]
"""
if not isinstance(other, GeometryEntity):
other = Point(other)
if isinstance(other, Point):
if self == other:
return [self]
p1, p2 = Point._normalize_dimension(self, other)
if p1 == self and p1 == p2:
return [self]
return []
return other.intersection(self)
def is_collinear(self, *args):
"""Returns `True` if there exists a line
that contains `self` and `points`. Returns `False` otherwise.
A trivially True value is returned if no points are given.
Parameters
==========
args : sequence of Points
Returns
=======
is_collinear : boolean
See Also
========
sympy.geometry.line.Line
Examples
========
>>> from sympy import Point
>>> from sympy.abc import x
>>> p1, p2 = Point(0, 0), Point(1, 1)
>>> p3, p4, p5 = Point(2, 2), Point(x, x), Point(1, 2)
>>> Point.is_collinear(p1, p2, p3, p4)
True
>>> Point.is_collinear(p1, p2, p3, p5)
False
"""
points = (self,) + args
points = Point._normalize_dimension(*[Point(i) for i in points])
points = list(uniq(points))
return Point.affine_rank(*points) <= 1
def is_concyclic(self, *args):
"""Do `self` and the given sequence of points lie in a circle?
Returns True if the set of points are concyclic and
False otherwise. A trivial value of True is returned
if there are fewer than 2 other points.
Parameters
==========
args : sequence of Points
Returns
=======
is_concyclic : boolean
Examples
========
>>> from sympy import Point
Define 4 points that are on the unit circle:
>>> p1, p2, p3, p4 = Point(1, 0), (0, 1), (-1, 0), (0, -1)
>>> p1.is_concyclic() == p1.is_concyclic(p2, p3, p4) == True
True
Define a point not on that circle:
>>> p = Point(1, 1)
>>> p.is_concyclic(p1, p2, p3)
False
"""
points = (self,) + args
points = Point._normalize_dimension(*[Point(i) for i in points])
points = list(uniq(points))
if not Point.affine_rank(*points) <= 2:
return False
origin = points[0]
points = [p - origin for p in points]
# points are concyclic if they are coplanar and
# there is a point c so that ||p_i-c|| == ||p_j-c|| for all
# i and j. Rearranging this equation gives us the following
        # condition: the matrix `mat` must not have a pivot in the last
# column.
mat = Matrix([list(i) + [i.dot(i)] for i in points])
rref, pivots = mat.rref()
if len(origin) not in pivots:
return True
return False
@property
def is_nonzero(self):
"""True if any coordinate is nonzero, False if every coordinate is zero,
and None if it cannot be determined."""
is_zero = self.is_zero
if is_zero is None:
return None
return not is_zero
def is_scalar_multiple(self, p):
"""Returns whether each coordinate of `self` is a scalar
multiple of the corresponding coordinate in point p.
"""
s, o = Point._normalize_dimension(self, Point(p))
# 2d points happen a lot, so optimize this function call
if s.ambient_dimension == 2:
(x1, y1), (x2, y2) = s.args, o.args
            rv = (x1*y2 - x2*y1).equals(0)
            if rv is None:
                raise Undecidable(filldedent(
                    '''can't determine if %s is a scalar multiple of
                    %s''' % (s, o)))
            # return here so the 2D fast path actually avoids the
            # rank computation below (the determinant test above is
            # equivalent for 2x2)
            return rv
# if the vectors p1 and p2 are linearly dependent, then they must
# be scalar multiples of each other
m = Matrix([s.args, o.args])
return m.rank() < 2
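    # Illustrative values (assumption: example values, not from the
    # original source):
    #   Point(1, 2).is_scalar_multiple(Point(2, 4))  -> True
    #   Point(1, 2).is_scalar_multiple(Point(2, 5))  -> False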
@property
def is_zero(self):
"""True if every coordinate is zero, False if any coordinate is not zero,
and None if it cannot be determined."""
nonzero = [x.is_nonzero for x in self.args]
if any(nonzero):
return False
if any(x is None for x in nonzero):
return None
return True
@property
def length(self):
"""
Treating a Point as a Line, this returns 0 for the length of a Point.
Examples
========
>>> from sympy import Point
>>> p = Point(0, 1)
>>> p.length
0
"""
return S.Zero
def midpoint(self, p):
"""The midpoint between self and point p.
Parameters
==========
p : Point
Returns
=======
midpoint : Point
See Also
========
sympy.geometry.line.Segment.midpoint
Examples
========
>>> from sympy.geometry import Point
>>> p1, p2 = Point(1, 1), Point(13, 5)
>>> p1.midpoint(p2)
Point2D(7, 3)
"""
s, p = Point._normalize_dimension(self, Point(p))
return Point([simplify((a + b)*S.Half) for a, b in zip(s, p)])
@property
def origin(self):
"""A point of all zeros of the same ambient dimension
as the current point"""
return Point([0]*len(self), evaluate=False)
@property
def orthogonal_direction(self):
"""Returns a non-zero point that is orthogonal to the
line containing `self` and the origin.
Examples
========
>>> from sympy.geometry import Line, Point
>>> a = Point(1, 2, 3)
>>> a.orthogonal_direction
Point3D(-2, 1, 0)
>>> b = _
>>> Line(b, b.origin).is_perpendicular(Line(a, a.origin))
True
"""
dim = self.ambient_dimension
# if a coordinate is zero, we can put a 1 there and zeros elsewhere
if self[0].is_zero:
return Point([1] + (dim - 1)*[0])
if self[1].is_zero:
return Point([0,1] + (dim - 2)*[0])
# if the first two coordinates aren't zero, we can create a non-zero
# orthogonal vector by swapping them, negating one, and padding with zeros
return Point([-self[1], self[0]] + (dim - 2)*[0])
@staticmethod
def project(a, b):
"""Project the point `a` onto the line between the origin
and point `b` along the normal direction.
Parameters
==========
a : Point
b : Point
Returns
=======
p : Point
See Also
========
sympy.geometry.line.LinearEntity.projection
Examples
========
>>> from sympy.geometry import Line, Point
>>> a = Point(1, 2)
>>> b = Point(2, 5)
>>> z = a.origin
>>> p = Point.project(a, b)
>>> Line(p, a).is_perpendicular(Line(p, b))
True
>>> Point.is_collinear(z, p, b)
True
"""
a, b = Point._normalize_dimension(Point(a), Point(b))
if b.is_zero:
raise ValueError("Cannot project to the zero vector.")
return b*(a.dot(b) / b.dot(b))
def taxicab_distance(self, p):
"""The Taxicab Distance from self to point p.
Returns the sum of the horizontal and vertical distances to point p.
Parameters
==========
p : Point
Returns
=======
taxicab_distance : The sum of the horizontal
and vertical distances to point p.
See Also
========
sympy.geometry.point.Point.distance
Examples
========
>>> from sympy.geometry import Point
>>> p1, p2 = Point(1, 1), Point(4, 5)
>>> p1.taxicab_distance(p2)
7
"""
s, p = Point._normalize_dimension(self, Point(p))
return Add(*(abs(a - b) for a, b in zip(s, p)))
def canberra_distance(self, p):
"""The Canberra Distance from self to point p.
Returns the weighted sum of horizontal and vertical distances to
point p.
Parameters
==========
p : Point
Returns
=======
canberra_distance : The weighted sum of horizontal and vertical
distances to point p. The weight used is the sum of absolute values
of the coordinates.
Examples
========
>>> from sympy.geometry import Point
>>> p1, p2 = Point(1, 1), Point(3, 3)
>>> p1.canberra_distance(p2)
1
>>> p1, p2 = Point(0, 0), Point(3, 3)
>>> p1.canberra_distance(p2)
2
Raises
======
ValueError when both vectors are zero.
See Also
========
sympy.geometry.point.Point.distance
"""
s, p = Point._normalize_dimension(self, Point(p))
if self.is_zero and p.is_zero:
raise ValueError("Cannot project to the zero vector.")
return Add(*((abs(a - b)/(abs(a) + abs(b))) for a, b in zip(s, p)))
@property
def unit(self):
"""Return the Point that is in the same direction as `self`
and a distance of 1 from the origin"""
return self / abs(self)
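    # Illustrative value (assumption: example values, not from the
    # original source):
    #   Point(3, 4).unit  -> Point2D(3/5, 4/5)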
n = evalf
__truediv__ = __div__
class Point2D(Point):
"""A point in a 2-dimensional Euclidean space.
Parameters
==========
coords : sequence of 2 coordinate values.
Attributes
==========
x
y
length
Raises
======
TypeError
When trying to add or subtract points with different dimensions.
When trying to create a point with more than two dimensions.
When `intersection` is called with object other than a Point.
See Also
========
sympy.geometry.line.Segment : Connects two Points
Examples
========
>>> from sympy.geometry import Point2D
>>> from sympy.abc import x
>>> Point2D(1, 2)
Point2D(1, 2)
>>> Point2D([1, 2])
Point2D(1, 2)
>>> Point2D(0, x)
Point2D(0, x)
Floats are automatically converted to Rational unless the
evaluate flag is False:
>>> Point2D(0.5, 0.25)
Point2D(1/2, 1/4)
>>> Point2D(0.5, 0.25, evaluate=False)
Point2D(0.5, 0.25)
"""
_ambient_dimension = 2
def __new__(cls, *args, **kwargs):
if not kwargs.pop('_nocheck', False):
kwargs['dim'] = 2
args = Point(*args, **kwargs)
return GeometryEntity.__new__(cls, *args)
def __contains__(self, item):
return item == self
@property
def bounds(self):
"""Return a tuple (xmin, ymin, xmax, ymax) representing the bounding
rectangle for the geometric figure.
"""
return (self.x, self.y, self.x, self.y)
def rotate(self, angle, pt=None):
"""Rotate ``angle`` radians counterclockwise about Point ``pt``.
See Also
========
translate, scale
Examples
========
>>> from sympy import Point2D, pi
>>> t = Point2D(1, 0)
>>> t.rotate(pi/2)
Point2D(0, 1)
>>> t.rotate(pi/2, (2, 0))
Point2D(2, -1)
"""
from sympy import cos, sin, Point
c = cos(angle)
s = sin(angle)
rv = self
if pt is not None:
pt = Point(pt, dim=2)
rv -= pt
x, y = rv.args
rv = Point(c*x - s*y, s*x + c*y)
if pt is not None:
rv += pt
return rv
def scale(self, x=1, y=1, pt=None):
"""Scale the coordinates of the Point by multiplying by
``x`` and ``y`` after subtracting ``pt`` -- default is (0, 0) --
and then adding ``pt`` back again (i.e. ``pt`` is the point of
reference for the scaling).
See Also
========
rotate, translate
Examples
========
>>> from sympy import Point2D
>>> t = Point2D(1, 1)
>>> t.scale(2)
Point2D(2, 1)
>>> t.scale(2, 2)
Point2D(2, 2)
"""
if pt:
pt = Point(pt, dim=2)
return self.translate(*(-pt).args).scale(x, y).translate(*pt.args)
return Point(self.x*x, self.y*y)
def transform(self, matrix):
"""Return the point after applying the transformation described
by the 3x3 Matrix, ``matrix``.
See Also
========
sympy.geometry.point.Point2D.rotate
sympy.geometry.point.Point2D.scale
sympy.geometry.point.Point2D.translate
"""
if not (matrix.is_Matrix and matrix.shape == (3, 3)):
raise ValueError("matrix must be a 3x3 matrix")
col, row = matrix.shape
x, y = self.args
return Point(*(Matrix(1, 3, [x, y, 1])*matrix).tolist()[0][:2])
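    # Illustrative use (assumption: example values, not from the original
    # source) -- a homogeneous translation by (5, 7) in the row-vector
    # convention used above:
    #   Point2D(1, 2).transform(Matrix([[1, 0, 0], [0, 1, 0], [5, 7, 1]]))
    #   -> Point2D(6, 9)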
def translate(self, x=0, y=0):
"""Shift the Point by adding x and y to the coordinates of the Point.
See Also
========
sympy.geometry.point.Point2D.rotate, scale
Examples
========
>>> from sympy import Point2D
>>> t = Point2D(0, 1)
>>> t.translate(2)
Point2D(2, 1)
>>> t.translate(2, 2)
Point2D(2, 3)
>>> t + Point2D(2, 2)
Point2D(2, 3)
"""
return Point(self.x + x, self.y + y)
@property
def x(self):
"""
Returns the X coordinate of the Point.
Examples
========
>>> from sympy import Point2D
>>> p = Point2D(0, 1)
>>> p.x
0
"""
return self.args[0]
@property
def y(self):
"""
Returns the Y coordinate of the Point.
Examples
========
>>> from sympy import Point2D
>>> p = Point2D(0, 1)
>>> p.y
1
"""
return self.args[1]
class Point3D(Point):
"""A point in a 3-dimensional Euclidean space.
Parameters
==========
coords : sequence of 3 coordinate values.
Attributes
==========
x
y
z
length
Raises
======
TypeError
When trying to add or subtract points with different dimensions.
When `intersection` is called with object other than a Point.
Examples
========
>>> from sympy import Point3D
>>> from sympy.abc import x
>>> Point3D(1, 2, 3)
Point3D(1, 2, 3)
>>> Point3D([1, 2, 3])
Point3D(1, 2, 3)
>>> Point3D(0, x, 3)
Point3D(0, x, 3)
Floats are automatically converted to Rational unless the
evaluate flag is False:
>>> Point3D(0.5, 0.25, 2)
Point3D(1/2, 1/4, 2)
>>> Point3D(0.5, 0.25, 3, evaluate=False)
Point3D(0.5, 0.25, 3)
"""
_ambient_dimension = 3
def __new__(cls, *args, **kwargs):
if not kwargs.pop('_nocheck', False):
kwargs['dim'] = 3
args = Point(*args, **kwargs)
return GeometryEntity.__new__(cls, *args)
def __contains__(self, item):
return item == self
@staticmethod
def are_collinear(*points):
"""Is a sequence of points collinear?
Test whether or not a set of points are collinear. Returns True if
the set of points are collinear, or False otherwise.
Parameters
==========
points : sequence of Point
Returns
=======
are_collinear : boolean
See Also
========
sympy.geometry.line.Line3D
Examples
========
>>> from sympy import Point3D, Matrix
>>> from sympy.abc import x
>>> p1, p2 = Point3D(0, 0, 0), Point3D(1, 1, 1)
>>> p3, p4, p5 = Point3D(2, 2, 2), Point3D(x, x, x), Point3D(1, 2, 6)
>>> Point3D.are_collinear(p1, p2, p3, p4)
True
>>> Point3D.are_collinear(p1, p2, p3, p5)
False
"""
return Point.is_collinear(*points)
def direction_cosine(self, point):
"""
Gives the direction cosine between 2 points
Parameters
==========
p : Point3D
Returns
=======
list
Examples
========
>>> from sympy import Point3D
>>> p1 = Point3D(1, 2, 3)
>>> p1.direction_cosine(Point3D(2, 3, 5))
[sqrt(6)/6, sqrt(6)/6, sqrt(6)/3]
"""
a = self.direction_ratio(point)
b = sqrt(Add(*(i**2 for i in a)))
return [(point.x - self.x) / b,(point.y - self.y) / b,
(point.z - self.z) / b]
def direction_ratio(self, point):
"""
Gives the direction ratio between 2 points
Parameters
==========
p : Point3D
Returns
=======
list
Examples
========
>>> from sympy import Point3D
>>> p1 = Point3D(1, 2, 3)
>>> p1.direction_ratio(Point3D(2, 3, 5))
[1, 1, 2]
"""
return [(point.x - self.x),(point.y - self.y),(point.z - self.z)]
def intersection(self, other):
"""The intersection between this point and another GeometryEntity.
Parameters
==========
other : GeometryEntity or sequence of coordinates
Returns
=======
intersection : list of Points
Notes
=====
The return value will either be an empty list if there is no
intersection, otherwise it will contain this point.
Examples
========
>>> from sympy import Point3D
>>> p1, p2, p3 = Point3D(0, 0, 0), Point3D(1, 1, 1), Point3D(0, 0, 0)
>>> p1.intersection(p2)
[]
>>> p1.intersection(p3)
[Point3D(0, 0, 0)]
"""
if not isinstance(other, GeometryEntity):
other = Point(other, dim=3)
if isinstance(other, Point3D):
if self == other:
return [self]
return []
return other.intersection(self)
def scale(self, x=1, y=1, z=1, pt=None):
"""Scale the coordinates of the Point by multiplying by
        ``x``, ``y`` and ``z`` after subtracting ``pt`` -- default is (0, 0, 0) --
and then adding ``pt`` back again (i.e. ``pt`` is the point of
reference for the scaling).
See Also
========
translate
Examples
========
>>> from sympy import Point3D
>>> t = Point3D(1, 1, 1)
>>> t.scale(2)
Point3D(2, 1, 1)
>>> t.scale(2, 2)
Point3D(2, 2, 1)
"""
if pt:
pt = Point3D(pt)
return self.translate(*(-pt).args).scale(x, y, z).translate(*pt.args)
return Point3D(self.x*x, self.y*y, self.z*z)
def transform(self, matrix):
"""Return the point after applying the transformation described
by the 4x4 Matrix, ``matrix``.
See Also
========
sympy.geometry.point.Point3D.scale
sympy.geometry.point.Point3D.translate
"""
if not (matrix.is_Matrix and matrix.shape == (4, 4)):
raise ValueError("matrix must be a 4x4 matrix")
col, row = matrix.shape
from sympy.matrices.expressions import Transpose
x, y, z = self.args
m = Transpose(matrix)
return Point3D(*(Matrix(1, 4, [x, y, z, 1])*m).tolist()[0][:3])
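    # Illustrative use (assumption: example values, not from the original
    # source) -- because of the transpose above, the 4x4 matrix follows
    # the column-vector convention, with the translation in the last
    # column:
    #   Point3D(1, 2, 3).transform(Matrix([
    #       [1, 0, 0, 5], [0, 1, 0, 7], [0, 0, 1, 9], [0, 0, 0, 1]]))
    #   -> Point3D(6, 9, 12)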
def translate(self, x=0, y=0, z=0):
"""Shift the Point by adding x and y to the coordinates of the Point.
See Also
========
scale
Examples
========
>>> from sympy import Point3D
>>> t = Point3D(0, 1, 1)
>>> t.translate(2)
Point3D(2, 1, 1)
>>> t.translate(2, 2)
Point3D(2, 3, 1)
>>> t + Point3D(2, 2, 2)
Point3D(2, 3, 3)
"""
return Point3D(self.x + x, self.y + y, self.z + z)
@property
def x(self):
"""
Returns the X coordinate of the Point.
Examples
========
>>> from sympy import Point3D
>>> p = Point3D(0, 1, 3)
>>> p.x
0
"""
return self.args[0]
@property
def y(self):
"""
Returns the Y coordinate of the Point.
Examples
========
>>> from sympy import Point3D
>>> p = Point3D(0, 1, 2)
>>> p.y
1
"""
return self.args[1]
@property
def z(self):
"""
Returns the Z coordinate of the Point.
Examples
========
>>> from sympy import Point3D
>>> p = Point3D(0, 1, 1)
>>> p.z
1
"""
return self.args[2]
| bsd-3-clause | 2,243,155,447,638,387,200 | 25.772526 | 92 | 0.531503 | false |
radical-software/mongrey | mongrey/ext/slugify/__init__.py | 1 | 1063 |
from .main import Slugify, UniqueSlugify
from .alt_translates import *
slugify = Slugify()
unique_slugify = UniqueSlugify()
slugify_unicode = Slugify(translate=None)
slugify_url = Slugify()
slugify_url.to_lower = True
slugify_url.stop_words = ('a', 'an', 'the')
slugify_url.max_length = 200
slugify_filename = Slugify()
slugify_filename.separator = '_'
slugify_filename.safe_chars = '-.'
slugify_filename.max_length = 255
slugify_ru = Slugify(pretranslate=CYRILLIC)
slugify_de = Slugify(pretranslate=GERMAN)
slugify_el = Slugify(pretranslate=GREEK)
# Legacy code
def deprecate_init(Klass):
class NewKlass(Klass):
def __init__(self, *args, **kwargs):
import warnings
warnings.simplefilter('once')
warnings.warn("'slugify.get_slugify' is deprecated; use 'slugify.Slugify' instead.",
DeprecationWarning, stacklevel=2)
super(NewKlass, self).__init__(*args, **kwargs)
return NewKlass
# get_slugify was deprecated on March 31, 2014
get_slugify = deprecate_init(Slugify)
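# Quick sanity checks (assumption: illustrative values based on the
# configuration above, not from the original source):
#   slugify('Hello, World!')          -> 'Hello-World'
#   slugify_url('The Article Title')  -> 'article-title'
#   slugify_filename('report 2014')   -> 'report_2014'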
| bsd-3-clause | -6,626,177,688,992,938,000 | 27.72973 | 96 | 0.685795 | false |