repo_name (stringlengths 5..92) | path (stringlengths 4..232) | copies (stringclasses 19 values) | size (stringlengths 4..7) | content (stringlengths 721..1.04M) | license (stringclasses 15 values) | hash (int64 -9,223,277,421,539,062,000..9,223,102,107B) | line_mean (float64 6.51..99.9) | line_max (int64 15..997) | alpha_frac (float64 0.25..0.97) | autogenerated (bool 1 class) |
---|---|---|---|---|---|---|---|---|---|---|
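A minimal sketch of working with rows that follow this schema, assuming the records are exported to a local Parquet file (the file name and license filter values are illustrative, not part of the table itself); it relies only on the column names listed above.

```python
import pandas as pd

# Load the records (hypothetical local export of this table).
df = pd.read_parquet("code_corpus.parquet")

# Keep permissively licensed, human-written files with reasonable line lengths.
subset = df[
    df["license"].isin(["mit", "bsd-2-clause", "apache-2.0"])
    & ~df["autogenerated"]
    & (df["line_max"] < 120)
]
print(subset[["repo_name", "path", "size"]].head())
```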
javiercantero/streamlink | src/streamlink/plugins/tv5monde.py | 1 | 2401 | import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http, validate
from streamlink.stream import HLSStream, HTTPStream, RTMPStream
from streamlink.utils import parse_json
from streamlink.plugins.common_jwplayer import _js_to_json
class TV5Monde(Plugin):
_url_re = re.compile(r'http://(.+\.)?(tv|tivi)5monde(plus(afrique)?)?\.com')
_videos_re = re.compile(r'"?(?:files|sources)"?:\s*(?P<videos>\[.+?\])')
_videos_embed_re = re.compile(r'(?:file:\s*|src=)"(?P<embed>.+?\.mp4|.+?/embed/.+?)"')
_videos_schema = validate.Schema(
validate.transform(_js_to_json),
validate.transform(parse_json),
validate.all([
validate.any(
validate.Schema(
{'url': validate.url()},
validate.get('url')
),
validate.Schema(
{'file': validate.url()},
validate.get('file')
),
)
])
)
@classmethod
def can_handle_url(cls, url):
return TV5Monde._url_re.match(url)
def _get_non_embed_streams(self, page):
match = self._videos_re.search(page)
if match is not None:
videos = self._videos_schema.validate(match.group('videos'))
return videos
return []
def _get_embed_streams(self, page):
match = self._videos_embed_re.search(page)
if match is None:
return []
url = match.group('embed')
if '.mp4' in url:
return [url]
res = http.get(url)
videos = self._get_non_embed_streams(res.text)
if videos:
return videos
return []
def _get_streams(self):
res = http.get(self.url)
match = self._videos_re.search(res.text)
if match is not None:
videos = self._videos_schema.validate(match.group('videos'))
else:
videos = self._get_embed_streams(res.text)
for url in videos:
if '.m3u8' in url:
for stream in HLSStream.parse_variant_playlist(self.session, url).items():
yield stream
elif 'rtmp' in url:
yield 'vod', RTMPStream(self.session, {'rtmp': url})
elif '.mp4' in url:
yield 'vod', HTTPStream(self.session, url)
__plugin__ = TV5Monde
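# Usage sketch (not part of the original plugin; the URL is illustrative):
# once this plugin is on Streamlink's plugin path, matching URLs resolve
# through it, e.g.
#   streamlink "http://www.tv5monde.com/some-live-page" best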
| bsd-2-clause | -9,086,185,595,071,028,000 | 29.782051 | 90 | 0.540608 | false |
Shinichi-Nakagawa/xp2015_baseball_tools | service/storage.py | 1 | 1676 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Shinichi Nakagawa'
from configparser import ConfigParser
from boto3.session import Session
import glob
import os
class Storage(object):
def __init__(self, config_file):
"""
Initialise the boto3 session, S3 client and bucket name from the application config file.
:param config_file: Application config file
:return:
"""
self.config = ConfigParser()
self.config.read(config_file)
self.session = Session(
aws_access_key_id=self.config['aws']['access_key'],
aws_secret_access_key=self.config['aws']['secret_access_key'],
region_name=self.config['aws']['region']
)
self.s3 = self.session.resource('s3')
self.s3client = self.session.client('s3')
self.bucket_name = self.config['baseball_report']['bucket_name']
def upload_files(self, dir_path, extension, key_name, delimiter='/', delete=True):
"""
File upload to S3
:param dir_path: input file path
:param extension: upload file extension
:param key_name: bucket key name
:param delimiter: delimiter
:param delete: delete flag (remove local files after upload)
:return: None
"""
for file_name in glob.glob(delimiter.join([dir_path, '*{extension}'.format(extension=extension)])):
remote_file_name = delimiter.join(
[
key_name,
file_name.replace('{dir_path}{delimiter}'.format(dir_path=dir_path, delimiter=delimiter), '')
]
)
self.s3client.upload_file(file_name, self.bucket_name, remote_file_name)
if delete:
os.remove(file_name)
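# Minimal usage sketch (the config file, directory and key prefix are
# hypothetical; the config must provide the [aws] and [baseball_report]
# sections read in __init__):
#   storage = Storage('config.ini')
#   storage.upload_files('/tmp/reports', '.csv', 'reports/2015', delete=False)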
| mit | 4,140,903,514,542,236,700 | 33.204082 | 113 | 0.575179 | false |
techjacker/sitemapgenerator | sitemapgenerator/crawler.py | 1 | 3955 | import re
import requests
from bs4 import BeautifulSoup
from time import sleep
from random import randint
from numbers import Number
import functools
def handle_requests_failures(func):
'''
This decorator handles requests.exceptions.RequestException errors
'''
@functools.wraps(func)
def wrapper(self, *args, **kw):
'''
Handle RequestException
'''
try:
return func(self, *args, **kw)
except requests.exceptions.RequestException as error:
print(error)
self.links_broken.append(kw['url'])
return wrapper
class Crawler:
def __init__(self, domain, quiet=False, throttle_max=3, limit=10000):
self.set_domain(domain)
self.quiet = quiet
self.limit = limit if \
isinstance(limit, Number) else 10000
self.throttle_max = throttle_max if \
isinstance(throttle_max, Number) else 3
self.links = {}
self.links_broken = []
self.headers = {'User-Agent': (
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:47.0) '
'Gecko/20100101 Firefox/47.0'
)}
def set_domain(self, domain):
if not domain:
raise Exception('domain must be defined')
if not domain.startswith('http://') and \
not domain.startswith('https://'):
domain = 'http://' + domain
self.domain = domain
@staticmethod
def extract_links(contents):
soup = BeautifulSoup(contents, 'html.parser')
return {
a.get('href'): {"soup": a}
for a in soup.find_all('a')
if a.get('href') is not None and not a.get('href').startswith('#')
}
@handle_requests_failures
def request_url(self, url):
res = requests.get(url, headers=self.headers).text
# set visited flag
if self.strip_domain(url) in self.links:
self.links[self.strip_domain(url)]['visited'] = True
return res
def strip_domain(self, url):
return re.sub('^' + re.escape(self.domain), '', url)
def merge_links(self, links, url):
for k, v in links.items():
# strip domain on internal links
if k.strip().startswith(self.domain):
k = self.strip_domain(k)
# add extra links if not homepage and not already in dict
if k and k != '/' and k not in self.links:
self.links[k] = v
def get_domain_links(self, all=False):
return {
k: v for k, v in self.links.items()
if not k.startswith('http') and (all or len(k.split('.')) == 1)
}
@property
def unvisited_links(self):
return (
k for k, v in self.get_domain_links().items() if 'visited' not in v
)
@property
def domain_links(self):
return (self.domain + l for l in self.get_domain_links(all=True))
def crawl(self, url=''):
text = self.request_url(url=self.domain + url)
links = self.extract_links(text)
self.merge_links(links, url)
def run(self, url='', recurse=False, throttle=None):
if self.quiet is not True:
print('crawling {}'.format(url if url else 'homepage'))
self.crawl(url)
no_visited_links = 1
if recurse is True and no_visited_links < self.limit:
next_unvisited_link = next(self.unvisited_links, None)
while next_unvisited_link:
self.crawl(next_unvisited_link)
next_unvisited_link = next(self.unvisited_links, None)
no_visited_links += 1
sleep(throttle if isinstance(throttle, Number)
else randint(0, self.throttle_max))
if self.quiet is not True:
print('crawled {} URLs'.format(no_visited_links + 1))
if self.links_broken:
print('found broken {} links'.format(len(self.links_broken)))
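# Minimal usage sketch of the Crawler class (the domain, throttle and limit
# values are illustrative, not part of the original module):
#   crawler = Crawler('https://example.com', quiet=True, throttle_max=1, limit=100)
#   crawler.run(recurse=True, throttle=0)
#   print(list(crawler.domain_links))
#   print(crawler.links_broken)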
| mit | 4,129,940,450,796,477,400 | 30.64 | 79 | 0.566877 | false |
qunying/gps | share/support/core/vcs/mercurial.py | 1 | 2725 | """
Mercurial VCS support plugin
Provides support for the Mercurial configuration management system.
It integrates into GPS's VCS support, and uses the same menus
as all other VCS systems supported by GPS.
You can easily edit this file if you would like to customize the Mercurial
commands that are sent for each of the menus (a commented sketch of adding
an entry follows the actions list below).
"""
###########################################################################
# No user customization below this line
###########################################################################
import GPS
import os
from vcs import register_vcs_actions, ACTION, LABEL, SEPARATOR
# Mercurial VCS Menu
actions = [
SEPARATOR,
{ACTION: "Status", LABEL: "Query _status"},
{ACTION: "Commit", LABEL: "_Commit"},
{ACTION: "Commit (via revision log)",
LABEL: "_Commit (via revision log)"},
{ACTION: "Commit (from revision log)", LABEL: "Commit file"},
SEPARATOR,
{ACTION: "History (as text)",
LABEL: "View _entire revision history (as text)"},
{ACTION: "History",
LABEL: "View _entire revision history"},
{ACTION: "History for revision",
LABEL: "View specific revision _history"},
SEPARATOR,
{ACTION: "Diff against head",
LABEL: "Compare against tip revision"},
{ACTION: "Diff against revision",
LABEL: "Compare against other revision"},
SEPARATOR,
{ACTION: "Annotate", LABEL: "Add annotations"},
{ACTION: "Remove Annotate", LABEL: "Remove annotations"},
{ACTION: "Edit revision log", LABEL: "Edit revision log"},
{ACTION: "Edit global ChangeLog", LABEL: "Edit global ChangeLog"},
{ACTION: "Remove revision log", LABEL: "Remove revision log"},
SEPARATOR,
{ACTION: "Add no commit", LABEL: "Add, no commit"},
SEPARATOR,
{ACTION: "Status dir (recursively)", LABEL:
"Directory/Query status for directory recursively"},
{ACTION: "List project",
LABEL: "Project/List all files in project"},
{ACTION: "Status project",
LABEL: "Project/Query status for project"},
{ACTION: "List project (recursively)", LABEL:
"Project/List all files in project (recursively)"},
{ACTION: "Status project (recursively)", LABEL:
"Project/Query status for project (recursively)"},
]
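# Customization sketch (hypothetical entry; the action name must match an
# existing GPS action): extra menu items can be appended to the list above
# before register_vcs_actions() is called, e.g.
#   actions.append({ACTION: "Pull", LABEL: "Pull from default"})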
# Mercurial VCS Menu registration
register_vcs_actions("Mercurial", actions)
# Mercurial VCS plugin configuration
# XML data is stored at the same location as this file .pyxml as extension
with open(os.path.splitext(__file__)[0] + '.pyxml', 'rb') as fd:
MERCURIAL_CONFIG = fd.read()
GPS.parse_xml(MERCURIAL_CONFIG)
| gpl-3.0 | -4,390,281,981,889,278,500 | 31.058824 | 77 | 0.604771 | false |
troych/PlexKodiConnect | contextmenu.py | 1 | 1380 | # -*- coding: utf-8 -*-
###############################################################################
import logging
import os
import sys
import xbmc
import xbmcaddon
###############################################################################
_addon = xbmcaddon.Addon(id='plugin.video.plexkodiconnect')
try:
_addon_path = _addon.getAddonInfo('path').decode('utf-8')
except TypeError:
_addon_path = _addon.getAddonInfo('path').decode()
try:
_base_resource = xbmc.translatePath(os.path.join(
_addon_path,
'resources',
'lib')).decode('utf-8')
except TypeError:
_base_resource = xbmc.translatePath(os.path.join(
_addon_path,
'resources',
'lib')).decode()
sys.path.append(_base_resource)
###############################################################################
import loghandler
from context_entry import ContextMenu
###############################################################################
loghandler.config()
log = logging.getLogger("PLEX.contextmenu")
###############################################################################
if __name__ == "__main__":
try:
# Start the context menu
ContextMenu()
except Exception as error:
log.exception(error)
import traceback
log.exception("Traceback:\n%s" % traceback.format_exc())
raise
| gpl-2.0 | 6,899,344,392,238,884,000 | 25.538462 | 79 | 0.454348 | false |
danielhkl/matplotlib2tikz | matplotlib2tikz/color.py | 1 | 2761 | # -*- coding: utf-8 -*-
#
import matplotlib as mpl
import numpy
def mpl_color2xcolor(data, matplotlib_color):
'''Translates a matplotlib color specification into a proper LaTeX xcolor.
'''
# Convert it to RGBA.
my_col = numpy.array(mpl.colors.ColorConverter().to_rgba(matplotlib_color))
# If the alpha channel is exactly 0, then the color is really 'none'
# regardless of the RGB channels.
if my_col[-1] == 0.0:
return data, 'none', my_col
xcol = None
# RGB values (as taken from xcolor.dtx):
available_colors = {
'red': numpy.array([1, 0, 0]),
'green': numpy.array([0, 1, 0]),
'blue': numpy.array([0, 0, 1]),
'brown': numpy.array([0.75, 0.5, 0.25]),
'lime': numpy.array([0.75, 1, 0]),
'orange': numpy.array([1, 0.5, 0]),
'pink': numpy.array([1, 0.75, 0.75]),
'purple': numpy.array([0.75, 0, 0.25]),
'teal': numpy.array([0, 0.5, 0.5]),
'violet': numpy.array([0.5, 0, 0.5]),
'black': numpy.array([0, 0, 0]),
'darkgray': numpy.array([0.25, 0.25, 0.25]),
'gray': numpy.array([0.5, 0.5, 0.5]),
'lightgray': numpy.array([0.75, 0.75, 0.75]),
'white': numpy.array([1, 1, 1])
# The colors cyan, magenta, yellow, and olive are also
# predefined by xcolor, but their RGB approximation of the
# native CMYK values is not very good. Don't use them here.
}
available_colors.update(data['custom colors'])
# Check if it exactly matches any of the colors already available.
# This case is actually treated below (alpha==1), but that loop
# may pick up combinations with black before finding the exact
# match. Hence, first check all colors.
for name, rgb in available_colors.items():
if all(my_col[:3] == rgb):
xcol = name
return data, xcol, my_col
# Check if my_col is a multiple of a predefined color and 'black'.
for name, rgb in available_colors.items():
if name == 'black':
continue
if rgb[0] != 0.0:
alpha = my_col[0] / rgb[0]
elif rgb[1] != 0.0:
alpha = my_col[1] / rgb[1]
else:
assert rgb[2] != 0.0
alpha = my_col[2] / rgb[2]
# The cases 0.0 (my_col == black) and 1.0 (my_col == rgb) are
# already accounted for by checking in available_colors above.
if all(my_col[:3] == alpha * rgb) and 0.0 < alpha < 1.0:
xcol = name + ('!%r!black' % (alpha * 100))
return data, xcol, my_col
# Lookup failed, add it to the custom list.
xcol = 'color' + str(len(data['custom colors']))
data['custom colors'][xcol] = my_col[:3]
return data, xcol, my_col
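# Usage sketch (illustrative values, not part of the original module):
#   data = {'custom colors': {}}
#   data, name, rgba = mpl_color2xcolor(data, 'red')      # name == 'red'
#   data, name, rgba = mpl_color2xcolor(data, '#336699')  # name == 'color0',
#   # and data['custom colors'] now holds its RGB triple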
| mit | 2,779,563,172,198,260,700 | 36.310811 | 79 | 0.560666 | false |
open2c/cooltools | cooltools/cli/dump_cworld.py | 1 | 2380 | import click
from . import cli
from .. import io
@cli.command()
@click.argument(
"cool_paths",
metavar="COOL_PATHS",
type=str,
nargs=-1
)
@click.argument(
"out_path",
metavar="OUT_PATH",
type=click.Path(exists=False, writable=True),
nargs=1,
)
@click.option(
"--cworld-type",
help="The format of the CWorld output. "
"'matrix' converts a single .cool file into the .matrix.txt.gz tab-separated format. "
"'tar' dumps all specified cooler files into a "
"single .tar archive containing multiple .matrix.txt.gz files (use to make "
"multi-resolution archives).",
type=click.Choice(["matrix", "tar"]),
default="matrix",
show_default=True,
)
@click.option(
"--region",
help="The coordinates of a genomic region to dump, in the UCSC format. "
"If empty (by default), dump a genome-wide matrix. This option can be used "
"only when dumping a single cooler file.",
type=str,
default="",
show_default=True,
)
@click.option(
"--balancing-type",
help="The type of the matrix balancing. 'IC_unity' - iteratively corrected "
"for the total number of contacts per locus=1.0; 'IC' - same, but preserving "
"the average total number of contacts; 'raw' - no balancing",
type=click.Choice(["IC_unity", "IC", "raw"]),
default="IC_unity",
show_default=True,
)
def dump_cworld(cool_paths, out_path, cworld_type, region, balancing_type):
"""
Convert a cooler or a group of coolers into the Dekker lab's CWorld text format.
COOL_PATHS : Paths to one or multiple .cool files
OUT_PATH : Output CWorld file path
"""
if (cworld_type == "matrix") and (len(cool_paths) > 1):
raise click.ClickException(
"Only one .cool file can be converted into the matrix " "format at a time."
)
if cworld_type == "matrix":
io.dump_cworld(
cool_paths[0],
out_path,
region=region,
iced=(balancing_type != "raw"),
iced_unity=(balancing_type == "IC_unity"),
buffer_size=int(1e8),
)
elif cworld_type == "tar":
if region:
raise Exception(
"Only genome-wide matrices and not specific regions can be dumpled"
" into a .tar CWorld archive."
)
io.dump_cworld_tar(cool_paths, out_path)
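# Example invocation (sketch; the file names are hypothetical and the exact
# subcommand name depends on how click registers it):
#   cooltools dump_cworld --cworld-type tar --balancing-type IC \
#       sample.1kb.cool sample.10kb.cool cworld_archive.tar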
| mit | 28,392,404,168,664,796 | 30.733333 | 90 | 0.611765 | false |
eResearchSA/reporting-unified | unified/models/hnas.py | 1 | 9940 | from sqlalchemy.sql import func
from . import db, id_column, to_dict, SnapshotMothods
class Owner(db.Model):
"""Storage Account/Owner"""
id = id_column()
name = db.Column(db.String(64), unique=True, nullable=False)
virtual_volume_usage = db.relationship("VirtualVolumeUsage",
backref="owner")
def json(self):
"""JSON"""
return to_dict(self, ["name"])
class Snapshot(db.Model, SnapshotMothods):
"""Storage Snapshot"""
id = id_column()
ts = db.Column(db.Integer, nullable=False, unique=True)
filesystem_usage = db.relationship("FilesystemUsage", backref="snapshot")
virtual_volume_usage = db.relationship("VirtualVolumeUsage",
backref="snapshot")
def json(self):
"""JSON"""
return to_dict(self, ["ts"])
class Filesystem(db.Model):
"""Filesystem"""
id = id_column()
name = db.Column(db.String(256), unique=True, nullable=False)
virtual_volumes = db.relationship("VirtualVolume", backref="filesystem")
usage = db.relationship("FilesystemUsage", backref="filesystem")
def json(self):
"""JSON"""
return to_dict(self, ["name"])
def summarise(self, start_ts=0, end_ts=0):
""""Gets usage of a file system between start_ts and end_ts.
Maximal usage of the period is returned.
"""
snapshot_ids = Snapshot.id_between(start_ts, end_ts)
id_query = FilesystemUsage.query.\
filter(FilesystemUsage.filesystem_id == self.id).\
filter(FilesystemUsage.snapshot_id.in_(snapshot_ids)).\
with_entities(FilesystemUsage.id)
query = FilesystemUsage.query.filter(FilesystemUsage.id.in_(id_query)).\
with_entities(func.max(FilesystemUsage.capacity),
func.min(FilesystemUsage.free),
func.max(FilesystemUsage.live_usage),
func.max(FilesystemUsage.snapshot_usage))
values = query.first()
if values.count(None) == len(values):
return {}
else:
fields = ['capacity', 'free', 'live_usage', 'snapshot_usage']
return dict(zip(fields, values))
def list(self, start_ts=0, end_ts=0):
""""Gets a list of usages of a filesystem between start_ts and end_ts.
"""
snapshots = Snapshot.between(start_ts, end_ts)
query = FilesystemUsage.query.join(snapshots).\
filter(FilesystemUsage.filesystem_id == self.id).\
order_by(snapshots.c.ts).\
with_entities(snapshots.c.ts,
FilesystemUsage.capacity,
FilesystemUsage.free,
FilesystemUsage.live_usage,
FilesystemUsage.snapshot_usage)
fields = ['ts', 'capacity', 'free', 'live_usage', 'snapshot_usage']
rslt = []
for q in query.all():
rslt.append(dict(zip(fields, q)))
return rslt
class FilesystemUsage(db.Model):
"""Filesystem Usage"""
id = id_column()
capacity = db.Column(db.BigInteger, nullable=False)
free = db.Column(db.BigInteger, nullable=False)
live_usage = db.Column(db.BigInteger, nullable=False)
snapshot_usage = db.Column(db.BigInteger, nullable=False)
snapshot_id = db.Column(None,
db.ForeignKey("snapshot.id"),
index=True,
nullable=False)
filesystem_id = db.Column(None,
db.ForeignKey("filesystem.id"),
index=True,
nullable=False)
def json(self):
"""JSON"""
return to_dict(self, ["capacity", "free", "live_usage",
"snapshot_usage", "snapshot_id", "filesystem_id"])
@classmethod
def summarise(cls, start_ts=0, end_ts=0):
""""Gets usage from their snapshots between start_ts and end_ts.
Maximal usage of the period is returned.
"""
id_query = Snapshot.id_between(start_ts, end_ts)
query = cls.query.filter(cls.snapshot_id.in_(id_query)).\
group_by(cls.filesystem_id).\
with_entities(cls.filesystem_id,
func.max(cls.capacity).label('capacity'),
func.min(cls.free).label('free'),
func.max(cls.live_usage).label('live_usage'),
func.max(cls.snapshot_usage).label('snapshot_usage'))
file_systems = dict(Filesystem.query.with_entities(Filesystem.id, Filesystem.name).all())
fields = ['filesystem', 'capacity', 'free', 'live_usage', 'snapshot_usage']
rslt = []
for q in query.all():
mappings = (file_systems[q[0]], q[1], q[2], q[3], q[4])
rslt.append(dict(zip(fields, mappings)))
return rslt
class VirtualVolume(db.Model):
"""Virtual Volume"""
id = id_column()
name = db.Column(db.String(256), unique=True, nullable=False)
usage = db.relationship("VirtualVolumeUsage", backref="virtual_volume")
filesystem_id = db.Column(None,
db.ForeignKey("filesystem.id"),
index=True,
nullable=False)
def json(self):
"""JSON"""
return to_dict(self, ["name", "filesystem_id"])
def summarise(self, start_ts=0, end_ts=0):
""""Gets usage of a virtual volume between start_ts and end_ts.
Maximal usage of the period is returned.
"""
snapshot_ids = Snapshot.id_between(start_ts, end_ts)
id_query = VirtualVolumeUsage.query.\
filter(VirtualVolumeUsage.virtual_volume_id == self.id).\
filter(VirtualVolumeUsage.snapshot_id.in_(snapshot_ids)).\
with_entities(VirtualVolumeUsage.id)
query = VirtualVolumeUsage.query.\
filter(VirtualVolumeUsage.id.in_(id_query)).\
group_by(VirtualVolumeUsage.owner_id).\
with_entities(VirtualVolumeUsage.owner_id,
func.max(VirtualVolumeUsage.quota),
func.max(VirtualVolumeUsage.files),
func.max(VirtualVolumeUsage.usage))
fields = ['owner', 'quota', 'files', 'usage']
rslt = []
for q in query.all():
values = list(q)
# almost all usages has no owner, query owner directly if needed
if values[0]:
values[0] = Owner.query.get(q[0]).name
rslt.append(dict(zip(fields, values)))
return rslt
def list(self, start_ts=0, end_ts=0):
""""Gets a list of usages of a virtual volume between start_ts and end_ts.
"""
snapshots = Snapshot.between(start_ts, end_ts)
query = VirtualVolumeUsage.query.join(snapshots).\
filter(VirtualVolumeUsage.virtual_volume_id == self.id).\
order_by(VirtualVolumeUsage.owner_id, snapshots.c.ts).\
with_entities(VirtualVolumeUsage.owner_id,
snapshots.c.ts,
VirtualVolumeUsage.quota,
VirtualVolumeUsage.files,
VirtualVolumeUsage.usage)
fields = ['ts', 'quota', 'files', 'usage']
rslt = {}
for q in query.all():
if q[0]:
owner = Owner.query.get(q[0]).name
else:
owner = 'UNKNOWN' # no owner
if owner not in rslt:
rslt[owner] = []
rslt[owner].append(dict(zip(fields, q[1:])))
return rslt
class VirtualVolumeUsage(db.Model):
"""Virtual Volume Usage"""
id = id_column()
files = db.Column(db.BigInteger, nullable=False)
quota = db.Column(db.BigInteger, nullable=False)
usage = db.Column(db.BigInteger, nullable=False)
owner_id = db.Column(None, db.ForeignKey("owner.id"))
snapshot_id = db.Column(None,
db.ForeignKey("snapshot.id"),
index=True,
nullable=False)
virtual_volume_id = db.Column(None,
db.ForeignKey("virtual_volume.id"),
index=True,
nullable=False)
def json(self):
"""JSON"""
return to_dict(self, ["files", "quota", "usage", "owner_id", "snapshot_id",
"virtual_volume_id"])
@classmethod
def summarise(cls, start_ts=0, end_ts=0):
""""Gets usage from their snapshots between start_ts and end_ts.
Maximal usage of the period is returned.
"""
id_query = Snapshot.id_between(start_ts, end_ts)
query = cls.query.filter(cls.snapshot_id.in_(id_query)).\
group_by(cls.virtual_volume_id, cls.owner_id).\
with_entities(cls.virtual_volume_id, cls.owner_id,
func.max(cls.quota).label('quota'),
func.max(cls.files).label('files'),
func.max(cls.usage).label('usage'))
fq = VirtualVolume.query.join(Filesystem).\
with_entities(VirtualVolume.id, Filesystem.name, VirtualVolume.name).all()
file_systems = {}
for fs in fq:
file_systems[fs[0]] = fs[1:]
# Not all virtual volumes has owner
owners = dict(Owner.query.with_entities(Owner.id, Owner.name).all())
fields = ['filesystem', 'virtual_volume', 'owner', 'quota', 'files', 'usage']
rslt = []
for q in query.all():
fn, vn = file_systems[q[0]]
owner = owners[q[1]] if q[1] else ''
mappings = (fn, vn, owner, q[2], q[3], q[4])
rslt.append(dict(zip(fields, mappings)))
return rslt
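# Usage sketch (requires an application/DB context with these tables
# populated; the filesystem name and timestamps are hypothetical):
#   fs = Filesystem.query.filter_by(name='hnas-fs01').first()
#   fs.summarise(start_ts=1420070400, end_ts=1422748800)
#   # -> {'capacity': ..., 'free': ..., 'live_usage': ..., 'snapshot_usage': ...}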
| apache-2.0 | -236,425,678,749,025,100 | 37.527132 | 97 | 0.547485 | false |
pjamesjoyce/lcopt | lcopt/interact.py | 1 | 48802 | from flask import Flask, request, render_template, redirect, send_file
import webbrowser
import json
from ast import literal_eval
from lcopt.io import exchange_factory
from collections import OrderedDict
from itertools import groupby
import xlsxwriter
from io import BytesIO
import os
from lcopt.bw2_export import Bw2Exporter
from lcopt.export_view import LcoptView
from lcopt.utils import find_port
from lcopt.settings import settings
class FlaskSandbox():
def __init__(self, modelInstance):
self.modelInstance = modelInstance
self.get_sandbox_variables()
# Set up the dictionary of actions that can be processed by POST requests
self.postActions = {
'savePosition': self.savePosition,
'saveModel': self.saveModel,
'newProcess': self.newProcess,
'echo': self.echo,
'searchEcoinvent': self.searchEcoinvent,
'searchBiosphere': self.searchBiosphere,
'newConnection': self.newConnection,
'addInput': self.addInput,
'inputLookup': self.inputLookup,
'parse_parameters': self.parameter_parsing,
'create_function': self.create_function,
'add_parameter': self.add_parameter,
'simaPro_export': self.simaPro_export,
'removeInput': self.removeInput,
'unlinkIntermediate': self.unlinkIntermediate,
'update_settings': self.update_settings,
'export_view_file': self.export_view_file
}
#print (self.modelInstance.newVariable)
def shutdown_server(self): # pragma: no cover
func = request.environ.get('werkzeug.server.shutdown')
if func is None:
raise RuntimeError('Not running with the Werkzeug Server')
func()
def output_code(self, process_id):
exchanges = self.modelInstance.database['items'][process_id]['exchanges']
production_filter = lambda x: x['type'] == 'production'
code = list(filter(production_filter, exchanges))[0]['input'][1]
return code
def get_sandbox_variables(self):
m = self.modelInstance
db = m.database['items']
matrix = m.matrix
ext_dbs = [x['name'] for x in m.external_databases]
sandbox_positions = m.sandbox_positions
products = OrderedDict((k, v) for k, v in db.items() if v['type'] == 'product')
product_codes = [k[1] for k in products.keys()]
processes = OrderedDict((k, v) for k, v in db.items() if v['type'] == 'process')
process_codes = [k[1] for k in processes.keys()]
process_name_map = {k[1]: v['name'] for k, v in processes.items()}
self.reverse_process_name_map = {value: key for key, value in process_name_map.items()}
# note this maps from output code to process
process_output_map = {self.output_code(x): x[1] for x in processes.keys()}
self.reverse_process_output_map = {value: key for key, value in process_output_map.items()}
# map products to producing process name
self.production_map = {k:process_name_map[v] for k, v, in process_output_map.items()}
intermediates = {k: v for k, v in products.items() if v['lcopt_type'] == 'intermediate'}
intermediate_codes = [k[1] for k in intermediates.keys()]
intermediate_map = {k[1]: v['name'] for k, v in intermediates.items()}
self.reverse_intermediate_map = {value: key for key, value in intermediate_map.items()}
#process_output_name_map = {process_code: output_name for x in processes.keys()}
process_output_name_map = {x[1]: intermediate_map[self.reverse_process_output_map[x[1]]] for x in processes.keys()}
inputs = OrderedDict((k, v) for k, v in products.items() if v['lcopt_type'] == 'input')
input_codes = [k[1] for k in inputs.keys()]
input_map = {k[1]: v['name'] for k, v in inputs.items()}
self.reverse_input_map = {value: key for key, value in input_map.items()}
biosphere = OrderedDict((k, v) for k, v in products.items() if v['lcopt_type'] == 'biosphere')
biosphere_codes = [k[1] for k in biosphere.keys()]
biosphere_map = {k[1]: v['name'] for k, v in biosphere.items()}
self.reverse_biosphere_map = {value: key for key, value in biosphere_map.items()}
label_map = {**input_map, **process_output_name_map, **biosphere_map}
#print('label_map = {}\n'.format(label_map))
self.outputlabels = [{'process_id': x, 'output_name': process_output_name_map[x]} for x in process_codes]
link_indices = [process_output_map[x] if x in intermediate_codes else x for x in product_codes]
if matrix is not None:
row_totals = matrix.sum(axis=1)
input_row_totals = {k: row_totals[m.names.index(v)] for k, v in input_map.items()}
biosphere_row_totals = {k: row_totals[m.names.index(v)] for k, v in biosphere_map.items()}
# compute the nodes
i = 1
self.nodes = []
for t in process_codes:
self.nodes.append({'name': process_name_map[t], 'type': 'transformation', 'id': t, 'initX': i * 100, 'initY': i * 100})
i += 1
i = 1
for p in input_codes:
if input_row_totals[p] != 0:
item = db[(m.database['name'], p)]
el = item.get('ext_link')
if el:
if el[0] == m.database['name']:
ext_db_items = m.database['items']
ext_item = ext_db_items[el]
#ext_ref_prod = ext_item.get('reference product','')
ext_name = ext_item.get('name', '')
ext_location = ext_item.get('location', '')
ext_item_data = "<div><i><b>This is an internal link</b></i></br><b>Database: </b>{}</br><b>Process: </b>{}</br><b>Location: </b>{}</br></div>".format(el[0], ext_name, ext_location)
else:
ext_db_ix = ext_dbs.index(el[0])
ext_db_items = m.external_databases[ext_db_ix]['items']
ext_item = ext_db_items[el]
ext_ref_prod = ext_item.get('reference product','')
ext_name = ext_item.get('name', '')
ext_location = ext_item.get('location', '')
ext_item_data = "<div><b>Database: </b>{}</br><b>Reference product: </b>{}</br><b>Process: </b>{}</br><b>Location: </b>{}</br></div>".format(el[0], ext_ref_prod, ext_name, ext_location)
else:
ext_item_data = "<div><i><b>This is a burden free input</b></i></div>"
self.nodes.append({'name': input_map[p], 'type': 'input', 'id': p + "__0", 'initX': i * 50 + 150, 'initY': i * 50, 'ext_item_data': ext_item_data})
i += 1
i = 1
for p in biosphere_codes:
if biosphere_row_totals[p] != 0:
item = db[(m.database['name'], p)]
el = item.get('ext_link')
if el:
ext_db_ix = ext_dbs.index(el[0])
ext_db_items = m.external_databases[ext_db_ix]['items']
ext_item = ext_db_items[el]
if type(ext_item['categories']) == tuple:
ext_categories = "; ".join(ext_item['categories'])
else:
ext_categories = ext_item['categories']
ext_item_data = "<div><b>Database: </b>{}</br><b>Name: </b>{}</br><b>Type: </b>{}</br><b>Categories: </b>{}</br></div>".format(el[0], ext_item['name'], ext_item['type'], ext_categories)
else:
ext_item_data = None
self.nodes.append({'name': biosphere_map[p], 'type': 'biosphere', 'id': p + "__0", 'initX': i * 50 + 150, 'initY': i * 50, 'ext_item_data': ext_item_data})
i += 1
# compute links
self.links = []
input_duplicates = []
biosphere_duplicates = []
#check there is a matrix (new models won't have one until parameter_scan() is run)
if matrix is not None:
for c, column in enumerate(matrix.T):
for r, i in enumerate(column):
if i > 0:
p_from = link_indices[r]
p_to = link_indices[c]
if p_from in input_codes:
suffix = "__" + str(input_duplicates.count(p_from))
input_duplicates.append(p_from)
p_type = 'input'
elif p_from in biosphere_codes:
suffix = "__" + str(biosphere_duplicates.count(p_from))
biosphere_duplicates.append(p_from)
p_type = 'biosphere'
else:
suffix = ""
p_type = 'intermediate'
self.links.append({'sourceID': p_from + suffix, 'targetID': p_to, 'type': p_type, 'amount': 1, 'label': label_map[p_from]})
#add extra nodes
while len(input_duplicates) > 0:
p = input_duplicates.pop()
count = input_duplicates.count(p)
if count > 0:
suffix = "__" + str(count)
ext_item_data = [x['ext_item_data'] for x in self.nodes if x['id'] == p + "__0"][0]
self.nodes.append({'name': input_map[p], 'type': 'input', 'id': p + suffix, 'initX': i * 50 + 150, 'initY': i * 50, 'ext_item_data': ext_item_data})
i += 1
while len(biosphere_duplicates) > 0:
p = biosphere_duplicates.pop()
count = biosphere_duplicates.count(p)
if count > 0:
suffix = "__" + str(count)
ext_item_data = [x['ext_item_data'] for x in self.nodes if x['id'] == p + "__0"][0]
self.nodes.append({'name': biosphere_map[p], 'type': 'biosphere', 'id': p + suffix, 'initX': i * 50 + 150, 'initY': i * 50, 'ext_item_data': ext_item_data})
i += 1
#try and reset the locations
for n in self.nodes:
node_id = n['id']
if node_id in sandbox_positions:
n['initX'] = sandbox_positions[node_id]['x']
n['initY'] = sandbox_positions[node_id]['y']
#print(self.nodes)
#print(inputs)
#print(process_name_map)
def savePosition(self, f):
if f['uuid'] not in self.modelInstance.sandbox_positions:
self.modelInstance.sandbox_positions[f['uuid']] = {}
self.modelInstance.sandbox_positions[f['uuid']]['x'] = f['x']
self.modelInstance.sandbox_positions[f['uuid']]['y'] = f['y']
#print('Setting {} to ({},{})'.format(f['uuid'], f['x'], f['y']))
return "OK"
def saveModel(self, postData): # pragma: no cover
#print ("this is where we save the model")
self.modelInstance.save()
return "OK"
def newProcess(self, postData):
#print ("this is where we're going to create the process, using...")
#print (postData)
m = self.modelInstance
name = postData['process_name']
unit = postData['unit']
output_name = postData['output_name']
exchanges = [{'name': output_name, 'type': 'production', 'unit': unit, 'lcopt_type': 'intermediate'}]
location = 'GLO'
m.create_process(name, exchanges, location, unit)
self.modelInstance.parameter_scan()
#print (m.database['items'][(m.database['name'], postData['uuid'])])
return "OK"
def newConnection(self, postData):
#print(postData)
db = self.modelInstance.database
self.get_sandbox_variables()
source = postData['sourceId']
#print(self.reverse_process_output_map[source])
target = postData['targetId']
label = postData['label']
new_exchange = {'amount': 1,
'comment': 'technosphere exchange of {}'.format(label),
'input': (db['name'], self.reverse_process_output_map[source]),
'type': 'technosphere',
'uncertainty type': 1}
db['items'][(db['name'], target)]['exchanges'].append(new_exchange)
self.modelInstance.parameter_scan()
#print (db['items'][(db['name'], target)]['exchanges'])
return "OK"
def addInput(self, postData):
#print(postData)
my_targetId = postData['targetId']
my_name = postData['name']
#my_type = postData['type']
my_unit = postData['unit']
my_location = postData['location']
m = self.modelInstance
exchange_to_link = m.get_exchange(my_name)
if exchange_to_link is False:
#Create the new product
kwargs = {}
if 'ext_link' in postData.keys():
my_ext_link = literal_eval(postData['ext_link'])
kwargs['ext_link'] = my_ext_link
#exchange_to_link = m.create_product (name = my_name, location =my_location , unit=my_unit, ext_link = my_ext_link)
#print('created linked product')
#else:
if 'lcopt_type' in postData.keys():
lcopt_type = postData['lcopt_type']
kwargs['lcopt_type'] = lcopt_type
exchange_to_link = m.create_product (name=my_name, location=my_location, unit=my_unit, **kwargs)
#print('created unlinked product')
#link the product
#this_exchange = m.get_exchange(my_name)
#print(this_exchange)
this_exchange_object = exchange_factory(exchange_to_link, 'technosphere', 1, 1, '{} exchange of {}'.format('technosphere', my_name))
#print (this_exchange_object)
target_item = m.database['items'][(m.database['name'], my_targetId)]
#[print(target_item)]
target_item['exchanges'].append(this_exchange_object)
#run the parameter scan
m.parameter_scan()
return "OK"
def update_sandbox_on_delete(self, modelInstance, full_id):
id_components = full_id.split("__")
alt_id_sandbox_positions = {tuple(k.split("__")): v for k, v in modelInstance.sandbox_positions.items()}
new_sandbox_positions = {}
for k, v in alt_id_sandbox_positions.items():
#print (k)
#print(id_components)
if len(k) == 1:
new_sandbox_positions['{}'.format(*k)] = v
elif id_components[0] in k and k[1] == id_components[1]:
pass
elif id_components[0] in k and int(k[1]) > int(id_components[1]):
new_sandbox_positions['{0}__{1}'.format(k[0], int(k[1]) - 1)] = v
else:
new_sandbox_positions['{}__{}'.format(*k)] = v
modelInstance.sandbox_positions = new_sandbox_positions
return True
def removeInput(self, postData):
m = self.modelInstance
db_name = m.database.get('name')
process_code = (db_name, postData['targetId'])
input_code = (db_name, postData['sourceId'].split("_")[0])
m.remove_input_link(process_code, input_code)
self.update_sandbox_on_delete(m, postData['sourceId'])
# TODO: Sort out sandbox variables
return "OK"
def unlinkIntermediate(self, postData):
m = self.modelInstance
m.unlink_intermediate(postData['sourceId'], postData['targetId'])
return "OK"
def inputLookup(self, postData):
m = self.modelInstance
myInput = m.database['items'][(m.database['name'], postData['code'])]
return_data = {}
if 'ext_link' in myInput.keys():
ext_link = myInput['ext_link']
if ext_link[0] == m.database['name']:
print('this is an internal external link')
ext_db = m.database['items'] #[x['items'] for x in m.external_databases if x['name'] == ext_link[0]][0]
else:
ext_db = [x['items'] for x in m.external_databases if x['name'] == ext_link[0]][0]
full_link = ext_db[ext_link]
if postData['format'] == 'ecoinvent':
full_link_string = "{} {{{}}} [{}]".format(full_link['name'], full_link['location'], full_link['unit'])
elif postData['format'] == 'biosphere':
if full_link['type'] == 'emission':
full_link_string = '{} (emission to {}) [{}]'.format(full_link['name'], ", ".join(full_link['categories']), full_link['unit'])
else:
full_link_string = '{} ({}) [{}]'.format(full_link['name'], ", ".join(full_link['categories']), full_link['unit'])
return_data['isLinked'] = True
return_data['ext_link'] = str(ext_link)
return_data['ext_link_string'] = full_link_string
return_data['ext_link_unit'] = full_link['unit']
else:
#print('This is an unlinked product')
return_data['isLinked'] = False
return_data['unlinked_unit'] = myInput['unit']
return json.dumps(return_data)
def echo(self, postData):
data = {'message': 'Hello from echo'}
return json.dumps(data)
def searchEcoinvent(self, postData):
search_term = postData['search_term']
location = postData['location']
markets_only = postData['markets_only'] in ['True', 'true', 'on']
m = self.modelInstance
#print(type(markets_only))
#print(location)
if location == "":
#print ('no location')
location = None
result = m.search_databases(search_term, location, markets_only, databases_to_search=m.technosphere_databases, allow_internal=True)
json_dict = {str(k): v for k, v in dict(result).items()}
data = {'message': 'hello from ecoinvent', 'search_term': search_term, 'result': json_dict, 'format': 'ecoinvent'}
return json.dumps(data)
def searchBiosphere(self, postData):
search_term = postData['search_term']
m = self.modelInstance
result = m.search_databases(search_term, databases_to_search=m.biosphere_databases, allow_internal=False)
json_dict = {str(k): v for k, v in dict(result).items()}
data = {'message': 'hello from biosphere3', 'search_term': search_term, 'result': json_dict, 'format': 'biosphere'}
#print (json_dict)
return json.dumps(data)
def create_function(self, postData):
#print(postData)
new_function = postData['my_function']
function_for = postData['for']
if function_for.split("_")[-1] == "production":
parameter = self.modelInstance.production_params[function_for]
elif function_for.split("_")[-1] == "allocation":
parameter = self.modelInstance.allocation_params[function_for]
else:
parameter = self.modelInstance.params[function_for]
parameter['function'] = new_function
parameter['description'] = postData['description']
return "OK"
def parameter_sorting(self):
parameters = self.modelInstance.params
production_params = self.modelInstance.production_params
ext_params = self.modelInstance.ext_params
allocation_params = self.modelInstance.allocation_params
# create a default parameter set if there isn't one yet
if len(self.modelInstance.parameter_sets) == 0:
print ('No parameter sets - creating a default set')
self.modelInstance.parameter_sets['ParameterSet_1'] = OrderedDict()
for param in parameters:
self.modelInstance.parameter_sets['ParameterSet_1'][param] = 0
for param in production_params:
self.modelInstance.parameter_sets['ParameterSet_1'][param] = 1
for param in allocation_params:
self.modelInstance.parameter_sets['ParameterSet_1'][param] = 1
for param in ext_params:
self.modelInstance.parameter_sets['ParameterSet_1'][param['name']] = param['default']
exporter = Bw2Exporter(self.modelInstance)
exporter.evaluate_parameter_sets()
evaluated_parameters = self.modelInstance.evaluated_parameter_sets
subsectionTitles = {
'input': "Inputs from the 'technosphere'",
'intermediate': "Inputs from other processes",
'biosphere': "Direct emissions to the environment"
}
to_name = lambda x: parameters[x]['to_name']
input_order = lambda x: parameters[x]['coords'][1]
type_of = lambda x: parameters[x]['type']
rev_p_params = {v['from_name']: k for k, v in production_params.items()}
rev_a_params = {v['from_name']: k for k, v in allocation_params.items()}
sorted_keys = sorted(parameters, key=input_order)
sorted_parameters = []
for target, items in groupby(sorted_keys, to_name):
section = {'name': target, 'my_items': []}
this_p_param = rev_p_params[target]
if production_params[this_p_param].get('function'):
#print ('{} determined by a function'.format(this_p_param))
values = ['{} = {:.3f}'.format(production_params[this_p_param]['function'], e_ps[this_p_param]) for e_ps_name, e_ps in evaluated_parameters.items()]
isFunction = True
else:
values = [ps[this_p_param] if this_p_param in ps.keys() else '' for ps_name, ps in self.modelInstance.parameter_sets.items()]
isFunction = False
subsection = {'name': 'Production exchange (Output)', 'my_items': []}
#subsection['my_items'].append({'id': this_p_param, 'name': 'Output of {}'.format(production_params[this_p_param]['from_name']), 'existing_values': values, 'unit': production_params[this_p_param]['unit'], 'isFunction': isFunction})
subsection['my_items'].append({'id': this_p_param, 'name': '{}'.format(production_params[this_p_param]['from_name']), 'existing_values': values, 'unit': production_params[this_p_param]['unit'], 'isFunction': isFunction})
section['my_items'].append(subsection)
if self.modelInstance.allow_allocation:
this_a_param = rev_a_params[target]
if allocation_params[this_a_param].get('function'):
#print ('{} determined by a function'.format(this_p_param))
values = ['{} = {:.3f}'.format(allocation_params[this_a_param]['function'], e_ps[this_a_param]) for e_ps_name, e_ps in evaluated_parameters.items()]
isFunction = True
else:
values = [ps[this_a_param] if this_a_param in ps.keys() else '' for ps_name, ps in self.modelInstance.parameter_sets.items()]
isFunction = False
subsection = {'name': 'Allocation parameter', 'my_items': []}
subsection['my_items'].append({'id': this_a_param, 'name': '{}'.format(allocation_params[this_a_param]['from_name']), 'existing_values': values, 'unit': allocation_params[this_a_param]['unit'], 'isFunction': isFunction})
section['my_items'].append(subsection)
sorted_exchanges = sorted(items, key=type_of)
#print (sorted_exchanges)
for type, exchanges in groupby(sorted_exchanges, type_of):
#print('\t{}'.format(type))
subsection = {'name': subsectionTitles[type], 'my_items': []}
for exchange in exchanges:
if parameters[exchange].get('function'):
#print ('{} determined by a function'.format(exchange))
values = ['{} = {:.3f}'.format(parameters[exchange]['function'], e_ps[exchange]) for e_ps_name, e_ps in evaluated_parameters.items()]
isFunction = True
else:
values = [ps[exchange] if exchange in ps.keys() else '' for ps_name, ps in self.modelInstance.parameter_sets.items()]
isFunction = False
#print('\t\t{} ({}) {}'.format(parameters[exchange]['from_name'], exchange, values))
subsection['my_items'].append({'id': exchange, 'name': parameters[exchange]['from_name'], 'existing_values': values, 'unit': parameters[exchange]['unit'], 'isFunction': isFunction})
section['my_items'].append(subsection)
db_code = (self.modelInstance.database['name'], parameters[exchange]['to'])
#print(db_code)
unit = self.modelInstance.database['items'][db_code]['unit']
item_name = self.production_map[db_code[1]]
print(item_name)
#section['name'] = "{}\t({})".format(target, unit)
section['name'] = item_name
sorted_parameters.append(section)
ext_section = {'name': 'Global Parameters', 'my_items': [{'name': 'User created', 'my_items': []}]}
for e_p in self.modelInstance.ext_params:
values = [ps[e_p['name']] if e_p['name'] in ps.keys() else e_p['default'] for ps_name, ps in self.modelInstance.parameter_sets.items()]
ext_section['my_items'][0]['my_items'].append({'id': e_p['name'], 'name': e_p['description'], 'existing_values': values, 'unit': e_p.get('unit', ''), 'isFunction': False})
sorted_parameters.append(ext_section)
return sorted_parameters
def parameter_parsing(self, postData):
#print(postData)
myjson = json.loads(postData['data'], object_pairs_hook=OrderedDict)
#print(myjson)
current_parameter_sets = []
for line in myjson:
line_id = line['id']
if line['Name'] != '':
reserved = ['Name', 'id', 'Unit']
for k in line.keys():
if k not in reserved:
if k not in current_parameter_sets:
current_parameter_sets.append(k)
#print (k, line['id'], line[k])
if line[k] == '':
line[k] = 0
if k in self.modelInstance.parameter_sets.keys():
if line[k] != '[FUNCTION]':
self.modelInstance.parameter_sets[k][line_id] = float(line[k])
else:
self.modelInstance.parameter_sets[k] = OrderedDict()
#print ('created {}'.format(k))
if line[k] != '[FUNCTION]':
self.modelInstance.parameter_sets[k][line_id] = float(line[k])
new_parameter_sets = OrderedDict()
for ps in current_parameter_sets:
new_parameter_sets[ps] = self.modelInstance.parameter_sets[ps]
self.modelInstance.parameter_sets = new_parameter_sets
#print([k for k in self.modelInstance.parameter_sets.keys()])
self.modelInstance.save()
#print('parameters saved')
return 'OK'
#return redirect("/")
def add_parameter(self, postData):
self.modelInstance.add_parameter(postData['param_id'], postData['param_description'], float(postData['param_default']), postData['param_unit'])
#print ('Added {} (default {}) added to global parameters'.format(postData['param_id'], postData['param_default']))
return "OK"
def simaPro_export(self, postData):
self.modelInstance.database_to_SimaPro_csv()
self.modelInstance.generate_parameter_set_excel_file()
return "OK"
def update_settings(self, postData):
print(postData)
try:
new_amount = float(postData['settings_amount'])
except:
new_amount = self.modelInstance.analysis_settings['amount']
if new_amount != 0:
self.modelInstance.analysis_settings['amount'] = new_amount
myjson = json.loads(postData['settings_methods'])
self.modelInstance.analysis_settings['methods'] = [tuple(x) for x in myjson]
if postData['allow_allocation'] == 'true':
self.modelInstance.allow_allocation = True
else:
self.modelInstance.allow_allocation = False
print (self.modelInstance.allow_allocation)
return "OK"
def export_view_file(self, postData):
model = self.modelInstance
exporter = LcoptView(model)
exporter.export()
return "OK"
def create_excel_summary(self):
settings = self.modelInstance.result_set['settings']
results = self.modelInstance.result_set['results']
method_names = ['{}{}'.format(x[0].upper(), x[1:]) for x in settings['method_names']]
ps_names = settings['ps_names']
#create an output stream
output = BytesIO()
workbook = xlsxwriter.Workbook(output)
worksheet = workbook.add_worksheet()
base_format = {'border': 1, 'align': 'center'}
base_header_format = {'border': 1, 'align': 'center', 'bold': True, 'text_wrap': True}
cell_format = workbook.add_format(base_format)
cell_format.set_align('vcenter')
row_header_format = workbook.add_format(base_header_format)
row_header_format.set_align('vcenter')
col_header_format = workbook.add_format(base_header_format)
col_header_format.set_align('vcenter')
title_format = workbook.add_format({'bold': True, 'font_size': 20})
row_offset = 2
col_offset = 1
worksheet.write(row_offset, col_offset, 'Impact', col_header_format)
worksheet.write(row_offset, col_offset + 1, 'Unit', col_header_format)
worksheet.write(0, 1, '{} summary'.format(self.modelInstance.name), title_format)
for i, m in enumerate(method_names):
for j, p in enumerate(ps_names):
worksheet.write(i + row_offset + 1, j + col_offset + 2, results[j]['scores'][i], cell_format)
for i, m in enumerate(method_names):
worksheet.write(i + row_offset + 1, col_offset, m, row_header_format)
worksheet.write(i + row_offset + 1, col_offset + 1, settings['method_units'][i], row_header_format)
for j, p in enumerate(ps_names):
worksheet.write(row_offset, j + col_offset + 2, p, col_header_format)
start_col, end_col = xlsxwriter.utility.xl_col_to_name(0), xlsxwriter.utility.xl_col_to_name(0)
worksheet.set_column('{}:{}'.format(start_col, end_col), 5)
start_col, end_col = xlsxwriter.utility.xl_col_to_name(col_offset), xlsxwriter.utility.xl_col_to_name(col_offset)
worksheet.set_column('{}:{}'.format(start_col, end_col), 25)
start_col, end_col = xlsxwriter.utility.xl_col_to_name(col_offset + 1), xlsxwriter.utility.xl_col_to_name(col_offset + 1 + len(ps_names))
worksheet.set_column('{}:{}'.format(start_col, end_col), 12)
workbook.close()
#go back to the beginning of the stream
output.seek(0)
return output
def create_excel_method(self, m):
settings = self.modelInstance.result_set['settings']
results = self.modelInstance.result_set['results']
method_names = ['{}{}'.format(x[0].upper(), x[1:]) for x in settings['method_names']]
method = method_names[m]
ps_names = settings['ps_names']
table_data = []
for i, p in enumerate(ps_names):
foreground_results = results[i]['foreground_results']
this_item = []
for k, v in foreground_results.items():
running_total = 0
for j, _ in enumerate(ps_names):
running_total += abs(results[j]['foreground_results'][k][m])
if(running_total != 0):
this_item.append({'name': k, 'value': v[m], 'rt': running_total})
this_item = sorted(this_item, key=lambda x: x['rt'], reverse=True)
table_data.append(this_item)
#print(table_data)
output = BytesIO()
workbook = xlsxwriter.Workbook(output)
worksheet = workbook.add_worksheet()
base_format = {'border': 1, 'align': 'center'}
base_header_format = {'border': 1, 'align': 'center', 'bold': True, 'text_wrap': True}
cell_format = workbook.add_format(base_format)
cell_format.set_align('vcenter')
total_format = workbook.add_format(base_header_format)
total_format.set_align('vcenter')
total_format.set_bg_color('#eeeeee')
row_header_format = workbook.add_format(base_header_format)
row_header_format.set_align('vcenter')
col_header_format = workbook.add_format(base_header_format)
col_header_format.set_align('vcenter')
title_format = workbook.add_format({'bold': True, 'font_size': 12})
row_offset = 4
col_offset = 1
worksheet.write(0, 1, 'Model', title_format)
worksheet.write(0, 2, self.modelInstance.name, title_format)
worksheet.write(1, 1, 'Method', title_format)
worksheet.write(1, 2, method, title_format)
worksheet.write(2, 1, 'Unit', title_format)
worksheet.write(2, 2, settings['method_units'][m], title_format)
worksheet.write(row_offset, col_offset, 'Process', col_header_format)
worksheet.write(row_offset + 1, col_offset, 'Total', total_format)
for i, p in enumerate(ps_names):
worksheet.write(row_offset, col_offset + i + 1, p, col_header_format)
worksheet.write(row_offset + 1, col_offset + i + 1, results[i]['scores'][m], total_format)
for i, item in enumerate(table_data[0]):
worksheet.write(row_offset + i + 2, col_offset, item['name'], row_header_format)
no_items = len(table_data[0])
for i, item in enumerate(table_data):
for j in range(no_items):
worksheet.write(row_offset + j + 2, col_offset + i + 1, item[j]['value'], cell_format)
start_col, end_col = xlsxwriter.utility.xl_col_to_name(0), xlsxwriter.utility.xl_col_to_name(0)
worksheet.set_column('{}:{}'.format(start_col, end_col), 5)
start_col, end_col = xlsxwriter.utility.xl_col_to_name(col_offset), xlsxwriter.utility.xl_col_to_name(col_offset)
worksheet.set_column('{}:{}'.format(start_col, end_col), 25)
start_col, end_col = xlsxwriter.utility.xl_col_to_name(col_offset + 1), xlsxwriter.utility.xl_col_to_name(col_offset + len(ps_names))
worksheet.set_column('{}:{}'.format(start_col, end_col), 12)
workbook.close()
output.seek(0)
return output
def create_app(self):
app = Flask(__name__)
def uc_first(string):
return string[0].upper() + string[1:]
app.jinja_env.filters['uc_first'] = uc_first
@app.route('/')
def index():
name = self.modelInstance.name
self.get_sandbox_variables()
args = {'model': {'name': name}, 'nodes': self.nodes, 'links': self.links, 'outputlabels': self.outputlabels}
return render_template('sandbox.html', args=args)
@app.route('/process_post', methods=['POST'])
def process_post():
try:
f = request.form
except:
f = request.get_json()
#print(f)
action = self.postActions[f['action']]
return action(f)
#return "OK"
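# Example request handled by this route (sketch; values are illustrative):
#   POST /process_post with form fields
#   {'action': 'savePosition', 'uuid': '<node id>', 'x': 120, 'y': 80}
# The 'action' value selects the handler from self.postActions set up in __init__.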
@app.route('/shutdown')
def shutdown(): # pragma: no cover
self.shutdown_server()
return render_template('shutdown.html')
@app.route('/inputs.json')
def inputs_as_json():
"""creates a json file of the reverse input map to send from the server"""
self.get_sandbox_variables()
# to_json = [x for x in self.reverse_input_map.keys()]
#to_json = reverse_input_map
to_json = [{'name': k, 'code': v} for k, v in self.reverse_input_map.items()]
input_json = json.dumps(to_json)
return input_json
@app.route('/biosphere.json')
def biosphere_as_json():
"""creates a json file of the reverse biosphere map to send from the server"""
self.get_sandbox_variables()
# to_json = [x for x in self.reverse_input_map.keys()]
#to_json = reverse_input_map
to_json = [{'name': k, 'code': v} for k, v in self.reverse_biosphere_map.items()]
biosphere_json = json.dumps(to_json)
return biosphere_json
@app.route('/intermediates.json')
def intermediates_as_json():
"""creates a json file of the reverse intermediate map to send from the server"""
self.get_sandbox_variables()
# to_json = [x for x in self.reverse_input_map.keys()]
#to_json = reverse_input_map
to_json = [{'name': k, 'code': v} for k, v in self.reverse_intermediate_map.items()]
intermediate_json = json.dumps(to_json)
return intermediate_json
@app.route('/usednames.json')
def usednames_as_json():
"""creates a json file of the names already used"""
self.get_sandbox_variables()
names = []
names.extend([k.lower() for k in self.reverse_input_map.keys()])
names.extend([k.lower() for k in self.reverse_intermediate_map.keys()])
names.extend([k.lower() for k in self.reverse_biosphere_map.keys()])
names.extend([k.lower() for k in self.reverse_process_name_map.keys()])
names_json = json.dumps(names)
return names_json
@app.route('/testing')
def testbed():
args = {'model': {'name': self.modelInstance.name}}
args['result_sets'] = self.modelInstance.result_set
return render_template('testbed.html', args=args)
@app.route('/functions')
def function_editor():
args = {'model': {'name': self.modelInstance.name}}
return render_template('create_functions.html', args=args)
@app.route('/results.json')
def results_as_json():
return json.dumps(self.modelInstance.result_set)
@app.route('/parameters.json')
def parameter_json():
sorted_parameters = self.parameter_sorting()
return json.dumps(sorted_parameters)
@app.route('/parameter_<param_id>.json')
def param_query(param_id):
if self.modelInstance.params.get(param_id):
param = self.modelInstance.params[param_id]
elif self.modelInstance.production_params.get(param_id):
param = self.modelInstance.production_params[param_id]
elif self.modelInstance.allocation_params.get(param_id):
param = self.modelInstance.allocation_params[param_id]
else:
param = []
#print(param)
return json.dumps(param)
@app.route('/status.json')
def status():
db = self.modelInstance.database['items']
products = OrderedDict((k, v) for k, v in db.items() if v['type'] == 'product')
inputs = OrderedDict((k, v) for k, v in products.items() if v['lcopt_type'] == 'input')
ext_linked_inputs = OrderedDict((k, v) for k, v in inputs.items() if v.get('ext_link'))
#print(ext_linked_inputs)
biosphere = OrderedDict((k, v) for k, v in products.items() if v['lcopt_type'] == 'biosphere')
totals = []
if len(self.modelInstance.parameter_sets):
exporter = Bw2Exporter(self.modelInstance)
exporter.evaluate_parameter_sets()
evaluated_parameters = self.modelInstance.evaluated_parameter_sets
for _, ps in evaluated_parameters.items():
running_total = 0
for k, v in ps.items():
if k[-10:] != 'production':
running_total += abs(v)
totals.append(running_total)
non_zero = sum(totals) > 0
else:
non_zero = False
#print(evaluated_parameters)
#print(totals)
has_model = len(db) != 0
model_has_impacts = len(ext_linked_inputs) + len(biosphere) != 0
model_has_parameters = len (self.modelInstance.parameter_sets) != 0 and non_zero
model_is_runnable = all([has_model, model_has_impacts, model_has_parameters])
model_has_functions = len([x for k, x in self.modelInstance.params.items() if x['function'] is not None]) != 0
model_is_fully_formed = all([has_model, model_has_impacts, model_has_parameters, model_has_functions])
status_object = {
'has_model': has_model,
'model_has_impacts': model_has_impacts,
'model_has_parameters': model_has_parameters,
'model_has_functions': model_has_functions,
'model_is_runnable': model_is_runnable,
'model_is_fully_formed': model_is_fully_formed,
}
return json.dumps(status_object)
@app.route('/analyse')
def analyse_preload():
args = {'model': {'name': self.modelInstance.name}}
item = request.args.get('item')
item_code = request.args.get('item_code')
#print(request.args)
args['item'] = item
args['item_code'] = item_code
#print('PRELOAD {}'.format(args['item_code']))
#self.modelInstance.analyse(item)
return render_template('analysis_preload.html', args=args)
@app.route('/analysis')
def analysis():
item_code = request.args.get('item_code')
item = request.args.get('item')
self.modelInstance.analyse(item, item_code)
args = {'model': {'name': self.modelInstance.name}}
args['item'] = item
args['result_sets'] = self.modelInstance.result_set
#return render_template('analysis.html', args = args)
#return render_template('testbed.html', args = args)
            # redirect to the cached results so that reloading doesn't rerun the analysis
return redirect("/results?latest=True")
@app.route('/results')
def analysis_shortcut():
#if hasattr(self.modelInstance, 'result_set'):
if self.modelInstance.result_set is not None:
is_latest = request.args.get('latest')
item = self.modelInstance.result_set['settings']['item']
args = {'model': {'name': self.modelInstance.name}}
args['item'] = item
args['latest'] = is_latest
args['result_sets'] = self.modelInstance.result_set
return render_template('analysis.html', args=args)
else:
return render_template('analysis_fail.html')
#@app.route('/network.json')
#def network_as_json():
# parameter_set = request.args.get('ps')
# return self.modelInstance.result_set[int(parameter_set)]['json']
@app.route('/parameters')
def sorted_parameter_setup():
sorted_parameters = self.parameter_sorting()
args = {'title': 'Parameter set'}
args['sorted_parameters'] = sorted_parameters
args['ps_names'] = [x for x in self.modelInstance.parameter_sets.keys()]
return render_template('parameter_set_table_sorted.html',
args=args)
@app.route('/methods.json')
def methods_as_json():
import brightway2 as bw2
from lcopt.constants import DEFAULT_BIOSPHERE_PROJECT
if settings.model_storage.project == "single":
bw2.projects.set_current(settings.model_storage.single_project_name)
else:
if self.modelInstance.name in bw2.projects:
#print('getting custom methods')
bw2.projects.set_current(self.modelInstance.name)
else:
#print('getting default methods')
bw2.projects.set_current(DEFAULT_BIOSPHERE_PROJECT)
method_list = list(bw2.methods)
return json.dumps(method_list)
@app.route('/settings')
def model_settings():
args = {}
args['current_methods'] = json.dumps(self.modelInstance.analysis_settings['methods'])
args['current_amount'] = self.modelInstance.analysis_settings['amount']
args['allow_allocation'] = self.modelInstance.allow_allocation
return render_template('settings.html', args=args)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.errorhandler(500)
def server_error(e):
return render_template('500.html'), 500
@app.route('/excel_export')
def excel_export():
export_type = request.args.get('type')
ps = int(request.args.get('ps'))
m = int(request.args.get('m'))
#print (export_type, ps, m)
if export_type == 'summary':
output = self.create_excel_summary()
filename = "{}_summary_results.xlsx".format(self.modelInstance.name)
elif export_type == 'method':
output = self.create_excel_method(m)
filename = "{}_{}_results.xlsx".format(self.modelInstance.name, self.modelInstance.result_set['settings']['method_names'][m])
#finally return the file
return send_file(output, attachment_filename=filename, as_attachment=True)
@app.route('/locations.json')
def locations_as_json():
asset_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'assets')
filename = 'locations.json'
with open(os.path.join(asset_path, filename), 'r', encoding='utf-8') as f:
locations = json.load(f)
all_items = [x['items'] for x in self.modelInstance.external_databases if x['name'] in self.modelInstance.technosphere_databases]
used_locations = set([x['location'] for item in all_items for _, x in item.items()])
filtered_locations = [x for x in locations if x['code'] in used_locations]
#print(filtered_locations)
return json.dumps(filtered_locations)
@app.route('/mass_flow')
def mass_flow():
return render_template('mass_flow.html')
return app
def run(self, port=None, open_browser=True): # pragma: no cover
app = self.create_app()
if port is None:
port = find_port()
if open_browser:
url = 'http://127.0.0.1:{}/'.format(port)
webbrowser.open_new(url)
app.run(port=port)
| bsd-3-clause | 9,004,957,291,227,467,000 | 40.498299 | 243 | 0.546863 | false |
HybridF5/jacket | jacket/tests/compute/unit/virt/ironic/test_patcher.py | 1 | 5947 | # Copyright 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from jacket import context as nova_context
from jacket.compute import test
from jacket.tests.compute.unit import fake_instance
from jacket.tests.compute.unit.virt.ironic import utils as ironic_utils
from jacket.compute.virt.ironic import patcher
CONF = cfg.CONF
class IronicDriverFieldsTestCase(test.NoDBTestCase):
def setUp(self):
super(IronicDriverFieldsTestCase, self).setUp()
self.image_meta = ironic_utils.get_test_image_meta()
self.flavor = ironic_utils.get_test_flavor()
self.ctx = nova_context.get_admin_context()
self.instance = fake_instance.fake_instance_obj(self.ctx)
self.node = ironic_utils.get_test_node(driver='fake')
# Generic expected patches
self._expected_deploy_patch = [
{'path': '/instance_info/image_source',
'value': self.image_meta.id,
'op': 'add'},
{'path': '/instance_info/root_gb',
'value': str(self.instance['root_gb']),
'op': 'add'},
{'path': '/instance_info/swap_mb',
'value': str(self.flavor['swap']),
'op': 'add'},
{'path': '/instance_info/display_name',
'value': self.instance['display_name'],
'op': 'add'},
{'path': '/instance_info/vcpus',
'value': str(self.instance['vcpus']),
'op': 'add'},
{'path': '/instance_info/memory_mb',
'value': str(self.instance['memory_mb']),
'op': 'add'},
{'path': '/instance_info/local_gb',
'value': str(self.node.properties.get('local_gb', 0)),
'op': 'add'}
]
def test_create_generic(self):
node = ironic_utils.get_test_node(driver='pxe_fake')
patcher_obj = patcher.create(node)
self.assertIsInstance(patcher_obj, patcher.GenericDriverFields)
def test_generic_get_deploy_patch(self):
node = ironic_utils.get_test_node(driver='fake')
patch = patcher.create(node).get_deploy_patch(
self.instance, self.image_meta, self.flavor)
self.assertEqual(sorted(self._expected_deploy_patch), sorted(patch))
def test_generic_get_deploy_patch_capabilities(self):
node = ironic_utils.get_test_node(driver='fake')
self.flavor['extra_specs']['capabilities:boot_mode'] = 'bios'
expected = [{'path': '/instance_info/capabilities',
'value': '{"boot_mode": "bios"}',
'op': 'add'}]
expected += self._expected_deploy_patch
patch = patcher.create(node).get_deploy_patch(
self.instance, self.image_meta, self.flavor)
self.assertEqual(sorted(expected), sorted(patch))
def test_generic_get_deploy_patch_capabilities_op(self):
node = ironic_utils.get_test_node(driver='fake')
self.flavor['extra_specs']['capabilities:boot_mode'] = '<in> bios'
expected = [{'path': '/instance_info/capabilities',
'value': '{"boot_mode": "<in> bios"}',
'op': 'add'}]
expected += self._expected_deploy_patch
patch = patcher.create(node).get_deploy_patch(
self.instance, self.image_meta, self.flavor)
self.assertEqual(sorted(expected), sorted(patch))
def test_generic_get_deploy_patch_capabilities_nested_key(self):
node = ironic_utils.get_test_node(driver='fake')
self.flavor['extra_specs']['capabilities:key1:key2'] = '<in> bios'
expected = [{'path': '/instance_info/capabilities',
'value': '{"key1:key2": "<in> bios"}',
'op': 'add'}]
expected += self._expected_deploy_patch
patch = patcher.create(node).get_deploy_patch(
self.instance, self.image_meta, self.flavor)
self.assertEqual(sorted(expected), sorted(patch))
def test_generic_get_deploy_patch_ephemeral(self):
CONF.set_override('default_ephemeral_format', 'testfmt')
node = ironic_utils.get_test_node(driver='fake')
instance = fake_instance.fake_instance_obj(self.ctx,
ephemeral_gb=10)
patch = patcher.create(node).get_deploy_patch(
instance, self.image_meta, self.flavor)
expected = [{'path': '/instance_info/ephemeral_gb',
'value': str(instance.ephemeral_gb),
'op': 'add'},
{'path': '/instance_info/ephemeral_format',
'value': 'testfmt',
'op': 'add'}]
expected += self._expected_deploy_patch
self.assertEqual(sorted(expected), sorted(patch))
def test_generic_get_deploy_patch_preserve_ephemeral(self):
node = ironic_utils.get_test_node(driver='fake')
for preserve in [True, False]:
patch = patcher.create(node).get_deploy_patch(
self.instance, self.image_meta, self.flavor,
preserve_ephemeral=preserve)
expected = [{'path': '/instance_info/preserve_ephemeral',
'value': str(preserve), 'op': 'add', }]
expected += self._expected_deploy_patch
self.assertEqual(sorted(expected), sorted(patch))
| apache-2.0 | 6,840,993,694,003,473,000 | 44.746154 | 78 | 0.591222 | false |
vanessajurtz/lasagne4bio | secondary_proteins_prediction/predict.py | 1 | 2125 | import sys
import numpy as np
import importlib
import lasagne as nn
import theano
from theano import tensor as T
import os
import glob
import data
import utils
if not (2 <= len(sys.argv) <= 3):
sys.exit("Usage: python predict.py <metadata_path> [subset=test]")
sym_y = T.imatrix('target_output')
sym_x = T.tensor3()
metadata_path_all = glob.glob(sys.argv[1] + "*")
print "shape of metadata_path_all"
print(len(metadata_path_all))
if len(sys.argv) >= 3:
subset = sys.argv[2]
assert subset in ['train', 'valid', 'test', 'train_valid']
else:
subset = 'test'
if subset == "test":
X, mask, _, num_seq = data.get_test()
elif subset == "train":
sys.exit("train not implemented")
elif subset == "train_valid":
sys.exit("train_valid not implemented")
else:
sys.exit("valid not implemented")
for metadata_path in metadata_path_all:
print "Loading metadata file %s" % metadata_path
metadata = np.load(metadata_path)
config_name = metadata['config_name']
config = importlib.import_module("configurations.%s" % config_name)
print "Using configurations: '%s'" % config_name
print "Build model"
l_in, l_out = config.build_model()
print "Build eval function"
inference = nn.layers.get_output(
l_out, sym_x, deterministic=True)
print "Load parameters"
nn.layers.set_all_param_values(l_out, metadata['param_values'])
print "Compile functions"
predict = theano.function([sym_x], inference)
print "Predict"
predictions = []
batch_size = config.batch_size
num_batches = np.size(X,axis=0) // batch_size
for i in range(num_batches):
idx = range(i*batch_size, (i+1)*batch_size)
x_batch = X[idx]
mask_batch = mask[idx]
p = predict(x_batch)
predictions.append(p)
predictions = np.concatenate(predictions, axis = 0)
predictions_path = os.path.join("predictions", os.path.basename(metadata_path).replace("dump_", "predictions_").replace(".pkl", ".npy"))
print "Storing predictions in %s" % predictions_path
np.save(predictions_path, predictions)
| gpl-3.0 | 356,573,002,627,405,200 | 23.709302 | 140 | 0.653647 | false |
wwmm/wwplot | WWplot/fit.py | 1 | 1679 | # -*- coding: utf-8 -*-
import scipy.odr
from numpy import *
from PySide2.QtCore import QObject, Signal
class Fit(QObject):
finished = Signal()
def __init__(self, maxit=1000):
QObject.__init__(self)
self.maxit = maxit
self.ready = False
self.x, self.xerr = [], []
self.y, self.yerr = [], []
self.parameters = []
self.output, self.parameters_err = [], []
self.fit_function = None
self.myglobals = dict(globals())
self.myglobals["__builtins__"] = {}
def init_function(self, equation_str):
self.ready = False
N = equation_str.count("P[")
n_free = 0
for n in range(0, N):
test_str = "P[" + str(n) + "]"
if equation_str.count(test_str) > 0:
n_free = n_free + 1
self.parameters = []
for n in range(0, n_free):
self.parameters.append(1.0)
self.fit_function = lambda P, x: eval(equation_str, self.myglobals, locals())
self.model = scipy.odr.Model(self.fit_function)
def set_data(self, x, y, xerr=None, yerr=None):
if xerr is not None and yerr is not None:
self.fit_data = scipy.odr.RealData(x, y, sx=fabs(xerr), sy=fabs(yerr))
else:
self.fit_data = scipy.odr.RealData(x, y)
def run(self):
odr = scipy.odr.ODR(self.fit_data, self.model, maxit=self.maxit, beta0=self.parameters)
out = odr.run()
out.pprint()
self.parameters_err = sqrt(diag(out.cov_beta))
self.parameters = out.beta
self.ready = True
self.finished.emit()
return out.stopreason
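

# --- Illustrative usage sketch (added for clarity; not part of the original module). ---
# Assumes numpy, scipy and PySide2 are installed. The data and the model string
# "P[0]*x + P[1]" are made-up examples: P[n] are the free fit parameters.
if __name__ == "__main__":
    import numpy as np

    xdata = np.linspace(0.0, 10.0, 50)
    ydata = 2.0 * xdata + 1.0 + np.random.normal(scale=0.1, size=xdata.size)

    fit = Fit()
    fit.init_function("P[0]*x + P[1]")  # straight line with two free parameters
    fit.set_data(xdata, ydata)          # no error bars, so plain RealData is used
    fit.run()
    print(fit.parameters, fit.parameters_err)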
| gpl-3.0 | 5,255,284,062,352,769,000 | 22.985714 | 95 | 0.54735 | false |
toast38coza/DJProcess | process/tasks/google_natural_language/test_extract_sentiment.py | 1 | 1090 | from google.cloud import language
client = language.Client()
import csv
def get_sentiment(message):
doc = client.document_from_text(message)
f_in = open('/Users/toast38coza/Downloads/verbatims.csv', 'rb')
f_out = open('/Users/toast38coza/Downloads/verbatims-new.csv', 'wb')
reader = csv.reader(f_in)
writer = csv.writer(f_out)
entities = {}
default_blank_entity = {'instances': []}
for row in reader:
text = row[5]
doc = client.document_from_text(text)
result = doc.annotate_text(include_sentiment=True, include_syntax=False, include_entities=True)
row.append(result.sentiment.score)
row.append(result.sentiment.magnitude)
writer.writerow(row)
for e in result.entities:
key = '{}:{}'.format(e.name, e.entity_type)
instance = {
'name': e.name,
'type': e.entity_type,
'salience': e.salience,
'sentiment': e.sentiment,
'doc': text
}
        entity = entities.setdefault(key, {'instances': []})
        entity['instances'].append(instance)
# f_in.close()
# f_out.close()
| mit | -7,063,101,933,839,828,000 | 27.684211 | 99 | 0.637615 | false |
mystic123/DeepLearning | Basics/optimizers.py | 1 | 7742 | import numpy as np
class Optimizer:
"""
Optimizer class
"""
def __init__(self, net, cost, learning_rate, *args, **kwargs):
self.net = net
self.cost = cost
self.learning_rate = learning_rate
def compute_gradients(self, batch, y, *args, **kwargs):
zs, as_ = self.net.forward_pass(batch)
gradients = []
m = y.shape[0]
dA = self.cost.prime(as_[-1], y.T)
for i in range(len(self.net.weights) - 1, 0, -1):
dZ = dA * self.net.activation_prime(zs[i])
dW = np.matmul(dZ, as_[i - 1].T) / m
gradients = [dW] + gradients
dA = np.matmul(self.net.weights[i].T, dZ)
dZ = dA * self.net.activation_prime(zs[0])
dW = np.matmul(dZ, batch) / m
gradients = [dW] + gradients
return gradients
def update_weights(self, *args, **kwargs):
raise NotImplementedError
def name(self):
raise NotImplementedError
class SGD(Optimizer):
"""
Stochastic Gradient Descent Optimizer
"""
def __init__(self, *args, **kwargs):
super(SGD, self).__init__(*args, **kwargs)
def update_weights(self, batch):
batch_xs, batch_ys = batch
gradients = self.compute_gradients(batch_xs, batch_ys)
for w, dW in zip(self.net.weights, gradients):
w -= self.learning_rate * dW
def name(self):
return 'SGD'
class MomentumOptimizer(Optimizer):
"""
SGD With Momentum Optimizer
"""
def __init__(self, *args, gamma=0.9, **kwargs):
super(MomentumOptimizer, self).__init__(*args, **kwargs)
self.gamma = gamma
self.past_gradients = []
for w in self.net.weights:
self.past_gradients.append(np.zeros_like(w))
def update_weights(self, batch):
batch_xs, batch_ys = batch
gradients = self.compute_gradients(batch_xs, batch_ys)
for i, dW in enumerate(gradients):
            # add momentum term to weights update
self.net.weights[i] -= self.gamma * self.past_gradients[i] + self.learning_rate * dW
self.past_gradients[i] = dW
def name(self):
return 'Momentum'
class NAG(Optimizer):
"""
Nesterov Accelerated Gradient Optimizer
"""
def __init__(self, *args, gamma=0.9, **kwargs):
super(NAG, self).__init__(*args, **kwargs)
self.gamma = gamma
self.past_gradients = []
for w in self.net.weights:
self.past_gradients.append(np.zeros_like(w))
def compute_gradients(self, batch, y, *args, **kwargs):
net_weights = []
for w in self.net.weights:
net_weights.append(np.copy(w))
# compute gradients with respect to approximated future parameters
for i, w in enumerate(self.net.weights):
self.net.weights[i] = w - self.gamma * self.past_gradients[i]
gradients = super(NAG, self).compute_gradients(batch, y)
# restore weights
self.net.weights = net_weights
return gradients
def update_weights(self, batch):
batch_xs, batch_ys = batch
gradients = self.compute_gradients(batch_xs, batch_ys)
for i, dW in enumerate(gradients):
# add momentum term
self.net.weights[i] -= self.gamma * self.past_gradients[i] + self.learning_rate * dW
self.past_gradients[i] = dW
def name(self):
return 'NAG'
class Adagrad(Optimizer):
"""
Adagrad Optimizer
"""
def __init__(self, *args, epsilon=1e-8, **kwargs):
super(Adagrad, self).__init__(*args, **kwargs)
self.epsilon = epsilon
self.gradient_squares = []
for w in self.net.weights:
self.gradient_squares.append(np.zeros_like(w))
def update_weights(self, batch):
batch_xs, batch_ys = batch
gradients = self.compute_gradients(batch_xs, batch_ys)
for i, dW in enumerate(gradients):
# accumulate gradients squares since the beginning
self.gradient_squares[i] += np.square(dW)
self.net.weights[i] -= self.learning_rate / (np.sqrt(self.gradient_squares[i] + self.epsilon)) * dW
def name(self):
return 'Adagrad'
class Adadelta(Optimizer):
"""
Adadelta Optimizer
"""
def __init__(self, *args, gamma=0.9, epsilon=1e-8, **kwargs):
super(Adadelta, self).__init__(*args, **kwargs)
self.gamma = gamma
self.epsilon = epsilon
self.gradients_squares = []
self.past_updates_squares = []
for w in self.net.weights:
self.gradients_squares.append(np.zeros_like(w))
self.past_updates_squares.append(np.zeros_like(w))
def update_weights(self, batch):
batch_xs, batch_ys = batch
gradients = self.compute_gradients(batch_xs, batch_ys)
for i, dW in enumerate(gradients):
# decay accumulated gradients squares
self.gradients_squares[i] = self.gamma * self.gradients_squares[i] + (1 - self.gamma) * dW ** 2
update = -np.sqrt(
(self.past_updates_squares[i] + self.epsilon) / (self.gradients_squares[i] + self.epsilon)) * dW
self.past_updates_squares[i] = np.square(
self.gamma * self.past_updates_squares[i] + (1 - self.gamma) * update)
self.net.weights[i] += update
def name(self):
return 'Adadelta'
class RMSProp(Optimizer):
"""
RMSProp Optimizer
"""
def __init__(self, *args, gamma=0.9, epsilon=1e-8, **kwargs):
super(RMSProp, self).__init__(*args, **kwargs)
self.gamma = gamma
self.epsilon = epsilon
self.gradients_squares = []
for w in self.net.weights:
self.gradients_squares.append(np.zeros_like(w))
def update_weights(self, batch):
batch_xs, batch_ys = batch
gradients = self.compute_gradients(batch_xs, batch_ys)
for i, dW in enumerate(gradients):
# decay accumulated gradients squares
self.gradients_squares[i] = self.gamma * self.gradients_squares[i] + (1 - self.gamma) * dW ** 2
update = -self.learning_rate / np.sqrt(self.gradients_squares[i] + self.epsilon) * dW
self.net.weights[i] += update
def name(self):
return 'RMSProp'
class Adam(Optimizer):
"""
Adam Optimizer
"""
def __init__(self, *args, beta1=0.9, beta2=0.999, epsilon=1e-8, **kwargs):
super(Adam, self).__init__(*args, **kwargs)
self.beta1 = beta1
self.beta2 = beta2
self.epsilon = epsilon
self.step = 1
self.past_gradients = []
self.gradient_squares = []
for w in self.net.weights:
self.past_gradients.append(np.zeros_like(w))
self.gradient_squares.append(np.zeros_like(w))
def update_weights(self, batch):
batch_xs, batch_ys = batch
gradients = self.compute_gradients(batch_xs, batch_ys)
for i, dW in enumerate(gradients):
# decay accumulated gradients
self.past_gradients[i] = self.beta1 * self.past_gradients[i] + (1 - self.beta1) * dW
# decay accumulated gradients squares
self.gradient_squares[i] = self.beta2 * self.gradient_squares[i] + (1 - self.beta2) * dW ** 2
# compute corrected estimates
mean_estimate = self.past_gradients[i] / (1 - self.beta1 ** self.step)
var_estimate = self.gradient_squares[i] / (1 - self.beta2 ** self.step)
update = -self.learning_rate / (np.sqrt(var_estimate) + self.epsilon) * mean_estimate
self.net.weights[i] += update
self.step += 1
def name(self):
return 'Adam'
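

# --- Illustrative usage sketch (added for clarity; not part of the original module). ---
# The optimizers above only assume a `net` object exposing `weights`, `forward_pass`
# and `activation_prime`, plus a `cost` object exposing `prime`. The tiny stub network
# and squared-error cost below are made up solely to show how an optimizer is driven.
if __name__ == "__main__":
    class _StubCost:
        def prime(self, a, y):
            # derivative of 0.5 * (a - y)**2 with respect to a
            return a - y

    class _StubNet:
        def __init__(self, n_in, n_hidden, n_out):
            rng = np.random.RandomState(0)
            self.weights = [rng.randn(n_hidden, n_in), rng.randn(n_out, n_hidden)]

        def activation(self, z):
            return np.tanh(z)

        def activation_prime(self, z):
            return 1.0 - np.tanh(z) ** 2

        def forward_pass(self, x):
            zs, as_ = [], []
            a = x.T
            for w in self.weights:
                z = np.matmul(w, a)
                a = self.activation(z)
                zs.append(z)
                as_.append(a)
            return zs, as_

    net = _StubNet(n_in=3, n_hidden=4, n_out=1)
    optimizer = Adam(net, _StubCost(), learning_rate=0.01)
    xs = np.random.rand(8, 3)  # batch of 8 samples with 3 features
    ys = np.random.rand(8, 1)  # batch of 8 targets
    for _ in range(10):
        optimizer.update_weights((xs, ys))
    print("ran 10", optimizer.name(), "updates")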
| mit | 7,283,492,937,311,258,000 | 31.393305 | 112 | 0.580728 | false |
Miserlou/OpenWatch | openwatch/settings.py | 1 | 4279 | import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
BASE_URL = 'http://www.openwatch.net'
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'test_db', # Or path to database file if using sqlite3.
'USER': '', #XXX CHANGEME # Not used with sqlite3.
'PASSWORD': '', #XXX CHANGEME # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
AUTH_PROFILE_MODULE = 'recordings.UserProfile'
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = '/home/tuttle/Projects/openwatch/openwatch/static/'
UPLOAD_ROOT = '/var/www/openwatch/uploads/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = 'http://openwatch.net/static/'
#UPLOAD_ROOT = '/var/www/openwatch/uploads/'
#UPLOAD_ROOT = 'Users/davidbrodsky/Documents/django/OpenWatch_static/uploads'
STATIC_URL = '/static/'
#STATIC_ROOT = '/Users/davidbrodsky/Documents/django/OpenWatch_static'
STATICFILES_DIRS = (
os.path.join(os.path.dirname(__file__), '../static/'),
)
# Deprecated setting
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
#ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '2f=jo^b+x)xu92a93wt3+d9drnzvp%=e&3um6ltw%o03cwn3v$'
###XXX: Change me, obviously
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
# Not required with Django 1.4
#'django.middleware.csrf.CsrfResponseMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'openwatch.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), 'templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
'django.contrib.admindocs',
'tagging',
'captcha',
'openwatch.recordings',
'openwatch.blog',
'openwatch.misc',
'openwatch.map'
)
CAPTCHA_FONT_SIZE = 42
CAPTCHA_LETTER_ROTATION = None
try:
from local_settings import *
except:
pass
| apache-2.0 | 7,432,822,318,124,921,000 | 31.416667 | 122 | 0.707175 | false |
krattai/noo-ebs | docs/zeroMQ-guide2/examples/Python/ticlient.py | 1 | 1692 | """
Titanic client example
Implements client side of http:rfc.zeromq.org/spec:9
Author : Min RK <[email protected]>
"""
import sys
import time
from mdcliapi import MajorDomoClient
def service_call (session, service, request):
"""Calls a TSP service
Returns reponse if successful (status code 200 OK), else None
"""
reply = session.send(service, request)
if reply:
status = reply.pop(0)
if status == "200":
return reply
elif status == "400":
print "E: client fatal error, aborting"
sys.exit (1)
elif status == "500":
print "E: server fatal error, aborting"
sys.exit (1)
else:
sys.exit (0); # Interrupted or failed
def main():
verbose = '-v' in sys.argv
session = MajorDomoClient("tcp://localhost:5555", verbose)
# 1. Send 'echo' request to Titanic
request = ["echo", "Hello world"]
reply = service_call(session, "titanic.request", request)
uuid = None
if reply:
uuid = reply.pop(0)
print "I: request UUID ", uuid
# 2. Wait until we get a reply
while True:
time.sleep (.1)
request = [uuid]
reply = service_call (session, "titanic.reply", request)
if reply:
reply_string = reply[-1]
print "Reply:", reply_string
# 3. Close request
request = [uuid]
reply = service_call (session, "titanic.close", request)
break
else:
print "I: no reply yet, trying again..."
time.sleep(5) # Try again in 5 seconds
return 0
if __name__ == '__main__':
main() | bsd-2-clause | 7,430,159,579,871,229,000 | 24.651515 | 68 | 0.560284 | false |
kamailio/kamcli | kamcli/commands/cmd_uacreg.py | 1 | 6162 | import click
from sqlalchemy import create_engine
from kamcli.ioutils import ioutils_dbres_print
from kamcli.cli import pass_context
from kamcli.iorpc import command_ctl
@click.group(
"uacreg",
help="Manage uac remote registrations",
short_help="Manage uac registrations",
)
@pass_context
def cli(ctx):
pass
@cli.command("add", short_help="Add a new remote registration account")
@click.option("realm", "--realm", default="", help='Realm (default: "")')
@click.option(
"authha1", "--auth-ha1", is_flag=True, help="Auth password in HA1 format"
)
@click.option(
"flags", "--flags", type=int, default=0, help="Flags (default: 0)"
)
@click.option(
"regdelay",
"--reg-delay",
type=int,
default=0,
help="Registration delay (default: 0)",
)
@click.option(
"socket", "--socket", default="", help='Local socket (default: "")'
)
@click.argument("l_uuid", metavar="<l_uuid>")
@click.argument("l_username", metavar="<l_username>")
@click.argument("l_domain", metavar="<l_domain>")
@click.argument("r_username", metavar="<r_username>")
@click.argument("r_domain", metavar="<r_domain>")
@click.argument("auth_username", metavar="<auth_username>")
@click.argument("auth_password", metavar="<auth_password>")
@click.argument("auth_proxy", metavar="<auth_proxy>")
@click.argument("expires", metavar="<expires>", type=int)
@pass_context
def uacreg_add(
ctx,
realm,
authha1,
flags,
regdelay,
socket,
l_uuid,
l_username,
l_domain,
r_username,
r_domain,
auth_username,
auth_password,
auth_proxy,
expires,
):
"""Add a new uac remote registration account
\b
Parameters:
<l_uuid> - local user unique id
<l_username> - local username
<l_domain> - local domain
<r_username> - remote username
<r_domain> - remote domain
<auth_username> - auth username
<auth_password> - auth password
<auth_proxy> - auth proxy (sip address)
<expires> - expires interval (int)
"""
ctx.vlog(
"Adding a new uac remote registration account - local uuid: [%s]",
l_uuid,
)
pwval = ""
ha1val = ""
if authha1:
ha1val = auth_password
else:
pwval = auth_password
e = create_engine(ctx.gconfig.get("db", "rwurl"))
e.execute(
"insert into uacreg (l_uuid, l_username, l_domain, r_username, "
"r_domain, realm, auth_username, auth_password, auth_ha1, auth_proxy, "
"expires, flags, reg_delay, socket) values "
"({0!r}, {1!r}, {2!r}, {3!r}, "
"{4!r}, {5!r}, {6!r}, {7!r}, {8!r}, {9!r}, "
"{10}, {11}, {12}, {13!r})".format(
l_uuid.encode("ascii", "ignore").decode(),
l_username.encode("ascii", "ignore").decode(),
l_domain.encode("ascii", "ignore").decode(),
r_username.encode("ascii", "ignore").decode(),
r_domain.encode("ascii", "ignore").decode(),
realm.encode("ascii", "ignore").decode(),
auth_username.encode("ascii", "ignore").decode(),
pwval.encode("ascii", "ignore").decode(),
ha1val.encode("ascii", "ignore").decode(),
auth_proxy.encode("ascii", "ignore").decode(),
expires,
flags,
regdelay,
socket.encode("ascii", "ignore").decode(),
)
)
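

# Example invocation of the command above (illustrative only; the account data,
# host names and credentials below are made up and not part of the original module):
#
#   kamcli uacreg add 2b5f8d alice example.local alice sip.provider.net \
#       alice secretpw sip:sip.provider.net:5060 300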
@cli.command(
"passwd", short_help="Set the password for a remote registration account"
)
@click.option(
"authha1", "--auth-ha1", is_flag=True, help="Auth password in HA1 format"
)
@click.argument("l_uuid", metavar="<l_uuid>")
@click.argument("auth_password", metavar="<auth_password>")
@pass_context
def uacreg_passwd(ctx, authha1, l_uuid, auth_password):
"""Set password for a remote registration account
\b
Parameters:
<l_uuid> - local user unique id
<auth_password> - auth password
"""
ctx.vlog(
"Adding a new uac remote registration account - local uuid: [%s]",
l_uuid,
)
pwval = ""
ha1val = ""
if authha1:
ha1val = auth_password
else:
pwval = auth_password
e = create_engine(ctx.gconfig.get("db", "rwurl"))
e.execute(
"update uacreg set auth_password={0!r}, auth_ha1={1!r} "
"where l_uuid={2!r}".format(
pwval.encode("ascii", "ignore").decode(),
ha1val.encode("ascii", "ignore").decode(),
l_uuid.encode("ascii", "ignore").decode(),
)
)
@cli.command("showdb", short_help="Show dialplan records in database")
@click.option(
"oformat",
"--output-format",
"-F",
type=click.Choice(["raw", "json", "table", "dict"]),
default=None,
help="Format the output",
)
@click.option(
"ostyle",
"--output-style",
"-S",
default=None,
help="Style of the output (tabulate table format)",
)
@click.argument("l_uuid", nargs=-1, metavar="[<l_uuid>]")
@pass_context
def uacreg_showdb(ctx, oformat, ostyle, l_uuid):
"""Show details for records in uacreg database table
\b
Parameters:
[<l_uuid>] - local user unique id
"""
e = create_engine(ctx.gconfig.get("db", "rwurl"))
if not l_uuid:
ctx.vlog("Showing all uacreg records")
res = e.execute("select * from uacreg")
ioutils_dbres_print(ctx, oformat, ostyle, res)
else:
for record in l_uuid:
ctx.vlog("Showing uacreg records for l_uuid: " + record)
res = e.execute(
"select * from uacreg where l_uuid={0!r}".format(record)
)
ioutils_dbres_print(ctx, oformat, ostyle, res)
@cli.command(
"list", short_help="Show details for remote registration records in memory"
)
@pass_context
def uacreg_list(ctx):
"""Show details for remote registration records in memory
\b
"""
command_ctl(ctx, "uac.reg_dump", [])
@cli.command(
"reload",
short_help="Reload remote registration records from database into memory",
)
@pass_context
def uacreg_reload(ctx):
"""Reload remote registration records from database into memory
"""
command_ctl(ctx, "uac.reg_reload", [])
| gpl-2.0 | -7,870,042,269,228,062,000 | 28.342857 | 79 | 0.595099 | false |
Jonadabe/letsencrypt | letsencrypt/achallenges.py | 1 | 2725 | """Client annotated ACME challenges.
Please use names such as ``achall`` to distiguish from variables "of type"
:class:`acme.challenges.Challenge` (denoted by ``chall``)
and :class:`.ChallengeBody` (denoted by ``challb``)::
from acme import challenges
from acme import messages
from letsencrypt import achallenges
chall = challenges.DNS(token='foo')
challb = messages.ChallengeBody(chall=chall)
achall = achallenges.DNS(chall=challb, domain='example.com')
Note, that all annotated challenges act as a proxy objects::
achall.token == challb.token
"""
from acme import challenges
from acme.jose import util as jose_util
from letsencrypt import crypto_util
# pylint: disable=too-few-public-methods
class AnnotatedChallenge(jose_util.ImmutableMap):
"""Client annotated challenge.
Wraps around server provided challenge and annotates with data
useful for the client.
:ivar challb: Wrapped `~.ChallengeBody`.
"""
__slots__ = ('challb',)
acme_type = NotImplemented
def __getattr__(self, name):
return getattr(self.challb, name)
class DVSNI(AnnotatedChallenge):
"""Client annotated "dvsni" ACME challenge."""
__slots__ = ('challb', 'domain', 'key')
acme_type = challenges.DVSNI
def gen_cert_and_response(self, s=None): # pylint: disable=invalid-name
"""Generate a DVSNI cert and save it to filepath.
:returns: ``(cert_pem, response)`` tuple, where ``cert_pem`` is the PEM
encoded certificate and ``response`` is an instance
:class:`acme.challenges.DVSNIResponse`.
:rtype: tuple
"""
response = challenges.DVSNIResponse(s=s)
cert_pem = crypto_util.make_ss_cert(self.key, [
self.domain, self.nonce_domain, response.z_domain(self.challb)])
return cert_pem, response
class SimpleHTTP(AnnotatedChallenge):
"""Client annotated "simpleHttp" ACME challenge."""
__slots__ = ('challb', 'domain', 'key')
acme_type = challenges.SimpleHTTP
class DNS(AnnotatedChallenge):
"""Client annotated "dns" ACME challenge."""
__slots__ = ('challb', 'domain')
acme_type = challenges.DNS
class RecoveryContact(AnnotatedChallenge):
"""Client annotated "recoveryContact" ACME challenge."""
__slots__ = ('challb', 'domain')
acme_type = challenges.RecoveryContact
class RecoveryToken(AnnotatedChallenge):
"""Client annotated "recoveryToken" ACME challenge."""
__slots__ = ('challb', 'domain')
acme_type = challenges.RecoveryToken
class ProofOfPossession(AnnotatedChallenge):
"""Client annotated "proofOfPossession" ACME challenge."""
__slots__ = ('challb', 'domain')
acme_type = challenges.ProofOfPossession
| apache-2.0 | 8,463,762,847,026,909,000 | 28.619565 | 80 | 0.681835 | false |
edx/i18n-tools | tests/test_changed.py | 1 | 2745 | from os import remove
from shutil import copyfile
import ddt
from unittest import mock
from i18n.changed import Changed
from . import I18nToolTestCase, MOCK_APPLICATION_DIR
@ddt.ddt
class TestChanged(I18nToolTestCase):
"""
Tests functionality of i18n/changed.py
"""
def setUp(self):
super().setUp()
self._setup_i18n_test_config()
self.changed = Changed()
def test_detect_changes(self):
"""
Verifies the detect_changes method can detect changes in translation source files.
"""
fake_locale_dir = MOCK_APPLICATION_DIR / 'conf' / 'locale' / 'mock'
file_name = fake_locale_dir / 'LC_MESSAGES' / 'mako.po'
copy = fake_locale_dir / 'LC_MESSAGES' / 'mako_copy.po'
# Note: this fails if you have not-yet-committed changes to test fixture .po files
self.assertFalse(self.changed.detect_changes())
try:
copyfile(file_name, copy) # Copy the .po file
remove(file_name) # Make changes to the .po file
self.assertTrue(self.changed.detect_changes()) # Detect changes made to the .po file
finally:
copyfile(copy, file_name) # Return .po file to its previous state
remove(copy) # Delete copy of .po file
def test_do_not_detect_changes(self):
"""
Verifies the detect_changes method doesn't detect changes in rows that do not start with msgid or msgstr.
"""
file_name = 'tests/data/test_do_not_detect_changes.txt'
copy = 'tests/data/test_do_not_detect_changes_copy.txt'
try:
copyfile(file_name, copy) # Copy the .txt file
remove(file_name) # Make changes to the .txt file
self.assertFalse(self.changed.detect_changes()) # Do not detect changes made to the .txt file
finally:
copyfile(copy, file_name) # Return .txt file to its previous state
remove(copy) # Delete copy of .txt file
@ddt.data(
(False, 'Source translation files are current.'),
(True, 'Source translations are out-of-date! Please update them.')
)
@ddt.unpack
def test_get_message(self, changes_detected, msg):
"""
Verifies that get_message method returns the correct message.
"""
self.assertEqual(self.changed.get_message(changes_detected), msg)
@ddt.data(
(True, 1),
(False, 0)
)
@ddt.unpack
def test_run(self, return_value, value):
"""
Verifies that run method returns the correct value.
"""
with mock.patch('i18n.changed.Changed.detect_changes', mock.Mock(return_value=return_value)):
self.assertEqual(self.changed.run(''), value)
| apache-2.0 | 2,824,621,743,089,920,000 | 35.118421 | 113 | 0.621858 | false |
gcasey/cosmotrack | scripts/configParsers.py | 1 | 4387 | ################################################################################
#
# Copyright 2013 Kitware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
import re
import pprint
import os
TEMPLATE_RESULTS = {
"version": None,
"visualization" : None,
"viz_server" : None,
"viz_port" : None,
"viz_frequency" : None,
"analysistool" : {}
}
# These shoudl contain the required parameters
ANALYSIS_TEMPLATES = {
'halotracker' : {
'bb' : None,
'merger_tree_file' : None
}
}
class IncompleteConfigurationException(Exception):
pass
class ParseError(Exception):
pass
CHARACTER_CONVERTER = re.compile(r'\W')
def convertKeyName(name):
name = name.lower()
return re.sub(CHARACTER_CONVERTER, '_', name)
def verifyMetaData(obj):
for key, value in obj.iteritems():
if value in (None, {}):
raise IncompleteConfigurationException('Pair: (%s, %s)' % (key, value))
else:
try:
verifyMetaData(value)
except AttributeError:
pass
def yesNoBool(token):
if token.lower() in ['yes', 'true', 'on', 'enabled']:
return True
elif token.lower() in ['no', 'false', 'off', 'disabled']:
        return False
raise ValueError("No conversion to bool")
def guessType(token):
ConvertPrecedence = [yesNoBool, int, float, str]
for op in ConvertPrecedence:
try:
return op(token)
except ValueError:
pass
def simplifyChunk(text):
if len(text) == 0:
raise ParseError('No value for key')
if len(text) == 1:
return guessType(text[0])
else:
return [guessType(snip) for snip in text]
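# Worked examples for the helpers above (illustrative, not part of the original module):
#   guessType("yes")       -> True        (via yesNoBool)
#   guessType("42")        -> 42          (int)
#   guessType("3.14")      -> 3.14        (float)
#   guessType("halos.out") -> "halos.out" (falls through to str)
#   simplifyChunk(["1", "2", "4"]) -> [1, 2, 4]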
SECTION_MATCHER = re.compile(r'#\s*(\S*)\s*SECTION')
def parseCosmoConfig(fileobj):
result = TEMPLATE_RESULTS.copy()
namespace = result
for line in fileobj:
# We should check for section names first as it kind of looks like a comment
mobj = SECTION_MATCHER.match(line.strip())
if mobj:
name = mobj.group(1)
name = convertKeyName(name)
namespace = result['analysistool'][name]
#Other than section names # are comments
elif len(line) > 0 and line[0] == '#':
continue
else:
tokens = line.split()
if len(tokens) < 2:
continue
elif tokens[0].lower() == 'analysistool' and len(tokens) > 2 and yesNoBool(tokens[2]):
key = convertKeyName(tokens[1].strip())
result['analysistool'][key] = {}
elif tokens[0] == 'INSTANCE_NAME':
try:
key = convertKeyName(tokens[1])
namespace.update(ANALYSIS_TEMPLATES[key])
except KeyError:
pass
else:
key = convertKeyName(tokens[0])
namespace[key] = simplifyChunk(tokens[1:])
verifyMetaData(result)
return result
def parseIndatParams(fileobj):
result = {}
for line in fileobj:
if len(line) < 1 or line[0] == '#':
continue
else:
tokens = line.split()
if len(tokens) < 2:
continue
key = convertKeyName(tokens[0])
result[key] = simplifyChunk([tokens[1]])
return result
def main(simname, cosmofile, indatfile):
simname = simname
cosmoParams = parseCosmoConfig(open(cosmofile, 'r'))
indatParams = parseIndatParams(open(indatfile, 'r'))
result = {'simulation_name' : simname,
'cosmo' : cosmoParams,
'indat' : indatParams}
return result
if __name__ == '__main__':
import sys
_r = main(sys.argv[1], sys.argv[2], sys.argv[3])
pprint.pprint(_r)
| apache-2.0 | 5,939,486,349,299,386,000 | 27.303226 | 98 | 0.565079 | false |
letouriste001/SmartForest_2.0 | python3.4Smartforest/lib/python3.4/site-packages/django/utils/translation/trans_null.py | 1 | 1408 | # These are versions of the functions in django.utils.translation.trans_real
# that don't actually do anything. This is purely for performance, so that
# settings.USE_I18N = False can use this module rather than trans_real.py.
from django.conf import settings
from django.utils.encoding import force_text
def ngettext(singular, plural, number):
if number == 1:
return singular
return plural
ngettext_lazy = ngettext
def ungettext(singular, plural, number):
return force_text(ngettext(singular, plural, number))
def pgettext(context, message):
return ugettext(message)
def npgettext(context, singular, plural, number):
return ungettext(singular, plural, number)
activate = lambda x: None
deactivate = deactivate_all = lambda: None
get_language = lambda: settings.LANGUAGE_CODE
get_language_bidi = lambda: settings.LANGUAGE_CODE in settings.LANGUAGES_BIDI
check_for_language = lambda x: True
def gettext(message):
return message
def ugettext(message):
return force_text(gettext(message))
gettext_noop = gettext_lazy = _ = gettext
def to_locale(language):
p = language.find('-')
if p >= 0:
return language[:p].lower() + '_' + language[p + 1:].upper()
else:
return language.lower()
def get_language_from_request(request, check_path=False):
return settings.LANGUAGE_CODE
def get_language_from_path(request):
return None
| mit | -7,491,722,357,305,650,000 | 23.701754 | 77 | 0.721591 | false |
ProjectQ-Framework/ProjectQ | projectq/setups/decompositions/h2rx.py | 1 | 2023 | # -*- coding: utf-8 -*-
# Copyright 2017 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Module uses ideas from "Basic circuit compilation techniques for an
# ion-trap quantum machine" by Dmitri Maslov (2017) at
# https://iopscience.iop.org/article/10.1088/1367-2630/aa5e47
"""
Registers a decomposition for the H gate into an Ry and Rx gate.
"""
import math
from projectq.cengines import DecompositionRule
from projectq.meta import get_control_count
from projectq.ops import Ph, Rx, Ry, H
def _decompose_h2rx_M(cmd): # pylint: disable=invalid-name
"""Decompose the Ry gate."""
# Labelled 'M' for 'minus' because decomposition ends with a Ry(-pi/2)
qubit = cmd.qubits[0]
Rx(math.pi) | qubit
Ph(math.pi / 2) | qubit
Ry(-1 * math.pi / 2) | qubit
def _decompose_h2rx_N(cmd): # pylint: disable=invalid-name
"""Decompose the Ry gate."""
# Labelled 'N' for 'neutral' because decomposition doesn't end with
# Ry(pi/2) or Ry(-pi/2)
qubit = cmd.qubits[0]
Ry(math.pi / 2) | qubit
Ph(3 * math.pi / 2) | qubit
Rx(-1 * math.pi) | qubit
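# Sanity check for the two decompositions above (added note, not part of the original file).
# Writing the last-applied gate leftmost, both sequences multiply out to the Hadamard:
#   _decompose_h2rx_M:  Ry(-pi/2) * Ph(pi/2)  * Rx(pi)   = 1/sqrt(2) * [[1, 1], [1, -1]] = H
#   _decompose_h2rx_N:  Rx(-pi)   * Ph(3pi/2) * Ry(pi/2) = 1/sqrt(2) * [[1, 1], [1, -1]] = H
# where Rx/Ry use the half-angle convention Rx(t) = exp(-i*t*X/2) and Ph is a global phase.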
def _recognize_HNoCtrl(cmd): # pylint: disable=invalid-name
"""For efficiency reasons only if no control qubits."""
return get_control_count(cmd) == 0
#: Decomposition rules
all_defined_decomposition_rules = [
DecompositionRule(H.__class__, _decompose_h2rx_N, _recognize_HNoCtrl),
DecompositionRule(H.__class__, _decompose_h2rx_M, _recognize_HNoCtrl),
]
| apache-2.0 | -7,079,851,581,845,540,000 | 33.87931 | 76 | 0.69303 | false |
montyly/manticore | manticore/native/state.py | 1 | 2684 | from ..core.state import StateBase, Concretize, TerminateState
from ..native.memory import ConcretizeMemory, MemoryException
class State(StateBase):
@property
def cpu(self):
"""
Current cpu state
"""
return self._platform.current
@property
def mem(self):
"""
Current virtual memory mappings
"""
return self._platform.current.memory
def execute(self):
"""
Perform a single step on the current state
"""
from .cpu.abstractcpu import (
ConcretizeRegister,
) # must be here, otherwise we get circular imports
try:
result = self._platform.execute()
# Instead of State importing SymbolicRegisterException and SymbolicMemoryException
# from cpu/memory shouldn't we import Concretize from linux, cpu, memory ??
# We are forcing State to have abstractcpu
except ConcretizeRegister as e:
expression = self.cpu.read_register(e.reg_name)
def setstate(state, value):
state.cpu.write_register(setstate.e.reg_name, value)
setstate.e = e
raise Concretize(str(e), expression=expression, setstate=setstate, policy=e.policy)
except ConcretizeMemory as e:
expression = self.cpu.read_int(e.address, e.size)
def setstate(state, value):
state.cpu.write_int(setstate.e.address, value, setstate.e.size)
setstate.e = e
raise Concretize(str(e), expression=expression, setstate=setstate, policy=e.policy)
except MemoryException as e:
raise TerminateState(str(e), testcase=True)
# Remove when code gets stable?
assert self.platform.constraints is self.constraints
return result
def invoke_model(self, model):
"""
Invokes a `model`. Modelling can be used to override a function in the target program with a custom
implementation.
For more information on modelling see docs/models.rst
A `model` is a callable whose first argument is a `manticore.native.State` instance.
If the following arguments correspond to the arguments of the C function
being modeled. If the `model` models a variadic function, the following argument
is a generator object, which can be used to access function arguments dynamically.
The `model` callable should simply return the value that should be returned by the
native function being modeled.f
:param model: callable, model to invoke
"""
self._platform.invoke_model(model, prefix_args=(self,))
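
# Illustrative sketch of a model callable for `invoke_model` (added for clarity;
# the function below is hypothetical and not part of manticore itself):
#
#     def returns_fortytwo(state):
#         # a model receives the State as its first argument; its return value
#         # becomes the return value of the native function being modeled
#         return 42
#
#     state.invoke_model(returns_fortytwo)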
| apache-2.0 | -261,491,013,077,682,460 | 35.27027 | 107 | 0.643443 | false |
houshengbo/nova_vmware_compute_driver | nova/tests/matchers.py | 1 | 14525 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Matcher classes to be used inside of the testtools assertThat framework."""
import pprint
from lxml import etree
class DictKeysMismatch(object):
def __init__(self, d1only, d2only):
self.d1only = d1only
self.d2only = d2only
def describe(self):
return ('Keys in d1 and not d2: %(d1only)s.'
' Keys in d2 and not d1: %(d2only)s' % self.__dict__)
def get_details(self):
return {}
class DictMismatch(object):
def __init__(self, key, d1_value, d2_value):
self.key = key
self.d1_value = d1_value
self.d2_value = d2_value
def describe(self):
return ("Dictionaries do not match at %(key)s."
" d1: %(d1_value)s d2: %(d2_value)s" % self.__dict__)
def get_details(self):
return {}
class DictMatches(object):
def __init__(self, d1, approx_equal=False, tolerance=0.001):
self.d1 = d1
self.approx_equal = approx_equal
self.tolerance = tolerance
def __str__(self):
return 'DictMatches(%s)' % (pprint.pformat(self.d1))
# Useful assertions
def match(self, d2):
"""Assert two dicts are equivalent.
This is a 'deep' match in the sense that it handles nested
dictionaries appropriately.
NOTE:
If you don't care (or don't know) a given value, you can specify
the string DONTCARE as the value. This will cause that dict-item
to be skipped.
"""
d1keys = set(self.d1.keys())
d2keys = set(d2.keys())
if d1keys != d2keys:
d1only = d1keys - d2keys
d2only = d2keys - d1keys
return DictKeysMismatch(d1only, d2only)
for key in d1keys:
d1value = self.d1[key]
d2value = d2[key]
try:
error = abs(float(d1value) - float(d2value))
within_tolerance = error <= self.tolerance
except (ValueError, TypeError):
# If both values aren't convertible to float, just ignore
# ValueError if arg is a str, TypeError if it's something else
# (like None)
within_tolerance = False
if hasattr(d1value, 'keys') and hasattr(d2value, 'keys'):
matcher = DictMatches(d1value)
did_match = matcher.match(d2value)
if did_match is not None:
return did_match
elif 'DONTCARE' in (d1value, d2value):
continue
elif self.approx_equal and within_tolerance:
continue
elif d1value != d2value:
return DictMismatch(key, d1value, d2value)
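
# Example of the matcher above (illustrative, not part of the original module):
#
#     matcher = DictMatches({'id': 'DONTCARE', 'flavor': {'ram': 512}})
#     mismatch = matcher.match({'id': 'abc123', 'flavor': {'ram': 512}})
#     # mismatch is None because the dicts match ('DONTCARE' skips the 'id' value);
#     # on failure a DictKeysMismatch/DictMismatch object describing the problem is returned.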
class ListLengthMismatch(object):
def __init__(self, len1, len2):
self.len1 = len1
self.len2 = len2
def describe(self):
return ('Length mismatch: len(L1)=%(len1)d != '
'len(L2)=%(len2)d' % self.__dict__)
def get_details(self):
return {}
class DictListMatches(object):
def __init__(self, l1, approx_equal=False, tolerance=0.001):
self.l1 = l1
self.approx_equal = approx_equal
self.tolerance = tolerance
def __str__(self):
return 'DictListMatches(%s)' % (pprint.pformat(self.l1))
# Useful assertions
def match(self, l2):
"""Assert a list of dicts are equivalent."""
l1count = len(self.l1)
l2count = len(l2)
if l1count != l2count:
return ListLengthMismatch(l1count, l2count)
for d1, d2 in zip(self.l1, l2):
matcher = DictMatches(d2,
approx_equal=self.approx_equal,
tolerance=self.tolerance)
did_match = matcher.match(d1)
if did_match:
return did_match
class SubDictMismatch(object):
def __init__(self,
key=None,
sub_value=None,
super_value=None,
keys=False):
self.key = key
self.sub_value = sub_value
self.super_value = super_value
self.keys = keys
def describe(self):
if self.keys:
return "Keys between dictionaries did not match"
else:
return("Dictionaries do not match at %s. d1: %s d2: %s"
% (self.key,
self.super_value,
self.sub_value))
def get_details(self):
return {}
class IsSubDictOf(object):
def __init__(self, super_dict):
self.super_dict = super_dict
def __str__(self):
return 'IsSubDictOf(%s)' % (self.super_dict)
def match(self, sub_dict):
"""Assert a sub_dict is subset of super_dict."""
if not set(sub_dict.keys()).issubset(set(self.super_dict.keys())):
return SubDictMismatch(keys=True)
for k, sub_value in sub_dict.items():
super_value = self.super_dict[k]
if isinstance(sub_value, dict):
matcher = IsSubDictOf(super_value)
did_match = matcher.match(sub_value)
if did_match is not None:
return did_match
elif 'DONTCARE' in (sub_value, super_value):
continue
else:
if sub_value != super_value:
return SubDictMismatch(k, sub_value, super_value)
class XMLMismatch(object):
"""Superclass for XML mismatch."""
def __init__(self, state):
self.path = str(state)
self.expected = state.expected
self.actual = state.actual
def describe(self):
return "%(path)s: XML does not match" % self.__dict__
def get_details(self):
return {
'expected': self.expected,
'actual': self.actual,
}
class XMLTagMismatch(XMLMismatch):
"""XML tags don't match."""
def __init__(self, state, idx, expected_tag, actual_tag):
super(XMLTagMismatch, self).__init__(state)
self.idx = idx
self.expected_tag = expected_tag
self.actual_tag = actual_tag
def describe(self):
return ("%(path)s: XML tag mismatch at index %(idx)d: "
"expected tag <%(expected_tag)s>; "
"actual tag <%(actual_tag)s>" % self.__dict__)
class XMLAttrKeysMismatch(XMLMismatch):
"""XML attribute keys don't match."""
def __init__(self, state, expected_only, actual_only):
super(XMLAttrKeysMismatch, self).__init__(state)
self.expected_only = ', '.join(sorted(expected_only))
self.actual_only = ', '.join(sorted(actual_only))
def describe(self):
return ("%(path)s: XML attributes mismatch: "
"keys only in expected: %(expected_only)s; "
"keys only in actual: %(actual_only)s" % self.__dict__)
class XMLAttrValueMismatch(XMLMismatch):
"""XML attribute values don't match."""
def __init__(self, state, key, expected_value, actual_value):
super(XMLAttrValueMismatch, self).__init__(state)
self.key = key
self.expected_value = expected_value
self.actual_value = actual_value
def describe(self):
return ("%(path)s: XML attribute value mismatch: "
"expected value of attribute %(key)s: %(expected_value)r; "
"actual value: %(actual_value)r" % self.__dict__)
class XMLTextValueMismatch(XMLMismatch):
"""XML text values don't match."""
def __init__(self, state, expected_text, actual_text):
super(XMLTextValueMismatch, self).__init__(state)
self.expected_text = expected_text
self.actual_text = actual_text
def describe(self):
return ("%(path)s: XML text value mismatch: "
"expected text value: %(expected_text)r; "
"actual value: %(actual_text)r" % self.__dict__)
class XMLUnexpectedChild(XMLMismatch):
"""Unexpected child present in XML."""
def __init__(self, state, tag, idx):
super(XMLUnexpectedChild, self).__init__(state)
self.tag = tag
self.idx = idx
def describe(self):
return ("%(path)s: XML unexpected child element <%(tag)s> "
"present at index %(idx)d" % self.__dict__)
class XMLExpectedChild(XMLMismatch):
"""Expected child not present in XML."""
def __init__(self, state, tag, idx):
super(XMLExpectedChild, self).__init__(state)
self.tag = tag
self.idx = idx
def describe(self):
return ("%(path)s: XML expected child element <%(tag)s> "
"not present at index %(idx)d" % self.__dict__)
class XMLMatchState(object):
"""
Maintain some state for matching.
Tracks the XML node path and saves the expected and actual full
XML text, for use by the XMLMismatch subclasses.
"""
def __init__(self, expected, actual):
self.path = []
self.expected = expected
self.actual = actual
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, exc_tb):
self.path.pop()
return False
def __str__(self):
return '/' + '/'.join(self.path)
def node(self, tag, idx):
"""
Adds tag and index to the path; they will be popped off when
the corresponding 'with' statement exits.
:param tag: The element tag
:param idx: If not None, the integer index of the element
within its parent. Not included in the path
element if None.
"""
if idx is not None:
self.path.append("%s[%d]" % (tag, idx))
else:
self.path.append(tag)
return self
class XMLMatches(object):
"""Compare XML strings. More complete than string comparison."""
def __init__(self, expected):
self.expected_xml = expected
self.expected = etree.fromstring(expected)
def __str__(self):
return 'XMLMatches(%r)' % self.expected_xml
def match(self, actual_xml):
actual = etree.fromstring(actual_xml)
state = XMLMatchState(self.expected_xml, actual_xml)
result = self._compare_node(self.expected, actual, state, None)
if result is False:
return XMLMismatch(state)
elif result is not True:
return result
def _compare_node(self, expected, actual, state, idx):
"""Recursively compares nodes within the XML tree."""
# Start by comparing the tags
if expected.tag != actual.tag:
return XMLTagMismatch(state, idx, expected.tag, actual.tag)
with state.node(expected.tag, idx):
# Compare the attribute keys
expected_attrs = set(expected.attrib.keys())
actual_attrs = set(actual.attrib.keys())
if expected_attrs != actual_attrs:
expected_only = expected_attrs - actual_attrs
actual_only = actual_attrs - expected_attrs
return XMLAttrKeysMismatch(state, expected_only, actual_only)
# Compare the attribute values
for key in expected_attrs:
expected_value = expected.attrib[key]
actual_value = actual.attrib[key]
if 'DONTCARE' in (expected_value, actual_value):
continue
elif expected_value != actual_value:
return XMLAttrValueMismatch(state, key, expected_value,
actual_value)
# Compare the contents of the node
if len(expected) == 0 and len(actual) == 0:
# No children, compare text values
if ('DONTCARE' not in (expected.text, actual.text) and
expected.text != actual.text):
return XMLTextValueMismatch(state, expected.text,
actual.text)
else:
expected_idx = 0
actual_idx = 0
while (expected_idx < len(expected) and
actual_idx < len(actual)):
# Ignore comments and processing instructions
# TODO(Vek): may interpret PIs in the future, to
# allow for, say, arbitrary ordering of some
# elements
if (expected[expected_idx].tag in
(etree.Comment, etree.ProcessingInstruction)):
expected_idx += 1
continue
# Compare the nodes
result = self._compare_node(expected[expected_idx],
actual[actual_idx], state,
actual_idx)
if result is not True:
return result
# Step on to comparing the next nodes...
expected_idx += 1
actual_idx += 1
# Make sure we consumed all nodes in actual
if actual_idx < len(actual):
return XMLUnexpectedChild(state, actual[actual_idx].tag,
actual_idx)
# Make sure we consumed all nodes in expected
if expected_idx < len(expected):
for node in expected[expected_idx:]:
if (node.tag in
(etree.Comment, etree.ProcessingInstruction)):
continue
return XMLExpectedChild(state, node.tag, actual_idx)
# The nodes match
return True
| apache-2.0 | -1,041,197,624,686,391,300 | 32.08656 | 78 | 0.553253 | false |
VerifiableRobotics/controller-arena | src/controllerarena/controllers/refVec.py | 1 | 4049 | # code for python reference dipole vector field controller
# these functions require numpy and math
#from mathFuns import *
from numpy import *
from math import *
class refVec:
# define the constructor
def __init__(self, q_0, controller_flag):
# Initialize controller state
self.phi_prev = None
self.q_prev = q_0
self.e_int_w = 0
self.e_int_u = 0
# set gains
        self.k_p_u = 1 # u indicates it is a position gain. p indicates it is a proportional gain.
self.k_p_w = 3 # w indicates it is an angular gain. p indicates it is a proportional gain.
if controller_flag == 1: # PID
self.k_i_w = 1
self.k_i_u = 1
self.k_d = -1 # the derivative gain is only on the angle
elif controller_flag == 2: # PI
self.k_i_w = 1
self.k_i_u = 1
self.k_d = 0
elif controller_flag == 3: # PD
self.k_i_w = 0
self.k_i_u = 0
self.k_d = -1
else: # P
self.k_i_w = 0
self.k_i_u = 0
self.k_d = 0
def get_output(self, q_d, q, dt): # obtain reference vector field value
F = self.get_vector_field(q, q_d) # F is an column vector
## obtain control signal as a fcn of reference vector field value
u = self.get_control(q, q_d, F, dt)
return u
def get_vector_field(self, q, q_d):
# return type: numpy array
# note: unsure if this vector field was just an example from the paper!!
# compute vector field F
# unpack
# x = q[0][0]
# y = q[1][0]
# x_d = q_d[0][0]
# y_d = q_d[1][0]
# #
# # compute [taken from paper draft], where r = [1;0] and lambda = 3
# Fx = 2*(x - x_d)**2 - (y - y_d)**2
# Fy = 3*(x - x_d)*(y - y_d)
# F = array([[Fx],[Fy]])
lamb = 3
theta_d = q_d[2][0]
delta_p = q[0:2] - q_d[0:2] # location - location_desired
r = array([[cos(theta_d)],[sin(theta_d)]])
F = lamb*(dot(transpose(r), delta_p)[0][0])*delta_p - r*(dot(transpose(delta_p), delta_p)[0][0]) # should be col vector
        print(F)
return F # col vector
def get_control(self, q, q_d, F, dt):
# I think that this control law is not a function of the vector field, and that it should
# work if F(q) changes
#
# compute control signal u
delta_p = q[0:2] - q_d[0:2] # location - location_desired
self.e_int_w += self.sub_angles(q[2][0],q_d[2][0])*dt # accumulate angular error
self.e_int_u += linalg.norm(delta_p)*dt # accumulate position error
theta = q[2][0]
# unpack gains
k_p_u = self.k_p_u
k_p_w = self.k_p_w
k_i_w = self.k_i_w
k_i_u = self.k_i_u
k_d = self.k_d
Fx = F[0][0]
Fy = F[1][0]
phi = atan2(Fy,Fx)
# backward finite difference for phidot
        if self.phi_prev is None: # if this is the first pass through the controller, phi_dot = 0
self.phi_prev = phi
# end if
phi_dot = (phi-self.phi_prev)/dt
self.phi_prev = phi
q_dot = (q-self.q_prev)/dt
self.q_prev = q
# controller
v = -k_p_u*sign( dot(transpose(delta_p), array([[cos(theta)],[sin(theta)]]) )[0][0] )*tanh(linalg.norm(delta_p)**2) - k_i_u*self.e_int_u
w = -k_p_w*self.sub_angles(theta, phi) - k_i_w*self.e_int_w - k_d*phi_dot # k_d determines whether derivative term is used, k_i for i term
u = array([[v], [w]])
        print(u)
return u
def update_state(self, q_d, q, dt):
# x_k+1 = 0
pass
def sub_angles(self, ang1, ang2):
return (ang1 - ang2 + pi)%(2*pi) - pi
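# Illustrative usage sketch (not part of the original file). State vectors are
# assumed to be 3x1 numpy column arrays [[x], [y], [theta]], and controller_flag
# selects PID (1), PI (2), PD (3) or P (otherwise):
#
#   q_0 = array([[0.0], [0.0], [0.0]])       # initial pose
#   q_d = array([[1.0], [1.0], [0.0]])       # desired pose
#   ctrl = refVec(q_0, 1)                    # PID gains
#   u = ctrl.get_output(q_d, q_0, 0.02)      # [[v], [w]]: linear and angular speed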
# For future:
# pass r vector as parameter
# low pass filtering for derivatives (PD control?) [phidot]
# visual stuff
# global feedback plan is the reference vector field
# controller is a function of vector field, but you can use a better controller to get better performance
| bsd-3-clause | -3,581,265,295,090,304,000 | 33.606838 | 147 | 0.535935 | false |
tensorflow/datasets | tensorflow_datasets/summarization/newsroom.py | 1 | 4417 | # coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NEWSROOM Dataset."""
import json
import os
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
_CITATION = """
@article{Grusky_2018,
title={Newsroom: A Dataset of 1.3 Million Summaries with Diverse Extractive Strategies},
url={http://dx.doi.org/10.18653/v1/n18-1065},
DOI={10.18653/v1/n18-1065},
journal={Proceedings of the 2018 Conference of the North American Chapter of
the Association for Computational Linguistics: Human Language
Technologies, Volume 1 (Long Papers)},
publisher={Association for Computational Linguistics},
author={Grusky, Max and Naaman, Mor and Artzi, Yoav},
year={2018}
}
"""
_DESCRIPTION = """
NEWSROOM is a large dataset for training and evaluating summarization systems.
It contains 1.3 million articles and summaries written by authors and
editors in the newsrooms of 38 major publications.
Dataset features includes:
- text: Input news text.
- summary: Summary for the news.
And additional features:
- title: news title.
- url: url of the news.
- date: date of the article.
- density: extractive density.
- coverage: extractive coverage.
- compression: compression ratio.
- density_bin: low, medium, high.
- coverage_bin: extractive, abstractive.
- compression_bin: low, medium, high.
This dataset can be downloaded upon request. Unzip all the contents
"train.jsonl, dev.jsonl, test.jsonl" to the tfds folder.
"""
_DOCUMENT = "text"
_SUMMARY = "summary"
_ADDITIONAL_TEXT_FEATURES = [
"title", "url", "date", "density_bin", "coverage_bin", "compression_bin"
]
_ADDITIONAL_FLOAT_FEATURES = [
"density",
"coverage",
"compression",
]
class Newsroom(tfds.core.GeneratorBasedBuilder):
"""NEWSROOM Dataset."""
VERSION = tfds.core.Version("1.0.0")
MANUAL_DOWNLOAD_INSTRUCTIONS = """\
You should download the dataset from https://summari.es/download/
The webpage requires registration.
After downloading, please put dev.jsonl, test.jsonl and train.jsonl
files in the manual_dir.
"""
def _info(self):
features = {
k: tfds.features.Text()
for k in [_DOCUMENT, _SUMMARY] + _ADDITIONAL_TEXT_FEATURES
}
features.update({
k: tfds.features.Tensor(shape=[], dtype=tf.float32)
for k in _ADDITIONAL_FLOAT_FEATURES
})
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=tfds.features.FeaturesDict(features),
supervised_keys=(_DOCUMENT, _SUMMARY),
homepage="https://summari.es",
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
return [
tfds.core.SplitGenerator(
name=tfds.Split.TRAIN,
gen_kwargs={
"input_file": os.path.join(dl_manager.manual_dir, "train.jsonl")
},
),
tfds.core.SplitGenerator(
name=tfds.Split.VALIDATION,
gen_kwargs={
"input_file": os.path.join(dl_manager.manual_dir, "dev.jsonl")
},
),
tfds.core.SplitGenerator(
name=tfds.Split.TEST,
gen_kwargs={
"input_file": os.path.join(dl_manager.manual_dir, "test.jsonl")
},
),
]
def _generate_examples(self, input_file=None):
"""Yields examples."""
with tf.io.gfile.GFile(input_file) as f:
for i, line in enumerate(f):
d = json.loads(line)
# fields are "url", "archive", "title", "date", "text",
# "compression_bin", "density_bin", "summary", "density",
# "compression', "coverage", "coverage_bin",
yield i, {
k: d[k] for k in [_DOCUMENT, _SUMMARY] + _ADDITIONAL_TEXT_FEATURES +
_ADDITIONAL_FLOAT_FEATURES
}
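# Illustrative usage sketch (not part of the original module). After placing the
# manually downloaded train.jsonl/dev.jsonl/test.jsonl files in the manual
# directory, the dataset can be loaded in the usual TFDS way:
#
#   ds = tfds.load("newsroom", split="train")
#   for example in ds.take(1):
#       print(example["text"], example["summary"])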
| apache-2.0 | 6,486,932,188,809,753,000 | 31.007246 | 91 | 0.655649 | false |
acsis-project/emissions | emissions/python/periodic_1960/regrid_OC_biomass_emissions_n96e_360d_1960.py | 1 | 7191 | #!/usr/bin/env python
##############################################################################################
#
#
# regrid_emissions_N96e.py
#
#
# Requirements:
# Iris 1.10, cf_units, numpy
#
#
# This Python script has been written by N.L. Abraham as part of the UKCA Tutorials:
# http://www.ukca.ac.uk/wiki/index.php/UKCA_Chemistry_and_Aerosol_Tutorials_at_vn10.4
#
# Copyright (C) 2015 University of Cambridge
#
# This is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# It is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You find a copy of the GNU Lesser General Public License at <http://www.gnu.org/licenses/>.
#
# Written by N. Luke Abraham 2016-10-20 <[email protected]>
#
#
##############################################################################################
# preamble
import os
import time
import iris
import cf_units
import numpy
# --- CHANGE THINGS BELOW THIS LINE TO WORK WITH YOUR FILES ETC. ---
# name of file containing an ENDGame grid, e.g. your model output
# NOTE: all the fields in the file should be on the same horizontal
# grid, as the field used MAY NOT be the first in order of STASH
grid_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/um/archer/ag542/apm.pp/ag542a.pm1988dec'
#
# name of emissions file
emissions_file='/group_workspaces/jasmin2/ukca/vol1/mkoehler/emissions/combined_1960/0.5x0.5/combined_sources_OC_biomass_1960_360d.nc'
#
# STASH code emissions are associated with
# 301-320: surface
# m01s00i323: Organic carbon biomass burning emissions
#
# 321-340: full atmosphere
#
stash='m01s00i323'
# --- BELOW THIS LINE, NOTHING SHOULD NEED TO BE CHANGED ---
species_name='OC_biomass'
# this is the grid we want to regrid to, e.g. N96 ENDGame
grd=iris.load(grid_file)[0]
grd.coord(axis='x').guess_bounds()
grd.coord(axis='y').guess_bounds()
# This is the original data
ems=iris.load_cube(emissions_file)
# make intersection between 0 and 360 longitude to ensure that
# the data is regridded correctly
nems = ems.intersection(longitude=(0, 360))
# make sure that we use the same coordinate system, otherwise regrid won't work
nems.coord(axis='x').coord_system=grd.coord_system()
nems.coord(axis='y').coord_system=grd.coord_system()
# now guess the bounds of the new grid prior to regridding
nems.coord(axis='x').guess_bounds()
nems.coord(axis='y').guess_bounds()
# now regrid
ocube=nems.regrid(grd,iris.analysis.AreaWeighted())
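# Optional sanity check (illustrative sketch, not part of the original script):
# area-weighted regridding should roughly conserve the global emission total,
# so comparing area-weighted sums before and after regridding is a quick check.
#
#   import iris.analysis.cartography as icart
#   w_src = icart.area_weights(nems)
#   w_dst = icart.area_weights(ocube)
#   print(nems.collapsed(['longitude', 'latitude'], iris.analysis.SUM, weights=w_src).data)
#   print(ocube.collapsed(['longitude', 'latitude'], iris.analysis.SUM, weights=w_dst).data)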
# now add correct attributes and names to netCDF file
ocube.var_name='emissions_'+str.strip(species_name)
ocube.long_name='OC biomass burning emissions expressed as carbon'
ocube.units=cf_units.Unit('kg m-2 s-1')
ocube.attributes['vertical_scaling']='high_level'
ocube.attributes['highest_level']='21'
ocube.attributes['lowest_level']='1'
ocube.attributes['um_stash_source']=stash
ocube.attributes['tracer_name']=str.strip(species_name)
# global attributes, so don't set in local_keys
# NOTE: all these should be strings, including the numbers!
# basic emissions type
ocube.attributes['emission_type']='2' # periodic time series
ocube.attributes['update_type']='2' # same as above
ocube.attributes['update_freq_in_hours']='120' # i.e. 5 days
ocube.attributes['um_version']='10.6' # UM version
ocube.attributes['source']='combined_sources_OC_biomass_1960_360d.nc'
ocube.attributes['title']='Monthly emissions of Organic Carbon (OC) from biomass burning for 1960'
ocube.attributes['File_version']='v1'
ocube.attributes['File_creation_date']=time.ctime(time.time())
ocube.attributes['grid']='regular 1.875 x 1.25 degree longitude-latitude grid (N96e)'
ocube.attributes['history']=time.ctime(time.time())+': '+__file__+' \n'+ocube.attributes['history']
ocube.attributes['institution']='Centre for Atmospheric Science, Department of Chemistry, University of Cambridge, U.K.'
ocube.attributes['reference']='Granier et al., Clim. Change, 2011; Lamarque et al., Atmos. Chem. Phys., 2010'
del ocube.attributes['NCO']
del ocube.attributes['file_creation_date']
del ocube.attributes['description']
# rename and set time coord - twelve mid-month points covering Jan-Dec 1960
# this bit is annoyingly fiddly
ocube.coord(axis='t').var_name='time'
ocube.coord(axis='t').standard_name='time'
ocube.coords(axis='t')[0].units=cf_units.Unit('days since 1960-01-01 00:00:00', calendar='360_day')
ocube.coord(axis='t').points=numpy.array([15, 45, 75, 105, 135, 165, 195, 225, 255, 285, 315, 345])
# make z-direction.
zdims=iris.coords.DimCoord(numpy.array([0]),standard_name = 'model_level_number',
units='1',attributes={'positive':'up'})
ocube.add_aux_coord(zdims)
ocube=iris.util.new_axis(ocube, zdims)
# now transpose cube to put Z 2nd
ocube.transpose([1,0,2,3])
# make coordinates 64-bit
ocube.coord(axis='x').points=ocube.coord(axis='x').points.astype(dtype='float64')
ocube.coord(axis='y').points=ocube.coord(axis='y').points.astype(dtype='float64')
#ocube.coord(axis='z').points=ocube.coord(axis='z').points.astype(dtype='float64') # integer
ocube.coord(axis='t').points=ocube.coord(axis='t').points.astype(dtype='float64')
# for some reason, longitude_bounds are double, but latitude_bounds are float
ocube.coord('latitude').bounds=ocube.coord('latitude').bounds.astype(dtype='float64')
# add forecast_period & forecast_reference_time
# forecast_reference_time
frt=numpy.array([15, 45, 75, 105, 135, 165, 195, 225, 255, 285, 315, 345], dtype='float64')
frt_dims=iris.coords.AuxCoord(frt,standard_name = 'forecast_reference_time',
units=cf_units.Unit('days since 1960-01-01 00:00:00', calendar='360_day'))
ocube.add_aux_coord(frt_dims,data_dims=0)
ocube.coord('forecast_reference_time').guess_bounds()
# forecast_period
fp=numpy.array([-360],dtype='float64')
fp_dims=iris.coords.AuxCoord(fp,standard_name = 'forecast_period',
units=cf_units.Unit('hours'),bounds=numpy.array([-720,0],dtype='float64'))
ocube.add_aux_coord(fp_dims,data_dims=None)
# add-in cell_methods
ocube.cell_methods = [iris.coords.CellMethod('mean', 'time')]
# set _FillValue
fillval=1e+20
ocube.data = numpy.ma.array(data=ocube.data, fill_value=fillval, dtype='float32')
# output file name, based on species
outpath='ukca_emiss_'+species_name+'.nc'
# don't want the time dimension to be unlimited (concatenatable), as this is a periodic emissions file
iris.FUTURE.netcdf_no_unlimited=True
# annoying hack to set a missing_value attribute as well as a _FillValue attribute
dict.__setitem__(ocube.attributes, 'missing_value', fillval)
# now write-out to netCDF
saver = iris.fileformats.netcdf.Saver(filename=outpath, netcdf_format='NETCDF3_CLASSIC')
saver.update_global_attributes(Conventions=iris.fileformats.netcdf.CF_CONVENTIONS_VERSION)
saver.write(ocube, local_keys=['vertical_scaling', 'missing_value','um_stash_source','tracer_name','highest_level','lowest_level'])
# end of script
| gpl-3.0 | 508,121,574,114,016,400 | 41.550296 | 134 | 0.715339 | false |
Azure/azure-sdk-for-python | sdk/servicebus/azure-servicebus/tests/livetest/test_errors.py | 1 | 1336 | import logging
from uamqp import errors as AMQPErrors, constants as AMQPConstants
from azure.servicebus.exceptions import (
_create_servicebus_exception,
ServiceBusConnectionError,
ServiceBusError
)
def test_link_idle_timeout():
logger = logging.getLogger("testlogger")
amqp_error = AMQPErrors.LinkDetach(AMQPConstants.ErrorCodes.LinkDetachForced, description="Details: AmqpMessageConsumer.IdleTimerExpired: Idle timeout: 00:10:00.")
sb_error = _create_servicebus_exception(logger, amqp_error)
assert isinstance(sb_error, ServiceBusConnectionError)
assert sb_error._retryable
assert sb_error._shutdown_handler
def test_unknown_connection_error():
logger = logging.getLogger("testlogger")
amqp_error = AMQPErrors.AMQPConnectionError(AMQPConstants.ErrorCodes.UnknownError)
sb_error = _create_servicebus_exception(logger, amqp_error)
assert isinstance(sb_error,ServiceBusConnectionError)
assert sb_error._retryable
assert sb_error._shutdown_handler
amqp_error = AMQPErrors.AMQPError(AMQPConstants.ErrorCodes.UnknownError)
sb_error = _create_servicebus_exception(logger, amqp_error)
assert not isinstance(sb_error,ServiceBusConnectionError)
assert isinstance(sb_error,ServiceBusError)
assert not sb_error._retryable
assert sb_error._shutdown_handler
| mit | 5,185,030,741,937,892,000 | 39.484848 | 167 | 0.776946 | false |
codywilbourn/streamparse | streamparse/run.py | 1 | 1924 | """
Helper script for starting up bolts and spouts.
"""
import argparse
import importlib
import os
import sys
from pystorm.component import _SERIALIZERS
RESOURCES_PATH = 'resources'
def main():
"""main entry point for Python bolts and spouts"""
parser = argparse.ArgumentParser(description='Run a bolt/spout class',
epilog='This is internal to streamparse '
'and is used to run spout and bolt '
'classes via ``python -m '
'streamparse.run <class name>``.')
parser.add_argument('target_class', help='The bolt/spout class to start.')
parser.add_argument('-s', '--serializer',
help='The serialization protocol to use to talk to '
'Storm.',
choices=_SERIALIZERS.keys(),
default='json')
# Storm sends everything as one string, which is not great
if len(sys.argv) == 2:
sys.argv = [sys.argv[0]] + sys.argv[1].split()
args = parser.parse_args()
mod_name, cls_name = args.target_class.rsplit('.', 1)
# Add current directory to sys.path so imports will work
import_path = os.getcwd() # Storm <= 1.0.2
if RESOURCES_PATH in next(os.walk(import_path))[1] and \
os.path.isfile(os.path.join(import_path,
RESOURCES_PATH,
mod_name.replace('.', os.path.sep) + '.py')):
import_path = os.path.join(import_path,
RESOURCES_PATH) # Storm >= 1.0.3
sys.path.append(import_path)
# Import module
mod = importlib.import_module(mod_name)
# Get class from module and run it
cls = getattr(mod, cls_name)
cls(serializer=args.serializer).run()
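# Illustrative invocation (not part of the original module). In normal operation
# Storm's multilang machinery launches this for each spout/bolt, but it can be
# exercised by hand with a (hypothetical) target class and a serializer flag:
#
#   python -m streamparse.run wordcount.spouts.WordSpout -s json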
if __name__ == '__main__':
main()
| apache-2.0 | 6,012,932,336,202,372,000 | 38.265306 | 80 | 0.539501 | false |
uni2u/neutron | neutron/db/metering/metering_db.py | 1 | 10691 | # Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy import sql
from neutron.api.rpc.agentnotifiers import metering_rpc_agent_api
from neutron.common import constants
from neutron.db import common_db_mixin as base_db
from neutron.db import l3_db
from neutron.db import model_base
from neutron.db import models_v2
from neutron.extensions import metering
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
LOG = logging.getLogger(__name__)
class MeteringLabelRule(model_base.BASEV2, models_v2.HasId):
direction = sa.Column(sa.Enum('ingress', 'egress',
name='meteringlabels_direction'))
remote_ip_prefix = sa.Column(sa.String(64))
metering_label_id = sa.Column(sa.String(36),
sa.ForeignKey("meteringlabels.id",
ondelete="CASCADE"),
nullable=False)
excluded = sa.Column(sa.Boolean, default=False, server_default=sql.false())
class MeteringLabel(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(1024))
rules = orm.relationship(MeteringLabelRule, backref="label",
cascade="delete", lazy="joined")
routers = orm.relationship(
l3_db.Router,
primaryjoin="MeteringLabel.tenant_id==Router.tenant_id",
foreign_keys='MeteringLabel.tenant_id',
uselist=True)
shared = sa.Column(sa.Boolean, default=False, server_default=sql.false())
class MeteringDbMixin(metering.MeteringPluginBase,
base_db.CommonDbMixin):
def __init__(self):
self.meter_rpc = metering_rpc_agent_api.MeteringAgentNotifyAPI()
def _make_metering_label_dict(self, metering_label, fields=None):
res = {'id': metering_label['id'],
'name': metering_label['name'],
'description': metering_label['description'],
'shared': metering_label['shared'],
'tenant_id': metering_label['tenant_id']}
return self._fields(res, fields)
def create_metering_label(self, context, metering_label):
m = metering_label['metering_label']
tenant_id = self._get_tenant_id_for_create(context, m)
with context.session.begin(subtransactions=True):
metering_db = MeteringLabel(id=uuidutils.generate_uuid(),
description=m['description'],
tenant_id=tenant_id,
name=m['name'],
shared=m['shared'])
context.session.add(metering_db)
return self._make_metering_label_dict(metering_db)
def delete_metering_label(self, context, label_id):
with context.session.begin(subtransactions=True):
try:
label = self._get_by_id(context, MeteringLabel, label_id)
except orm.exc.NoResultFound:
raise metering.MeteringLabelNotFound(label_id=label_id)
context.session.delete(label)
def get_metering_label(self, context, label_id, fields=None):
try:
metering_label = self._get_by_id(context, MeteringLabel, label_id)
except orm.exc.NoResultFound:
raise metering.MeteringLabelNotFound(label_id=label_id)
return self._make_metering_label_dict(metering_label, fields)
def get_metering_labels(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
marker_obj = self._get_marker_obj(context, 'metering_labels', limit,
marker)
return self._get_collection(context, MeteringLabel,
self._make_metering_label_dict,
filters=filters, fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
def _make_metering_label_rule_dict(self, metering_label_rule, fields=None):
res = {'id': metering_label_rule['id'],
'metering_label_id': metering_label_rule['metering_label_id'],
'direction': metering_label_rule['direction'],
'remote_ip_prefix': metering_label_rule['remote_ip_prefix'],
'excluded': metering_label_rule['excluded']}
return self._fields(res, fields)
def get_metering_label_rules(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
marker_obj = self._get_marker_obj(context, 'metering_label_rules',
limit, marker)
return self._get_collection(context, MeteringLabelRule,
self._make_metering_label_rule_dict,
filters=filters, fields=fields,
sorts=sorts,
limit=limit,
marker_obj=marker_obj,
page_reverse=page_reverse)
def get_metering_label_rule(self, context, rule_id, fields=None):
try:
metering_label_rule = self._get_by_id(context,
MeteringLabelRule, rule_id)
except orm.exc.NoResultFound:
raise metering.MeteringLabelRuleNotFound(rule_id=rule_id)
return self._make_metering_label_rule_dict(metering_label_rule, fields)
def _validate_cidr(self, context, label_id, remote_ip_prefix,
direction, excluded):
r_ips = self.get_metering_label_rules(context,
filters={'metering_label_id':
label_id,
'direction':
[direction],
'excluded':
[excluded]},
fields=['remote_ip_prefix'])
cidrs = [r['remote_ip_prefix'] for r in r_ips]
new_cidr_ipset = netaddr.IPSet([remote_ip_prefix])
if (netaddr.IPSet(cidrs) & new_cidr_ipset):
raise metering.MeteringLabelRuleOverlaps(
remote_ip_prefix=remote_ip_prefix)
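    # Illustrative example of the IPSet overlap test above (not part of the
    # original code): for the same label, direction and excluded flag,
    #
    #   netaddr.IPSet(['10.0.0.0/24']) & netaddr.IPSet(['10.0.0.128/25'])
    #
    # yields the non-empty IPSet(['10.0.0.128/25']), so the second prefix
    # would be rejected with MeteringLabelRuleOverlaps.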
def create_metering_label_rule(self, context, metering_label_rule):
m = metering_label_rule['metering_label_rule']
with context.session.begin(subtransactions=True):
label_id = m['metering_label_id']
ip_prefix = m['remote_ip_prefix']
direction = m['direction']
excluded = m['excluded']
self._validate_cidr(context, label_id, ip_prefix, direction,
excluded)
metering_db = MeteringLabelRule(id=uuidutils.generate_uuid(),
metering_label_id=label_id,
direction=direction,
excluded=m['excluded'],
remote_ip_prefix=ip_prefix)
context.session.add(metering_db)
return self._make_metering_label_rule_dict(metering_db)
def delete_metering_label_rule(self, context, rule_id):
with context.session.begin(subtransactions=True):
try:
rule = self._get_by_id(context, MeteringLabelRule, rule_id)
except orm.exc.NoResultFound:
raise metering.MeteringLabelRuleNotFound(rule_id=rule_id)
context.session.delete(rule)
def _get_metering_rules_dict(self, metering_label):
rules = []
for rule in metering_label.rules:
rule_dict = self._make_metering_label_rule_dict(rule)
rules.append(rule_dict)
return rules
def _make_router_dict(self, router):
res = {'id': router['id'],
'name': router['name'],
'tenant_id': router['tenant_id'],
'admin_state_up': router['admin_state_up'],
'status': router['status'],
'gw_port_id': router['gw_port_id'],
constants.METERING_LABEL_KEY: []}
return res
def _process_sync_metering_data(self, context, labels):
all_routers = None
routers_dict = {}
for label in labels:
if label.shared:
if not all_routers:
all_routers = self._get_collection_query(context,
l3_db.Router)
routers = all_routers
else:
routers = label.routers
for router in routers:
router_dict = routers_dict.get(
router['id'],
self._make_router_dict(router))
rules = self._get_metering_rules_dict(label)
data = {'id': label['id'], 'rules': rules}
router_dict[constants.METERING_LABEL_KEY].append(data)
routers_dict[router['id']] = router_dict
return routers_dict.values()
def get_sync_data_metering(self, context, label_id=None, router_ids=None):
labels = context.session.query(MeteringLabel)
if label_id:
labels = labels.filter(MeteringLabel.id == label_id)
elif router_ids:
labels = (labels.join(MeteringLabel.routers).
filter(l3_db.Router.id.in_(router_ids)))
return self._process_sync_metering_data(context, labels)
| apache-2.0 | -8,351,349,277,479,719,000 | 42.283401 | 79 | 0.549341 | false |
psychopy/psychopy | psychopy/tests/test_misc/test_microphone.py | 1 | 7457 | from __future__ import division
from builtins import object
from past.utils import old_div
import pytest
import shutil, os, glob
from tempfile import mkdtemp
from os.path import abspath, dirname, join
from psychopy import microphone
from psychopy.microphone import _getFlacPath
from psychopy import core
from psychopy.tests import skip_under_vm
# py.test -k microphone --cov-report term-missing --cov microphone.py tests/
# flac2wav will delete the .flac file unless given keep=True
# Speech2Text can overwrite and then delete .flac if given a .wav of the same name
from psychopy.tests.utils import TESTS_PATH, TESTS_DATA_PATH
@pytest.mark.needs_sound
@pytest.mark.microphone
@pytest.mark.slow
@skip_under_vm
class TestMicrophone(object):
@classmethod
def setup_class(self):
global sound
from psychopy import sound
microphone.switchOn(48000)
self.tmp = mkdtemp(prefix='psychopy-tests-microphone')
@classmethod
def teardown_class(self):
if hasattr(self, 'tmp'):
shutil.rmtree(self.tmp, ignore_errors=True)
microphone.switchOff() # not needed, just get code coverage
def test_AudioCapture_basics(self):
microphone.haveMic = False
with pytest.raises(microphone.MicrophoneError):
microphone.AdvAudioCapture(autoLog=False)
microphone.haveMic = True
microphone.switchOn(16000, 1, 2048)
microphone.switchOn(48000)
mic = microphone.AdvAudioCapture(saveDir=self.tmp, autoLog=False)
mic = microphone.AdvAudioCapture(saveDir=self.tmp+'_test', autoLog=False)
mic.record(.10, block=False) # returns immediately
core.wait(.02)
mic.stop()
mic.reset()
mic.record(0.2, block=True)
assert os.path.isfile(mic.savedFile)
def test_AdvAudioCapture(self):
filename = os.path.join(self.tmp, 'test_mic.wav')
mic = microphone.AdvAudioCapture(autoLog=False)
tone = sound.Sound(440, secs=.02, autoLog=False)
mic.setMarker(tone=tone)
mic = microphone.AdvAudioCapture(filename=filename, saveDir=self.tmp, autoLog=False)
mic.record(1, block=True)
mic.setFile(mic.savedFile) # same file name
mic.getMarkerOnset()
mic.compress()
assert os.path.exists(mic.savedFile)
assert mic.savedFile.endswith('.flac')
mic.uncompress()
assert mic.savedFile.endswith('.wav')
assert os.path.exists(mic.savedFile)
old_size = os.path.getsize(mic.savedFile)
new_file = mic.resample(keep=False)
assert old_div(old_size, 3.1) < os.path.getsize(new_file) < old_div(old_size, 2.9)
mic.getLoudness()
mic.playback()
mic.playback(loops=2, block=False)
mic.playback(stop=True)
tmp = mic.savedFile
mic.savedFile = None
with pytest.raises(ValueError):
mic.playback()
with pytest.raises(ValueError):
mic.getLoudness()
mic.savedFile = tmp
mic.resample(keep=False)
mic.resample(newRate=48000, keep=False)
tmp = mic.savedFile
mic.savedFile = None
with pytest.raises(ValueError):
mic.resample(keep=False)
mic.savedFile = tmp
with pytest.raises(ValueError):
mic.resample(newRate=-1)
#@pytest.mark.needs_sound
@pytest.mark.microphone
@pytest.mark.speech
@pytest.mark.mic_utils
class TestMicrophoneNoSound(object):
@classmethod
def setup_class(self):
try:
assert _getFlacPath()
except Exception:
# some of the utils could be designed not to need flac but they
# currently work on a file that is distributed in flac format
pytest.skip()
self.tmp = mkdtemp(prefix='psychopy-tests-microphone')
for testFile in ['red_16000.flac.dist', 'green_48000.flac.dist']:
t = join(TESTS_DATA_PATH, testFile)
new_wav = join(self.tmp, testFile.replace('.dist', ''))
shutil.copyfile(t, new_wav)
microphone.flac2wav(new_wav)
@classmethod
def teardown_class(self):
if hasattr(self, 'tmp'):
shutil.rmtree(self.tmp, ignore_errors=True)
def test_getFlacPath(self):
microphone.FLAC_PATH = None
with pytest.raises(microphone.MicrophoneError):
_getFlacPath('this is not the flac you are looking for')
microphone.FLAC_PATH = None
_getFlacPath('flac')
microphone.FLAC_PATH = 'flac'
assert microphone.FLAC_PATH
microphone.FLAC_PATH = None
_getFlacPath()
def test_wav_flac(self):
filename = os.path.join(self.tmp, 'test_bad_readWav')
with open(filename, 'wb') as fd:
fd.write(b'x')
with pytest.raises(microphone.SoundFileError):
microphone.readWavFile(filename)
testFile = join(self.tmp, 'green_48000.wav')
newFile = microphone.wav2flac(testFile, keep=True)
microphone.flac2wav(newFile, keep=True)
newFile0 = microphone.wav2flac(testFile, keep=True, level=0)
newFile8 = microphone.wav2flac(testFile, keep=True, level=8)
assert os.path.getsize(newFile0) >= os.path.getsize(newFile8)
microphone.wav2flac('.', keep=True)
microphone.flac2wav('.', keep=True)
microphone.wav2flac('', keep=True)
microphone.flac2wav('', keep=True)
microphone.wav2flac(self.tmp, keep=True)
def test_Speech2Text(self):
pytest.skip() # google speech API gives Error 400: Bad request
from psychopy import web
try:
web.requireInternetAccess()
except web.NoInternetAccessError:
pytest.skip()
# load a known sound file
testFile = join(self.tmp, 'red_16000.wav')
gs = microphone.Speech2Text(filename=testFile)
resp = gs.getResponse()
assert resp.word == 'red'
# test batch-discover files in a directory
tmp = join(self.tmp, 'tmp')
os.mkdir(tmp)
shutil.copy(testFile, tmp)
bs = microphone.BatchSpeech2Text(files=tmp)
bs = microphone.BatchSpeech2Text(files=glob.glob(join(self.tmp, 'red_*.wav')))
while bs._activeCount():
core.wait(.1, 0)
resp = bs[0][1]
assert 0.6 < resp.confidence < 0.75 # 0.68801856
assert resp.word == 'red'
def test_DFT(self):
testFile = join(self.tmp, 'red_16000.wav')
data, sampleRate = microphone.readWavFile(testFile)
with pytest.raises(OverflowError):
microphone.getDft([])
microphone.getDft(data)
microphone.getDftBins(data)
microphone.getDftBins(data, sampleRate=16000)
microphone.getDft(data, sampleRate=sampleRate)
microphone.getDft(data, wantPhase=True)
def test_RMS(self):
testFile = join(self.tmp, 'red_16000.wav')
data, sampleRate = microphone.readWavFile(testFile)
rms = microphone.getRMS(data)
assert 588.60 < rms < 588.61
rmsb = microphone.getRMSBins(data, chunk=64)
assert 10.2 < rmsb[0] < 10.3
assert len(rmsb) == 480
def test_marker(self):
testFile = join(self.tmp, 'green_48000.wav')
marker = microphone.getMarkerOnset(testFile) # 19kHz marker sound
assert 0.0666 < marker[0] < 0.06677 # start
assert 0.0773 < marker[1] < 0.07734 # end
| gpl-3.0 | 2,128,300,330,127,985,700 | 32.895455 | 92 | 0.636449 | false |
Martin456/eve | eve/render.py | 1 | 15213 | # -*- coding: utf-8 -*-
"""
eve.render
~~~~~~~~~~
Implements proper, automated rendering for Eve responses.
:copyright: (c) 2017 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
import re
import time
import datetime
import simplejson as json
from werkzeug import utils
from functools import wraps
from eve.methods.common import get_rate_limit
from eve.utils import date_to_str, date_to_rfc1123, config, \
debug_error_message
from flask import make_response, request, Response, current_app as app, abort
try:
from collections import OrderedDict # noqa
except ImportError:
# Python 2.6 needs this back-port
from ordereddict import OrderedDict
# mapping between supported mime types and render functions.
_MIME_TYPES = [
{'mime': ('application/json',), 'renderer': 'render_json', 'tag': 'JSON'},
{'mime': ('application/xml', 'text/xml', 'application/x-xml',),
'renderer': 'render_xml', 'tag': 'XML'}]
def raise_event(f):
""" Raises both general and resource-level events after the decorated
function has been executed. Returns both the flask.request object and the
response payload to the callback.
.. versionchanged:: 0.2
Renamed 'on_<method>' hooks to 'on_post_<method>' for coherence
with new 'on_pre_<method>' hooks.
.. versionchanged:: 0.1.0
Support for PUT.
.. versionchanged:: 0.0.9
To emphasize the fact that they are tied to a method, in `on_<method>`
events, <method> is now uppercase.
.. versionadded:: 0.0.6
"""
@wraps(f)
def decorated(*args, **kwargs):
r = f(*args, **kwargs)
method = request.method
if method in ('GET', 'POST', 'PATCH', 'DELETE', 'PUT'):
event_name = 'on_post_' + method
resource = args[0] if args else None
# general hook
getattr(app, event_name)(resource, request, r)
if resource:
# resource hook
getattr(app, event_name + '_' + resource)(request, r)
return r
return decorated
@raise_event
def send_response(resource, response):
""" Prepares the response for the client.
:param resource: the resource involved.
:param response: either a flask.Response object or a tuple. The former will
simply be forwarded to the client. If the latter a proper
response will be prepared, according to directives within
the tuple.
.. versionchanged:: 0.0.6
Support for HEAD requests.
.. versionchanged:: 0.0.5
Handling the case where response is None. Happens when the request
method is 'OPTIONS', most likely while processing a CORS 'preflight'
request.
.. versionchanged:: 0.0.4
Now a simple dispatcher. Moved the response preparation logic to
``_prepare_response``.
"""
if isinstance(response, Response):
return response
else:
return _prepare_response(resource, *response if response else [None])
def _prepare_response(resource, dct, last_modified=None, etag=None,
status=200, headers=None):
""" Prepares the response object according to the client request and
available renderers, making sure that all accessory directives (caching,
etag, last-modified) are present.
:param resource: the resource involved.
:param dct: the dict that should be sent back as a response.
:param last_modified: Last-Modified header value.
:param etag: ETag header value.
:param status: response status.
.. versionchanged:: 0.7
Add support for regexes in X_DOMAINS_RE. Closes #660, #974.
ETag value now surrounded by double quotes. Closes #794.
.. versionchanged:: 0.6
JSONP Support.
.. versionchanged:: 0.4
Support for optional extra headers.
Fix #381. 500 instead of 404 if CORS is enabled.
.. versionchanged:: 0.3
Support for X_MAX_AGE.
.. versionchanged:: 0.1.0
Support for optional HATEOAS.
.. versionchanged:: 0.0.9
Support for Python 3.3.
.. versionchanged:: 0.0.7
Support for Rate-Limiting.
.. versionchanged:: 0.0.6
Support for HEAD requests.
.. versionchanged:: 0.0.5
Support for Cross-Origin Resource Sharing (CORS).
.. versionadded:: 0.0.4
"""
if request.method == 'OPTIONS':
resp = app.make_default_options_response()
else:
# obtain the best match between client's request and available mime
# types, along with the corresponding render function.
mime, renderer = _best_mime()
# invoke the render function and obtain the corresponding rendered item
rendered = globals()[renderer](dct)
# JSONP
if config.JSONP_ARGUMENT:
jsonp_arg = config.JSONP_ARGUMENT
if jsonp_arg in request.args and 'json' in mime:
callback = request.args.get(jsonp_arg)
rendered = "%s(%s)" % (callback, rendered)
# build the main wsgi response object
resp = make_response(rendered, status)
resp.mimetype = mime
# extra headers
if headers:
for header, value in headers:
if header != 'Content-Type':
resp.headers.add(header, value)
# cache directives
if request.method in ('GET', 'HEAD'):
if resource:
cache_control = config.DOMAIN[resource]['cache_control']
expires = config.DOMAIN[resource]['cache_expires']
else:
cache_control = config.CACHE_CONTROL
expires = config.CACHE_EXPIRES
if cache_control:
resp.headers.add('Cache-Control', cache_control)
if expires:
resp.expires = time.time() + expires
# etag and last-modified
if etag:
resp.headers.add('ETag', '"' + etag + '"')
if last_modified:
resp.headers.add('Last-Modified', date_to_rfc1123(last_modified))
# CORS
origin = request.headers.get('Origin')
if origin and (config.X_DOMAINS or config.X_DOMAINS_RE):
if config.X_DOMAINS is None:
domains = []
elif isinstance(config.X_DOMAINS, str):
domains = [config.X_DOMAINS]
else:
domains = config.X_DOMAINS
if config.X_DOMAINS_RE is None:
domains_re = []
elif isinstance(config.X_DOMAINS_RE, str):
domains_re = [config.X_DOMAINS_RE]
else:
domains_re = config.X_DOMAINS_RE
# precompile regexes and ignore invalids
domains_re_compiled = []
for domain_re in domains_re:
try:
domains_re_compiled.append(re.compile(domain_re))
except re.error:
continue
if config.X_HEADERS is None:
headers = []
elif isinstance(config.X_HEADERS, str):
headers = [config.X_HEADERS]
else:
headers = config.X_HEADERS
if config.X_EXPOSE_HEADERS is None:
expose_headers = []
elif isinstance(config.X_EXPOSE_HEADERS, str):
expose_headers = [config.X_EXPOSE_HEADERS]
else:
expose_headers = config.X_EXPOSE_HEADERS
# The only accepted value for Access-Control-Allow-Credentials header
# is "true"
allow_credentials = config.X_ALLOW_CREDENTIALS is True
methods = app.make_default_options_response().headers.get('allow', '')
if '*' in domains:
resp.headers.add('Access-Control-Allow-Origin', origin)
resp.headers.add('Vary', 'Origin')
elif any(origin == domain for domain in domains):
resp.headers.add('Access-Control-Allow-Origin', origin)
elif any(domain.match(origin) for domain in domains_re_compiled):
resp.headers.add('Access-Control-Allow-Origin', origin)
else:
resp.headers.add('Access-Control-Allow-Origin', '')
resp.headers.add('Access-Control-Allow-Headers', ', '.join(headers))
resp.headers.add('Access-Control-Expose-Headers',
', '.join(expose_headers))
resp.headers.add('Access-Control-Allow-Methods', methods)
resp.headers.add('Access-Control-Max-Age', config.X_MAX_AGE)
if allow_credentials:
resp.headers.add('Access-Control-Allow-Credentials', "true")
# Rate-Limiting
limit = get_rate_limit()
if limit and limit.send_x_headers:
resp.headers.add('X-RateLimit-Remaining', str(limit.remaining))
resp.headers.add('X-RateLimit-Limit', str(limit.limit))
resp.headers.add('X-RateLimit-Reset', str(limit.reset))
return resp
def _best_mime():
""" Returns the best match between the requested mime type and the
ones supported by Eve. Along with the mime, also the corresponding
render function is returns.
.. versionchanged:: 0.3
Support for optional renderers via XML and JSON configuration keywords.
"""
supported = []
renders = {}
for mime in _MIME_TYPES:
# only mime types that have not been disabled via configuration
if app.config.get(mime['tag'], True):
for mime_type in mime['mime']:
supported.append(mime_type)
renders[mime_type] = mime['renderer']
if len(supported) == 0:
abort(500, description=debug_error_message(
'Configuration error: no supported mime types')
)
best_match = request.accept_mimetypes.best_match(supported) or \
supported[0]
return best_match, renders[best_match]
def render_json(data):
""" JSON render function
.. versionchanged:: 0.2
Json encoder class is now inferred by the active data layer, allowing
for customized, data-aware JSON encoding.
.. versionchanged:: 0.1.0
Support for optional HATEOAS.
"""
set_indent = None
# make pretty prints available
if 'GET' in request.method and 'pretty' in request.args:
set_indent = 4
return json.dumps(data, indent=set_indent, cls=app.data.json_encoder_class,
sort_keys=config.JSON_SORT_KEYS)
def render_xml(data):
""" XML render function.
:param data: the data stream to be rendered as xml.
.. versionchanged:: 0.4
Support for pagination info (_meta).
.. versionchanged:: 0.2
Use the new ITEMS configuration setting.
.. versionchanged:: 0.1.0
Support for optional HATEOAS.
.. versionchanged:: 0.0.3
Support for HAL-like hyperlinks and resource descriptors.
"""
if isinstance(data, list):
data = {config.ITEMS: data}
xml = ''
if data:
xml += xml_root_open(data)
xml += xml_add_links(data)
xml += xml_add_meta(data)
xml += xml_add_items(data)
xml += xml_root_close()
return xml
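# Illustrative example (not part of the original module). Within an Eve request
# context where config.ITEMS is '_items' and no HATEOAS links are present, a
# call such as
#
#   render_xml({'_items': [{'name': 'john'}]})
#
# is expected to produce
#
#   <resource><resource><name>john</name></resource></resource>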
def xml_root_open(data):
""" Returns the opening tag for the XML root node. If the datastream
includes informations about resource endpoints (href, title), they will
be added as node attributes. The resource endpoint is then removed to allow
for further processing of the datastream.
:param data: the data stream to be rendered as xml.
.. versionchanged:: 0.1.0
Support for optional HATEOAS.
.. versionchanged:: 0.0.6
Links are now properly escaped.
.. versionadded:: 0.0.3
"""
links = data.get(config.LINKS)
href = title = ''
if links and 'self' in links:
self_ = links.pop('self')
href = ' href="%s" ' % utils.escape(self_['href'])
if 'title' in self_:
title = ' title="%s" ' % self_['title']
return '<resource%s%s>' % (href, title)
def xml_add_meta(data):
""" Returns a meta node with page, total, max_results fields.
:param data: the data stream to be rendered as xml.
.. versionchanged:: 0.5
Always return ordered items (#441).
.. versionadded:: 0.4
"""
xml = ''
meta = []
if data.get(config.META):
ordered_meta = OrderedDict(sorted(data[config.META].items()))
for name, value in ordered_meta.items():
meta.append('<%s>%d</%s>' % (name, value, name))
if meta:
xml = '<%s>%s</%s>' % (config.META, ''.join(meta), config.META)
return xml
def xml_add_links(data):
""" Returns as many <link> nodes as there are in the datastream. The links
are then removed from the datastream to allow for further processing.
:param data: the data stream to be rendered as xml.
.. versionchanged:: 0.5
Always return ordered items (#441).
.. versionchanged:: 0.0.6
Links are now properly escaped.
.. versionadded:: 0.0.3
"""
xml = ''
chunk = '<link rel="%s" href="%s" title="%s" />'
links = data.pop(config.LINKS, {})
ordered_links = OrderedDict(sorted(links.items()))
for rel, link in ordered_links.items():
if isinstance(link, list):
xml += ''.join([chunk % (rel, utils.escape(d['href']),
utils.escape(d['title'])) for d in link])
else:
xml += ''.join(chunk % (rel, utils.escape(link['href']),
link['title']))
return xml
def xml_add_items(data):
""" When this function is called the datastream can only contain a `_items`
list, or a dictionary. If a list, each item is a resource which rendered as
XML. If a dictionary, it will be rendered as XML.
:param data: the data stream to be rendered as xml.
.. versionadded:: 0.0.3
"""
try:
xml = ''.join([xml_item(item) for item in data[config.ITEMS]])
except:
xml = xml_dict(data)
return xml
def xml_item(item):
""" Represents a single resource (member of a collection) as XML.
:param data: the data stream to be rendered as xml.
.. versionadded:: 0.0.3
"""
xml = xml_root_open(item)
xml += xml_add_links(item)
xml += xml_dict(item)
xml += xml_root_close()
return xml
def xml_root_close():
""" Returns the closing tag of the XML root node.
.. versionadded:: 0.0.3
"""
return '</resource>'
def xml_dict(data):
""" Renders a dict as XML.
:param data: the data stream to be rendered as xml.
.. versionchanged:: 0.5
Always return ordered items (#441).
.. versionchanged:: 0.2
Leaf values are now properly escaped.
.. versionadded:: 0.0.3
"""
xml = ''
ordered_items = OrderedDict(sorted(data.items()))
for k, v in ordered_items.items():
if isinstance(v, datetime.datetime):
v = date_to_str(v)
elif isinstance(v, (datetime.time, datetime.date)):
v = v.isoformat()
if not isinstance(v, list):
v = [v]
for value in v:
if isinstance(value, dict):
links = xml_add_links(value)
xml += "<%s>" % k
xml += xml_dict(value)
xml += links
xml += "</%s>" % k
else:
xml += "<%s>%s</%s>" % (k, utils.escape(value), k)
return xml
| bsd-3-clause | -5,451,845,274,095,905,000 | 30.69375 | 79 | 0.602248 | false |
nwspeete-ibm/openwhisk | core/pythonAction/cli/wskrule.py | 1 | 5355 | #
# Copyright 2015-2016 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import httplib
from wskitem import Item
from wskutil import addAuthenticatedCommand, apiBase, parseQName, request, responseError
import urllib
class Rule(Item):
def __init__(self):
super(Rule, self).__init__('rule', 'rules')
def getItemSpecificCommands(self, parser, props):
subcmd = parser.add_parser('create', help='create new rule')
subcmd.add_argument('name', help='the name of the rule')
subcmd.add_argument('trigger', help='the trigger')
subcmd.add_argument('action', help='the action')
addAuthenticatedCommand(subcmd, props)
subcmd.add_argument('--shared', nargs='?', const='yes', choices=['yes', 'no'], help='shared action (default: private)')
subcmd.add_argument('--enable', help='enable rule after creating it', action='store_true', default=False)
subcmd = parser.add_parser('delete', help='delete %s' % self.name)
subcmd.add_argument('name', help='the name of the %s' % self.name)
addAuthenticatedCommand(subcmd, props)
subcmd.add_argument('--disable', help='automatically disable rule before deleting it', action='store_true', default=False)
subcmd = parser.add_parser('update', help='update an existing rule')
subcmd.add_argument('name', help='the name of the rule')
subcmd.add_argument('trigger', help='the trigger')
subcmd.add_argument('action', help='the action')
addAuthenticatedCommand(subcmd, props)
subcmd.add_argument('--shared', nargs='?', const='yes', choices=['yes', 'no'], help='shared action (default: private)')
subcmd = parser.add_parser('enable', help='enable rule')
subcmd.add_argument('name', help='the name of the rule')
addAuthenticatedCommand(subcmd, props)
subcmd = parser.add_parser('disable', help='disable rule')
subcmd.add_argument('name', help='the name of the rule')
addAuthenticatedCommand(subcmd, props)
subcmd = parser.add_parser('status', help='get rule status')
subcmd.add_argument('name', help='the name of the rule')
addAuthenticatedCommand(subcmd, props)
self.addDefaultCommands(parser, props, ['get', 'list'])
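    # Illustrative CLI usage (not part of the original file); the exact binary
    # name depends on how the OpenWhisk CLI is installed:
    #
    #   wsk rule create myRule myTrigger myAction --enable
    #   wsk rule status myRule
    #   wsk rule disable myRule
    #   wsk rule delete myRule --disable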
def cmd(self, args, props):
if args.subcmd == 'enable':
return self.setState(args, props, True)
elif args.subcmd == 'disable':
return self.setState(args, props, False)
elif args.subcmd == 'status':
return self.getState(args, props)
else:
return super(Rule, self).cmd(args, props)
def create(self, args, props, update):
payload = { 'trigger': args.trigger, 'action': args.action }
if args.shared:
self.addPublish(payload, args)
code = self.put(args, props, update, json.dumps(payload))
if (code == 0 and 'enable' in args and args.enable):
return self.setState(args, props, True)
else:
return code
def preProcessDelete(self, args, props):
if (args.disable):
return self.setState(args, props, False)
else:
return 0
def setState(self, args, props, enable):
namespace, pname = parseQName(args.name, props)
desc = 'active' if enable else 'inactive'
status = json.dumps({ 'status': desc })
url = '%(apibase)s/namespaces/%(namespace)s/rules/%(name)s' % {
'apibase': apiBase(props),
'namespace': urllib.quote(namespace),
'name': self.getSafeName(pname)
}
headers = {
'Content-Type': 'application/json'
}
res = request('POST', url, status, headers, auth=args.auth, verbose=args.verbose)
if res.status == httplib.OK:
print 'ok: rule %(name)s is %(desc)s' % {'desc': desc, 'name': args.name}
return 0
elif res.status == httplib.ACCEPTED:
desc = 'activating' if enable else 'deactivating'
print 'ok: rule %(name)s is %(desc)s' % {'desc': desc, 'name': args.name}
return 0
else:
return responseError(res)
def getState(self, args, props):
namespace, pname = parseQName(args.name, props)
url = '%(apibase)s/namespaces/%(namespace)s/rules/%(name)s' % {
'apibase': apiBase(props),
'namespace': urllib.quote(namespace),
'name': self.getSafeName(pname)
}
res = request('GET', url, auth=args.auth, verbose=args.verbose)
if res.status == httplib.OK:
result = json.loads(res.read())
print 'ok: rule %(name)s is %(status)s' % { 'name': args.name, 'status': result['status'] }
return 0
else:
return responseError(res)
| apache-2.0 | 3,352,856,487,309,075,500 | 40.835938 | 130 | 0.620355 | false |
erudit/zenon | tests/unit/apps/public/search/test_utils.py | 1 | 3018 | import pytest
from django.http.request import QueryDict
from apps.public.search.forms import SearchForm
from apps.public.search.utils import get_search_elements
class FakeSolrData:
def get_search_form_facets(self):
return {
'disciplines': [],
'languages': [
('fr', 'Français'),
('en', 'Anglais'),
],
'journals': [
('foo', 'Foo'),
('bar', 'Bar'),
],
}
@pytest.mark.parametrize('queryparams, expected_elements', [
('', []),
# Languages
('languages=es', []),
('languages=fr', [{
'field': 'Langues',
'operator': 'AND',
'str': " ET (Langues : ['Français'])",
'term': "['Français']",
}]),
('languages=fr&languages=en', [{
'field': 'Langues',
'operator': 'AND',
'str': " ET (Langues : ['Anglais', 'Français'])",
'term': "['Anglais', 'Français']",
}]),
('languages=fr&languages=en&languages=es', [{
'field': 'Langues',
'operator': 'AND',
'str': " ET (Langues : ['Anglais', 'Français'])",
'term': "['Anglais', 'Français']",
}]),
# Journals
('journal=baz', []),
('journals=foo', [{
'field': 'Revues',
'operator': 'AND',
'str': " ET (Revues : ['Foo'])",
'term': "['Foo']",
}]),
('journals=foo&journals=bar', [{
'field': 'Revues',
'operator': 'AND',
'str': " ET (Revues : ['Bar', 'Foo'])",
'term': "['Bar', 'Foo']",
}]),
('journals=foo&journals=bar&journals=baz', [{
'field': 'Revues',
'operator': 'AND',
'str': " ET (Revues : ['Bar', 'Foo'])",
'term': "['Bar', 'Foo']",
}]),
# Languages & Journals
('languages=es&journal=baz', []),
('languages=fr&journals=foo', [{
'field': 'Langues',
'operator': 'AND',
'str': " ET (Langues : ['Français'])",
'term': "['Français']",
}, {
'field': 'Revues',
'operator': 'AND',
'str': " ET (Revues : ['Foo'])",
'term': "['Foo']",
}]),
('languages=fr&languages=en&journals=foo&journals=bar', [{
'field': 'Langues',
'operator': 'AND',
'str': " ET (Langues : ['Anglais', 'Français'])",
'term': "['Anglais', 'Français']",
}, {
'field': 'Revues',
'operator': 'AND',
'str': " ET (Revues : ['Bar', 'Foo'])",
'term': "['Bar', 'Foo']",
}]),
])
def test_get_search_elements(queryparams, expected_elements, monkeypatch):
monkeypatch.setattr(SearchForm, 'solr_data', FakeSolrData())
elements = get_search_elements(
QueryDict(queryparams),
SearchForm(),
)
base_elements = [
{
'term': '*',
'field': 'Tous les champs',
'operator': None,
'str': '(Tous les champs : *)',
},
]
assert base_elements + expected_elements == elements
| gpl-3.0 | -2,187,604,127,645,334,000 | 27.638095 | 74 | 0.460925 | false |
psf/black | tests/data/cantfit.py | 1 | 4107 | # long variable name
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = 0
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = 1 # with a comment
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = [
1, 2, 3
]
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function()
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function(
arg1, arg2, arg3
)
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function(
[1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3
)
# long function name
normal_name = but_the_function_name_is_now_ridiculously_long_and_it_is_still_super_annoying()
normal_name = but_the_function_name_is_now_ridiculously_long_and_it_is_still_super_annoying(
arg1, arg2, arg3
)
normal_name = but_the_function_name_is_now_ridiculously_long_and_it_is_still_super_annoying(
[1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3
)
# long arguments
normal_name = normal_function_name(
"but with super long string arguments that on their own exceed the line limit so there's no way it can ever fit",
"eggs with spam and eggs and spam with eggs with spam and eggs and spam with eggs with spam and eggs and spam with eggs",
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it=0,
)
string_variable_name = (
"a string that is waaaaaaaayyyyyyyy too long, even in parens, there's nothing you can do" # noqa
)
for key in """
hostname
port
username
""".split():
if key in self.connect_kwargs:
raise ValueError(err.format(key))
concatenated_strings = "some strings that are " "concatenated implicitly, so if you put them on separate " "lines it will fit"
del concatenated_strings, string_variable_name, normal_function_name, normal_name, need_more_to_make_the_line_long_enough
# output
# long variable name
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = (
0
)
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = (
1 # with a comment
)
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = [
1,
2,
3,
]
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = (
function()
)
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function(
arg1, arg2, arg3
)
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function(
[1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3
)
# long function name
normal_name = (
but_the_function_name_is_now_ridiculously_long_and_it_is_still_super_annoying()
)
normal_name = (
but_the_function_name_is_now_ridiculously_long_and_it_is_still_super_annoying(
arg1, arg2, arg3
)
)
normal_name = (
but_the_function_name_is_now_ridiculously_long_and_it_is_still_super_annoying(
[1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3
)
)
# long arguments
normal_name = normal_function_name(
"but with super long string arguments that on their own exceed the line limit so"
" there's no way it can ever fit",
"eggs with spam and eggs and spam with eggs with spam and eggs and spam with eggs"
" with spam and eggs and spam with eggs",
this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it=0,
)
string_variable_name = "a string that is waaaaaaaayyyyyyyy too long, even in parens, there's nothing you can do" # noqa
for key in """
hostname
port
username
""".split():
if key in self.connect_kwargs:
raise ValueError(err.format(key))
concatenated_strings = (
"some strings that are "
"concatenated implicitly, so if you put them on separate "
"lines it will fit"
)
del (
concatenated_strings,
string_variable_name,
normal_function_name,
normal_name,
need_more_to_make_the_line_long_enough,
)
| mit | 2,691,835,164,074,515,500 | 37.383178 | 126 | 0.702703 | false |
JulyKikuAkita/PythonPrac | cs15211/SwapAdjacentinLRString.py | 1 | 5766 | __source__ = 'https://leetcode.com/problems/swap-adjacent-in-lr-string/'
# Time: O(N)
# Space: O(1)
#
# Description: Leetcode # 777. Swap Adjacent in LR String
#
# In a string composed of 'L', 'R', and 'X' characters, like "RXXLRXRXL",
# a move consists of either replacing one occurrence of "XL" with "LX",
# or replacing one occurrence of "RX" with "XR".
# Given the starting string start and the ending string end,
# return True if and only if there exists a sequence of moves to transform one string to the other.
#
# Example:
#
# Input: start = "RXXLRXRXL", end = "XRLXXRRLX"
# Output: True
# Explanation:
# We can transform start to end following these steps:
# RXXLRXRXL ->
# XRXLRXRXL ->
# XRLXRXRXL ->
# XRLXXRRXL ->
# XRLXXRRLX
#
# Note:
# 1 <= len(start) = len(end) <= 10000.
# Both start and end will only consist of characters in {'L', 'R', 'X'}.
#
import unittest
import itertools
# 40ms 96.45%
class Solution(object):
def canTransform(self, start, end):
"""
:type start: str
:type end: str
:rtype: bool
"""
# For (i, x) and (j, y) in enumerate(start), enumerate(end)
# where x != 'X' and y != 'X',
# and where if one exhausts early, it's elements are (None, None),...
for (i, x), (j, y) in itertools.izip_longest(
((i, x) for i, x in enumerate(start) if x != 'X'),
((j, y) for j, y in enumerate(end) if y != 'X'),
fillvalue = (None, None)):
# If not solid or accessible, return False
if x != y or (x == 'L' and i < j) or (x == 'R' and i > j):
return False
return True
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/swap-adjacent-in-lr-string/solution/
#
Approach #1: Invariant [Accepted]
Complexity Analysis
Time Complexity: O(N), where N is the length of start and end.
Space Complexity: O(N). The replacement operation is O(N),
while the remaining operations use O(1) additional space.
We could amend the replace part of our algorithm to use pointers so as to reduce the total complexity to O(1)
# 24ms 35.91%
class Solution {
public boolean canTransform(String start, String end) {
if (!start.replace("X", "").equals(end.replace("X", ""))) return false;
int t = 0;
for (int i = 0; i < start.length(); i++) {
if (start.charAt(i) == 'L') {
while(end.charAt(t) != 'L') t++;
if (i < t++) return false;
}
}
t = 0;
for (int i = 0; i < start.length(); i++) {
if (start.charAt(i) == 'R') {
while (end.charAt(t) != 'R') t++;
if (i > t++) return false;
}
}
return true;
}
}
Approach #2: Two Pointers [Accepted]
Complexity Analysis
Time Complexity: O(N), where N is the length of start and end.
Space Complexity: O(1)
# 5ms 98.26%
class Solution {
public boolean canTransform(String start, String end) {
int N = start.length();
char[] S = start.toCharArray(), T = end.toCharArray();
int i = -1, j = -1;
while (++i < N && ++j < N) {
while (i < N && S[i] == 'X') i++;
while (j < N && T[j] == 'X') j++;
/* At this point, i == N or S[i] != 'X',
and j == N or T[j] != 'X'. i and j
are the indices representing the next
occurrences of non-X characters in S and T.
*/
// If only one of i < N and j < N, then it isn't solid-
// there's more people in one of the strings.
if ((i < N) ^ (j < N)) return false;
if (i < N && j < N) {
// If the person isn't the same, it isn't solid.
// Or, if the person moved backwards, it isn't accessible.
if (S[i] != T[j] || (S[i] == 'L' && i < j) ||
(S[i] == 'R' && i > j) )
return false;
}
}
return true;
}
}
# 7ms 77,80%
class Solution {
public boolean canTransform(String start, String end) {
        // the two strings should be the same after removing all the 'X'
        // XL to LX --> L will only move to the left
        // RX to XR --> R will only move to the right
// so, the position of R in the end String should >= that in the start String
// the position of L in the end String should <= that in the start String
char[] s = start.toCharArray();
char[] e = end.toCharArray();
int n = s.length;
int i = 0;
int j = 0;
while (i < n && j < n) {
// (1) ignore 'X'
while (i < n && s[i] == 'X') {
i++;
}
while (j < n && e[j] == 'X') {
j++;
}
// (2) check end of the string or not
if (i == n && j == n) {
return true;
}
if (i == n || j == n) {
return false;
}
// (3) check character is the same or not
            if (s[i] != e[j]) { // s[i] and e[j] should both be 'L' or both be 'R'
return false;
}
// (4) check character index support the rule or not.
if (s[i] == 'L' && i < j) {
return false;
}
if (s[i] == 'R' && i > j) {
return false;
}
i++;
j++;
}
return true;
}
}
'''
| apache-2.0 | 7,551,753,896,796,756,000 | 30.681319 | 109 | 0.4889 | false |
siusoon/Python_SPEEDSHOW | checkinactivity.py | 1 | 1953 | # logic: detect idle time from mac os (mouse and keyboard) and force to go to a specific website
# to run the program, administrator needs to set the sleeptime, temp_idle_value and url
# Firefox should be the default browser, hide all other applications and docking on the screen
# set no screensaver
# install fullscreen firefox add-ons (to maintain the full screen mode of your firefox browser): https://addons.mozilla.org/en-US/firefox/addon/resizeit/contribute/roadblock/?src=search&version=3.6.2
# put the file in desktop, then open terminal to go to Desktop directory, type: python [filename]
import sys,os,time
import webbrowser
# define variable
sleeptime = 10 # how often (in seconds) to poll the idle time
temp_idle_value = 60 # idle threshold in seconds; once exceeded, the browser is reset
url = "http://www.facebook.com"
def main_loop():
    global status  # status is initialised in __main__ and updated here; without this declaration Python raises UnboundLocalError
    while 1:
        time.sleep(sleeptime)
        # ioreg reports HIDIdleTime in nanoseconds; the perl one-liner divides by 1e9 to get seconds
        cmd = "ioreg -c IOHIDSystem | perl -ane 'if (/Idle/) {$idle=(pop @F)/1000000000; print $idle}'"
        result = os.popen(cmd) #use popen instead of os.system to capture the perl script's output
        str = result.read()
        temp_idle = int(str.split(".")[0])
        #print(str)
        if temp_idle > temp_idle_value and status == 0:
            resetBrowser()
            status = 1
        elif temp_idle > temp_idle_value and status == 1:
            print("do nothing")
        else:
            print("continue")
            status = 0
def resetBrowser():
result1 = os.system("ps axo pid,command | grep '[f]irefox'") #256 means not active, else will display a whole line
if result1 == 256:
print("firefox is inactive -> start a browser")
webbrowser.open_new(url) #workable
else:
print("should kill the browser then open a firefox")
os.system("killall -9 firefox")
time.sleep(5)
webbrowser.get("firefox")
webbrowser.open(url, new=0, autoraise=False)
if __name__ == '__main__':
try:
status = 0
main_loop()
except KeyboardInterrupt: #control+c in mac
print ('stop')
sys.exit(0)
| unlicense | -1,857,018,180,525,671,700 | 31.016393 | 199 | 0.680492 | false |
almeidapaulopt/frappe | frappe/utils/boilerplate.py | 1 | 9260 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
from six.moves import input
import frappe, os, re
from frappe.utils import touch_file, encode, cstr
def make_boilerplate(dest, app_name):
if not os.path.exists(dest):
print("Destination directory does not exist")
return
# app_name should be in snake_case
app_name = frappe.scrub(app_name)
hooks = frappe._dict()
hooks.app_name = app_name
app_title = hooks.app_name.replace("_", " ").title()
for key in ("App Title (default: {0})".format(app_title),
"App Description", "App Publisher", "App Email",
"App Icon (default 'octicon octicon-file-directory')",
"App Color (default 'grey')",
"App License (default 'MIT')"):
hook_key = key.split(" (")[0].lower().replace(" ", "_")
hook_val = None
while not hook_val:
hook_val = cstr(input(key + ": "))
if not hook_val:
defaults = {
"app_title": app_title,
"app_icon": "octicon octicon-file-directory",
"app_color": "grey",
"app_license": "MIT"
}
if hook_key in defaults:
hook_val = defaults[hook_key]
if hook_key=="app_name" and hook_val.lower().replace(" ", "_") != hook_val:
print("App Name must be all lowercase and without spaces")
hook_val = ""
elif hook_key=="app_title" and not re.match("^(?![\W])[^\d_\s][\w -]+$", hook_val, re.UNICODE):
print("App Title should start with a letter and it can only consist of letters, numbers, spaces and underscores")
hook_val = ""
hooks[hook_key] = hook_val
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, frappe.scrub(hooks.app_title)),
with_init=True)
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "templates"), with_init=True)
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "www"))
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "templates",
"pages"), with_init=True)
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "templates",
"includes"))
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "config"), with_init=True)
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "public",
"css"))
frappe.create_folder(os.path.join(dest, hooks.app_name, hooks.app_name, "public",
"js"))
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "__init__.py"), "w") as f:
f.write(frappe.as_unicode(init_template))
with open(os.path.join(dest, hooks.app_name, "MANIFEST.in"), "w") as f:
f.write(frappe.as_unicode(manifest_template.format(**hooks)))
with open(os.path.join(dest, hooks.app_name, ".gitignore"), "w") as f:
f.write(frappe.as_unicode(gitignore_template.format(app_name = hooks.app_name)))
with open(os.path.join(dest, hooks.app_name, "setup.py"), "w") as f:
f.write(frappe.as_unicode(setup_template.format(**hooks)))
with open(os.path.join(dest, hooks.app_name, "requirements.txt"), "w") as f:
f.write("frappe")
with open(os.path.join(dest, hooks.app_name, "README.md"), "w") as f:
f.write(frappe.as_unicode("## {0}\n\n{1}\n\n#### License\n\n{2}".format(hooks.app_title,
hooks.app_description, hooks.app_license)))
with open(os.path.join(dest, hooks.app_name, "license.txt"), "w") as f:
f.write(frappe.as_unicode("License: " + hooks.app_license))
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "modules.txt"), "w") as f:
f.write(frappe.as_unicode(hooks.app_title))
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "hooks.py"), "w") as f:
f.write(frappe.as_unicode(hooks_template.format(**hooks)))
touch_file(os.path.join(dest, hooks.app_name, hooks.app_name, "patches.txt"))
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "config", "desktop.py"), "w") as f:
f.write(frappe.as_unicode(desktop_template.format(**hooks)))
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "config", "docs.py"), "w") as f:
f.write(frappe.as_unicode(docs_template.format(**hooks)))
print("'{app}' created at {path}".format(app=app_name, path=os.path.join(dest, app_name)))
manifest_template = """include MANIFEST.in
include requirements.txt
include *.json
include *.md
include *.py
include *.txt
recursive-include {app_name} *.css
recursive-include {app_name} *.csv
recursive-include {app_name} *.html
recursive-include {app_name} *.ico
recursive-include {app_name} *.js
recursive-include {app_name} *.json
recursive-include {app_name} *.md
recursive-include {app_name} *.png
recursive-include {app_name} *.py
recursive-include {app_name} *.svg
recursive-include {app_name} *.txt
recursive-exclude {app_name} *.pyc"""
init_template = """# -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.0.1'
"""
hooks_template = """# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import __version__ as app_version
app_name = "{app_name}"
app_title = "{app_title}"
app_publisher = "{app_publisher}"
app_description = "{app_description}"
app_icon = "{app_icon}"
app_color = "{app_color}"
app_email = "{app_email}"
app_license = "{app_license}"
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/{app_name}/css/{app_name}.css"
# app_include_js = "/assets/{app_name}/js/{app_name}.js"
# include js, css files in header of web template
# web_include_css = "/assets/{app_name}/css/{app_name}.css"
# web_include_js = "/assets/{app_name}/js/{app_name}.js"
# include js in page
# page_js = {{"page" : "public/js/file.js"}}
# include js in doctype views
# doctype_js = {{"doctype" : "public/js/doctype.js"}}
# doctype_list_js = {{"doctype" : "public/js/doctype_list.js"}}
# doctype_tree_js = {{"doctype" : "public/js/doctype_tree.js"}}
# doctype_calendar_js = {{"doctype" : "public/js/doctype_calendar.js"}}
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {{
# "Role": "home_page"
# }}
# Website user home page (by function)
# get_website_user_home_page = "{app_name}.utils.get_home_page"
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Installation
# ------------
# before_install = "{app_name}.install.before_install"
# after_install = "{app_name}.install.after_install"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "{app_name}.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
# permission_query_conditions = {{
# "Event": "frappe.desk.doctype.event.event.get_permission_query_conditions",
# }}
#
# has_permission = {{
# "Event": "frappe.desk.doctype.event.event.has_permission",
# }}
# Document Events
# ---------------
# Hook on document methods and events
# doc_events = {{
# "*": {{
# "on_update": "method",
# "on_cancel": "method",
# "on_trash": "method"
# }}
# }}
# Scheduled Tasks
# ---------------
# scheduler_events = {{
# "all": [
# "{app_name}.tasks.all"
# ],
# "daily": [
# "{app_name}.tasks.daily"
# ],
# "hourly": [
# "{app_name}.tasks.hourly"
# ],
# "weekly": [
# "{app_name}.tasks.weekly"
# ]
# "monthly": [
# "{app_name}.tasks.monthly"
# ]
# }}
# Testing
# -------
# before_tests = "{app_name}.install.before_tests"
# Overriding Whitelisted Methods
# ------------------------------
#
# override_whitelisted_methods = {{
# "frappe.desk.doctype.event.event.get_events": "{app_name}.event.get_events"
# }}
"""
desktop_template = """# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{{
"module_name": "{app_title}",
"color": "{app_color}",
"icon": "{app_icon}",
"type": "module",
"label": _("{app_title}")
}}
]
"""
setup_template = """# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from pip.req import parse_requirements
import re, ast
# get version from __version__ variable in {app_name}/__init__.py
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('{app_name}/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
requirements = parse_requirements("requirements.txt", session="")
setup(
name='{app_name}',
version=version,
description='{app_description}',
author='{app_publisher}',
author_email='{app_email}',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=[str(ir.req) for ir in requirements],
dependency_links=[str(ir._link) for ir in requirements if ir._link]
)
"""
gitignore_template = """.DS_Store
*.pyc
*.egg-info
*.swp
tags
{app_name}/docs/current"""
docs_template = '''"""
Configuration for docs
"""
# source_link = "https://github.com/[org_name]/{app_name}"
# docs_base_url = "https://[org_name].github.io/{app_name}"
# headline = "App that does everything"
# sub_heading = "Yes, you got that right the first time, everything"
def get_context(context):
context.brand_html = "{app_title}"
'''
| mit | 8,096,362,969,132,424,000 | 28.303797 | 117 | 0.653888 | false |
mrakitin/coding | route_tables_update/add_route.py | 1 | 1944 | __author__ = 'mrakitin'
import os
import socket
import subprocess
address_to_route = None
qsh_ip = '192.12.90.0'
qsh_ip_mask = '255.255.255.0'
# Find IP address provided by SBU VPN:
ips_dict = {}
for i in socket.getaddrinfo(socket.gethostname(), None):
ip = i[4][0]
try:
socket.inet_aton(ip)
ipv4 = True
except socket.error:
ipv4 = False
if ipv4:
key, none, value = socket.gethostbyaddr(ip)
ips_dict[key] = value[0]
for key in ips_dict.keys():
if key.find('stonybrook.edu') >= 0:
address_to_route = ips_dict[key]
break
# Delete the route first in case it existed:
try:
cmd_del = ['route', 'delete', qsh_ip]
out_del = subprocess.check_output(cmd_del, stderr=subprocess.STDOUT)
status_del = out_del.strip()
if status_del.find('OK') >= 0:
print 'Route %s has been deleted.' % (qsh_ip)
elif status_del.find('The route deletion failed: Element not found.') >= 0:
# print 'WARNING! ' + status_add
pass
else:
print 'Unknown error occurred during deletion.'
except:
print 'WARNING! Route %s has not been deleted.' % (qsh_ip)
# Add a new route if the VPN-provided address is found:
if address_to_route:
cmd = ['route', 'add', qsh_ip, 'mask', qsh_ip_mask, address_to_route]
try:
print 'The following command will be executed:\n\n\t%s\n' % (' '.join(cmd))
out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
status_add = out.strip()
if status_add.find('OK') >= 0:
print 'Addition was successful.\n'
os.system('route print')
elif status_add.find('The route addition failed') >= 0:
print 'ERROR! ' + status_add
else:
print 'Unknown error occurred during addition.'
except:
pass
else:
print 'ERROR! The VPN interface is not connected. The route to %s has not been added.' % (qsh_ip)
| gpl-2.0 | 4,063,799,232,763,459,000 | 28.907692 | 101 | 0.609053 | false |
moio/spacewalk | backend/server/rhnServer/server_wrapper.py | 1 | 3967 | #
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# this file implements the ServerWrapper class, which takes care
# of all the load and save functions for misc tables associated
# with a server (such as packages, hardware, history)
#
# the server.Server class inherits this ServerWrapper class
#
from server_hardware import Hardware
from server_packages import Packages
from server_history import History
from server_solarispatches import SolarisPatches
from rhn.UserDictCase import UserDictCase
from spacewalk.server import rhnSQL
class ServerWrapper(Packages, Hardware, History, SolarisPatches):
""" This is a middle class that ties all the subclasses together, plus it
provides a cleaner way to keep all the wrapper functions in one place.
The main Server class is based on this one and it looks a little bit
cleaner that way.
"""
def __init__(self):
self.server = UserDictCase()
Packages.__init__(self)
History.__init__(self)
Hardware.__init__(self)
SolarisPatches.__init__(self)
def __repr__(self):
return "<%s instance>" % (self.__class__,)
def set_value(self, name, value):
""" update a value in self.server """
if name is None or value is None:
return -1
self.server[name] = value
return 0
###
### PACKAGES
###
def add_package(self, entry):
""" Wrappers for the similar functions from Packages class that supplementaly
require a valid sysid.
"""
if entry['name'].startswith("patch-solaris"):
SolarisPatches.add_patch(self, self.server.get("id"), entry)
return Packages.add_package(self, self.server.get("id"), entry)
def delete_package(self, entry):
return Packages.delete_package(self, self.server.get("id"), entry)
def dispose_packages(self):
SolarisPatches.dispose_patched_packages(self, self.server["id"])
return Packages.dispose_packages(self, self.server["id"])
def save_packages(self, schedule=1):
""" wrapper for the Packages.save_packages_byid() which requires the sysid """
SolarisPatches.save_patched_packages(self, self.server["id"])
ret = self.save_packages_byid(self.server["id"], schedule=schedule)
# this function is primarily called from outside
# so we have to commit here
rhnSQL.commit()
return ret
###
### HARDWARE
###
def delete_hardware(self):
""" Wrappers for the similar functions from Hardware class """
return Hardware.delete_hardware(self, self.server.get("id"))
def save_hardware(self):
""" wrapper for the Hardware.save_hardware_byid() which requires the sysid """
ret = self.save_hardware_byid(self.server["id"])
# this function is primarily called from outside
# so we have to commit here
rhnSQL.commit()
return ret
def reload_hardware(self):
""" wrapper for the Hardware.reload_hardware_byid() which requires the sysid """
ret = self.reload_hardware_byid(self.server["id"])
return ret
###
### HISTORY
###
def save_history(self):
ret = self.save_history_byid(self.server["id"])
# this function is primarily called from outside
# so we have to commit here
rhnSQL.commit()
return ret
| gpl-2.0 | -8,041,331,313,070,005,000 | 35.394495 | 88 | 0.663222 | false |
SalesforceEng/Providence | Empire/cloudservices/github/GithubAPI.py | 1 | 5227 | '''
Copyright (c) 2015, Salesforce.com, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
"""
GitHubCommit - Convert JSON message from GH into an object representative of the commit
GitHubRepo - Represent the basic information needed to interact with a GH repo
GitHubAPI - Send and receive data from the REST API
"""
# TODO:
# - Pagination the github way
# - Groups / Users / Org
# - Security
# - Stale branches
import sys
import os.path
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")))
from creds.credentials import Credentials
import requests
import urllib
import datetime
import pytz
import json
import logging
logger = logging.getLogger('GithubAPI')
__copyright__ = "2015 Salesforce.com, Inc"
__status__ = "Prototype"
class GithubAPI(object):
def __init__(self, server, credentials):
self.server = server
self.credentials = credentials
self._no_more_requests_until = None
def fetch(self, url, params=None, post_data=None):
        if self._no_more_requests_until:
            # Still inside the rate-limit window: skip this request until the reset time has passed.
            if datetime.datetime.utcnow() < self._no_more_requests_until:
                return None
            self._no_more_requests_until = None
r = None
if post_data:
raise NotImplementedError("GithubAPI Post unimplemented")
return
else:
if self.credentials:
r = requests.get(url, params=params, headers={ "Authorization":self.credentials.authorizationHeaderValue() })
else:
r = requests.get(url, params=params)
if r.headers.get('x-ratelimit-remaining'):
remaining_requests = int(r.headers['x-ratelimit-remaining'])
if (remaining_requests == 0):
logger.warning("Github API hit the rate limiter")
self._no_more_requests_until = datetime.datetime.fromtimestamp(float(r.headers.get('x-ratelimit-reset')));
return None
if(r.ok):
results = r.json()
return results
logger.warning("Github fetch of %s failed\n%s\n",r.url,r.text)
return None
def fetch_raw(self, url):
        if self._no_more_requests_until:
            # Still inside the rate-limit window: skip this request until the reset time has passed.
            if datetime.datetime.utcnow() < self._no_more_requests_until:
                return None
            self._no_more_requests_until = None
r = None
if self.credentials:
r = requests.get(url, headers={ "Authorization":self.credentials.authorizationHeaderValue(),"Accept":"application/vnd.github.v3.raw" })
else:
r = requests.get(url)
if r.headers.get('x-ratelimit-remaining'):
remaining_requests = int(r.headers['x-ratelimit-remaining'])
if (remaining_requests == 0):
logger.warning("Github API hit the rate limiter")
self._no_more_requests_until = datetime.datetime.fromtimestamp(float(r.headers.get('x-ratelimit-reset')));
return None
if(r.ok):
results = r.text
return results
logger.warning("Github fetch of %s failed\n%s\n",r.url,r.text)
return None
def baseURL(self, org_name=None, repo_name=None):
baseurl = 'https://%s' % (self.server)
if repo_name is not None:
baseurl += "/repos/%s/%s" % (org_name, repo_name)
elif org_name is not None:
baseurl += "/orgs/%s" % (org_name)
return baseurl
if __name__ == "__main__":
creds = Credentials("github")
git = GithubAPI(GithubRepo('api.github.com', 'salesforce','providence'), creds)
bugs = git.issues(params={"labels":"bug,security","state":"all","since":"2015-02-01T00:00:00Z"})
import json
print json.dumps(bugs, indent=2)
if bugs:
for bug in bugs:
print bug["title"], bug["state"]
| bsd-3-clause | -3,992,668,494,168,806,400 | 44.850877 | 755 | 0.664817 | false |
santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/AuthKit-0.4.0-py2.5.egg/authkit/authenticate/__init__.py | 1 | 19006 | """Authentication middleware
This module provides one piece of middleware named
``authkit.authenticate.middleware`` which is used to intercept responses with
a specified status code, present a user with a means of authenticating
themselves and handle the sign in process.
Each of the authentication methods supported by the middleware is described in
detail in the main AuthKit manual. The methods include:
* HTTP Basic (``basic``)
* HTTP Digest (``digest``)
* OpenID Passurl (``openid``)
* Form and Cookie (``form``)
* Forward (``forward``)
* Redirect (``redirect``)
The authenticate middleware can be configured directly or by means of a Paste
deploy config file as used by Pylons. It can be used directly like this:
.. code-block:: Python
from authkit.authenticate import middleware, test_app
from paste.httpserver import serve
import sys
app = middleware(
test_app,
enable = True,
method = 'passurl',
cookie_secret='some_secret',
)
serve(app, host='0.0.0.0', port=8000)
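
A rough sketch of the equivalent Paste deploy configuration is shown below; the
``authkit.setup.*`` keys are documented in this module, while
``authkit.cookie.secret`` simply follows the prefix/underscore mapping of the
``cookie_secret`` option above, so treat the exact spelling as illustrative:

.. code-block:: INI

    authkit.setup.enable = true
    authkit.setup.method = passurl
    authkit.cookie.secret = some_secret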
"""
import types
import warnings
import logging
import os
import os.path
from paste.util.import_string import eval_import
from multi import MultiHandler, status_checker
from pkg_resources import iter_entry_points, load_entry_point
from paste.deploy.converters import asbool
from paste.httpexceptions import HTTPExceptionHandler
from authkit.authorize import authorize_request
from authkit.permissions import RemoteUser, no_authkit_users_in_environ, \
AuthKitConfigError
# Main middleware base classes
class AuthKitAuthHandler(object):
"""
The base class for all middleware responsible for handling
authentication and setting whatever needs to be set so that the
``AuthKitUserSetter`` middleware can set REMOTE_USER on subsequent
requests. ``AuthKitAuthHandler``s only get inserted into the
middleware stack if an appropriate status code (as set in the
``authkit.setup.intercept`` config option) is intercepted by the
authentication middleware.
"""
pass
class AuthKitUserSetter(object):
"""
The base class for all middleware responsible for attempting to set
REMOTE_USER on each request. The class is overridden by the induvidual
handlers.
"""
pass
# Setting up logging
log = logging.getLogger('authkit.authenticate')
def strip_base(conf, base):
result = {}
for key in conf.keys():
if key.startswith(base):
result[key[len(base):]] = conf[key]
return result
def swap_underscore(*confs):
results = []
for conf in confs:
result = {}
for k,v in conf.items():
result[k.replace('.','_')] = v
results.append(result)
return results
def valid_password(environ, username, password):
"""
A function which can be used with the ``basic`` and ``form`` authentication
    methods to validate a username and password.
This implementation is used by default if no other method is specified. It
    checks for an ``authkit.users`` object present in the ``environ``
dictionary under the ``authkit.users`` key and uses the information there
to validate the username and password.
In this implementation usernames are case insensitive and passwords are
case sensitive. The function returns ``True`` if the user ``username`` has
the password specified by ``password`` and returns ``False`` if the user
doesn't exist or the password is incorrect.
If you create and specify your own ``authkit.users`` object with the same
API, this method will also work correctly with your custom solution. See
the AuthKit manual for information on the user management api, how to
specify a different ``authkit.users`` object (say to read user information
from a file rather than have it specified directly) and for information on
how to create your own ``Users`` objects.
"""
log.debug("valid_password called. username: %s", username)
if not environ.has_key('authkit.users'):
raise no_authkit_users_in_environ
users = environ['authkit.users']
if not users.user_exists(username):
return False
elif users.user_has_password(username.lower(), password):
return True
return False
def digest_password(environ, realm, username):
"""
This is similar to ``valid_password()`` but is used with the ``digest``
authentication method and rather than checking a username and password and
returning ``True`` or ``False`` it takes the realm and username as input,
    looks up the correct password and returns a digest by calling the
``authkit.authenticate.digest.digest_password()`` function with the
parameters ``realm``, ``username`` and ``password`` respectively. The
digest returned is then compared with the one submitted by the browser.
As with ``valid_password()`` this method is designed to work with the user
management API so you can use it with ``authkit.users`` objects or your own
custom ``Users`` objects. Alternatively you can specify your own function
which can lookup the password in whichever way you prefer, perhaps from a
database or LDAP connection.
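
    For illustration only, a replacement function might look like the sketch
    below, assuming it is called with the same ``(environ, realm, username)``
    arguments as this default implementation; ``lookup_password_somehow`` is a
    hypothetical helper, not part of AuthKit:

    .. code-block:: Python

        def my_digest_password(environ, realm, username):
            password = lookup_password_somehow(username)  # hypothetical lookup
            if password is None:
                return None
            return digest.digest_password(realm, username, password)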
Only required if you intend to use HTTP digest authentication.
"""
log.debug(
"digest_password called. username: %s, realm: %s", username, realm
)
if not environ.has_key('authkit.users'):
raise no_authkit_users_in_environ
users = environ['authkit.users']
if users.user_exists(username):
password = users.user(username)['password']
return digest.digest_password(realm, username, password)
    # After speaking to Clark Evans who wrote the original code, this is the
# correct thing:
return None
def get_authenticate_function(app, authenticate_conf, format, prefix):
"""
Sets up the users object, adds the middleware to add the users object
to the environ and then returns authenticate methods to check a password
and a digest.
"""
function = None
users = None
if len(authenticate_conf) < 1:
raise AuthKitConfigError('Expected at least one authenticate key, not'
' %r'%authenticate_conf)
if authenticate_conf.keys() == ['function']:
function = authenticate_conf['function']
if isinstance(function, (str, unicode)):
function = eval_import(function)
else:
user_conf = strip_base(authenticate_conf, 'user.')
if not user_conf:
raise AuthKitConfigError('No authenticate function or users specified')
else:
if user_conf.has_key('encrypt'):
enc_func = eval_import(user_conf['encrypt'])
secret = user_conf.get('encrypt.secret','')
def encrypt(password):
return enc_func(password, secret)
else:
encrypt = None
user_object = 'authkit.users.UsersFromString'
if 'type' in user_conf.keys():
user_object = user_conf['type']
if isinstance(user_object, (str, unicode)):
user_object = eval_import(user_object)
users = user_object(user_conf['data'], encrypt)
app = AddToEnviron(app, 'authkit.users', users)
log.debug("authkit.users added to environ")
if format == 'basic':
function = valid_password
log.debug("valid_password chosen %r", function)
elif format == 'digest':
log.debug("digest_password chosen %r", function)
function = digest_password
else:
raise Exception('Invalid format for authenticate function %r'
% format)
return app, function, users
def get_template(template_conf, prefix):
"""
Another utility method to reduce code duplication. This function parses a
template from one of the available template options:
``string``
The template as a string
``file``
A file containing the template
``obj``
A paste eval_import string or callable which returns a string
authkit.form.template.string =
authkit.form.template.file =
authkit.form.template.obj =
"""
template = None
if len(template_conf) != 1:
raise AuthKitConfigError('Expected one template entry, not %r' %
(', '.join(template_conf.keys())))
if template_conf.keys()[0] not in ['string', 'file', 'obj']:
raise AuthKitConfigError("Template option can only be 'string', 'file'"
" or 'obj'")
if template_conf.keys()[0] == 'string':
template = template_conf['string']
elif template_conf.keys()[0] == 'file':
if not os.path.exists(template_conf['file']):
raise AuthKitConfigError('No such file %r exists. It was specified'
' by config option %r' %
(template_conf['file'], prefix+'file'))
fp = open(template_conf['file'], 'r')
template = fp.read()
fp.close()
if not template:
raise AuthKitConfigError('No data in template file %s specified by'
' config option %r' %
(template_conf['file'], prefix+'file'))
elif template_conf.keys()[0] == 'obj':
template = eval_import(template_conf['obj'])
if not template:
raise AuthKitConfigError('No data in template obj %s specified by '
'config option %r' %
(template_conf['obj'], prefix+'obj'))
else:
raise AuthKitConfigError("Unknown option %r" %
(prefix+template_conf.keys()[0]))
if not template:
raise AuthKitConfigError("The template loaded did not contain any data")
if isinstance(template, (str, unicode)):
def render_template():
return template
return render_template
return template
#
# Main middleware creator
#
class AddToEnviron(object):
"""
Simple middleware which adds a key to the ``environ`` dictionary.
Used to add the ``authkit.users`` key to the environ when this is
appropriate.
"""
def __init__(self, app, key, object):
self.app = app
self.key = key
self.object = object
def __call__(self, environ, start_response):
environ[self.key] = self.object
return self.app(environ, start_response)
class AddDictToEnviron(object):
"""Simple middleware which adds the values of a dict to the environ."""
def __init__(self, app, dct):
self.app = app
self.dct = dct
def __call__(self, environ, start_response):
environ.update(self.dct)
return self.app(environ, start_response)
class RequireEnvironKey(object):
def __init__(self, app, key, missing_error=None):
self.app = app
self.key = key
self.missing_error = missing_error or \
'Missing the key %(key)s from the environ. Have you setup the ' \
'correct middleware?'
def __call__(self, environ, start_response):
if not environ.has_key(self.key):
raise Exception(self.missing_error%{'key':self.key})
return self.app(environ, start_response)
def get_methods():
"""Get a dictionary of the available method entry points."""
available_methods = {}
for method_handler in iter_entry_points(group='authkit.method', name=None):
available_methods[method_handler.name] = method_handler
return available_methods
def load_method(name, from_these=None):
if from_these:
return from_these[name].load()
else:
return load_entry_point('AuthKit','authkit.method',name)
def load_config(options, app_conf, prefix):
merged = strip_base(app_conf, prefix)
    # Now override the merged config with the manually specified options
    for key, value in options.items():
        if merged.has_key(key):
            warnings.warn(
                'Key %s with value %r set in the config file is being '
                'replaced with value %r set in the application' % (
                    key,
                    merged[key],
                    value
                )
            )
merged[key.replace('_','.')] = value
return merged
def middleware(app, app_conf=None, global_conf=None, prefix='authkit.',
handle_httpexception=True, middleware=None, **options):
"""
This function sets up the AuthKit authenticate middleware and its use and
options are described in detail in the AuthKit manual.
The function takes the following arguments and returns a WSGI application
wrapped in the appropriate AuthKit authentication middleware based on the
options specified:
``app``
The WSGI application the authenticate middleware should wrap
``app_conf``
A paste deploy ``app_conf`` dictionary to be used to setup the
middleware
``global_conf``
A paste deploy ``global_conf`` dictionary
``prefix``
The prefix which all authkit related options in the config file will
have prefixed to their names. This defaults to ``authkit.`` and
shouldn't normally need overriding.
``middleware``
A make_middleware function which should be called directly instead of
loading and calling a function based on the method name. If this is
        set then ``authkit.setup.method`` should not be set.
``**options``
Any AuthKit options which are setup directly in Python code. If
        specified, these options will override any options specified in a config
file.
All option names specified in the config file will have their prefix
removed and any ``.`` characters replaced by ``_`` before the options
specified by ``options`` are merged in. This means that the the option
``authkit.cookie.name`` specified in a config file sets the same options as
``cookie_name`` specified directly as an option.
"""
if handle_httpexception:
app = HTTPExceptionHandler(app)
# Configure the config files
if global_conf is None:
global_conf = {}
if app_conf is None:
app_conf = {}
if not isinstance(app_conf, dict):
raise AuthKitConfigError(
"Expected app_conf to be paste deploy app_conf dictionary "
"from not %r" % app_conf
)
# Merge config file and options
available_methods = get_methods()
all_conf = load_config(options, app_conf, prefix)
if middleware is not None and all_conf.has_key('setup.method'):
raise AuthKitConfigError(
'You cannot specify a middleware function '
'and an authkit.setup.method'
)
if not middleware and not all_conf.has_key('setup.method'):
raise AuthKitConfigError('No authkit.setup.method was specified')
# Check to see if middleware is disabled
if asbool(all_conf.get('setup.enable', True)) == False:
warnings.warn("AuthKit middleware has been turned off by the config "
"option authkit.setup.enable")
return app
# Status Checking/Changing Middleware
intercept = [str(x).strip() for x in \
all_conf.get('setup.intercept','401').split(',')]
if not '401' in intercept:
warnings.warn(
"AuthKit is configured via the authkit.setup.intercept option not "
"to intercept 401 responses so the authentication middleware will "
"not be triggered even if a 401 Unauthenticated response is "
"returned.")
if middleware:
prefix_ = prefix
app = middleware(
app,
auth_conf=all_conf,
app_conf=app_conf,
global_conf=global_conf,
prefix=prefix_,
)
else:
methods = [method.strip() for method in all_conf['setup.method'].split(',')]
log.debug("Trying to load the following methods: %r", methods)
for method in methods:
if method in ['setup','config']:
raise AuthKitConfigError("The name %s is reserved cannot be used "
"as a method name" % method)
if not available_methods.has_key(method):
raise AuthKitConfigError(
'The authkit method %r is not available. The available methods '
'are %s and %s'%(
all_conf['setup.method'],
', '.join(available_methods.keys()[:-1]),
available_methods.keys()[-1],
)
)
prefix_ = prefix+method+'.'
auth_conf = strip_base(all_conf, method+'.')
app = available_methods[method].load()(
app,
auth_conf=auth_conf,
app_conf=app_conf,
global_conf=global_conf,
prefix=prefix_,
)
app = AddDictToEnviron(
app,
{
'authkit.config':strip_base(all_conf, 'config.'),
'authkit.intercept':intercept,
'authkit.authenticate': True,
}
)
return app
def sample_app(environ, start_response):
"""
A sample WSGI application that returns a 401 status code when the path
``/private`` is entered, triggering the authenticate middleware to
prompt the user to sign in.
If used with the authenticate middleware's form method, the path
``/signout`` will display a signed out message if
``authkit.cookie.signout = /signout`` is specified in the config file.
If used with the authenticate middleware's forward method, the path
``/signin`` should be used to display the sign in form.
The path ``/`` always displays the environment.
"""
if environ['PATH_INFO']=='/private':
authorize_request(environ, RemoteUser())
if environ['PATH_INFO'] == '/signout':
start_response('200 OK', [('Content-type', 'text/plain; charset=UTF-8')])
if environ.has_key('REMOTE_USER'):
return ["Signed Out"]
else:
return ["Not signed in"]
elif environ['PATH_INFO'] == '/signin':
start_response('200 OK', [('Content-type', 'text/plain; charset=UTF-8')])
return ["Your application would display a \nsign in form here."]
else:
start_response('200 OK', [('Content-type', 'text/plain; charset=UTF-8')])
result = ['You Have Access To This Page.\n\nHere is the environment...\n\n']
for k,v in environ.items():
result.append('%s: %s\n'%(k,v))
return result
| bsd-3-clause | 4,010,506,043,394,868,000 | 37.318548 | 84 | 0.620225 | false |
noahbenson/neuropythy | neuropythy/graphics/__init__.py | 1 | 1109 | ####################################################################################################
# neuropythy/graphics/__init__.py
# Simple tools for making matplotlib/pyplot graphics with neuropythy.
# By Noah C. Benson
'''
The neuropythy.graphics package contains definitions of the various tools for making plots with
cortical data. The primary entry point is the function cortex_plot.
'''
from .core import (
cmap_curvature,
cmap_polar_angle_sym, cmap_polar_angle_lh, cmap_polar_angle_rh, cmap_polar_angle,
cmap_theta_sym, cmap_theta_lh, cmap_theta_rh, cmap_theta,
cmap_eccentricity, cmap_log_eccentricity, cmap_radius, cmap_log_radius,
cmap_cmag, cmap_log_cmag, label_cmap,
vertex_curvature_color, vertex_weight,
vertex_angle, vertex_eccen, vertex_sigma, vertex_varea,
vertex_angle_color, vertex_eccen_color, vertex_sigma_color, vertex_varea_color,
angle_colors, eccen_colors, sigma_colors, radius_colors, varea_colors, to_rgba,
color_overlap, visual_field_legend, curvature_colors, cortex_plot, cortex_plot_colors,
ROIDrawer, trace_roi, scale_for_cmap)
| agpl-3.0 | -797,286,915,728,350,000 | 49.409091 | 100 | 0.678088 | false |
NMisko/monkalot | bot/commands/speech_cleverbot.py | 1 | 1114 | """Commands: "@[botname] XXXXX"."""
from cleverwrap import CleverWrap
from bot.commands.abstract.speech import Speech, Chatbot
class CleverbotSpeech(Speech):
"""Natural language by using cleverbot."""
def __init__(self, bot):
"""Initialize variables."""
if "cleverbot_key" in bot.config and bot.config["cleverbot_key"] != "":
self.chatbot = Cleverbot(bot.config["cleverbot_key"])
else:
raise RuntimeError(
"Cleverbot instantiated, but no key set in configuration."
)
class Cleverbot(Chatbot):
"""A replier that uses cleverbot."""
name = "cleverbot"
def __init__(self, key):
self.cleverbot_key = key
self.conversations = {}
def get_reply(self, message, name):
"""Get a reply from cleverbot api."""
if name not in self.conversations:
self.conversations[name] = CleverWrap(self.cleverbot_key, name)
return self.conversations[name].say(message)
def get_name(self):
"""Returns name or short description for this bot."""
return self.name
| mit | 3,116,736,034,507,032,600 | 29.108108 | 79 | 0.618492 | false |
SukkoPera/audiotrans | AudioTrans/Encoder.py | 1 | 2300 | #!/usr/bin/env python
###########################################################################
# Copyright (C) 2008-2016 by SukkoPera #
# [email protected] #
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the #
# Free Software Foundation, Inc., #
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. #
###########################################################################
from BaseCoder import BaseCoder, MissingCoderExe
import Process
#import Quality
class EncoderFactory (BaseCoder):
def __init__ (self):
try:
BaseCoder.__init__ (self)
print "Using \"%s\" as \"%s\" encoder" % (self.executablePath, "/".join (self.supportedExtensions))
except:
raise MissingCoderExe ("Cannot find \"%s\" (\"%s\" encoder) in path" % (self.executable, "/".join (self.supportedExtensions)))
def getEncoder (self, outFilename, quality = None):
try:
if quality is None:
quality = self.defaultQuality
argv = self.makeCmdLine (outFilename, quality)
enc = Process.EncoderProcess (argv)
return enc, self.endianness
except Exception, ex:
print "Exception in getEncoder(): %s" % ex.message
raise
#def __del__ (self):
#print "Encoder for \"%s\" being destroyed!" % self.outfileext
| gpl-3.0 | -316,361,670,134,837,250 | 45.938776 | 129 | 0.508696 | false |
elena/django | tests/cache/tests.py | 9 | 104310 | # Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
import copy
import io
import os
import pickle
import re
import shutil
import sys
import tempfile
import threading
import time
import unittest
import warnings
from pathlib import Path
from unittest import mock, skipIf
from django.conf import settings
from django.core import management, signals
from django.core.cache import (
DEFAULT_CACHE_ALIAS, CacheHandler, CacheKeyWarning, InvalidCacheKey, cache,
caches,
)
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.cache.utils import make_template_fragment_key
from django.db import close_old_connections, connection, connections
from django.http import (
HttpRequest, HttpResponse, HttpResponseNotModified, StreamingHttpResponse,
)
from django.middleware.cache import (
CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware,
)
from django.middleware.csrf import CsrfViewMiddleware
from django.template import engines
from django.template.context_processors import csrf
from django.template.response import TemplateResponse
from django.test import (
RequestFactory, SimpleTestCase, TestCase, TransactionTestCase,
ignore_warnings, override_settings,
)
from django.test.signals import setting_changed
from django.utils import timezone, translation
from django.utils.cache import (
get_cache_key, learn_cache_key, patch_cache_control, patch_vary_headers,
)
from django.utils.deprecation import RemovedInDjango41Warning
from django.views.decorators.cache import cache_control, cache_page
from .models import Poll, expensive_calculation
# functions/classes for complex data type tests
def f():
return 42
class C:
def m(n):
return 24
class Unpicklable:
def __getstate__(self):
raise pickle.PickleError()
def empty_response(request):
return HttpResponse()
KEY_ERRORS_WITH_MEMCACHED_MSG = (
'Cache key contains characters that will cause errors if used with '
'memcached: %r'
)
@override_settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
})
class DummyCacheTests(SimpleTestCase):
# The Dummy cache backend doesn't really behave like a test backend,
# so it has its own test case.
def test_simple(self):
"Dummy cache backend ignores cache set calls"
cache.set("key", "value")
self.assertIsNone(cache.get("key"))
def test_add(self):
"Add doesn't do anything in dummy cache backend"
self.assertIs(cache.add("addkey1", "value"), True)
self.assertIs(cache.add("addkey1", "newvalue"), True)
self.assertIsNone(cache.get("addkey1"))
def test_non_existent(self):
"Nonexistent keys aren't found in the dummy cache backend"
self.assertIsNone(cache.get("does_not_exist"))
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
"get_many returns nothing for the dummy cache backend"
cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'c', 'd']), {})
self.assertEqual(cache.get_many(['a', 'b', 'e']), {})
def test_get_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces'
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.get_many(['key with spaces'])
def test_delete(self):
"Cache deletion is transparently ignored on the dummy cache backend"
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertIsNone(cache.get("key1"))
self.assertIs(cache.delete("key1"), False)
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_has_key(self):
"The has_key method doesn't ever return True for the dummy cache backend"
cache.set("hello1", "goodbye1")
self.assertIs(cache.has_key("hello1"), False)
self.assertIs(cache.has_key("goodbye1"), False)
def test_in(self):
"The in operator doesn't ever return True for the dummy cache backend"
cache.set("hello2", "goodbye2")
self.assertNotIn("hello2", cache)
self.assertNotIn("goodbye2", cache)
def test_incr(self):
"Dummy cache values can't be incremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.incr('answer')
with self.assertRaises(ValueError):
cache.incr('does_not_exist')
def test_decr(self):
"Dummy cache values can't be decremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.decr('answer')
with self.assertRaises(ValueError):
cache.decr('does_not_exist')
def test_touch(self):
"""Dummy cache can't do touch()."""
self.assertIs(cache.touch('whatever'), False)
def test_data_types(self):
"All data types are ignored equally by the dummy cache"
stuff = {
'string': 'this is a string',
'int': 42,
'list': [1, 2, 3, 4],
'tuple': (1, 2, 3, 4),
'dict': {'A': 1, 'B': 2},
'function': f,
'class': C,
}
cache.set("stuff", stuff)
self.assertIsNone(cache.get("stuff"))
def test_expiration(self):
"Expiration has no effect on the dummy cache"
cache.set('expire1', 'very quickly', 1)
cache.set('expire2', 'very quickly', 1)
cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertIsNone(cache.get("expire1"))
self.assertIs(cache.add("expire2", "newvalue"), True)
self.assertIsNone(cache.get("expire2"))
self.assertIs(cache.has_key("expire3"), False)
def test_unicode(self):
"Unicode values are ignored by the dummy cache"
stuff = {
'ascii': 'ascii_value',
'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
'ascii2': {'x': 1}
}
for (key, value) in stuff.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertIsNone(cache.get(key))
def test_set_many(self):
"set_many does nothing for the dummy cache backend"
self.assertEqual(cache.set_many({'a': 1, 'b': 2}), [])
self.assertEqual(cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1'), [])
def test_set_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces'
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.set_many({'key with spaces': 'foo'})
def test_delete_many(self):
"delete_many does nothing for the dummy cache backend"
cache.delete_many(['a', 'b'])
def test_delete_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces'
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.delete_many({'key with spaces': 'foo'})
def test_clear(self):
"clear does nothing for the dummy cache backend"
cache.clear()
def test_incr_version(self):
"Dummy cache versions can't be incremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.incr_version('answer')
with self.assertRaises(ValueError):
cache.incr_version('does_not_exist')
def test_decr_version(self):
"Dummy cache versions can't be decremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.decr_version('answer')
with self.assertRaises(ValueError):
cache.decr_version('does_not_exist')
def test_get_or_set(self):
self.assertEqual(cache.get_or_set('mykey', 'default'), 'default')
self.assertIsNone(cache.get_or_set('mykey', None))
def test_get_or_set_callable(self):
def my_callable():
return 'default'
self.assertEqual(cache.get_or_set('mykey', my_callable), 'default')
self.assertEqual(cache.get_or_set('mykey', my_callable()), 'default')
def custom_key_func(key, key_prefix, version):
"A customized cache key function"
return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
_caches_setting_base = {
'default': {},
'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())},
'v2': {'VERSION': 2},
'custom_key': {'KEY_FUNCTION': custom_key_func},
'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'},
'cull': {'OPTIONS': {'MAX_ENTRIES': 30}},
'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}},
}
def caches_setting_for_tests(base=None, exclude=None, **params):
# `base` is used to pull in the memcached config from the original settings,
# `exclude` is a set of cache names denoting which `_caches_setting_base` keys
# should be omitted.
# `params` are test specific overrides and `_caches_settings_base` is the
# base config for the tests.
# This results in the following search order:
# params -> _caches_setting_base -> base
base = base or {}
exclude = exclude or set()
setting = {k: base.copy() for k in _caches_setting_base if k not in exclude}
for key, cache_params in setting.items():
cache_params.update(_caches_setting_base[key])
cache_params.update(params)
return setting
class BaseCacheTests:
# A common set of tests to apply to all cache backends
factory = RequestFactory()
# RemovedInDjango41Warning: python-memcached doesn't support .get() with
# default.
supports_get_with_default = True
# Some clients raise custom exceptions when .incr() or .decr() are called
# with a non-integer value.
incr_decr_type_error = TypeError
def tearDown(self):
cache.clear()
def test_simple(self):
# Simple cache set/get works
cache.set("key", "value")
self.assertEqual(cache.get("key"), "value")
def test_default_used_when_none_is_set(self):
"""If None is cached, get() returns it instead of the default."""
cache.set('key_default_none', None)
self.assertIsNone(cache.get('key_default_none', default='default'))
def test_add(self):
# A key can be added to a cache
self.assertIs(cache.add("addkey1", "value"), True)
self.assertIs(cache.add("addkey1", "newvalue"), False)
self.assertEqual(cache.get("addkey1"), "value")
def test_prefix(self):
# Test for same cache key conflicts between shared backend
cache.set('somekey', 'value')
# should not be set in the prefixed cache
self.assertIs(caches['prefix'].has_key('somekey'), False)
caches['prefix'].set('somekey', 'value2')
self.assertEqual(cache.get('somekey'), 'value')
self.assertEqual(caches['prefix'].get('somekey'), 'value2')
def test_non_existent(self):
"""Nonexistent cache keys return as None/default."""
self.assertIsNone(cache.get("does_not_exist"))
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
# Multiple cache keys can be returned using get_many
cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'})
self.assertEqual(cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'})
cache.set_many({'x': None, 'y': 1})
self.assertEqual(cache.get_many(['x', 'y']), {'x': None, 'y': 1})
def test_delete(self):
# Cache keys can be deleted
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertEqual(cache.get("key1"), "spam")
self.assertIs(cache.delete("key1"), True)
self.assertIsNone(cache.get("key1"))
self.assertEqual(cache.get("key2"), "eggs")
def test_delete_nonexistent(self):
self.assertIs(cache.delete('nonexistent_key'), False)
def test_has_key(self):
# The cache can be inspected for cache keys
cache.set("hello1", "goodbye1")
self.assertIs(cache.has_key("hello1"), True)
self.assertIs(cache.has_key("goodbye1"), False)
cache.set("no_expiry", "here", None)
self.assertIs(cache.has_key("no_expiry"), True)
cache.set('null', None)
self.assertIs(
cache.has_key('null'),
True if self.supports_get_with_default else False,
)
def test_in(self):
# The in operator can be used to inspect cache contents
cache.set("hello2", "goodbye2")
self.assertIn("hello2", cache)
self.assertNotIn("goodbye2", cache)
cache.set('null', None)
if self.supports_get_with_default:
self.assertIn('null', cache)
else:
self.assertNotIn('null', cache)
def test_incr(self):
# Cache values can be incremented
cache.set('answer', 41)
self.assertEqual(cache.incr('answer'), 42)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.incr('answer', 10), 52)
self.assertEqual(cache.get('answer'), 52)
self.assertEqual(cache.incr('answer', -10), 42)
with self.assertRaises(ValueError):
cache.incr('does_not_exist')
cache.set('null', None)
with self.assertRaises(self.incr_decr_type_error):
cache.incr('null')
def test_decr(self):
# Cache values can be decremented
cache.set('answer', 43)
self.assertEqual(cache.decr('answer'), 42)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.decr('answer', 10), 32)
self.assertEqual(cache.get('answer'), 32)
self.assertEqual(cache.decr('answer', -10), 42)
with self.assertRaises(ValueError):
cache.decr('does_not_exist')
cache.set('null', None)
with self.assertRaises(self.incr_decr_type_error):
cache.decr('null')
def test_close(self):
self.assertTrue(hasattr(cache, 'close'))
cache.close()
def test_data_types(self):
# Many different data types can be cached
stuff = {
'string': 'this is a string',
'int': 42,
'list': [1, 2, 3, 4],
'tuple': (1, 2, 3, 4),
'dict': {'A': 1, 'B': 2},
'function': f,
'class': C,
}
cache.set("stuff", stuff)
self.assertEqual(cache.get("stuff"), stuff)
def test_cache_read_for_model_instance(self):
# Don't want fields with callable as default to be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
my_poll = Poll.objects.create(question="Well?")
self.assertEqual(Poll.objects.count(), 1)
pub_date = my_poll.pub_date
cache.set('question', my_poll)
cached_poll = cache.get('question')
self.assertEqual(cached_poll.pub_date, pub_date)
# We only want the default expensive calculation run once
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_write_for_model_instance_with_deferred(self):
# Don't want fields with callable as default to be called on cache write
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.all().defer('question')
self.assertEqual(defer_qs.count(), 1)
self.assertEqual(expensive_calculation.num_runs, 1)
cache.set('deferred_queryset', defer_qs)
# cache set should not re-evaluate default functions
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_read_for_model_instance_with_deferred(self):
# Don't want fields with callable as default to be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.all().defer('question')
self.assertEqual(defer_qs.count(), 1)
cache.set('deferred_queryset', defer_qs)
self.assertEqual(expensive_calculation.num_runs, 1)
runs_before_cache_read = expensive_calculation.num_runs
cache.get('deferred_queryset')
# We only want the default expensive calculation run on creation and set
self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
def test_expiration(self):
# Cache values can be set to expire
cache.set('expire1', 'very quickly', 1)
cache.set('expire2', 'very quickly', 1)
cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertIsNone(cache.get("expire1"))
self.assertIs(cache.add("expire2", "newvalue"), True)
self.assertEqual(cache.get("expire2"), "newvalue")
self.assertIs(cache.has_key("expire3"), False)
def test_touch(self):
# cache.touch() updates the timeout.
cache.set('expire1', 'very quickly', timeout=1)
self.assertIs(cache.touch('expire1', timeout=4), True)
time.sleep(2)
self.assertIs(cache.has_key('expire1'), True)
time.sleep(3)
self.assertIs(cache.has_key('expire1'), False)
# cache.touch() works without the timeout argument.
cache.set('expire1', 'very quickly', timeout=1)
self.assertIs(cache.touch('expire1'), True)
time.sleep(2)
self.assertIs(cache.has_key('expire1'), True)
self.assertIs(cache.touch('nonexistent'), False)
def test_unicode(self):
# Unicode values can be cached
stuff = {
'ascii': 'ascii_value',
'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
'ascii2': {'x': 1}
}
# Test `set`
for (key, value) in stuff.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertEqual(cache.get(key), value)
# Test `add`
for (key, value) in stuff.items():
with self.subTest(key=key):
self.assertIs(cache.delete(key), True)
self.assertIs(cache.add(key, value), True)
self.assertEqual(cache.get(key), value)
# Test `set_many`
for (key, value) in stuff.items():
self.assertIs(cache.delete(key), True)
cache.set_many(stuff)
for (key, value) in stuff.items():
with self.subTest(key=key):
self.assertEqual(cache.get(key), value)
def test_binary_string(self):
# Binary strings should be cacheable
from zlib import compress, decompress
value = 'value_to_be_compressed'
compressed_value = compress(value.encode())
# Test set
cache.set('binary1', compressed_value)
compressed_result = cache.get('binary1')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test add
self.assertIs(cache.add('binary1-add', compressed_value), True)
compressed_result = cache.get('binary1-add')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test set_many
cache.set_many({'binary1-set_many': compressed_value})
compressed_result = cache.get('binary1-set_many')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
def test_set_many(self):
# Multiple keys can be set using set_many
cache.set_many({"key1": "spam", "key2": "eggs"})
self.assertEqual(cache.get("key1"), "spam")
self.assertEqual(cache.get("key2"), "eggs")
def test_set_many_returns_empty_list_on_success(self):
"""set_many() returns an empty list when all keys are inserted."""
failing_keys = cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertEqual(failing_keys, [])
def test_set_many_expiration(self):
# set_many takes a second ``timeout`` parameter
cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
time.sleep(2)
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_delete_many(self):
# Multiple keys can be deleted using delete_many
cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'})
cache.delete_many(["key1", "key2"])
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
self.assertEqual(cache.get("key3"), "ham")
def test_clear(self):
# The cache can be emptied using clear
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
cache.clear()
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_long_timeout(self):
"""
Follow memcached's convention where a timeout greater than 30 days is
treated as an absolute expiration timestamp instead of a relative
offset (#12399).
"""
cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second
self.assertEqual(cache.get('key1'), 'eggs')
self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True)
self.assertEqual(cache.get('key2'), 'ham')
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1)
self.assertEqual(cache.get('key3'), 'sausage')
self.assertEqual(cache.get('key4'), 'lobster bisque')
def test_forever_timeout(self):
"""
        Passing None as the timeout results in a value that is cached forever
"""
cache.set('key1', 'eggs', None)
self.assertEqual(cache.get('key1'), 'eggs')
self.assertIs(cache.add('key2', 'ham', None), True)
self.assertEqual(cache.get('key2'), 'ham')
self.assertIs(cache.add('key1', 'new eggs', None), False)
self.assertEqual(cache.get('key1'), 'eggs')
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None)
self.assertEqual(cache.get('key3'), 'sausage')
self.assertEqual(cache.get('key4'), 'lobster bisque')
cache.set('key5', 'belgian fries', timeout=1)
self.assertIs(cache.touch('key5', timeout=None), True)
time.sleep(2)
self.assertEqual(cache.get('key5'), 'belgian fries')
def test_zero_timeout(self):
"""
        Passing zero as the timeout results in a value that is not cached
"""
cache.set('key1', 'eggs', 0)
self.assertIsNone(cache.get('key1'))
self.assertIs(cache.add('key2', 'ham', 0), True)
self.assertIsNone(cache.get('key2'))
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0)
self.assertIsNone(cache.get('key3'))
self.assertIsNone(cache.get('key4'))
cache.set('key5', 'belgian fries', timeout=5)
self.assertIs(cache.touch('key5', timeout=0), True)
self.assertIsNone(cache.get('key5'))
def test_float_timeout(self):
# Make sure a timeout given as a float doesn't crash anything.
cache.set("key1", "spam", 100.2)
self.assertEqual(cache.get("key1"), "spam")
def _perform_cull_test(self, cull_cache_name, initial_count, final_count):
try:
cull_cache = caches[cull_cache_name]
except InvalidCacheBackendError:
self.skipTest("Culling isn't implemented.")
# Create initial cache key entries. This will overflow the cache,
# causing a cull.
for i in range(1, initial_count):
cull_cache.set('cull%d' % i, 'value', 1000)
count = 0
# Count how many keys are left in the cache.
for i in range(1, initial_count):
if cull_cache.has_key('cull%d' % i):
count += 1
self.assertEqual(count, final_count)
def test_cull(self):
self._perform_cull_test('cull', 50, 29)
def test_zero_cull(self):
self._perform_cull_test('zero_cull', 50, 19)
def test_cull_delete_when_store_empty(self):
try:
cull_cache = caches['cull']
except InvalidCacheBackendError:
self.skipTest("Culling isn't implemented.")
old_max_entries = cull_cache._max_entries
# Force _cull to delete on first cached record.
cull_cache._max_entries = -1
try:
cull_cache.set('force_cull_delete', 'value', 1000)
self.assertIs(cull_cache.has_key('force_cull_delete'), True)
finally:
cull_cache._max_entries = old_max_entries
def _perform_invalid_key_test(self, key, expected_warning):
"""
        All the builtin backends should warn (except memcached, which should
        error) on keys that would be refused by memcached. This encourages
portable caching code without making it too difficult to use production
backends with more liberal key rules. Refs #6447.
"""
        # Mimic a custom ``make_key`` method being defined, since the default
        # one never triggers the warnings below.
def func(key, *args):
return key
old_func = cache.key_func
cache.key_func = func
tests = [
('add', [key, 1]),
('get', [key]),
('set', [key, 1]),
('incr', [key]),
('decr', [key]),
('touch', [key]),
('delete', [key]),
('get_many', [[key, 'b']]),
('set_many', [{key: 1, 'b': 2}]),
('delete_many', [{key: 1, 'b': 2}]),
]
try:
for operation, args in tests:
with self.subTest(operation=operation):
with self.assertWarns(CacheKeyWarning) as cm:
getattr(cache, operation)(*args)
self.assertEqual(str(cm.warning), expected_warning)
finally:
cache.key_func = old_func
def test_invalid_key_characters(self):
# memcached doesn't allow whitespace or control characters in keys.
key = 'key with spaces and 清'
self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key)
def test_invalid_key_length(self):
# memcached limits key length to 250.
key = ('a' * 250) + '清'
expected_warning = (
'Cache key will cause errors if used with memcached: '
'%r (longer than %s)' % (key, 250)
)
self._perform_invalid_key_test(key, expected_warning)
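    # Versioning tests: the default key function joins KEY_PREFIX, version, and
    # the key (e.g. ':1:answer1'), so the same key stored under different
    # versions maps to distinct backend keys.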
def test_cache_versioning_get_set(self):
# set, using default version = 1
cache.set('answer1', 42)
self.assertEqual(cache.get('answer1'), 42)
self.assertEqual(cache.get('answer1', version=1), 42)
self.assertIsNone(cache.get('answer1', version=2))
self.assertIsNone(caches['v2'].get('answer1'))
self.assertEqual(caches['v2'].get('answer1', version=1), 42)
self.assertIsNone(caches['v2'].get('answer1', version=2))
# set, default version = 1, but manually override version = 2
cache.set('answer2', 42, version=2)
self.assertIsNone(cache.get('answer2'))
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
# v2 set, using default version = 2
caches['v2'].set('answer3', 42)
self.assertIsNone(cache.get('answer3'))
self.assertIsNone(cache.get('answer3', version=1))
self.assertEqual(cache.get('answer3', version=2), 42)
self.assertEqual(caches['v2'].get('answer3'), 42)
self.assertIsNone(caches['v2'].get('answer3', version=1))
self.assertEqual(caches['v2'].get('answer3', version=2), 42)
# v2 set, default version = 2, but manually override version = 1
caches['v2'].set('answer4', 42, version=1)
self.assertEqual(cache.get('answer4'), 42)
self.assertEqual(cache.get('answer4', version=1), 42)
self.assertIsNone(cache.get('answer4', version=2))
self.assertIsNone(caches['v2'].get('answer4'))
self.assertEqual(caches['v2'].get('answer4', version=1), 42)
self.assertIsNone(caches['v2'].get('answer4', version=2))
def test_cache_versioning_add(self):
# add, default version = 1, but manually override version = 2
self.assertIs(cache.add('answer1', 42, version=2), True)
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
self.assertIs(cache.add('answer1', 37, version=2), False)
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
self.assertIs(cache.add('answer1', 37, version=1), True)
self.assertEqual(cache.get('answer1', version=1), 37)
self.assertEqual(cache.get('answer1', version=2), 42)
# v2 add, using default version = 2
self.assertIs(caches['v2'].add('answer2', 42), True)
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
self.assertIs(caches['v2'].add('answer2', 37), False)
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
self.assertIs(caches['v2'].add('answer2', 37, version=1), True)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 42)
# v2 add, default version = 2, but manually override version = 1
self.assertIs(caches['v2'].add('answer3', 42, version=1), True)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertIsNone(cache.get('answer3', version=2))
self.assertIs(caches['v2'].add('answer3', 37, version=1), False)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertIsNone(cache.get('answer3', version=2))
self.assertIs(caches['v2'].add('answer3', 37), True)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertEqual(cache.get('answer3', version=2), 37)
def test_cache_versioning_has_key(self):
cache.set('answer1', 42)
# has_key
self.assertIs(cache.has_key('answer1'), True)
self.assertIs(cache.has_key('answer1', version=1), True)
self.assertIs(cache.has_key('answer1', version=2), False)
self.assertIs(caches['v2'].has_key('answer1'), False)
self.assertIs(caches['v2'].has_key('answer1', version=1), True)
self.assertIs(caches['v2'].has_key('answer1', version=2), False)
def test_cache_versioning_delete(self):
cache.set('answer1', 37, version=1)
cache.set('answer1', 42, version=2)
self.assertIs(cache.delete('answer1'), True)
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
cache.set('answer2', 37, version=1)
cache.set('answer2', 42, version=2)
self.assertIs(cache.delete('answer2', version=2), True)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertIsNone(cache.get('answer2', version=2))
cache.set('answer3', 37, version=1)
cache.set('answer3', 42, version=2)
self.assertIs(caches['v2'].delete('answer3'), True)
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertIsNone(cache.get('answer3', version=2))
cache.set('answer4', 37, version=1)
cache.set('answer4', 42, version=2)
self.assertIs(caches['v2'].delete('answer4', version=1), True)
self.assertIsNone(cache.get('answer4', version=1))
self.assertEqual(cache.get('answer4', version=2), 42)
def test_cache_versioning_incr_decr(self):
cache.set('answer1', 37, version=1)
cache.set('answer1', 42, version=2)
self.assertEqual(cache.incr('answer1'), 38)
self.assertEqual(cache.get('answer1', version=1), 38)
self.assertEqual(cache.get('answer1', version=2), 42)
self.assertEqual(cache.decr('answer1'), 37)
self.assertEqual(cache.get('answer1', version=1), 37)
self.assertEqual(cache.get('answer1', version=2), 42)
cache.set('answer2', 37, version=1)
cache.set('answer2', 42, version=2)
self.assertEqual(cache.incr('answer2', version=2), 43)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 43)
self.assertEqual(cache.decr('answer2', version=2), 42)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 42)
cache.set('answer3', 37, version=1)
cache.set('answer3', 42, version=2)
self.assertEqual(caches['v2'].incr('answer3'), 43)
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertEqual(cache.get('answer3', version=2), 43)
self.assertEqual(caches['v2'].decr('answer3'), 42)
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertEqual(cache.get('answer3', version=2), 42)
cache.set('answer4', 37, version=1)
cache.set('answer4', 42, version=2)
self.assertEqual(caches['v2'].incr('answer4', version=1), 38)
self.assertEqual(cache.get('answer4', version=1), 38)
self.assertEqual(cache.get('answer4', version=2), 42)
self.assertEqual(caches['v2'].decr('answer4', version=1), 37)
self.assertEqual(cache.get('answer4', version=1), 37)
self.assertEqual(cache.get('answer4', version=2), 42)
def test_cache_versioning_get_set_many(self):
# set, using default version = 1
cache.set_many({'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {})
# set, default version = 1, but manually override version = 2
cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
self.assertEqual(cache.get_many(['ford2', 'arthur2']), {})
self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {})
self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42})
# v2 set, using default version = 2
caches['v2'].set_many({'ford3': 37, 'arthur3': 42})
self.assertEqual(cache.get_many(['ford3', 'arthur3']), {})
self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {})
self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42})
# v2 set, default version = 2, but manually override version = 1
caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1)
self.assertEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42})
self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42})
self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {})
def test_incr_version(self):
cache.set('answer', 42, version=2)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertEqual(cache.get('answer', version=2), 42)
self.assertIsNone(cache.get('answer', version=3))
self.assertEqual(cache.incr_version('answer', version=2), 3)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertIsNone(cache.get('answer', version=2))
self.assertEqual(cache.get('answer', version=3), 42)
caches['v2'].set('answer2', 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
self.assertIsNone(caches['v2'].get('answer2', version=3))
self.assertEqual(caches['v2'].incr_version('answer2'), 3)
self.assertIsNone(caches['v2'].get('answer2'))
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertIsNone(caches['v2'].get('answer2', version=2))
self.assertEqual(caches['v2'].get('answer2', version=3), 42)
with self.assertRaises(ValueError):
cache.incr_version('does_not_exist')
cache.set('null', None)
if self.supports_get_with_default:
self.assertEqual(cache.incr_version('null'), 2)
else:
with self.assertRaises(self.incr_decr_type_error):
cache.incr_version('null')
def test_decr_version(self):
cache.set('answer', 42, version=2)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertEqual(cache.get('answer', version=2), 42)
self.assertEqual(cache.decr_version('answer', version=2), 1)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.get('answer', version=1), 42)
self.assertIsNone(cache.get('answer', version=2))
caches['v2'].set('answer2', 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
self.assertEqual(caches['v2'].decr_version('answer2'), 1)
self.assertIsNone(caches['v2'].get('answer2'))
self.assertEqual(caches['v2'].get('answer2', version=1), 42)
self.assertIsNone(caches['v2'].get('answer2', version=2))
with self.assertRaises(ValueError):
cache.decr_version('does_not_exist', version=2)
cache.set('null', None, version=2)
if self.supports_get_with_default:
self.assertEqual(cache.decr_version('null', version=2), 1)
else:
with self.assertRaises(self.incr_decr_type_error):
cache.decr_version('null', version=2)
def test_custom_key_func(self):
# Two caches with different key functions aren't visible to each other
cache.set('answer1', 42)
self.assertEqual(cache.get('answer1'), 42)
self.assertIsNone(caches['custom_key'].get('answer1'))
self.assertIsNone(caches['custom_key2'].get('answer1'))
caches['custom_key'].set('answer2', 42)
self.assertIsNone(cache.get('answer2'))
self.assertEqual(caches['custom_key'].get('answer2'), 42)
self.assertEqual(caches['custom_key2'].get('answer2'), 42)
def test_cache_write_unpicklable_object(self):
fetch_middleware = FetchFromCacheMiddleware(empty_response)
fetch_middleware.cache = cache
request = self.factory.get('/cache/test')
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNone(get_cache_data)
content = 'Testing cookie serialization.'
def get_response(req):
response = HttpResponse(content)
response.set_cookie('foo', 'bar')
return response
update_middleware = UpdateCacheMiddleware(get_response)
update_middleware.cache = cache
response = update_middleware(request)
get_cache_data = fetch_middleware.process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
self.assertEqual(get_cache_data.cookies, response.cookies)
UpdateCacheMiddleware(lambda req: get_cache_data)(request)
get_cache_data = fetch_middleware.process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
self.assertEqual(get_cache_data.cookies, response.cookies)
def test_add_fail_on_pickleerror(self):
# Shouldn't fail silently if trying to cache an unpicklable type.
with self.assertRaises(pickle.PickleError):
cache.add('unpicklable', Unpicklable())
def test_set_fail_on_pickleerror(self):
with self.assertRaises(pickle.PickleError):
cache.set('unpicklable', Unpicklable())
def test_get_or_set(self):
self.assertIsNone(cache.get('projector'))
self.assertEqual(cache.get_or_set('projector', 42), 42)
self.assertEqual(cache.get('projector'), 42)
self.assertIsNone(cache.get_or_set('null', None))
if self.supports_get_with_default:
# Previous get_or_set() stores None in the cache.
self.assertIsNone(cache.get('null', 'default'))
else:
self.assertEqual(cache.get('null', 'default'), 'default')
def test_get_or_set_callable(self):
def my_callable():
return 'value'
self.assertEqual(cache.get_or_set('mykey', my_callable), 'value')
self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value')
self.assertIsNone(cache.get_or_set('null', lambda: None))
if self.supports_get_with_default:
# Previous get_or_set() stores None in the cache.
self.assertIsNone(cache.get('null', 'default'))
else:
self.assertEqual(cache.get('null', 'default'), 'default')
def test_get_or_set_version(self):
msg = "get_or_set() missing 1 required positional argument: 'default'"
self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
with self.assertRaisesMessage(TypeError, msg):
cache.get_or_set('brian')
with self.assertRaisesMessage(TypeError, msg):
cache.get_or_set('brian', version=1)
self.assertIsNone(cache.get('brian', version=1))
self.assertEqual(cache.get_or_set('brian', 42, version=1), 42)
self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
self.assertIsNone(cache.get('brian', version=3))
def test_get_or_set_racing(self):
with mock.patch('%s.%s' % (settings.CACHES['default']['BACKEND'], 'add')) as cache_add:
# Simulate cache.add() failing to add a value. In that case, the
# default value should be returned.
cache_add.return_value = False
self.assertEqual(cache.get_or_set('key', 'default'), 'default')
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.db.DatabaseCache',
# Spaces are used in the table name to ensure quoting/escaping is working
LOCATION='test cache table'
))
class DBCacheTests(BaseCacheTests, TransactionTestCase):
available_apps = ['cache']
def setUp(self):
        # The super call needs to happen first for the settings override.
super().setUp()
self.create_table()
def tearDown(self):
# The super call needs to happen first because it uses the database.
super().tearDown()
self.drop_table()
def create_table(self):
management.call_command('createcachetable', verbosity=0)
def drop_table(self):
with connection.cursor() as cursor:
table_name = connection.ops.quote_name('test cache table')
cursor.execute('DROP TABLE %s' % table_name)
def test_get_many_num_queries(self):
cache.set_many({'a': 1, 'b': 2})
cache.set('expired', 'expired', 0.01)
with self.assertNumQueries(1):
self.assertEqual(cache.get_many(['a', 'b']), {'a': 1, 'b': 2})
time.sleep(0.02)
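        # The expired row is deleted during get_many(), costing a second query.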
with self.assertNumQueries(2):
self.assertEqual(cache.get_many(['a', 'b', 'expired']), {'a': 1, 'b': 2})
def test_delete_many_num_queries(self):
cache.set_many({'a': 1, 'b': 2, 'c': 3})
with self.assertNumQueries(1):
cache.delete_many(['a', 'b', 'c'])
def test_zero_cull(self):
self._perform_cull_test('zero_cull', 50, 18)
def test_second_call_doesnt_crash(self):
out = io.StringIO()
management.call_command('createcachetable', stdout=out)
self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES))
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.db.DatabaseCache',
# Use another table name to avoid the 'table already exists' message.
LOCATION='createcachetable_dry_run_mode'
))
def test_createcachetable_dry_run_mode(self):
out = io.StringIO()
management.call_command('createcachetable', dry_run=True, stdout=out)
output = out.getvalue()
self.assertTrue(output.startswith("CREATE TABLE"))
def test_createcachetable_with_table_argument(self):
"""
Delete and recreate cache table with legacy behavior (explicitly
specifying the table name).
"""
self.drop_table()
out = io.StringIO()
management.call_command(
'createcachetable',
'test cache table',
verbosity=2,
stdout=out,
)
self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n")
@override_settings(USE_TZ=True)
class DBCacheWithTimeZoneTests(DBCacheTests):
pass
class DBCacheRouter:
"""A router that puts the cache table on the 'other' database."""
def db_for_read(self, model, **hints):
if model._meta.app_label == 'django_cache':
return 'other'
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == 'django_cache':
return 'other'
return None
def allow_migrate(self, db, app_label, **hints):
if app_label == 'django_cache':
return db == 'other'
return None
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'my_cache_table',
},
},
)
class CreateCacheTableForDBCacheTests(TestCase):
databases = {'default', 'other'}
@override_settings(DATABASE_ROUTERS=[DBCacheRouter()])
def test_createcachetable_observes_database_router(self):
# cache table should not be created on 'default'
with self.assertNumQueries(0, using='default'):
management.call_command('createcachetable', database='default', verbosity=0)
# cache table should be created on 'other'
# Queries:
# 1: check table doesn't already exist
# 2: create savepoint (if transactional DDL is supported)
# 3: create the table
# 4: create the index
# 5: release savepoint (if transactional DDL is supported)
num = 5 if connections['other'].features.can_rollback_ddl else 3
with self.assertNumQueries(num, using='other'):
management.call_command('createcachetable', database='other', verbosity=0)
class PicklingSideEffect:
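    """Record whether the cache's lock was held while this object was pickled."""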
def __init__(self, cache):
self.cache = cache
self.locked = False
def __getstate__(self):
self.locked = self.cache._lock.locked()
return {}
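# With MAX_ENTRIES=9, setting a tenth key triggers a cull. The locmem backend
# evicts the least recently used entries (len // CULL_FREQUENCY of them, i.e. 3
# with the default CULL_FREQUENCY), which the LRU tests below rely on.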
limit_locmem_entries = override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.locmem.LocMemCache',
OPTIONS={'MAX_ENTRIES': 9},
))
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.locmem.LocMemCache',
))
class LocMemCacheTests(BaseCacheTests, TestCase):
def setUp(self):
super().setUp()
# LocMem requires a hack to make the other caches
# share a data store with the 'normal' cache.
caches['prefix']._cache = cache._cache
caches['prefix']._expire_info = cache._expire_info
caches['v2']._cache = cache._cache
caches['v2']._expire_info = cache._expire_info
caches['custom_key']._cache = cache._cache
caches['custom_key']._expire_info = cache._expire_info
caches['custom_key2']._cache = cache._cache
caches['custom_key2']._expire_info = cache._expire_info
@override_settings(CACHES={
'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other'
},
})
def test_multiple_caches(self):
"Multiple locmem caches are isolated"
cache.set('value', 42)
self.assertEqual(caches['default'].get('value'), 42)
self.assertIsNone(caches['other'].get('value'))
def test_locking_on_pickle(self):
"""#20613/#18541 -- Ensures pickling is done outside of the lock."""
bad_obj = PicklingSideEffect(cache)
cache.set('set', bad_obj)
self.assertFalse(bad_obj.locked, "Cache was locked during pickling")
self.assertIs(cache.add('add', bad_obj), True)
self.assertFalse(bad_obj.locked, "Cache was locked during pickling")
def test_incr_decr_timeout(self):
"""incr/decr does not modify expiry time (matches memcached behavior)"""
key = 'value'
_key = cache.make_key(key)
cache.set(key, 1, timeout=cache.default_timeout * 10)
expire = cache._expire_info[_key]
self.assertEqual(cache.incr(key), 2)
self.assertEqual(expire, cache._expire_info[_key])
self.assertEqual(cache.decr(key), 1)
self.assertEqual(expire, cache._expire_info[_key])
@limit_locmem_entries
def test_lru_get(self):
"""get() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key)
cache.set(9, 9, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key)
for key in range(6, 9):
self.assertIsNone(cache.get(key))
self.assertEqual(cache.get(9), 9)
@limit_locmem_entries
def test_lru_set(self):
"""set() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(3, 9):
cache.set(key, key, timeout=None)
cache.set(9, 9, timeout=None)
for key in range(3, 10):
self.assertEqual(cache.get(key), key)
for key in range(3):
self.assertIsNone(cache.get(key))
@limit_locmem_entries
def test_lru_incr(self):
"""incr() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(6):
self.assertEqual(cache.incr(key), key + 1)
cache.set(9, 9, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key + 1)
for key in range(6, 9):
self.assertIsNone(cache.get(key))
self.assertEqual(cache.get(9), 9)
# The memcached backends aren't guaranteed to be available.
# To check the memcached backends, the test settings file will
# need to contain at least one cache backend setting that points at
# your memcached server.
configured_caches = {}
for _cache_params in settings.CACHES.values():
configured_caches[_cache_params['BACKEND']] = _cache_params
PyLibMCCache_params = configured_caches.get('django.core.cache.backends.memcached.PyLibMCCache')
PyMemcacheCache_params = configured_caches.get('django.core.cache.backends.memcached.PyMemcacheCache')
# The memcached backends don't support cull-related options like `MAX_ENTRIES`.
memcached_excluded_caches = {'cull', 'zero_cull'}
class BaseMemcachedTests(BaseCacheTests):
# By default it's assumed that the client doesn't clean up connections
# properly, in which case the backend must do so after each request.
should_disconnect_on_close = True
def test_location_multiple_servers(self):
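        # LOCATION may be a list of servers or a single string using ';' or ','
        # as a separator; all of these normalize to the same server list.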
locations = [
['server1.tld', 'server2:11211'],
'server1.tld;server2:11211',
'server1.tld,server2:11211',
]
for location in locations:
with self.subTest(location=location):
params = {'BACKEND': self.base_params['BACKEND'], 'LOCATION': location}
with self.settings(CACHES={'default': params}):
self.assertEqual(cache._servers, ['server1.tld', 'server2:11211'])
def _perform_invalid_key_test(self, key, expected_warning):
"""
While other backends merely warn, memcached should raise for an invalid
key.
"""
msg = expected_warning.replace(key, cache.make_key(key))
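        # The memcached backends validate the final key produced by make_key()
        # (including prefix and version), so the expected message must contain
        # that key rather than the raw one.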
tests = [
('add', [key, 1]),
('get', [key]),
('set', [key, 1]),
('incr', [key]),
('decr', [key]),
('touch', [key]),
('delete', [key]),
('get_many', [[key, 'b']]),
('set_many', [{key: 1, 'b': 2}]),
('delete_many', [{key: 1, 'b': 2}]),
]
for operation, args in tests:
with self.subTest(operation=operation):
with self.assertRaises(InvalidCacheKey) as cm:
getattr(cache, operation)(*args)
self.assertEqual(str(cm.exception), msg)
def test_default_never_expiring_timeout(self):
# Regression test for #22845
with self.settings(CACHES=caches_setting_for_tests(
base=self.base_params,
exclude=memcached_excluded_caches,
TIMEOUT=None)):
cache.set('infinite_foo', 'bar')
self.assertEqual(cache.get('infinite_foo'), 'bar')
def test_default_far_future_timeout(self):
# Regression test for #22845
with self.settings(CACHES=caches_setting_for_tests(
base=self.base_params,
exclude=memcached_excluded_caches,
# 60*60*24*365, 1 year
TIMEOUT=31536000)):
cache.set('future_foo', 'bar')
self.assertEqual(cache.get('future_foo'), 'bar')
def test_memcached_deletes_key_on_failed_set(self):
# By default memcached allows objects up to 1MB. For the cache_db session
# backend to always use the current session, memcached needs to delete
# the old key if it fails to set.
max_value_length = 2 ** 20
cache.set('small_value', 'a')
self.assertEqual(cache.get('small_value'), 'a')
large_value = 'a' * (max_value_length + 1)
try:
cache.set('small_value', large_value)
except Exception:
# Most clients (e.g. pymemcache or pylibmc) raise when the value is
# too large. This test is primarily checking that the key was
# deleted, so the return/exception behavior for the set() itself is
# not important.
pass
# small_value should be deleted, or set if configured to accept larger values
value = cache.get('small_value')
self.assertTrue(value is None or value == large_value)
def test_close(self):
# For clients that don't manage their connections properly, the
# connection is closed when the request is complete.
signals.request_finished.disconnect(close_old_connections)
try:
with mock.patch.object(cache._class, 'disconnect_all', autospec=True) as mock_disconnect:
signals.request_finished.send(self.__class__)
self.assertIs(mock_disconnect.called, self.should_disconnect_on_close)
finally:
signals.request_finished.connect(close_old_connections)
def test_set_many_returns_failing_keys(self):
def fail_set_multi(mapping, *args, **kwargs):
return mapping.keys()
with mock.patch.object(cache._class, 'set_multi', side_effect=fail_set_multi):
failing_keys = cache.set_many({'key': 'value'})
self.assertEqual(failing_keys, ['key'])
# RemovedInDjango41Warning.
MemcachedCache_params = configured_caches.get('django.core.cache.backends.memcached.MemcachedCache')
@ignore_warnings(category=RemovedInDjango41Warning)
@unittest.skipUnless(MemcachedCache_params, "MemcachedCache backend not configured")
@override_settings(CACHES=caches_setting_for_tests(
base=MemcachedCache_params,
exclude=memcached_excluded_caches,
))
class MemcachedCacheTests(BaseMemcachedTests, TestCase):
base_params = MemcachedCache_params
supports_get_with_default = False
incr_decr_type_error = ValueError
def test_memcached_uses_highest_pickle_version(self):
# Regression test for #19810
for cache_key in settings.CACHES:
with self.subTest(cache_key=cache_key):
self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL)
@override_settings(CACHES=caches_setting_for_tests(
base=MemcachedCache_params,
exclude=memcached_excluded_caches,
OPTIONS={'server_max_value_length': 9999},
))
def test_memcached_options(self):
self.assertEqual(cache._cache.server_max_value_length, 9999)
def test_default_used_when_none_is_set(self):
"""
        python-memcached doesn't support default in get(), so this test
overrides the one in BaseCacheTests.
"""
cache.set('key_default_none', None)
self.assertEqual(cache.get('key_default_none', default='default'), 'default')
class MemcachedCacheDeprecationTests(SimpleTestCase):
def test_warning(self):
from django.core.cache.backends.memcached import MemcachedCache
# Remove warnings filter on MemcachedCache deprecation warning, added
# in runtests.py.
warnings.filterwarnings(
'error',
'MemcachedCache is deprecated',
category=RemovedInDjango41Warning,
)
try:
msg = (
'MemcachedCache is deprecated in favor of PyMemcacheCache and '
'PyLibMCCache.'
)
with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
MemcachedCache('127.0.0.1:11211', {})
finally:
warnings.filterwarnings(
'ignore',
'MemcachedCache is deprecated',
category=RemovedInDjango41Warning,
)
@unittest.skipUnless(PyLibMCCache_params, "PyLibMCCache backend not configured")
@override_settings(CACHES=caches_setting_for_tests(
base=PyLibMCCache_params,
exclude=memcached_excluded_caches,
))
class PyLibMCCacheTests(BaseMemcachedTests, TestCase):
base_params = PyLibMCCache_params
# libmemcached manages its own connections.
should_disconnect_on_close = False
@property
def incr_decr_type_error(self):
return cache._lib.ClientError
@override_settings(CACHES=caches_setting_for_tests(
base=PyLibMCCache_params,
exclude=memcached_excluded_caches,
OPTIONS={
'binary': True,
'behaviors': {'tcp_nodelay': True},
},
))
def test_pylibmc_options(self):
self.assertTrue(cache._cache.binary)
self.assertEqual(cache._cache.behaviors['tcp_nodelay'], int(True))
def test_pylibmc_client_servers(self):
backend = self.base_params['BACKEND']
tests = [
('unix:/run/memcached/socket', '/run/memcached/socket'),
('/run/memcached/socket', '/run/memcached/socket'),
('localhost', 'localhost'),
('localhost:11211', 'localhost:11211'),
('[::1]', '[::1]'),
('[::1]:11211', '[::1]:11211'),
('127.0.0.1', '127.0.0.1'),
('127.0.0.1:11211', '127.0.0.1:11211'),
]
for location, expected in tests:
settings = {'default': {'BACKEND': backend, 'LOCATION': location}}
with self.subTest(location), self.settings(CACHES=settings):
self.assertEqual(cache.client_servers, [expected])
@unittest.skipUnless(PyMemcacheCache_params, 'PyMemcacheCache backend not configured')
@override_settings(CACHES=caches_setting_for_tests(
base=PyMemcacheCache_params,
exclude=memcached_excluded_caches,
))
class PyMemcacheCacheTests(BaseMemcachedTests, TestCase):
base_params = PyMemcacheCache_params
@property
def incr_decr_type_error(self):
return cache._lib.exceptions.MemcacheClientError
def test_pymemcache_highest_pickle_version(self):
self.assertEqual(
cache._cache.default_kwargs['serde']._serialize_func.keywords['pickle_version'],
pickle.HIGHEST_PROTOCOL,
)
for cache_key in settings.CACHES:
for client_key, client in caches[cache_key]._cache.clients.items():
with self.subTest(cache_key=cache_key, server=client_key):
self.assertEqual(
client.serde._serialize_func.keywords['pickle_version'],
pickle.HIGHEST_PROTOCOL,
)
@override_settings(CACHES=caches_setting_for_tests(
base=PyMemcacheCache_params,
exclude=memcached_excluded_caches,
OPTIONS={'no_delay': True},
))
def test_pymemcache_options(self):
self.assertIs(cache._cache.default_kwargs['no_delay'], True)
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.filebased.FileBasedCache',
))
class FileBasedCacheTests(BaseCacheTests, TestCase):
"""
Specific test cases for the file-based cache.
"""
def setUp(self):
super().setUp()
self.dirname = self.mkdtemp()
        # The caches' LOCATION cannot be modified through override_settings /
        # modify_settings, so the settings are manipulated directly here and the
        # setting_changed signal is triggered manually.
for cache_params in settings.CACHES.values():
cache_params['LOCATION'] = self.dirname
setting_changed.send(self.__class__, setting='CACHES', enter=False)
def tearDown(self):
super().tearDown()
        # Call parent first, as cache.clear() may recreate the cache base directory.
shutil.rmtree(self.dirname)
def mkdtemp(self):
return tempfile.mkdtemp()
def test_ignores_non_cache_files(self):
fname = os.path.join(self.dirname, 'not-a-cache-file')
with open(fname, 'w'):
os.utime(fname, None)
cache.clear()
self.assertTrue(os.path.exists(fname),
'Expected cache.clear to ignore non cache files')
os.remove(fname)
def test_clear_does_not_remove_cache_dir(self):
cache.clear()
self.assertTrue(os.path.exists(self.dirname),
'Expected cache.clear to keep the cache dir')
def test_creates_cache_dir_if_nonexistent(self):
os.rmdir(self.dirname)
cache.set('foo', 'bar')
self.assertTrue(os.path.exists(self.dirname))
def test_get_ignores_enoent(self):
cache.set('foo', 'bar')
os.unlink(cache._key_to_file('foo'))
# Returns the default instead of erroring.
self.assertEqual(cache.get('foo', 'baz'), 'baz')
@skipIf(
sys.platform == 'win32',
'Windows only partially supports umasks and chmod.',
)
def test_cache_dir_permissions(self):
os.rmdir(self.dirname)
dir_path = Path(self.dirname) / 'nested' / 'filebasedcache'
for cache_params in settings.CACHES.values():
cache_params['LOCATION'] = dir_path
setting_changed.send(self.__class__, setting='CACHES', enter=False)
cache.set('foo', 'bar')
self.assertIs(dir_path.exists(), True)
tests = [
dir_path,
dir_path.parent,
dir_path.parent.parent,
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o700)
def test_get_does_not_ignore_non_filenotfound_exceptions(self):
with mock.patch('builtins.open', side_effect=OSError):
with self.assertRaises(OSError):
cache.get('foo')
def test_empty_cache_file_considered_expired(self):
cache_file = cache._key_to_file('foo')
with open(cache_file, 'wb') as fh:
fh.write(b'')
with open(cache_file, 'rb') as fh:
self.assertIs(cache._is_expired(fh), True)
class FileBasedCachePathLibTests(FileBasedCacheTests):
def mkdtemp(self):
tmp_dir = super().mkdtemp()
return Path(tmp_dir)
@override_settings(CACHES={
'default': {
'BACKEND': 'cache.liberal_backend.CacheClass',
},
})
class CustomCacheKeyValidationTests(SimpleTestCase):
"""
Tests for the ability to mixin a custom ``validate_key`` method to
a custom cache backend that otherwise inherits from a builtin
backend, and override the default key validation. Refs #6447.
"""
def test_custom_key_validation(self):
        # this key is both longer than 250 characters and has spaces
key = 'some key with spaces' * 15
val = 'a value'
cache.set(key, val)
self.assertEqual(cache.get(key), val)
@override_settings(
CACHES={
'default': {
'BACKEND': 'cache.closeable_cache.CacheClass',
}
}
)
class CacheClosingTests(SimpleTestCase):
def test_close(self):
self.assertFalse(cache.closed)
signals.request_finished.send(self.__class__)
self.assertTrue(cache.closed)
DEFAULT_MEMORY_CACHES_SETTINGS = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'unique-snowflake',
}
}
NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS)
NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None
class DefaultNonExpiringCacheKeyTests(SimpleTestCase):
"""
    Cache settings with TIMEOUT=None create caches that set non-expiring keys.
"""
def setUp(self):
        # The default 5 minute (300 second) expiration time for keys is
        # defined in BaseCache.__init__().
self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout
def tearDown(self):
        del self.DEFAULT_TIMEOUT
def test_default_expiration_time_for_keys_is_5_minutes(self):
"""The default expiration time of a cache key is 5 minutes.
This value is defined in
django.core.cache.backends.base.BaseCache.__init__().
"""
self.assertEqual(300, self.DEFAULT_TIMEOUT)
def test_caches_with_unset_timeout_has_correct_default_timeout(self):
"""Caches that have the TIMEOUT parameter undefined in the default
settings will use the default 5 minute timeout.
"""
cache = caches[DEFAULT_CACHE_ALIAS]
self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout)
@override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self):
"""Memory caches that have the TIMEOUT parameter set to `None` in the
        default settings will have `None` as the default timeout.
This means "no timeout".
"""
cache = caches[DEFAULT_CACHE_ALIAS]
self.assertIsNone(cache.default_timeout)
self.assertIsNone(cache.get_backend_timeout())
@override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS)
def test_caches_with_unset_timeout_set_expiring_key(self):
"""Memory caches that have the TIMEOUT parameter unset will set cache
        keys with the default 5 minute timeout.
"""
key = "my-key"
value = "my-value"
cache = caches[DEFAULT_CACHE_ALIAS]
cache.set(key, value)
cache_key = cache.make_key(key)
self.assertIsNotNone(cache._expire_info[cache_key])
@override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
def test_caches_set_with_timeout_as_none_set_non_expiring_key(self):
"""Memory caches that have the TIMEOUT parameter set to `None` will set
        a non-expiring key by default.
"""
key = "another-key"
value = "another-value"
cache = caches[DEFAULT_CACHE_ALIAS]
cache.set(key, value)
cache_key = cache.make_key(key)
self.assertIsNone(cache._expire_info[cache_key])
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
ALLOWED_HOSTS=['.example.com'],
)
class CacheUtils(SimpleTestCase):
"""TestCase for django.utils.cache functions."""
host = 'www.example.com'
path = '/cache/test/'
factory = RequestFactory(HTTP_HOST=host)
def tearDown(self):
cache.clear()
def _get_request_cache(self, method='GET', query_string=None, update_cache=None):
request = self._get_request(self.host, self.path,
method, query_string=query_string)
request._cache_update_cache = update_cache if update_cache else True
return request
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('*', ('Accept-Language', 'Cookie'), '*'),
('Accept-Language, Cookie', ('*',), '*'),
)
for initial_vary, newheaders, resulting_vary in headers:
with self.subTest(initial_vary=initial_vary, newheaders=newheaders):
response = HttpResponse()
if initial_vary is not None:
response.headers['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response.headers['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
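        # The generated key has the form
        # 'views.decorators.cache.cache_page.<key_prefix>.<method>.<URL hash>.<headers hash>';
        # the trailing d41d8... hash is the MD5 of an empty Vary header list.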
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
# A specified key_prefix is taken into account.
key_prefix = 'localprefix'
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
'views.decorators.cache.cache_page.localprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {'test': 1})
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# The querystring is taken into account.
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e'
)
def test_cache_key_varies_by_url(self):
"""
get_cache_key keys differ by fully-qualified URL instead of path
"""
request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com')
learn_cache_key(request1, HttpResponse())
request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com')
learn_cache_key(request2, HttpResponse())
self.assertNotEqual(get_cache_key(request1), get_cache_key(request2))
def test_learn_cache_key(self):
request = self.factory.head(self.path)
response = HttpResponse()
response.headers['Vary'] = 'Pony'
# Make sure that the Vary header is added to the key hash
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
def test_patch_cache_control(self):
tests = (
# Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts
(None, {'private': True}, {'private'}),
('', {'private': True}, {'private'}),
# no-cache.
('', {'no_cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}),
('', {'no-cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}),
('no-cache=Set-Cookie', {'no_cache': True}, {'no-cache'}),
('no-cache=Set-Cookie,no-cache=Link', {'no_cache': True}, {'no-cache'}),
('no-cache=Set-Cookie', {'no_cache': 'Link'}, {'no-cache=Set-Cookie', 'no-cache=Link'}),
(
'no-cache=Set-Cookie,no-cache=Link',
{'no_cache': 'Custom'},
{'no-cache=Set-Cookie', 'no-cache=Link', 'no-cache=Custom'},
),
# Test whether private/public attributes are mutually exclusive
('private', {'private': True}, {'private'}),
('private', {'public': True}, {'public'}),
('public', {'public': True}, {'public'}),
('public', {'private': True}, {'private'}),
('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}),
('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
)
cc_delim_re = re.compile(r'\s*,\s*')
for initial_cc, newheaders, expected_cc in tests:
with self.subTest(initial_cc=initial_cc, newheaders=newheaders):
response = HttpResponse()
if initial_cc is not None:
response.headers['Cache-Control'] = initial_cc
patch_cache_control(response, **newheaders)
parts = set(cc_delim_re.split(response.headers['Cache-Control']))
self.assertEqual(parts, expected_cc)
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix',
},
},
)
class PrefixedCacheUtils(CacheUtils):
pass
@override_settings(
CACHE_MIDDLEWARE_SECONDS=60,
CACHE_MIDDLEWARE_KEY_PREFIX='test',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
)
class CacheHEADTest(SimpleTestCase):
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
def _set_cache(self, request, msg):
return UpdateCacheMiddleware(lambda req: HttpResponse(msg))(request)
def test_head_caches_correctly(self):
test_content = 'test content'
request = self.factory.head(self.path)
request._cache_update_cache = True
self._set_cache(request, test_content)
request = self.factory.head(self.path)
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(test_content.encode(), get_cache_data.content)
def test_head_with_cached_get(self):
test_content = 'test content'
request = self.factory.get(self.path)
request._cache_update_cache = True
self._set_cache(request, test_content)
request = self.factory.head(self.path)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(test_content.encode(), get_cache_data.content)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
LANGUAGES=[
('en', 'English'),
('es', 'Spanish'),
],
)
class CacheI18nTest(SimpleTestCase):
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
@override_settings(USE_I18N=True, USE_TZ=False)
def test_cache_key_i18n_translation(self):
request = self.factory.get(self.path)
lang = translation.get_language()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
def check_accept_language_vary(self, accept_language, vary, reference_key):
request = self.factory.get(self.path)
request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language
request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
response = HttpResponse()
response.headers['Vary'] = vary
key = learn_cache_key(request, response)
key2 = get_cache_key(request)
self.assertEqual(key, reference_key)
self.assertEqual(key2, reference_key)
@override_settings(USE_I18N=True, USE_TZ=False)
def test_cache_key_i18n_translation_accept_language(self):
lang = translation.get_language()
self.assertEqual(lang, 'en')
request = self.factory.get(self.path)
request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
response = HttpResponse()
response.headers['Vary'] = 'accept-encoding'
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
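        # With USE_I18N, learn_cache_key() drops Accept-Language from the Vary
        # header list (the active language is already appended to the key), so
        # each of the variations below yields the same reference key.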
self.check_accept_language_vary(
'en-us',
'cookie, accept-language, accept-encoding',
key
)
self.check_accept_language_vary(
'en-US',
'cookie, accept-encoding, accept-language',
key
)
self.check_accept_language_vary(
'en-US,en;q=0.8',
'accept-encoding, accept-language, cookie',
key
)
self.check_accept_language_vary(
'en-US,en;q=0.8,ko;q=0.6',
'accept-language, cookie, accept-encoding',
key
)
self.check_accept_language_vary(
'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ',
'accept-encoding, cookie, accept-language',
key
)
self.check_accept_language_vary(
'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4',
'accept-language, accept-encoding, cookie',
key
)
self.check_accept_language_vary(
'ko;q=1.0,en;q=0.5',
'cookie, accept-language, accept-encoding',
key
)
self.check_accept_language_vary(
'ko, en',
'cookie, accept-encoding, accept-language',
key
)
self.check_accept_language_vary(
'ko-KR, en-US',
'accept-encoding, accept-language, cookie',
key
)
@override_settings(USE_I18N=False, USE_TZ=True)
def test_cache_key_i18n_timezone(self):
request = self.factory.get(self.path)
tz = timezone.get_current_timezone_name()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
@override_settings(USE_I18N=False)
def test_cache_key_no_i18n(self):
request = self.factory.get(self.path)
lang = translation.get_language()
tz = timezone.get_current_timezone_name()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_I18N=True,
)
def test_middleware(self):
def set_cache(request, lang, msg):
def get_response(req):
return HttpResponse(msg)
translation.activate(lang)
return UpdateCacheMiddleware(get_response)(request)
# cache with non empty request.GET
request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
# first access, cache must return None
self.assertIsNone(get_cache_data)
content = 'Check for cache with QUERY_STRING'
def get_response(req):
return HttpResponse(content)
UpdateCacheMiddleware(get_response)(request)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
# cache must return content
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
# different QUERY_STRING, cache must be empty
request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNone(get_cache_data)
# i18n tests
en_message = "Hello world!"
es_message = "Hola mundo!"
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, 'en', en_message)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
# The cache can be recovered
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, en_message.encode())
# change the session language and set content
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, 'es', es_message)
# change again the language
translation.activate('en')
# retrieve the content from cache
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertEqual(get_cache_data.content, en_message.encode())
# change again the language
translation.activate('es')
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertEqual(get_cache_data.content, es_message.encode())
# reset the language
translation.deactivate()
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
)
def test_middleware_doesnt_cache_streaming_response(self):
request = self.factory.get(self.path)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNone(get_cache_data)
def get_stream_response(req):
return StreamingHttpResponse(['Check for cache with streaming content.'])
UpdateCacheMiddleware(get_stream_response)(request)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNone(get_cache_data)
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix'
},
},
)
class PrefixedCacheI18nTest(CacheI18nTest):
pass
def hello_world_view(request, value):
return HttpResponse('Hello World %s' % value)
def csrf_view(request):
return HttpResponse(csrf(request)['csrf_token'])
@override_settings(
CACHE_MIDDLEWARE_ALIAS='other',
CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
CACHE_MIDDLEWARE_SECONDS=30,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other',
'TIMEOUT': '1',
},
},
)
class CacheMiddlewareTest(SimpleTestCase):
factory = RequestFactory()
def setUp(self):
self.default_cache = caches['default']
self.other_cache = caches['other']
def tearDown(self):
self.default_cache.clear()
self.other_cache.clear()
super().tearDown()
def test_constructor(self):
"""
Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as
Middleware vs. usage of CacheMiddleware as view decorator and setting attributes
appropriately.
"""
# If only one argument is passed in construction, it's being used as
# middleware.
middleware = CacheMiddleware(empty_response)
# Now test object attributes against values defined in setUp above
self.assertEqual(middleware.cache_timeout, 30)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
self.assertEqual(middleware.cache, self.other_cache)
# If more arguments are being passed in construction, it's being used
# as a decorator. First, test with "defaults":
as_view_decorator = CacheMiddleware(empty_response, cache_alias=None, key_prefix=None)
self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30
self.assertEqual(as_view_decorator.key_prefix, '')
# Value of DEFAULT_CACHE_ALIAS from django.core.cache
self.assertEqual(as_view_decorator.cache_alias, 'default')
self.assertEqual(as_view_decorator.cache, self.default_cache)
# Next, test with custom values:
as_view_decorator_with_custom = CacheMiddleware(
hello_world_view, cache_timeout=60, cache_alias='other', key_prefix='foo'
)
self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')
self.assertEqual(as_view_decorator_with_custom.cache, self.other_cache)
def test_update_cache_middleware_constructor(self):
middleware = UpdateCacheMiddleware(empty_response)
self.assertEqual(middleware.cache_timeout, 30)
self.assertIsNone(middleware.page_timeout)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
self.assertEqual(middleware.cache, self.other_cache)
def test_fetch_cache_middleware_constructor(self):
middleware = FetchFromCacheMiddleware(empty_response)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
self.assertEqual(middleware.cache, self.other_cache)
def test_middleware(self):
middleware = CacheMiddleware(hello_world_view)
prefix_middleware = CacheMiddleware(hello_world_view, key_prefix='prefix1')
timeout_middleware = CacheMiddleware(hello_world_view, cache_timeout=1)
request = self.factory.get('/view/')
# Put the request through the request middleware
result = middleware.process_request(request)
self.assertIsNone(result)
response = hello_world_view(request, '1')
# Now put the response through the response middleware
response = middleware.process_response(request, response)
# Repeating the request should result in a cache hit
result = middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b'Hello World 1')
# The same request through a different middleware won't hit
result = prefix_middleware.process_request(request)
self.assertIsNone(result)
# The same request with a timeout _will_ hit
result = timeout_middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b'Hello World 1')
def test_view_decorator(self):
# decorate the same view with different cache decorators
default_view = cache_page(3)(hello_world_view)
default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view)
explicit_default_view = cache_page(3, cache='default')(hello_world_view)
explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view)
other_view = cache_page(1, cache='other')(hello_world_view)
other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view)
request = self.factory.get('/view/')
# Request the view once
response = default_view(request, '1')
self.assertEqual(response.content, b'Hello World 1')
# Request again -- hit the cache
response = default_view(request, '2')
self.assertEqual(response.content, b'Hello World 1')
# Requesting the same view with the explicit cache should yield the same result
response = explicit_default_view(request, '3')
self.assertEqual(response.content, b'Hello World 1')
# Requesting with a prefix will hit a different cache key
response = explicit_default_with_prefix_view(request, '4')
self.assertEqual(response.content, b'Hello World 4')
# Hitting the same view again gives a cache hit
response = explicit_default_with_prefix_view(request, '5')
self.assertEqual(response.content, b'Hello World 4')
# And going back to the implicit cache will hit the same cache
response = default_with_prefix_view(request, '6')
self.assertEqual(response.content, b'Hello World 4')
# Requesting from an alternate cache won't hit cache
response = other_view(request, '7')
self.assertEqual(response.content, b'Hello World 7')
# But a repeated hit will hit cache
response = other_view(request, '8')
self.assertEqual(response.content, b'Hello World 7')
# And prefixing the alternate cache yields yet another cache entry
response = other_with_prefix_view(request, '9')
self.assertEqual(response.content, b'Hello World 9')
# But if we wait a couple of seconds...
time.sleep(2)
# ... the default cache will still hit
caches['default']
response = default_view(request, '11')
self.assertEqual(response.content, b'Hello World 1')
# ... the default cache with a prefix will still hit
response = default_with_prefix_view(request, '12')
self.assertEqual(response.content, b'Hello World 4')
# ... the explicit default cache will still hit
response = explicit_default_view(request, '13')
self.assertEqual(response.content, b'Hello World 1')
# ... the explicit default cache with a prefix will still hit
response = explicit_default_with_prefix_view(request, '14')
self.assertEqual(response.content, b'Hello World 4')
# .. but a rapidly expiring cache won't hit
response = other_view(request, '15')
self.assertEqual(response.content, b'Hello World 15')
# .. even if it has a prefix
response = other_with_prefix_view(request, '16')
self.assertEqual(response.content, b'Hello World 16')
def test_cache_page_timeout(self):
# Page timeout takes precedence over the "max-age" section of the
# "Cache-Control".
tests = [
(1, 3), # max_age < page_timeout.
(3, 1), # max_age > page_timeout.
]
for max_age, page_timeout in tests:
with self.subTest(max_age=max_age, page_timeout=page_timeout):
view = cache_page(timeout=page_timeout)(
cache_control(max_age=max_age)(hello_world_view)
)
request = self.factory.get('/view/')
response = view(request, '1')
self.assertEqual(response.content, b'Hello World 1')
time.sleep(1)
response = view(request, '2')
self.assertEqual(
response.content,
b'Hello World 1' if page_timeout > max_age else b'Hello World 2',
)
cache.clear()
def test_cached_control_private_not_cached(self):
"""Responses with 'Cache-Control: private' are not cached."""
view_with_private_cache = cache_page(3)(cache_control(private=True)(hello_world_view))
request = self.factory.get('/view/')
response = view_with_private_cache(request, '1')
self.assertEqual(response.content, b'Hello World 1')
response = view_with_private_cache(request, '2')
self.assertEqual(response.content, b'Hello World 2')
def test_sensitive_cookie_not_cached(self):
"""
Django must prevent caching of responses that set a user-specific (and
maybe security sensitive) cookie in response to a cookie-less request.
"""
request = self.factory.get('/view/')
csrf_middleware = CsrfViewMiddleware(csrf_view)
csrf_middleware.process_view(request, csrf_view, (), {})
cache_middleware = CacheMiddleware(csrf_middleware)
self.assertIsNone(cache_middleware.process_request(request))
cache_middleware(request)
# Inserting a CSRF cookie in a cookie-less request prevented caching.
self.assertIsNone(cache_middleware.process_request(request))
def test_304_response_has_http_caching_headers_but_not_cached(self):
original_view = mock.Mock(return_value=HttpResponseNotModified())
view = cache_page(2)(original_view)
request = self.factory.get('/view/')
# The view shouldn't be cached on the second call.
view(request).close()
response = view(request)
response.close()
self.assertEqual(original_view.call_count, 2)
self.assertIsInstance(response, HttpResponseNotModified)
self.assertIn('Cache-Control', response)
self.assertIn('Expires', response)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
)
class TestWithTemplateResponse(SimpleTestCase):
"""
Tests various headers w/ TemplateResponse.
Most are probably redundant since they manipulate the same object
anyway but the ETag header is 'special' because it relies on the
content being complete (which is not necessarily always the case
with a TemplateResponse)
"""
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
)
for initial_vary, newheaders, resulting_vary in headers:
with self.subTest(initial_vary=initial_vary, newheaders=newheaders):
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
if initial_vary is not None:
response.headers['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response.headers['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
key_prefix = 'localprefix'
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
)
# A specified key_prefix is taken into account.
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
'views.decorators.cache.cache_page.localprefix.GET.'
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
)
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {'test': 1})
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# The querystring is taken into account.
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e'
)
class TestMakeTemplateFragmentKey(SimpleTestCase):
def test_without_vary_on(self):
key = make_template_fragment_key('a.fragment')
self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e')
def test_with_one_vary_on(self):
key = make_template_fragment_key('foo', ['abc'])
self.assertEqual(key, 'template.cache.foo.493e283d571a73056196f1a68efd0f66')
def test_with_many_vary_on(self):
key = make_template_fragment_key('bar', ['abc', 'def'])
self.assertEqual(key, 'template.cache.bar.17c1a507a0cb58384f4c639067a93520')
def test_proper_escaping(self):
key = make_template_fragment_key('spam', ['abc:def%'])
self.assertEqual(key, 'template.cache.spam.06c8ae8e8c430b69fb0a6443504153dc')
def test_with_ints_vary_on(self):
key = make_template_fragment_key('foo', [1, 2, 3, 4, 5])
self.assertEqual(key, 'template.cache.foo.7ae8fd2e0d25d651c683bdeebdb29461')
def test_with_unicode_vary_on(self):
key = make_template_fragment_key('foo', ['42º', '😀'])
self.assertEqual(key, 'template.cache.foo.7ced1c94e543668590ba39b3c08b0237')
def test_long_vary_on(self):
key = make_template_fragment_key('foo', ['x' * 10000])
self.assertEqual(key, 'template.cache.foo.3670b349b5124aa56bdb50678b02b23a')
class CacheHandlerTest(SimpleTestCase):
def test_same_instance(self):
"""
Attempting to retrieve the same alias should yield the same instance.
"""
cache1 = caches['default']
cache2 = caches['default']
self.assertIs(cache1, cache2)
def test_per_thread(self):
"""
Requesting the same alias from separate threads should yield separate
instances.
"""
c = []
def runner():
c.append(caches['default'])
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertIsNot(c[0], c[1])
def test_nonexistent_alias(self):
msg = "The connection 'nonexistent' doesn't exist."
with self.assertRaisesMessage(InvalidCacheBackendError, msg):
caches['nonexistent']
def test_nonexistent_backend(self):
test_caches = CacheHandler({
'invalid_backend': {
'BACKEND': 'django.nonexistent.NonexistentBackend',
},
})
msg = (
"Could not find backend 'django.nonexistent.NonexistentBackend': "
"No module named 'django.nonexistent'"
)
with self.assertRaisesMessage(InvalidCacheBackendError, msg):
test_caches['invalid_backend']
| bsd-3-clause | 2,060,232,267,471,206,400 | 39.023033 | 115 | 0.620804 | false |
ingadhoc/website | payment_todopago/todopago/test/GetCredentialsTest.py | 1 | 3238 | # pylint: disable-all
# flake8: noqa
import sys
sys.path.append("..")
from todopagoconnector import TodoPagoConnector
from CredentialsData import CredentialsData
import unittest
from unittest import TestCase
if sys.version_info[0] >= 3:
from unittest.mock import patch, Mock
else:
from mock import patch, Mock, MagicMock
class CredentialsTest(TestCase):
@patch('todopagoconnector.TodoPagoConnector')
def test_get_credentials_ok(self, MockTodoPagoConnector):
j_header_http = {
'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
}
MTPConnector = MockTodoPagoConnector(j_header_http, "test")
instanceCredential = CredentialsData()
MTPConnector.getCredentials.return_value = instanceCredential.get_credentials_ok_response()
UserAccount = {
'USUARIO': "[email protected]",
'CLAVE': "1970Stk!"
}
responseGetCredential = MTPConnector.getCredentials(UserAccount)
self.assertEquals(
responseGetCredential['Credentials']['resultado']
['codigoResultado'],
0)
self.assertTrue(len(responseGetCredential['Credentials']['merchantId']))
self.assertTrue(len(responseGetCredential['Credentials']['APIKey']))
@patch('todopagoconnector.TodoPagoConnector')
def test_get_credentials_user_empty(self, MockTodoPagoConnector):
j_header_http = {
'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
}
MTPConnector = MockTodoPagoConnector(j_header_http, "test")
instanceCredential = CredentialsData()
MTPConnector.getCredentials.return_value = instanceCredential.get_credentials_wrong_user_response()
UserAccount = {
'USUARIO': "[email protected]",
'CLAVE': "pass123"
}
responseGetCredential = MTPConnector.getCredentials(UserAccount)
self.assertEquals(
responseGetCredential['Credentials']['resultado']
['codigoResultado'],
1050)
self.assertFalse(
len(responseGetCredential['Credentials']['merchantId']))
self.assertFalse(len(responseGetCredential['Credentials']['APIKey']))
@patch('todopagoconnector.TodoPagoConnector')
def test_get_credentials_pass_empty(self, MockTodoPagoConnector):
j_header_http = {
'Authorization': 'TODOPAGO f3d8b72c94ab4a06be2ef7c95490f7d3'
}
MTPConnector = MockTodoPagoConnector(j_header_http, "test")
instanceCredential = CredentialsData()
MTPConnector.getCredentials.return_value = instanceCredential.get_credentials_wrong_password_response()
UserAccount = {
'USUARIO': "[email protected]",
'CLAVE': ""
}
responseGetCredential = MTPConnector.getCredentials(UserAccount)
self.assertEquals(
responseGetCredential['Credentials']['resultado']
['codigoResultado'],
1055)
self.assertFalse(
len(responseGetCredential['Credentials']['merchantId']))
self.assertFalse(len(responseGetCredential['Credentials']['APIKey']))
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | 5,073,129,623,555,999,000 | 32.040816 | 111 | 0.662446 | false |
google/autocjk | src/model.py | 1 | 14838 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GAN for generating CJK characters.
The vast majority of this code is adapted from the pix2pix GAN described in
https://www.tensorflow.org/tutorials/generative/pix2pix. Changes include the
specific tensor dimensions, some tuning of magic numbers, and some changes to
loss functions.
TODO(ambuc): This file has type annotations because they're useful for a human
reader, but the build system doesn't yet enforce them with a strictly-typed
python build rule.
"""
import time
from typing import List, Text, Tuple
from IPython import display
import matplotlib.pyplot as plt
import tensorflow as tf
_LAMBDA = 100
def _load_image(filename: Text) -> List[List[tf.Tensor]]:
"""Given the filename of a PNG, returns a list of three tensors: a, b, a+b.
Args:
filename: Path to a file. The file must be a PNG and greyscale and 256x256.
Returns:
A list of tensors: a, b, and a+b.
"""
image = tf.io.read_file(filename)
image = tf.image.decode_png(image, channels=1) # greyscale
# Our images have a width which is divisible by three.
w = tf.shape(image)[1] // 3
return [
tf.cast(image[:, n * w:(n + 1) * w, :], tf.float32) for n in range(3)
]
def make_datasets(files_glob: Text) -> Tuple[tf.data.Dataset, tf.data.Dataset]:
"""Makes the train/test datasets.
Args:
files_glob: A glob (like "/tmp/folder/*.png") of all the input images.
Returns:
A pair of train, test datasets of type tf.data.Dataset.
"""
ds = tf.data.Dataset.list_files(files_glob).map(
_load_image, num_parallel_calls=tf.data.AUTOTUNE).shuffle(400).batch(1)
train_dataset_a = ds.shard(num_shards=3, index=0)
train_dataset_b = ds.shard(num_shards=3, index=1)
train_ds = train_dataset_a.concatenate(train_dataset_b)
test_ds = ds.shard(num_shards=3, index=2)
return train_ds, test_ds
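# Illustrative usage sketch (not part of the original file; the glob below is a
# placeholder path). Each PNG is expected to be a greyscale 768x256 strip of
# [a | b | a+b] tiles, as split up by _load_image():
#   train_ds, test_ds = make_datasets("/tmp/images/*.png")
#   for a, b, ab in train_ds.take(1):
#       print(a.shape, b.shape, ab.shape)  # each is (1, 256, 256, 1)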
def _downsample(filters: int,
size: int,
apply_batchnorm: bool = True) -> tf.keras.Sequential:
"""Downsampler from https://www.tensorflow.org/tutorials/generative/pix2pix#build_the_generator.
Args:
filters: The number of filters.
size: The size of the input tensor width at this step.
apply_batchnorm: Whether or not to apply batch normalization. Probably
should be false on the input layer, and true elsewhere.
Returns:
A sequential model.
"""
initializer = tf.random_normal_initializer(0., 0.02)
result = tf.keras.Sequential()
result.add(
tf.keras.layers.Conv2D(filters,
size,
strides=2,
padding='same',
kernel_initializer=initializer,
use_bias=False))
if apply_batchnorm:
result.add(tf.keras.layers.BatchNormalization())
result.add(tf.keras.layers.LeakyReLU())
return result
def _upsample(filters: int,
size: int,
apply_dropout: bool = False) -> tf.keras.Sequential:
"""Upsampler from https://www.tensorflow.org/tutorials/generative/pix2pix#build_the_generator.
Args:
filters: The number of filters.
size: The size of the input tensor width at this step.
apply_dropout: Whether or not to apply dropout. Probably should be true for
the first few layers and false elsewhere.
Returns:
A sequential model.
"""
initializer = tf.random_normal_initializer(0., 0.02)
result = tf.keras.Sequential()
result.add(
tf.keras.layers.Conv2DTranspose(filters,
size,
strides=2,
padding='same',
kernel_initializer=initializer,
use_bias=False))
result.add(tf.keras.layers.BatchNormalization())
if apply_dropout:
result.add(tf.keras.layers.Dropout(0.5))
result.add(tf.keras.layers.ReLU())
return result
def make_generator() -> tf.keras.Model:
"""Creates a generator.
99% of this is copied directly from
https://www.tensorflow.org/tutorials/generative/pix2pix#build_the_generator,
except for the input shape (now two channels, two greyscale images instead of
one RGB image) and output shape (one channel, one greyscale image instead of
one RGB image).
Returns:
a tf.keras.Model which returns a 256x256x1 tensor.
"""
inputs = tf.keras.layers.Input(shape=[256, 256, 2])
up_stack = [
_upsample(512, 4, apply_dropout=True), # (bs, 2, 2, 1024)
_upsample(512, 4, apply_dropout=True), # (bs, 4, 4, 1024)
_upsample(512, 4, apply_dropout=True), # (bs, 8, 8, 1024)
_upsample(512, 4), # (bs, 16, 16, 1024)
_upsample(256, 4), # (bs, 32, 32, 512)
_upsample(128, 4), # (bs, 64, 64, 256)
_upsample(64, 4), # (bs, 128, 128, 128)
]
x = inputs
skips = []
for down in [
_downsample(64, 4, apply_batchnorm=False), # (bs, 128, 128, 64)
_downsample(128, 4), # (bs, 64, 64, 128)
_downsample(256, 4), # (bs, 32, 32, 256)
_downsample(512, 4), # (bs, 16, 16, 512)
_downsample(512, 4), # (bs, 8, 8, 512)
_downsample(512, 4), # (bs, 4, 4, 512)
_downsample(512, 4), # (bs, 2, 2, 512)
_downsample(512, 4), # (bs, 1, 1, 512)
]:
x = down(x)
skips.append(x)
skips = reversed(skips[:-1])
# Upsampling and establishing the skip connections
for up, skip in zip(up_stack, skips):
x = up(x)
x = tf.keras.layers.Concatenate()([x, skip])
last = tf.keras.layers.Conv2DTranspose(
1, # one output channel, i.e. greyscale
4,
strides=2,
padding='same',
kernel_initializer=tf.random_normal_initializer(0., 0.02),
activation='tanh') # (bs, 256, 256, 3)
x = last(x)
return tf.keras.Model(inputs=inputs, outputs=x)
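# Quick shape check for the generator (illustrative sketch, not part of the
# original file): a batch with two stacked 256x256 greyscale channels maps to
# one 256x256 greyscale output.
#   generator = make_generator()
#   fake = generator(tf.zeros([1, 256, 256, 2]), training=False)
#   assert fake.shape == (1, 256, 256, 1)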
def generator_loss(loss_object: tf.keras.losses.Loss, disc_generated_output,
gen_output, target):
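    """Returns (total_gen_loss, gan_loss, l1_loss) for the generator.

    The total loss is the adversarial cross-entropy term plus _LAMBDA times
    the mean absolute (L1) error between the generated image and the target.
    """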
gan_loss = loss_object(tf.ones_like(disc_generated_output),
disc_generated_output)
# mean absolute error
l1_loss = tf.reduce_mean(tf.abs(target - gen_output))
total_gen_loss = gan_loss + (_LAMBDA * l1_loss)
return total_gen_loss, gan_loss, l1_loss
def make_discriminator() -> tf.keras.Model:
"""Returns a discriminator.
This is 99% the same as
https://www.tensorflow.org/tutorials/generative/pix2pix#build_the_discriminator,
except that the shape of the input and output tensors are different.
Returns:
A tf.keras.model which accepts a 256x256x2 tensor and compares it to a
target 256x256x1 tensor.
"""
initializer = tf.random_normal_initializer(0., 0.02)
input_img = tf.keras.layers.Input(shape=[256, 256, 2], name='input_image')
target_img = tf.keras.layers.Input(shape=[256, 256, 1],
name='target_image')
x = tf.keras.layers.concatenate([input_img,
target_img]) # (bs, 256, 256, channels*2)
down1 = _downsample(64, 4, False)(x) # (bs, 128, 128, 64)
down2 = _downsample(128, 4)(down1) # (bs, 64, 64, 128)
down3 = _downsample(256, 4)(down2) # (bs, 32, 32, 256)
zero_pad1 = tf.keras.layers.ZeroPadding2D()(down3) # (bs, 34, 34, 256)
conv = tf.keras.layers.Conv2D(512,
4,
strides=1,
kernel_initializer=initializer,
use_bias=False)(
zero_pad1) # (bs, 31, 31, 512)
batchnorm1 = tf.keras.layers.BatchNormalization()(conv)
leaky_relu = tf.keras.layers.LeakyReLU()(batchnorm1)
zero_pad2 = tf.keras.layers.ZeroPadding2D()(
leaky_relu) # (bs, 33, 33, 512)
last = tf.keras.layers.Conv2D(1,
4,
strides=1,
kernel_initializer=initializer)(
zero_pad2) # (bs, 30, 30, 1)
return tf.keras.Model(inputs=[input_img, target_img], outputs=last)
def discriminator_loss(loss_object: tf.keras.losses.Loss, disc_real_output,
disc_generated_output) -> float:
"""Returns discriminator loss.
100% the same as
https://www.tensorflow.org/tutorials/generative/pix2pix#build_the_discriminator.
Args:
loss_object: A reusable loss_object of type
tf.keras.losses.BinaryCrossentropy.
disc_real_output: A set of real images.
disc_generated_output: A set of generator images.
Returns:
The sum of some loss functions.
"""
real_loss = loss_object(tf.ones_like(disc_real_output), disc_real_output)
generated_loss = loss_object(tf.zeros_like(disc_generated_output),
disc_generated_output)
return real_loss + generated_loss
def generate_images(model: tf.keras.Model, input_a: tf.Tensor,
input_b: tf.Tensor, target: tf.Tensor) -> None:
"""In Colab, prints [a | b | real(a,b) | predicted(a,b)] to the display.
Args:
model: The generator to use.
input_a: the LHS image.
input_b: the RHS image.
target: The real(a,b) composition.
"""
x = tf.concat([input_a, input_b], 3)
x = tf.reshape(x, [256, 256, 2])
prediction = model(x[tf.newaxis, ...], training=True)
images = [input_a[0], input_b[0], target[0], prediction[0]]
fig, axes = plt.subplots(1, 4)
titles = [
'Input Image A', 'Input Image B', 'Ground Truth', 'Predicted Image'
]
for image, axis, title in zip(images, axes, titles):
axis.set_title(title)
axis.imshow(image[:, :, 0])
axis.axis('off')
fig.show()
@tf.function
def train_step(generator: tf.keras.Model,
generator_optimizer: tf.keras.optimizers.Optimizer,
discriminator: tf.keras.Model,
discriminator_optimizer: tf.keras.optimizers.Optimizer,
loss_object: tf.keras.losses.Loss, inp_a: tf.Tensor,
inp_b: tf.Tensor, target: tf.Tensor, epoch: int,
summary_writer: tf.summary.SummaryWriter) -> None:
"""Trains the models for one (1) epoch.
See https://www.tensorflow.org/tutorials/generative/pix2pix#training.
Args:
generator: A generator model,
generator_optimizer: and an optimizer for the generator.
discriminator: A discriminator model,
discriminator_optimizer: and an optimizer for the generator.
loss_object: A reusable BinaryCrossentropy object.
inp_a: A full-width image of the left-most component.
inp_b: A full-width image of the right-most component.
target: The human-authored image of the a+b character.
epoch: The index of the epoch we're in.
summary_writer: A SummaryWriter object for writing.... summaries.
"""
with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
inp_x = tf.concat([inp_a, inp_b], 3)
gen_output = generator(inp_x, training=True)
disc_real_output = discriminator([inp_x, target], training=True)
disc_generated_output = discriminator([inp_x, gen_output],
training=True)
gen_total_loss, gen_gan_loss, gen_l1_loss = generator_loss(
loss_object, disc_generated_output, gen_output, target)
disc_loss = discriminator_loss(loss_object, disc_real_output,
disc_generated_output)
# TODO(ambuc): Should this simply be gen_l1_loss?
generator_gradients = gen_tape.gradient(gen_total_loss,
generator.trainable_variables)
discriminator_gradients = disc_tape.gradient(
disc_loss, discriminator.trainable_variables)
generator_optimizer.apply_gradients(
zip(generator_gradients, generator.trainable_variables))
discriminator_optimizer.apply_gradients(
zip(discriminator_gradients, discriminator.trainable_variables))
with summary_writer.as_default():
tf.summary.scalar('gen_total_loss', gen_total_loss, step=epoch)
tf.summary.scalar('gen_gan_loss', gen_gan_loss, step=epoch)
tf.summary.scalar('gen_l1_loss', gen_l1_loss, step=epoch)
tf.summary.scalar('disc_loss', disc_loss, step=epoch)
def fit(generator: tf.keras.Model,
generator_optimizer: tf.keras.optimizers.Optimizer,
discriminator: tf.keras.Model,
discriminator_optimizer: tf.keras.optimizers.Optimizer,
loss_object: tf.keras.losses.Loss, train_ds: tf.data.Dataset,
epochs: int, test_ds: tf.data.Dataset, checkpoint: tf.train.Checkpoint,
checkpoint_prefix: Text,
summary_writer: tf.summary.SummaryWriter) -> None:
"""Runs for |epochs| and trains the models.
Args:
generator: A generator model,
generator_optimizer: and an optimizer for the generator.
discriminator: A discriminator model,
discriminator_optimizer: and an optimizer for the generator.
loss_object: A reusable BinaryCrossentropy object.
train_ds:
epochs: The number of epochs to train for.
test_ds:
checkpoint:
checkpoint_prefix:
summary_writer: A SummaryWriter object for writing.... summaries.
"""
for epoch in range(epochs):
start = time.time()
display.clear_output(wait=True)
for a, b, ab in test_ds.take(1):
generate_images(generator, a, b, ab)
print('Epoch: ', epoch)
for n, (inp_a, inp_b, target) in train_ds.enumerate():
print('.', end='')
if (n + 1) % 100 == 0:
print()
train_step(generator, generator_optimizer, discriminator,
discriminator_optimizer, loss_object, inp_a, inp_b,
target, epoch, summary_writer)
print()
checkpoint.save(file_prefix=checkpoint_prefix)
print('Time taken for epoch {} is {} sec\n'.format(
epoch + 1,
time.time() - start))
checkpoint.save(file_prefix=checkpoint_prefix)
| apache-2.0 | 932,325,098,372,212,200 | 35.278729 | 100 | 0.612616 | false |
wdv4758h/ZipPy | edu.uci.python.benchmark/src/benchmarks/python-graph/tests/unittests-sorting.py | 1 | 3085 | # Copyright (c) Pedro Matiello <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
"""
Unittests for graph.algorithms.sorting
"""
import unittest
import pygraph.classes
from pygraph.algorithms.sorting import topological_sorting
from pygraph.algorithms.searching import depth_first_search
from sys import getrecursionlimit
import testlib
class test_topological_sorting(unittest.TestCase):
def test_topological_sorting_on_tree(self):
gr = testlib.new_graph()
st, pre, post = depth_first_search(gr)
tree = pygraph.classes.digraph.digraph()
for each in st:
if st[each]:
if (each not in tree.nodes()):
tree.add_node(each)
if (st[each] not in tree.nodes()):
tree.add_node(st[each])
tree.add_edge((st[each], each))
ts = topological_sorting(tree)
for each in ts:
if (st[each]):
assert ts.index(each) > ts.index(st[each])
def test_topological_sorting_on_digraph(self):
def is_ordered(node, list):
# Has parent on list
for each in list:
if gr.has_edge((each, node)):
return True
# Has no possible ancestors on list
st, pre, post = depth_first_search(gr, node)
for each in list:
if (each in st):
return False
return True
gr = testlib.new_digraph()
ts = topological_sorting(gr)
while (ts):
x = ts.pop()
assert is_ordered(x, ts)
def test_topological_sort_on_very_deep_graph(self):
gr = pygraph.classes.graph.graph()
gr.add_nodes(range(0,20001))
for i in range(0,20000):
gr.add_edge((i,i+1))
recursionlimit = getrecursionlimit()
topological_sorting(gr)
assert getrecursionlimit() == recursionlimit
if __name__ == "__main__":
unittest.main() | bsd-3-clause | -1,540,136,428,661,142,300 | 33.288889 | 67 | 0.628525 | false |
mahmoud/wapiti | wapiti/operations/feedback.py | 1 | 1807 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from base import QueryOperation
from params import SingleParam, StaticParam
from utils import OperationExample
#class GetFeedbackV4(QueryOperation):
# """
# This API is no longer available (on en or de wikipedia). As of
# 3/9/2013, this API does not even appear in the documentation at:
# http://en.wikipedia.org/w/api.php
# """
# field_prefix = 'af'
# input_field = SingleParam('pageid')
# fields = [StaticParam('list', 'articlefeedback')]
# output_type = list
#
# def extract_results(self, query_resp):
# ret = query_resp['articlefeedback'][0].get('ratings', [])
# return ret
_FV5_KNOWN_FILTERS = ['*', 'featured', 'unreviewed', 'helpful', 'unhelpful',
'flagged', 'useful', 'resolved', 'noaction',
'inappropriate', 'archived', 'allcomment', 'hidden',
'requested', 'declined', 'oversighted', 'all']
class GetFeedbackV5(QueryOperation):
"""
article feedback v5 breaks standards in a couple ways.
* the various v5 APIs use different prefixes (af/afvf)
* it doesn't put its results under 'query', requiring a custom
post_process_response()
"""
field_prefix = 'afvf'
input_field = SingleParam('pageid')
fields = [StaticParam('list', 'articlefeedbackv5-view-feedback'),
SingleParam('filter', default='featured')]
output_type = list
examples = [OperationExample('604727')]
def post_process_response(self, response):
if not response.results:
return {}
return dict(response.results)
def extract_results(self, query_resp):
count = query_resp['articlefeedbackv5-view-feedback']['count']
return ['TODO'] * int(count)
| bsd-3-clause | -2,002,363,507,429,485,600 | 33.75 | 76 | 0.630327 | false |
irinabov/debian-qpid-python | qpid/selector.py | 2 | 6778 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time, errno, os, atexit, traceback
from compat import select, SelectError, set, selectable_waiter, format_exc
from threading import Thread, Lock
from logging import getLogger
from qpid.messaging import InternalError
def _stack(skip=0):
return ("".join(traceback.format_stack()[:-(1+skip)])).strip()
class SelectorStopped(InternalError):
def __init__(self, msg, where=None):
InternalError.__init__(self, text=msg)
self.where = _stack(1)
def _check(ex, skip=0):
if ex:
log.error("illegal use of qpid.messaging at:\n%s\n%s" % (_stack(1), ex))
where = getattr(ex, 'where')
if where:
log.error("qpid.messaging was previously stopped at:\n%s\n%s" % (where, ex))
raise ex
log = getLogger("qpid.messaging")
class Acceptor:
def __init__(self, sock, handler):
self.sock = sock
self.handler = handler
def fileno(self):
return self.sock.fileno()
def reading(self):
return True
def writing(self):
return False
def readable(self):
sock, addr = self.sock.accept()
self.handler(sock)
class Selector:
lock = Lock()
DEFAULT = None
_current_pid = None
@staticmethod
def default():
Selector.lock.acquire()
try:
if Selector.DEFAULT is None or Selector._current_pid != os.getpid():
# If we forked, mark the existing Selector dead.
if Selector.DEFAULT is not None:
log.warning("process forked, child must not use parent qpid.messaging")
Selector.DEFAULT.dead(SelectorStopped("forked child using parent qpid.messaging"))
sel = Selector()
sel.start()
atexit.register(sel.stop)
Selector.DEFAULT = sel
Selector._current_pid = os.getpid()
return Selector.DEFAULT
finally:
Selector.lock.release()
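  # Illustrative usage sketch (not part of the original file): callers normally
  # obtain the shared instance and register selectables with it, e.g.
  #   sel = Selector.default()
  #   sel.register(my_selectable)   # my_selectable is a placeholder object
  #   sel.modify(my_selectable)     # after its reading()/writing() state changes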
def __init__(self):
self.selectables = set()
self.reading = set()
self.writing = set()
self.waiter = selectable_waiter()
self.reading.add(self.waiter)
self.stopped = False
self.exception = None
def wakeup(self):
_check(self.exception)
self.waiter.wakeup()
def register(self, selectable):
self.selectables.add(selectable)
self.modify(selectable)
def _update(self, selectable):
if selectable.reading():
self.reading.add(selectable)
else:
self.reading.discard(selectable)
if selectable.writing():
self.writing.add(selectable)
else:
self.writing.discard(selectable)
return selectable.timing()
def modify(self, selectable):
self._update(selectable)
self.wakeup()
def unregister(self, selectable):
self.reading.discard(selectable)
self.writing.discard(selectable)
self.selectables.discard(selectable)
self.wakeup()
def start(self):
_check(self.exception)
self.thread = Thread(target=self.run)
self.thread.setDaemon(True)
self.thread.start();
def run(self):
try:
while not self.stopped and not self.exception:
wakeup = None
for sel in self.selectables.copy():
t = self._update(sel)
if t is not None:
if wakeup is None:
wakeup = t
else:
wakeup = min(wakeup, t)
rd = []
wr = []
ex = []
while True:
try:
if wakeup is None:
timeout = None
else:
timeout = max(0, wakeup - time.time())
rd, wr, ex = select(self.reading, self.writing, (), timeout)
break
except SelectError, e:
# Repeat the select call if we were interrupted.
if e[0] == errno.EINTR:
continue
else:
# unrecoverable: promote to outer try block
raise
for sel in wr:
if sel.writing():
sel.writeable()
for sel in rd:
if sel.reading():
sel.readable()
now = time.time()
for sel in self.selectables.copy():
w = sel.timing()
if w is not None and now > w:
sel.timeout()
except Exception, e:
log.error("qpid.messaging thread died: %s" % e)
self.exception = SelectorStopped(str(e))
self.exception = self.exception or self.stopped
self.dead(self.exception or SelectorStopped("qpid.messaging thread died: reason unknown"))
def stop(self, timeout=None):
"""Stop the selector and wait for it's thread to exit. It cannot be re-started"""
if self.thread and not self.stopped:
self.stopped = SelectorStopped("qpid.messaging thread has been stopped")
self.wakeup()
self.thread.join(timeout)
def dead(self, e):
"""Mark the Selector as dead if it is stopped for any reason. Ensure there any future
attempt to use the selector or any of its connections will throw an exception.
"""
self.exception = e
try:
for sel in self.selectables.copy():
c = sel.connection
for ssn in c.sessions.values():
for l in ssn.senders + ssn.receivers:
disable(l, self.exception)
disable(ssn, self.exception)
disable(c, self.exception)
except Exception, e:
log.error("error stopping qpid.messaging (%s)\n%s", self.exception, format_exc())
try:
self.waiter.close()
except Exception, e:
log.error("error stopping qpid.messaging (%s)\n%s", self.exception, format_exc())
# Disable an object to avoid hangs due to forked mutex locks or a stopped selector thread
import inspect
def disable(obj, exception):
assert(exception)
# Replace methods to raise exception or be a no-op
for m in inspect.getmembers(
obj, predicate=lambda m: inspect.ismethod(m) and not inspect.isbuiltin(m)):
name = m[0]
if name in ["close", "detach", "detach_all"]: # No-ops for these
setattr(obj, name, lambda *args, **kwargs: None)
else: # Raise exception for all others
setattr(obj, name, lambda *args, **kwargs: _check(exception, 1))
| apache-2.0 | -213,931,680,745,974,880 | 29.949772 | 94 | 0.638241 | false |
django-inplaceedit/django-inplaceedit | inplaceeditform/settings.py | 1 | 2626 | # -*- coding: utf-8 -*-
# Copyright (c) 2010-2013 by Yaco Sistemas <[email protected]>
# 2015 by Pablo Martín <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this programe. If not, see <http://www.gnu.org/licenses/>.
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
INPLACEEDIT_EDIT_EMPTY_VALUE = (getattr(settings, 'INPLACEEDIT_EDIT_EMPTY_VALUE', None) and
_(settings.INPLACEEDIT_EDIT_EMPTY_VALUE) or _('Doubleclick to edit'))
INPLACEEDIT_AUTO_SAVE = getattr(settings, 'INPLACEEDIT_AUTO_SAVE', False)
INPLACEEDIT_EVENT = getattr(settings, 'INPLACEEDIT_EVENT', 'dblclick')
INPLACEEDIT_DISABLE_CLICK = getattr(settings, 'INPLACEEDIT_DISABLE_CLICK', True)
INPLACEEDIT_EDIT_MESSAGE_TRANSLATION = (getattr(settings, 'INPLACEEDIT_EDIT_MESSAGE_TRANSLATION', None) and
_(settings.INPLACEEDIT_EDIT_MESSAGE_TRANSLATION) or _('Write a translation'))
INPLACEEDIT_SUCCESS_TEXT = (getattr(settings, 'INPLACEEDIT_SUCCESS_TEXT', None) and
_(settings.INPLACEEDIT_SUCCESS_TEXT) or _('Successfully saved'))
INPLACEEDIT_UNSAVED_TEXT = (getattr(settings, 'INPLACEEDIT_UNSAVED_TEXT', None) and
_(settings.INPLACEEDIT_UNSAVED_TEXT) or _('You have unsaved changes!'))
INPLACE_ENABLE_CLASS = getattr(settings, 'INPLACE_ENABLE_CLASS', 'enable')
DEFAULT_INPLACE_EDIT_OPTIONS = getattr(settings, "DEFAULT_INPLACE_EDIT_OPTIONS", {})
DEFAULT_INPLACE_EDIT_OPTIONS_ONE_BY_ONE = getattr(settings, 'DEFAULT_INPLACE_EDIT_OPTIONS_ONE_BY_ONE', False)
ADAPTOR_INPLACEEDIT_EDIT = getattr(settings, 'ADAPTOR_INPLACEEDIT_EDIT', None)
ADAPTOR_INPLACEEDIT = getattr(settings, 'ADAPTOR_INPLACEEDIT', {})
INPLACE_GET_FIELD_URL = getattr(settings, 'INPLACE_GET_FIELD_URL', None)
INPLACE_SAVE_URL = getattr(settings, 'INPLACE_SAVE_URL', None)
INPLACE_FIELD_TYPES = getattr(settings, 'INPLACE_FIELD_TYPES', 'input, select, textarea')
INPLACE_FOCUS_WHEN_EDITING = getattr(settings, 'INPLACE_FOCUS_WHEN_EDITING', True)
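# Illustrative example (not part of this module): since every value above is
# read with getattr(settings, ...), each default can be overridden from a
# project's settings.py, e.g.
#   INPLACEEDIT_EVENT = "click"
#   INPLACEEDIT_AUTO_SAVE = True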
| lgpl-3.0 | 6,846,049,735,984,986,000 | 60.046512 | 117 | 0.726476 | false |
kohnle-lernmodule/palama | exe/engine/idevicestore.py | 1 | 36506 | # ===========================================================================
# eXe
# Copyright 2004-2006, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
The collection of iDevices available
"""
from exe.engine import persist
from exe.engine.idevice import Idevice
from exe.engine.field import TextAreaField, FeedbackField
from nevow.flat import flatten
import imp
import sys
import logging
import copy
log = logging.getLogger(__name__)
# ===========================================================================
class IdeviceStore:
"""
The collection of iDevices available
"""
def __init__(self, config):
"""
Initialize
"""
self._nextIdeviceId = 0
self.config = config
self.extended = []
self.generic = []
self.listeners = []
        #JR: Add a list that will hold all the available iDevices
self.factoryiDevices = []
def getNewIdeviceId(self):
"""
Returns an iDevice Id which is unique
"""
id_ = unicode(self._nextIdeviceId)
self._nextIdeviceId += 1
return id_
def isGeneric(self, idevice):
"""
        Returns True if the iDevice is an instance of GenericIdevice
"""
from exe.engine.genericidevice import GenericIdevice
if isinstance(idevice, GenericIdevice):
return True
else:
return False
def getIdevices(self):
"""
Get the idevices which are applicable for the current node of
this package
In future the idevices which are returned will depend
upon the pedagogical template we are using
"""
return self.extended + self.generic
def getFactoryIdevices(self):
"""
        JR: Returns all the factory iDevices
"""
return self.factoryiDevices
def __delGenericIdevice(self, idevice):
"""
Delete a generic idevice from idevicestore.
"""
idevice_remove = None
exist = False
for i in self.generic:
if idevice.title == i.title:
idevice_remove = i
exist = True
break
if exist:
self.generic.remove(idevice_remove)
            #JR: Notify the listeners that this iDevice is no longer available
for listener in self.listeners:
listener.delIdevice(idevice_remove)
def __delExtendedIdevice(self, idevice):
"""
Delete an extended idevice from idevicestore.
"""
idevice_remove = None
exist = False
for i in self.extended:
if idevice.title == i.title:
idevice_remove = i
exist = True
break
if exist:
self.extended.remove(idevice_remove)
            #JR: Notify the listeners that this iDevice is no longer available
for listener in self.listeners:
listener.delIdevice(idevice_remove)
def delIdevice(self, idevice):
"""
        JR: Deletes an idevice
"""
if not self.isGeneric(idevice):
idevice_remove = None
exist = False
for i in self.extended:
if i.title == idevice.title:
idevice_remove = i
exist = True
break
if exist:
self.__delExtendedIdevice(idevice_remove)
else:
idevice_remove = None
exist = False
for i in self.generic:
if i.title == idevice.title:
idevice_remove = i
exist = True
break
if exist:
self.__delGenericIdevice(idevice_remove)
def register(self, listener):
"""
Register a listener who is interested in changes to the
IdeviceStore.
Created for IdevicePanes, but could be used by other objects
"""
self.listeners.append(listener)
def addIdevice(self, idevice):
"""
Register another iDevice as available
"""
if not self.isGeneric(idevice):
exist = False
for i in self.extended:
if i.title == idevice.title:
exist = True
if not exist:
self.extended.append(idevice)
idevice.edit = True
for listener in self.listeners:
listener.addIdevice(idevice)
else:
exist = False
for i in self.generic:
if i.title == idevice.title:
exist = True
if not exist:
self.generic.append(idevice)
idevice.edit = True
for listener in self.listeners:
listener.addIdevice(idevice)
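    # Illustrative usage sketch (not part of the original file): a pane object
    # implementing addIdevice()/delIdevice() can be kept in sync like this:
    #   store = IdeviceStore(config)
    #   store.register(pane)
    #   store.addIdevice(some_idevice)   # pane.addIdevice(some_idevice) is called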
def load(self):
"""
Load iDevices from the generic iDevices and the extended ones
"""
log.debug("load iDevices")
idevicesDir = self.config.configDir/'idevices'
if not idevicesDir.exists():
idevicesDir.mkdir()
self.__loadExtended()
self.__loadGeneric()
        #JR: notify the listeners about the extended iDevices
for listener in self.listeners:
for idevice in self.getIdevices():
listener.addIdevice(idevice)
def __getIdevicesFPD(self):
"""
        JR: This function returns the FPD iDevices
"""
from exe.engine.reflectionfpdidevice import ReflectionfpdIdevice
from exe.engine.reflectionfpdmodifidevice import ReflectionfpdmodifIdevice
from exe.engine.clozefpdidevice import ClozefpdIdevice
from exe.engine.clozelangfpdidevice import ClozelangfpdIdevice
from exe.engine.parasabermasfpdidevice import ParasabermasfpdIdevice
from exe.engine.debesconocerfpdidevice import DebesconocerfpdIdevice
from exe.engine.citasparapensarfpdidevice import CitasparapensarfpdIdevice
from exe.engine.recomendacionfpdidevice import RecomendacionfpdIdevice
from exe.engine.verdaderofalsofpdidevice import VerdaderofalsofpdIdevice
from exe.engine.seleccionmultiplefpdidevice import SeleccionmultiplefpdIdevice
from exe.engine.eleccionmultiplefpdidevice import EleccionmultiplefpdIdevice
from exe.engine.casopracticofpdidevice import CasopracticofpdIdevice
from exe.engine.ejercicioresueltofpdidevice import EjercicioresueltofpdIdevice
from exe.engine.destacadofpdidevice import DestacadofpdIdevice
from exe.engine.orientacionesalumnadofpdidevice import OrientacionesalumnadofpdIdevice
from exe.engine.orientacionestutoriafpdidevice import OrientacionestutoriafpdIdevice
from exe.engine.freetextfpdidevice import FreeTextfpdIdevice
idevices_FPD = []
idevices_FPD.append(ReflectionfpdIdevice())
idevices_FPD.append(ReflectionfpdmodifIdevice())
idevices_FPD.append(ClozefpdIdevice())
idevices_FPD.append(ClozelangfpdIdevice())
idevices_FPD.append(ParasabermasfpdIdevice())
idevices_FPD.append(DebesconocerfpdIdevice())
idevices_FPD.append(CitasparapensarfpdIdevice())
idevices_FPD.append(RecomendacionfpdIdevice())
idevices_FPD.append(VerdaderofalsofpdIdevice())
idevices_FPD.append(SeleccionmultiplefpdIdevice())
idevices_FPD.append(EleccionmultiplefpdIdevice())
idevices_FPD.append(CasopracticofpdIdevice())
idevices_FPD.append(EjercicioresueltofpdIdevice())
idevices_FPD.append(DestacadofpdIdevice())
#idevices_FPD.append(CorreccionfpdIdevice())
idevices_FPD.append(OrientacionesalumnadofpdIdevice())
idevices_FPD.append(OrientacionestutoriafpdIdevice())
idevices_FPD.append(FreeTextfpdIdevice())
return idevices_FPD
def __getFactoryExtendediDevices(self):
"""
        JR: Loads the factory iDevices
"""
from exe.engine.freetextidevice import FreeTextIdevice
from exe.engine.multimediaidevice import MultimediaIdevice
from exe.engine.reflectionidevice import ReflectionIdevice
from exe.engine.casestudyidevice import CasestudyIdevice
from exe.engine.truefalseidevice import TrueFalseIdevice
# converting ImageWithTextIdevice -> FreeTextIdevice:
#from exe.engine.imagewithtextidevice import ImageWithTextIdevice
#from exe.engine.wikipediaidevice import WikipediaIdevice
from exe.engine.attachmentidevice import AttachmentIdevice
from exe.engine.titleidevice import TitleIdevice
from exe.engine.galleryidevice import GalleryIdevice
from exe.engine.clozeidevice import ClozeIdevice
#from exe.engine.clozelangidevice import ClozelangIdevice
from exe.engine.flashwithtextidevice import FlashWithTextIdevice
from exe.engine.externalurlidevice import ExternalUrlIdevice
from exe.engine.imagemagnifieridevice import ImageMagnifierIdevice
# converting Maths Idevice -> FreeTextIdevice:
#from exe.engine.mathidevice import MathIdevice
from exe.engine.multichoiceidevice import MultichoiceIdevice
#from exe.engine.rssidevice import RssIdevice
from exe.engine.multiselectidevice import MultiSelectIdevice
#from exe.engine.appletidevice import AppletIdevice
from exe.engine.flashmovieidevice import FlashMovieIdevice
from exe.engine.quiztestidevice import QuizTestIdevice
# JR
        # Required for the FPD
from exe.engine.reflectionfpdidevice import ReflectionfpdIdevice
from exe.engine.reflectionfpdmodifidevice import ReflectionfpdmodifIdevice
from exe.engine.clozefpdidevice import ClozefpdIdevice
from exe.engine.clozelangfpdidevice import ClozelangfpdIdevice
from exe.engine.parasabermasfpdidevice import ParasabermasfpdIdevice
from exe.engine.debesconocerfpdidevice import DebesconocerfpdIdevice
from exe.engine.citasparapensarfpdidevice import CitasparapensarfpdIdevice
from exe.engine.recomendacionfpdidevice import RecomendacionfpdIdevice
from exe.engine.verdaderofalsofpdidevice import VerdaderofalsofpdIdevice
from exe.engine.seleccionmultiplefpdidevice import SeleccionmultiplefpdIdevice
from exe.engine.eleccionmultiplefpdidevice import EleccionmultiplefpdIdevice
from exe.engine.casopracticofpdidevice import CasopracticofpdIdevice
from exe.engine.ejercicioresueltofpdidevice import EjercicioresueltofpdIdevice
from exe.engine.destacadofpdidevice import DestacadofpdIdevice
#from exe.engine.correccionfpdidevice import CorreccionfpdIdevice
from exe.engine.orientacionesalumnadofpdidevice import OrientacionesalumnadofpdIdevice
from exe.engine.orientacionestutoriafpdidevice import OrientacionestutoriafpdIdevice
from exe.engine.freetextfpdidevice import FreeTextfpdIdevice
# eXelearningPlus iDevices
from exe.engine.scormclozeidevice import ScormClozeIdevice
from exe.engine.scormmultiselectidevice import ScormMultiSelectIdevice
from exe.engine.scormdropdownidevice import ScormDropDownIdevice
from exe.engine.scormmulticlozeidevice import ScormMultiClozeIdevice
from exe.engine.opinionidevice import OpinionIdevice
from exe.engine.dropdownidevice import DropDownIdevice
from exe.engine.scormmultiselectindfeedbackidevice import ScormMultiSelectIndFeedbackIdevice
factoryExtendedIdevices = []
factoryExtendedIdevices.append(FreeTextIdevice())
factoryExtendedIdevices.append(MultichoiceIdevice())
factoryExtendedIdevices.append(ReflectionIdevice())
factoryExtendedIdevices.append(CasestudyIdevice())
factoryExtendedIdevices.append(TrueFalseIdevice())
defaultImage = unicode(self.config.webDir / "images" / "sunflowers.jpg")
# converting ImageWithTextIdevice -> FreeTextIdevice:
#factoryExtendedIdevices.append(ImageWithTextIdevice(defaultImage))
factoryExtendedIdevices.append(ImageMagnifierIdevice(defaultImage))
defaultImage = unicode(self.config.webDir / "images" / "sunflowers.jpg")
#defaultSite = 'http://%s.wikipedia.org/' % self.config.locale
#factoryExtendedIdevices.append(WikipediaIdevice(defaultSite))
        #JR: Remove this iDevice from the extended ones
#factoryExtendedIdevices.append(AttachmentIdevice())
factoryExtendedIdevices.append(GalleryIdevice())
factoryExtendedIdevices.append(ClozeIdevice())
#factoryExtendedIdevices.append(ClozelangIdevice())
        #JR: Remove this iDevice from the extended set
#factoryExtendedIdevices.append(FlashWithTextIdevice())
factoryExtendedIdevices.append(ExternalUrlIdevice())
# converting Maths Idevice -> FreeTextIdevice:
#factoryExtendedIdevices.append(MathIdevice())
        #JR: Remove this iDevice from the extended set
#factoryExtendedIdevices.append(MultimediaIdevice())
#factoryExtendedIdevices.append(RssIdevice())
factoryExtendedIdevices.append(MultiSelectIdevice())
#factoryExtendedIdevices.append(AppletIdevice())
        #JR: Remove this iDevice from the extended set
#factoryExtendedIdevices.append(FlashMovieIdevice())
#modification lernmodule.net
#factoryExtendedIdevices.append(QuizTestIdevice())
#end modification lernmodule.net
# JR
        # iDevices for the FPD
factoryExtendedIdevices.append(ReflectionfpdIdevice())
factoryExtendedIdevices.append(ReflectionfpdmodifIdevice())
factoryExtendedIdevices.append(ClozefpdIdevice())
factoryExtendedIdevices.append(ClozelangfpdIdevice())
factoryExtendedIdevices.append(ParasabermasfpdIdevice())
factoryExtendedIdevices.append(DebesconocerfpdIdevice())
factoryExtendedIdevices.append(CitasparapensarfpdIdevice())
factoryExtendedIdevices.append(RecomendacionfpdIdevice())
factoryExtendedIdevices.append(VerdaderofalsofpdIdevice())
factoryExtendedIdevices.append(SeleccionmultiplefpdIdevice())
factoryExtendedIdevices.append(EleccionmultiplefpdIdevice())
factoryExtendedIdevices.append(CasopracticofpdIdevice())
factoryExtendedIdevices.append(EjercicioresueltofpdIdevice())
factoryExtendedIdevices.append(DestacadofpdIdevice())
#factoryExtendedIdevices.append(CorreccionfpdIdevice())
factoryExtendedIdevices.append(OrientacionesalumnadofpdIdevice())
factoryExtendedIdevices.append(OrientacionestutoriafpdIdevice())
factoryExtendedIdevices.append(FreeTextfpdIdevice())
# eXelearningPlus
factoryExtendedIdevices.append(ScormClozeIdevice())
factoryExtendedIdevices.append(ScormMultiSelectIdevice())
factoryExtendedIdevices.append(ScormDropDownIdevice())
factoryExtendedIdevices.append(ScormMultiClozeIdevice())
factoryExtendedIdevices.append(OpinionIdevice())
factoryExtendedIdevices.append(DropDownIdevice())
factoryExtendedIdevices.append(ScormMultiSelectIndFeedbackIdevice())
return factoryExtendedIdevices
def __loadExtended(self):
"""
Load the Extended iDevices (iDevices coded in Python)
        JR: Modified this function so that it also loads the factory extended iDevices
"""
self.__loadUserExtended()
        #JR: If the extended.data file exists, load the extended iDevices from it
extendedPath = self.config.configDir/'idevices'/'extended.data'
log.debug("load extended iDevices from "+extendedPath)
self.factoryiDevices = self.__getFactoryExtendediDevices()
if extendedPath.exists():
self.extended = persist.decodeObject(extendedPath.bytes())
else:
self.extended = copy.deepcopy(self.factoryiDevices)
#self.extended = self.factoryiDevices
for idevice in self.__getIdevicesFPD():
self.delIdevice(idevice)
# generate new ids for these iDevices, to avoid any clashes
for idevice in self.extended:
idevice.id = self.getNewIdeviceId()
def __loadUserExtended(self):
"""
Load the user-created extended iDevices which are in the idevices
directory
"""
idevicePath = self.config.configDir/'idevices'
log.debug("load extended iDevices from "+idevicePath)
if not idevicePath.exists():
idevicePath.makedirs()
sys.path = [idevicePath] + sys.path
# Add to the list of extended idevices
for path in idevicePath.listdir("*idevice.py"):
log.debug("loading "+path)
moduleName = path.basename().splitext()[0]
module = __import__(moduleName, globals(), locals(), [])
module.register(self)
# Register the blocks for rendering the idevices
for path in idevicePath.listdir("*block.py"):
log.debug("loading "+path)
moduleName = path.basename().splitext()[0]
module = __import__(moduleName, globals(), locals(), [])
module.register()
def __loadGeneric(self):
"""
Load the Generic iDevices from the appdata directory
"""
genericPath = self.config.configDir/'idevices'/'generic.data'
log.debug("load generic iDevices from "+genericPath)
if genericPath.exists():
self.generic = persist.decodeObject(genericPath.bytes())
self.__upgradeGeneric()
self.factoryiDevices += self.__createGeneric()
else:
self.generic = self.__createGeneric()
self.factoryiDevices += self.generic
# generate new ids for these iDevices, to avoid any clashes
for idevice in self.generic:
idevice.id = self.getNewIdeviceId()
def __upgradeGeneric(self):
"""
Upgrades/removes obsolete generic idevices from before
"""
        # We may have two reading activities,
        # one probably has the wrong title,
        # the other is redundant
readingActivitiesFound = 0
for idevice in self.generic:
if idevice.class_ == 'reading':
if readingActivitiesFound == 0:
# Rename the first one we find
idevice.title = x_(u"Reading Activity")
# and also upgrade its feedback field from using a simple
# string, to a subclass of TextAreaField.
# While this will have been initially handled by the
# field itself, and if not, then by the genericidevice's
# upgrade path, this is included here as a possibly
# painfully redundant safety check due to the extra
                    # special handling of generic idevices w/ generic.dat
for field in idevice.fields:
if isinstance(field, FeedbackField):
# must check for the upgrade manually, since
# persistence versions not used here.
# (but note that the persistence versioning
# will probably have ALREADY happened anyway!)
if not hasattr(field,"content"):
# this FeedbackField has NOT been upgraded:
field.content = field.feedback
field.content_w_resourcePaths = field.content
field.content_wo_resourcePaths = field.content
else:
# Destroy the second
self.generic.remove(idevice)
readingActivitiesFound += 1
if readingActivitiesFound == 2:
break
self.save()
def __createGeneric(self):
"""
Create the Generic iDevices which you get for free
(not created using the iDevice editor, but could have been)
Called when we can't find 'generic.data', generates an initial set of
free/builtin idevices and writes the new 'generic.data' file
        JR: Modified this method so that it accepts another parameter: the list
        to which the generic iDevices are added
"""
idevices = []
from exe.engine.genericidevice import GenericIdevice
readingAct = GenericIdevice(_(u"Reading Activity"),
u"reading",
_(u"University of Auckland"),
x_(u"""<p>The Reading Activity will primarily
be used to check a learner's comprehension of a given text. This can be done
by asking the learner to reflect on the reading and respond to questions about
the reading, or by having them complete some other possibly more physical task
based on the reading.</p>"""),
x_(u"<p>Teachers should keep the following "
"in mind when using this iDevice: </p>"
"<ol>"
"<li>"
"Think about the number of "
"different types of activity "
"planned for your resource that "
"will be visually signalled in the "
"content. Avoid using too many "
"different types or classification "
"of activities otherwise learner "
"may become confused. Usually three "
"or four different types are more "
"than adequate for a teaching "
"resource."
"</li>"
"<li>"
"From a visual design "
"perspective, avoid having two "
"iDevices immediately following "
"each other without any text in "
"between. If this is required, "
"rather collapse two questions or "
"events into one iDevice. "
"</li>"
"<li>"
"Think "
"about activities where the "
"perceived benefit of doing the "
"activity outweighs the time and "
"effort it will take to complete "
"the activity. "
"</li>"
"</ol>"))
readingAct.emphasis = Idevice.SomeEmphasis
readingAct.addField(TextAreaField(_(u"What to read"),
_(u"""Enter the details of the reading including reference details. The
referencing style used will depend on the preference of your faculty or
department.""")))
readingAct.addField(TextAreaField(_(u"Activity"),
_(u"""Describe the tasks related to the reading learners should undertake.
This helps demonstrate relevance for learners.""")))
readingAct.addField(FeedbackField(_(u"Feedback"),
_(u"""Use feedback to provide a summary of the points covered in the reading,
or as a starting point for further analysis of the reading by posing a question
or providing a statement to begin a debate.""")))
#idevices.append(readingAct)
objectives = GenericIdevice(_(u"Objectives"),
u"objectives",
_(u"University of Auckland"),
_(u"""Objectives describe the expected outcomes of the learning and should
define what the learners will be able to do when they have completed the
learning tasks."""),
u"")
objectives.emphasis = Idevice.SomeEmphasis
objectives.addField(TextAreaField(_(u"Objectives"),
_(u"""Type the learning objectives for this resource.""")))
#idevices.append(objectives)
#added kthamm summary idevice 111027
devsummary = GenericIdevice(_(u"Summary"),
u"devsummary",
_(u"University of Auckland"),
_(u"""Provide a summary of the learning resource."""),
u"")
devsummary.emphasis = Idevice.SomeEmphasis
devsummary.addField(TextAreaField(_(u"Summary"),
_(u"""Type a brief summary for this resource.""")))
idevices.append(devsummary)
#end added
#added kthamm preview idevice 111028
devpreview = GenericIdevice(_(u"Preview"),
u"devpreview",
_(u"University of Auckland"),
_(u"""A preview to introduce the learning resource"""),
u"")
devpreview.emphasis = Idevice.SomeEmphasis
devpreview.addField(TextAreaField(_(u"Preview"),
_(u"""Type the learning objectives for this resource.""")))
idevices.append(devpreview)
#end added
#added kthamm 111028 resource idevice
devresource = GenericIdevice(_(u"Resource"),
u"devresource",
_(u"University of Auckland"),
x_(u""" """),
x_(u" "))
devresource.emphasis = Idevice.SomeEmphasis
devresource.addField(TextAreaField(_(u"Resource"),
_(u"""Enter an URL to a resource, you want to provide. Mark the URL and click on the link button in the editor""")))
# devresource.addField(TextAreaField(_(u"Activity"),
#_(u"""Describe the tasks related to the reading learners should undertake.
#This helps demonstrate relevance for learners.""")))
#
# devresource.addField(FeedbackField(_(u"Feedback"),
#_(u"""Use feedback to provide a summary of the points covered in the reading,
#or as a starting point for further analysis of the reading by posing a question
#or providing a statement to begin a debate.""")))
idevices.append(devresource)
#end added
#added kthamm 111028 discussion idevice
devdiscussion = GenericIdevice(_(u"Discussion"),
u"devdiscussion",
_(u"University of Auckland"),
x_(u""" """),
x_(u" "))
devdiscussion.emphasis = Idevice.SomeEmphasis
devdiscussion.addField(TextAreaField(_(u"Discussion"),
_(u"""Enter the details of the reading including reference details. The
referencing style used will depend on the preference of your faculty or
department.""")))
devdiscussion.addField(TextAreaField(_(u"Activity"),
_(u"""Describe the tasks related to the reading learners should undertake.
This helps demonstrate relevance for learners.""")))
idevices.append(devdiscussion)
#end added
preknowledge = GenericIdevice(_(u"Preknowledge"),
u"preknowledge",
"",
_(u"""Prerequisite knowledge refers to the knowledge learners should already
have in order to be able to effectively complete the learning. Examples of
pre-knowledge can be: <ul>
<li> Learners must have level 4 English </li>
<li> Learners must be able to assemble standard power tools </li></ul>
"""), u"")
preknowledge.emphasis = Idevice.SomeEmphasis
preknowledge.addField(TextAreaField(_(u"Preknowledge"),
_(u"""Describe the prerequisite knowledge learners should have to effectively
complete this learning.""")))
#idevices.append(preknowledge)
activity = GenericIdevice(_(u"Activity"),
u"activity",
_(u"University of Auckland"),
_(u"""An activity can be defined as a task or set of tasks a learner must
complete. Provide a clear statement of the task and consider any conditions
that may help or hinder the learner in the performance of the task."""),
u"")
activity.emphasis = Idevice.SomeEmphasis
activity.addField(TextAreaField(_(u"Activity"),
_(u"""Describe the tasks the learners should complete.""")))
#idevices.append(activity)
self.save()
return idevices
def __createReading011(self):
"""
Create the Reading Activity 0.11
We do this only once when the user first runs eXe 0.11
"""
from exe.engine.genericidevice import GenericIdevice
readingAct = GenericIdevice(_(u"Reading Activity 0.11"),
u"reading",
_(u"University of Auckland"),
x_(u"""<p>The reading activity, as the name
suggests, should ask the learner to perform some form of activity. This activity
should be directly related to the text the learner has been asked to read.
Feedback to the activity where appropriate, can provide the learner with some
reflective guidance.</p>"""),
x_(u"Teachers should keep the following "
"in mind when using this iDevice: "
"<ol>"
"<li>"
"Think about the number of "
"different types of activity "
"planned for your resource that "
"will be visually signalled in the "
"content. Avoid using too many "
"different types or classification "
"of activities otherwise learner "
"may become confused. Usually three "
"or four different types are more "
"than adequate for a teaching "
"resource."
"</li>"
"<li>"
"From a visual design "
"perspective, avoid having two "
"iDevices immediately following "
"each other without any text in "
"between. If this is required, "
"rather collapse two questions or "
"events into one iDevice. "
"</li>"
"<li>"
"Think "
"about activities where the "
"perceived benefit of doing the "
"activity outweighs the time and "
"effort it will take to complete "
"the activity. "
"</li>"
"</ol>"))
readingAct.emphasis = Idevice.SomeEmphasis
readingAct.addField(TextAreaField(_(u"What to read"),
_(u"""Enter the details of the reading including reference details. The
referencing style used will depend on the preference of your faculty or
department.""")))
readingAct.addField(TextAreaField(_(u"Activity"),
_(u"""Describe the tasks related to the reading learners should undertake.
This helps demonstrate relevance for learners.""")))
readingAct.addField(FeedbackField(_(u"Feedback"),
_(u"""Use feedback to provide a summary of the points covered in the reading,
or as a starting point for further analysis of the reading by posing a question
or providing a statement to begin a debate.""")))
objectives = GenericIdevice(_(u"Objectives"),
u"objectives",
_(u"University of Auckland"),
_(u"""Objectives describe the expected outcomes of the learning and should
define what the learners will be able to do when they have completed the
learning tasks."""),
u"")
objectives.emphasis = Idevice.SomeEmphasis
objectives.addField(TextAreaField(_(u"Objectives"),
_(u"""Type the learning objectives for this resource.""")))
self.generic.append(objectives)
preknowledge = GenericIdevice(_(u"Preknowledge"),
u"preknowledge",
"",
_(u"""Prerequisite knowledge refers to the knowledge learners should already
have in order to be able to effectively complete the learning. Examples of
pre-knowledge can be: <ul>
<li> Learners must have level 4 English </li>
<li> Learners must be able to assemble standard power tools </li></ul>
"""), u"")
preknowledge.emphasis = Idevice.SomeEmphasis
preknowledge.addField(TextAreaField(_(u"Preknowledge"),
_(u"""Describe the prerequisite knowledge learners should have to effectively
complete this learning.""")))
self.generic.append(preknowledge)
activity = GenericIdevice(_(u"Activity"),
u"activity",
_(u"University of Auckland"),
_(u"""An activity can be defined as a task or set of tasks a learner must
complete. Provide a clear statement of the task and consider any conditions
that may help or hinder the learner in the performance of the task."""),
u"")
activity.emphasis = Idevice.SomeEmphasis
activity.addField(TextAreaField(_(u"Activity"),
_(u"""Describe the tasks the learners should complete.""")))
self.generic.append(activity)
self.save()
def save(self):
"""
Save the Generic iDevices to the appdata directory
"""
idevicesDir = self.config.configDir/'idevices'
if not idevicesDir.exists():
idevicesDir.mkdir()
fileOut = open(idevicesDir/'generic.data', 'wb')
fileOut.write(persist.encodeObject(self.generic))
        #JR: Also save the extended iDevices
fileOut = open(idevicesDir/'extended.data', 'wb')
fileOut.write(persist.encodeObject(self.extended))
# ===========================================================================
| gpl-2.0 | -7,368,021,126,483,378,000 | 45.742638 | 116 | 0.594478 | false |
seung-lab/neuroglancer | python/neuroglancer/tool/agglomeration_split_tool.py | 1 | 29368 | #!/usr/bin/env python
from __future__ import print_function
import argparse
import collections
import uuid
import copy
import heapq
import json
import re
import sqlite3
import logging
import os
import numpy as np
import six
import neuroglancer
debug_graph = False
verbose_merging = False
def normalize_edge((id_a, id_b)):
if id_a > id_b:
id_a, id_b = id_b, id_a
return id_a, id_b
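# Example: normalize_edge((42, 7)) returns (7, 42), so an undirected edge between
# two segments is always keyed the same way regardless of argument order.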
class GreedyMulticut(object):
def __init__(self, combine_edges, edge_priority):
# Contains (score, edge_map_value) tuple values in heap order. The
# edge_map_value is the actual corresponding value in edge_map, not a copy.
self.edge_heap = []
# Maps segment_id -> set of segment_id neighbors.
self.regions = dict()
# Maps (id_a, id_b) -> edge_map_value=[score, key, edge_object]
self.edge_map = dict()
self.combine_edges = combine_edges
self.edge_priority = edge_priority
self.num_valid_edges = 0
self._initialized = False
def add_edge(self, (id_a, id_b), edge):
id_a, id_b = normalize_edge((id_a, id_b))
self.regions.setdefault(id_a, set()).add(id_b)
self.regions.setdefault(id_b, set()).add(id_a)
key = (id_a, id_b)
entry = self.edge_map.get(key, None)
if entry is not None:
edge_data = entry[2] = self.combine_edges(entry[0], edge)
entry[0] = self.edge_priority(edge_data)
else:
entry = self.edge_map[key] = [self.edge_priority(edge), key, edge]
self.num_valid_edges += 1
if self._initialized:
self._add_to_heap(entry)
def _initialize_heap(self):
if self._initialized:
return
for key in self.edge_map:
entry = self.edge_map[key]
self._add_to_heap(entry)
self._initialized = True
def _add_to_heap(self, entry):
heapq.heappush(self.edge_heap, (entry[0], entry))
def remove_edge_from_heap(self, segment_ids):
"""Remove an edge from the heap."""
self._initialize_heap()
key = normalize_edge(segment_ids)
if key in self.edge_map:
self.edge_map[key][0] = None
self.num_valid_edges -= 1
def check_consistency(self):
self._initialize_heap()
expected_regions = dict()
for key, entry in six.viewitems(self.edge_map):
assert entry[1] == key
expected_regions.setdefault(key[0], set()).add(key[1])
expected_regions.setdefault(key[1], set()).add(key[0])
assert expected_regions == self.regions
num_valid_edges = 0
for e in self.edge_heap:
if self._is_valid_heap_entry(e):
num_valid_edges += 1
assert num_valid_edges == self.num_valid_edges
def merge(self, (id_a, id_b)):
self._initialize_heap()
id_a, id_b = normalize_edge((id_a, id_b))
if (id_a, id_b) not in self.edge_map:
raise KeyError
for neighbor in self.regions[id_b]:
if neighbor == id_a:
continue
expired_ids = normalize_edge((neighbor, id_b))
new_ids = normalize_edge((neighbor, id_a))
new_edge = self.edge_map.get(new_ids)
expired_edge = self.edge_map[expired_ids]
if new_edge is not None:
edge_data = new_edge[2] = self.combine_edges(new_edge[2], expired_edge[2])
if new_edge[0] is not None:
self.num_valid_edges -= 1
if expired_edge[0] is not None:
self.num_valid_edges -= 1
self.num_valid_edges += 1
new_edge[0] = self.edge_priority(edge_data)
self._add_to_heap(new_edge)
else:
self.regions[neighbor].add(id_a)
self.regions[id_a].add(neighbor)
self.edge_map[new_ids] = expired_edge
expired_edge[1] = new_ids
# No need to add to heap, since score hasn't changed.
del self.edge_map[expired_ids]
self.regions[neighbor].remove(id_b)
del self.regions[id_b]
self.regions[id_a].remove(id_b)
del self.edge_map[(id_a, id_b)]
self.num_valid_edges -= 1
def _is_valid_heap_entry(self, heap_entry):
score, entry = heap_entry
expected_entry = self.edge_map.get(entry[1])
if entry is not expected_entry or entry[0] is not score:
return None
else:
return entry
def get_next_edge(self):
self._initialize_heap()
while True:
if self.num_valid_edges == 0:
return None
heap_entry = self.edge_heap[0]
entry = self._is_valid_heap_entry(heap_entry)
if entry is None:
heapq.heappop(self.edge_heap)
else:
return entry
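# Minimal usage sketch of GreedyMulticut (illustrative ids and scores only):
#   gm = GreedyMulticut(combine_edges=lambda a, b: a + b, edge_priority=lambda x: x)
#   gm.add_edge((1, 2), 0.3)
#   gm.add_edge((2, 3), 0.7)
#   entry = gm.get_next_edge()   # lowest-priority valid edge, e.g. [0.3, (1, 2), 0.3]
#   gm.merge(entry[1])           # contract that edge; neighbors of 2 now attach to 1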
Edge = collections.namedtuple('Edge', ['segment_ids', 'score', 'position'])
def load_edges(path):
edges = []
with open(path, 'r') as f:
f.readline()
for line in f:
parts = line.split(',')
segment_a = int(parts[0].strip())
segment_b = int(parts[1].strip())
score = float(parts[2].strip())
position = (int(parts[3].strip()), int(parts[4].strip()), int(parts[5].strip()))
edges.append(Edge(segment_ids=(segment_a, segment_b), score=score, position=position))
return edges
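# Expected CSV layout (one header line, then rows of):
#   segment_a, segment_b, score, x, y, z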
def load_split_seeds(path):
with open(path, 'r') as f:
raw_seeds = json.loads(f.read())
seeds = collections.OrderedDict()
for component in raw_seeds:
seeds.setdefault(component['label'], []).extend(component['supervoxels'])
return seeds
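# The seeds file is assumed to be a JSON list of components, roughly:
#   [{"label": 0, "supervoxels": [{"supervoxel_id": ..., "position": [x, y, z], "count": ...}]},
#    {"label": 1, "supervoxels": [...]}]
# where labels 0 and 1 mark the two sides of the split.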
def build_graph(edges):
logging.info('Building graph with %d edges', len(edges))
def combine_edges(a, b):
return a + b
def edge_priority(x):
return x
greedy_multicut = GreedyMulticut(
combine_edges=combine_edges,
edge_priority=edge_priority,
)
for edge in edges:
greedy_multicut.add_edge(edge.segment_ids, edge.score)
return greedy_multicut
class AgglomerationGraph(object):
def __init__(self, conn):
self.conn = conn
self.agglo_members_cache = dict()
self.agglo_edges_cache = dict()
def get_agglo_id(self, supervoxel_id):
c = self.conn.cursor()
c.execute('SELECT agglo_id FROM supervoxels WHERE supervoxel_id=?', (int(supervoxel_id), ))
result = c.fetchone()
if result is None:
return supervoxel_id
else:
return result[0]
def get_agglo_members(self, agglo_id):
result = self.agglo_members_cache.get(agglo_id)
if result is not None:
return result
c = self.conn.cursor()
c.execute('SELECT supervoxel_id FROM supervoxels WHERE agglo_id=?', (int(agglo_id), ))
result = [row[0] for row in c.fetchall()]
self.agglo_members_cache[agglo_id] = result
return result
def get_agglo_edges(self, agglo_id):
result = self.agglo_edges_cache.get(agglo_id)
if result is not None:
return result
c = self.conn.cursor()
c.execute('SELECT segment_a, segment_b, score, x, y, z FROM edges WHERE agglo_id=?',
(int(agglo_id), ))
result = [
Edge(segment_ids=(row[0], row[1]), score=row[2], position=(row[3], row[4], row[5]))
for row in c.fetchall()
]
self.agglo_edges_cache[agglo_id] = result
return result
def _make_supervoxel_map(graph, split_seeds, need_agglo_ids):
supervoxel_map = dict()
agglo_ids = dict()
for label in [0, 1]:
for seed in split_seeds[label]:
supervoxel_id = seed['supervoxel_id']
if need_agglo_ids:
agglo_id = graph.get_agglo_id(supervoxel_id)
if agglo_id == 0:
continue
agglo_ids.setdefault(agglo_id, []).append((label, seed))
supervoxel_map.setdefault(supervoxel_id, set()).add(label)
return agglo_ids, supervoxel_map
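# _make_supervoxel_map returns (agglo_ids, supervoxel_map): agglo_ids maps each
# agglomerated id to its (label, seed) pairs (only populated when need_agglo_ids
# is True), and supervoxel_map maps a seeded supervoxel id to the set of split
# labels (0 and/or 1) placed on it.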
def do_split(graph, split_seeds, agglo_id=None, supervoxels=None):
agglo_ids, supervoxel_map = _make_supervoxel_map(graph, split_seeds, need_agglo_ids=agglo_id is None)
if agglo_id is None:
agglo_id_counts = {
agglo_id: sum(z[1]['count'] for z in seeds)
for agglo_id, seeds in six.viewitems(agglo_ids)
}
agglo_id = max(agglo_ids, key=lambda x: agglo_id_counts[x])
if len(agglo_ids) > 1:
logging.info('Warning: more than one agglomerated component. ' +
'Choosing component %d with maximum number of seed points.', agglo_id)
logging.info('agglo_id_counts = %r', agglo_id_counts)
input_edges = graph.get_agglo_edges(agglo_id)
if supervoxels is not None:
input_edges = [x for x in input_edges if x.segment_ids[0] in supervoxels and x.segment_ids[1] in supervoxels]
graph = build_graph(input_edges)
if debug_graph:
graph.check_consistency()
cur_eqs = neuroglancer.EquivalenceMap()
logging.info('Agglomerating')
threshold = float('inf')
while True:
entry = graph.get_next_edge()
if entry is None:
if verbose_merging:
logging.info('Stopping because entry is None')
break
if entry[0] > threshold:
if verbose_merging:
logging.info('Stopping because edge score %r is > threshold %r', entry[0],
threshold)
break
segment_ids = entry[1]
seeds_a = supervoxel_map.get(segment_ids[0])
seeds_b = supervoxel_map.get(segment_ids[1])
if ((seeds_a is not None and len(seeds_a) > 1) or (seeds_b is not None and len(seeds_b) > 1)
or (seeds_a is not None and seeds_b is not None and seeds_a != seeds_b)):
if verbose_merging:
logging.info('Excluding edge %r because of seeds: %r %r', segment_ids, seeds_a,
seeds_b)
graph.remove_edge_from_heap(segment_ids)
continue
if verbose_merging:
logging.info('Merging %r with score %r', segment_ids, entry[0])
graph.merge(segment_ids)
if debug_graph:
graph.check_consistency()
new_id = cur_eqs.union(*segment_ids)
new_seeds = seeds_a or seeds_b
if new_seeds:
supervoxel_map[new_id] = new_seeds
return dict(agglo_id=agglo_id, cur_eqs=cur_eqs, supervoxel_map=supervoxel_map)
def display_split_result(graph, agglo_id, cur_eqs, supervoxel_map, split_seeds, image_url,
segmentation_url):
agglo_members = set(graph.get_agglo_members(agglo_id))
state = neuroglancer.ViewerState()
state.layers.append(name='image', layer=neuroglancer.ImageLayer(source=image_url))
state.layers.append(
name='original',
layer=neuroglancer.SegmentationLayer(
source=segmentation_url,
segments=agglo_members,
),
visible=False,
)
state.layers.append(
name='isolated-supervoxels',
layer=neuroglancer.SegmentationLayer(
source=segmentation_url,
segments=set(x for x, seeds in six.viewitems(supervoxel_map) if len(seeds) > 1),
),
visible=False,
)
state.layers.append(
name='split',
layer=neuroglancer.SegmentationLayer(
source=segmentation_url,
equivalences=cur_eqs,
segments=set(cur_eqs[x] for x in agglo_members),
))
for label, component in six.viewitems(split_seeds):
state.layers.append(
name='seed%d' % label,
layer=neuroglancer.PointAnnotationLayer(
points=[seed['position'] for seed in component],
),
)
state.show_slices = False
state.layout = '3d'
all_seed_points = [
seed['position'] for component in six.viewvalues(split_seeds) for seed in component
]
state.voxel_coordinates = np.mean(all_seed_points, axis=0)
state.perspective_zoom = 140
return state
def _set_viewer_seeds(s, seeds):
for inclusive in [False, True]:
layer_name = 'inclusive-seeds' if inclusive else 'exclusive-seeds'
s.layers[layer_name] = neuroglancer.AnnotationLayer(
annotation_color='green' if inclusive else 'red',
annotations=[
dict(
type='point',
id=x['id'],
point=x['position'],
description=str(x['supervoxel_id']),
) for x in seeds[inclusive]
],
)
def _get_viewer_seeds(s):
seeds = [[], []]
for inclusive in [False, True]:
layer_name = 'inclusive-seeds' if inclusive else 'exclusive-seeds'
        try:
            layer = s.layers[layer_name]
        except KeyError:
            # Missing layer: no seeds of this kind yet, so skip it instead of
            # falling through with an unbound `layer` name.
            continue
for x in layer.annotations:
seeds[inclusive].append(
dict(
id=x.id,
supervoxel_id=int(x.description),
position=tuple(map(int, x.point)),
))
return seeds
class ComponentState(object):
def __init__(self, data=None):
self.supervoxels = set()
self.seeds = [[], []]
if data is not None:
self.load(data)
def load(self, data):
self.supervoxels = set(data['supervoxels'])
self.seeds = data['seeds']
def to_json(self):
return {
'supervoxels': sorted(self.supervoxels),
'seeds': self.seeds,
}
class InteractiveState(object):
def __init__(self, path):
self.unused_supervoxels = set()
self.components = []
self.path = path
self.selected_component = None
def load(self):
with open(self.path, 'r') as f:
data = json.load(f)
self.unused_supervoxels = set(data['unused_supervoxels'])
self.components = map(ComponentState, data['components'])
self.selected_component = data['selected_component']
def initialize(self, supervoxel_ids):
self.unused_supervoxels = set(supervoxel_ids)
self.components = []
self.selected_component = None
def to_json(self):
return {
'unused_supervoxels': sorted(self.unused_supervoxels),
'components': [x.to_json() for x in self.components],
'selected_component': self.selected_component,
}
def save(self):
if self.path is None:
return
tmp_path = self.path + '.tmp'
with open(tmp_path, 'w') as f:
f.write(json.dumps(self.to_json()))
os.rename(tmp_path, self.path)
def make_new_component(self):
c = ComponentState()
c.supervoxels = self.unused_supervoxels
self.unused_supervoxels = set()
self.selected_component = len(self.components)
self.components.append(c)
def cycle_selected_component(self, amount):
if len(self.components) == 0:
return
if self.selected_component is None:
if amount > 0:
self.selected_component = 0
else:
self.selected_component = len(self.components) - 1
else:
self.selected_component = (
self.selected_component + amount + len(self.components)) % len(self.components)
def add_seed(self, supervoxel_id, position, inclusive):
if self.selected_component is None:
return
c = self.components[self.selected_component]
c.seeds[inclusive].append(
dict(
supervoxel_id=supervoxel_id,
position=position,
id=uuid.uuid4().hex))
class CachedSplitResult(object):
def __init__(self, state, graph, agglo_id):
self.state = state
self.graph = graph
self.agglo_id = agglo_id
self.reset()
def reset(self):
self.selected_component = None
self.seeds = [[], []]
self.supervoxels = set()
self.split_result = None
def update(self):
selected_component = self.state.selected_component
if selected_component is None:
if self.selected_component is None:
return False
self.reset()
return True
component = self.state.components[selected_component]
if selected_component == self.selected_component:
if self.supervoxels == component.supervoxels:
if self.seeds == component.seeds:
return False
self.selected_component = self.state.selected_component
self.seeds = copy.deepcopy(component.seeds)
self.supervoxels = set(component.supervoxels)
print('Recomputing split result')
self.split_result = do_split(
graph=self.graph, split_seeds=self.seeds, agglo_id=self.agglo_id,
supervoxels=self.supervoxels)
print('Done recomputing split result')
return True
class InteractiveSplitter(object):
def __init__(self, graph, agglo_id, image_url, segmentation_url, state_path):
self.graph = graph
self.agglo_id = agglo_id
self.image_url = image_url
self.segmentation_url = segmentation_url
self.state = InteractiveState(state_path)
self.cached_split_result = CachedSplitResult(
state=self.state, graph=self.graph, agglo_id=self.agglo_id)
self.agglo_members = set(self.graph.get_agglo_members(agglo_id))
if state_path is not None and os.path.exists(state_path):
self.state.load()
else:
self.state.initialize(self.agglo_members)
viewer = self.viewer = neuroglancer.Viewer()
viewer.actions.add('inclusive-seed', self._add_inclusive_seed)
viewer.actions.add('exclusive-seed', self._add_exclusive_seed)
viewer.actions.add('next-component', self._next_component)
viewer.actions.add('prev-component', self._prev_component)
viewer.actions.add('new-component', self._make_new_component)
viewer.actions.add('exclude-component', self._exclude_component)
viewer.actions.add('exclude-all-but-component', self._exclude_all_but_component)
key_bindings = [
['bracketleft', 'prev-component'],
['bracketright', 'next-component'],
['at:dblclick0', 'exclude-component'],
['at:shift+mousedown2', 'exclude-all-but-component'],
['at:control+mousedown0', 'inclusive-seed'],
['at:shift+mousedown0', 'exclusive-seed'],
['enter', 'new-component'],
]
with viewer.txn() as s:
s.perspective_zoom = 140
s.layers.append(
name='image',
layer=neuroglancer.ImageLayer(source=self.image_url),
)
s.layers.append(
name='original',
layer=neuroglancer.SegmentationLayer(
source=self.segmentation_url,
segments=self.agglo_members,
),
)
s.layers.append(
name='unused',
layer=neuroglancer.SegmentationLayer(source=self.segmentation_url,
),
visible=False,
)
s.layers.append(
name='split-result',
layer=neuroglancer.SegmentationLayer(
source=self.segmentation_url,
segments=self.agglo_members,
),
)
s.concurrent_downloads = 256
self._update_state(s)
with viewer.config_state.txn() as s:
s.status_messages['help'] = ('KEYS: ' + ' | '.join('%s=%s' % (key, command)
for key, command in key_bindings))
for key, command in key_bindings:
s.input_event_bindings.viewer[key] = command
s.input_event_bindings.slice_view[key] = command
s.input_event_bindings.perspective_view[key] = command
self._update_config_state(s)
viewer.shared_state.add_changed_callback(
lambda: viewer.defer_callback(self._handle_state_changed))
def _add_inclusive_seed(self, s):
self._add_seed(s, True)
def _add_exclusive_seed(self, s):
self._add_seed(s, False)
def _exclude_component(self, s):
if self.state.selected_component is None:
return
component = self.state.components[self.state.selected_component]
supervoxel_id = self._get_mouse_supervoxel(s)
if supervoxel_id is None:
return
self.cached_split_result.update()
members = set(self.cached_split_result.split_result['cur_eqs'].members(supervoxel_id))
component.supervoxels = set(x for x in component.supervoxels if x not in members)
self.state.unused_supervoxels.update(members)
self._update_view()
def _exclude_all_but_component(self, s):
if self.state.selected_component is None:
return
component = self.state.components[self.state.selected_component]
supervoxel_id = self._get_mouse_supervoxel(s)
if supervoxel_id is None:
return
self.cached_split_result.update()
members = set(self.cached_split_result.split_result['cur_eqs'].members(supervoxel_id))
new_unused = set(x for x in component.supervoxels if x not in members)
component.supervoxels = members
self.state.unused_supervoxels.update(new_unused)
self._update_view()
def _make_new_component(self, s):
self.state.make_new_component()
self._update_view()
def _next_component(self, s):
self.state.cycle_selected_component(1)
self._update_view()
def _prev_component(self, s):
self.state.cycle_selected_component(-1)
self._update_view()
def _handle_state_changed(self):
if self.state.selected_component is None:
return
seeds = _get_viewer_seeds(self.viewer.state)
component = self.state.components[self.state.selected_component]
if seeds == component.seeds:
return
component.seeds = seeds
with self.viewer.txn() as s:
self._update_state(s)
def _get_mouse_supervoxel(self, s):
supervoxel_id = s.selected_values['original']
if supervoxel_id is None:
m = s.selected_values['split-result']
if m is not None:
if isinstance(m, neuroglancer.MapEntry):
supervoxel_id = m.key
else:
supervoxel_id = m
if supervoxel_id is None or supervoxel_id == 0:
return None
return supervoxel_id
def _add_seed(self, s, inclusive):
supervoxel_id = self._get_mouse_supervoxel(s)
mouse_voxel_coordinates = s.mouse_voxel_coordinates
if mouse_voxel_coordinates is None or supervoxel_id is None:
return
position = tuple(int(x) for x in mouse_voxel_coordinates)
self.state.add_seed(supervoxel_id, position, inclusive)
self._update_view()
def _update_view(self):
with self.viewer.txn() as s:
self._update_state(s)
with self.viewer.config_state.txn() as s:
self._update_config_state(s)
def _update_config_state(self, s):
if self.state.selected_component is None:
msg = '[No component selected] %d unused supervoxels' % len(
self.state.unused_supervoxels)
else:
selected_component = self.state.selected_component
msg = '[Component %d/%d] : %d supervoxels, %d connected components, %d unused' % (
selected_component, len(self.state.components),
len(self.cached_split_result.supervoxels),
len(self.cached_split_result.split_result['cur_eqs'].sets()), len(self.state.unused_supervoxels))
s.status_messages['status'] = msg
def _update_state(self, s):
self.cached_split_result.update()
self.state.save()
_set_viewer_seeds(s, self.cached_split_result.seeds)
s.layers['unused'].segments = self.state.unused_supervoxels
s.layers['original'].segments = self.cached_split_result.supervoxels
s.layers['split-result'].segments = self.cached_split_result.supervoxels
split_result = self.cached_split_result.split_result
if split_result is not None:
self._show_split_result(
s,
cur_eqs=split_result['cur_eqs'],
supervoxel_map=split_result['supervoxel_map'],
)
s.layout = neuroglancer.row_layout([
neuroglancer.LayerGroupViewer(
layout='3d',
layers=['image', 'original', 'unused', 'inclusive-seeds', 'exclusive-seeds']),
neuroglancer.LayerGroupViewer(
layout='3d', layers=['image', 'split-result', 'inclusive-seeds',
'exclusive-seeds']),
])
def _show_split_result(self, s, cur_eqs, supervoxel_map):
split_layer = s.layers['split-result']
split_layer.equivalences = cur_eqs
split_layer.segments = set(cur_eqs[x] for x in self.cached_split_result.supervoxels)
def run_batch(args, graph):
for path in args.split_seeds:
split_seeds = load_split_seeds(path)
split_result = do_split(graph=graph, split_seeds=split_seeds, agglo_id=args.agglo_id)
state = display_split_result(
graph=graph,
split_seeds=split_seeds,
image_url=args.image_url,
segmentation_url=args.segmentation_url,
**split_result)
print('<p><a href="%s">%s</a></p>' % (neuroglancer.to_url(state), path))
def run_interactive(args, graph):
# Make splitter a global variable so that it is accessible from the
# interactive `python -i` shell.
global splitter
if args.bind_address:
neuroglancer.set_server_bind_address(args.bind_address)
if args.static_content_url:
neuroglancer.set_static_content_source(url=args.static_content_url)
splitter = InteractiveSplitter(
graph,
agglo_id=args.agglo_id,
image_url=args.image_url,
segmentation_url=args.segmentation_url,
state_path=args.state)
print(splitter.viewer)
def open_graph(path, agglo_id):
# Check if graph_db is sharded
graph_db = path
m = re.match('(.*)@([0-9]+)((?:\..*)?)$', graph_db)
if m is not None:
num_shards = int(m.group(2))
shard = agglo_id % num_shards
graph_db = m.group(1) + ('-%05d-of-%05d' % (shard, num_shards)) + m.group(3)
return AgglomerationGraph(sqlite3.connect(graph_db, check_same_thread=False))
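# Example: a sharded path such as 'graph@16.db' with agglo_id 37 resolves to
# 'graph-00005-of-00016.db' (shard = 37 % 16 == 5).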
if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument('-v', '--verbose', action='store_true', help='Display verbose log messages.')
common_ap = argparse.ArgumentParser(add_help=False)
common_ap.add_argument(
'--graph-db', required=True, help='Path to sqlite3 database specifying agglomeration graph')
common_ap.add_argument(
'--image-url', required=True, help='Neuroglancer data source URL for image')
common_ap.add_argument(
'--segmentation-url', required=True, help='Neuroglancer data source URL for segmentation')
sub_aps = ap.add_subparsers(help='command to run')
interactive_ap = sub_aps.add_parser(
        'interactive', help='Interactively split an agglomerated component', parents=[common_ap])
batch_ap = sub_aps.add_parser(
'batch', help='Split based on pre-specified seed files', parents=[common_ap])
interactive_ap.add_argument(
'--agglo-id', type=int, required=True, help='Agglomerated component id to split')
interactive_ap.add_argument('--split-seeds', help='Path to JSON file specifying split seeds')
interactive_ap.add_argument('--state', help='Path to JSON state file.')
interactive_ap.add_argument(
'-a',
'--bind-address',
help='Bind address for Python web server. Use 127.0.0.1 (the default) to restrict access '
        'to browsers running on the local machine, use 0.0.0.0 to permit access from remote browsers.'
)
interactive_ap.add_argument(
'--static-content-url', help='Obtain the Neuroglancer client code from the specified URL.')
interactive_ap.set_defaults(func=run_interactive)
batch_ap.add_argument(
'--split-seeds', nargs='+', help='Path to JSON file specifying split seeds')
batch_ap.add_argument('--agglo-id', type=int, help='Agglomerated component id to split')
batch_ap.set_defaults(func=run_batch)
args = ap.parse_args()
graph = open_graph(args.graph_db, args.agglo_id)
if args.verbose:
logging.basicConfig(level=logging.INFO)
args.func(args, graph)
| apache-2.0 | -3,544,483,489,328,351,700 | 35.078624 | 117 | 0.583662 | false |
kenny1352/ratemyhistory | project/server.py | 1 | 16356 | import os
import uuid
import psycopg2
import psycopg2.extras
import crypt, getpass, pwd
import time
import smtplib, json
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from datetime import date
from flask import Flask, redirect, url_for,session, render_template, jsonify, request
from flask.ext.socketio import SocketIO, emit
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)
messages = []
users = {}
def connectToDB():
#change connection to session db
connectionString = 'dbname=ratemyhistory user=assist password=assist host=localhost'
print connectionString
try:
print("connected!")
return psycopg2.connect(connectionString)
except:
print("Can't connect to database")
@socketio.on('connect', namespace='/iss')
def makeConnection():
conn = connectToDB()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
if 'username' in session:
print session['username']
print session['logged']
session['logged']= 1
emit('logged', {'logged_in' : session['logged'], 'username' : session['username'] })
print('connected')
try:
print "before query in connect"
query =cur.mogrify("SELECT c.message, s.sender FROM chat AS c CROSS JOIN usersChat AS s WHERE c.chat_id = s.chat_id")
print "after query"
cur.execute(query)
print query
messages = cur.fetchall()
print messages
for message in messages:
tmp = {'name': message[1], 'text': message[0]}
print(message)
emit('message', tmp)
except:
print("Error in database")
@socketio.on('message', namespace='/iss')
def new_message(message):
conn = connectToDB()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
senderUser = session['username']
try:
print('message: ' + str(message))
print('senderUser: ' + str(senderUser))
userQuery = cur.mogrify("INSERT INTO usersChat (sender) VALUES (%s);", (senderUser,))
msgQuery = cur.mogrify("INSERT INTO chat (message) VALUES (%s);", (message,))
print userQuery
print msgQuery
cur.execute(userQuery)
cur.execute(msgQuery)
print("message added to database")
conn.commit()
tmp = {'text': message, 'name': senderUser}
emit('message', tmp, broadcast=True)
except Exception as e:
print type(e)
print("Error inserting")
conn.rollback()
# I added
# @socketio.on('message', namespace='/iss')
# def new_message(message):
# print "IN MESSAGE!"
# conn = connectToDB()
# cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# print "CONNECTED IN MESSAGE", message, " " , users[session['uuid']]['username']
# tmp ={'text': message, 'name': users[session['uuid']]['username']}
# cur.execute("""INSERT INTO userschat ( chat_id, users) VALUES(%s, %s); """,
# (users[session['uuid']]['id'], users[session['uuid']]['username']))
# conn.commit()
# print("tmp: ",tmp)
# print ("message: ", message, "ID: ",users[session['uuid']]['id'] )
# messages.append(tmp)
# emit('message', tmp, broadcast=True)
# # end I added
print ("before app route")
#for displaying html pages
@app.route('/')
def mainIndex():
print 'in hello world'
#print session['username']
# not sure we need this, but might be helpful later on
logged = 0
if 'username' in session:
logged = 1
conn = connectToDB()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
try:
profQuery = cur.mogrify("SELECT name, views from events UNION SELECT name, views from people ORDER BY views desc LIMIT 10;")
cur.execute(profQuery)
rows = cur.fetchall()
print profQuery
except:
print("Error executing SELECT statement")
return render_template('index.html', SelectedMenu = 'Index', topten = rows)
@app.route('/index.html')
def dashIndex():
print 'in hello world'
conn = connectToDB()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
try:
profQuery = cur.mogrify("SELECT name, views from events UNION SELECT name, views from people ORDER BY views desc LIMIT 10;")
cur.execute(profQuery)
rows = cur.fetchall()
print profQuery
except:
print("Error executing SELECT statement")
return render_template('index.html', SelectedMenu = 'Index', topten = rows)
@app.route('/SuggestEvent.html', methods=['GET','POST'])
def suggestEvent():
print 'in forms'
if request.method == 'POST':
eventName = request.form['eventName']
eventLoc = request.form['eventLoc']
email = request.form['senderEmail']
# file upload request
# 2 options requests
importance = request.form['importance']
time = request.form['timePeriod']
eventDesc = request.form['eventDesc']
receiver=['[email protected]']
sender = ['[email protected]']
message = "<p>Here is a suggested Event:<br /><br />"
message += "<b>Event Name: </b>" + eventName + "<br />"
message += "<b>Event Location: </b>" + eventLoc + "<br />"
message += "<b>Importance: </b>" + importance + "<br />"
message += "<b>Time: </b>" + time + "<br />"
message += "<b>Description: </b>" + eventDesc + "<br />"
message += "<b>User Email: </b>" + email + "<br />"
print(message)
message += "<br /><br />Thank you, <br />Rate My History User"
msg = MIMEMultipart('alternative')
emailMsg = MIMEText(message, 'html')
msg.attach(emailMsg)
msg['Subject'] = 'Suggest Event'
msg['From'] = '[email protected]'
msg['To'] = '[email protected]'
try:
smtpObj = smtplib.SMTP("smtp.gmail.com", 587)
smtpObj.ehlo()
smtpObj.starttls()
smtpObj.login('[email protected]', 'zacharski350')
smtpObj.sendmail(sender, receiver, msg.as_string())
smtpObj.quit()
print "Successfully sent email"
complete = True
except Exception as e:
print(e)
return render_template('SuggestEvent.html', SelectedMenu = 'SuggestEvent')
@app.route('/SuggestPerson.html', methods=['GET','POST'])
def suggestPerson():
print 'in forms'
return render_template('SuggestPerson.html', SelectedMenu = 'SuggestPerson')
@app.route('/profile.html')
def profile():
print 'in profile'
    if session.get('logged') == 1:
uEmail = session['email']
print uEmail
conn = connectToDB()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
try:
profQuery = cur.mogrify("SELECT Firstname, Lastname, Address, Company, Job, Fax, Email, Phone FROM users WHERE Email = %s LIMIT 1;", (uEmail,))
cur.execute(profQuery)
print profQuery
except:
print("Error executing SELECT statement")
pageStuff = cur.fetchall()
entry = pageStuff[0]
print entry[1]
else:
print "Error: Not logged in"
return render_template('index.html', SelectedMenu = 'Index')
return render_template('anotherProfile.html', pageInfo=entry, SelectedMenu = 'Profile')
# @app.route('/charts.html')
# def charts():
# print 'in charts'
# return render_template('charts.html', SelectedMenu = 'Charts')
# @app.route('/tables.html')
# def tables():
# print 'in tables'
# return render_template('tables.html', SelectedMenu = 'Tables')
@app.route('/register.html', methods=['GET','POST'])
def register():
print 'in register'
conn = connectToDB()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
rows = []
if request.method == 'POST':
email = request.form['email']
regQuery = cur.mogrify("SELECT Email FROM users WHERE Email = %s", (email,))
print (regQuery)
cur.execute(regQuery)
rows=cur.fetchall()
print ("rows")
if (rows == []):
check = request.form['password']
check2 = request.form['pwConfirm']
if (check == check2):
# THIS CAN STILL BE USED, BUT CURRENTLY DOESNT SUPPORT 3NF TABLES AND THE DATA OUR TABLE NEEDS
# regAddQuery = cur.mogrify("""INSERT INTO users (Username, Email, Password, Firstname, Lastname, Company, Job, Address, City, Country, Phone, Fax)
# VALUES(%s, %s, crypt(%s, gen_salt('bf')), %s, %s, %s, %s, %s, %s, %s, %s, %s);""", (request.form['userName'],request.form['email'],request.form['password'],
# request.form['firstName'],request.form['lastName'],request.form['comp'],request.form['prof'],request.form['address'],request.form['city'],
# request.form['country'],request.form['phoneNumber'],request.form['faxNumber']))
regAddQuery = cur.mogrify("INSERT INTO users (Username, Email, Password) VALUES(%s, %s, crypt(%s, gen_salt('bf')));",(request.form['userName'],request.form['email'],request.form['password'],))
print (regAddQuery)
cur.execute(regAddQuery)
print("after add execute")
#commented commit until I know the query is printing right
conn.commit()
print("person registered")
return redirect(url_for('mainIndex'))
else:
print("passwords dont match, cant register")
return redirect(url_for('register'))
else:
print ("email is taken so user exists")
return render_template('register.html', SelectedMenu = 'Register')
@app.route('/AddEvent.html', methods=['GET','POST'])
def addEvent():
print 'in event addition'
conn = connectToDB()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
if request.method == 'POST':
print ("in requests")
# eventName = request.form['addEventName']
# eventLoc = request.form['addEventLoc']
# email = request.form['addSenderEmail']
# # file upload request
# eventDesc = request.form['addEventDesc']
# # 2 options requests
# importance = request.form['addImportance']
# date = request.form['year']
print (request.form["addEventName"])
print (request.form["addEventLoc"])
print (request.form["addEventDesc"])
print (request.form["year"])
addEventQuery=cur.mogrify("""INSERT INTO events (Name, Location, Description, Year) Values(%s, %s, %s, %s);""", (request.form['addEventName'],request.form['addEventLoc'],request.form['addEventDesc'], request.form['year'],))
print addEventQuery
cur.execute(addEventQuery)
conn.commit()
return render_template('AddEvent.html', SelectedMenu = 'AddEvent')
@app.route('/AddPerson.html', methods=['GET','POST'])
def addPerson():
print 'in forms'
return render_template('AddPerson.html', SelectedMenu = 'AddPerson')
@app.route('/timeline.html')
def timeline():
print 'in timeline'
return render_template('timeline.html', SelectedMenu = 'Timeline')
@app.route('/search.html')
def search():
print 'in search'
return render_template('search.html', SelectedMenu = 'searchengine')
@socketio.on('identify', namespace='/iss')
def on_identify(message):
pass
@socketio.on('userLogin', namespace='/iss')
def on_login(data):
print "in logincheck"
# pw = data['password']
username = data['username']
logging = data['logged']
print username
if (logging==1):
emit ('logged',{'logged_in' : session['logged'] })
#print (user)
# print (userEmail)
# print 'login ' + pw
# #session['logged'] = 0
# conn = connectToDB()
# cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# print('connected')
# # userQuery = cur.mogrify("select email from users where email = %s", (userEmail,))
# # cur.execute(userQuery)
# # userResult = cur.fetchone()
# print 'already there'
# loginQuery = cur.mogrify("select Username, Email from users WHERE Email = %s AND Password = crypt(%s, Password)" , (userEmail, pw,))
# cur.execute(loginQuery)
# print ('query executed')
# result = cur.fetchone()
# print result
# if result:
# print('logged in!')
# print('saving information to the session...')
# #needs work to pass to javascript to limit the message send function
# #session['logged'] = json.dumps('true')
# session['loggedIn'] = 1
# session['username'] = result[0]
# print session['username']
# emit('logged', {'logged_in' : session['logged'] })
# #return redirect(url_for('mainIndex'))
# else:
# print ('incorrect login information')
# session['loggedIn'] = 0
# emit ('logged',{'logged_in' : session['logged'] })
#return redirect(url_for('login'))
# def loggedIn(logged):
# log = logged
# return log
#updateRoster()
@socketio.on('logout', namespace='/iss')
def on_disconnect(data):
print("i am here")
    session['loggedIn'] = 0
    session['logged'] = 0
    emit('logged', {'logged_in': session['logged']})
print 'user disconnected'
# need to login in app.rout('/login') and create session variable in the app.route so that it carries across sessions
# ng-init LOOK THIS UPs
@app.route('/login.html', methods=['GET', 'POST'])
def login():
print 'in login'
conn = connectToDB()
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
if request.method == 'POST':
print "in request login"
email = request.form['email']
password = request.form['password']
print email
print password
try:
loginQuery = cur.mogrify("SELECT Username, Email FROM users WHERE Email = %s AND Password = crypt(%s, Password);" , (email, password,))
print loginQuery
cur.execute(loginQuery)
print "EXECUTED: ", loginQuery
result = cur.fetchone()
#result = result
print('logged in')
# print('name = ', result['username'])
session['username'] = result['username']
session['logged'] = 1
session['email'] = result['email']
print ("username is : ", session['username'])
#socketio.emit('userLogin', {'logged_in' : session['logged'], 'username' : session['username']})
return redirect(url_for('mainIndex'))
except Exception as e:
print(e)
#print "passwords didnt match"
print "error logging in"
session['logged'] = 0
return redirect(url_for('login'))
return render_template('login.html', SelectedMenu = 'Login')
@app.route('/logout.html')
def logout():
print('removing session variables')
if 'username' in session:
del session['username']
session['loggedIn'] = 0
#print session['userName']
#session['userName'].close()
return redirect(url_for('mainIndex'))
#probably remove these later, but added them just to see what things could look like
@app.route('/bootstrap-elements')
def bootstrap():
print 'in tables'
return render_template('bootstrap-elements.html', SelectedMenu = 'Bootstrap-elements')
@app.route('/bootstrap-grid')
def bootstrap2():
print 'in tables'
return render_template('bootstrap-grid.html', SelectedMenu = 'Bootstrap-grid')
# start the server
if __name__ == '__main__':
socketio.run(app, host=os.getenv('IP', '0.0.0.0'), port =int(os.getenv('PORT', 8080)), debug=True) | apache-2.0 | 4,584,796,409,708,986,000 | 32.587269 | 231 | 0.590548 | false |
josenavas/QiiTa | qiita_pet/handlers/study_handlers/sample_template.py | 1 | 18227 | # -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import basename
from json import loads, dumps
from tornado.web import authenticated, HTTPError
from natsort import natsorted
from qiita_core.qiita_settings import r_client
from qiita_pet.handlers.base_handlers import BaseHandler
from qiita_db.util import get_files_from_uploads_folders
from qiita_db.study import Study
from qiita_db.metadata_template.sample_template import SampleTemplate
from qiita_db.metadata_template.util import looks_like_qiime_mapping_file
from qiita_db.software import Software, Parameters
from qiita_db.processing_job import ProcessingJob
from qiita_db.exceptions import QiitaDBUnknownIDError
from qiita_pet.handlers.api_proxy import (
data_types_get_req, sample_template_samples_get_req,
prep_template_samples_get_req, study_prep_get_req,
sample_template_meta_cats_get_req, sample_template_category_get_req,
get_sample_template_processing_status,
check_fp)
SAMPLE_TEMPLATE_KEY_FORMAT = 'sample_template_%s'
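# Redis key (e.g. 'sample_template_1') under which the id of the processing job
# currently attached to a study's sample information is stored.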
def sample_template_checks(study_id, user, check_exists=False):
"""Performs different checks and raises errors if any of the checks fail
Parameters
----------
study_id : int
The study id
user : qiita_db.user.User
The user trying to access the study
check_exists : bool, optional
If true, check if the sample template exists
Raises
------
HTTPError
404 if the study does not exist
403 if the user does not have access to the study
404 if check_exists == True and the sample template doesn't exist
"""
try:
study = Study(int(study_id))
except QiitaDBUnknownIDError:
raise HTTPError(404, 'Study does not exist')
if not study.has_access(user):
raise HTTPError(403, 'User does not have access to study')
# Check if the sample template exists
if check_exists and not SampleTemplate.exists(study_id):
raise HTTPError(404, "Study %s doesn't have sample information"
% study_id)
def sample_template_handler_post_request(study_id, user, filepath,
data_type=None):
"""Creates a new sample template
Parameters
----------
study_id: int
The study to add the sample information
user: qiita_db.user import User
The user performing the request
filepath: str
The path to the sample template file
data_type: str, optional
If filepath is a QIIME mapping file, the data type of the prep
information file
Returns
-------
dict of {'job': str}
job: the id of the job adding the sample information to the study
Raises
------
HTTPError
404 if the filepath doesn't exist
"""
# Check if the current user has access to the study
sample_template_checks(study_id, user)
# Check if the file exists
fp_rsp = check_fp(study_id, filepath)
if fp_rsp['status'] != 'success':
raise HTTPError(404, 'Filepath not found')
filepath = fp_rsp['file']
is_mapping_file = looks_like_qiime_mapping_file(filepath)
if is_mapping_file and not data_type:
raise HTTPError(400, 'Please, choose a data type if uploading a '
'QIIME mapping file')
qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
cmd = qiita_plugin.get_command('create_sample_template')
params = Parameters.load(
cmd, values_dict={'fp': filepath, 'study_id': study_id,
'is_mapping_file': is_mapping_file,
'data_type': data_type})
job = ProcessingJob.create(user, params, True)
r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
dumps({'job_id': job.id}))
job.submit()
return {'job': job.id}
def sample_template_handler_patch_request(user, req_op, req_path,
req_value=None, req_from=None):
"""Patches the sample template
Parameters
----------
user: qiita_db.user.User
The user performing the request
req_op : str
The operation to perform on the sample template
req_path : str
The path to the attribute to patch
req_value : str, optional
The new value
req_from : str, optional
The original path of the element
    Returns
    -------
    dict of {'job': str}
        job: the id of the job performing the requested patch operation
Raises
------
HTTPError
        400 If the path parameter doesn't follow the expected format
400 If the given operation is not supported
"""
req_path = [v for v in req_path.split('/') if v]
# At this point we know the path should be at least length 2
if len(req_path) < 2:
raise HTTPError(400, 'Incorrect path parameter')
study_id = int(req_path[0])
# Check if the current user has access to the study and if the sample
# template exists
sample_template_checks(study_id, user, check_exists=True)
if req_op == 'remove':
# Path format
# column: study_id/columns/column_name
# sample: study_id/samples/sample_id
if len(req_path) != 3:
raise HTTPError(400, 'Incorrect path parameter')
attribute = req_path[1]
attr_id = req_path[2]
qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
cmd = qiita_plugin.get_command('delete_sample_or_column')
params = Parameters.load(
cmd, values_dict={'obj_class': 'SampleTemplate',
'obj_id': study_id,
'sample_or_col': attribute,
'name': attr_id})
job = ProcessingJob.create(user, params, True)
# Store the job id attaching it to the sample template id
r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
dumps({'job_id': job.id}))
job.submit()
return {'job': job.id}
elif req_op == 'replace':
        # WARNING: Although the patch operation is a replace, it is not a
        # full true replace. A replace is in theory equivalent to a remove +
        # add. In this case, the replace operation doesn't necessarily remove
        # anything (e.g. when only new columns/samples are being added to
        # the sample information).
# Path format: study_id/data
# Forcing to specify data for extensibility. In the future we may want
# to use this function to replace other elements of the sample
# information
if len(req_path) != 2:
raise HTTPError(400, 'Incorrect path parameter')
attribute = req_path[1]
if attribute == 'data':
# Update the sample information
if req_value is None:
raise HTTPError(400, "Value is required when updating "
"sample information")
# Check if the file exists
fp_rsp = check_fp(study_id, req_value)
if fp_rsp['status'] != 'success':
raise HTTPError(404, 'Filepath not found')
filepath = fp_rsp['file']
qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
cmd = qiita_plugin.get_command('update_sample_template')
params = Parameters.load(
cmd, values_dict={'study': study_id,
'template_fp': filepath})
job = ProcessingJob.create(user, params, True)
# Store the job id attaching it to the sample template id
r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
dumps({'job_id': job.id}))
job.submit()
return {'job': job.id}
else:
raise HTTPError(404, 'Attribute %s not found' % attribute)
else:
raise HTTPError(400, 'Operation %s not supported. Current supported '
'operations: remove, replace' % req_op)
def sample_template_handler_delete_request(study_id, user):
"""Deletes the sample template
Parameters
----------
study_id: int
The study to delete the sample information
    user: qiita_db.user.User
The user performing the request
Returns
-------
dict of {'job': str}
job: the id of the job deleting the sample information to the study
Raises
------
HTTPError
404 If the sample template doesn't exist
"""
# Check if the current user has access to the study and if the sample
# template exists
sample_template_checks(study_id, user, check_exists=True)
qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
cmd = qiita_plugin.get_command('delete_sample_template')
params = Parameters.load(cmd, values_dict={'study': int(study_id)})
job = ProcessingJob.create(user, params, True)
    # Store the job id while deleting the sample template
r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
dumps({'job_id': job.id}))
job.submit()
return {'job': job.id}
class SampleTemplateHandler(BaseHandler):
@authenticated
def get(self):
study_id = self.get_argument('study_id')
# Check if the current user has access to the study
sample_template_checks(study_id, self.current_user)
self.render('study_ajax/sample_summary.html', study_id=study_id)
@authenticated
def post(self):
study_id = int(self.get_argument('study_id'))
filepath = self.get_argument('filepath')
data_type = self.get_argument('data_type')
self.write(sample_template_handler_post_request(
study_id, self.current_user, filepath, data_type=data_type))
@authenticated
def patch(self):
req_op = self.get_argument('op')
req_path = self.get_argument('path')
req_value = self.get_argument('value', None)
req_from = self.get_argument('from', None)
self.write(sample_template_handler_patch_request(
self.current_user, req_op, req_path, req_value, req_from))
@authenticated
def delete(self):
study_id = int(self.get_argument('study_id'))
self.write(sample_template_handler_delete_request(
study_id, self.current_user))
def sample_template_overview_handler_get_request(study_id, user):
# Check if the current user has access to the sample template
sample_template_checks(study_id, user)
# Check if the sample template exists
exists = SampleTemplate.exists(study_id)
# The following information should always be provided:
# The files that have been uploaded to the system and can be a
# sample template file
files = [f for _, f in get_files_from_uploads_folders(study_id)
if f.endswith(('txt', 'tsv'))]
# If there is a job associated with the sample information, the job id
job = None
job_info = r_client.get(SAMPLE_TEMPLATE_KEY_FORMAT % study_id)
if job_info:
job = loads(job_info)['job_id']
# Specific information if it exists or not:
data_types = []
st_fp_id = None
old_files = []
num_samples = 0
num_cols = 0
if exists:
# If it exists we need to provide:
# The id of the sample template file so the user can download it and
# the list of old filepaths
st = SampleTemplate(study_id)
all_st_files = st.get_filepaths()
# The current sample template file is the first one in the list
# (pop(0)) and we are interested only in the id ([0])
st_fp_id = all_st_files.pop(0)[0]
# For the old filepaths we are only interested in their basename
old_files = [basename(fp) for _, fp in all_st_files]
# The number of samples - this is a space efficient way of counting
# the number of samples. Doing len(list(st.keys())) creates a list
# that we are not using
num_samples = sum(1 for _ in st.keys())
# The number of columns
num_cols = len(st.categories())
else:
# It doesn't exist, we also need to provide the data_types in case
# the user uploads a QIIME mapping file
data_types = sorted(data_types_get_req()['data_types'])
return {'exists': exists,
'uploaded_files': files,
'data_types': data_types,
'user_can_edit': Study(study_id).can_edit(user),
'job': job,
'download_id': st_fp_id,
'old_files': old_files,
'num_samples': num_samples,
'num_columns': num_cols}
class SampleTemplateOverviewHandler(BaseHandler):
@authenticated
def get(self):
study_id = int(self.get_argument('study_id'))
self.write(
sample_template_overview_handler_get_request(
study_id, self.current_user))
def sample_template_summary_get_req(study_id, user):
"""Returns a summary of the sample template metadata columns
Parameters
----------
study_id: int
The study to retrieve the sample information summary
    user: qiita_db.user.User
The user performing the request
Returns
-------
dict of {str: object}
Keys are metadata categories and the values are list of tuples. Each
tuple is an observed value in the category and the number of times
it's seen.
Raises
------
HTTPError
404 If the sample template doesn't exist
"""
# Check if the current user has access to the study and if the sample
# template exists
sample_template_checks(study_id, user, check_exists=True)
st = SampleTemplate(study_id)
df = st.to_dataframe()
# Drop the study_id column if it exists
if 'study_id' in df.columns:
df.drop('study_id', axis=1, inplace=True)
res = {}
for column in df.columns:
counts = df[column].value_counts()
res[str(column)] = [(str(key), counts[key])
for key in natsorted(
counts.index,
key=lambda x: unicode(x, errors='ignore'))]
return res
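# For illustration only (hypothetical data): the summary returned above for a
# category such as 'env_biome' would look like
#     {'env_biome': [('forest', 12), ('ocean', 30)]}
# i.e. each observed value paired with the number of samples in which it is
# observed, naturally sorted.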
class SampleTemplateSummaryHandler(BaseHandler):
@authenticated
def get(self):
"""Send formatted summary page of sample template"""
study_id = int(self.get_argument('study_id'))
self.write(
sample_template_summary_get_req(study_id, self.current_user))
def _build_sample_summary(study_id, user_id):
"""Builds the initial table of samples associated with prep templates
Parameters
----------
study_id : int
Study to get samples from
user_id : str
User requesting the information
Returns
-------
columns : list of dict
SlickGrid formatted list of columns
samples_table : list of dict
SlickGrid formatted table information
"""
# Load all samples available into dictionary and set
samps_table = {s: {'sample': s} for s in
sample_template_samples_get_req(
study_id, user_id)['samples']}
all_samps = set(samps_table.keys())
columns = [{"id": "sample", "name": "Sample", "field": "sample",
"width": 240, "sortable": False}]
# Add one column per prep template highlighting what samples exist
preps = study_prep_get_req(study_id, user_id)["info"]
for dt in preps:
for prep in preps[dt]:
col_field = "prep%d" % prep["id"]
col_name = "%s - %d" % (prep["name"], prep["id"])
columns.append({"id": col_field,
"name": col_name,
"field": col_field,
"sortable": False,
"width": 240})
prep_samples = prep_template_samples_get_req(
prep['id'], user_id)['samples']
# Empty cell for samples not in the prep template
for s in all_samps.difference(prep_samples):
samps_table[s][col_field] = ""
# X in cell for samples in the prep template
for s in all_samps.intersection(prep_samples):
samps_table[s][col_field] = "X"
return columns, samps_table.values()
class SampleAJAX(BaseHandler):
@authenticated
def get(self):
"""Show the sample summary page"""
study_id = self.get_argument('study_id')
res = sample_template_meta_cats_get_req(
int(study_id), self.current_user.id)
if res['status'] == 'error':
if 'does not exist' in res['message']:
raise HTTPError(404, res['message'])
elif 'User does not have access to study' in res['message']:
raise HTTPError(403, res['message'])
else:
raise HTTPError(500, res['message'])
meta_cats = res['categories']
cols, samps_table = _build_sample_summary(study_id,
self.current_user.id)
_, alert_type, alert_msg = get_sample_template_processing_status(
study_id)
self.render('study_ajax/sample_prep_summary.html',
table=samps_table, cols=cols, meta_available=meta_cats,
study_id=study_id, alert_type=alert_type,
alert_message=alert_msg,
user_can_edit=Study(study_id).can_edit(self.current_user))
@authenticated
def post(self):
study_id = int(self.get_argument('study_id'))
meta_col = self.get_argument('meta_col')
values = sample_template_category_get_req(meta_col, study_id,
self.current_user.id)
if values['status'] != 'success':
self.write(values)
else:
self.write({'status': 'success',
'message': '',
'values': values['values']
})
| bsd-3-clause | -9,172,197,313,224,403,000 | 34.80943 | 79 | 0.594009 | false |
dabrahams/zeroinstall | tests/testdownload.py | 1 | 15513 | #!/usr/bin/env python
from __future__ import with_statement
from basetest import BaseTest
import sys, tempfile, os
from StringIO import StringIO
import unittest, signal
from logging import getLogger, WARN, ERROR
from contextlib import contextmanager
sys.path.insert(0, '..')
os.environ["http_proxy"] = "localhost:8000"
from zeroinstall.injector import model, gpg, download, trust, background, arch, selections, qdom, run
from zeroinstall.injector.requirements import Requirements
from zeroinstall.injector.driver import Driver
from zeroinstall.zerostore import Store, NotStored; Store._add_with_helper = lambda *unused: False
from zeroinstall.support import basedir, tasks
from zeroinstall.injector import fetch
import data
import my_dbus
import server
ran_gui = False
def raise_gui(*args):
global ran_gui
ran_gui = True
background._detach = lambda: False
background._exec_gui = raise_gui
@contextmanager
def output_suppressed():
old_stdout = sys.stdout
old_stderr = sys.stderr
try:
sys.stdout = StringIO()
sys.stderr = StringIO()
try:
yield
except Exception:
raise
except BaseException as ex:
# Don't abort unit-tests if someone raises SystemExit
raise Exception(str(type(ex)) + " " + str(ex))
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
class Reply:
def __init__(self, reply):
self.reply = reply
def readline(self):
return self.reply
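# Helper shared by the tests below: solves the given requirements, downloads
# any missing implementations, then executes the resulting selections.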
def download_and_execute(driver, prog_args, main = None):
downloaded = driver.solve_and_download_impls()
if downloaded:
tasks.wait_for_blocker(downloaded)
run.execute_selections(driver.solver.selections, prog_args, stores = driver.config.stores, main = main)
class NetworkManager:
def state(self):
return 3 # NM_STATUS_CONNECTED
server_process = None
def kill_server_process():
global server_process
if server_process is not None:
os.kill(server_process, signal.SIGTERM)
os.waitpid(server_process, 0)
server_process = None
def run_server(*args):
global server_process
assert server_process is None
server_process = server.handle_requests(*args)
class TestDownload(BaseTest):
def setUp(self):
BaseTest.setUp(self)
self.config.handler.allow_downloads = True
self.config.key_info_server = 'http://localhost:3333/key-info'
self.config.fetcher = fetch.Fetcher(self.config)
stream = tempfile.TemporaryFile()
stream.write(data.thomas_key)
stream.seek(0)
gpg.import_key(stream)
stream.close()
trust.trust_db.watchers = []
def tearDown(self):
BaseTest.tearDown(self)
kill_server_process()
def testRejectKey(self):
with output_suppressed():
run_server('Hello', '6FCF121BE2390E0B.gpg', '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B')
driver = Driver(requirements = Requirements('http://localhost:8000/Hello'), config = self.config)
assert driver.need_download()
sys.stdin = Reply("N\n")
try:
download_and_execute(driver, ['Hello'])
assert 0
except model.SafeException as ex:
if "has no usable implementations" not in str(ex):
raise ex
if "Not signed with a trusted key" not in str(self.config.handler.ex):
raise ex
self.config.handler.ex = None
def testRejectKeyXML(self):
with output_suppressed():
run_server('Hello.xml', '6FCF121BE2390E0B.gpg', '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B')
driver = Driver(requirements = Requirements('http://example.com:8000/Hello.xml'), config = self.config)
assert driver.need_download()
sys.stdin = Reply("N\n")
try:
download_and_execute(driver, ['Hello'])
assert 0
except model.SafeException as ex:
if "has no usable implementations" not in str(ex):
raise ex
if "Not signed with a trusted key" not in str(self.config.handler.ex):
raise
self.config.handler.ex = None
def testImport(self):
from zeroinstall.injector import cli
rootLogger = getLogger()
rootLogger.disabled = True
try:
try:
cli.main(['--import', '-v', 'NO-SUCH-FILE'], config = self.config)
assert 0
except model.SafeException as ex:
assert 'NO-SUCH-FILE' in str(ex)
finally:
rootLogger.disabled = False
rootLogger.setLevel(WARN)
hello = self.config.iface_cache.get_feed('http://localhost:8000/Hello')
self.assertEqual(None, hello)
with output_suppressed():
run_server('6FCF121BE2390E0B.gpg')
sys.stdin = Reply("Y\n")
assert not trust.trust_db.is_trusted('DE937DD411906ACF7C263B396FCF121BE2390E0B')
cli.main(['--import', 'Hello'], config = self.config)
assert trust.trust_db.is_trusted('DE937DD411906ACF7C263B396FCF121BE2390E0B')
# Check we imported the interface after trusting the key
hello = self.config.iface_cache.get_feed('http://localhost:8000/Hello', force = True)
self.assertEqual(1, len(hello.implementations))
self.assertEqual(None, hello.local_path)
# Shouldn't need to prompt the second time
sys.stdin = None
cli.main(['--import', 'Hello'], config = self.config)
def testSelections(self):
from zeroinstall.injector import cli
root = qdom.parse(open("selections.xml"))
sels = selections.Selections(root)
class Options: dry_run = False
with output_suppressed():
run_server('Hello.xml', '6FCF121BE2390E0B.gpg', '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B', 'HelloWorld.tgz')
sys.stdin = Reply("Y\n")
try:
self.config.stores.lookup_any(sels.selections['http://example.com:8000/Hello.xml'].digests)
assert False
except NotStored:
pass
cli.main(['--download-only', 'selections.xml'], config = self.config)
path = self.config.stores.lookup_any(sels.selections['http://example.com:8000/Hello.xml'].digests)
assert os.path.exists(os.path.join(path, 'HelloWorld', 'main'))
assert sels.download_missing(self.config) is None
def testHelpers(self):
from zeroinstall import helpers
with output_suppressed():
run_server('Hello.xml', '6FCF121BE2390E0B.gpg', '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B', 'HelloWorld.tgz')
sys.stdin = Reply("Y\n")
sels = helpers.ensure_cached('http://example.com:8000/Hello.xml', config = self.config)
path = self.config.stores.lookup_any(sels.selections['http://example.com:8000/Hello.xml'].digests)
assert os.path.exists(os.path.join(path, 'HelloWorld', 'main'))
assert sels.download_missing(self.config) is None
def testSelectionsWithFeed(self):
from zeroinstall.injector import cli
root = qdom.parse(open("selections.xml"))
sels = selections.Selections(root)
with output_suppressed():
run_server('Hello.xml', '6FCF121BE2390E0B.gpg', '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B', 'HelloWorld.tgz')
sys.stdin = Reply("Y\n")
tasks.wait_for_blocker(self.config.fetcher.download_and_import_feed('http://example.com:8000/Hello.xml', self.config.iface_cache))
cli.main(['--download-only', 'selections.xml'], config = self.config)
path = self.config.stores.lookup_any(sels.selections['http://example.com:8000/Hello.xml'].digests)
assert os.path.exists(os.path.join(path, 'HelloWorld', 'main'))
assert sels.download_missing(self.config) is None
def testAcceptKey(self):
with output_suppressed():
run_server('Hello', '6FCF121BE2390E0B.gpg', '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B', 'HelloWorld.tgz')
driver = Driver(requirements = Requirements('http://localhost:8000/Hello'), config = self.config)
assert driver.need_download()
sys.stdin = Reply("Y\n")
try:
download_and_execute(driver, ['Hello'], main = 'Missing')
assert 0
except model.SafeException as ex:
if "HelloWorld/Missing" not in str(ex):
raise
def testAutoAcceptKey(self):
self.config.auto_approve_keys = True
with output_suppressed():
run_server('Hello', '6FCF121BE2390E0B.gpg', '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B', 'HelloWorld.tgz')
driver = Driver(requirements = Requirements('http://localhost:8000/Hello'), config = self.config)
assert driver.need_download()
sys.stdin = Reply("")
try:
download_and_execute(driver, ['Hello'], main = 'Missing')
assert 0
except model.SafeException as ex:
if "HelloWorld/Missing" not in str(ex):
raise
def testDistro(self):
with output_suppressed():
native_url = 'http://example.com:8000/Native.xml'
# Initially, we don't have the feed at all...
master_feed = self.config.iface_cache.get_feed(native_url)
assert master_feed is None, master_feed
trust.trust_db.trust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')
run_server('Native.xml', '6FCF121BE2390E0B.gpg', '/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B')
driver = Driver(requirements = Requirements(native_url), config = self.config)
assert driver.need_download()
solve = driver.solve_with_downloads()
tasks.wait_for_blocker(solve)
tasks.check(solve)
master_feed = self.config.iface_cache.get_feed(native_url)
assert master_feed is not None
assert master_feed.implementations == {}
distro_feed_url = master_feed.get_distro_feed()
assert distro_feed_url is not None
distro_feed = self.config.iface_cache.get_feed(distro_feed_url)
assert distro_feed is not None
assert len(distro_feed.implementations) == 2, distro_feed.implementations
def testWrongSize(self):
with output_suppressed():
run_server('Hello-wrong-size', '6FCF121BE2390E0B.gpg',
'/key-info/key/DE937DD411906ACF7C263B396FCF121BE2390E0B', 'HelloWorld.tgz')
driver = Driver(requirements = Requirements('http://localhost:8000/Hello-wrong-size'), config = self.config)
assert driver.need_download()
sys.stdin = Reply("Y\n")
try:
download_and_execute(driver, ['Hello'], main = 'Missing')
assert 0
except model.SafeException as ex:
if "Downloaded archive has incorrect size" not in str(ex):
raise ex
def testRecipe(self):
old_out = sys.stdout
try:
sys.stdout = StringIO()
run_server(('HelloWorld.tar.bz2', 'redirect/dummy_1-1_all.deb', 'dummy_1-1_all.deb'))
driver = Driver(requirements = Requirements(os.path.abspath('Recipe.xml')), config = self.config)
try:
download_and_execute(driver, [])
assert False
except model.SafeException as ex:
if "HelloWorld/Missing" not in str(ex):
raise ex
finally:
sys.stdout = old_out
def testSymlink(self):
old_out = sys.stdout
try:
sys.stdout = StringIO()
run_server(('HelloWorld.tar.bz2', 'HelloSym.tgz'))
driver = Driver(requirements = Requirements(os.path.abspath('RecipeSymlink.xml')), config = self.config)
try:
download_and_execute(driver, [])
assert False
except model.SafeException as ex:
if 'Attempt to unpack dir over symlink "HelloWorld"' not in str(ex):
raise
self.assertEqual(None, basedir.load_first_cache('0install.net', 'implementations', 'main'))
finally:
sys.stdout = old_out
def testAutopackage(self):
old_out = sys.stdout
try:
sys.stdout = StringIO()
run_server('HelloWorld.autopackage')
driver = Driver(requirements = Requirements(os.path.abspath('Autopackage.xml')), config = self.config)
try:
download_and_execute(driver, [])
assert False
except model.SafeException as ex:
if "HelloWorld/Missing" not in str(ex):
raise
finally:
sys.stdout = old_out
def testRecipeFailure(self):
old_out = sys.stdout
try:
sys.stdout = StringIO()
run_server('*')
driver = Driver(requirements = Requirements(os.path.abspath('Recipe.xml')), config = self.config)
try:
download_and_execute(driver, [])
assert False
except download.DownloadError as ex:
if "Connection" not in str(ex):
raise
finally:
sys.stdout = old_out
def testMirrors(self):
old_out = sys.stdout
try:
sys.stdout = StringIO()
getLogger().setLevel(ERROR)
trust.trust_db.trust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')
run_server(server.Give404('/Hello.xml'), 'latest.xml', '/0mirror/keys/6FCF121BE2390E0B.gpg')
driver = Driver(requirements = Requirements('http://example.com:8000/Hello.xml'), config = self.config)
self.config.feed_mirror = 'http://example.com:8000/0mirror'
refreshed = driver.solve_with_downloads()
tasks.wait_for_blocker(refreshed)
assert driver.solver.ready
finally:
sys.stdout = old_out
def testReplay(self):
old_out = sys.stdout
try:
sys.stdout = StringIO()
getLogger().setLevel(ERROR)
iface = self.config.iface_cache.get_interface('http://example.com:8000/Hello.xml')
mtime = int(os.stat('Hello-new.xml').st_mtime)
self.config.iface_cache.update_feed_from_network(iface.uri, open('Hello-new.xml').read(), mtime + 10000)
trust.trust_db.trust_key('DE937DD411906ACF7C263B396FCF121BE2390E0B', 'example.com:8000')
run_server(server.Give404('/Hello.xml'), 'latest.xml', '/0mirror/keys/6FCF121BE2390E0B.gpg', 'Hello.xml')
driver = Driver(requirements = Requirements('http://example.com:8000/Hello.xml'), config = self.config)
self.config.feed_mirror = 'http://example.com:8000/0mirror'
# Update from mirror (should ignore out-of-date timestamp)
refreshed = self.config.fetcher.download_and_import_feed(iface.uri, self.config.iface_cache)
tasks.wait_for_blocker(refreshed)
# Update from upstream (should report an error)
refreshed = self.config.fetcher.download_and_import_feed(iface.uri, self.config.iface_cache)
try:
tasks.wait_for_blocker(refreshed)
raise Exception("Should have been rejected!")
except model.SafeException as ex:
assert "New feed's modification time is before old version" in str(ex)
# Must finish with the newest version
self.assertEqual(1235911552, self.config.iface_cache._get_signature_date(iface.uri))
finally:
sys.stdout = old_out
def testBackground(self, verbose = False):
r = Requirements('http://example.com:8000/Hello.xml')
d = Driver(requirements = r, config = self.config)
self.import_feed(r.interface_uri, 'Hello.xml')
self.config.freshness = 0
self.config.network_use = model.network_minimal
d.solver.solve(r.interface_uri, arch.get_host_architecture())
assert d.solver.ready, d.solver.get_failure_reason()
@tasks.async
def choose_download(registed_cb, nid, actions):
try:
assert actions == ['download', 'Download'], actions
registed_cb(nid, 'download')
except:
import traceback
traceback.print_exc()
yield None
global ran_gui
ran_gui = False
old_out = sys.stdout
try:
sys.stdout = StringIO()
run_server('Hello.xml', '6FCF121BE2390E0B.gpg')
my_dbus.system_services = {"org.freedesktop.NetworkManager": {"/org/freedesktop/NetworkManager": NetworkManager()}}
my_dbus.user_callback = choose_download
pid = os.getpid()
old_exit = os._exit
def my_exit(code):
# The background handler runs in the same process
# as the tests, so don't let it abort.
if os.getpid() == pid:
raise SystemExit(code)
# But, child download processes are OK
old_exit(code)
from zeroinstall.injector import config
key_info = config.DEFAULT_KEY_LOOKUP_SERVER
config.DEFAULT_KEY_LOOKUP_SERVER = None
try:
try:
os._exit = my_exit
background.spawn_background_update(d, verbose)
assert False
except SystemExit as ex:
self.assertEqual(1, ex.code)
finally:
os._exit = old_exit
config.DEFAULT_KEY_LOOKUP_SERVER = key_info
finally:
sys.stdout = old_out
assert ran_gui
def testBackgroundVerbose(self):
self.testBackground(verbose = True)
if __name__ == '__main__':
try:
unittest.main()
finally:
kill_server_process()
| lgpl-2.1 | 3,751,625,645,233,192,400 | 33.245033 | 133 | 0.713982 | false |
venicegeo/eventkit-cloud | eventkit_cloud/core/tests/test_models.py | 1 | 5605 | # -*- coding: utf-8 -*-
from unittest.mock import patch, MagicMock
from eventkit_cloud.auth.models import OAuth
from eventkit_cloud.core.models import AttributeClass, update_all_attribute_classes_with_user, \
update_all_users_with_attribute_class, get_users_from_attribute_class, validate_user_attribute_class, \
annotate_users_restricted, get_unrestricted_users
import json
import logging
from django.contrib.auth.models import User
from django.test import TestCase
logger = logging.getLogger(__name__)
class TestCoreModels(TestCase):
    def setUp(self):
self.user1 = User.objects.create_user(
username='demo1', email='[email protected]', password='demo1'
)
self.user2 = User.objects.create_user(
username='demo2', email='[email protected]', password='demo2'
)
self.attribute_class = AttributeClass.objects.create(name="test", slug="test")
@patch("eventkit_cloud.core.models.update_all_users_with_attribute_class")
def test_save(self, mock_update_all_users_with_attribute_class):
self.attribute_class.name = "new"
self.attribute_class.save()
mock_update_all_users_with_attribute_class.assert_not_called()
self.attribute_class.filter = json.dumps({"some": "new_filter"})
self.attribute_class.save()
mock_update_all_users_with_attribute_class.assert_called_once()
@patch("eventkit_cloud.core.models.validate_user_attribute_class")
def test_update_all_attribute_classes_with_user(self, mock_validate_user_attribute_class):
mock_validate_user_attribute_class.return_value = True
update_all_attribute_classes_with_user(self.user1)
self.assertCountEqual(list(self.attribute_class.users.all()), [self.user1])
mock_validate_user_attribute_class.return_value = False
update_all_attribute_classes_with_user(self.user1)
self.assertCountEqual(list(self.attribute_class.users.all()), list())
@patch("eventkit_cloud.core.models.get_users_from_attribute_class")
def test_update_all_users_with_attribute_class(self, mock_get_users_from_attribute_class):
users = User.objects.all()
mock_get_users_from_attribute_class.return_value = list(users)
update_all_users_with_attribute_class(self.attribute_class)
self.assertCountEqual(list(self.attribute_class.users.all()), list(users))
@patch("eventkit_cloud.core.models.validate_object")
def test_get_users_from_attribute_class(self, mock_validate_object):
self.attribute_class.filter = {"username": "demo1"}
self.attribute_class.save()
users = get_users_from_attribute_class(self.attribute_class)
self.assertCountEqual(list(users), list(User.objects.filter(id=self.user1.id)))
expected_response = list(User.objects.filter(id=self.user2.id))
mock_validate_object.return_value = expected_response
OAuth.objects.create(user=self.user2, identification="test_ident",
commonname="test_common", user_info={"color": "blue"})
self.attribute_class.filter = None
self.attribute_class.complex = ["blue", "==", "color"]
users = get_users_from_attribute_class(self.attribute_class)
self.assertCountEqual(list(users), expected_response)
expected_response = list()
mock_validate_object.return_value = expected_response
self.attribute_class.complex = ["red", "==", "color"]
users = get_users_from_attribute_class(self.attribute_class)
self.assertCountEqual(list(users), expected_response)
@patch("eventkit_cloud.core.models.validate_object")
def test_validate_user_attribute_class(self, mock_validate_object):
self.attribute_class.filter = {"username": "demo1"}
self.attribute_class.save()
self.assertTrue(validate_user_attribute_class(self.user1, self.attribute_class))
mock_validate_object.return_value = True
OAuth.objects.create(user=self.user2, identification="test_ident",
commonname="test_common", user_info={"color": "blue"})
self.attribute_class.filter = None
self.attribute_class.complex = ["blue", "==", "color"]
self.attribute_class.save()
self.assertTrue(validate_user_attribute_class(self.user2, self.attribute_class))
mock_validate_object.return_value = False
self.attribute_class.complex = ["red", "==", "color"]
self.attribute_class.save()
self.assertFalse(validate_user_attribute_class(self.user2, self.attribute_class))
@patch("eventkit_cloud.core.models.get_unrestricted_users")
def test_annotate_users_restricted(self, mock_get_unrestricted_users):
expected_unrestricted_users = User.objects.filter(username="demo2")
mock_get_unrestricted_users.return_value = expected_unrestricted_users
users = User.objects.all()
job = MagicMock()
users = annotate_users_restricted(users, job)
self.assertTrue(users[0].restricted)
self.assertFalse(users[1].restricted)
def test_get_unrestricted_users(self):
job = MagicMock()
provider_task = MagicMock()
provider_task.provider.attribute_class = self.attribute_class
job.provider_tasks.all.return_value = [provider_task]
self.attribute_class.users.set([self.user1])
users = User.objects.all()
unrestricted_users = get_unrestricted_users(users, job)
self.assertEqual(len(unrestricted_users), 1)
self.assertEqual(self.user1, unrestricted_users.first())
| bsd-3-clause | 4,534,848,612,090,168,000 | 46.905983 | 107 | 0.685816 | false |
verificarlo/verificarlo | src/tools/ci/vfc_ci_report/inspect_runs.py | 1 | 24408 | #############################################################################
# #
# This file is part of Verificarlo. #
# #
# Copyright (c) 2015-2021 #
# Verificarlo contributors #
# Universite de Versailles St-Quentin-en-Yvelines #
# CMLA, Ecole Normale Superieure de Cachan #
# #
# Verificarlo is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# Verificarlo is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with Verificarlo. If not, see <http://www.gnu.org/licenses/>. #
# #
#############################################################################
# Manage the view comparing the variables of a run
# Manage the view comparing a variable over different runs
# At its creation, an InspectRuns object will create all the needed Bokeh widgets
# and plots, setup the callback functions (either server side or client side),
# initialize widgets selection, and from this selection generate the first plots.
# Then, when callback functions are triggered, widgets selections are updated,
# and plots are re-generated with the newly selected data.
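# A minimal usage sketch (hypothetical wiring; the real entry point lives in
# the report's master view): the object is created once per Bokeh document and
# afterwards only reacts to widget callbacks or to messages from the master.
#
#   inspect = InspectRuns(master, curdoc(), data, metadata)  # builds widgets/plots
#   inspect.switch_view("run name")                          # jump to a given run
#   inspect.change_repo(new_data, new_metadata)              # swap repository data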
from math import pi
from functools import partial
import pandas as pd
import numpy as np
from bokeh.plotting import figure, curdoc
from bokeh.embed import components
from bokeh.models import Select, ColumnDataSource, Panel, Tabs, HoverTool,\
RadioButtonGroup, CheckboxGroup, CustomJS
import helper
import plot
##########################################################################
class InspectRuns:
# Helper functions related to InspectRun
def gen_runs_selection(self):
'''
Returns a dictionary mapping user-readable strings to all run timestamps
'''
runs_dict = {}
# Iterate over timestamp rows (runs) and fill dict
for row in self.metadata.iloc:
# The syntax used by pandas makes this part a bit tricky :
# row.name is the index of metadata (so it refers to the
# timestamp), whereas row["name"] is the column called "name"
# (which is the display string used for the run)
# runs_dict[run's name] = run's timestamp
runs_dict[row["name"]] = row.name
return runs_dict
def gen_boxplot_tooltips(self, prefix):
return [
("Name", "@%s_x" % prefix),
("Min", "@" + prefix + "_min{%0.18e}"),
("Max", "@" + prefix + "_max{%0.18e}"),
("1st quartile", "@" + prefix + "_quantile25{%0.18e}"),
("Median", "@" + prefix + "_quantile50{%0.18e}"),
("3rd quartile", "@" + prefix + "_quantile75{%0.18e}"),
("μ", "@" + prefix + "_mu{%0.18e}"),
("Number of samples (tests)", "@nsamples")
]
def gen_boxplot_tooltips_formatters(self, prefix):
return {
"@%s_min" % prefix: "printf",
"@%s_max" % prefix: "printf",
"@%s_quantile25" % prefix: "printf",
"@%s_quantile50" % prefix: "printf",
"@%s_quantile75" % prefix: "printf",
"@%s_mu" % prefix: "printf"
}
# Data processing helper
# (computes new distributions for sigma, s2, s10)
def data_processing(self, dataframe):
        # Compute aggregated mu
dataframe["mu"] = np.vectorize(
np.average)(
dataframe["mu"],
weights=dataframe["nsamples"])
# nsamples is the number of aggregated elements (as well as the number
# of samples for our new sigma and s distributions)
dataframe["nsamples"] = dataframe["nsamples"].apply(lambda x: len(x))
dataframe["mu_x"] = dataframe.index
        # Make sure that strings don't exceed a certain length
dataframe["mu_x"] = dataframe["mu_x"].apply(
lambda x: x[:17] + "[...]" + x[-17:] if len(x) > 39 else x
)
# Get quantiles and mu for sigma, s10, s2
for prefix in ["sigma", "s10", "s2"]:
dataframe["%s_x" % prefix] = dataframe["mu_x"]
dataframe[prefix] = dataframe[prefix].apply(np.sort)
dataframe["%s_min" % prefix] = dataframe[prefix].apply(np.min)
dataframe["%s_quantile25" % prefix] = dataframe[prefix].apply(
np.quantile, args=(0.25,))
dataframe["%s_quantile50" % prefix] = dataframe[prefix].apply(
np.quantile, args=(0.50,))
dataframe["%s_quantile75" % prefix] = dataframe[prefix].apply(
np.quantile, args=(0.75,))
dataframe["%s_max" % prefix] = dataframe[prefix].apply(np.max)
dataframe["%s_mu" % prefix] = dataframe[prefix].apply(np.average)
del dataframe[prefix]
return dataframe
# Plots update function
def update_plots(self):
groupby_display = self.widgets["groupby_radio"].labels[
self.widgets["groupby_radio"].active
]
groupby = self.factors_dict[groupby_display]
filterby_display = self.widgets["filterby_radio"].labels[
self.widgets["filterby_radio"].active
]
filterby = self.factors_dict[filterby_display]
# Groupby and aggregate lines belonging to the same group in lists
groups = self.run_data[
self.run_data.index.isin(
[self.widgets["select_filter"].value],
level=filterby
)
].groupby(groupby)
groups = groups.agg({
"sigma": lambda x: x.tolist(),
"s10": lambda x: x.tolist(),
"s2": lambda x: x.tolist(),
"mu": lambda x: x.tolist(),
# Used for mu weighted average first, then will be replaced
"nsamples": lambda x: x.tolist()
})
# Compute the new distributions, ...
groups = self.data_processing(groups).to_dict("list")
# Update source
# Assign each ColumnDataSource, starting with the boxplots
for prefix in ["sigma", "s10", "s2"]:
dict = {
"%s_x" % prefix: groups["%s_x" % prefix],
"%s_min" % prefix: groups["%s_min" % prefix],
"%s_quantile25" % prefix: groups["%s_quantile25" % prefix],
"%s_quantile50" % prefix: groups["%s_quantile50" % prefix],
"%s_quantile75" % prefix: groups["%s_quantile75" % prefix],
"%s_max" % prefix: groups["%s_max" % prefix],
"%s_mu" % prefix: groups["%s_mu" % prefix],
"nsamples": groups["nsamples"]
}
# Filter outliers if the box is checked
if len(self.widgets["outliers_filtering_inspect"].active) > 0:
# Boxplots will be filtered by max then min
top_outliers = helper.detect_outliers(dict["%s_max" % prefix])
helper.remove_boxplot_outliers(dict, top_outliers, prefix)
bottom_outliers = helper.detect_outliers(
dict["%s_min" % prefix])
helper.remove_boxplot_outliers(dict, bottom_outliers, prefix)
self.sources["%s_source" % prefix].data = dict
# Finish with the mu plot
dict = {
"mu_x": groups["mu_x"],
"mu": groups["mu"],
"nsamples": groups["nsamples"]
}
self.sources["mu_source"].data = dict
# Filter outliers if the box is checked
if len(self.widgets["outliers_filtering_inspect"].active) > 0:
mu_outliers = helper.detect_outliers(groups["mu"])
groups["mu"] = helper.remove_outliers(groups["mu"], mu_outliers)
groups["mu_x"] = helper.remove_outliers(
groups["mu_x"], mu_outliers)
# Update plots axis/titles
# Get display string of the last (unselected) factor
factors_dict = self.factors_dict.copy()
del factors_dict[groupby_display]
del factors_dict[filterby_display]
for_all = list(factors_dict.keys())[0]
# Update all display strings for plot title (remove caps, plural)
groupby_display = groupby_display.lower()
filterby_display = filterby_display.lower()[:-1]
for_all = for_all.lower()
self.plots["mu_inspect"].title.text = \
"Empirical average μ of %s (groupped by %s, for all %s)" \
% (filterby_display, groupby_display, for_all)
self.plots["sigma_inspect"].title.text = \
"Standard deviation σ of %s (groupped by %s, for all %s)" \
% (filterby_display, groupby_display, for_all)
self.plots["s10_inspect"].title.text = \
"Significant digits s of %s (groupped by %s, for all %s)" \
% (filterby_display, groupby_display, for_all)
self.plots["s2_inspect"].title.text = \
"Significant digits s of %s (groupped by %s, for all %s)" \
% (filterby_display, groupby_display, for_all)
helper.reset_x_range(self.plots["mu_inspect"], groups["mu_x"])
helper.reset_x_range(self.plots["sigma_inspect"], groups["sigma_x"])
helper.reset_x_range(self.plots["s10_inspect"], groups["s10_x"])
helper.reset_x_range(self.plots["s2_inspect"], groups["s2_x"])
    # Widgets' callback functions
# Run selector callback
def update_run(self, attrname, old, new):
filterby = self.widgets["filterby_radio"].labels[
self.widgets["filterby_radio"].active
]
filterby = self.factors_dict[filterby]
# Update run selection (by using dict mapping)
self.current_run = self.runs_dict[new]
# Update run data
self.run_data = self.data[self.data["timestamp"] == self.current_run]
# Save old selected option
old_value = self.widgets["select_filter"].value
# Update filter options
options = self.run_data.index\
.get_level_values(filterby).drop_duplicates().tolist()
self.widgets["select_filter"].options = options
if old_value not in self.widgets["select_filter"].options:
self.widgets["select_filter"].value = options[0]
            # The update_filter callback will be triggered by the assignment
else:
# Trigger the callback manually (since the plots need to be updated
# anyway)
self.update_filter("", "", old_value)
# "Group by" radio
def update_groupby(self, attrname, old, new):
# Update "Filter by" radio list
filterby_list = list(self.factors_dict.keys())
del filterby_list[self.widgets["groupby_radio"].active]
self.widgets["filterby_radio"].labels = filterby_list
filterby = self.widgets["filterby_radio"].labels[
self.widgets["filterby_radio"].active
]
filterby = self.factors_dict[filterby]
# Save old selected option
old_value = self.widgets["select_filter"].value
# Update filter options
options = self.run_data.index\
.get_level_values(filterby).drop_duplicates().tolist()
self.widgets["select_filter"].options = options
if old_value not in self.widgets["select_filter"].options:
self.widgets["select_filter"].value = options[0]
            # The update_filter callback will be triggered by the assignment
else:
# Trigger the callback manually (since the plots need to be updated
# anyway)
self.update_filter("", "", old_value)
# "Filter by" radio
def update_filterby(self, attrname, old, new):
filterby = self.widgets["filterby_radio"].labels[
self.widgets["filterby_radio"].active
]
filterby = self.factors_dict[filterby]
# Save old selected option
old_value = self.widgets["select_filter"].value
# Update filter selector options
options = self.run_data.index\
.get_level_values(filterby).drop_duplicates().tolist()
self.widgets["select_filter"].options = options
if old_value not in self.widgets["select_filter"].options:
self.widgets["select_filter"].value = options[0]
            # The update_filter callback will be triggered by the assignment
else:
# Trigger the callback manually (since the plots need to be updated
# anyway)
self.update_filter("", "", old_value)
# Filter selector callback
def update_filter(self, attrname, old, new):
self.update_plots()
# Filter outliers checkbox callback
def update_outliers_filtering(self, attrname, old, new):
# The status (checked/unchecked) of the checkbox is also verified inside
# self.update_plots(), so calling this function is enough
self.update_plots()
# Bokeh setup functions
# (for both variable and backend selection at once)
def setup_plots(self):
tools = "pan, wheel_zoom, xwheel_zoom, ywheel_zoom, reset, save"
# Tooltips and formatters
dotplot_tooltips = [
("Name", "@mu_x"),
("μ", "@mu{%0.18e}"),
("Number of samples (tests)", "@nsamples")
]
dotplot_formatters = {
"@mu": "printf"
}
sigma_boxplot_tooltips = self.gen_boxplot_tooltips("sigma")
sigma_boxplot_tooltips_formatters = self.gen_boxplot_tooltips_formatters(
"sigma")
s10_boxplot_tooltips = self.gen_boxplot_tooltips("s10")
s10_boxplot_tooltips_formatters = self.gen_boxplot_tooltips_formatters(
"s10")
s2_boxplot_tooltips = self.gen_boxplot_tooltips("s2")
s2_boxplot_tooltips_formatters = self.gen_boxplot_tooltips_formatters(
"s2")
# Plots
# Mu plot
self.plots["mu_inspect"] = figure(
name="mu_inspect",
title="",
plot_width=900, plot_height=400, x_range=[""],
tools=tools, sizing_mode="scale_width"
)
plot.fill_dotplot(
self.plots["mu_inspect"], self.sources["mu_source"], "mu",
tooltips=dotplot_tooltips,
tooltips_formatters=dotplot_formatters
)
self.doc.add_root(self.plots["mu_inspect"])
# Sigma plot
self.plots["sigma_inspect"] = figure(
name="sigma_inspect",
title="",
plot_width=900, plot_height=400, x_range=[""],
tools=tools, sizing_mode="scale_width"
)
plot.fill_boxplot(
self.plots["sigma_inspect"],
self.sources["sigma_source"],
prefix="sigma",
tooltips=sigma_boxplot_tooltips,
tooltips_formatters=sigma_boxplot_tooltips_formatters)
self.doc.add_root(self.plots["sigma_inspect"])
# s plots
self.plots["s10_inspect"] = figure(
name="s10_inspect",
title="",
plot_width=900, plot_height=400, x_range=[""],
tools=tools, sizing_mode='scale_width'
)
plot.fill_boxplot(
self.plots["s10_inspect"],
self.sources["s10_source"],
prefix="s10",
tooltips=s10_boxplot_tooltips,
tooltips_formatters=s10_boxplot_tooltips_formatters)
s10_tab_inspect = Panel(
child=self.plots["s10_inspect"],
title="Base 10")
self.plots["s2_inspect"] = figure(
name="s2_inspect",
title="",
plot_width=900, plot_height=400, x_range=[""],
tools=tools, sizing_mode='scale_width'
)
plot.fill_boxplot(
self.plots["s2_inspect"], self.sources["s2_source"], prefix="s2",
tooltips=s2_boxplot_tooltips,
tooltips_formatters=s2_boxplot_tooltips_formatters
)
s2_tab_inspect = Panel(child=self.plots["s2_inspect"], title="Base 2")
s_tabs_inspect = Tabs(
name="s_tabs_inspect",
tabs=[s10_tab_inspect, s2_tab_inspect], tabs_location="below"
)
self.doc.add_root(s_tabs_inspect)
def setup_widgets(self):
# Generation of selectable items
# Dict contains all inspectable runs (maps display strings to timestamps)
        # The dict structure allows getting the timestamp from the display string
# in O(1)
self.runs_dict = self.gen_runs_selection()
# Dict maps display strings to column names for the different factors
# (var, backend, test)
self.factors_dict = {
"Variables": "variable",
"Backends": "vfc_backend",
"Tests": "test"
}
# Run selection
# Contains all options strings
runs_display = list(self.runs_dict.keys())
# Will be used when updating plots (contains actual number)
self.current_run = self.runs_dict[runs_display[-1]]
# Contains the selected option string, used to update current_n_runs
current_run_display = runs_display[-1]
# This contains only entries matching the run
self.run_data = self.data[self.data["timestamp"] == self.current_run]
change_run_callback_js = "updateRunMetadata(cb_obj.value);"
self.widgets["select_run"] = Select(
name="select_run", title="Run :",
value=current_run_display, options=runs_display
)
self.doc.add_root(self.widgets["select_run"])
self.widgets["select_run"].on_change("value", self.update_run)
self.widgets["select_run"].js_on_change("value", CustomJS(
code=change_run_callback_js,
args=(dict(
metadata=helper.metadata_to_dict(
helper.get_metadata(self.metadata, self.current_run)
)
))
))
# Factors selection
# "Group by" radio
self.widgets["groupby_radio"] = RadioButtonGroup(
name="groupby_radio",
labels=list(self.factors_dict.keys()), active=0
)
self.doc.add_root(self.widgets["groupby_radio"])
# The functions are defined inside the template to avoid writing too
# much JS server side
self.widgets["groupby_radio"].on_change(
"active",
self.update_groupby
)
# "Filter by" radio
# Get all possible factors, and remove the one selected in "Group by"
filterby_list = list(self.factors_dict.keys())
del filterby_list[self.widgets["groupby_radio"].active]
self.widgets["filterby_radio"] = RadioButtonGroup(
name="filterby_radio",
labels=filterby_list, active=0
)
self.doc.add_root(self.widgets["filterby_radio"])
# The functions are defined inside the template to avoid writing too
# much JS server side
self.widgets["filterby_radio"].on_change(
"active",
self.update_filterby
)
# Filter selector
filterby = self.widgets["filterby_radio"].labels[
self.widgets["filterby_radio"].active
]
filterby = self.factors_dict[filterby]
options = self.run_data.index\
.get_level_values(filterby).drop_duplicates().tolist()
self.widgets["select_filter"] = Select(
# We need a different name to avoid collision in the template with
# the runs comparison's widget
name="select_filter", title="Select a filter :",
value=options[0], options=options
)
self.doc.add_root(self.widgets["select_filter"])
self.widgets["select_filter"]\
.on_change("value", self.update_filter)
# Toggle for outliers filtering
self.widgets["outliers_filtering_inspect"] = CheckboxGroup(
name="outliers_filtering_inspect",
labels=["Filter outliers"], active=[]
)
self.doc.add_root(self.widgets["outliers_filtering_inspect"])
self.widgets["outliers_filtering_inspect"]\
.on_change("active", self.update_outliers_filtering)
# Communication methods
# (to send/receive messages to/from master)
def change_repo(self, new_data, new_metadata):
'''
When received, update data and metadata with the new repo, and update
everything
'''
self.data = new_data
self.metadata = new_metadata
self.runs_dict = self.gen_runs_selection()
runs_display = list(self.runs_dict.keys())
current_run_display = runs_display[-1]
# Update widget (and trigger its callback)
self.widgets["select_run"].options = runs_display
self.widgets["select_run"].value = current_run_display
filterby = self.widgets["filterby_radio"].labels[
self.widgets["filterby_radio"].active
]
filterby = self.factors_dict[filterby]
self.run_data = self.data[self.data["timestamp"] == self.current_run]
options = self.run_data.index\
.get_level_values(filterby).drop_duplicates().tolist()
# Update widget (and trigger its callback)
self.widgets["select_filter"].options = options
self.widgets["select_filter"].value = options[0]
def switch_view(self, run_name):
'''When received, switch selected run to run_name'''
# This will trigger the widget's callback
self.widgets["select_run"].value = run_name
# Constructor
def __init__(self, master, doc, data, metadata):
'''
Here are the most important attributes of the InspectRuns class
master : reference to the ViewMaster class
doc : an object provided by Bokeh to add elements to the HTML document
data : pandas dataframe containing all the tests data
metadata : pandas dataframe containing all the tests metadata
sources : ColumnDataSource object provided by Bokeh, contains current
data for the plots (inside the .data attribute)
plots : dictionary of Bokeh plots
widgets : dictionary of Bokeh widgets
'''
self.master = master
self.doc = doc
self.data = data
self.metadata = metadata
self.sources = {
"mu_source": ColumnDataSource(data={}),
"sigma_source": ColumnDataSource(data={}),
"s10_source": ColumnDataSource(data={}),
"s2_source": ColumnDataSource(data={})
}
self.plots = {}
self.widgets = {}
# Setup Bokeh objects
self.setup_plots()
self.setup_widgets()
# Pass the initial metadata to the template (will be updated in CustomJS
# callbacks). This is required because metadata is not displayed in a
# Bokeh widget, so we can't update this with a server callback.
initial_run = helper.get_metadata(self.metadata, self.current_run)
self.doc.template_variables["initial_timestamp"] = initial_run.name
self.doc.template_variables["initial_repo"] = initial_run.repo_name
# At this point, everything should have been initialized, so we can
# show the plots for the first time
self.update_plots()
| gpl-3.0 | -3,457,770,372,378,908,000 | 36.602465 | 81 | 0.56585 | false |
lhfei/spark-in-action | spark-2.x/src/main/python/mllib/binary_classification_metrics_example.py | 1 | 2177 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Binary Classification Metrics Example.
"""
from __future__ import print_function
from pyspark import SparkContext
# $example on$
from pyspark.mllib.classification import LogisticRegressionWithLBFGS
from pyspark.mllib.evaluation import BinaryClassificationMetrics
from pyspark.mllib.util import MLUtils
# $example off$
if __name__ == "__main__":
sc = SparkContext(appName="BinaryClassificationMetricsExample")
# $example on$
    # Several of the methods available in Scala are currently missing from PySpark
# Load training data in LIBSVM format
data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_binary_classification_data.txt")
# Split data into training (60%) and test (40%)
training, test = data.randomSplit([0.6, 0.4], seed=11)
training.cache()
# Run training algorithm to build the model
model = LogisticRegressionWithLBFGS.train(training)
# Compute raw scores on the test set
predictionAndLabels = test.map(lambda lp: (float(model.predict(lp.features)), lp.label))
# Instantiate metrics object
metrics = BinaryClassificationMetrics(predictionAndLabels)
# Area under precision-recall curve
print("Area under PR = %s" % metrics.areaUnderPR)
# Area under ROC curve
print("Area under ROC = %s" % metrics.areaUnderROC)
# $example off$
sc.stop()
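    # A possible way to launch this example (the path is illustrative and
    # assumes a standard Spark distribution on the local machine):
    #   bin/spark-submit binary_classification_metrics_example.py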
| apache-2.0 | -8,696,846,894,260,542,000 | 36.875 | 92 | 0.725769 | false |
UITools/saleor | tests/dashboard/test_staff.py | 1 | 7623 | from django.contrib.auth.tokens import default_token_generator
from django.core import mail
from django.templatetags.static import static
from django.urls import reverse
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from templated_email import send_templated_mail
from saleor.account.models import User
from saleor.core.utils import build_absolute_uri
from saleor.dashboard.staff.forms import StaffForm
from saleor.dashboard.staff.utils import remove_staff_member
from saleor.settings import DEFAULT_FROM_EMAIL
def test_remove_staff_member_with_orders(
staff_user, permission_manage_products, order):
order.user = staff_user
order.save()
staff_user.user_permissions.add(permission_manage_products)
remove_staff_member(staff_user)
staff_user = User.objects.get(pk=staff_user.pk)
assert not staff_user.is_staff
assert not staff_user.user_permissions.exists()
def test_remove_staff_member(staff_user):
remove_staff_member(staff_user)
assert not User.objects.filter(pk=staff_user.pk).exists()
def test_staff_form_not_valid(staff_user):
data = {'user_permissions': 1}
form = StaffForm(data=data, user=staff_user)
assert not form.is_valid()
def test_staff_form_create_valid(
admin_client, staff_user, permission_manage_products):
assert staff_user.user_permissions.count() == 0
url = reverse('dashboard:staff-details', kwargs={'pk': staff_user.pk})
data = {
'email': '[email protected]', 'is_staff': True,
'user_permissions': permission_manage_products.pk}
admin_client.post(url, data)
staff_user = User.objects.get(pk=staff_user.pk)
assert staff_user.user_permissions.count() == 1
def test_staff_form_create_not_valid(admin_client, staff_user):
url = reverse('dashboard:staff-details', kwargs={'pk': staff_user.pk})
data = {'csrf': 'examplecsfr'}
admin_client.post(url, data)
staff_user = User.objects.get(pk=staff_user.pk)
assert staff_user.user_permissions.count() == 0
def test_admin_cant_change_his_permissions(admin_client, admin_user):
assert admin_user.is_active
assert admin_user.is_staff
url = reverse('dashboard:staff-details', kwargs={'pk': admin_user.pk})
data = {'is_active': False, 'is_staff': False}
response = admin_client.post(url, data)
admin_user = User.objects.get(pk=admin_user.pk)
assert response.status_code == 200
assert admin_user.is_active
assert admin_user.is_staff
def test_staff_form_remove_permissions_after_unassign_is_staff(
admin_client, staff_user, permission_manage_products):
staff_user.user_permissions.add(permission_manage_products)
assert staff_user.is_active
assert staff_user.is_staff
assert staff_user.user_permissions.count() == 1
url = reverse('dashboard:staff-details', kwargs={'pk': staff_user.pk})
data = {
'email': staff_user.email, 'is_active': True, 'is_staff': False,
'user_permissions': permission_manage_products.pk}
response = admin_client.post(url, data)
staff_user.refresh_from_db()
assert response.status_code == 302
assert staff_user.is_active
assert not staff_user.is_staff
assert staff_user.user_permissions.count() == 0
def test_delete_staff(admin_client, staff_user):
user_count = User.objects.all().count()
url = reverse('dashboard:staff-delete', kwargs={'pk': staff_user.pk})
data = {'pk': staff_user.pk}
response = admin_client.post(url, data)
assert User.objects.all().count() == user_count - 1
assert response['Location'] == reverse('dashboard:staff-list')
def test_delete_staff_no_post(admin_client, staff_user):
user_count = User.objects.all().count()
url = reverse('dashboard:staff-delete', kwargs={'pk': staff_user.pk})
admin_client.get(url)
assert User.objects.all().count() == user_count
def test_delete_staff_with_orders(admin_client, staff_user, order):
order.user = staff_user
order.save()
user_count = User.objects.all().count()
url = reverse('dashboard:staff-delete', kwargs={'pk': staff_user.pk})
data = {'pk': staff_user.pk}
response = admin_client.post(url, data)
    # Staff placed some orders in the past, so his account should not be deleted
assert User.objects.all().count() == user_count
staff_user.refresh_from_db()
# Instead, his privileges are taken away
assert not staff_user.is_staff
assert response['Location'] == reverse('dashboard:staff-list')
def test_staff_create_email_with_set_link_password(admin_client):
user_count = User.objects.count()
mail_outbox_count = len(mail.outbox)
url = reverse('dashboard:staff-create')
data = {'email': '[email protected]', 'is_staff': True}
response = admin_client.post(url, data)
assert User.objects.count() == user_count + 1
assert len(mail.outbox) == mail_outbox_count + 1
assert response['Location'] == reverse('dashboard:staff-list')
def test_send_set_password_email(staff_user, site_settings):
site = site_settings.site
uid = urlsafe_base64_encode(force_bytes(staff_user.pk))
token = default_token_generator.make_token(staff_user)
logo_url = build_absolute_uri(static('images/logo-light.svg'))
password_set_url = build_absolute_uri(
reverse(
'account:reset-password-confirm',
kwargs={'token': token, 'uidb64': uid}))
ctx = {
'logo_url': logo_url,
'password_set_url': password_set_url,
'site_name': site.name}
send_templated_mail(
template_name='dashboard/staff/set_password',
from_email=DEFAULT_FROM_EMAIL,
recipient_list=[staff_user.email],
context=ctx)
assert len(mail.outbox) == 1
generated_link = reverse(
'account:reset-password-confirm',
kwargs={
'uidb64': uid,
'token': token})
absolute_generated_link = build_absolute_uri(generated_link)
    sent_message = mail.outbox[0].body
    assert absolute_generated_link in sent_message
def test_create_staff_and_set_password(admin_client):
url = reverse('dashboard:staff-create')
data = {
'first_name': 'Jan', 'last_name': 'Nowak',
'email': '[email protected]', 'is_staff': True}
response = admin_client.post(url, data)
assert response.status_code == 302
new_user = User.objects.get(email='[email protected]')
assert new_user.first_name == 'Jan'
assert new_user.last_name == 'Nowak'
assert not new_user.password
uid = urlsafe_base64_encode(force_bytes(new_user.pk))
token = default_token_generator.make_token(new_user)
response = admin_client.get(
reverse(
'account:reset-password-confirm',
kwargs={
'uidb64': uid,
'token': token}))
assert response.status_code == 302
post_data = {'new_password1': 'password', 'new_password2': 'password'}
response = admin_client.post(response['Location'], post_data)
assert response.status_code == 302
assert response['Location'] == reverse('account:reset-password-complete')
new_user = User.objects.get(email='[email protected]')
assert new_user.has_usable_password()
def test_create_staff_from_customer(
admin_client, customer_user, permission_manage_products):
url = reverse('dashboard:staff-create')
data = {
'email': customer_user.email, 'is_staff': True,
'user_permissions': permission_manage_products.pk}
admin_client.post(url, data)
customer_user.refresh_from_db()
assert customer_user.is_staff
| bsd-3-clause | 4,133,092,150,845,388,000 | 36.737624 | 77 | 0.681228 | false |
linkhub-sdk/popbill.fax.example.py | getFaxResult.py | 1 | 2664 | # -*- coding: utf-8 -*-
# Code for console encoding differences; safe to ignore.
import sys
import imp
imp.reload(sys)
try:
sys.setdefaultencoding('UTF8')
except Exception as E:
pass
import testValue
from popbill import FaxService, PopbillException
faxService = FaxService(testValue.LinkID, testValue.SecretKey)
faxService.IsTest = testValue.IsTest
faxService.IPRestrictOnOff = testValue.IPRestrictOnOff
faxService.UseStaticIP = testValue.UseStaticIP
faxService.UseLocalTimeYN = testValue.UseLocalTimeYN
'''
Check the fax transmission result using the receipt number (receiptNum) returned when the fax transmission was requested.
- https://docs.popbill.com/fax/python/api#GetFaxResult
'''
try:
print("=" * 15 + " 팩스전송 내역 및 전송상태 확인 " + "=" * 15)
# 팝빌회원 사업자번호
CorpNum = testValue.testCorpNum
# 팩스전송요청시 발급받은 접수번호(receiptNum)
receiptNum = "019012818144300001"
resultList = faxService.getFaxResult(CorpNum, receiptNum)
for index, f in enumerate(resultList):
print(" state (전송상태 코드) : %s" % f.state)
print(" result (전송결과 코드) : %s" % f.result)
print(" sendNum (발신번호) : %s" % f.sendNum)
print(" senderName (발신자명) : %s" % f.senderName)
print(" receiveNum (수신번호) : %s" % f.receiveNum)
print(" receiveName (수신자명) : %s" % f.receiveName)
print(" title (팩스제목) : %s" % f.title)
print(" sendPageCnt (전체 페이지수) : %s" % f.sendPageCnt)
print(" successPageCnt (성공 페이지수) : %s" % f.successPageCnt)
print(" failPageCnt (실패 페이지수) : %s" % f.failPageCnt)
print(" refundPageCnt (환불 페이지수) : %s" % f.refundPageCnt)
print(" cancelPageCnt (취소 페이지수) : %s" % f.cancelPageCnt)
print(" reserveDT (예약일시) : %s" % f.reserveDT)
print(" receiptDT (접수일시) : %s" % f.receiptDT)
print(" sendDT (발송일시) : %s" % f.sendDT)
print(" resultDT (전송결과 수신일시) : %s" % f.resultDT)
print(" fileNames (전송 파일명 리스트) : %s" % f.fileNames)
print(" receiptNum (접수번호) : %s" % f.receiptNum)
print(" requestNum (요청번호) : %s" % f.requestNum)
print(" chargePageCnt (과금 페이지수) : %s" % f.chargePageCnt)
print(" tiffFileSize (변환파일용랑(단위 : byte)) : %s" % f.tiffFileSize + '\n')
except PopbillException as PE:
print("Exception Occur : [%d] %s" % (PE.code, PE.message))
| mit | 2,274,178,893,654,013,200 | 36.737705 | 82 | 0.617289 | false |
coleifer/scout | scout/validator.py | 1 | 2599 | import json
import sys
from flask import request
from scout.constants import PROTECTED_KEYS
from scout.exceptions import error
from scout.models import Index
if sys.version_info[0] == 2:
json_load = lambda d: json.loads(d)
else:
json_load = lambda d: json.loads(d.decode('utf-8') if isinstance(d, bytes)
else d)
class RequestValidator(object):
def parse_post(self, required_keys=None, optional_keys=None):
"""
Clean and validate POSTed JSON data by defining sets of required and
optional keys.
"""
if request.headers.get('content-type') == 'application/json':
data = request.data
elif 'data' not in request.form:
error('Missing correct content-type or missing "data" field.')
else:
data = request.form['data']
if data:
try:
data = json_load(data)
except ValueError:
error('Unable to parse JSON data from request.')
else:
data = {}
required = set(required_keys or ())
optional = set(optional_keys or ())
all_keys = required | optional
keys_present = set(key for key in data if data[key] not in ('', None))
missing = required - keys_present
if missing:
error('Missing required fields: %s' % ', '.join(sorted(missing)))
invalid_keys = keys_present - all_keys
if invalid_keys:
error('Invalid keys: %s' % ', '.join(sorted(invalid_keys)))
return data
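    # Illustrative usage (hypothetical caller, not part of the original module):
    # with required_keys=('name',) and optional_keys=('note',), a request whose
    # JSON body is {"name": "blog"} passes and parse_post() returns the parsed
    # dict; {"note": "x"} aborts with "Missing required fields: name"; and
    # {"name": "blog", "bogus": 1} aborts with "Invalid keys: bogus".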
def validate_indexes(self, data, required=True):
if data.get('index'):
index_names = (data['index'],)
elif data.get('indexes'):
index_names = data['indexes']
elif ('index' in data or 'indexes' in data) and not required:
return ()
else:
return None
indexes = list(Index.select().where(Index.name << index_names))
# Validate that all the index names exist.
observed_names = set(index.name for index in indexes)
invalid_names = []
for index_name in index_names:
if index_name not in observed_names:
invalid_names.append(index_name)
if invalid_names:
error('The following indexes were not found: %s.' %
', '.join(invalid_names))
return indexes
def extract_get_params(self):
return dict(
(key, request.args.getlist(key))
for key in request.args
if key not in PROTECTED_KEYS)
| mit | 5,230,470,322,012,655,000 | 30.313253 | 78 | 0.569065 | false |
ov1d1u/tv-maxe-ng | tv-maxe/tvmaxe.py | 1 | 3058 | #!/usr/bin/env python3
import sys
import os
import logger
import logging
import argparse
from os.path import isfile, join, splitext
from importlib import import_module
from PyQt5 import QtCore, QtWidgets, uic
from PyQt5.QtGui import QIcon, QPixmap
from settingsmanager import SettingsManager
from mainwindow import TVMaxeMainWindow
log = logging.getLogger(__name__)
class TVMaxe(QtWidgets.QApplication):
protocol_plugins = {}
def __init__(self, argv):
        super(TVMaxe, self).__init__(argv)
self.setApplicationName("TV-Maxe")
self.setApplicationVersion("0.1a")
self.setOrganizationDomain("org.tv-maxe.app")
self.setOrganizationName("TV-Maxe")
log.info('{0} {1}'.format(self.applicationName(), self.applicationVersion()))
self.settings_manager = SettingsManager()
self.init_plugins()
log.debug('Current localization: {0}'.format(QtCore.QLocale.system().name()))
translator = QtCore.QTranslator()
translator.load("i18n/{0}.qm".format(QtCore.QLocale.system().name()))
self.installTranslator(translator)
self.mainw = TVMaxeMainWindow(None)
self.mainw.show()
def init_plugins(self):
log.debug('Initializing plugins:')
protocols_dir = 'protocols'
sys.path.insert(0, 'protocols/')
protocol_modules = [f for f in os.listdir(protocols_dir) if isfile(join(protocols_dir, f))]
for filename in protocol_modules:
if filename == '__init__.py' or filename == '__init__.pyc':
continue
file, extension = splitext(filename)
if extension == '.py':
protocol_module = import_module(file)
protocol_class = protocol_module.__classname__
log.debug('- Plugin found: {0} {1} ({2})'.format(
protocol_module.__classname__.name,
protocol_module.__classname__.version,
protocol_module.__classname__)
)
for protocol in protocol_class.protocols:
self.protocol_plugins[protocol] = protocol_class
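    # A minimal protocol plugin, as assumed by init_plugins() above, might look
    # like the sketch below (hypothetical module protocols/http.py; the attribute
    # names are inferred from what init_plugins() reads, not from the real API):
    #
    #   class HTTPProtocol:
    #       name = 'HTTP streams'
    #       version = '0.1'
    #       protocols = ['http', 'https']
    #
    #   __classname__ = HTTPProtocol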
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"--log-level",
help="Sets the logger verbosity",
choices=["debug", "warn", "info"]
)
args = parser.parse_args()
if args.log_level:
if args.log_level == 'debug':
logger.set_logging_level(logging.DEBUG)
elif args.log_level == 'warn':
logger.set_logging_level(logging.WARNING)
else:
logger.set_logging_level(logging.INFO)
else:
logger.set_logging_level(logging.INFO)
if __name__ == '__main__':
parse_args()
if getattr(sys, 'frozen', False):
os.chdir(sys._MEIPASS)
else:
os.chdir(os.path.dirname(os.path.realpath(__file__)))
log.debug('Current working directory: {0}'.format(os.getcwd()))
app = TVMaxe(sys.argv)
sys.exit(app.exec_())
log.debug('Exiting app...') | lgpl-3.0 | 2,025,557,249,018,245,600 | 32.615385 | 99 | 0.611511 | false |
projecthamster/experiments | pulse.py | 1 | 3779 | #!/usr/bin/env python
# - coding: utf-8 -
# Copyright (C) 2010 Toms Bauģis <toms.baugis at gmail.com>
"""
Demo of a timer-based ripple running through nodes and initiating
sub-animations. Not sure where this could come in handy.
"""
from gi.repository import Gtk as gtk
from gi.repository import Gdk as gdk  # needed for gdk.ModifierType in on_mouse_move
from lib import graphics
from lib.pytweener import Easing
from random import random
import math
class Node(graphics.Sprite):
def __init__(self, angle, distance):
graphics.Sprite.__init__(self)
self.angle = angle
self.distance = distance
self.base_angle = 0
self.distance_scale = 1
self.radius = 4.0
self.phase = 0
self.connect("on-render", self.on_render)
def on_render(self, sprite):
self.graphics.clear()
self.x = math.cos(self.angle + self.base_angle) * self.distance * self.distance_scale
self.y = math.sin(self.angle + self.base_angle) * self.distance * self.distance_scale
self.graphics.circle(0, 0, self.radius)
self.graphics.fill("#aaa")
class Scene(graphics.Scene):
def __init__(self):
graphics.Scene.__init__(self)
self.nodes = []
self.tick = 0
self.phase = 0
self.container = graphics.Sprite()
self.add_child(self.container)
self.framerate = 30
self.connect("on-enter-frame", self.on_enter_frame)
self.connect("on-mouse-move", self.on_mouse_move)
def on_mouse_move(self, scene, event):
if gdk.ModifierType.BUTTON1_MASK & event.state:
# rotate and scale on mouse
base_angle = math.pi * 2 * ((self.width / 2 - event.x) / self.width) / 3
distance_scale = math.sqrt((self.width / 2 - event.x) ** 2 + (self.height / 2 - event.y) ** 2) \
/ math.sqrt((self.width / 2) ** 2 + (self.height / 2) ** 2)
for node in self.nodes:
node.base_angle = base_angle
node.distance_scale = distance_scale
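    # Added note: base_angle sweeps roughly +/-60 degrees (2*pi/3 at most) as the
    # pointer moves horizontally across the window, and distance_scale is the
    # pointer's distance from the centre divided by the half-diagonal, so nodes
    # collapse toward the centre as the pointer approaches it.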
def on_enter_frame(self, scene, context):
self.container.x = self.width / 2
self.container.y = self.height / 2
if len(self.nodes) < 100:
for i in range(100 - len(self.nodes)):
angle = random() * math.pi * 2
distance = random() * 500
node = Node(angle, distance)
node.phase = self.phase
self.container.add_child(node)
self.nodes.append(node)
if not self.tick:
self.phase +=1
self.animate(self,
tick = 550,
duration = 3,
on_complete = self.reset_tick,
easing = Easing.Expo.ease_in_out)
for node in self.nodes:
if node.phase < self.phase and node.distance < self.tick:
node.phase = self.phase
self.tweener.kill_tweens(node)
self.animate(node,
duration = 0.5,
radius = 20,
easing = Easing.Expo.ease_in,
on_complete = self.slide_back)
def reset_tick(self, target):
self.tick = 0
def slide_back(self, node):
self.animate(node,
radius = 4,
duration = 0.5,
easing = Easing.Expo.ease_out)
class BasicWindow:
def __init__(self):
window = gtk.Window()
window.set_size_request(600, 500)
window.connect("delete_event", lambda *args: gtk.main_quit())
window.add(Scene())
window.show_all()
example = BasicWindow()
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL) # gtk3 screws up ctrl+c
gtk.main()
| mit | 8,402,965,764,535,398,000 | 31.568966 | 108 | 0.543409 | false |
RianFuro/vint | test/unit/vint/linting/test_cli.py | 1 | 1647 | import unittest
from vint.compat.unittest import mock
from vint.linting.cli import CLI
from vint.bootstrap import import_all_policies
class TestCLI(unittest.TestCase):
@classmethod
def setUpClass(cls):
# For test_start_with_invalid_file_path.
        # The test case wants to load several policies.
import_all_policies()
def assertExitWithSuccess(self, argv):
with mock.patch('sys.argv', argv):
with self.assertRaises(SystemExit) as e:
cli = CLI()
cli.start()
self.assertEqual(e.exception.code, 0)
def assertExitWithFailure(self, argv):
with mock.patch('sys.argv', argv):
with self.assertRaises(SystemExit) as e:
cli = CLI()
cli.start()
self.assertNotEqual(e.exception.code, 0)
def test_start_with_no_arg(self):
argv = ['bin/vint']
self.assertExitWithFailure(argv)
def test_start_with_unexistent_file_path(self):
argv = ['bin/vint', 'path/to/unexistent']
self.assertExitWithFailure(argv)
def test_start_with_valid_file_path(self):
argv = ['bin/vint', 'test/fixture/cli/valid1.vim']
self.assertExitWithSuccess(argv)
def test_start_with_invalid_file_path(self):
argv = ['bin/vint', 'test/fixture/cli/invalid1.vim']
self.assertExitWithFailure(argv)
    def test_start_with_both_valid_invalid_file_paths(self):
argv = ['bin/vint', 'test/fixture/cli/valid1.vim', 'test/fixture/cli/invalid1.vim']
self.assertExitWithFailure(argv)
if __name__ == '__main__':
unittest.main()
| mit | 7,550,699,786,045,907,000 | 26.45 | 91 | 0.625987 | false |
Linutronix/elbe | elbepack/debianize/panels/kernel.py | 1 | 3110 | # ELBE - Debian Based Embedded Rootfilesystem Builder
# Copyright (c) 2016-2017 John Ogness <[email protected]>
# Copyright (c) 2016-2017 Manuel Traut <[email protected]>
# Copyright (c) 2017 Torben Hohn <[email protected]>
# Copyright (c) 2019 Olivier Dion <[email protected]>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import os
import stat
from shutil import copyfile
from elbepack.debianize.panels.base import Panel
from elbepack.debianize.widgets.edit import Edit
from elbepack.debianize.widgets.radio import RadioGroup
from elbepack.directories import mako_template_dir
from elbepack.templates import template
from elbepack.shellhelper import system
class ImgType(object):
BZ = "bzImage"
ZI = "zImage"
UI = "uImage"
I = "Image"
class Kernel(Panel):
match_files = ['Kbuild', 'Kconfig', 'MAINTAINERS', 'kernel/futex.c']
def __init__(self):
loadaddr = Edit("Load Addr", "0x800800")
defconfig = Edit("defconfig", "omap2plus_defconfig")
imgtype = RadioGroup("Image Format", ImgType, ImgType.BZ)
cross = Edit("Cross compile", "arm-linux-gnueabihf-")
k_version = Edit("Kernel version", "4.4")
grid_elements = [
{"loadaddr":loadaddr, "defconfig":defconfig},
{"imgtype":imgtype, "cross_compile":cross},
{"k_version":k_version}
]
super(Kernel, self).__init__(grid_elements)
@staticmethod
def imgtype_to_install(imgtype):
c = imgtype[0]
if c == 'z':
return "zinstall"
if c == 'u':
return "uinstall"
return "install"
def debianize(self):
self.deb['imgtype_install'] = Kernel.imgtype_to_install(self.deb['imgtype'])
self.tmpl_dir = os.path.join(mako_template_dir, 'debianize/kernel')
pkg_name = self.deb['k_version'] + '-' + self.deb['p_name']
for tmpl in [
'control',
'rules',
'preinst',
'postinst',
'prerm',
'postrm']:
with open(os.path.join('debian/', tmpl), 'w') as f:
mako = os.path.join(self.tmpl_dir, tmpl + '.mako')
f.write(template(mako, self.deb))
st = os.stat(os.path.join('debian', 'rules'))
os.chmod(os.path.join('debian', 'rules'), st.st_mode | stat.S_IEXEC)
cmd = 'dch --package linux-' + pkg_name + \
' -v ' + self.deb['p_version'] + \
' --create -M -D ' + self.deb['release'] + \
' "generated by elbe debianize"'
system(cmd)
copyfile(os.path.join(self.tmpl_dir, 'linux-image.install'),
'debian/linux-image-' + pkg_name + '.install')
copyfile(os.path.join(self.tmpl_dir, 'linux-headers.install'),
'debian/linux-headers-' + pkg_name + '.install')
copyfile(os.path.join(self.tmpl_dir, 'linux-libc-dev.install'),
'debian/linux-libc-dev-' + pkg_name + '.install')
self.hint = "use 'dpkg-buildpackage -a%s' to build the package" % (
self.deb['p_arch'])
| gpl-3.0 | -3,399,240,221,416,125,000 | 32.804348 | 84 | 0.587781 | false |
jameinel/juju | acceptancetests/assess_cloud_display.py | 1 | 4632 | #!/usr/bin/env python
from __future__ import print_function
import os
from argparse import ArgumentParser
from contextlib import contextmanager
from copy import deepcopy
from difflib import ndiff
from pprint import pformat
import sys
import yaml
from jujupy import client_from_config
from utility import (
add_arg_juju_bin,
JujuAssertionError,
)
def remove_display_attributes(cloud):
"""Remove the attributes added by display.
The 'defined' attribute is asserted to be 'local'.
The description attribute is asserted to be appropriate for the cloud type.
"""
type_descriptions = {
'openstack': 'Openstack Cloud',
'vsphere': '',
'manual': '',
'maas': 'Metal As A Service',
'lxd': 'LXD Container Hypervisor'
}
# The lack of built-in descriptions for vsphere and manual is
# bug #1646128. The inability to specify descriptions interactively is
# bug #1645783.
defined = cloud.pop('defined')
assert_equal(defined, 'local')
description = cloud.pop('description', "")
# Delete None values, which are "errors" from parsing the yaml
# E.g. output can show values which we show to the customers but should actually not parsed and compared
    for key in list(cloud.keys()):  # copy keys so deletion below is safe
if cloud[key] is None:
del cloud[key]
try:
expected_type = type_descriptions[cloud['type']]
    # We skip types we do not have yet, because this is not part of this test.
    # We only want to check the descriptions of the types listed above.
except Exception:
expected_type = None
assert_equal(description, expected_type)
def get_clouds(client):
cloud_list = yaml.safe_load(client.get_juju_output(
'clouds', '--format', 'yaml', '--local', include_e=False))
    for cloud_name, cloud in list(cloud_list.items()):  # copy items so deletion below is safe
if cloud['defined'] == 'built-in':
del cloud_list[cloud_name]
continue
remove_display_attributes(cloud)
return cloud_list
def get_home_path(client, subpath):
return os.path.join(client.env.juju_home, subpath)
def assert_equal(first, second):
"""If two values are not the same, raise JujuAssertionError.
The text of the error is a diff of the pretty-printed values.
"""
if first != second:
diff = ndiff(pformat(first).splitlines(), pformat(second).splitlines())
raise JujuAssertionError('\n' + '\n'.join(diff))
def assess_clouds(client, expected):
"""Assess how clouds behaves when only expected clouds are defined."""
cloud_list = get_clouds(client)
assert_equal(cloud_list, expected)
def assess_show_cloud(client, expected):
"""Assess how show-cloud behaves."""
for cloud_name, expected_cloud in expected.items():
actual_cloud = yaml.safe_load(client.get_juju_output(
'show-cloud', cloud_name, '--format', 'yaml', '--local', include_e=False))
remove_display_attributes(actual_cloud)
assert_equal(actual_cloud, expected_cloud)
def strip_redundant_endpoints(clouds):
no_region_endpoint = deepcopy(clouds)
for cloud in no_region_endpoint.values():
for region in cloud.get('regions', {}).values():
if region == {} or cloud.get('endpoint', {}) == {}:
continue
if region['endpoint'] == cloud['endpoint']:
region.pop('endpoint')
return no_region_endpoint
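# Worked example of the stripping above (hypothetical data): a region whose
# endpoint equals the cloud-level endpoint loses its own entry, so
#   {'foo': {'endpoint': 'http://e', 'regions': {'r1': {'endpoint': 'http://e'}}}}
# becomes
#   {'foo': {'endpoint': 'http://e', 'regions': {'r1': {}}}}
# while a region with a distinct endpoint keeps it.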
@contextmanager
def testing(test_name):
try:
yield
except Exception:
print('{}: FAIL'.format(test_name))
raise
else:
print('{}: PASS'.format(test_name))
def main():
parser = ArgumentParser()
parser.add_argument('clouds_file')
add_arg_juju_bin(parser)
args = parser.parse_args()
client = client_from_config(None, args.juju_bin)
with client.env.make_juju_home(
client.env.juju_home, 'mytest') as juju_home:
client.env.juju_home = juju_home
with open(get_home_path(client, 'public-clouds.yaml'), 'w') as f:
f.write('')
with testing('assess_clouds (no_clouds)'):
assess_clouds(client, {})
with open(args.clouds_file) as f:
supplied_clouds = yaml.safe_load(f.read().decode('utf-8'))
client.env.write_clouds(client.env.juju_home, supplied_clouds)
no_region_endpoint = strip_redundant_endpoints(supplied_clouds['clouds'])
with testing('assess_clouds'):
assess_clouds(client, no_region_endpoint)
with testing('assess_show_cloud'):
assess_show_cloud(client, no_region_endpoint)
if __name__ == '__main__':
sys.exit(main())
| agpl-3.0 | 991,493,918,524,038,700 | 31.391608 | 108 | 0.643351 | false |
yephper/django | django/bin/roub/home/migrations/0007_auto_20160414_1100.py | 1 | 2138 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-04-14 03:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0006_auto_20160414_1055'),
]
operations = [
migrations.CreateModel(
name='Commodity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('gname', models.CharField(max_length=100, unique=True, verbose_name='商品名称')),
('gdescription', models.CharField(max_length=200, unique=True, verbose_name='商品描述')),
('gcontent', models.TextField(verbose_name='商品详情')),
('cid', models.IntegerField(verbose_name='商品分类id')),
('sid', models.IntegerField(verbose_name='分店ID')),
('goprice', models.FloatField(max_length=10, unique=True, verbose_name='原价')),
('gdprice', models.FloatField(max_length=10, unique=True, verbose_name='折扣价')),
('gstock', models.IntegerField(max_length=10, unique=True, verbose_name='库存')),
('gimg', models.CharField(max_length=200, unique=True, verbose_name='商品图片')),
('pictureset', models.TextField(verbose_name='商品图片集')),
('gorder', models.IntegerField(max_length=5, unique=True, verbose_name='商品排序')),
('gtype', models.IntegerField(max_length=1, unique=True, verbose_name='消费类型')),
('gstatus', models.IntegerField(max_length=1, unique=True, verbose_name='商品状态')),
('gvrebate', models.FloatField(max_length=10, verbose_name='VIP会员返现金额')),
('printid', models.CharField(max_length=32, unique=True, verbose_name='打印机ID')),
('isboutique', models.IntegerField(max_length=1, unique=True, verbose_name='是否精品')),
],
),
migrations.DeleteModel(
name='Goods',
),
]
| bsd-3-clause | -564,066,498,514,451,000 | 49.5 | 114 | 0.588119 | false |
cechrist/cardoon | cardoon/devices/autoThermal.py | 1 | 6405 | """
Generates an electrothermal device from a nonlinear device class
One assumption is that the base class defines numTerms directly in the
class definition and it is not changed in process_params().
-------------------------------------------------------------------
Copyright Carlos Christoffersen <[email protected]>
This file is part of the cardoon electronic circuit simulator.
Cardoon is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, version 3 or later:
http://www.gnu.org/licenses/gpl.html
"""
import numpy as np
from cardoon.globalVars import glVar
# For automatic differentiation:
import cppaddev as ad
def thermal_device(nle):
class ThermalDevice(nle):
"""
Generic electrothermal nonlinear element
Inherits from a regular nonlinear device class (nle). nle can
be a linear device (like resistor.py) but it must implement
all nonlinear functions to use this template. The
electrothermal device is always nonlinear.
Adds one thermal port (pair of terminals) connected after the
regular terminals. Temperature in this port is the difference
with ambient temperature in degrees C. A current source
proportional to the instantaneous power dissipated in device
is connected to the thermal port.
"""
# devtype is the 'model' name
devType = nle.devType + '_t'
# Force nonlinear behaviour (even if base class is linear, see
# resistor.py)
isNonlinear = True
def __init__(self, instanceName):
nle.__init__(self, instanceName)
self.__addThermalPorts = True
if nle.numTerms:
# Add two thermal terminals
self.numTerms = nle.numTerms + 2
self.__varTerms = False
else:
self.__varTerms = True
def process_params(self):
"""
Process parameters as in base class
Add extra thermal terminals if __addThermalPorts is True
"""
# Process parameters in base class
nle.process_params(self, thermal = True)
# Add thermal terminals to control and output tuples only
# if needed. Base class must reset __addThermalPorts to
# True if needed.
if self.__addThermalPorts:
# Add units to thermal port
self.connection[self.numTerms-1].unit = \
'+{0} C'.format(glVar.temp)
self.connection[self.numTerms-2].unit = \
'+{0} C'.format(glVar.temp)
self.csOutPorts = self.csOutPorts + [(self.numTerms-1,
self.numTerms-2)]
self.controlPorts = self.controlPorts + [(self.numTerms-2,
self.numTerms-1)]
# Thermal output number
self.__ton = len(self.csOutPorts) - 1
# Thermal control port number
self.__tpn = len(self.controlPorts) - 1
self.__addThermalPorts = False
# Initial guess for input ports:
try:
# Consider time-delayed ports come after regular control ports
if len(self.vPortGuess) < len(self.controlPorts) + self.nDelays:
self.vPortGuess = np.insert(self.vPortGuess, self.__tpn, 0.)
except AttributeError:
# Ignore if vPortGuess not provided
pass
def eval_cqs(self, vPort, getOP = False):
"""
vPort is a vector with control voltages (last port is thermal)
"""
if getOP:
# assume that temperature is not passed in vPort: that
# is because only nle can call this function with
# getOP = True
opDict = nle.eval_cqs(self, vPort, True)
return opDict
# set temperature in base class first
self.temp = vPort[self.__tpn] + glVar.temp
nle.set_temp_vars(self, self.temp)
# Remove thermal port from vPort (needed in case
# time-delayed ports follow regular control ports)
vPort1 = np.delete(vPort, self.__tpn)
# now calculate currents and charges
(iVec1, qVec) = nle.eval_cqs(self, vPort1)
# Calculate instantaneous power
pout = nle.power(self, vPort1, iVec1)
# Re-arrange output vector
iVec = np.append(iVec1, pout)
return (iVec, qVec)
# Create these using the AD facility
eval_and_deriv = ad.eval_and_deriv
eval = ad.eval
def power(self, vPort, ioutV):
"""
Calculate total instantaneous power
Input: control voltages and currents from eval_cqs()
It works OK even if ioutV includes charges
"""
# Power is already stored in ioutV
return ioutV[self.__ton]
# Return template class
return ThermalDevice
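# Hypothetical usage sketch (module and class names below are placeholders, not
# taken from this code base): wrapping an existing nonlinear device class to get
# its electrothermal version, with two thermal terminals appended at the end.
#
#   from cardoon.devices import diode
#   ThermalDiode = thermal_device(diode.Device)
#   d1 = ThermalDiode('d1')   # numTerms is the diode's terminal count plus 2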
# No need to override this: too much overhead and not much advantage
#
# def get_OP(self, vPort):
# """
# Calculates operating point information
#
# Input: vPort (port voltages, including thermal)
# Output: dictionary with OP variables
# """
# # set temperature in base class first
# temp = vPort[self.__tpn] + glVar.temp
# nle.set_temp_vars(self, temp)
#
# # Remove thermal port from vPort (needed in case
# # time-delayed ports follow regular control ports)
# vPort1 = np.delete(vPort, self.__tpn)
# # now calculate currents and charges
# (iVec1, qVec) = nle.eval_cqs(self, vPort1)
# # Calculate instantaneous power
# pout = nle.power(self, vPort1, iVec1)
#
# # Get operating point dictionary from base class
# opDict = nle.get_OP(self, vPort)
# # Add temperature / power
# opDict.update({'Temp': temp,
# 'Power': pout})
# return opDict
| gpl-3.0 | -7,248,309,319,612,847,000 | 37.353293 | 80 | 0.560812 | false |
pylonsoflight/kea | interfaces/rectangular/rectangular.py | 1 | 30689 | # rectangular
# Created by Rains Jordan
# Last updated 12/4/14
#
# Non-standard shortcuts:
# Ctrl+Home, Ctrl+End: Move to start/end of playlist column view, if the window size is
# too small.
#
# Notes:
# This is a simple demonstration of a sample kea interface, using a standard windowed interface
# with a track table and a directory tree.
#
# Implementation notes:
# This file is more of a custom-rigged interface than the Shaped interface, with a custom table
# and track pane which are highly dependent on each other, making free use of each other's
# resources.
# This file demonstrates a main widget-heavy solution, where most of the code is based around
# the main widget. Even the main window code takes place there, using custom events. (The
# alternative would be to use a StdMainWidget subclass like CommonStdMainWidget.)
# There are a few commented-out blocks of code in this file where I explain how we would do
# things if using StdMainWindowBase instead of StdMainWindow.
#
# To do:
#
#!/usr/bin/env python
from __future__ import absolute_import, print_function
if __name__ == '__main__':
import direct_test_enabler # Make the interface runnable directly, for testing purposes. This should be done before most of the other imports.
from std_interface_imports import *
from std_main_window import StdMainWindow
from dir_system_model import DirSystemModel, GetBRemoveSingleTreeRootNode
from std_player_manager import StdPlayerManager
from std_progress_bar import StdProgressBar
from simple_track_query_table import SimpleTrackQueryTable
parentClass = StdMainWindow
config = GetConfigFile()
imgDir = GetImgDir()
# A lot of literals are used for positioning purposes in this file, but here are a few ones that are worth naming.
SEARCH_AREA_WIDTH = 203 # Approximate width of the relevant part of the search area decoration image. The + 1 is cosmetic.
SEARCH_AREA_HEIGHT = 112
SEARCH_AREA_RIGHT_BUFFER = 84 # The amount of pixels needed to push the search area right decoration to the correct position.
SEARCH_AREA_LEFT_WIDTH = 118
SEARCH_AREA_MIDDLE_WIDTH = 1 # The width of the scalable middle section.
ALBUM_ART_SIZE = 192
VOLUME_BAR_TOP = 22
VOLUME_BAR_HEIGHT = 59
PROGRESS_BAR_LEFT = 300
PROGRESS_BAR_RIGHT = 20
class CustomSplitter(QSplitter):
"""A custom QSplitter designed to work with my main widget."""
def __init__(self, orientation, parent):
self.parent = parent
# For some reason, if the parent is supplied, Qt acts weird and doesn't display the tree right. So we won't pass the parent argument in.
super(CustomSplitter, self).__init__(orientation)
def createHandle(self):
return CustomSplitterHandle(Qt.Horizontal, self)
class CustomSplitterHandle(QSplitterHandle):
def __init__(self, orientation, parent):
self.parent = parent
super(CustomSplitterHandle, self).__init__(orientation, parent)
def mouseMoveEvent(self, event):
self.parent.parent.tree.resize(self.parent.parent.treeFrame.width(), self.parent.parent.treeFrame.height() - SEARCH_AREA_HEIGHT)
super(CustomSplitterHandle, self).mouseMoveEvent(event)
if self.parent.sizes()[0] < SEARCH_AREA_WIDTH: # Test the width of the first element in the splitter.
self.parent.parent.searchDecorationLeft.hide()
self.parent.parent.searchDecorationMiddle.hide()
self.parent.parent.searchDecorationRight.hide()
self.parent.parent.searchText.hide()
self.parent.parent.searchBox.hide()
else:
self.parent.parent.searchDecorationMiddle.setScaledContents(True)
self.parent.parent.searchDecorationMiddle.resize(self.parent.parent.treeFrame.width() - SEARCH_AREA_WIDTH + SEARCH_AREA_MIDDLE_WIDTH, SEARCH_AREA_HEIGHT)
self.parent.parent.searchDecorationMiddle.move(SEARCH_AREA_LEFT_WIDTH, self.parent.parent.height() - SEARCH_AREA_HEIGHT)
self.parent.parent.searchDecorationRight.move(self.parent.parent.treeFrame.width() - SEARCH_AREA_RIGHT_BUFFER, self.parent.parent.height() - SEARCH_AREA_HEIGHT)
self.parent.parent.searchDecorationLeft.show()
self.parent.parent.searchDecorationMiddle.show()
self.parent.parent.searchDecorationRight.show()
self.parent.parent.searchText.show()
self.parent.parent.searchBox.show()
# Note: This class has custom mouse events, but I've left them as external event handlers, so to speak, in the main widget because they're so inextricably linked with the widget's activities.
class LibraryTreeView(QTreeView):
def edit(self, index, trigger, event):
if trigger == QtGui.QAbstractItemView.DoubleClicked:
return False
return QtGui.QTreeView.edit(self, index, trigger, event)
class MainWidget(QWidget):
def __init__(self, parent): # The main widget should take the main window as its first argument.
super(MainWidget, self).__init__(parent)
self.parent = parent
##--- Tree
self.dirSystemModel = DirSystemModel(self, config)
self.treeFrame = QFrame()
self.treeFrame.setStyleSheet('background-color: #e2e2e2;')
self.treeFrame.setMinimumSize(SEARCH_AREA_WIDTH, 0) # For search area.
self.treeFrame.resizeEvent = self.resizeTreeFrameEvent
self.tree = LibraryTreeView(self.treeFrame)
self.tree.setModel(self.dirSystemModel)
if len(GetLibraryDirs()) == 1:
if not GetBRemoveSingleTreeRootNode(config):
# Expand the first node of the tree.
firstIndex = self.tree.model().index(0, 0, QModelIndex())
self.tree.setExpanded(firstIndex, True)
self.tree.setStyleSheet('QTreeView { border: 0px; }')
self.tree.setHeaderHidden(True)
self.tree.keyPressEvent = self.keyPressTreeEvent
self.tree.mousePressEvent = self.mousePressTreeEvent
self.tree.mouseDoubleClickEvent = self.mouseDoubleClickTreeEvent
treeScrollbarStyle = '''
/* Style #1 */
/*QScrollBar:vertical { width: 14px; background-color: white; color: black; }*/
/* Style #2 */
QScrollBar:vertical { width: 8px; }
QScrollBar:handle:vertical { min-height: 5px; border-top: 8px solid #e2e2e2; border-bottom: 8px solid #e2e2e2; border-left: 2px solid #e2e2e2; border-right: 2px solid #e2e2e2; background: qlineargradient(x1: 0, y1: 0.8, x2: 1, y2: 0.8, stop: 0 darkgray, stop: 1 black); }
QScrollBar:sub-page, QScrollBar:add-page { background-color: #e2e2e2; }
'''
self.tree.verticalScrollBar().setStyleSheet(treeScrollbarStyle)
##--- The rest of the widget
hbox = QHBoxLayout(self)
hbox.setContentsMargins(0, 0, 0, 0)
hSplitter = CustomSplitter(Qt.Horizontal, self)
self.table = MainTable(self)
self.trackFrame = TrackFrame(self)
vSplitter = QSplitter(Qt.Vertical)
vSplitter.addWidget(self.table)
vSplitter.addWidget(self.trackFrame)
# Make the track window not resize when the overall window size is changed.
vSplitter.setStretchFactor(0, 1)
vSplitter.setStretchFactor(1, 0)
vSplitter.setStyleSheet('QSplitter { background: #e6e6e6; height: 1px; }')
hSplitter.addWidget(self.treeFrame)
hSplitter.addWidget(vSplitter)
hSplitter.setObjectName('mainHSplitter')
hSplitter.setStyleSheet('QSplitter#mainHSplitter { width: 2px; } QSplitter#mainHSplitter::handle { background-color: #8a8a8a; border-left: 1px solid #9b9b9b; }')
# It doesn't matter much what the second argument (or the first argument, exactly) here is, as long as it's big enough to handle the rest of the widget.
hSplitter.setSizes([SEARCH_AREA_WIDTH, parent.width()])
hbox.addWidget(hSplitter)
self.setLayout(hbox)
# Note: We'll set the positions of these things in the resize event.
self.searchDecorationLeft = QLabel(self)
pixmap = QPixmap(os.path.join(imgDir, 'search_area_left.png'))
self.searchDecorationLeft.setPixmap(pixmap)
self.searchDecorationLeft.resize(pixmap.rect().size())
#self.searchDecorationLeft.setStyleSheet('background-color: rgba(0,0,0,0%)') # TODO: Remove.
self.searchDecorationMiddle = QLabel(self)
pixmap = QPixmap(os.path.join(imgDir, 'search_area_middle.png'))
self.searchDecorationMiddle.setPixmap(pixmap)
self.searchDecorationMiddle.resize(pixmap.rect().size())
#self.searchDecorationMiddle.setStyleSheet('background-color: rgba(0,0,0,0%)') # TODO: Remove.
self.searchDecorationRight = QLabel(self)
pixmap = QPixmap(os.path.join(imgDir, 'search_area_right.png'))
self.searchDecorationRight.setPixmap(pixmap)
self.searchDecorationRight.resize(pixmap.rect().size())
#self.searchDecorationRight.setStyleSheet('background-color: rgba(0,0,0,0%)') # TODO: Remove, I guess.
self.searchText = QLabel('Search:', parent=self)
self.searchText.setFont(QFont(config.get('General', 'fontName'), int(config.get('General', 'fontSize')) - 1))
self.searchText.setAttribute(Qt.WA_TranslucentBackground)
self.searchBox = QLineEdit(self)
self.searchBox.setStyleSheet('background-color: #e2e2e2; border-top: 1px solid #adadad; border-bottom: 1px solid #adadad; border-left: 2px solid #adadad; border-right: 2px solid #adadad;')
self.searchBox.resize(self.searchBox.width(), self.searchBox.height() - 5) # TODO customize based on OS or something?
##--- Add action(s) to main toolbar
self.parent.mainMenu.settingsMenu.addSeparator()
removeSingleTreeNodeAction = QAction('&Remove single tree node?', self)
removeSingleTreeNodeAction.setCheckable(True)
removeSingleTreeNodeAction.setChecked(GetBRemoveSingleTreeRootNode(config))
removeSingleTreeNodeAction.triggered.connect(self.ToggleRemoveSingleTreeNode)
self.parent.mainMenu.settingsMenu.addAction(removeSingleTreeNodeAction)
# These won't catch the first events, from before the main widget is created, but it doesn't matter.
# NOTE: We would use this if using StdMainWindowBase.
#self.parent.moveEvent = self.moveWindowEvent
#self.parent.resizeEvent = self.resizeWindowEvent
self.searchBox.keyPressEvent = self.keyPressSearchBoxEvent
self.searchBox.textChanged.connect(self.SearchTextChanged)
self.searchText.mousePressEvent = self.mousePressSearchTextEvent
SetLibraryChangedCallback(self.LibraryChangedCallback)
CreateStdHotkeys(self, self.trackFrame.playerManager, config)
QShortcut(QKeySequence('Ctrl+T'), self, self.OnFocusTree)
QShortcut(QKeySequence('Ctrl+F'), self, self.OnFocusSearch)
QShortcut(QKeySequence('Ctrl+R'), self, self.OnFocusResults)
# TODO delete
#self.parent.installEventFilter(self)
#from std_main_window import AboutDialog
#self.tempDialog = AboutDialog(parent)
def LibraryChangedCallback(self, bRedrawTree=True):
if bRedrawTree:
self.tree.model().Reload()
self.table.ReloadQuery()
AcceptCurrentLibrary()
def OnFocusTree(self):
self.tree.setFocus()
def OnFocusSearch(self):
self.searchBox.setFocus()
def OnFocusResults(self):
# TODO highlight row here? not always done already. maybe not.
self.table.setFocus()
def LoadDirContents(self, dir):
self.table.ModifyQuery(InsertDirSearchIntoQuery, dir)
def LaunchSearch(self):
word = self.searchBox.text()
if len(word) > 0:
self.table.ModifyQuery(InsertWordSearchIntoQuery, word)
def resizeEvent(self, event):
self.searchDecorationLeft.move(0, self.height() - SEARCH_AREA_HEIGHT)
self.searchDecorationMiddle.setScaledContents(True)
self.searchDecorationMiddle.resize(self.treeFrame.width() - SEARCH_AREA_WIDTH + SEARCH_AREA_MIDDLE_WIDTH, SEARCH_AREA_HEIGHT)
self.searchDecorationMiddle.move(SEARCH_AREA_LEFT_WIDTH, self.height() - SEARCH_AREA_HEIGHT)
self.searchDecorationRight.move(self.treeFrame.width() - SEARCH_AREA_RIGHT_BUFFER, self.height() - SEARCH_AREA_HEIGHT)
self.searchText.move(28, self.height() - 91)
self.searchBox.move(16, self.height() - 64)
super(MainWidget, self).resizeEvent(event)
'''
# NOTE: We would use this if using StdMainWindowBase.
def moveWindowEvent(self, event):
bMaximized = self.parent.windowState() & Qt.WindowMaximized
if not bMaximized:
config.set('Program', 'x', str(event.pos().x()))
config.set('Program', 'y', str(event.pos().y()))
super(parentClass, self.parent).moveEvent(event)
def resizeWindowEvent(self, event):
if self.parent.windowState() & Qt.WindowMaximized:
config.set('Program', 'bMaximized', str(True))
else:
config.set('Program', 'bMaximized', str(False))
config.set('Program', 'width', str(event.size().width()))
config.set('Program', 'height', str(event.size().height()))
super(parentClass, self.parent).resizeEvent(event)
'''
def ToggleRemoveSingleTreeNode(self, bChecked):
config.set('Program', 'bRemoveSingleTreeRootNode', str(bChecked))
ShowRestartChangesMessage()
def resizeTreeFrameEvent(self, event):
self.tree.resize(self.treeFrame.width(), self.treeFrame.height() - SEARCH_AREA_HEIGHT)
def keyPressTreeEvent(self, event):
if event.key() == Qt.Key_Right:
index = self.tree.currentIndex()
if not self.tree.isExpanded(index):
self.dirSystemModel.AllowExpandLayer(index)
elif event.key() == Qt.Key_Return:
index = self.tree.currentIndex()
dir = self.dirSystemModel.dir(index)
if dir is not None:
self.LoadDirContents(dir)
super(LibraryTreeView, self.tree).keyPressEvent(event)
def mousePressTreeEvent(self, event):
"""Select a tree branch."""
# Note: Doesn't check to see if indexes apply to actual items, but that seems to be harmless.
if event.buttons() & Qt.LeftButton:
index = self.tree.indexAt(event.pos())
dir = self.dirSystemModel.dir(index)
if dir is not None: # It will be None if we, say, clicked on some random spot in the tree view that didn't have an actual item there.
self.LoadDirContents(dir)
super(LibraryTreeView, self.tree).mousePressEvent(event)
def mouseDoubleClickTreeEvent(self, event):
"""Open/close a tree branch."""
# Note: Doesn't check to see if indexes apply to actual items, but that seems to be harmless.
if event.buttons() & Qt.LeftButton:
index = self.tree.indexAt(event.pos())
# TODO: I have no idea why the boolean values used in here work. They should be the opposite.
if self.tree.isExpanded(index):
self.tree.setExpanded(index, True)
else:
self.dirSystemModel.AllowExpandLayer(index)
self.tree.setExpanded(index, False)
super(LibraryTreeView, self.tree).mouseDoubleClickEvent(event)
def keyPressSearchBoxEvent(self, event):
if event.key() == Qt.Key_Return:
self.LaunchSearch()
else:
QLineEdit.keyPressEvent(self.searchBox, event)
def SearchTextChanged(self, text):
font = self.searchText.font()
bNonBlank = len(text) != 0
font.setItalic(bNonBlank)
font.setUnderline(bNonBlank)
if bNonBlank:
self.searchText.setStyleSheet('color: #111d79;')
else:
self.searchText.setStyleSheet('color: black;')
self.searchText.setFont(font)
def mousePressSearchTextEvent(self, event):
if event.buttons() & Qt.LeftButton:
self.LaunchSearch()
class MainTable(SimpleTrackQueryTable):
def __init__(self, parent):
super(MainTable, self).__init__(parent, highlightColor='#cfc7d6', bAlternatingRowColors=True)
# Set up various important details.
self.columnHeaders = {}
self.columnVisualIndexes = {}
self.sortedHeader = None
self.sorted = None
# Parent overrides
self.bHidePathColumn = False # We always need a path column so the data will be accessible, but it may not be visible.
self.LoadConfigFile()
self.horizontalHeader().setStretchLastSection(False) # TODO: keep?
self.horizontalHeader().show() # Parent override
self.horizontalHeader().setStyleSheet('font-size: 12px; font-weight: bold; color: {};'.format(config.get('General', 'headerColor')))
self.verticalHeader().setClickable(True)
self.horizontalHeader().setMovable(True)
self.horizontalHeader().sectionClicked.connect(self.HeaderClicked)
self.horizontalHeader().sectionMoved.connect(self.ColumnMoved)
self.horizontalHeader().sectionResized.connect(self.ColumnResized)
self.libChangedCallbackArgs = (False,) # Allows us not to redraw the tree when mere file(s) are deleted.
self.SetItemCallback(self.ChooseTrack)
def ChooseTrack(self, path, row):
gbl.currentTrackRow = row
self.parent.trackFrame.PlayFile(path)
def LoadConfigFile(self):
columnSizes = {}
fieldLookup = {'title':'title AS Title', 'artist':'artist AS Artist', 'album':'album AS Album', 'track':"track AS '#'", 'length':'length AS Length', 'year':'year AS Year', 'genre':'genre AS Genre', 'path':'path AS Path'}
self.query = 'SELECT '
i = 0
while True:
if config.has_option('Table', 'c' + str(i)):
value = config.get('Table', 'c' + str(i))
if value not in fieldLookup:
raise ConfigValueError(value)
self.columnHeaders[i] = value
self.columnVisualIndexes[i] = i
columnSizes[i] = int(config.get('Table', 'c' + str(i) + 'width'))
self.query += fieldLookup[value] + ', '
if value == 'path':
self.dataColumn = i # Set the path column.
else:
break
i += 1
# The path needs to be in our results somewhere, so generate and hide it if necessary.
if self.dataColumn is None:
self.query += 'path, '
self.dataColumn = i
self.bHidePathColumn = True
# The initial query behavior. Note that we create an extra column as padding.
self.query += "NULL AS '' FROM File, Artist, Album, Genre WHERE File.artistid == Artist.artistid AND File.albumid == Album.albumid AND File.genreid == Genre.genreid ORDER BY "
# The initial sorting behavior.
self.query += 'artist COLLATE NOCASE ASC, album COLLATE NOCASE ASC'
self.sortedHeader = 'artist'
self.sorted = SORTED_ASC
self.setFont(QFont(config.get('General', 'fontName'), int(config.get('General', 'fontSize'))))
self._LoadQuery(self.query, bLoadEvenIfSameQuery=True)
for i in range(len(columnSizes)):
self.setColumnWidth(i, columnSizes[i])
self.setColumnHidden(self.model.columnCount() - 1, True)
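    # For reference, the [Table] section read above is expected to look roughly
    # like this (illustrative values; the real settings ship with the interface):
    #
    #   [Table]
    #   c0 = artist
    #   c0width = 160
    #   c1 = album
    #   c1width = 160
    #   c2 = title
    #   c2width = 240
    #   c3 = length
    #   c3width = 60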
def HeaderClicked(self, index):
"""Sorts the contents of the table when a header is clicked."""
if index >= len(self.columnHeaders): # Clicked on the empty padding column.
return
#print('CLICKED', index, self.columnHeaders[index]) # TODO remove
header = self.columnHeaders[index]
if self.sortedHeader == header and self.sorted == SORTED_ASC:
if header == 'artist':
self.query = StripOrderBy(self.query) + ' ORDER BY artist COLLATE NOCASE DESC, album COLLATE NOCASE ASC'
else:
self.query = StripOrderBy(self.query) + ' ORDER BY ' + header + ' COLLATE NOCASE DESC'
self.sorted = SORTED_DESC
else:
if header == 'artist':
self.query = StripOrderBy(self.query) + ' ORDER BY artist COLLATE NOCASE ASC, album COLLATE NOCASE ASC'
else:
self.query = StripOrderBy(self.query) + ' ORDER BY ' + header + ' COLLATE NOCASE ASC'
self.sorted = SORTED_ASC
self.sortedHeader = header
self._LoadQuery(self.query, bLoadEvenIfSameQuery=True)
#print(self.query) # TODO remove
def ColumnMoved(self, logicalIndex, oldVisualIndex, newVisualIndex):
# If we either moved the empty padding column or moved something past that column, it'll screw up our visual index arithmetic, so undo it.
if logicalIndex >= len(self.columnHeaders) or newVisualIndex >= len(self.columnHeaders):
self.horizontalHeader().sectionMoved.disconnect(self.ColumnMoved)
self.horizontalHeader().moveSection(newVisualIndex, oldVisualIndex)
self.horizontalHeader().sectionMoved.connect(self.ColumnMoved)
return
movedWidth = config.get('Table', 'c' + str(oldVisualIndex) + 'width') # Store this for later.
# Move other columns' visual indexes to fit with the newly moved column. We'll go in order, either up or down depending on which direction the column moved in, so that we only ever overwrite options we've already dealt with.
# Note that we don't need to "wipe out" the old options when their index numbers change, as they're guaranteed to be overwritten by new values by the end of the loop anyway.
if oldVisualIndex < newVisualIndex:
# Move columns left one by one, starting with the leftmost.
for i in sorted(self.columnVisualIndexes, key=self.columnVisualIndexes.get): # Scan columns in ascending order by visual index.
if i != logicalIndex: # Don't move our original column.
visualIndex = self.columnVisualIndexes[i]
if visualIndex > oldVisualIndex and visualIndex <= newVisualIndex:
self.columnVisualIndexes[i] -= 1
width = config.get('Table', 'c' + str(visualIndex) + 'width')
config.set('Table', 'c' + str(visualIndex - 1) + 'width', str(width))
config.set('Table', 'c' + str(visualIndex - 1), self.columnHeaders[i])
else:
# Move columns right one by one, starting with the rightmost.
for i in sorted(self.columnVisualIndexes, key=self.columnVisualIndexes.get, reverse=True): # Scan columns in descending order by visual index.
if i != logicalIndex: # Don't move our original column.
visualIndex = self.columnVisualIndexes[i]
if visualIndex < oldVisualIndex and visualIndex >= newVisualIndex:
self.columnVisualIndexes[i] += 1
width = config.get('Table', 'c' + str(visualIndex) + 'width')
config.set('Table', 'c' + str(visualIndex + 1) + 'width', str(width))
config.set('Table', 'c' + str(visualIndex + 1), self.columnHeaders[i])
self.columnVisualIndexes[logicalIndex] = newVisualIndex
config.set('Table', 'c' + str(newVisualIndex), self.columnHeaders[logicalIndex])
config.set('Table', 'c' + str(newVisualIndex) + 'width', movedWidth)
def ColumnResized(self, logicalIndex, oldSize, newSize):
if logicalIndex >= len(self.columnHeaders): # Resized the empty padding column, either directly or indirectly (by fiddling with the window, or other columns).
return
visualIndex = self.columnVisualIndexes[logicalIndex] # We need to use the visual index because that's what's used in the config file; our logical indexes never change while the program is running.
config.set('Table', 'c' + str(visualIndex) + 'width', str(newSize))
class TrackFrame(QFrame):
def __init__(self, parent):
super(TrackFrame, self).__init__(parent)
self.parent = parent
self.resize(1, 216) # The width doesn't matter.
self.setStyleSheet('QFrame { background-color: #d8d8d8; }')
self.playerManager = StdPlayerManager(self, trackTable=self.parent.table)
# Note: We set the size/position of many of these things in the resize event.
##--- Album art decoration
self.albumArtDecoration = QLabel(self)
self.albumArtDecoration.setPixmap(QPixmap(os.path.join(imgDir, 'album_art_area.png')))
self.albumArtDecoration.setAttribute(Qt.WA_TranslucentBackground)
##--- Album art
self.albumArt = QLabel(self)
self.albumArt.setAttribute(Qt.WA_TranslucentBackground)
self.albumArt.move(12, 9)
##--- Textual info
fontName = config.get('General', 'paneFontName')
fontSize = int(config.get('General', 'paneFontSize'))
font = QFont(fontName, fontSize)
maxSizeUsed = fontSize + 5 # The maximum font size that will be used here. It's okay to overestimate this, so I have. Truth is, we could use a huge value with no problem.
maxFontMetrics = QFontMetrics(QFont(fontName, maxSizeUsed))
self.maxFontHeight = maxFontMetrics.height() # It doesn't matter really if this is too big.
self.artistText = QLabel(self)
self.artistText.setFont(font)
self.artistText.setAttribute(Qt.WA_TranslucentBackground)
self.artistText.move(242, 30)
self.albumText = QLabel(self)
self.albumText.setFont(font)
self.albumText.setAttribute(Qt.WA_TranslucentBackground)
self.albumText.move(242, 58)
self.trackText = QLabel(self)
self.trackText.setFont(QFont(fontName, fontSize + 2))
self.trackText.setAttribute(Qt.WA_TranslucentBackground)
self.trackText.setTextFormat(Qt.RichText) # Possibly unnecessary.
self.trackText.move(242, 86)
self.genreText = QLabel(self)
self.genreText.setFont(QFont(fontName, fontSize - 2))
self.genreText.setAttribute(Qt.WA_TranslucentBackground)
self.genreText.move(250, 118)
##--- Buttons
self.playerManager.AddPrevButton(os.path.join(imgDir, 'prev_button.png'), os.path.join(imgDir, 'prev_button_hover.png'), 0, 0)
self.playerManager.AddPauseButton(os.path.join(imgDir, 'play_button.png'), os.path.join(imgDir, 'play_button_hover.png'), os.path.join(imgDir, 'pause_button.png'), os.path.join(imgDir, 'pause_button_hover.png'), 0, 0)
self.playerManager.AddNextButton(os.path.join(imgDir, 'next_button.png'), os.path.join(imgDir, 'next_button_hover.png'), 0, 0)
##--- Progress bar
self.progressBar = StdProgressBar(self, 0, 0, 0, 0, self.playerManager.player)
##--- Volume bar
self.volumeBar = VolumeBar(self, self.playerManager)
QShortcut(QKeySequence('Ctrl+Alt+Up'), self, self.IncreaseVolume)
QShortcut(QKeySequence('Ctrl+Alt+Down'), self, self.DecreaseVolume)
def resizeEvent(self, event):
self.albumArtDecoration.move(0, self.height() - 172)
# This is all sort of unnecessary since we could pick some unreasonably large number at the start and stick with it, but let's do this anyway.
maxTextWidth = self.width() # Doesn't matter if this is too big, really.
self.artistText.resize(maxTextWidth, self.maxFontHeight)
self.albumText.resize(maxTextWidth, self.maxFontHeight)
self.trackText.resize(maxTextWidth, self.maxFontHeight)
self.genreText.resize(maxTextWidth, self.maxFontHeight)
self.playerManager.prevButton.move(self.width() * .62, 13)
self.playerManager.pauseButton.move(self.width() * .62 + 38, 13)
self.playerManager.nextButton.move(self.width() * .62 + 68, 13)
progressBarX = self.width() / 2
progressBarWidth = self.width() / 2 - PROGRESS_BAR_RIGHT
if progressBarX > PROGRESS_BAR_LEFT:
progressBarWidth += progressBarX - PROGRESS_BAR_LEFT
progressBarX = PROGRESS_BAR_LEFT
self.progressBar.resize(progressBarWidth, 50)
self.progressBar.move(progressBarX, 130)
self.volumeBar.volumeBarFill.move(self.width() - 39, self.volumeBar.volumeBarFill.y())
self.volumeBar.volumeBarShape.move(self.width() - 39, VOLUME_BAR_TOP)
super(TrackFrame, self).resizeEvent(event)
def PlayFile(self, path):
self.playerManager.PlayFile(path)
# Set values for each field.
tags = gbl.currentTrackTags
fontName = config.get('General', 'paneFontName')
fontSize = int(config.get('General', 'paneFontSize'))
font = QFont(fontName, fontSize)
oldDir = os.path.dirname(gbl.currentTrackPath) if gbl.currentTrackPath is not None else None
dir = os.path.dirname(tags[DB_PATH])
if dir != oldDir:
albumArtPath = GetAlbumArtFromDir(dir)
if albumArtPath is None:
self.albumArt.hide()
else:
albumArtPixmap = QPixmap(albumArtPath)
self.albumArt.setPixmap(albumArtPixmap.scaled(ALBUM_ART_SIZE, ALBUM_ART_SIZE, Qt.KeepAspectRatio, Qt.SmoothTransformation))
self.albumArt.resize(ALBUM_ART_SIZE, ALBUM_ART_SIZE) # Note: This is only necessary when setting the album art later instead of at the start.
self.albumArt.show()
if tags[DB_ARTIST] is None:
self.artistText.setText('')
else:
self.artistText.setText(tags[DB_ARTIST])
if tags[DB_ALBUM] is None:
self.albumText.setText('')
else:
self.albumText.setText(tags[DB_ALBUM])
trackStr = ''
if tags[DB_TRACK] is not None:
trackStr += "<span style='font-size: {}pt;'>#{}: </span>".format(fontSize, str(tags[DB_TRACK]))
if tags[DB_TITLE] is not None:
trackStr += tags[DB_TITLE]
self.trackText.setText(trackStr)
if tags[DB_GENRE] is None:
self.genreText.setText('')
else:
self.genreText.setText(tags[DB_GENRE])
def DecreaseVolume(self):
self.playerManager.player.DecreaseVolume()
self.volumeBar.SetPercent(self.playerManager.player.GetVolume())
#print(self.playerManager.player.GetVolume()) # TODO remove
def IncreaseVolume(self):
self.playerManager.player.IncreaseVolume()
self.volumeBar.SetPercent(self.playerManager.player.GetVolume())
#print(self.playerManager.player.GetVolume()) # TODO remove
class VolumeBar:
"""Custom volume bar, designed only for use in this interface."""
def __init__(self, parent, playerManager):
self.parent = parent
self.playerManager = playerManager
self.volumeBarFill = QLabel(self.parent)
self.volumeBarFill.setPixmap(QPixmap(os.path.join(imgDir, 'volume_bar_fill.png')))
self.volumeBarFill.setStyleSheet('background: transparent;') # Note: Only relevant if the image itself has transparency.
self.volumeBarFill.move(0, VOLUME_BAR_TOP) # We'll set the x position later.
self.volumeBarShape = QLabel(self.parent)
self.volumeBarShape.setPixmap(QPixmap(os.path.join(imgDir, 'volume_bar_opaque.png')))
self.volumeBarShape.setStyleSheet('background: transparent;')
#self.volumeBarShape.hide() # TODO remove
# Note: Since mouse tracking isn't enabled for this widget, the mouse move event will trigger only when a mouse button is pressed.
self.volumeBarShape.mousePressEvent = self.mousePressVolumeBarShapeEvent
self.volumeBarShape.mouseMoveEvent = self.mouseMoveVolumeBarShapeEvent
def mousePressVolumeBarShapeEvent(self, event):
if event.buttons() & Qt.LeftButton:
y = event.pos().y()
self.volumeBarFill.resize(self.volumeBarFill.width(), VOLUME_BAR_HEIGHT - y)
self.volumeBarFill.move(self.volumeBarFill.x(), VOLUME_BAR_TOP + y)
QLabel.mousePressEvent(self.volumeBarShape, event)
volumePercent = (VOLUME_BAR_HEIGHT - 1.0 - y) / (VOLUME_BAR_HEIGHT - 1)
self.playerManager.SetVolume(volumePercent)
def mouseMoveVolumeBarShapeEvent(self, event):
# Nicely enough, Qt does most of the work here for us; it'll keep on sending the "drag" event even after the mouse moves away, as long as the button is still down.
if event.buttons() & Qt.LeftButton:
y = event.pos().y()
if y < 0:
y = 0
if y >= VOLUME_BAR_HEIGHT:
y = VOLUME_BAR_HEIGHT - 1
self.volumeBarFill.resize(self.volumeBarFill.width(), VOLUME_BAR_HEIGHT - y)
self.volumeBarFill.move(self.volumeBarFill.x(), VOLUME_BAR_TOP + y)
QLabel.mouseMoveEvent(self.volumeBarShape, event)
volumePercent = (VOLUME_BAR_HEIGHT - 1.0 - y) / (VOLUME_BAR_HEIGHT - 1)
self.playerManager.SetVolume(volumePercent)
def SetPercent(self, volumePercent):
if volumePercent > 1:
volumePercent = 1
elif volumePercent < 0:
volumePercent = 0
y = int((VOLUME_BAR_HEIGHT - 1) * (1 - volumePercent))
self.volumeBarFill.resize(self.volumeBarFill.width(), VOLUME_BAR_HEIGHT - y)
self.volumeBarFill.move(self.volumeBarFill.x(), VOLUME_BAR_TOP + y)
self.playerManager.SetVolume(volumePercent)
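    # Added note: the bar maps volume linearly onto VOLUME_BAR_HEIGHT - 1 = 58
    # pixel rows, so volumePercent = 1.0 gives y = 0 (the fill spans the whole
    # bar) and volumePercent = 0.0 gives y = 58 (one pixel row of fill remains).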
RunStdInterface(parentClass, MainWidget, config)
# NOTE: We would use this if using StdMainWindowBase.
#RunStdInterface(parentClass, MainWidget, config, bWindowUsesConfigFile=False) | gpl-3.0 | -4,665,944,476,377,647,000 | 40.641791 | 273 | 0.733064 | false |
jesuscript/topo-mpi | topo/sheet/lissom.py | 1 | 18697 | """
LISSOM and related sheet classes.
$Id$
"""
__version__='$Revision$'
from numpy import zeros,ones
import copy
import param
import topo
from topo.base.projection import Projection
from topo.base.sheet import activity_type
from topo.base.simulation import EPConnectionEvent
from topo.transferfn.basic import PiecewiseLinear
from topo.sheet import JointNormalizingCFSheet
class LISSOM(JointNormalizingCFSheet):
"""
A Sheet class implementing the LISSOM algorithm
(Sirosh and Miikkulainen, Biological Cybernetics 71:66-78, 1994).
A LISSOM sheet is a JointNormalizingCFSheet slightly modified to
enforce a fixed number of settling steps. Settling is controlled
by the tsettle parameter; once that number of settling steps has
been reached, an external input is required before the sheet will
activate again.
"""
strict_tsettle = param.Parameter(default = None,doc="""
If non-None, delay sending output until activation_count reaches this value.""")
mask_init_time=param.Integer(default=5,bounds=(0,None),doc="""
Determines when a new mask is initialized in each new iteration.
The mask is reset whenever new input comes in. Once the
activation_count (see tsettle) reaches mask_init_time, the mask
is initialized to reflect the current activity profile.""")
tsettle=param.Integer(default=8,bounds=(0,None),doc="""
Number of times to activate the LISSOM sheet for each external input event.
A counter is incremented each time an input is received from any
source, and once the counter reaches tsettle, the last activation
step is skipped so that there will not be any further recurrent
activation. The next external (i.e., afferent or feedback)
event will then start the counter over again.""")
continuous_learning = param.Boolean(default=False, doc="""
Whether to modify the weights after every settling step.
If false, waits until settling is completed before doing learning.""")
output_fns = param.HookList(default=[PiecewiseLinear(lower_bound=0.1,upper_bound=0.65)])
precedence = param.Number(0.6)
post_initialization_weights_output_fns = param.HookList([],doc="""
If not empty, weights output_fns that will replace the
existing ones after an initial normalization step.""")
beginning_of_iteration = param.HookList(default=[],instantiate=False,doc="""
List of callables to be executed at the beginning of each iteration.""")
end_of_iteration = param.HookList(default=[],instantiate=False,doc="""
List of callables to be executed at the end of each iteration.""")
def __init__(self,**params):
super(LISSOM,self).__init__(**params)
self.__counter_stack=[]
self.activation_count = 0
self.new_iteration = True
def start(self):
self._normalize_weights(active_units_mask=False)
if len(self.post_initialization_weights_output_fns)>0:
for proj in self.in_connections:
if not isinstance(proj,Projection):
self.debug("Skipping non-Projection ")
else:
proj.weights_output_fns=self.post_initialization_weights_output_fns
def input_event(self,conn,data):
# On a new afferent input, clear the activity
if self.new_iteration:
for f in self.beginning_of_iteration: f()
self.new_iteration = False
self.activity *= 0.0
for proj in self.in_connections:
proj.activity *= 0.0
self.mask.reset()
super(LISSOM,self).input_event(conn,data)
### JABALERT! There should be some sort of warning when
### tsettle times the input delay is larger than the input period.
### Right now it seems to do strange things in that case (does it
### settle at all after the first iteration?), but of course that
### is arguably an error condition anyway (and should thus be
### flagged).
# CEBALERT: there is at least one bug in here for tsettle==0: see
# CB/JAB email "LISSOM tsettle question", 2010/03/22.
def process_current_time(self):
"""
Pass the accumulated stimulation through self.output_fns and
send it out on the default output port.
"""
if self.new_input:
self.new_input = False
if self.activation_count == self.mask_init_time:
self.mask.calculate()
if self.tsettle == 0:
# Special case: behave just like a CFSheet
self.activate()
self.learn()
elif self.activation_count == self.tsettle:
# Once we have been activated the required number of times
# (determined by tsettle), reset various counters, learn
# if appropriate, and avoid further activation until an
# external event arrives.
for f in self.end_of_iteration: f()
self.activation_count = 0
self.new_iteration = True # used by input_event when it is called
if (self.plastic and not self.continuous_learning):
self.learn()
else:
self.activate()
self.activation_count += 1
if (self.plastic and self.continuous_learning):
self.learn()
# print the weights of a unit
def printwts(self,x,y):
for proj in self.in_connections:
print proj.name, x, y
print proj.cfs[x,y].weights
def state_push(self,**args):
super(LISSOM,self).state_push(**args)
self.__counter_stack.append((self.activation_count,self.new_iteration))
def state_pop(self,**args):
super(LISSOM,self).state_pop(**args)
self.activation_count,self.new_iteration=self.__counter_stack.pop()
def send_output(self,src_port=None,data=None):
"""Send some data out to all connections on the given src_port."""
out_conns_on_src_port = [conn for conn in self.out_connections
if self._port_match(conn.src_port,[src_port])]
for conn in out_conns_on_src_port:
if self.strict_tsettle != None:
if self.activation_count < self.strict_tsettle:
if len(conn.dest_port)>2 and conn.dest_port[2] == 'Afferent':
continue
self.verbose("Sending output on src_port %s via connection %s to %s" %
(str(src_port), conn.name, conn.dest.name))
e=EPConnectionEvent(self.simulation._convert_to_time_type(conn.delay)+self.simulation.time(),conn,data)
self.simulation.enqueue_event(e)
class JointScaling(LISSOM):
"""
LISSOM sheet extended to allow joint auto-scaling of Afferent input projections.
An exponentially weighted average is used to calculate the average
joint activity across all jointly-normalized afferent projections.
This average is then used to calculate a scaling factor for the
current afferent activity and for the afferent learning rate.
The target average activity for the afferent projections depends
on the statistics of the input; if units are activated more often
(e.g. the number of Gaussian patterns on the retina during each
iteration is increased) the target average activity should be
larger in order to maintain a constant average response to similar
inputs in V1. The target activity for learning rate scaling does
not need to change, because the learning rate should be scaled
regardless of what causes the change in average activity.
"""
# ALERT: Should probably be extended to jointly scale different
# groups of projections. Currently only works for the joint
# scaling of projections named "Afferent", grouped together by
# JointNormalize in dest_port.
target = param.Number(default=0.045, doc="""
Target average activity for jointly scaled projections.""")
# JABALERT: I cannot parse the docstring; is it an activity or a learning rate?
target_lr = param.Number(default=0.045, doc="""
Target average activity for jointly scaled projections.
Used for calculating a learning rate scaling factor.""")
smoothing = param.Number(default=0.999, doc="""
Influence of previous activity, relative to current, for computing the average.""")
apply_scaling = param.Boolean(default=True, doc="""Whether to apply the scaling factors.""")
precedence = param.Number(0.65)
def __init__(self,**params):
super(JointScaling,self).__init__(**params)
self.x_avg=None
self.sf=None
self.lr_sf=None
self.scaled_x_avg=None
self.__current_state_stack=[]
def calculate_joint_sf(self, joint_total):
"""
Calculate current scaling factors based on the target and previous average joint activities.
Keeps track of the scaled average for debugging. Could be
overridden by a subclass to calculate the factors differently.
"""
if self.plastic:
self.sf *=0.0
self.lr_sf *=0.0
self.sf += self.target/self.x_avg
self.lr_sf += self.target_lr/self.x_avg
self.x_avg = (1.0-self.smoothing)*joint_total + self.smoothing*self.x_avg
self.scaled_x_avg = (1.0-self.smoothing)*joint_total*self.sf + self.smoothing*self.scaled_x_avg
def do_joint_scaling(self):
"""
Scale jointly normalized projections together.
Assumes that the projections to be jointly scaled are those
that are being jointly normalized. Calculates the joint total
of the grouped projections, and uses this to calculate the
scaling factor.
"""
joint_total = zeros(self.shape, activity_type)
for key,projlist in self._grouped_in_projections('JointNormalize'):
if key is not None:
if key =='Afferent':
for proj in projlist:
joint_total += proj.activity
self.calculate_joint_sf(joint_total)
if self.apply_scaling:
for proj in projlist:
proj.activity *= self.sf
if hasattr(proj.learning_fn,'learning_rate_scaling_factor'):
proj.learning_fn.update_scaling_factor(self.lr_sf)
else:
raise ValueError("Projections to be joint scaled must have a learning_fn that supports scaling, such as CFPLF_PluginScaled")
else:
raise ValueError("Only Afferent scaling currently supported")
def activate(self):
"""
Compute appropriate scaling factors, apply them, and collect resulting activity.
Scaling factors are first computed for each set of jointly
normalized projections, and the resulting activity patterns
are then scaled. Then the activity is collected from each
projection, combined to calculate the activity for this sheet,
and the result is sent out.
"""
self.activity *= 0.0
if self.x_avg is None:
self.x_avg=self.target*ones(self.shape, activity_type)
if self.scaled_x_avg is None:
self.scaled_x_avg=self.target*ones(self.shape, activity_type)
if self.sf is None:
self.sf=ones(self.shape, activity_type)
if self.lr_sf is None:
self.lr_sf=ones(self.shape, activity_type)
#Afferent projections are only activated once at the beginning of each iteration
#therefore we only scale the projection activity and learning rate once.
if self.activation_count == 0:
self.do_joint_scaling()
for proj in self.in_connections:
self.activity += proj.activity
if self.apply_output_fns:
for of in self.output_fns:
of(self.activity)
self.send_output(src_port='Activity',data=self.activity)
def state_push(self,**args):
super(JointScaling,self).state_push(**args)
self.__current_state_stack.append((copy.copy(self.x_avg),copy.copy(self.scaled_x_avg),
copy.copy(self.sf), copy.copy(self.lr_sf)))
def state_pop(self,**args):
super(JointScaling,self).state_pop(**args)
self.x_avg,self.scaled_x_avg, self.sf, self.lr_sf=self.__current_state_stack.pop()
def schedule_events(sheet_str="topo.sim['V1']",st=0.5,aff_name="Afferent",
ids=1.0,ars=1.0,increase_inhibition=False):
"""
Convenience function for scheduling a default set of events
typically used with a LISSOM sheet. The parameters used
are the defaults from Miikkulainen, Bednar, Choe, and Sirosh
(2005), Computational Maps in the Visual Cortex, Springer.
    Note that Miikkulainen 2005 specifies only one output_fn for the
LISSOM sheet; where these scheduled actions operate on an
output_fn, they do so only on the first output_fn in the sheet's
list of output_fns.
Installs afferent learning rate changes for any projection whose
name contains the keyword specified by aff_name (typically
"Afferent").
The st argument determines the timescale relative to a
20000-iteration simulation, and results in the default
10000-iteration simulation for the default st=0.5.
The ids argument specifies the input density scale, i.e. how much
input there is at each iteration, on average, relative to the
default. The ars argument specifies how much to scale the
afferent learning rate, if necessary.
If increase_inhibition is true, gradually increases the strength
of the inhibitory connection, typically used for natural image
simulations.
"""
# Allow sheet.BoundingBox calls (below) after reloading a snapshot
topo.sim.startup_commands.append("from topo import sheet")
# Lateral excitatory bounds changes
# Convenience variable: excitatory projection
LE=sheet_str+".projections()['LateralExcitatory']"
topo.sim.schedule_command( 200*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.06250))')
topo.sim.schedule_command( 500*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.04375))')
topo.sim.schedule_command( 1000*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.03500))')
topo.sim.schedule_command( 2000*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.02800))')
topo.sim.schedule_command( 3000*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.02240))')
topo.sim.schedule_command( 4000*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.01344))')
topo.sim.schedule_command( 5000*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.00806))')
topo.sim.schedule_command( 6500*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.00484))')
topo.sim.schedule_command( 8000*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.00290))')
topo.sim.schedule_command(20000*st,LE+'.change_bounds(sheet.BoundingBox(radius=0.00174))')
# Lateral excitatory learning rate changes
idss=("" if ids==1 else "/%3.1f"%ids)
estr='%s.learning_rate=%%s%s*%s.n_units'%(LE,idss,LE)
topo.sim.schedule_command( 200*st,estr%'0.12168')
topo.sim.schedule_command( 500*st,estr%'0.06084')
topo.sim.schedule_command( 1000*st,estr%'0.06084')
topo.sim.schedule_command( 2000*st,estr%'0.06084')
topo.sim.schedule_command( 3000*st,estr%'0.06084')
topo.sim.schedule_command( 4000*st,estr%'0.06084')
topo.sim.schedule_command( 5000*st,estr%'0.06084')
topo.sim.schedule_command( 6500*st,estr%'0.06084')
topo.sim.schedule_command( 8000*st,estr%'0.06084')
topo.sim.schedule_command(20000*st,estr%'0.06084')
### Lateral inhibitory learning rate and strength changes
if increase_inhibition:
LI=sheet_str+".projections()['LateralInhibitory']"
istr='%s.learning_rate=%%s%s'%(LI,idss)
topo.sim.schedule_command( 1000*st,istr%'1.80873/5.0*2.0')
topo.sim.schedule_command( 2000*st,istr%'1.80873/5.0*3.0')
topo.sim.schedule_command( 5000*st,istr%'1.80873/5.0*5.0')
topo.sim.schedule_command( 1000*st,LI+'.strength=-2.2')
topo.sim.schedule_command( 2000*st,LI+'.strength=-2.6')
# Afferent learning rate changes (for every Projection named Afferent)
sheet_=eval(sheet_str)
projs = [pn for pn in sheet_.projections().keys() if pn.count(aff_name)]
num_aff=len(projs)
arss="" if ars==1.0 else "*%3.1f"%ars
for pn in projs:
ps="%s.projections()['%s'].learning_rate=%%s%s%s" % \
(sheet_str,pn,idss if num_aff==1 else "%s/%d"%(idss,num_aff),arss)
topo.sim.schedule_command( 500*st,ps%('0.6850'))
topo.sim.schedule_command( 2000*st,ps%('0.5480'))
topo.sim.schedule_command( 4000*st,ps%('0.4110'))
topo.sim.schedule_command(20000*st,ps%('0.2055'))
# Activation function threshold changes
bstr = sheet_str+'.output_fns[0].lower_bound=%5.3f;'+\
sheet_str+'.output_fns[0].upper_bound=%5.3f'
lbi=sheet_.output_fns[0].lower_bound
ubi=sheet_.output_fns[0].upper_bound
topo.sim.schedule_command( 200*st,bstr%(lbi+0.01,ubi+0.01))
topo.sim.schedule_command( 500*st,bstr%(lbi+0.02,ubi+0.02))
topo.sim.schedule_command( 1000*st,bstr%(lbi+0.05,ubi+0.03))
topo.sim.schedule_command( 2000*st,bstr%(lbi+0.08,ubi+0.05))
topo.sim.schedule_command( 3000*st,bstr%(lbi+0.10,ubi+0.08))
topo.sim.schedule_command( 4000*st,bstr%(lbi+0.10,ubi+0.11))
topo.sim.schedule_command( 5000*st,bstr%(lbi+0.11,ubi+0.14))
topo.sim.schedule_command( 6500*st,bstr%(lbi+0.12,ubi+0.17))
topo.sim.schedule_command( 8000*st,bstr%(lbi+0.13,ubi+0.20))
topo.sim.schedule_command(20000*st,bstr%(lbi+0.14,ubi+0.23))
# Just to get more progress reports
topo.sim.schedule_command(12000*st,'pass')
topo.sim.schedule_command(16000*st,'pass')
# Settling steps changes
topo.sim.schedule_command( 2000*st,sheet_str+'.tsettle=10')
topo.sim.schedule_command( 5000*st,sheet_str+'.tsettle=11')
topo.sim.schedule_command( 6500*st,sheet_str+'.tsettle=12')
topo.sim.schedule_command( 8000*st,sheet_str+'.tsettle=13')
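# A minimal usage sketch for schedule_events() (hedged: illustrative values
# only; it assumes an already-built Topographica simulation containing a
# LISSOM sheet named 'V1' with an 'Afferent' projection, as in the standard
# LISSOM examples):
#
#     from topo.sheet.lissom import schedule_events
#     schedule_events("topo.sim['V1']", st=0.5, aff_name="Afferent",
#                     ids=1.0, ars=1.0, increase_inhibition=False)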
| bsd-3-clause | 2,643,544,422,502,129,000 | 42.080645 | 156 | 0.643686 | false |
jffifa/kyotogang-toolset | kotori/gconf.py | 1 | 3723 | #!/usr/bin/env python2
# -*- coding: UTF-8 -*-
import sys
import os
import urlparse
import urllib
class GConf:
"""global configuration
"""
GROUP_ID = '10079277'
# encodings
SHELL_ENCODING = sys.stdout.encoding
INTERNAL_ENCODING = 'utf_8'
# debug mode
DEBUG = False
# global dir and file path settings
BASE_DIR = os.path.dirname(__file__)
USER_DATA_PATH = os.path.join(BASE_DIR, 'data', 'user')
KOTORI_ASCII_PATH = 'kotori_ascii'
# global conf for urls
PROTOCOL = 'http'
BASE_URL = 'bbs.saraba1st.com'
# http origin url
ORIGIN_URL = urlparse.urlunparse((PROTOCOL, BASE_URL, '', '', '', ''))
# forum homepage url
FORUM_PATH = '/2b/forum.php'
FORUM_URL = urlparse.urlunparse((PROTOCOL, BASE_URL, FORUM_PATH, '', '', ''))
# ajax login url
LOGIN_PATH = '/2b/member.php'
LOGIN_QUERY = urllib.urlencode({
'mod':'logging',
'action':'login',
'loginsubmit':'yes',
'infloat':'yes',
'lssubmit':'yes',
'inajax':'1',
})
LOGIN_URL = urlparse.urlunparse((PROTOCOL, BASE_URL, LOGIN_PATH, '', LOGIN_QUERY, ''))
# session keeping url
KEEP_CONN_PATH = '/2b/home.php'
KEEP_CONN_QUERY = urllib.urlencode({
'mod':'spacecp',
'ac':'credit',
'showcredit':'1'
})
KEEP_CONN_URL = urlparse.urlunparse((PROTOCOL, BASE_URL, KEEP_CONN_PATH, '', KEEP_CONN_QUERY, ''))
# get rate form url
RATE_LIM_TID = 643316
RATE_LIM_PID = 22412315
RATE_FORM_PATH = '/2b/forum.php'
RATE_FORM_QUERY_DICT = {
'mod':'misc',
'action':'rate',
#'t':'1385395649378',
#'tid':'643316',
#'pid':'22412315',
'infloat':'yes',
'handlekey':'rate',
'inajax':'1',
'ajaxtarget':'fwin_content_rate',
}
RATE_PATH = FORUM_PATH
RATE_QUERY = urllib.urlencode({
'mod':'misc',
'action':'rate',
'ratesubmit':'yes',
'infloat':'yes',
'inajax':'1'
})
RATE_URL = urlparse.urlunparse((
PROTOCOL,
BASE_URL,
FORUM_PATH,
'',
RATE_QUERY,
''))
# fake user agent
    FAKE_UA = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.57 Safari/537.36'
# http header
LOGIN_HEADER = {
'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
'Accept-Language':'zh-CN,zh;q=0.8,ja;q=0.6',
'Cache-Control':'max-age=0',
#'Connection':'keep-alive',
'Connection':'close',
'User-Agent':FAKE_UA,
'Origin':ORIGIN_URL,
'Referer':FORUM_URL ,
}
RATE_FORM_HEADER = {
'Accept':'*/*',
        'Accept-Language':'zh-CN,zh;q=0.8,ja;q=0.6',
#'Connection':'keep-alive',
'Connection':'close',
'User-Agent':FAKE_UA,
#'Referer':'http://bbs.saraba1st.com/2b/forum.php?mod=viewthread&tid=643316',
'X-Requested-With':'XMLHttpRequest',
}
RATE_HEADER = {
        'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Language':'zh-CN,zh;q=0.8,ja;q=0.6',
'Cache-Control':'max-age=0',
#'Connection':'keep-alive',
'Connection':'close',
'User-Agent':FAKE_UA,
'Origin':ORIGIN_URL,
#'Referer':'http://bbs.saraba1st.com/2b/forum.php?mod=viewthread&tid=974473&page=1',
}
# session status
SESSION_STATUS_INIT = 0
SESSION_STATUS_LOGIN = 1
SESSION_STATUS_LOGOUT = 2
SESSION_STATUS_CONN = 3
# max users
MAX_USER = 256
POST_PER_PAGE = 30
MAX_RATE_CONCURRENCY = 256
| mit | 5,599,428,392,297,824,000 | 25.978261 | 120 | 0.551974 | false |
internap/almanach | tests/api/test_api_authentication.py | 1 | 1252 | # Copyright 2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from hamcrest import assert_that, equal_to
from tests.api.base_api import BaseApi
class ApiAuthenticationTest(BaseApi):
def setUp(self):
self.prepare()
self.prepare_with_failed_authentication()
def test_with_wrong_authentication(self):
self.controller.should_receive('list_entities').never()
query_string = {'start': '2014-01-01 00:00:00.0000', 'end': '2014-02-01 00:00:00.0000'}
code, result = self.api_get(url='/project/TENANT_ID/entities',
query_string=query_string,
headers={'X-Auth-Token': 'wrong token'})
assert_that(code, equal_to(401))
| apache-2.0 | -9,071,383,592,736,127,000 | 36.939394 | 95 | 0.676518 | false |
sjl767/woo | scripts/test-OLD/clump-hopper-viscoelastic.py | 1 | 3251 | # -*- coding: utf-8
from woo import utils,pack,export,qt
import gts,os,random,itertools
from numpy import *
import woo.log
#woo.log.setLevel('NewtonIntegrator',woo.log.TRACE)
# Parameters
tc=0.001# collision time
en=.3 # normal restitution coefficient
es=.3 # tangential restitution coefficient
frictionAngle=radians(35)#
density=2700
# facets material
params=utils.getViscoelasticFromSpheresInteraction(tc,en,es)
facetMat=O.materials.append(ViscElMat(frictionAngle=frictionAngle,**params)) # **params sets kn, cn, ks, cs
# default spheres material
dfltSpheresMat=O.materials.append(ViscElMat(density=density,frictionAngle=frictionAngle, **params))
O.dt=.1*tc # time step
Rs=0.05 # particle radius
# Create geometry
x0=0.; y0=0.; z0=0.; ab=.7; at=2.; h=1.; hl=h; al=at*3
zb=z0; x0b=x0-ab/2.; y0b=y0-ab/2.; x1b=x0+ab/2.; y1b=y0+ab/2.
zt=z0+h; x0t=x0-at/2.; y0t=y0-at/2.; x1t=x0+at/2.; y1t=y0+at/2.
zl=z0-hl;x0l=x0-al/2.; y0l=y0-al/2.; x1l=x0+al/2.; y1l=y0+al/2.
left = pack.sweptPolylines2gtsSurface([[Vector3(x0b,y0b,zb),Vector3(x0t,y0t,zt),Vector3(x0t,y1t,zt),Vector3(x0b,y1b,zb)]],capStart=True,capEnd=True)
lftIds=O.bodies.append(pack.gtsSurface2Facets(left.faces(),material=facetMat,color=(0,1,0)))
right = pack.sweptPolylines2gtsSurface([[Vector3(x1b,y0b,zb),Vector3(x1t,y0t,zt),Vector3(x1t,y1t,zt),Vector3(x1b,y1b,zb)]],capStart=True,capEnd=True)
rgtIds=O.bodies.append(pack.gtsSurface2Facets(right.faces(),material=facetMat,color=(0,1,0)))
near = pack.sweptPolylines2gtsSurface([[Vector3(x0b,y0b,zb),Vector3(x0t,y0t,zt),Vector3(x1t,y0t,zt),Vector3(x1b,y0b,zb)]],capStart=True,capEnd=True)
nearIds=O.bodies.append(pack.gtsSurface2Facets(near.faces(),material=facetMat,color=(0,1,0)))
far = pack.sweptPolylines2gtsSurface([[Vector3(x0b,y1b,zb),Vector3(x0t,y1t,zt),Vector3(x1t,y1t,zt),Vector3(x1b,y1b,zb)]],capStart=True,capEnd=True)
farIds=O.bodies.append(pack.gtsSurface2Facets(far.faces(),material=facetMat,color=(0,1,0)))
table = pack.sweptPolylines2gtsSurface([[Vector3(x0l,y0l,zl),Vector3(x0l,y1l,zl),Vector3(x1l,y1l,zl),Vector3(x1l,y0l,zl)]],capStart=True,capEnd=True)
tblIds=O.bodies.append(pack.gtsSurface2Facets(table.faces(),material=facetMat,color=(0,1,0)))
# Create clumps...
clumpColor=(0.0, 0.5, 0.5)
for k,l in itertools.product(arange(0,10),arange(0,10)):
clpId,sphId=O.bodies.appendClumped([utils.sphere(Vector3(x0t+Rs*(k*4+2),y0t+Rs*(l*4+2),i*Rs*2+zt),Rs,color=clumpColor,material=dfltSpheresMat) for i in xrange(4)])
# ... and spheres
#spheresColor=(0.4, 0.4, 0.4)
#for k,l in itertools.product(arange(0,9),arange(0,9)):
#sphAloneId=O.bodies.append( [utils.sphere( Vector3(x0t+Rs*(k*4+4),y0t+Rs*(l*4+4),i*Rs*2.3+zt),Rs,color=spheresColor,material=dfltSpheresMat) for i in xrange(4) ] )
# Create engines
O.engines=[
ForceResetter(),
InsertionSortCollider([Bo1_Sphere_Aabb(),Bo1_Facet_Aabb()]),
InteractionLoop(
[Ig2_Sphere_Sphere_ScGeom(), Ig2_Facet_Sphere_ScGeom()],
[Ip2_ViscElMat_ViscElMat_ViscElPhys()],
[Law2_ScGeom_ViscElPhys_Basic()],
),
GravityEngine(gravity=[0,0,-9.81]),
NewtonIntegrator(damping=0),
#VTKRecorder(virtPeriod=0.01,fileName='/tmp/',recorders=['spheres','velocity','facets'])
]
from woo import qt
qt.View()
O.saveTmp()
| gpl-2.0 | 8,081,068,913,472,389,000 | 41.776316 | 168 | 0.725008 | false |
iesugrace/book-reader | lib/noter.py | 1 | 3267 | from record import Record
from recorder import Recorder
from timeutils import isotime
import time
import interact
import os
class Noter(Recorder):
def __init__(self, db_path, book_name):
Recorder.__init__(self, db_path)
self.book_name = book_name
def make_makers(self):
makers = []
makers.append(('book', (lambda x: self.book_name, None)))
makers.append(('chapter', (interact.readint, 'Chapter: ')))
makers.append(('subject', (interact.readstr, 'Subject: ')))
makers.append(('content', (self.edit_content, None)))
self.makers = makers
def edit(self):
""" change an existing note
"""
cont = self.opendb()
notes = sorted(cont.items(), key=lambda x: int(x[0]))
text_list = []
for time, note in notes:
text = isotime(int(time)) + '\n' + note.content[:80]
text_list.append(text)
idx, junk = interact.printAndPick(text_list)
key = notes[idx][0]
note = notes[idx][1]
prompt = 'Chapter [%s]: ' % note.chapter
note.chapter = interact.readint(prompt, default=note.chapter)
prompt = 'Subject [%s]: ' % note.subject
note.subject = interact.readstr(prompt, default='') or note.subject
note.content = self.edit_content(data=note.content)
self.save(key, note)
def list(self):
cont = self.opendb()
notes = sorted(cont.items(), key=lambda x: int(x[0]))
text_list = []
for time, note in notes:
text = isotime(int(time)) + '\n' + note.content[:80]
text_list.append(text)
res = interact.printAndPick(text_list)
if res:
idx = res[0]
else:
return
key = notes[idx][0]
note = notes[idx][1]
print('-' * 80)
print('Book: %s' % note.book)
print('Chapter: %s' % note.chapter)
print('Subject: %s' % note.subject)
print('Content:\n%s' % note.content)
def delete(self):
assert False, 'not yet implemented'
def add(self):
""" caller must supply the field names and
maker function and arguments for each field.
"""
self.make_makers()
ent = Record()
for (field_name, (func, args)) in self.makers:
setattr(ent, field_name, func(args))
self.save(str(int(time.time())), ent)
def edit_content(self, *junk, data=None):
""" edit (add, change, delete) some data, and return it
as string use temporary file to store the data while creating.
"""
import tempfile
tmpfile = tempfile.NamedTemporaryFile(delete=False)
if data:
tmpfile.write(data.encode())
tmpfile.flush()
self.edit_file(tmpfile.name)
content = open(tmpfile.name).read()
os.unlink(tmpfile.name)
return content
def edit_file(self, path):
""" edit a file of a given name, using the editor
specified in EDITOR environment variable, or vi
if none specified.
"""
default_editor = 'vi'
editor = os.environ.get('EDITOR')
if not editor: editor = default_editor
os.system('%s %s' % (editor, path))
| gpl-2.0 | -4,272,964,902,325,736,400 | 32.680412 | 75 | 0.565657 | false |
taion/flask-jsonapiview | setup.py | 1 | 1714 | from setuptools import setup
EXTRAS_REQUIRE = {
"docs": ("sphinx", "pallets-sphinx-themes"),
"jwt": ("PyJWT>=1.4.0", "cryptography>=2.0.0"),
"tests": ("coverage", "psycopg2-binary", "pytest"),
}
EXTRAS_REQUIRE["dev"] = (
EXTRAS_REQUIRE["docs"] + EXTRAS_REQUIRE["tests"] + ("tox",)
)
setup(
name="Flask-RESTy",
version="1.5.0",
description="Building blocks for REST APIs for Flask",
url="https://github.com/4Catalyzer/flask-resty",
author="4Catalyzer",
author_email="[email protected]",
license="MIT",
python_requires=">=3.6",
classifiers=[
"Development Status :: 3 - Alpha",
"Framework :: Flask",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3 :: Only",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords="rest flask",
packages=("flask_resty",),
install_requires=(
"Flask>=1.1.0",
"Flask-SQLAlchemy>=1.0",
"marshmallow>=3.0.0",
"SQLAlchemy>=1.0.0",
"Werkzeug>=0.11",
"konch>=4.0",
),
extras_require=EXTRAS_REQUIRE,
entry_points={
"pytest11": ("flask-resty = flask_resty.testing",),
"flask.commands": ("shell = flask_resty.shell:cli",),
},
)
| mit | 8,034,599,104,357,237,000 | 31.961538 | 71 | 0.572929 | false |
Ivehui/DQN | dqn.py | 1 | 2248 | '''
@author Ivehui
@time 2016/06/05
@function: reinforcement agent
'''
import random
import caffe
import parameters as pms
import numpy as np
class DqnAgent(object):
def __init__(self, action_space, model=pms.newModel):
self.action_space = action_space
actionSolver = None
actionSolver = caffe.get_solver(pms.actionSolverPath)
actionSolver.net.copy_from(model)
# test net share weights with train net
actionSolver.test_nets[0].share_with(actionSolver.net)
self.solver = actionSolver
self.targetNet = caffe.Net(pms.actionTestNetPath, model, caffe.TEST)
def act(self, frame, greedy):
if random.uniform(0, 1) < greedy:
return self.action_space.sample()
else:
self.solver.test_nets[0].blobs['frames'].data[...] = frame.copy()
netOut = self.solver.test_nets[0].forward()
return np.where(netOut['value_q'][0] == max(netOut['value_q'][0]))[0][0]
def train(self, tran, selected):
self.targetNet.blobs['frames'].data[...] \
= tran.frames[selected + 1].copy()
netOut = self.targetNet.forward()
target = np.tile(tran.reward[selected]
+ pms.discount
* tran.n_last[selected]
* np.resize(netOut['value_q'].max(1),
(pms.batchSize, 1)),
(pms.actionSize,)
) * tran.action[selected]
self.solver.net.blobs['target'].data[...] = target
self.solver.net.blobs['frames'].data[...] = tran.frames[selected].copy()
self.solver.net.blobs['filter'].data[...] = tran.action[selected].copy()
self.solver.step(1)
def updateTarget(self):
for layer in pms.layers:
self.targetNet.params[layer][0].data[...] \
= self.targetNet.params[layer][0].data * (1 - pms.updateParam) + \
self.solver.net.params[layer][0].data * pms.updateParam
self.targetNet.params[layer][1].data[...] \
= self.targetNet.params[layer][1].data * (1 - pms.updateParam) + \
self.solver.net.params[layer][1].data * pms.updateParam
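# A minimal usage sketch (hedged: assumes an OpenAI-Gym-style action_space, a
# preprocessed frame batch shaped for the Caffe nets above, and a transition
# buffer `tran` with the fields used in train(); names are illustrative):
#
#     agent = DqnAgent(env.action_space)
#     action = agent.act(frame, greedy=0.1)    # epsilon-greedy action
#     # ...collect experience into `tran`, then sample indices `selected`...
#     agent.train(tran, selected)
#     agent.updateTarget()                     # soft update of the target net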
| mit | -2,411,459,686,163,328,000 | 37.758621 | 84 | 0.566726 | false |
giordi91/python_misc | widgets/plotWidget/colorWidget.py | 1 | 11019 |
from PySide import QtGui , QtCore
import math
#this is just a color picker
class ColorWidget(QtGui.QWidget):
colorChangedSignal = QtCore.Signal(int , int ,int)
def __init__(self, parent = None , color = [255,0,0]):
QtGui.QWidget.__init__(self,parent)
self.width = 200
self.height = 100
self.setGeometry(0,0,self.width,self.height)
self.__color = color
@property
def color(self):
return self.__color
@color.setter
    def color(self, color=[]):
self.__color = color
self.colorChangedSignal.emit(self.__color[0],self.__color[1],self.__color[2])
self.update()
def mousePressEvent(self, event):
col = QtGui.QColorDialog.getColor()
if col.isValid():
self.color = [col.red() , col.green(), col.blue()]
self.colorChangedSignal.emit(self.__color[0],self.__color[1],self.__color[2])
self.update()
def drawBG(self, qp):
pen= QtGui.QPen()
color = QtGui.QColor(0, 0, 0)
pen.setColor(color)
pen.setWidthF(2)
qp.setPen(pen)
brush = QtGui.QBrush(QtGui.QColor(self.color[0], self.color[1], self.color[2]))
qp.setBrush(brush)
rectangle=QtCore.QRectF (0.0, 0.0, self.width, self.height);
qp.drawRoundedRect(rectangle, 2.0, 2.0);
def paintEvent(self, e):
'''
This procedure draws the widget
'''
qp = QtGui.QPainter()
qp.begin(self)
qp.setRenderHint(QtGui.QPainter.Antialiasing)
self.drawBG(qp)
qp.end()
def resizeEvent(self , event):
posX = event.size().width()
posY = event.size().height()
self.width = posX
self.height = posY
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
#simple UI to set the colors etc. directly on the plotter
#nothing fancy; not willing to spend too much time commenting this :P
class OptionColorWidget(QtGui.QDialog):
def __init__(self,parent = None , plotter = None):
QtGui.QDialog.__init__(self, parent )
self.plotter= plotter
self.setObjectName(_fromUtf8("Dialog"))
self.resize(411, 310)
self.verticalLayout = QtGui.QVBoxLayout(self)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.settingsGB = QtGui.QGroupBox(self)
self.settingsGB.setObjectName(_fromUtf8("settingsGB"))
self.gridLayout_7 = QtGui.QGridLayout(self.settingsGB)
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
self.bgGB = QtGui.QGroupBox(self.settingsGB)
self.bgGB.setObjectName(_fromUtf8("bgGB"))
self.startBgL = QtGui.QLabel(self.bgGB)
self.startBgL.setGeometry(QtCore.QRect(10, 23, 56, 16))
self.startBgL.setObjectName(_fromUtf8("startBgL"))
self.endBgL = QtGui.QLabel(self.bgGB)
self.endBgL.setGeometry(QtCore.QRect(10, 46, 51, 16))
self.endBgL.setObjectName(_fromUtf8("endBgL"))
self.startBgCL = ColorWidget(self.bgGB)
self.startBgCL.setGeometry(QtCore.QRect(72, 23, 51, 17))
self.startBgCL.setObjectName(_fromUtf8("startBgCL"))
self.endBgCL = ColorWidget(self.bgGB)
self.endBgCL.setGeometry(QtCore.QRect(72, 46, 51, 17))
self.endBgCL.setObjectName(_fromUtf8("endBgCL"))
self.gridLayout_7.addWidget(self.bgGB, 0, 0, 1, 1)
self.grapGB = QtGui.QGroupBox(self.settingsGB)
self.grapGB.setMinimumSize(QtCore.QSize(220, 0))
self.grapGB.setObjectName(_fromUtf8("grapGB"))
self.gridLayout_4 = QtGui.QGridLayout(self.grapGB)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.graphL = QtGui.QLabel(self.grapGB)
self.graphL.setObjectName(_fromUtf8("graphL"))
self.gridLayout_4.addWidget(self.graphL, 0, 0, 1, 1)
self.frame_4 = ColorWidget(self.grapGB)
self.frame_4.setObjectName(_fromUtf8("frame_4"))
self.gridLayout_4.addWidget(self.frame_4, 0, 1, 1, 1)
self.gTypeL = QtGui.QLabel(self.grapGB)
self.gTypeL.setObjectName(_fromUtf8("gTypeL"))
self.gridLayout_4.addWidget(self.gTypeL, 0, 2, 1, 1)
self.gTypeCB = QtGui.QComboBox(self.grapGB)
self.gTypeCB.setObjectName(_fromUtf8("gTypeCB"))
self.gTypeCB.addItem(_fromUtf8(""))
self.gTypeCB.addItem(_fromUtf8(""))
self.gridLayout_4.addWidget(self.gTypeCB, 0, 3, 1, 1)
self.thickL = QtGui.QLabel(self.grapGB)
self.thickL.setObjectName(_fromUtf8("thickL"))
self.gridLayout_4.addWidget(self.thickL, 1, 0, 1, 2)
self.thickSB = QtGui.QDoubleSpinBox(self.grapGB)
self.thickSB.setProperty("value", 2.0)
self.thickSB.setObjectName(_fromUtf8("thickSB"))
self.gridLayout_4.addWidget(self.thickSB, 1, 2, 1, 2)
self.gridLayout_7.addWidget(self.grapGB, 0, 1, 1, 1)
self.gridGB = QtGui.QGroupBox(self.settingsGB)
self.gridGB.setObjectName(_fromUtf8("gridGB"))
self.gridLayout_6 = QtGui.QGridLayout(self.gridGB)
self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
self.axisGB = QtGui.QGroupBox(self.gridGB)
self.axisGB.setObjectName(_fromUtf8("axisGB"))
self.gridLayout = QtGui.QGridLayout(self.axisGB)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.axisCK = QtGui.QCheckBox(self.axisGB)
self.axisCK.setObjectName(_fromUtf8("axisCK"))
self.gridLayout.addWidget(self.axisCK, 0, 0, 1, 2)
self.axisL = QtGui.QLabel(self.axisGB)
self.axisL.setObjectName(_fromUtf8("axisL"))
self.gridLayout.addWidget(self.axisL, 1, 0, 1, 1)
self.axisCL = ColorWidget(self.axisGB)
self.axisCL.setObjectName(_fromUtf8("axisCL"))
self.gridLayout.addWidget(self.axisCL, 1, 1, 1, 1)
self.gridLayout_6.addWidget(self.axisGB, 0, 0, 1, 1)
self.gridGB_2 = QtGui.QGroupBox(self.gridGB)
self.gridGB_2.setObjectName(_fromUtf8("gridGB_2"))
self.gridLayout_2 = QtGui.QGridLayout(self.gridGB_2)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.showCK = QtGui.QCheckBox(self.gridGB_2)
self.showCK.setObjectName(_fromUtf8("showCK"))
self.gridLayout_2.addWidget(self.showCK, 0, 0, 1, 2)
self.gridL = QtGui.QLabel(self.gridGB_2)
self.gridL.setObjectName(_fromUtf8("gridL"))
self.gridLayout_2.addWidget(self.gridL, 1, 0, 1, 1)
self.gridCL = ColorWidget(self.gridGB_2)
self.gridCL.setObjectName(_fromUtf8("gridCL"))
self.gridLayout_2.addWidget(self.gridCL, 1, 1, 1, 1)
self.gridLayout_6.addWidget(self.gridGB_2, 0, 1, 1, 1)
self.numbersGB = QtGui.QGroupBox(self.gridGB)
self.numbersGB.setObjectName(_fromUtf8("numbersGB"))
self.gridLayout_3 = QtGui.QGridLayout(self.numbersGB)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.numbersCK = QtGui.QCheckBox(self.numbersGB)
self.numbersCK.setObjectName(_fromUtf8("numbersCK"))
self.gridLayout_3.addWidget(self.numbersCK, 0, 0, 1, 2)
self.numbersL = QtGui.QLabel(self.numbersGB)
self.numbersL.setObjectName(_fromUtf8("numbersL"))
self.gridLayout_3.addWidget(self.numbersL, 1, 0, 1, 1)
self.numbersCL = ColorWidget(self.numbersGB)
self.numbersCL.setObjectName(_fromUtf8("numbersCL"))
self.gridLayout_3.addWidget(self.numbersCL, 1, 1, 1, 1)
self.gridLayout_6.addWidget(self.numbersGB, 0, 2, 1, 1)
self.stepsL = QtGui.QLabel(self.gridGB)
self.stepsL.setObjectName(_fromUtf8("stepsL"))
self.gridLayout_6.addWidget(self.stepsL, 1, 0, 1, 1)
self.setpSB = QtGui.QSpinBox(self.gridGB)
self.setpSB.setProperty("value", 20)
self.setpSB.setObjectName(_fromUtf8("setpSB"))
self.gridLayout_6.addWidget(self.setpSB, 1, 1, 1, 1)
self.gridLayout_7.addWidget(self.gridGB, 1, 0, 1, 2)
self.donePB = QtGui.QPushButton(self.settingsGB)
self.donePB.setObjectName(_fromUtf8("donePB"))
self.gridLayout_7.addWidget(self.donePB, 2, 0, 1, 2)
self.verticalLayout.addWidget(self.settingsGB)
self.setWindowTitle(_translate("Dialog", "Dialog", None))
self.settingsGB.setTitle(_translate("Dialog", "Settings", None))
self.bgGB.setTitle(_translate("Dialog", "Background", None))
self.startBgL.setText(_translate("Dialog", "start color :", None))
self.endBgL.setText(_translate("Dialog", "end color :", None))
self.grapGB.setTitle(_translate("Dialog", "Graph", None))
self.graphL.setText(_translate("Dialog", "color :", None))
self.gTypeL.setText(_translate("Dialog", "type :", None))
self.gTypeCB.setItemText(0, _translate("Dialog", "dots", None))
self.gTypeCB.setItemText(1, _translate("Dialog", "line", None))
self.thickL.setText(_translate("Dialog", "Thickness :", None))
self.gridGB.setTitle(_translate("Dialog", "Grid", None))
self.axisGB.setTitle(_translate("Dialog", "Axis", None))
self.axisCK.setText(_translate("Dialog", " show", None))
self.axisL.setText(_translate("Dialog", "color:", None))
self.gridGB_2.setTitle(_translate("Dialog", "Grid", None))
self.showCK.setText(_translate("Dialog", " show", None))
self.gridL.setText(_translate("Dialog", "color:", None))
self.numbersGB.setTitle(_translate("Dialog", "Numbers", None))
self.numbersCK.setText(_translate("Dialog", " show", None))
self.numbersL.setText(_translate("Dialog", "color:", None))
self.stepsL.setText(_translate("Dialog", "Grid Step :", None))
self.donePB.setText(_translate("Dialog", "DONE", None))
self.showCK.setChecked(1)
self.axisCK.setChecked(1)
self.numbersCK.setChecked(1)
self.startBgCL.color = self.plotter.startBackgroundColor
self.endBgCL.color = self.plotter.endBackgroundColor
self.startBgCL.colorChangedSignal.connect(self.updateStartBG)
self.endBgCL.colorChangedSignal.connect(self.updateEndBG)
self.donePB.clicked.connect(self.close)
def updateStartBG(self , r,g,b):
self.plotter.startBackgroundColor = [r,g,b]
def updateEndBG(self , r,g,b):
self.plotter.endBackgroundColor = [r,g,b]
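# Minimal interactive demo sketch for ColorWidget (hedged: needs a display and
# a working PySide install; OptionColorWidget additionally expects a plotter
# object as used above and is not exercised here).
if __name__ == "__main__":
    import sys
    _app = QtGui.QApplication(sys.argv)
    _widget = ColorWidget(color=[0, 128, 255])
    # Print the new RGB values whenever the user picks a color.
    _widget.colorChangedSignal.connect(
        lambda r, g, b: sys.stdout.write("color changed: %d %d %d\n" % (r, g, b)))
    _widget.show()
    sys.exit(_app.exec_())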
| mit | -1,275,120,345,756,301,000 | 41.544402 | 89 | 0.631909 | false |
MTgeophysics/mtpy | mtpy/uofa/bayesian1d.py | 1 | 2557 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on 31.07.2013
@author: LK@UofA
mtpy/uofa/bayesian1d.py
Module for handling the UofA Bayesian 1D inversion/modelling code.
"""
import os
import sys
import os.path as op
import mtpy.utils.filehandling as MTfh
import mtpy.core.edi as EDI
import mtpy.utils.exceptions as MTex
import numpy as np
def generate_input_file(edifilename, outputdir=None):
eo = EDI.Edi()
eo.readfile(edifilename)
filebase = op.splitext(op.split(edifilename)[-1])[0]
outfilename1 = '{0}_bayesian1d_z.in'.format(filebase)
outfilename2 = '{0}_bayesian1d_zvar.in'.format(filebase)
outdir = op.split(edifilename)[0]
if outputdir is not None:
try:
if not op.isdir(outputdir):
os.makedirs(outputdir)
outdir = outputdir
except:
pass
outfn1 = op.join(outdir, outfilename1)
outfn2 = op.join(outdir, outfilename2)
outfn1 = MTfh.make_unique_filename(outfn1)
outfn2 = MTfh.make_unique_filename(outfn2)
freqs = eo.freq
z_array = eo.Z.z
z_err_array = eo.Z.z_err
if len(freqs) != len(z_array):
raise MTex.MTpyError_edi_file('ERROR in Edi file {0} - number of '
'freqs different from length of Z array'.format(eo.filename))
sorting = np.argsort(freqs)
outstring1 = ''
outstring2 = ''
for idx in sorting:
z = z_array[idx]
z_err = z_err_array[idx]
f = freqs[idx]
outstring1 += '{0}\t'.format(f)
outstring2 += '{0}\t'.format(f)
for i in np.arange(2):
for j in np.arange(2):
if np.imag(z[i % 2, (j + 1) / 2]) < 0:
z_string = '{0}-{1}i'.format(np.real(z[i % 2, (j + 1) / 2]),
np.abs(np.imag(z[i % 2, (j + 1) / 2])))
else:
z_string = '{0}+{1}i'.format(np.real(z[i % 2, (j + 1) / 2]),
np.imag(z[i % 2, (j + 1) / 2]))
z_err_string = '{0}'.format(z_err[i % 2, (j + 1) / 2])
outstring1 += '{0}\t'.format(z_string)
outstring2 += '{0}\t'.format(z_err_string)
outstring1 = outstring1.rstrip() + '\n'
outstring2 = outstring2.rstrip() + '\n'
Fout1 = open(outfn1, 'w')
Fout2 = open(outfn2, 'w')
Fout1.write(outstring1.expandtabs(4))
Fout2.write(outstring2.expandtabs(4))
Fout1.close()
Fout2.close()
return outfn1, outfn2
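# A minimal usage sketch (hedged: the EDI file name and output directory are
# illustrative; generate_input_file() writes <station>_bayesian1d_z.in and
# <station>_bayesian1d_zvar.in and returns their paths):
#
#     from mtpy.uofa.bayesian1d import generate_input_file
#     zfile, zvarfile = generate_input_file('ST01.edi', outputdir='bayes_in')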
| gpl-3.0 | 8,232,908,212,395,492,000 | 27.098901 | 99 | 0.539304 | false |
steveblamey/nautilus-archive | setup.py | 1 | 1285 | #!/usr/bin/env python
# coding: utf-8
from os import path
from distutils.core import setup
PROJECT_DIR = path.dirname(__file__)
extension = [
('/usr/share/nautilus-python/extensions',
[path.join(PROJECT_DIR, 'extension', 'nautilus-archive.py')]),
('/usr/share/icons/hicolor/48x48/emblems',
[path.join(PROJECT_DIR, 'extension', 'emblems', 'emblem-red-tag.png')]),
('/usr/share/icons/hicolor/scalable/emblems',
[path.join(PROJECT_DIR, 'extension', 'emblems', 'emblem-red-tag.svg')]),
('/usr/share/icons/hicolor/48x48/emblems',
[path.join(PROJECT_DIR, 'extension', 'emblems', 'emblem-green-tag.png')]),
('/usr/share/icons/hicolor/scalable/emblems',
[path.join(PROJECT_DIR, 'extension', 'emblems', 'emblem-green-tag.svg')]),
('/usr/sbin',
[path.join(PROJECT_DIR, 'scripts', 'tracker-archive-tagged')]),
]
setup(name='nautilus-archive',
version='0.2',
description='A file archiving extension for the Nautilus file manager',
long_description=open('README.rst').read(),
author='Steve Blamey',
author_email='[email protected]',
url='http://www.python.org/',
license='GPL-3',
platforms=['Linux'],
data_files=extension,
py_modules=['trackertag']
)
| gpl-3.0 | -5,443,317,509,589,844,000 | 31.948718 | 79 | 0.632685 | false |
zachjanicki/osf.io | api/base/urls.py | 1 | 1810 | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic.base import RedirectView
from settings import API_BASE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
url(base_pattern,
include(
[
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^comments/', include('api.comments.urls', namespace='comments')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^registrations/', include('api.registrations.urls', namespace='registrations')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^tokens/', include('api.tokens.urls', namespace='tokens')),
url(r'^logs/', include('api.logs.urls', namespace='logs')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^institutions/', include('api.institutions.urls', namespace='institutions')),
url(r'^collections/', include('api.collections.urls', namespace='collections')),
url(r'^guids/', include('api.guids.urls', namespace='guids')),
url(r'^licenses/', include('api.licenses.urls', namespace='licenses')),
url(r'^wikis/', include('api.wikis.urls', namespace='wikis')),
],
)
),
url(r'^$', RedirectView.as_view(pattern_name=views.root), name='redirect-to-root')
]
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
| apache-2.0 | -8,062,880,710,388,205,000 | 45.410256 | 102 | 0.59779 | false |
srmagura/potential | ps/grid.py | 1 | 8094 | import numpy as np
from ps.extend import EType
class PsGrid:
# FIXME old way is deprecated
'''def ps_construct_grids(self, scheme_order):
self.construct_grids(scheme_order)
R = self.R # remove eventually?
a = self.a
self.all_Mplus = {0: set(), 1: set(), 2: set()}
self.all_Mminus = {0: set(), 1: set(), 2: set()}
self.all_gamma = {}
for i, j in self.M0:
r, th = self.get_polar(i, j)
x, y = self.get_coord(i, j)
boundary_r = self.boundary.eval_r(th)
# Segment 0
if th >= a and th <= 2*np.pi:
if r <= boundary_r:
self.all_Mplus[0].add((i, j))
else:
self.all_Mminus[0].add((i, j))
# Segment 1
if 0 <= x and x <= R:
if y <= 0:
if r <= boundary_r:
self.all_Mplus[1].add((i, j))
else:
self.all_Mminus[1].add((i, j))
# Segment 2
x1, y1 = self.get_radius_point(2, x, y)
dist = self.signed_dist_to_radius(2, x, y)
if 0 <= x1 and x1 <= R*np.cos(a):
if dist <= 0:
if r <= boundary_r and y >= 0:
self.all_Mplus[2].add((i, j))
else:
self.all_Mminus[2].add((i, j))
union_gamma_set = set()
for sid in range(3):
Nplus = set()
Nminus = set()
for i, j in self.M0:
Nm = set([(i, j), (i-1, j), (i+1, j), (i, j-1), (i, j+1)])
if scheme_order > 2:
Nm |= set([(i-1, j-1), (i+1, j-1), (i-1, j+1),
(i+1, j+1)])
if (i, j) in self.all_Mplus[sid]:
Nplus |= Nm
elif (i, j) in self.all_Mminus[sid]:
Nminus |= Nm
gamma_set = Nplus & Nminus
self.all_gamma[sid] = list(gamma_set)
union_gamma_set |= gamma_set
self.union_gamma = list(union_gamma_set)
if self.fake_grid:
return self.ps_construct_fake_grid()
def ps_construct_fake_grid(self):
"""
For testing extension test only. Dangerous
"""
R = self.R
a = self.a
h = self.AD_len / self.N
inv = self.get_coord_inv
self.all_gamma = {
0: [
#inv(R+h/2, h/2),
#inv(-R+h/2, h/2),
#inv(-R-h/2, 0),
#inv(R*np.cos(a)+h/2, R*np.sin(a)-h/2),
#inv(R*np.cos(a)-h/2, R*np.sin(a)-h/2),
#inv(R*np.cos(a)-h/2, R*np.sin(a)+h/2),
],
1: [
#inv(R+h/2, h/2),
#inv(R+h/2, -h/2),
],
2: []#inv(R*np.cos(a)+h/2, R*np.sin(a)-h/2)],
}
self.union_gamma = set()
for sid in range(3):
self.union_gamma |= set(self.all_gamma[sid])
self.union_gamma = list(self.union_gamma)
def ps_grid_dist_test(self):
def get_dist(node, setype):
def dformula(x0, y0, _x, _y):
return np.sqrt((x0-_x)**2 + (y0-_y)**2)
x, y = self.get_coord(*node)
dist = -1
R = self.R
a = self.a
if setype == (0, EType.standard):
n, th = self.boundary.get_boundary_coord(
*self.get_polar(*node)
)
dist = abs(n)
elif setype == (0, EType.left):
x0, y0 = (R*np.cos(a), R*np.sin(a))
dist = dformula(x0, y0, x, y)
elif setype == (0, EType.right):
x0, y0 = (R, 0)
dist = dformula(x0, y0, x, y)
elif setype == (1, EType.standard):
dist = abs(y)
elif setype == (1, EType.left):
x0, y0 = (0, 0)
dist = dformula(x0, y0, x, y)
elif setype == (1, EType.right):
x0, y0 = (R, 0)
dist = dformula(x0, y0, x, y)
elif setype == (2, EType.standard):
dist = self.dist_to_radius(2, x, y)
elif setype == (2, EType.left):
x0, y0 = (0, 0)
dist = dformula(x0, y0, x, y)
elif setype == (2, EType.right):
x0, y0 = (R*np.cos(a), R*np.sin(a))
dist = dformula(x0, y0, x, y)
return dist
all_gamma2 = {0: set(), 1: set(), 2: set()}
for node in self.union_gamma:
for sid in (0, 1, 2):
etype = self.get_etype(sid, *node)
setype = (sid, etype)
dist = get_dist(node, setype)
h = self.AD_len / self.N
if dist <= h*np.sqrt(2):
all_gamma2[sid].add(node)
for sid in (0, 1, 2):
print('=== {} ==='.format(sid))
diff = all_gamma2[sid] - set(self.all_gamma[sid])
print('all_gamma2 - all_gamma:', all_gamma2[sid] - set(self.all_gamma[sid]))
for node in diff:
print('{}: x={} y={}'.format(node, *self.get_coord(*node)))
print('all_gamma - all_gamma2:', set(self.all_gamma[sid]) - all_gamma2[sid])
print()
#assert self.all_gamma == all_gamma2
'''
def ps_construct_grids(self, scheme_order):
self.construct_grids(scheme_order)
self.Nplus = set()
self.Nminus = set()
for i, j in self.M0:
Nm = set([(i, j), (i-1, j), (i+1, j), (i, j-1), (i, j+1)])
if scheme_order > 2:
Nm |= set([(i-1, j-1), (i+1, j-1), (i-1, j+1),
(i+1, j+1)])
if (i, j) in self.global_Mplus:
self.Nplus |= Nm
elif (i, j) in self.global_Mminus:
self.Nminus |= Nm
self.union_gamma = list(self.Nplus & self.Nminus)
def get_dist(node, setype):
def dformula(x0, y0, _x, _y):
return np.sqrt((x0-_x)**2 + (y0-_y)**2)
x, y = self.get_coord(*node)
dist = -1
R = self.R
a = self.a
if setype == (0, EType.standard):
n, th = self.boundary.get_boundary_coord(
*self.get_polar(*node)
)
dist = abs(n)
elif setype == (0, EType.left):
x0, y0 = (R*np.cos(a), R*np.sin(a))
dist = dformula(x0, y0, x, y)
elif setype == (0, EType.right):
x0, y0 = (R, 0)
dist = dformula(x0, y0, x, y)
elif setype == (1, EType.standard):
dist = abs(y)
elif setype == (1, EType.left):
x0, y0 = (0, 0)
dist = dformula(x0, y0, x, y)
elif setype == (1, EType.right):
x0, y0 = (R, 0)
dist = dformula(x0, y0, x, y)
elif setype == (2, EType.standard):
dist = self.dist_to_radius(2, x, y)
elif setype == (2, EType.left):
x0, y0 = (0, 0)
dist = dformula(x0, y0, x, y)
elif setype == (2, EType.right):
x0, y0 = (R*np.cos(a), R*np.sin(a))
dist = dformula(x0, y0, x, y)
return dist
self.all_gamma = {0: [], 1: [], 2: []}
for node in self.union_gamma:
r, th = self.get_polar(*node)
placed = False
for sid in (0, 1, 2):
etype = self.get_etype(sid, *node)
setype = (sid, etype)
dist = get_dist(node, setype)
h = self.AD_len / self.N
if dist <= h*np.sqrt(2):
self.all_gamma[sid].append(node)
placed = True
# Every node in union_gamma should go in at least one of the
# all_gamma sets
assert placed
| gpl-3.0 | -3,562,702,645,237,333,000 | 29.659091 | 88 | 0.407833 | false |
a358003542/expython | expython/pattern/__init__.py | 1 | 1439 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import UserList
import logging
logger = logging.getLogger(__name__)
class CycleList(UserList):
"""
    An iterable object that cycles over its elements endlessly. When mutating
    on the fly (mainly, removing elements inside a for loop), the
    `remove_item` method is recommended.
"""
def __init__(self, data):
super().__init__(data)
def __iter__(self):
self.index = 0
while True:
if self.index == len(self.data):
self.index = 0
yield self.data[self.index]
self.index += 1
def remove_item(self, item):
"""
        Mainly a correction for removals performed while iterating
        (on-the-fly mode); when not iterating, just use the list's own
        remove method. To stay consistent with the original remove method,
        no exception is caught here.
"""
self.data.remove(item)
self.index -= 1
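# A quick illustrative sketch of CycleList (hedged: example values only):
#
#     cl = CycleList([1, 2, 3])
#     it = iter(cl)
#     [next(it) for _ in range(4)]   # -> [1, 2, 3, 1]  (wraps around)
#
# When removing elements while iterating over `cl`, call cl.remove_item(x)
# rather than cl.data.remove(x) so the internal cursor stays consistent.
# last_out_game() below uses this for a Josephus-style elimination; e.g.
# last_out_game([1, 2, 3, 4, 5], 2) returns 3.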
def last_out_game(data, number):
test = CycleList(data)
count = 1
for i in test:
        logger.debug('testing %s', i)
if len(test.data) <= 1:
break
if count == number:
try:
test.remove_item(i)
                logger.debug('removing %s', i)
except ValueError:
pass
count = 0
count += 1
return test.data[0] | mit | 2,960,627,695,356,223,500 | 20.089286 | 44 | 0.497166 | false |
p4lang/p4app | docker/scripts/mininet/p4_mininet.py | 1 | 5757 | # Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from mininet.net import Mininet
from mininet.node import Switch, Host
from mininet.log import setLogLevel, info, error, debug
from mininet.moduledeps import pathCheck
from sys import exit
import os
import tempfile
import socket
class P4Host(Host):
def config(self, **params):
r = super(P4Host, self).config(**params)
for off in ["rx", "tx", "sg"]:
cmd = "/sbin/ethtool --offload %s %s off" % (self.defaultIntf().name, off)
self.cmd(cmd)
# disable IPv6
self.cmd("sysctl -w net.ipv6.conf.all.disable_ipv6=1")
self.cmd("sysctl -w net.ipv6.conf.default.disable_ipv6=1")
self.cmd("sysctl -w net.ipv6.conf.lo.disable_ipv6=1")
return r
def describe(self, sw_addr=None, sw_mac=None):
print "**********"
print "Network configuration for: %s" % self.name
print "Default interface: %s\t%s\t%s" %(
self.defaultIntf().name,
self.defaultIntf().IP(),
self.defaultIntf().MAC()
)
if sw_addr is not None or sw_mac is not None:
print "Default route to switch: %s (%s)" % (sw_addr, sw_mac)
print "**********"
class P4Switch(Switch):
"""P4 virtual switch"""
device_id = 0
def __init__(self, name, sw_path = None, json_path = None,
log_file = None,
thrift_port = None,
pcap_dump = False,
log_console = False,
verbose = False,
device_id = None,
enable_debugger = False,
**kwargs):
Switch.__init__(self, name, **kwargs)
assert(sw_path)
assert(json_path)
# make sure that the provided sw_path is valid
pathCheck(sw_path)
# make sure that the provided JSON file exists
if not os.path.isfile(json_path):
error("Invalid JSON file.\n")
exit(1)
self.sw_path = sw_path
self.json_path = json_path
self.verbose = verbose
self.log_file = log_file
if self.log_file is None:
self.log_file = "/tmp/p4s.{}.log".format(self.name)
self.output = open(self.log_file, 'w')
self.thrift_port = thrift_port
self.pcap_dump = pcap_dump
self.enable_debugger = enable_debugger
self.log_console = log_console
if device_id is not None:
self.device_id = device_id
P4Switch.device_id = max(P4Switch.device_id, device_id)
else:
self.device_id = P4Switch.device_id
P4Switch.device_id += 1
self.nanomsg = "ipc:///tmp/bm-{}-log.ipc".format(self.device_id)
@classmethod
def setup(cls):
pass
def check_switch_started(self, pid):
"""While the process is running (pid exists), we check if the Thrift
server has been started. If the Thrift server is ready, we assume that
the switch was started successfully. This is only reliable if the Thrift
server is started at the end of the init process"""
while True:
if not os.path.exists(os.path.join("/proc", str(pid))):
return False
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(0.5)
result = sock.connect_ex(("localhost", self.thrift_port))
if result == 0:
return True
def start(self, controllers):
"Start up a new P4 switch"
info("Starting P4 switch {}.\n".format(self.name))
args = [self.sw_path]
for port, intf in self.intfs.items():
if not intf.IP():
args.extend(['-i', str(port) + "@" + intf.name])
if self.pcap_dump:
args.append("--pcap")
# args.append("--useFiles")
if self.thrift_port:
args.extend(['--thrift-port', str(self.thrift_port)])
if self.nanomsg:
args.extend(['--nanolog', self.nanomsg])
args.extend(['--device-id', str(self.device_id)])
P4Switch.device_id += 1
args.append(self.json_path)
if self.enable_debugger:
args.append("--debugger")
if self.log_console:
args.append("--log-console")
info(' '.join(args) + "\n")
pid = None
with tempfile.NamedTemporaryFile() as f:
# self.cmd(' '.join(args) + ' > /dev/null 2>&1 &')
self.cmd(' '.join(args) + ' >' + self.log_file + ' 2>&1 & echo $! >> ' + f.name)
pid = int(f.read())
debug("P4 switch {} PID is {}.\n".format(self.name, pid))
if not self.check_switch_started(pid):
error("P4 switch {} did not start correctly.\n".format(self.name))
exit(1)
info("P4 switch {} has been started.\n".format(self.name))
def stop(self):
"Terminate P4 switch."
self.output.flush()
self.cmd('kill %' + self.sw_path)
self.cmd('wait')
self.deleteIntfs()
def attach(self, intf):
"Connect a data port"
assert(0)
def detach(self, intf):
"Disconnect a data port"
assert(0)
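# A minimal topology sketch (hedged: paths and ports are illustrative; it
# assumes a compiled bmv2 switch binary and P4 JSON, and must run as root):
#
#     from mininet.net import Mininet
#     net = Mininet(host=P4Host, switch=P4Switch, controller=None)
#     s1 = net.addSwitch('s1', sw_path='simple_switch',
#                        json_path='build/prog.json', thrift_port=9090)
#     h1 = net.addHost('h1', ip='10.0.0.1/24', mac='00:04:00:00:00:01')
#     net.addLink(h1, s1)
#     net.start()
#     h1.describe()
#     net.stop()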
| apache-2.0 | 1,125,914,588,463,093,400 | 35.436709 | 92 | 0.57061 | false |
iaz3/ModReader | modreader/game/__init__.py | 1 | 1209 | # !/usr/bin/env python3
"""
Game Support Modules should be located in this package
Names should be all lowercase, unique, and code should follow the template,
The template represents the bare miniumum API you are required to conform to.
You are allowed to add new files and extend it.
"""
# ====================== GPL License and Copyright Notice ======================
# This file is part of ModReader
# Copyright (C) 2016 Diana Land
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ModReader. If not, see <http://www.gnu.org/licenses/>.
#
# https://github.com/iaz3/ModReader
#
# =============================================================================
VERSION = "0.1.0"
| gpl-3.0 | 4,860,691,471,080,183,000 | 38 | 80 | 0.679074 | false |
zhanglab/psamm | psamm/gapfill.py | 1 | 11063 | # This file is part of PSAMM.
#
# PSAMM is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PSAMM is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PSAMM. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2014-2017 Jon Lund Steffensen <[email protected]>
# Copyright 2015-2020 Keith Dufault-Thompson <[email protected]>
"""Identify blocked metabolites and possible reconstructions.
This implements a variant of the algorithms described in [Kumar07]_.
"""
import logging
from six import iteritems, raise_from
from .lpsolver import lp
logger = logging.getLogger(__name__)
class GapFillError(Exception):
"""Indicates an error while running GapFind/GapFill"""
def _find_integer_tolerance(epsilon, v_max, min_tol):
"""Find appropriate integer tolerance for gap-filling problems."""
int_tol = min(epsilon / (10 * v_max), 0.1)
min_tol = max(1e-10, min_tol)
if int_tol < min_tol:
eps_lower = min_tol * 10 * v_max
logger.warning(
'When the maximum flux is {}, it is recommended that'
' epsilon > {} to avoid numerical issues with this'
' solver. Results may be incorrect with'
' the current settings!'.format(v_max, eps_lower))
return min_tol
return int_tol
def gapfind(model, solver, epsilon=0.001, v_max=1000, implicit_sinks=True):
"""Identify compounds in the model that cannot be produced.
Yields all compounds that cannot be produced. This method
assumes implicit sinks for all compounds in the model so
the only factor that influences whether a compound can be
produced is the presence of the compounds needed to produce it.
Epsilon indicates the threshold amount of reaction flux for the products
to be considered non-blocked. V_max indicates the maximum flux.
This method is implemented as a MILP-program. Therefore it may
not be efficient for larger models.
Args:
model: :class:`MetabolicModel` containing core reactions and reactions
that can be added for gap-filling.
solver: MILP solver instance.
epsilon: Threshold amount of a compound produced for it to not be
considered blocked.
v_max: Maximum flux.
implicit_sinks: Whether implicit sinks for all compounds are included
when gap-filling (traditional GapFill uses implicit sinks).
"""
prob = solver.create_problem()
# Set integrality tolerance such that w constraints are correct
min_tol = prob.integrality_tolerance.min
int_tol = _find_integer_tolerance(epsilon, v_max, min_tol)
if int_tol < prob.integrality_tolerance.value:
prob.integrality_tolerance.value = int_tol
# Define flux variables
v = prob.namespace()
for reaction_id in model.reactions:
lower, upper = model.limits[reaction_id]
v.define([reaction_id], lower=lower, upper=upper)
# Define constraints on production of metabolites in reaction
w = prob.namespace(types=lp.VariableType.Binary)
binary_cons_lhs = {compound: 0 for compound in model.compounds}
for spec, value in iteritems(model.matrix):
compound, reaction_id = spec
if value != 0:
w.define([spec])
w_var = w(spec)
lower, upper = (float(x) for x in model.limits[reaction_id])
if value > 0:
dv = v(reaction_id)
else:
dv = -v(reaction_id)
lower, upper = -upper, -lower
prob.add_linear_constraints(
dv <= upper * w_var,
dv >= epsilon + (lower - epsilon) * (1 - w_var))
binary_cons_lhs[compound] += w_var
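    # Binary indicator per compound: xp(compound) can only be 1 if at least one
    # producing reaction is active (enforced by the constraints below)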
xp = prob.namespace(model.compounds, types=lp.VariableType.Binary)
objective = xp.sum(model.compounds)
prob.set_objective(objective)
for compound, lhs in iteritems(binary_cons_lhs):
prob.add_linear_constraints(lhs >= xp(compound))
# Define mass balance constraints
massbalance_lhs = {compound: 0 for compound in model.compounds}
for spec, value in iteritems(model.matrix):
compound, reaction_id = spec
massbalance_lhs[compound] += v(reaction_id) * value
for compound, lhs in iteritems(massbalance_lhs):
if implicit_sinks:
# The constraint is merely >0 meaning that we have implicit sinks
# for all compounds.
prob.add_linear_constraints(lhs >= 0)
else:
prob.add_linear_constraints(lhs == 0)
# Solve
try:
result = prob.solve(lp.ObjectiveSense.Maximize)
except lp.SolverError as e:
        raise_from(GapFillError('Failed to solve gapfill: {}'.format(e)), e)
for compound in model.compounds:
if result.get_value(xp(compound)) < 0.5:
yield compound
def gapfill(
model, core, blocked, exclude, solver, epsilon=0.001, v_max=1000,
weights={}, implicit_sinks=True, allow_bounds_expansion=False):
"""Find a set of reactions to add such that no compounds are blocked.
Returns two iterators: first an iterator of reactions not in
core, that were added to resolve the model. Second, an
iterator of reactions in core that had flux bounds expanded (i.e.
irreversible reactions become reversible). Similarly to
GapFind, this method assumes, by default, implicit sinks for all compounds
in the model so the only factor that influences whether a compound
can be produced is the presence of the compounds needed to produce
it. This means that the resulting model will not necessarily be
flux consistent.
This method is implemented as a MILP-program. Therefore it may
not be efficient for larger models.
Args:
model: :class:`MetabolicModel` containing core reactions and reactions
that can be added for gap-filling.
core: The set of core (already present) reactions in the model.
blocked: The compounds to unblock.
exclude: Set of reactions in core to be excluded from gap-filling (e.g.
biomass reaction).
solver: MILP solver instance.
epsilon: Threshold amount of a compound produced for it to not be
considered blocked.
v_max: Maximum flux.
weights: Dictionary of weights for reactions. Weight is the penalty
score for adding the reaction (non-core reactions) or expanding the
flux bounds (all reactions).
implicit_sinks: Whether implicit sinks for all compounds are included
when gap-filling (traditional GapFill uses implicit sinks).
allow_bounds_expansion: Allow flux bounds to be expanded at the cost
of a penalty which can be specified using weights (traditional
GapFill does not allow this). This includes turning irreversible
reactions reversible.
"""
prob = solver.create_problem()
# Set integrality tolerance such that w constraints are correct
min_tol = prob.integrality_tolerance.min
int_tol = _find_integer_tolerance(epsilon, v_max, min_tol)
if int_tol < prob.integrality_tolerance.value:
prob.integrality_tolerance.value = int_tol
# Define flux variables
v = prob.namespace(model.reactions, lower=-v_max, upper=v_max)
# Add binary indicator variables
database_reactions = set(model.reactions).difference(core, exclude)
ym = prob.namespace(model.reactions, types=lp.VariableType.Binary)
yd = prob.namespace(database_reactions, types=lp.VariableType.Binary)
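    # Objective: minimize the weighted penalty for expanding flux bounds (ym)
    # and for adding non-core database reactions (yd)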
objective = ym.expr(
(rxnid, weights.get(rxnid, 1)) for rxnid in model.reactions)
objective += yd.expr(
(rxnid, weights.get(rxnid, 1)) for rxnid in database_reactions)
prob.set_objective(objective)
# Add constraints on all reactions
for reaction_id in model.reactions:
lower, upper = (float(x) for x in model.limits[reaction_id])
if reaction_id in exclude or not allow_bounds_expansion:
prob.add_linear_constraints(
upper >= v(reaction_id), v(reaction_id) >= lower)
else:
# Allow flux bounds to expand up to v_max with penalty
delta_lower = min(0, -v_max - lower)
delta_upper = max(0, v_max - upper)
prob.add_linear_constraints(
v(reaction_id) >= lower + ym(reaction_id) * delta_lower,
v(reaction_id) <= upper + ym(reaction_id) * delta_upper)
# Add constraints on database reactions
for reaction_id in database_reactions:
lower, upper = model.limits[reaction_id]
prob.add_linear_constraints(
v(reaction_id) >= yd(reaction_id) * -v_max,
v(reaction_id) <= yd(reaction_id) * v_max)
# Define constraints on production of blocked metabolites in reaction
w = prob.namespace(types=lp.VariableType.Binary)
binary_cons_lhs = {compound: 0 for compound in blocked}
for (compound, reaction_id), value in iteritems(model.matrix):
if reaction_id not in exclude and compound in blocked and value != 0:
w.define([(compound, reaction_id)])
w_var = w((compound, reaction_id))
dv = v(reaction_id) if value > 0 else -v(reaction_id)
prob.add_linear_constraints(
dv <= v_max * w_var,
dv >= epsilon + (-v_max - epsilon) * (1 - w_var))
binary_cons_lhs[compound] += w_var
for compound, lhs in iteritems(binary_cons_lhs):
prob.add_linear_constraints(lhs >= 1)
# Define mass balance constraints
massbalance_lhs = {compound: 0 for compound in model.compounds}
for (compound, reaction_id), value in iteritems(model.matrix):
if reaction_id not in exclude:
massbalance_lhs[compound] += v(reaction_id) * value
for compound, lhs in iteritems(massbalance_lhs):
if implicit_sinks:
# The constraint is merely >0 meaning that we have implicit sinks
# for all compounds.
prob.add_linear_constraints(lhs >= 0)
else:
prob.add_linear_constraints(lhs == 0)
# Solve
try:
prob.solve(lp.ObjectiveSense.Minimize)
except lp.SolverError as e:
raise_from(GapFillError('Failed to solve gapfill: {}'.format(e)), e)
def added_iter():
for reaction_id in database_reactions:
if yd.value(reaction_id) > 0.5:
yield reaction_id
def no_bounds_iter():
for reaction_id in model.reactions:
if ym.value(reaction_id) > 0.5:
yield reaction_id
return added_iter(), no_bounds_iter()
| gpl-3.0 | 1,452,733,391,583,776,800 | 39.375912 | 79 | 0.657417 | false |
notapresent/rtrss | tests/test_localdirectory_storage.py | 1 | 2009 | import unittest
from testfixtures import TempDirectory
from tests import random_key
from rtrss.storage.localdirectory import LocalDirectoryStorage, mkdir_p
class LocalDirectoryStorageTestCase(unittest.TestCase):
test_key = 'some random key'
test_value = 'some random value'
def setUp(self):
self.dir = TempDirectory()
self.store = LocalDirectoryStorage(self.dir.path)
def tearDown(self):
self.dir.cleanup()
def test_put_stores(self):
self.store.put(self.test_key, self.test_value)
self.assertEqual(self.test_value, self.dir.read(self.test_key))
def test_get_retrieves(self):
self.dir.write(self.test_key, self.test_value)
self.assertEqual(self.test_value, self.store.get(self.test_key))
def test_get_returns_none_for_nonexistent(self):
self.assertIsNone(self.store.get('some random nonexistent key'))
def test_delete_deletes(self):
self.dir.write(self.test_key, self.test_value)
self.store.delete(self.test_key)
self.assertIsNone(self.store.get(self.test_key))
def test_delete_nonexistent_not_raises(self):
try:
self.store.delete('some random nonexistent key')
except OSError as e:
            self.fail('delete("nonexistent key") raised an exception: %s' % e)
def test_key_with_slashes_creates_directories(self):
key = 'key/with/slash'
self.store.put(key, self.test_value)
subdir, filename = key.rsplit('/', 1)
self.dir.check_dir(subdir, filename)
def test_bulk_delete_deletes_multiple(self):
keys = []
for _ in xrange(10):
key = random_key()
keys.append(key)
self.dir.write(key, self.test_value)
self.store.bulk_delete(keys)
self.dir.check()
def test_mkdir_p_creates_dir(self):
dirname = 'test directory 1/test directory 2/test directory 3'
mkdir_p(self.dir.getpath(dirname))
self.dir.check_dir(dirname)
| apache-2.0 | 1,940,752,862,690,465,300 | 30.888889 | 76 | 0.652066 | false |
novafloss/ci-formula | _states/jenkins_job.py | 1 | 1441 | # -*- coding: utf-8 -*-
import os
def _recreate_job_check(old, new):
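    # The last line of the Jenkins job XML (the closing root tag) identifies the job type;
    # if it changed, the job must be recreated rather than updated in place.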
old_cls = old.splitlines()[-1]
new_cls = new.splitlines()[-1]
return old_cls != new_cls
def present(name, source, template=None, context=None):
update_or_create_xml = __salt__['jenkins.update_or_create_xml'] # noqa
get_file_str = __salt__['cp.get_file_str'] # noqa
get_template = __salt__['cp.get_template'] # noqa
if template:
get_template(source, '/tmp/job.xml', template=template,
context=context)
new = open('/tmp/job.xml').read().strip()
os.unlink('/tmp/job.xml')
else:
new = get_file_str(source)
return update_or_create_xml(
name, new, object_='job', recreate_callback=_recreate_job_check)
def absent(name):
_runcli = __salt__['jenkins.runcli'] # noqa
test = __opts__['test'] # noqa
ret = {
'name': name,
'changes': {},
'result': None if test else True,
'comment': ''
}
try:
_runcli('get-job', name)
except Exception:
ret['comment'] = 'Already removed'
return ret
if not test:
try:
ret['comment'] = _runcli('delete-job', name)
except Exception, e:
ret['comment'] = e.message
ret['result'] = False
return ret
ret['changes'] = {
'old': 'present',
'new': 'absent',
}
return ret
| mit | 5,004,029,649,107,946,000 | 23.844828 | 75 | 0.530881 | false |
VU-Cog-Sci/PRF_experiment | ColorMatcherSession.py | 1 | 3743 | from __future__ import division
from psychopy import visual, core, misc, event
import numpy as np
from IPython import embed as shell
from math import *
import os, sys, time, pickle
import pygame
from pygame.locals import *
# from pygame import mixer, time
import Quest
sys.path.append( 'exp_tools' )
# sys.path.append( os.environ['EXPERIMENT_HOME'] )
from Session import *
from ColorMatcherTrial import *
from standard_parameters import *
from Staircase import YesNoStaircase
import appnope
appnope.nope()
class ColorMatcherSession(EyelinkSession):
def __init__(self, subject_initials, index_number, scanner, tracker_on):
super(ColorMatcherSession, self).__init__( subject_initials, index_number)
self.create_screen( size = screen_res, full_screen = 0, physical_screen_distance = 159.0, background_color = background_color, physical_screen_size = (70, 40) )
self.standard_parameters = standard_parameters
self.response_button_signs = response_button_signs
self.create_output_file_name()
if tracker_on:
self.create_tracker(auto_trigger_calibration = 1, calibration_type = 'HV9')
if self.tracker_on:
self.tracker_setup()
else:
self.create_tracker(tracker_on = False)
self.scanner = scanner
# trials can be set up independently of the staircases that support their parameters
self.prepare_trials()
self.all_color_values = []
self.exp_start_time = 0.0
self.color_step = 0.02
def prepare_trials(self):
"""docstring for prepare_trials(self):"""
self.RG_offsets = (np.random.rand(self.standard_parameters['num_trials']))
self.phase_durations = np.array([-0.0001,-0.0001, 1.00, -0.0001, 0.001])
# stimuli
self.fixation_rim = visual.PatchStim(self.screen, mask='raisedCos',tex=None, size=12.5, pos = np.array((0.0,0.0)), color = (0,0,0), maskParams = {'fringeWidth':0.4})
self.fixation_outer_rim = visual.PatchStim(self.screen, mask='raisedCos',tex=None, size=17.5, pos = np.array((0.0,0.0)), color = (-1.0,-1.0,-1.0), maskParams = {'fringeWidth':0.4})
self.fixation = visual.PatchStim(self.screen, mask='raisedCos',tex=None, size=9.0, pos = np.array((0.0,0.0)), color = (0,0,0), opacity = 1.0, maskParams = {'fringeWidth':0.4})
screen_width, screen_height = self.screen_pix_size
ecc_mask = filters.makeMask(matrixSize = 2048, shape='raisedCosine', radius=self.standard_parameters['stim_size'] * self.screen_pix_size[1] / self.screen_pix_size[0], center=(0.0, 0.0), range=[1, -1], fringeWidth=0.1 )
self.mask_stim = visual.PatchStim(self.screen, mask=ecc_mask,tex=None, size=(self.screen_pix_size[0], self.screen_pix_size[0]), pos = np.array((0.0,0.0)), color = self.screen.background_color) #
def close(self):
super(ColorMatcherSession, self).close()
text_file = open("data/%s_color_ratios.txt"%self.subject_initials, "w")
text_file.write('Mean RG/BY ratio: %.2f\nStdev RG/BY ratio: %.2f'%(np.mean(np.array(self.all_color_values)/self.standard_parameters['BY_comparison_color']),np.std(np.array(self.all_color_values)/self.standard_parameters['BY_comparison_color'])))
text_file.close()
def run(self):
"""docstring for fname"""
# cycle through trials
for i in range(self.standard_parameters['num_trials']):
# prepare the parameters of the following trial based on the shuffled trial array
this_trial_parameters = self.standard_parameters.copy()
this_trial_parameters['RG_offset'] = self.RG_offsets[i]
these_phase_durations = self.phase_durations.copy()
this_trial = ColorMatcherTrial(this_trial_parameters, phase_durations = these_phase_durations, session = self, screen = self.screen, tracker = self.tracker)
# run the prepared trial
this_trial.run(ID = i)
if self.stopped == True:
break
self.close()
| mit | 2,694,495,617,494,793,700 | 40.131868 | 247 | 0.711996 | false |
jbalogh/zamboni | apps/devhub/tests/test_helpers.py | 1 | 7159 | # -*- coding: utf8 -*-
import unittest
import urllib
from django.utils import translation
from mock import Mock
from nose.tools import eq_
from pyquery import PyQuery as pq
import amo
import amo.tests
from amo.urlresolvers import reverse
from amo.tests.test_helpers import render
from addons.models import Addon
from devhub import helpers
from files.models import File, Platform
from versions.models import Version
def test_dev_page_title():
translation.activate('en-US')
request = Mock()
request.APP = None
addon = Mock()
addon.name = 'name'
ctx = {'request': request, 'addon': addon}
title = 'Oh hai!'
s1 = render('{{ dev_page_title("%s") }}' % title, ctx)
s2 = render('{{ page_title("%s :: Developer Hub") }}' % title, ctx)
eq_(s1, s2)
s1 = render('{{ dev_page_title() }}', ctx)
s2 = render('{{ page_title("Developer Hub") }}', ctx)
eq_(s1, s2)
s1 = render('{{ dev_page_title("%s", addon) }}' % title, ctx)
s2 = render('{{ page_title("%s :: %s") }}' % (title, addon.name), ctx)
eq_(s1, s2)
class TestDevBreadcrumbs(unittest.TestCase):
def setUp(self):
self.request = Mock()
self.request.APP = None
def test_no_args(self):
s = render('{{ dev_breadcrumbs() }}', {'request': self.request})
doc = pq(s)
crumbs = doc('li')
eq_(len(crumbs), 2)
eq_(crumbs.text(), 'Developer Hub My Add-ons')
eq_(crumbs.eq(1).children('a'), [])
def test_no_args_with_default(self):
s = render('{{ dev_breadcrumbs(add_default=True) }}',
{'request': self.request})
doc = pq(s)
crumbs = doc('li')
eq_(crumbs.text(), 'Add-ons Developer Hub My Add-ons')
eq_(crumbs.eq(1).children('a').attr('href'), reverse('devhub.index'))
eq_(crumbs.eq(2).children('a'), [])
def test_with_items(self):
s = render("""{{ dev_breadcrumbs(items=[('/foo', 'foo'),
('/bar', 'bar')]) }}'""",
{'request': self.request})
doc = pq(s)
crumbs = doc('li>a')
eq_(len(crumbs), 4)
eq_(crumbs.eq(2).text(), 'foo')
eq_(crumbs.eq(2).attr('href'), '/foo')
eq_(crumbs.eq(3).text(), 'bar')
eq_(crumbs.eq(3).attr('href'), '/bar')
def test_with_addon(self):
addon = Mock()
addon.name = 'Firebug'
addon.id = 1843
s = render("""{{ dev_breadcrumbs(addon) }}""",
{'request': self.request, 'addon': addon})
doc = pq(s)
crumbs = doc('li')
eq_(crumbs.text(), 'Developer Hub My Add-ons Firebug')
eq_(crumbs.eq(1).text(), 'My Add-ons')
eq_(crumbs.eq(1).children('a').attr('href'), reverse('devhub.addons'))
eq_(crumbs.eq(2).text(), 'Firebug')
eq_(crumbs.eq(2).children('a'), [])
def test_with_addon_and_items(self):
addon = Mock()
addon.name = 'Firebug'
addon.id = 1843
addon.slug = 'fbug'
s = render("""{{ dev_breadcrumbs(addon,
items=[('/foo', 'foo'),
('/bar', 'bar')]) }}""",
{'request': self.request, 'addon': addon})
doc = pq(s)
crumbs = doc('li')
eq_(len(crumbs), 5)
eq_(crumbs.eq(2).text(), 'Firebug')
eq_(crumbs.eq(2).children('a').attr('href'),
reverse('devhub.addons.edit', args=[addon.slug]))
eq_(crumbs.eq(3).text(), 'foo')
eq_(crumbs.eq(3).children('a').attr('href'), '/foo')
eq_(crumbs.eq(4).text(), 'bar')
eq_(crumbs.eq(4).children('a').attr('href'), '/bar')
def test_summarize_validation():
v = Mock()
v.errors = 1
v.warnings = 1
eq_(render('{{ summarize_validation(validation) }}',
{'validation': v}),
u'1 error, 1 warning')
v.errors = 2
eq_(render('{{ summarize_validation(validation) }}',
{'validation': v}),
u'2 errors, 1 warning')
v.warnings = 2
eq_(render('{{ summarize_validation(validation) }}',
{'validation': v}),
u'2 errors, 2 warnings')
def test_log_action_class():
v = Mock()
for k, v in amo.LOG_BY_ID.iteritems():
if v.action_class is not None:
cls = 'action-' + v.action_class
else:
cls = ''
eq_(render('{{ log_action_class(id) }}', {'id': v.id}), cls)
class TestDisplayUrl(unittest.TestCase):
def setUp(self):
self.raw_url = u'http://host/%s' % 'フォクすけといっしょ'.decode('utf8')
def test_utf8(self):
url = urllib.quote(self.raw_url.encode('utf8'))
eq_(render('{{ url|display_url }}', {'url': url}),
self.raw_url)
def test_unicode(self):
url = urllib.quote(self.raw_url.encode('utf8'))
url = unicode(url, 'utf8')
eq_(render('{{ url|display_url }}', {'url': url}),
self.raw_url)
def test_euc_jp(self):
url = urllib.quote(self.raw_url.encode('euc_jp'))
eq_(render('{{ url|display_url }}', {'url': url}),
self.raw_url)
class TestDevFilesStatus(amo.tests.TestCase):
def setUp(self):
platform = Platform.objects.create(id=amo.PLATFORM_ALL.id)
self.addon = Addon.objects.create(type=1, status=amo.STATUS_UNREVIEWED)
self.version = Version.objects.create(addon=self.addon)
self.file = File.objects.create(version=self.version,
platform=platform,
status=amo.STATUS_UNREVIEWED)
def expect(self, expected):
cnt, msg = helpers.dev_files_status([self.file], self.addon)[0]
eq_(cnt, 1)
eq_(msg, expected)
def test_unreviewed_lite(self):
self.addon.status = amo.STATUS_LITE
self.file.status = amo.STATUS_UNREVIEWED
self.expect(amo.STATUS_CHOICES[amo.STATUS_UNREVIEWED])
def test_unreviewed_public(self):
self.addon.status = amo.STATUS_PUBLIC
self.file.status = amo.STATUS_UNREVIEWED
self.expect(amo.STATUS_CHOICES[amo.STATUS_NOMINATED])
def test_unreviewed_nominated(self):
self.addon.status = amo.STATUS_NOMINATED
self.file.status = amo.STATUS_UNREVIEWED
self.expect(amo.STATUS_CHOICES[amo.STATUS_NOMINATED])
def test_unreviewed_lite_and_nominated(self):
self.addon.status = amo.STATUS_LITE_AND_NOMINATED
self.file.status = amo.STATUS_UNREVIEWED
self.expect(amo.STATUS_CHOICES[amo.STATUS_NOMINATED])
def test_reviewed_lite(self):
self.addon.status = amo.STATUS_LITE
self.file.status = amo.STATUS_LITE
self.expect(amo.STATUS_CHOICES[amo.STATUS_LITE])
def test_reviewed_public(self):
self.addon.status = amo.STATUS_PUBLIC
self.file.status = amo.STATUS_PUBLIC
self.expect(amo.STATUS_CHOICES[amo.STATUS_PUBLIC])
def test_disabled(self):
self.addon.status = amo.STATUS_PUBLIC
self.file.status = amo.STATUS_DISABLED
self.expect(amo.STATUS_CHOICES[amo.STATUS_DISABLED])
| bsd-3-clause | -6,397,185,273,978,489,000 | 32.995238 | 79 | 0.557081 | false |
systers/postorius | copybump.py | 1 | 2797 | #! /usr/bin/env python3
import os
import re
import sys
import stat
import datetime
FSF = 'by the Free Software Foundation, Inc.'
this_year = datetime.date.today().year
pyre_c = re.compile(r'# Copyright \(C\) ((?P<start>\d{4})-)?(?P<end>\d{4})')
pyre_n = re.compile(r'# Copyright ((?P<start>\d{4})-)?(?P<end>\d{4})')
new_c = '# Copyright (C) {}-{} {}'
new_n = '# Copyright {}-{} {}'
MODE = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
if '--noc' in sys.argv:
pyre = pyre_n
new = new_n
sys.argv.remove('--noc')
else:
pyre = pyre_c
new = new_c
def do_file(path, owner):
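    # Rewrite the copyright line in place: bump the end year to this_year,
    # write to a temporary '.out' file, then rename it back preserving permissions.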
permissions = os.stat(path).st_mode & MODE
with open(path) as in_file, open(path + '.out', 'w') as out_file:
try:
for line in in_file:
mo_c = pyre_c.match(line)
mo_n = pyre_n.match(line)
if mo_c is None and mo_n is None:
out_file.write(line)
continue
mo = (mo_n if mo_c is None else mo_c)
start = (mo.group('end')
if mo.group('start') is None
else mo.group('start'))
if int(start) == this_year:
out_file.write(line)
continue
print(new.format(start, this_year, owner), file=out_file) # noqa
print('=>', path)
for line in in_file:
out_file.write(line)
except UnicodeDecodeError:
print('Cannot convert path:', path)
os.remove(path + '.out')
return
os.rename(path + '.out', path)
os.chmod(path, permissions)
def remove(dirs, path):
try:
dirs.remove(path)
except ValueError:
pass
def do_walk():
try:
owner = sys.argv[1]
except IndexError:
owner = FSF
for root, dirs, files in os.walk('.'):
if root == '.':
remove(dirs, '.git')
remove(dirs, '.tox')
remove(dirs, 'bin')
remove(dirs, 'contrib')
remove(dirs, 'develop-eggs')
remove(dirs, 'eggs')
remove(dirs, 'parts')
remove(dirs, 'gnu-COPYING-GPL')
remove(dirs, '.installed.cfg')
remove(dirs, '.bzrignore')
remove(dirs, 'distribute_setup.py')
if root == './src':
remove(dirs, 'postorius.egg-info')
if root == './src/postorius':
remove(dirs, 'messages')
for file_name in files:
if os.path.splitext(file_name)[1] in ('.pyc', '.gz', '.egg'):
continue
path = os.path.join(root, file_name)
if os.path.isfile(path):
do_file(path, owner)
if __name__ == '__main__':
do_walk()
| gpl-3.0 | 5,930,184,830,484,723,000 | 28.135417 | 80 | 0.490526 | false |
pewen/ten | ten/post.py | 1 | 2525 | """
Post processing tools
"""
import os
import numpy as np
def extrac4dir(dir_path, search):
"""
    Extract the epsilon, mean free path, total time and efficiency from every file in a directory.
    Parameters
    ----------
    dir_path : str
        Path to the directory with the output files.
    search : list
        Numbers of acceptors whose efficiencies should be extracted.
Return
------
out : matrix
"""
dirs = os.listdir(path=dir_path)
out = np.zeros((len(dirs), len(search) + 3))
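    # output columns per file: [mean free path, epsilon, total time, one efficiency per entry in search]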
for num_file, file_path in enumerate(dirs):
with open(os.path.join(dir_path, file_path), 'r') as file:
cnt = 0
while True:
line = file.readline()
if 'path' in line:
line_split = line.split()
out[num_file][0] = float(line_split[3])
elif 'Epsilon' in line:
line_split = line.split()
out[num_file][1] = float(line_split[1])
elif 'Total time =' in line:
line_split = line.split()
out[num_file][2] = float(line_split[3])
elif 'Nº acceptors' in line:
line = file.readline()
while True:
line = file.readline()
# Remove all spaces
line_without_space = ''.join(line.split())
line_split = line_without_space.split('|')
# End of file
if '+--------------' in line:
break
if line_split[1] == str(search[cnt]):
out[num_file][cnt + 3] = float(line_split[4])
cnt += 1
break
if '' == line:
break
return out
def diference2(efficience, eff_matrix):
"""
    Squared difference between the simulated efficiencies (matrix) and a given efficiency array.
    Parameters
    ----------
    efficience : array_like
        Efficiencies to compare against.
    eff_matrix : matrix
        Matrix returned by extrac4dir.
    Return
    ------
    diff_matrix : matrix
        Copy of eff_matrix with an inserted column holding the summed squared differences.
"""
diff_matrix = np.zeros((eff_matrix.shape[0], eff_matrix.shape[1] + 1))
diff_matrix[:, :3] = eff_matrix[:, :3]
diff_matrix[:, 4:] = eff_matrix[:, 3:]
# Diff
for i in range(diff_matrix.shape[0]):
diff_matrix[i][3] = sum((diff_matrix[i][4:] - efficience)**2)
return diff_matrix
| mit | -8,547,275,649,797,289,000 | 27.681818 | 96 | 0.479398 | false |
AstroFloyd/SolTrack | Python/soltrack/riseset.py | 2 | 8500 |
"""SolTrack: a simple, free, fast and accurate C routine to compute the position of the Sun.
Copyright (c) 2014-2020 Marc van der Sluys, Paul van Kan and Jurgen Reintjes,
Sustainable Energy research group, HAN University of applied sciences, Arnhem, The Netherlands
This file is part of the SolTrack package, see: http://soltrack.sourceforge.net
SolTrack is derived from libTheSky (http://libthesky.sourceforge.net) under the terms of the GPL v.3
This is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General
Public License as published by the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License along with this code. If not, see
<http://www.gnu.org/licenses/>.
"""
import math as m
import numpy as np
import soltrack as st
from soltrack.data import PI,TWO_PI, R2D,R2H
from soltrack.dataclasses import Location, Time, RiseSet, copyDataclass
def computeSunRiseSet(location, time, rsAlt=0.0, useDegrees=False, useNorthEqualsZero=False):
"""Compute rise, transit and set times for the Sun, as well as their azimuths/altitude.
Parameters:
location (Location): Dataclass containing the geographic location to compute the Sun's rise and set times for.
time (Time): Dataclass containing date and time to compute the position for, in UT.
rsAlt (float): Altitude to return rise/set data for (radians; optional, default=0.0 meaning actual rise/set). Set rsAlt>pi/2 to compute transit only.
useDegrees (bool): Use degrees for input and output angular variables, rather than radians (optional, default=False).
useNorthEqualsZero (bool): Use the definition where azimuth=0 denotes north, rather than south (optional, default=False).
Returns:
(RiseSet): Dataclass containing the Sun's rise, transit and set data.
Note:
- if rsAlt == 0.0, actual rise and set times are computed
- if rsAlt != 0.0, the routine calculates when alt = rsAlt is reached
- returns times, rise/set azimuth and transit altitude in the dataclass riseSet
See:
- subroutine riset() in riset.f90 from libTheSky (libthesky.sf.net) for more info
"""
tmRad = np.zeros(3)
azalt = np.zeros(3)
alt=0.0; ha=0.0; h0=0.0
computeRefrEquatorial = True # Compure refraction-corrected equatorial coordinates (Hour angle, declination).
computeDistance = False # Compute the distance to the Sun in AU.
rsa = -0.8333/R2D # Standard altitude for the Sun in radians
if(abs(rsAlt) > 1.e-9): rsa = rsAlt # Use a user-specified altitude
# If the used uses degrees, convert the geographic location to radians:
# This was a local variable llocation in C
loc = copyDataclass(Location, location) # Local instance of the Location dataclass, so that it can be changed
if(useDegrees):
loc.longitude /= R2D
loc.latitude /= R2D
# Set date and time to midnight UT for the desired day:
rsTime = Time() # Local instance of the dataclass Time
rsTime.year = time.year
rsTime.month = time.month
rsTime.day = time.day
rsTime.hour = 0
rsTime.minute = 0
rsTime.second = 0.0
# Compute the Sun's position. Returns a Position object:
pos = st.computeSunPosition(loc, rsTime, False, useNorthEqualsZero, computeRefrEquatorial, computeDistance) # useDegrees = False: NEVER use degrees internally!
agst0 = pos.agst # AGST for midnight
evMax = 3 # Compute transit, rise and set times by default (1-3)
cosH0 = (m.sin(rsa)-m.sin(loc.latitude)*m.sin(pos.declination)) / \
(m.cos(loc.latitude)*m.cos(pos.declination))
if(abs(cosH0) > 1.0): # Body never rises/sets
evMax = 1 # Compute transit time and altitude only
else:
h0 = m.acos(cosH0) % PI # Should probably work without %
tmRad[0] = (pos.rightAscension - loc.longitude - pos.agst) % TWO_PI # Transit time in radians; lon0 > 0 for E
if(evMax > 1):
tmRad[1] = (tmRad[0] - h0) % TWO_PI # Rise time in radians
tmRad[2] = (tmRad[0] + h0) % TWO_PI # Set time in radians
accur = 1.0e-5 # Accuracy; 1e-5 rad ~ 0.14s. Don't make this smaller than 1e-16
for evi in range(evMax): # Loop over transit, rise, set
iter = 0
dTmRad = m.inf
while(abs(dTmRad) > accur):
th0 = agst0 + 1.002737909350795*tmRad[evi] # Solar day in sidereal days in 2000
rsTime.second = tmRad[evi]*R2H*3600.0 # Radians -> seconds - w.r.t. midnight (h=0,m=0)
pos = st.computeSunPosition(loc, rsTime, False, useNorthEqualsZero, computeRefrEquatorial, computeDistance) # useDegrees = False: NEVER use degrees internally!
ha = revPI(th0 + loc.longitude - pos.rightAscension) # Hour angle: -PI - +PI
alt = m.asin(m.sin(loc.latitude)*m.sin(pos.declination) +
m.cos(loc.latitude)*m.cos(pos.declination)*m.cos(ha)) # Altitude
# Correction to transit/rise/set times:
if(evi==0): # Transit
dTmRad = -revPI(ha) # -PI - +PI
else: # Rise/set
dTmRad = (alt-rsa)/(m.cos(pos.declination)*m.cos(loc.latitude)*m.sin(ha))
tmRad[evi] = tmRad[evi] + dTmRad
# Print debug output to stdOut:
# print(" %4i %2i %2i %2i %2i %9.3lf " % (rsTime.year,rsTime.month,rsTime.day, rsTime.hour,rsTime.minute,rsTime.second))
# print(" %3i %4i %9.3lf %9.3lf %9.3lf \n" % (evi,iter, tmRad[evi]*24,abs(dTmRad)*24,accur*24))
iter += 1
if(iter > 30): break # while loop doesn't seem to converge
# while(abs(dTmRad) > accur)
if(iter > 30): # Convergence failed
print("\n *** WARNING: riset(): Riset failed to converge: %i %9.3lf ***\n" % (evi,rsAlt))
tmRad[evi] = -m.inf
azalt[evi] = -m.inf
else: # Result converged, store it
if(evi == 0):
azalt[evi] = alt # Transit altitude
else:
azalt[evi] = m.atan2( m.sin(ha), ( m.cos(ha) * m.sin(loc.latitude) -
m.tan(pos.declination) * m.cos(loc.latitude) ) ) # Rise,set hour angle -> azimuth
if(tmRad[evi] < 0.0 and abs(rsAlt) < 1.e-9):
tmRad[evi] = -m.inf
azalt[evi] = -m.inf
# for-loop evi
# Set north to zero radians for azimuth if desired:
if(useNorthEqualsZero):
azalt[1] = (azalt[1] + PI) % TWO_PI # Add PI and fold between 0 and 2pi
azalt[2] = (azalt[2] + PI) % TWO_PI # Add PI and fold between 0 and 2pi
# Convert resulting angles to degrees if desired:
if(useDegrees):
azalt[0] *= R2D # Transit altitude
azalt[1] *= R2D # Rise azimuth
azalt[2] *= R2D # Set azimuth
# Store results:
riseSet = RiseSet() # Instance of the dataclass RiseSet, to store and return the results
riseSet.transitTime = tmRad[0]*R2H # Transit time - radians -> hours
riseSet.riseTime = tmRad[1]*R2H # Rise time - radians -> hours
riseSet.setTime = tmRad[2]*R2H # Set time - radians -> hours
riseSet.transitAltitude = azalt[0] # Transit altitude
riseSet.riseAzimuth = azalt[1] # Rise azimuth
riseSet.setAzimuth = azalt[2] # Set azimuth
return riseSet
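# Example usage (sketch; the keyword-argument constructors below are an assumption --
# check soltrack.dataclasses for the actual Location/Time signatures):
#   loc = Location(longitude=5.0, latitude=52.0)                          # degrees, used with useDegrees=True
#   tm = Time(year=2020, month=6, day=21, hour=0, minute=0, second=0.0)   # UT
#   rs = computeSunRiseSet(loc, tm, useDegrees=True)
#   print(rs.riseTime, rs.transitTime, rs.setTime)                        # fractional hours UT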
def revPI(angle):
"""Fold an angle in radians to take a value between -PI and +PI.
Parameters:
angle (float): Angle to fold (rad).
"""
return ((angle + PI) % TWO_PI) - PI
| lgpl-3.0 | 6,757,125,456,409,840,000 | 42.814433 | 173 | 0.597882 | false |
asd43/Structural-Variation | popgen/mergeBD2bed.py | 1 | 1961 | #!/usr/bin/env python3
# Copyright (c) 2017 Genome Research Ltd.
# Author: Alistair Dunham
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License , or (at your option) any later
# version.
# This program is distributed in the hope that it will be useful , but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
# You should have received a copy of the GNU General Public License along with
# this program. If not , see <http :// www.gnu.org/licenses/>.
## Script to merge filtered BD call files to bed format for processing
import argparse
import fileinput
parser = argparse.ArgumentParser(description="Merge filtered BreakDancer variant calls into a single bed file for genotyping. "
"If the ID flag is given an ID is expected in the column beyond the normal BD output, otherwise an id is generated.")
parser.add_argument('bd',metavar='B',type=str,help="File containing a list of BreakDancer variant call files (.BD_out format).")
parser.add_argument('--id','-i',action='store_true',help="Use IDs added to the BD_out file on filtering. Otherwise generate IDs enumerating only filtered calls.")
args = parser.parse_args()
bdFiles = []
with fileinput.input(args.bd) as fi:
for i in fi:
bdFiles.append(i.strip())
## Stream through each file and output bed formated versions
for fi in bdFiles:
with fileinput.input(fi) as bd:
if not args.id:
f = fi.split('/')
            idBase = f[-1].rsplit('.BD_out', 1)[0]  # remove the '.BD_out' suffix (strip() would drop a character set)
n = 1
for li in bd:
if not li[0] == '#':
t = li.strip().split()
if args.id:
ID = t[12]
else:
                    ID = '.'.join(['BD', idBase, str(n)])
n += 1
print(t[0],t[1],t[4],ID,sep='\t')
| gpl-3.0 | 1,119,849,092,429,157,500 | 39.020408 | 162 | 0.684345 | false |
maltsev/LatexWebOffice | app/tests/server/views/test_document.py | 1 | 4826 | # -*- coding: utf-8 -*-
"""
* Purpose : Tests for the document and project management views (app/view/documents.py)
* Creation Date : 20-11-2014
* Last Modified : Mi 26 Nov 2014 14:58:13 CET
* Author : mattis
* Coauthors : christian
* Sprintnumber : 2
* Backlog entry : -
"""
from app.common.constants import ERROR_MESSAGES
from app.common import util
from app.tests.server.views.viewtestcase import ViewTestCase
class DocumentsTestClass(ViewTestCase):
def setUp(self):
"""Setup Methode für die einzelnen Tests
Diese Funktion wird vor jeder Testfunktion ausgeführt.
Damit werden die notwendigen Variablen und Modelle für jeden Test neu initialisiert.
Die Methoden hierzu befinden sich im ViewTestCase (viewtestcase.py).
:return: None
"""
self.setUpSingleUser()
def tearDown(self):
"""Freigabe von nicht mehr notwendigen Ressourcen.
Diese Funktion wird nach jeder Testfunktion ausgeführt.
:return: None
"""
pass
def test_Execute(self):
"""Test der execute() Methode des document view
Teste die Verteilfunktion, die die verschiedenen Document-commands den richtigen Methode zuweist.
Testfälle:
- user1 ruft createdir mit fehlendem Parameter id auf -> Fehler
- user1 ruft unbekannten Befehl auf -> Fehler
- user1 ruft createdir mit eine String als id auf -> Fehler
- user1 ruft updatefile auf ohne den content mitzusenden -> Fehler
:return: None
"""
missingpara_id = {'name': 'id', 'type': int}
missingpara_content = {'name': 'content', 'type': str}
missingpara_name = {'name': 'name', 'type': str}
        # Test a call with a missing parameter
        # the createdir command requires the parameters 'id': parentdirid and 'name': directoryname
response = util.documentPoster(self, command='createdir', idpara=None, name='newfolder')
        # expected server response
serveranswer = ERROR_MESSAGES['MISSINGPARAMETER'] % missingpara_id
        # check the server response
        # the status should be failure
        # the server response should match serveranswer
util.validateJsonFailureResponse(self, response.content, serveranswer)
# --------------------------------------------------------------------------------------------------------------
        # Test an unknown command ('command')
response = util.documentPoster(self, command='DOESNOTEXIST')
        # expected server response
serveranswer = ERROR_MESSAGES['COMMANDNOTFOUND']
        # check the server response
        # the status should be failure
        # the server response should match serveranswer
util.validateJsonFailureResponse(self, response.content, serveranswer)
# --------------------------------------------------------------------------------------------------------------
        # Send a request to create a folder with a string as the ID
response = util.documentPoster(self, command='createdir', idpara='noIntID', name='newfolder')
        # expected server response
serveranswer = ERROR_MESSAGES['MISSINGPARAMETER'] % missingpara_id
        # check the server response
        # the status should be failure
        # the server response should match serveranswer
util.validateJsonFailureResponse(self, response.content, serveranswer)
# --------------------------------------------------------------------------------------------------------------
        # Send a request to change the content of a .tex file without sending the content
response = util.documentPoster(self, command='updatefile', idpara=1)
        # expected server response
serveranswer = ERROR_MESSAGES['MISSINGPARAMETER'] % missingpara_content
        # check the server response
        # the status should be failure
        # the server response should match serveranswer
#util.validateJsonFailureResponse(self, response.content, serveranswer)
# --------------------------------------------------------------------------------------------------------------
        # Send a request to rename a file without sending the new name
response = util.documentPoster(self, command='renamefile', idpara=1)
        # expected server response
serveranswer = ERROR_MESSAGES['MISSINGPARAMETER'] % missingpara_name
        # check the server response
        # the status should be failure
        # the server response should match serveranswer
#util.validateJsonFailureResponse(self, response.content, serveranswer) | gpl-3.0 | -6,987,938,852,695,280,000 | 37.134921 | 120 | 0.619484 | false |
swayf/doit | tests/test_runner.py | 1 | 25023 | import os
from multiprocessing import Queue
import pytest
from mock import Mock
from doit.dependency import Dependency
from doit.task import Task
from doit.control import TaskDispatcher, ExecNode
from doit import runner
# sample actions
def my_print(*args):
pass
def _fail():
return False
def _error():
raise Exception("I am the exception.\n")
def _exit():
raise SystemExit()
class FakeReporter(object):
"""Just log everything in internal attribute - used on tests"""
def __init__(self, outstream=None, options=None):
self.log = []
def get_status(self, task):
self.log.append(('start', task))
def execute_task(self, task):
self.log.append(('execute', task))
def add_failure(self, task, exception):
self.log.append(('fail', task))
def add_success(self, task):
self.log.append(('success', task))
def skip_uptodate(self, task):
self.log.append(('up-to-date', task))
def skip_ignore(self, task):
self.log.append(('ignore', task))
def cleanup_error(self, exception):
self.log.append(('cleanup_error',))
def runtime_error(self, msg):
self.log.append(('runtime_error',))
def teardown_task(self, task):
self.log.append(('teardown', task))
def complete_run(self):
pass
@pytest.fixture
def reporter(request):
return FakeReporter()
class TestRunner(object):
def testInit(self, reporter, depfile):
my_runner = runner.Runner(depfile.name, reporter)
assert False == my_runner._stop_running
assert runner.SUCCESS == my_runner.final_result
class TestRunner_SelectTask(object):
def test_ready(self, reporter, depfile):
t1 = Task("taskX", [(my_print, ["out a"] )])
my_runner = runner.Runner(depfile.name, reporter)
assert True == my_runner.select_task(ExecNode(t1, None), {})
assert ('start', t1) == reporter.log.pop(0)
assert not reporter.log
def test_DependencyError(self, reporter, depfile):
t1 = Task("taskX", [(my_print, ["out a"] )],
file_dep=["i_dont_exist"])
my_runner = runner.Runner(depfile.name, reporter)
assert False == my_runner.select_task(ExecNode(t1, None), {})
assert ('start', t1) == reporter.log.pop(0)
assert ('fail', t1) == reporter.log.pop(0)
assert not reporter.log
def test_upToDate(self, reporter, depfile):
t1 = Task("taskX", [(my_print, ["out a"] )], file_dep=[__file__])
my_runner = runner.Runner(depfile.name, reporter)
my_runner.dep_manager.save_success(t1)
assert False == my_runner.select_task(ExecNode(t1, None), {})
assert ('start', t1) == reporter.log.pop(0)
assert ('up-to-date', t1) == reporter.log.pop(0)
assert not reporter.log
def test_ignore(self, reporter, depfile):
t1 = Task("taskX", [(my_print, ["out a"] )])
my_runner = runner.Runner(depfile.name, reporter)
my_runner.dep_manager.ignore(t1)
assert False == my_runner.select_task(ExecNode(t1, None), {})
assert ('start', t1) == reporter.log.pop(0)
assert ('ignore', t1) == reporter.log.pop(0)
assert not reporter.log
def test_alwaysExecute(self, reporter, depfile):
t1 = Task("taskX", [(my_print, ["out a"] )])
my_runner = runner.Runner(depfile.name, reporter, always_execute=True)
my_runner.dep_manager.save_success(t1)
assert True == my_runner.select_task(ExecNode(t1, None), {})
assert ('start', t1) == reporter.log.pop(0)
assert not reporter.log
def test_noSetup_ok(self, reporter, depfile):
t1 = Task("taskX", [(my_print, ["out a"] )])
my_runner = runner.Runner(depfile.name, reporter)
assert True == my_runner.select_task(ExecNode(t1, None), {})
assert ('start', t1) == reporter.log.pop(0)
assert not reporter.log
def test_withSetup(self, reporter, depfile):
t1 = Task("taskX", [(my_print, ["out a"] )], setup=["taskY"])
my_runner = runner.Runner(depfile.name, reporter)
# defer execution
n1 = ExecNode(t1, None)
assert False == my_runner.select_task(n1, {})
assert ('start', t1) == reporter.log.pop(0)
assert not reporter.log
# trying to select again
assert True == my_runner.select_task(n1, {})
assert not reporter.log
def test_getargs_ok(self, reporter, depfile):
def ok(): return {'x':1}
def check_x(my_x): return my_x == 1
t1 = Task('t1', [(ok,)])
n1 = ExecNode(t1, None)
t2 = Task('t2', [(check_x,)], getargs={'my_x':('t1','x')})
n2 = ExecNode(t2, None)
my_runner = runner.Runner(depfile.name, reporter)
# t2 gives chance for setup tasks to be executed
assert False == my_runner.select_task(n2, {})
assert ('start', t2) == reporter.log.pop(0)
# execute task t1 to calculate value
assert True == my_runner.select_task(n1, {})
assert ('start', t1) == reporter.log.pop(0)
t1_result = my_runner.execute_task(t1)
assert ('execute', t1) == reporter.log.pop(0)
my_runner.process_task_result(n1, t1_result)
assert ('success', t1) == reporter.log.pop(0)
# t2.options are set on select_task
assert {} == t2.options
assert True == my_runner.select_task(n2, {})
assert not reporter.log
assert {'my_x': 1} == t2.options
def test_getargs_fail(self, reporter, depfile):
# invalid getargs. Exception wil be raised and task will fail
def check_x(my_x): return True
t1 = Task('t1', [lambda :True])
n1 = ExecNode(t1, None)
t2 = Task('t2', [(check_x,)], getargs={'my_x':('t1','x')})
n2 = ExecNode(t2, None)
my_runner = runner.Runner(depfile.name, reporter)
# t2 gives chance for setup tasks to be executed
assert False == my_runner.select_task(n2, {})
assert ('start', t2) == reporter.log.pop(0)
# execute task t1 to calculate value
assert True == my_runner.select_task(n1, {})
assert ('start', t1) == reporter.log.pop(0)
t1_result = my_runner.execute_task(t1)
assert ('execute', t1) == reporter.log.pop(0)
my_runner.process_task_result(n1, t1_result)
assert ('success', t1) == reporter.log.pop(0)
# select_task t2 fails
assert False == my_runner.select_task(n2, {})
assert ('fail', t2) == reporter.log.pop(0)
assert not reporter.log
def test_getargs_dict(self, reporter, depfile):
def ok(): return {'x':1}
t1 = Task('t1', [(ok,)])
n1 = ExecNode(t1, None)
t2 = Task('t2', None, getargs={'my_x':('t1', None)})
tasks_dict = {'t1': t1, 't2':t2}
my_runner = runner.Runner(depfile.name, reporter)
t1_result = my_runner.execute_task(t1)
my_runner.process_task_result(n1, t1_result)
# t2.options are set on _get_task_args
assert {} == t2.options
my_runner._get_task_args(t2, tasks_dict)
assert {'my_x': {'x':1}} == t2.options
def test_getargs_group(self, reporter, depfile):
def ok(): return {'x':1}
t1 = Task('t1', None, task_dep=['t1:a'], has_subtask=True)
t1a = Task('t1:a', [(ok,)], is_subtask=True)
t2 = Task('t2', None, getargs={'my_x':('t1', None)})
tasks_dict = {'t1': t1, 't1a':t1a, 't2':t2}
my_runner = runner.Runner(depfile.name, reporter)
t1a_result = my_runner.execute_task(t1a)
my_runner.process_task_result(ExecNode(t1a, None), t1a_result)
# t2.options are set on _get_task_args
assert {} == t2.options
my_runner._get_task_args(t2, tasks_dict)
assert {'my_x': [{'x':1}]} == t2.options
class TestTask_Teardown(object):
def test_ok(self, reporter, depfile):
touched = []
def touch():
touched.append(1)
t1 = Task('t1', [], teardown=[(touch,)])
my_runner = runner.Runner(depfile.name, reporter)
my_runner.teardown_list = [t1]
my_runner.teardown()
assert 1 == len(touched)
assert ('teardown', t1) == reporter.log.pop(0)
assert not reporter.log
def test_reverse_order(self, reporter, depfile):
def do_nothing():pass
t1 = Task('t1', [], teardown=[do_nothing])
t2 = Task('t2', [], teardown=[do_nothing])
my_runner = runner.Runner(depfile.name, reporter)
my_runner.teardown_list = [t1, t2]
my_runner.teardown()
assert ('teardown', t2) == reporter.log.pop(0)
assert ('teardown', t1) == reporter.log.pop(0)
assert not reporter.log
def test_errors(self, reporter, depfile):
def raise_something(x):
raise Exception(x)
t1 = Task('t1', [], teardown=[(raise_something,['t1 blow'])])
t2 = Task('t2', [], teardown=[(raise_something,['t2 blow'])])
my_runner = runner.Runner(depfile.name, reporter)
my_runner.teardown_list = [t1, t2]
my_runner.teardown()
assert ('teardown', t2) == reporter.log.pop(0)
assert ('cleanup_error',) == reporter.log.pop(0)
assert ('teardown', t1) == reporter.log.pop(0)
assert ('cleanup_error',) == reporter.log.pop(0)
assert not reporter.log
class TestTask_RunAll(object):
def test_reporter_runtime_error(self, reporter, depfile):
t1 = Task('t1', [], calc_dep=['t2'])
t2 = Task('t2', [lambda: {'file_dep':[1]}])
my_runner = runner.Runner(depfile.name, reporter)
my_runner.run_all(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
assert ('start', t2) == reporter.log.pop(0)
assert ('execute', t2) == reporter.log.pop(0)
assert ('success', t2) == reporter.log.pop(0)
assert ('runtime_error',) == reporter.log.pop(0)
assert not reporter.log
# run tests in both single process runner and multi-process runner
RUNNERS = [runner.Runner]
# TODO: test should be added and skipped!
if runner.MRunner.available():
RUNNERS.append(runner.MRunner)
@pytest.fixture(params=RUNNERS)
def RunnerClass(request):
return request.param
# decorator to force coverage on function.
# used to get coverage using multiprocessing.
def cov_dec(func): # pragma: no cover
try:
import coverage
except:
# coverage should not be required
return func
def wrap(*args, **kwargs):
cov = coverage.coverage(data_suffix=True)
cov.start()
try:
return func(*args, **kwargs)
finally:
cov.stop()
cov.save()
return wrap
# monkey patch function executed in a subprocess to get coverage
# TODO - disabled because it was not working anymore...
#runner.MRunner.execute_task = cov_dec(runner.MRunner.execute_task)
def ok(): return "ok"
def ok2(): return "different"
class TestRunner_run_tasks(object):
def test_teardown(self, reporter, RunnerClass, depfile):
t1 = Task('t1', [], teardown=[ok])
t2 = Task('t2', [])
my_runner = RunnerClass(depfile.name, reporter)
assert [] == my_runner.teardown_list
my_runner.run_tasks(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
my_runner.finish()
assert ('teardown', t1) == reporter.log[-1]
# testing whole process/API
def test_success(self, reporter, RunnerClass, depfile):
t1 = Task("t1", [(my_print, ["out a"] )] )
t2 = Task("t2", [(my_print, ["out a"] )] )
my_runner = RunnerClass(depfile.name, reporter)
my_runner.run_tasks(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
assert runner.SUCCESS == my_runner.finish()
assert ('start', t1) == reporter.log.pop(0), reporter.log
assert ('execute', t1) == reporter.log.pop(0)
assert ('success', t1) == reporter.log.pop(0)
assert ('start', t2) == reporter.log.pop(0)
assert ('execute', t2) == reporter.log.pop(0)
assert ('success', t2) == reporter.log.pop(0)
# test result, value, out, err are saved into task
def test_result(self, reporter, RunnerClass, depfile):
def my_action():
import sys
sys.stdout.write('out here')
sys.stderr.write('err here')
return {'bb': 5}
task = Task("taskY", [my_action] )
my_runner = RunnerClass(depfile.name, reporter)
assert None == task.result
assert {} == task.values
assert [None] == [a.out for a in task.actions]
assert [None] == [a.err for a in task.actions]
my_runner.run_tasks(TaskDispatcher({'taskY':task}, [], ['taskY']))
assert runner.SUCCESS == my_runner.finish()
assert {'bb': 5} == task.result
assert {'bb': 5} == task.values
assert ['out here'] == [a.out for a in task.actions]
assert ['err here'] == [a.err for a in task.actions]
    # whenever a task fails, the remaining tasks are not executed
def test_failureOutput(self, reporter, RunnerClass, depfile):
t1 = Task("t1", [_fail])
t2 = Task("t2", [_fail])
my_runner = RunnerClass(depfile.name, reporter)
my_runner.run_tasks(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
assert runner.FAILURE == my_runner.finish()
assert ('start', t1) == reporter.log.pop(0)
assert ('execute', t1) == reporter.log.pop(0)
assert ('fail', t1) == reporter.log.pop(0)
# second task is not executed
assert 0 == len(reporter.log)
def test_error(self, reporter, RunnerClass, depfile):
t1 = Task("t1", [_error])
t2 = Task("t2", [_error])
my_runner = RunnerClass(depfile.name, reporter)
my_runner.run_tasks(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
assert runner.ERROR == my_runner.finish()
assert ('start', t1) == reporter.log.pop(0)
assert ('execute', t1) == reporter.log.pop(0)
assert ('fail', t1) == reporter.log.pop(0)
# second task is not executed
assert 0 == len(reporter.log)
    # when successful, dependencies are updated
def test_updateDependencies(self, reporter, RunnerClass, depfile):
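        # append to the dependency and target files so they exist and their content changes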
depPath = os.path.join(os.path.dirname(__file__),"data/dependency1")
ff = open(depPath,"a")
ff.write("xxx")
ff.close()
dependencies = [depPath]
filePath = os.path.join(os.path.dirname(__file__),"data/target")
ff = open(filePath,"a")
ff.write("xxx")
ff.close()
targets = [filePath]
t1 = Task("t1", [my_print], dependencies, targets)
my_runner = RunnerClass(depfile.name, reporter)
my_runner.run_tasks(TaskDispatcher({'t1':t1}, [], ['t1']))
assert runner.SUCCESS == my_runner.finish()
d = Dependency(depfile.name)
assert d._get("t1", os.path.abspath(depPath))
def test_continue(self, reporter, RunnerClass, depfile):
t1 = Task("t1", [(_fail,)] )
t2 = Task("t2", [(_error,)] )
t3 = Task("t3", [(ok,)])
my_runner = RunnerClass(depfile.name, reporter, continue_=True)
disp = TaskDispatcher({'t1':t1, 't2':t2, 't3':t3}, [], ['t1', 't2', 't3'])
my_runner.run_tasks(disp)
assert runner.ERROR == my_runner.finish()
assert ('start', t1) == reporter.log.pop(0)
assert ('execute', t1) == reporter.log.pop(0)
assert ('fail', t1) == reporter.log.pop(0)
assert ('start', t2) == reporter.log.pop(0)
assert ('execute', t2) == reporter.log.pop(0)
assert ('fail', t2) == reporter.log.pop(0)
assert ('start', t3) == reporter.log.pop(0)
assert ('execute', t3) == reporter.log.pop(0)
assert ('success', t3) == reporter.log.pop(0)
assert 0 == len(reporter.log)
def test_continue_dont_execute_parent_of_failed_task(self, reporter,
RunnerClass, depfile):
t1 = Task("t1", [(_error,)] )
t2 = Task("t2", [(ok,)], task_dep=['t1'])
t3 = Task("t3", [(ok,)])
my_runner = RunnerClass(depfile.name, reporter, continue_=True)
disp = TaskDispatcher({'t1':t1, 't2':t2, 't3':t3}, [], ['t1', 't2', 't3'])
my_runner.run_tasks(disp)
assert runner.ERROR == my_runner.finish()
assert ('start', t1) == reporter.log.pop(0)
assert ('execute', t1) == reporter.log.pop(0)
assert ('fail', t1) == reporter.log.pop(0)
assert ('start', t2) == reporter.log.pop(0)
assert ('fail', t2) == reporter.log.pop(0)
assert ('start', t3) == reporter.log.pop(0)
assert ('execute', t3) == reporter.log.pop(0)
assert ('success', t3) == reporter.log.pop(0)
assert 0 == len(reporter.log)
def test_continue_dep_error(self, reporter, RunnerClass, depfile):
t1 = Task("t1", [(ok,)], file_dep=['i_dont_exist'] )
t2 = Task("t2", [(ok,)], task_dep=['t1'])
my_runner = RunnerClass(depfile.name, reporter, continue_=True)
disp = TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2'])
my_runner.run_tasks(disp)
assert runner.ERROR == my_runner.finish()
assert ('start', t1) == reporter.log.pop(0)
assert ('fail', t1) == reporter.log.pop(0)
assert ('start', t2) == reporter.log.pop(0)
assert ('fail', t2) == reporter.log.pop(0)
assert 0 == len(reporter.log)
def test_continue_ignored_dep(self, reporter, RunnerClass, depfile):
t1 = Task("t1", [(ok,)], )
t2 = Task("t2", [(ok,)], task_dep=['t1'])
my_runner = RunnerClass(depfile.name, reporter, continue_=True)
my_runner.dep_manager.ignore(t1)
disp = TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2'])
my_runner.run_tasks(disp)
assert runner.SUCCESS == my_runner.finish()
assert ('start', t1) == reporter.log.pop(0)
assert ('ignore', t1) == reporter.log.pop(0)
assert ('start', t2) == reporter.log.pop(0)
assert ('ignore', t2) == reporter.log.pop(0)
assert 0 == len(reporter.log)
def test_getargs(self, reporter, RunnerClass, depfile):
def use_args(arg1):
print arg1
def make_args(): return {'myarg':1}
t1 = Task("t1", [(use_args,)], getargs=dict(arg1=('t2','myarg')) )
t2 = Task("t2", [(make_args,)])
my_runner = RunnerClass(depfile.name, reporter)
my_runner.run_tasks(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
assert runner.SUCCESS == my_runner.finish()
assert ('start', t1) == reporter.log.pop(0)
assert ('start', t2) == reporter.log.pop(0)
assert ('execute', t2) == reporter.log.pop(0)
assert ('success', t2) == reporter.log.pop(0)
assert ('execute', t1) == reporter.log.pop(0)
assert ('success', t1) == reporter.log.pop(0)
assert 0 == len(reporter.log)
    # the runner should not interfere with SystemExit
def testSystemExitRaises(self, reporter, RunnerClass, depfile):
t1 = Task("t1", [_exit])
my_runner = RunnerClass(depfile.name, reporter)
disp = TaskDispatcher({'t1':t1}, [], ['t1'])
pytest.raises(SystemExit, my_runner.run_tasks, disp)
my_runner.finish()
class TestMReporter(object):
class MyRunner(object):
def __init__(self):
self.result_q = Queue()
def testReporterMethod(self, reporter):
fake_runner = self.MyRunner()
mp_reporter = runner.MReporter(fake_runner, reporter)
my_task = Task("task x", [])
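        # reporter calls made in a subprocess must be forwarded through the runner's result queue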
mp_reporter.add_success(my_task)
got = fake_runner.result_q.get(True, 1)
assert {'name': "task x", "reporter": 'add_success'} == got
def testNonReporterMethod(self, reporter):
fake_runner = self.MyRunner()
mp_reporter = runner.MReporter(fake_runner, reporter)
assert hasattr(mp_reporter, 'add_success')
assert not hasattr(mp_reporter, 'no_existent_method')
# python2.5 doesn't have class decorators
pytest.mark.skipif('not runner.MRunner.available()')(TestMReporter)
class TestMRunner_get_next_task(object):
# simple normal case
def test_run_task(self, reporter, depfile):
t1 = Task('t1', [])
t2 = Task('t2', [])
run = runner.MRunner(depfile.name, reporter)
run._run_tasks_init(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
assert t1 == run.get_next_task(None).task
assert t2 == run.get_next_task(None).task
assert None == run.get_next_task(None)
def test_stop_running(self, reporter, depfile):
t1 = Task('t1', [])
t2 = Task('t2', [])
run = runner.MRunner(depfile.name, reporter)
run._run_tasks_init(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
assert t1 == run.get_next_task(None).task
run._stop_running = True
assert None == run.get_next_task(None)
def test_waiting(self, reporter, depfile):
t1 = Task('t1', [])
t2 = Task('t2', [], setup=('t1',))
run = runner.MRunner(depfile.name, reporter)
run._run_tasks_init(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t2']))
# first start task 1
n1 = run.get_next_task(None)
assert t1 == n1.task
# hold until t1 is done
assert isinstance(run.get_next_task(None), runner.Hold)
assert isinstance(run.get_next_task(None), runner.Hold)
n1.run_status = 'done'
n2 = run.get_next_task(n1)
assert t2 == n2.task
assert None == run.get_next_task(n2)
def test_waiting_controller(self, reporter, depfile):
t1 = Task('t1', [])
t2 = Task('t2', [], calc_dep=('t1',))
run = runner.MRunner(depfile.name, reporter)
run._run_tasks_init(TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2']))
# first task ok
assert t1 == run.get_next_task(None).task
# hold until t1 finishes
assert 0 == run.free_proc
assert isinstance(run.get_next_task(None), runner.Hold)
assert 1 == run.free_proc
# python2.5 doesn't have class decorators
pytest.mark.skipif('not runner.MRunner.available()')(TestMRunner_get_next_task)
class TestMRunner_start_process(object):
    # 2 processes, 2 tasks
def test_all_processes(self, reporter, monkeypatch, depfile):
mock_process = Mock()
monkeypatch.setattr(runner, 'Process', mock_process)
t1 = Task('t1', [])
t2 = Task('t2', [])
td = TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2'])
run = runner.MRunner(depfile.name, reporter, num_process=2)
run._run_tasks_init(td)
result_q = Queue()
task_q = Queue()
proc_list = run._run_start_processes(task_q, result_q)
run.finish()
assert 2 == len(proc_list)
assert t1.name == task_q.get().name
assert t2.name == task_q.get().name
    # 2 processes, 1 task
def test_less_processes(self, reporter, monkeypatch, depfile):
mock_process = Mock()
monkeypatch.setattr(runner, 'Process', mock_process)
t1 = Task('t1', [])
td = TaskDispatcher({'t1':t1}, [], ['t1'])
run = runner.MRunner(depfile.name, reporter, num_process=2)
run._run_tasks_init(td)
result_q = Queue()
task_q = Queue()
proc_list = run._run_start_processes(task_q, result_q)
run.finish()
assert 1 == len(proc_list)
assert t1.name == task_q.get().name
    # 2 processes, 2 tasks (but only one task can be started)
def test_waiting_process(self, reporter, monkeypatch, depfile):
mock_process = Mock()
monkeypatch.setattr(runner, 'Process', mock_process)
t1 = Task('t1', [])
t2 = Task('t2', [], task_dep=['t1'])
td = TaskDispatcher({'t1':t1, 't2':t2}, [], ['t1', 't2'])
run = runner.MRunner(depfile.name, reporter, num_process=2)
run._run_tasks_init(td)
result_q = Queue()
task_q = Queue()
proc_list = run._run_start_processes(task_q, result_q)
run.finish()
assert 2 == len(proc_list)
assert t1.name == task_q.get().name
assert isinstance(task_q.get(), runner.Hold)
# python2.5 doesn't have class decorators
pytest.mark.skipif('not runner.MRunner.available()')(TestMRunner_start_process)
class TestMRunner_execute_task(object):
def test_hold(self, reporter, depfile):
run = runner.MRunner(depfile.name, reporter)
task_q = Queue()
task_q.put(runner.Hold()) # to test
task_q.put(None) # to terminate function
result_q = Queue()
run.execute_task_subprocess(task_q, result_q)
run.finish()
# nothing was done
assert result_q.empty() # pragma: no cover (coverage bug?)
# python2.5 doesn't have class decorators
pytest.mark.skipif('not runner.MRunner.available()')(TestMRunner_execute_task)
| mit | -4,764,168,194,960,022,000 | 37.378834 | 82 | 0.581785 | false |
francois-berder/PyLetMeCreate | letmecreate/click/joystick.py | 1 | 1291 | #!/usr/bin/env python3
"""Python binding of Joystick wrapper of LetMeCreate library."""
import ctypes
_LIB = ctypes.CDLL('libletmecreate_click.so')
def get_x():
"""Returns the X position of the joystick.
Note: An exception is thrown if it fails to read the X position from the
chip.
"""
pos_x = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_x(ctypes.byref(pos_x))
if ret < 0:
raise Exception("joystick click get x failed")
return pos_x.value
def get_y():
"""Returns the Y position of the joystick.
Note: An exception is thrown if it fails to read the Y position from the
chip.
"""
pos_y = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_y(ctypes.byref(pos_y))
if ret < 0:
raise Exception("joystick click get y failed")
return pos_y.value
def get_position():
"""Returns the X position of the joystick.
Note: An exception is thrown if it fails to read the position from the
chip.
"""
pos_x = ctypes.c_int8(0)
pos_y = ctypes.c_int8(0)
ret = _LIB.joystick_click_get_position(ctypes.byref(pos_x),
ctypes.byref(pos_y))
if ret < 0:
raise Exception("joystick click get position failed")
return (pos_x.value, pos_y.value)
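# Editorial usage sketch (not part of the original module): reads the joystick
# position using only the functions defined above. It assumes libletmecreate is
# installed, a Joystick Click board is attached and any required bus setup has
# already been done by the caller.
if __name__ == '__main__':
    try:
        pos_x, pos_y = get_position()
        print('joystick position: x={}, y={}'.format(pos_x, pos_y))
    except Exception as exc:
        print('failed to read joystick position: {}'.format(exc))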
| bsd-3-clause | -7,252,341,453,528,857,000 | 26.468085 | 76 | 0.625871 | false |
GoogleCloudPlatform/appengine-config-transformer | yaml_conversion/lib/google/appengine/api/yaml_listener.py | 1 | 7849 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/python2.4
#
# Copyright 2007 Google Inc. All Rights Reserved.
"""PyYAML event listener
Contains class which interprets YAML events and forwards them to
a handler object.
"""
from yaml_conversion.lib.google.appengine.api import yaml_errors
import yaml
# Default mapping of event type to handler method name
_EVENT_METHOD_MAP = {
yaml.events.StreamStartEvent: 'StreamStart',
yaml.events.StreamEndEvent: 'StreamEnd',
yaml.events.DocumentStartEvent: 'DocumentStart',
yaml.events.DocumentEndEvent: 'DocumentEnd',
yaml.events.AliasEvent: 'Alias',
yaml.events.ScalarEvent: 'Scalar',
yaml.events.SequenceStartEvent: 'SequenceStart',
yaml.events.SequenceEndEvent: 'SequenceEnd',
yaml.events.MappingStartEvent: 'MappingStart',
yaml.events.MappingEndEvent: 'MappingEnd',
}
class EventHandler(object):
"""Handler interface for parsing YAML files.
Implement this interface to define specific YAML event handling class.
Implementing classes instances are passed to the constructor of
EventListener to act as a receiver of YAML parse events.
"""
def StreamStart(self, event, loader):
"""Handle start of stream event"""
def StreamEnd(self, event, loader):
"""Handle end of stream event"""
def DocumentStart(self, event, loader):
"""Handle start of document event"""
def DocumentEnd(self, event, loader):
"""Handle end of document event"""
def Alias(self, event, loader):
"""Handle alias event"""
def Scalar(self, event, loader):
"""Handle scalar event"""
def SequenceStart(self, event, loader):
"""Handle start of sequence event"""
def SequenceEnd(self, event, loader):
"""Handle end of sequence event"""
def MappingStart(self, event, loader):
"""Handle start of mappping event"""
def MappingEnd(self, event, loader):
"""Handle end of mapping event"""
class EventListener(object):
"""Helper class to re-map PyYAML events to method calls.
By default, PyYAML generates its events via a Python generator. This class
is a helper that iterates over the events from the PyYAML parser and forwards
  them to a handler class in the form of method calls. For simplicity, the
underlying event is forwarded to the handler as a parameter to the call.
This object does not itself produce iterable objects, but is really a mapping
to a given handler instance.
Example use:
    class PrintDocumentHandler(EventHandler):
      def DocumentStart(self, event, loader):
print "A new document has been started"
EventListener(PrintDocumentHandler()).Parse('''
key1: value1
---
key2: value2
        ''')
>>> A new document has been started
A new document has been started
In the example above, the implemented handler class (PrintDocumentHandler)
has a single method which reports each time a new document is started within
  a YAML file. It is not necessary to subclass EventListener; it merely
  receives a PrintDocumentHandler instance. Every time a new document begins,
  PrintDocumentHandler.DocumentStart is called with the PyYAML event passed
  in as its parameter.
"""
def __init__(self, event_handler):
"""Initialize PyYAML event listener.
Constructs internal mapping directly from event type to method on actual
handler. This prevents reflection being used during actual parse time.
Args:
event_handler: Event handler that will receive mapped events. Must
implement at least one appropriate handler method named from
the values of the _EVENT_METHOD_MAP.
Raises:
ListenerConfigurationError if event_handler is not an EventHandler.
"""
if not isinstance(event_handler, EventHandler):
raise yaml_errors.ListenerConfigurationError(
'Must provide event handler of type yaml_listener.EventHandler')
self._event_method_map = {}
# For each event type in default method map...
for event, method in _EVENT_METHOD_MAP.iteritems():
# Map event class to actual method
self._event_method_map[event] = getattr(event_handler, method)
def HandleEvent(self, event, loader=None):
"""Handle individual PyYAML event.
Args:
      event: Event to forward to the mapped handler method.
      loader: Loader instance to pass through to the handler method.
Raises:
IllegalEvent when receives an unrecognized or unsupported event type.
"""
# Must be valid event object
if event.__class__ not in _EVENT_METHOD_MAP:
raise yaml_errors.IllegalEvent(
"%s is not a valid PyYAML class" % event.__class__.__name__)
# Conditionally handle event
if event.__class__ in self._event_method_map:
self._event_method_map[event.__class__](event, loader)
def _HandleEvents(self, events):
"""Iterate over all events and send them to handler.
This method is not meant to be called from the interface.
Only use in tests.
Args:
events: Iterator or generator containing events to process.
raises:
EventListenerParserError when a yaml.parser.ParserError is raised.
EventError when an exception occurs during the handling of an event.
"""
for event in events:
try:
self.HandleEvent(*event)
except Exception, e:
event_object, loader = event
raise yaml_errors.EventError(e, event_object)
def _GenerateEventParameters(self,
stream,
loader_class=yaml.loader.SafeLoader):
"""Creates a generator that yields event, loader parameter pairs.
For use as parameters to HandleEvent method for use by Parse method.
During testing, _GenerateEventParameters is simulated by allowing
the harness to pass in a list of pairs as the parameter.
A list of (event, loader) pairs must be passed to _HandleEvents otherwise
it is not possible to pass the loader instance to the handler.
    Also responsible for instantiating the loader from the loader_class
    parameter.
Args:
stream: String document or open file object to process as per the
yaml.parse method. Any object that implements a 'read()' method which
returns a string document will work.
      loader_class: Loader class to use as per the yaml.parse method. Used to
        instantiate a new yaml loader instance.
Yields:
Tuple(event, loader) where:
event: Event emitted by PyYAML loader.
        loader: Loader instance that emitted the event.
"""
assert loader_class is not None
try:
loader = loader_class(stream)
while loader.check_event():
yield (loader.get_event(), loader)
except yaml.error.YAMLError, e:
raise yaml_errors.EventListenerYAMLError(e)
def Parse(self, stream, loader_class=yaml.loader.SafeLoader):
"""Call YAML parser to generate and handle all events.
Calls PyYAML parser and sends resulting generator to handle_event method
for processing.
Args:
stream: String document or open file object to process as per the
yaml.parse method. Any object that implements a 'read()' method which
returns a string document will work with the YAML parser.
loader_class: Used for dependency injection.
"""
self._HandleEvents(self._GenerateEventParameters(stream, loader_class))
| apache-2.0 | 6,397,356,658,310,916,000 | 33.884444 | 79 | 0.706842 | false |
shichao-an/leetcode-python | product_of_array_except_self/solution4.py | 1 | 1363 |
"""
Given an array of n integers where n > 1, nums, return an array output such
that output[i] is equal to the product of all the elements of nums except
nums[i].
Solve it without division and in O(n).
For example, given [1,2,3,4], return [24,12,8,6].
Follow up:
Could you solve it with constant space complexity? (Note: The output array
does not count as extra space for the purpose of space complexity analysis.)
"""
class Solution(object):
def productExceptSelf(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
n = len(nums)
res = [1 for i in range(n)]
# Scan from left to right
for i in range(1, n):
# i is from 1 to n - 1
# res[i] is the product accumulated to the left
res[i] = res[i - 1] * nums[i - 1]
# right_product is the product accumulated to the right
right_product = 1
for i in range(1, n):
            # j ranges from n - 2 down to 0
j = n - 1 - i
right_product *= nums[j + 1]
res[j] *= right_product
return res
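# Worked trace (editorial note, not part of the original solution): for
# nums = [1, 2, 3, 4] the left-to-right pass leaves res = [1, 1, 2, 6],
# i.e. the product of everything strictly to the left of each index. The
# right-to-left pass then multiplies res[2] by 4, res[1] by 4*3 = 12 and
# res[0] by 4*3*2 = 24, giving [24, 12, 8, 6] without using division.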
a0 = [0, 0]
a1 = [1, 2, 3]
a2 = [2, 3, 4]
a3 = [1, 2, 3, 4]
a4 = [2, 3, 4, 5]
s = Solution()
print(s.productExceptSelf(a0))
print(s.productExceptSelf(a1))
print(s.productExceptSelf(a2))
print(s.productExceptSelf(a3))
print(s.productExceptSelf(a4))
| bsd-2-clause | -8,392,910,952,196,059,000 | 25.72549 | 76 | 0.58474 | false |