repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (string, 19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (string, 15 classes) | hash (int64) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|
kriberg/eve-armada | armada/logistics/models.py | 1 | 2472 | from django.db import models
from django.contrib import admin
from armada.capsuler.models import UserPilot, \
UserCorporation, \
UserAPIKey, \
Capsuler
class LogisticsTeam(models.Model):
name = models.CharField(max_length=200)
corporation = models.ForeignKey(UserCorporation)
creator = models.ForeignKey(UserPilot)
team_type = models.CharField(max_length=30, choices=(
('STOCKER', 'Stocking'),
('HAULER', 'Hauling'),
('MANUFACTUER', 'Manufacturing'),
('FUELER', 'Fueling')))
def get_members(self):
return LogisticsTeamMember.objects.filter(team=self).order_by('pilot__public_info__name')
def get_capsuler_members(self, user):
pilots = user.get_active_pilots()
return LogisticsTeamMember.objects.filter(team=self, pilot__in=pilots)
def get_managers(self):
return LogisticsTeamMember.objects.filter(team=self,
manager=True,
accepted=True)
def is_member(self, capsuler):
if self.get_capsuler_members(capsuler).count() > 0 or capsuler == self.manager:
return True
else:
return False
def is_manager(self, capsuler):
memberships = LogisticsTeamMember.objects.filter(team=self,
pilot__in=capsuler.get_pilots_in_corporation(self.corporation),
accepted=True,
manager=True)
return memberships.count() > 0
def is_creator(self, capsuler):
for membership in self.get_capsuler_members(capsuler):
if membership.pilot == self.creator:
return True
return False
def __unicode__(self):
return self.name
def get_page_link(self):
return '/%s/%s/%s/' % (self.team_type.lower(), self.corporation, self.name)
class Meta:
unique_together = ('corporation', 'name')
class LogisticsTeamMember(models.Model):
team = models.ForeignKey(LogisticsTeam)
pilot = models.ForeignKey(UserPilot, related_name='pilot_userpilot')
accepted = models.BooleanField(default=False, editable=False)
manager = models.BooleanField(default=False)
god = models.ForeignKey(Capsuler, related_name='god_capsuler', editable=False)
class Meta:
unique_together = ('team', 'pilot')
def __unicode__(self):
return '%s: %s' % (self.team.name, self.pilot)
admin.site.register(LogisticsTeam)
admin.site.register(LogisticsTeamMember)
| agpl-3.0 | -895,496,008,196,292,100 | 32.863014 | 97 | 0.64644 | false |
sapcc/monasca-agent | monasca_agent/collector/checks_d/prometheus.py | 1 | 8246 | """
Plugin to scrape prometheus endpoint
"""
# This file uses 'print' as a function rather than a statement, a la Python3
from __future__ import print_function
import math
import requests
# import prometheus client dependency dynamically
from monasca_agent.common import util
from requests import RequestException
try:
import prometheus_client.parser as prometheus_client_parser
except ImportError:
prometheus_client_parser = None
# stdlib
import logging
from datetime import datetime
import calendar
# project
import monasca_agent.collector.checks.utils as utils
import monasca_agent.collector.checks.services_checks as services_checks
import monasca_agent.common.exceptions as exceptions
log = logging.getLogger(__name__)
class Prometheus(services_checks.ServicesCheck):
"""
Collect metrics and events
"""
def __init__(self, name, init_config, agent_config, instances=None):
super(Prometheus, self).__init__(name, init_config, agent_config, instances)
# last time of polling
self._last_ts = {}
self._publisher = utils.DynamicCheckHelper(self)
self._config = {}
for inst in instances:
url = inst['url']
# for Prometheus federation URLs, set the name match filter according to the mapping
if url.endswith('/federate'):
mapped_metrics = self._publisher.get_mapped_metrics(inst)
url += '?match[]={__name__=~"' + ("|".join(mapped_metrics) + '"')
for key, value in inst.get('match_labels', {}).items():
if isinstance(value, list):
url += ',{}=~"{}"'.format(key, "|".join(value))
else:
url += ',{}=~"{}"'.format(key, value)
url += '}'
log.info("Fetching from Prometheus federation URL: %s", url)
self._config[inst['name']] = {'url': url, 'timeout': int(inst.get('timeout', 5)),
'collect_response_time': bool(inst.get('collect_response_time', False))}
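# Illustrative example of the federation URL built above (all values here are
# assumptions, not part of any real config): with url
# 'http://prometheus:9090/federate', mapped metrics ['cpu_usage', 'mem_usage']
# and match_labels {'job': ['kubelet', 'cadvisor']}, the loop produces
# 'http://prometheus:9090/federate?match[]={__name__=~"cpu_usage|mem_usage",job=~"kubelet|cadvisor"}'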
def _check(self, instance):
if prometheus_client_parser is None:
self.log.error("Skipping prometheus plugin check due to missing 'prometheus_client' module.")
return
self._update_metrics(instance)
# overriding method to catch Infinity exception
def get_metrics(self, prettyprint=False):
"""Get all metrics, including the ones that are tagged.
@return the list of samples
@rtype list of Measurement objects from monasca_agent.common.metrics
"""
try:
return super(Prometheus, self).get_metrics(prettyprint)
except exceptions.Infinity as ex:
# self._disabledMetrics.append(metric_name)
self.log.exception("Caught infinity exception in prometheus plugin.")
if not prettyprint:
self.log.error("More dimensions needs to be mapped in order to resolve clashing measurements")
return self.get_metrics(True)
else:
self.rate('monasca.agent.mapping_errors', 1, dimensions={'agent_check': 'prometheus',
'metric': ex.metric})
return []
@staticmethod
def _convert_timestamp(timestamp):
# convert from string '2016-03-16T16:48:59.900524303Z' to a float monasca can handle 164859.900524
# conversion using strptime() works only for 6 digits in microseconds so the timestamp is limited to
# 26 characters
ts = datetime.strptime(timestamp[:25] + timestamp[-1], "%Y-%m-%dT%H:%M:%S.%fZ")
return calendar.timegm(datetime.timetuple(ts))
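# Worked example of the truncation above (input string taken from the comment):
#   s = "2016-03-16T16:48:59.900524303Z"
#   s[:25] + s[-1]  ->  "2016-03-16T16:48:59.90052Z"   (nanoseconds cut to fit %f)
# calendar.timegm(...timetuple(...)) then returns whole epoch seconds; the
# sub-second part is dropped by timetuple().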
def _update_container_metrics(self, instance, metric_name, container, timestamp=None,
fixed_dimensions=None):
# TBD determine metric from Prometheus input
labels = container[1]
value = float(container[2])
if math.isnan(value):
self.log.debug('filtering out NaN value provided for metric %s{%s}', metric_name, labels)
return
self._publisher.push_metric(instance,
metric=metric_name,
value=value,
labels=labels,
timestamp=timestamp,
fixed_dimensions=fixed_dimensions)
def _retrieve_and_parse_metrics(self, url, timeout, collect_response_time, instance_name):
"""
Metrics from prometheus come in plain text from the endpoint and therefore need to be parsed.
To do that the prometheus client's text_string_to_metric_families -method is used. That method returns a
generator object.
The method consumes the metrics from the endpoint:
# HELP container_cpu_system_seconds_total Cumulative system cpu time consumed in seconds.
# TYPE container_cpu_system_seconds_total counter
container_cpu_system_seconds_total{id="/",name="/"} 1.59578817e+06
....
and produces a metric family element with (returned from generator) with the following attributes:
name -> e.g. ' container_cpu_system_seconds_total '
documentation -> e.g. ' container_cpu_system_seconds_total Cumulative system cpu time consumed in seconds. '
type -> e.g. ' counter '
samples -> e.g. ' [.. ,("container_cpu_system_seconds_total", {id="/",name="/"}, 1.59578817e+06),
('container_cpu_system_seconds_total', {u'id': u'/docker', u'name': u'/docker'},
922.66),
..] '
:param url: the url of the prometheus metrics
:return: metric_families iterable
"""
timer = util.Timer()
try:
response = requests.get(url, timeout=timeout)
# report response time first, even when there is HTTP errors
if collect_response_time:
# Stop the timer as early as possible
running_time = timer.total()
self.gauge('monasca.agent.check_collect_time', running_time, dimensions={'agent_check': 'prometheus',
'instance': instance_name})
response.raise_for_status()
body = response.text
except RequestException:
self.log.exception("Retrieving metrics from endpoint %s failed", url)
self.rate('monasca.agent.check_collect_errors', 1, dimensions={'agent_check': 'prometheus',
'instance': instance_name})
return []
metric_families = prometheus_client_parser.text_string_to_metric_families(body)
return metric_families
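# A standalone sketch (not part of the original plugin) of the parsing step the
# docstring above describes; the sample exposition text is an assumption and the
# helper is never called by the plugin itself.
def _example_parse_exposition_text():
    import prometheus_client.parser as parser
    sample = (
        '# HELP container_cpu_system_seconds_total Cumulative system cpu time consumed in seconds.\n'
        '# TYPE container_cpu_system_seconds_total counter\n'
        'container_cpu_system_seconds_total{id="/",name="/"} 1.59578817e+06\n'
    )
    for family in parser.text_string_to_metric_families(sample):
        for s in family.samples:
            # s[0] = sample name, s[1] = label dict, s[2] = float value
            print(s[0], s[1], s[2])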
def _update_metrics(self, instance):
cfg = self._config[instance['name']]
metric_families_generator = self._retrieve_and_parse_metrics(cfg['url'], cfg['timeout'],
cfg['collect_response_time'], instance['name'])
for metric_family in metric_families_generator:
container = None
try:
for container in metric_family.samples:
# currently there is no support for detecting metric types from P8S
self._update_container_metrics(instance, metric_family.name, container)
except Exception as e:
self.log.warning("Unable to collect metric: {0} for container: {1} . - {2} ".format(
metric_family.name, container[1].get('name'), repr(e)))
self.rate('monasca.agent.check_collect_errors', 1, dimensions={'agent_check': 'prometheus',
'instance': instance['name']})
def _update_last_ts(self, instance_name):
utc_now = datetime.utcnow()
self._last_ts[instance_name] = utc_now.isoformat('T')
| bsd-3-clause | 5,412,609,451,448,231,000 | 45.067039 | 120 | 0.573975 | false |
vivyly/fancast | fancast/casting/views.py | 1 | 6503 | import simplejson
from django.views import generic
from django.http import HttpResponse
from django.views.decorators.csrf import (csrf_exempt,
requires_csrf_token)
from rest_framework import (viewsets,
generics,
)
from rest_framework.renderers import JSONRenderer
from .models import (Project,
Character,
Actor,
Prospect)
from .serializers import (ProjectSerializer,
CharacterSerializer,
ProspectSerializer,
ActorSerializer)
from .forms import (AddActor,
AddVote,
AddCharacter)
class JSONResponse(HttpResponse):
"""
An HttpResponse that renders its content into JSON.
"""
def __init__(self, data, **kwargs):
content = JSONRenderer().render(data)
kwargs['content_type'] = 'application/json'
super(JSONResponse, self).__init__(content, **kwargs)
class ProjectListView(generic.ListView):
#template_name = "project_list.html"
template_name = "projects.html"
def get_queryset(self):
return Project.objects.all().order_by('published')
class ProjectDetailView(generic.DetailView):
model = Project
#template_name = "cast_list.html"
template_name = "casting.html"
context_object_name = "slug"
def get_context_data(self, **kwargs):
context = super(ProjectDetailView, self).get_context_data(**kwargs)
context['sessionID'] = self.request.COOKIES.get('sessionid')
return context
class ProjectViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows projects to be viewed or edited
"""
queryset = Project.objects.all().order_by('published')
serializer_class = ProjectSerializer
class CharacterViewSet(generics.ListCreateAPIView):
model = Character
serializer_class = CharacterSerializer
def get_queryset(self):
slug = self.kwargs.get('slug')
if slug:
return Character.objects.filter(project__slug=slug)
return Character.objects.none()
class CharacterDetail(generics.RetrieveAPIView):
model = Character
serializer_class = CharacterSerializer
def get_queryset(self):
slug = self.kwargs.get('slug')
if slug:
return Character.objects.filter(slug=slug)
return Character.objects.none()
class ActorViewSet(generics.ListAPIView):
model = Actor
serializer_class = ActorSerializer
def get_queryset(self):
slug = self.kwargs.get('slug')
if slug:
character = Character.objects.get(slug=slug)
eligible_actors = []
for actor in character.actors:
try:
prospect = Prospect.objects.get(actor = actor,
character = character)
actor.upvotes = prospect.upvotes
actor.downvotes = prospect.downvotes
actor.total = actor.total
except Prospect.DoesNotExist:
actor.upvotes = 0
actor.downvotes = 0
actor.total = 0
eligible_actors.append(actor)
return eligible_actors
else:
return Character.objects.none()
class ActorDetail(generics.RetrieveAPIView):
serializer_class = ActorSerializer
def get_queryset(self):
slug = self.kwargs.get('slug')
if slug:
try:
return Actor.objects.filter(slug=slug)
except Actor.DoesNotExist:
pass
return Actor.objects.none()
@csrf_exempt
@requires_csrf_token
def vote(request, slug):
if request.method == "POST" or request.method == "PUT":
#this is probably not the right way to do it, need
#to figure out why post params are coming in as a string
#instead of a QueryDict
params = simplejson.loads(request.body)
params['sessionid'] = request.session.session_key
params['prospect_id'] = slug
form = AddVote(params)
if form.is_valid():
_vote = form.save()
try:
prospect = Prospect.objects.get(slug=slug)
prospects = Prospect.objects.filter(character=prospect.character)
serializer = ProspectSerializer(prospects, many=True,
context = {'request':request})
serializer.is_valid()
return JSONResponse(serializer.data)
except Prospect.DoesNotExist:
return JSONResponse({})
@csrf_exempt
@requires_csrf_token
def add_actor(request):
if request.method == "POST":
#this is probably not the right way to do it, need
#to figure out why post params are coming in as a string
#instead of a QueryDict
params = simplejson.loads(request.body)
form = AddActor(params)
if form.is_valid():
_actor = form.save()
character = Character.objects.get(slug=params.get('character_id'))
prospects = Prospect.objects.filter(character=character)
serializer = ProspectSerializer(prospects, many=True,
context = {'request':request})
serializer.is_valid()
return JSONResponse(serializer.data)
else:
errors = [(k, v[0]) for k, v in
form.errors.items()]
return JSONResponse({'errors':errors})
return JSONResponse({})
@csrf_exempt
@requires_csrf_token
def add_character(request):
if request.method == "POST":
#this is probably not the right way to do it, need
#to figure out why post params are coming in as a string
#instead of a QueryDict
print request
params = simplejson.loads(request.body)
print params
form = AddCharacter(params)
if form.is_valid():
character = form.save()
serializer = CharacterSerializer([character], many=True,
context = {'request':request})
serializer.is_valid()
return JSONResponse(serializer.data)
else:
errors = [(k, v[0]) for k, v in
form.errors.items()]
return JSONResponse(errors)
return JSONResponse({})
| bsd-3-clause | -2,488,046,979,807,079,400 | 33.775401 | 78 | 0.584807 | false |
blab/nextstrain-augur | builds/dengue/dengue.process.py | 1 | 9677 | from __future__ import print_function
import os, sys
# we assume (and assert) that this script is running from the virus directory, i.e. inside H7N9 or zika
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import base.process
from base.process import process
import argparse
import numpy as np
from dengue_titers import titer_model, titer_export ## Set up and parameterize the titer model separately for tidiness
##### Define references and metadata #####
sanofi_vaccine_strains = {
'denv1': 'DENV1/THAILAND/PUO359/1980',
'denv2': 'DENV2/THAILAND/PUO218/1980',
'denv3': 'DENV3/THAILAND/PAH88188/1988',
'denv4': 'DENV4/INDONESIA/S1228/1978',
'all': None}
regions = ['africa', 'europe', 'north_america', 'china', 'south_asia',
'japan_korea', 'south_pacific', 'oceania', 'south_america',
'southeast_asia', 'west_asia']
##### Parse args, set up config #####
def collect_args():
"""Returns a dengue-specific argument parser."""
parser = base.process.collect_args()
# parser.add_argument('-j', '--json', default=None, nargs='+', type=str, help="Accepts path to prepared JSON(s); overrides -s argument")
parser.add_argument('-s', '--serotypes', default=["multiple"], nargs='+', type=str, choices=['denv1', 'denv2', 'denv3', 'denv4', 'all', 'multiple'],
help="Look for prepared JSON(s) like ./prepared/dengue_SEROTYPE.json; 'multiple' will run all five builds. Default='multiple'")
parser.add_argument('--no_mut_freqs', default=True, action='store_true', help="skip mutation frequencies")
parser.add_argument('--no_tree_freqs', default=False, action='store_true', help="skip tree (clade) frequencies")
parser.add_argument('--no_titers', default=False, action='store_true', help="skip titer models")
parser.set_defaults(json = None)
return parser
def make_config (prepared_json, args):
"""
Configure your analysis here.
Parsed as a function to enable running multiple builds with one cmd.
"""
return {
"dir": "dengue",
"in": prepared_json,
"geo_inference": ['region'], # what traits to perform this on; don't run country (too many demes, too few sequences per deme to be reliable)
"auspice": { ## settings for auspice JSON export
"extra_attr": ['serum', 'clade', 'dTiter_sanofi'], # keys from tree.tree.clade['attr'] to include in export
"color_options": { # which traits to color the tree by in auspice; titer colorbys are added in dengue_titers
"country":{"key":"country", "legendTitle":"Country", "menuItem":"country", "type":"discrete"},
"region":{"key":"region", "legendTitle":"Region", "menuItem":"region", "type":"discrete"},
"gt":{"key":"genotype", "legendTitle":"Genotype", "menuItem":"genotype", "type":"discrete"},
},
"defaults": {'geoResolution': 'region', 'colorBy': 'region', 'distanceMeasure': 'div', 'mapTriplicate': True}
},
"timetree_options": {"Tc": False},
"fit_titer_model": not args.no_titers,
"titers": { # regularization parameter values and cross-validation fraction
"lam_avi":0.0,
"lam_pot":0.5,
"lam_drop":1.0,
"training_fraction":0.9,
},
"estimate_mutation_frequencies": not args.no_mut_freqs,
"estimate_tree_frequencies": not args.no_tree_freqs,
"clean": args.clean,
"pivot_spacing": 1.0/4, # pivots = time points; 1/N timepoints per year
"newick_tree_options":{
"raxml": not args.no_raxml # for dev work only
}
}
##### Parse input files/params and run #####
if __name__=="__main__":
parser = collect_args()
args = parser.parse_args()
### Find the right input files ###
if args.json: # If provided, a specified JSON path overrides serotype argument
args.json = [args.json]
else: # Look for serotype-specific JSONs in the ./prepared/ directory
if 'multiple' in args.serotypes: # "multiple" = run all 5 builds
args.serotypes = ['denv1', 'denv2', 'denv3', 'denv4', 'all']
else:
args.serotypes = args.serotypes
args.json = ['./prepared/dengue_%s.json'%s for s in args.serotypes] # Look for ./prepared/dengue_SEROTYPE.json if no file paths given
for j in args.json: # validate input JSONs exist
assert os.path.isfile(j)
### Run analyses ###
for prepared_json in args.json:
try:
print("Processing %s"%prepared_json)
runner = process(make_config(prepared_json, args)) # parse
runner.align() # run alignment with mafft
runner.build_tree() # build tree with fasttree -> raxml
runner.timetree_setup_filter_run() # infer ML ancestral states (geo traits, node dates, mutations)
runner.run_geo_inference() # run mugration model to infer transmissions
# estimate mutation frequencies here.
if runner.config["estimate_mutation_frequencies"]:
pivots = runner.get_pivots_via_spacing()
runner.estimate_mutation_frequencies(pivots=pivots, min_freq=0.02, inertia=np.exp(-1.0/12), stiffness=2)
# estimate tree frequencies here.
if runner.config["estimate_tree_frequencies"]: # methods @ [ref]
pivots = runner.get_pivots_via_spacing()
runner.estimate_tree_frequencies(pivots=pivots, stiffness=2) # stiffness ~= amount of smoothing
for region in ['southeast_asia', 'south_america']: #regions:
try:
runner.estimate_tree_frequencies(region=region, stiffness=2)
except:
continue
# titers
if runner.config["fit_titer_model"] and runner.config["titers"]: # methods @ Neher et al., PNAS 2016
titer_model(runner,
lam_pot = runner.config['titers']['lam_pot'],
lam_avi = runner.config['titers']['lam_avi'],
lam_drop = runner.config['titers']['lam_drop'],
training_fraction = runner.config['titers']['training_fraction'],
sanofi_strain = sanofi_vaccine_strains[runner.info['lineage']], # vaccine strain for each serotype-specific build
plot=False,
criterium = lambda node: True, # calculate dTiter for all branches
cross_validate=3)
titer_export(runner)
### Export for visualization in auspice
runner.auspice_export()
except:
continue
##### Extra code bank #####
'''
genotypes = {
'denv1': {'I': [('E', 461, 'V'), ('E', 484, 'L'), ('M', 107, 'T')],
'II': [('E', 345, 'A'), ('E', 432, 'M'), ('E', 439, 'V')],
'IV': [('E', 339, 'S'), ('M', 72, 'E'), ('E', 88, 'T')],
'V': [('E', 297, 'T'), ('NS5', 135, 'M')]},
'denv2': {'AMERICAN': [('E', 71, 'D'), ('E', 81, 'T'), ('E', 129, 'I')],
'ASIANAMERICAN': [('E', 491, 'A'), ('M', 15, 'G'), ('M', 39, 'I')],
'ASIANI': [('E', 484, 'I'), ('NS5', 688, 'I'), ('NS1', 222, 'N')],
'COSMOPOLITAN': [('E', 71, 'A'), ('E', 149, 'N'), ('E', 462, 'V')],
'SYLVATIC': [('E', 59, 'F'), ('E', 236, 'M'), ('E', 432, 'V')]},
'denv3': {'I': [('E', 233, 'K'), ('M', 128, 'F'), ('E', 68, 'V')],
'II': [('M', 57, 'A'), ('NS5', 750, 'K')],
'III': [('E', 303, 'T'), ('E', 454, 'V'), ('E', 132, 'Y')],
'IV': [('E', 22, 'E'), ('E', 50, 'V'), ('E', 62, 'G')]},
'denv4': {'I': [('E', 494, 'H'), ('NS1', 5, 'A')],
'II': [('E', 265, 'A'), ('E', 46, 'T'), ('NS1', 246, 'S')],
'SYLVATIC': [('E', 132, 'V'), ('E', 154, 'S'), ('E', 162, 'T')]},
'all': {}
}
for i in ['denv1', 'denv2', 'denv3', 'denv4']:
for k,v in genotypes[i].items():
genotypes['all'][i.upper()+'_'+k] = v
# Label named clades based on mutations/genotypes at defining sites
runner.matchClades(genotypes[runner.info['lineage']])
# this is tricky with dengue because the canonical genotypes
# don't really represent the present-day viral diversity.
# I'll get around to redefining these soon-ish hopefully.
### Comparison: force dTiter values to be non-zero only on interserotype brances
def is_interserotype(node):
descendents = node.get_terminals()
serotypes = [k.name.split('/')[0] for k in descendents if 'DENV' in k.name]
serotypes = [s for s in serotypes if s != 'DENV']
return len(set(serotypes)) > 1
interserotype_branches = []
for node in runner.tree.tree.find_clades():
if is_interserotype(node):
interserotype_branches.append(node)
for child in node.clades:
interserotype_branches.append(child)
for node in runner.tree.tree.find_clades():
if node in interserotype_branches:
node.interserotype = True
else:
node.interserotype = False
titer_model(runner,
lam_pot = runner.config['titers']['lam_pot'],
lam_avi = runner.config['titers']['lam_avi'],
lam_drop = runner.config['titers']['lam_drop'],
training_fraction = runner.config['titers']['training_fraction'],
plot=False,
criterium = lambda node: node.interserotype == True,
csv_fname='~/Users/Sidney/Dropbox/dengue/data/titer-model/interserotype-branch-effects/model_predictions.csv')
'''
| agpl-3.0 | -5,209,273,781,426,590,000 | 48.372449 | 152 | 0.571045 | false |
LowerSilesians/geo-squizzy | build_big_data/main.py | 1 | 2041 | import random
import json
from models import CITY
class Duplicates:
def __init__(self):
self.storage = dict()
pass
class Feature:
def __init__(self, *args, **kwargs):
self.data = dict({"type": "Feature", "properties": dict(), "geometry": {"type": kwargs['type'], "coordinates": []}})
self.data['properties'] = kwargs['model']().get_random_data()
def add_coordinates(self, coordinates=None):
self.data['geometry']['coordinates'] = coordinates
class DataStructure:
def __init__(self, *args, **kwargs):
self.data = dict({'type': kwargs['type'], 'features': []})
self.duplicates = Duplicates()
self._range = kwargs['coordinates_range']
self.feature_model = kwargs['feature_model']
self.feature_type = kwargs['feature_type']
self.__run__(number=kwargs['features_number'])
pass
def __run__(self, number=None):
self.data['features'] = [self.feature() for x in range(0, number, 1)]
pass
def coordinates(self):
x = random.uniform(self._range[0], self._range[1])
case = self.duplicates.storage.get(x, None)
while case is not None:
x = random.uniform(self._range[0], self._range[1])
case = self.duplicates.storage.get(x, None)
self.duplicates.storage[x] = x
return x
def feature(self):
feature = Feature(type=self.feature_type, model=self.feature_model)
feature.add_coordinates(coordinates=[self.coordinates(), self.coordinates()])
return feature.data
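# Shape of a single generated feature, for illustration only (the property and
# coordinate values below are made up; real ones are random):
# {"type": "Feature",
#  "properties": {<random CITY fields>},
#  "geometry": {"type": "Point", "coordinates": [12.345678, -56.789012]}}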
if __name__ == "__main__":
geo = DataStructure(type="FeatureCollection",
feature_type="Point",
coordinates_range=[float(-200), float(200)],
features_number=100000,
feature_model=CITY)
geo_json = json.dumps(geo.data)
f = open("/home/ing/PycharmProjects/geo-squizzy/geosquizzy/build_big_data/data/dump100000.json", "w")
f.write(geo_json)
f.close() | mit | -4,119,395,978,009,548,000 | 33.033333 | 124 | 0.590397 | false |
mrh1997/cymu | libclang/build-and-test-clang.py | 1 | 1065 | import subprocess
import sys
from os.path import dirname, join
prj_path = dirname(sys.argv[0])
if len(sys.argv) == 1 or sys.argv[1] != 'no-rebuild':
subprocess.check_call(['vagrant', 'powershell', '-c', 'cmd.exe', '-c',
r'C:\vagrant\build.cmd'])
sys.path.append(join(prj_path, r'src\tools\clang\bindings\python'))
import clang.cindex
clang.cindex.Config.set_library_path(join(prj_path, r'build\Release\bin'))
c_src = """
int main(void)
{
int a;
int * b;
a = (3 + 4) * -(3 + 1);
b = &a;
return a;
}
"""
def print_node(node, indentation=0):
print indentation*' ', node.kind.name, node.spelling, node.operator_kind.name if node.operator_kind != clang.cindex.OperatorKind.NULL else ""
for subnode in node.get_children():
print_node(subnode, indentation+1)
transunit = clang.cindex.TranslationUnit.from_source(
'test.c', unsaved_files=[('test.c', c_src)])
if len(list(transunit.diagnostics)) > 0:
for diag in transunit.diagnostics:
print diag
else:
print_node(transunit.cursor)
| gpl-3.0 | 7,588,159,631,043,952,000 | 31.272727 | 148 | 0.647887 | false |
jhauberg/cards.py | cards/templatefield.py | 1 | 2868 | # coding=utf-8
"""
This module provides functions for working with template fields.
"""
import re
from typing import Iterator
class TemplateField: # pylint: disable=too-few-public-methods
""" Represents a field in a template. """
def __init__(self,
name: str=None,
context: str=None,
inner_content: str=None,
indices: range=None):
self.name = name # the name of the field
self.context = context # the context passed to the field name
self.inner_content = inner_content # the inner content between the field braces
self.indices = indices # the indices ranging from the first wrapping '{' to the last '}'
if self.inner_content is None:
if self.name is not None:
if self.context is not None:
self.inner_content = self.name + ' ' + self.context
else:
self.inner_content = self.name
def __str__(self):
return '{{ ' + (self.inner_content or '') + ' }}'
def has_row_reference(self) -> bool:
""" Determine whether a field holds a row reference. """
return (self.context.startswith('#')
if self.context is not None
else False)
def fields(content: str,
with_name_like: str=None,
with_context_like: str=None,
strictly_matching: bool=True) -> Iterator[TemplateField]:
""" Return an iterator for all fields (e.g. '{{ a_field }}') that occur in a template. """
pattern = r'{{\s?(([^}}\s]*)\s?(.*?))\s?}}'
for match in re.finditer(pattern, content):
inner_content = match.group(1).strip()
name = match.group(2).strip()
context = match.group(3).strip()
inner_content = inner_content if len(inner_content) > 0 else None
name = name if len(name) > 0 else None
context = context if len(context) > 0 else None
field = TemplateField(
name, context, inner_content, indices=range(
match.start(), match.end()))
satisfies_name_filter = (with_name_like is None or
(with_name_like is not None and field.name is not None
and re.search(with_name_like, field.name) is not None))
satisfies_context_filter = (with_context_like is None or
(with_context_like is not None and field.context is not None
and re.search(with_context_like, field.context) is not None))
satisfies_filter = (satisfies_name_filter and satisfies_context_filter
if strictly_matching
else satisfies_name_filter or satisfies_context_filter)
if satisfies_filter:
yield field
| mit | -7,489,422,013,993,804,000 | 36.246753 | 98 | 0.561018 | false |
mrunge/horizon_lib | horizon_lib/tables/actions.py | 1 | 38071 | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import defaultdict
import logging
import types
import warnings
from django.conf import settings
from django.core import urlresolvers
from django import shortcuts
from django.template.loader import render_to_string # noqa
from django.utils.datastructures import SortedDict
from django.utils.functional import Promise # noqa
from django.utils.http import urlencode # noqa
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
import six
from horizon_lib import exceptions
from horizon_lib import messages
from horizon_lib.utils import functions
from horizon_lib.utils import html
LOG = logging.getLogger(__name__)
# For Bootstrap integration; can be overridden in settings.
ACTION_CSS_CLASSES = ("btn", "btn-default", "btn-sm")
STRING_SEPARATOR = "__"
class BaseActionMetaClass(type):
"""Metaclass for adding all actions options from inheritance tree
to action.
This way actions can inherit from each other but still use
the class attributes DSL. Meaning, all attributes of Actions are
defined as class attributes, but in the background, it will be used as
parameters for the initializer of the object. The object is then
initialized clean way. Similar principle is used in DataTableMetaclass.
"""
def __new__(mcs, name, bases, attrs):
# Options of action are set ass class attributes, loading them.
options = {}
if attrs:
options = attrs
# Iterate in reverse to preserve final order
for base in bases[::-1]:
# It actually throws all super classes away except immediate
# superclass. But it's fine, immediate super-class base_options
# includes everything because superclasses was created also by
# this metaclass. Same principle is used in DataTableMetaclass.
if hasattr(base, 'base_options') and base.base_options:
base_options = {}
# Updating options by superclasses.
base_options.update(base.base_options)
# Updating superclass options by actual class options.
base_options.update(options)
options = base_options
# Saving all options to class attribute, this will be used for
# instantiating of the specific Action.
attrs['base_options'] = options
return type.__new__(mcs, name, bases, attrs)
def __call__(cls, *args, **kwargs):
cls.base_options.update(kwargs)
# Adding cls.base_options to each init call.
klass = super(BaseActionMetaClass, cls).__call__(
*args, **cls.base_options)
return klass
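# Illustrative consequence of the metaclass above (class names are assumptions):
# declaring
#   class MyAction(Action):
#       name = "my_action"
#       verbose_name = "My Action"
# stores those class attributes in MyAction.base_options, and MyAction() then
# receives them as keyword arguments to __init__ via __call__ above.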
@six.add_metaclass(BaseActionMetaClass)
class BaseAction(html.HTMLElement):
"""Common base class for all ``Action`` classes."""
def __init__(self, **kwargs):
super(BaseAction, self).__init__()
self.datum = kwargs.get('datum', None)
self.table = kwargs.get('table', None)
self.handles_multiple = kwargs.get('handles_multiple', False)
self.requires_input = kwargs.get('requires_input', False)
self.preempt = kwargs.get('preempt', False)
self.policy_rules = kwargs.get('policy_rules', None)
def data_type_matched(self, datum):
"""Method to see if the action is allowed for a certain type of data.
Only affects mixed data type tables.
"""
if datum:
action_data_types = getattr(self, "allowed_data_types", [])
# If the data types of this action is empty, we assume it accepts
# all kinds of data and this method will return True.
if action_data_types:
datum_type = getattr(datum, self.table._meta.data_type_name,
None)
if datum_type and (datum_type not in action_data_types):
return False
return True
def get_policy_target(self, request, datum):
"""Provide the target for a policy request.
This method is meant to be overridden to return target details when
one of the policy checks requires them. E.g., {"user_id": datum.id}
"""
return {}
def allowed(self, request, datum):
"""Determine whether this action is allowed for the current request.
This method is meant to be overridden with more specific checks.
"""
return True
def _allowed(self, request, datum):
policy_check = getattr(settings, "POLICY_CHECK_FUNCTION", None)
if policy_check and self.policy_rules:
target = self.get_policy_target(request, datum)
return (policy_check(self.policy_rules, request, target) and
self.allowed(request, datum))
return self.allowed(request, datum)
def update(self, request, datum):
"""Allows per-action customization based on current conditions.
This is particularly useful when you wish to create a "toggle"
action that will be rendered differently based on the value of an
attribute on the current row's data.
By default this method is a no-op.
"""
pass
def get_default_classes(self):
"""Returns a list of the default classes for the action. Defaults to
``["btn", "btn-default", "btn-sm"]``.
"""
return getattr(settings, "ACTION_CSS_CLASSES", ACTION_CSS_CLASSES)
def get_default_attrs(self):
"""Returns a list of the default HTML attributes for the action.
Defaults to returning an ``id`` attribute with the value
``{{ table.name }}__action_{{ action.name }}__{{ creation counter }}``.
"""
if self.datum is not None:
bits = (self.table.name,
"row_%s" % self.table.get_object_id(self.datum),
"action_%s" % self.name)
else:
bits = (self.table.name, "action_%s" % self.name)
return {"id": STRING_SEPARATOR.join(bits)}
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.name)
def associate_with_table(self, table):
self.table = table
class Action(BaseAction):
"""Represents an action which can be taken on this table's data.
.. attribute:: name
Required. The short name or "slug" representing this
action. This name should not be changed at runtime.
.. attribute:: verbose_name
A descriptive name used for display purposes. Defaults to the
value of ``name`` with the first letter of each word capitalized.
.. attribute:: verbose_name_plural
Used like ``verbose_name`` in cases where ``handles_multiple`` is
``True``. Defaults to ``verbose_name`` with the letter "s" appended.
.. attribute:: method
The HTTP method for this action. Defaults to ``POST``. Other methods
may or may not succeed currently.
.. attribute:: requires_input
Boolean value indicating whether or not this action can be taken
without any additional input (e.g. an object id). Defaults to ``True``.
.. attribute:: preempt
Boolean value indicating whether this action should be evaluated in
the period after the table is instantiated but before the data has
been loaded.
This can allow actions which don't need access to the full table data
to bypass any API calls and processing which would otherwise be
required to load the table.
.. attribute:: allowed_data_types
A list that contains the allowed data types of the action. If the
datum's type is in this list, the action will be shown on the row
for the datum.
Default to be an empty list (``[]``). When set to empty, the action
will accept any kind of data.
.. attribute:: policy_rules
list of scope and rule tuples to do policy checks on, the
composition of which is (scope, rule)
scope: service type managing the policy for action
rule: string representing the action to be checked
for a policy that requires a single rule check:
policy_rules should look like
"(("compute", "compute:create_instance"),)"
for a policy that requires multiple rule checks:
rules should look like
"(("identity", "identity:list_users"),
("identity", "identity:list_roles"))"
At least one of the following methods must be defined:
.. method:: single(self, data_table, request, object_id)
Handler for a single-object action.
.. method:: multiple(self, data_table, request, object_ids)
Handler for multi-object actions.
.. method:: handle(self, data_table, request, object_ids)
If a single function can work for both single-object and
multi-object cases then simply providing a ``handle`` function
will internally route both ``single`` and ``multiple`` requests
to ``handle`` with the calls from ``single`` being transformed
into a list containing only the single object id.
"""
def __init__(self, single_func=None, multiple_func=None, handle_func=None,
attrs=None, **kwargs):
super(Action, self).__init__(**kwargs)
self.method = kwargs.get('method', "POST")
self.requires_input = kwargs.get('requires_input', True)
self.verbose_name = kwargs.get('verbose_name', self.name.title())
self.verbose_name_plural = kwargs.get('verbose_name_plural',
"%ss" % self.verbose_name)
self.allowed_data_types = kwargs.get('allowed_data_types', [])
self.icon = kwargs.get('icon', None)
if attrs:
self.attrs.update(attrs)
# Don't set these if they're None
if single_func:
self.single = single_func
if multiple_func:
self.multiple = multiple_func
if handle_func:
self.handle = handle_func
# Ensure we have the appropriate methods
has_handler = hasattr(self, 'handle') and callable(self.handle)
has_single = hasattr(self, 'single') and callable(self.single)
has_multiple = hasattr(self, 'multiple') and callable(self.multiple)
if has_handler or has_multiple:
self.handles_multiple = True
if not has_handler and (not has_single or has_multiple):
cls_name = self.__class__.__name__
raise NotImplementedError('You must define either a "handle" '
'method or a "single" or "multiple" '
'method on %s.' % cls_name)
if not has_single:
def single(self, data_table, request, object_id):
return self.handle(data_table, request, [object_id])
self.single = types.MethodType(single, self)
if not has_multiple and self.handles_multiple:
def multiple(self, data_table, request, object_ids):
return self.handle(data_table, request, object_ids)
self.multiple = types.MethodType(multiple, self)
def get_param_name(self):
"""Returns the full POST parameter name for this action.
Defaults to
``{{ table.name }}__{{ action.name }}``.
"""
return "__".join([self.table.name, self.name])
class LinkAction(BaseAction):
"""A table action which is simply a link rather than a form POST.
.. attribute:: name
Required. The short name or "slug" representing this
action. This name should not be changed at runtime.
.. attribute:: verbose_name
A string which will be rendered as the link text. (Required)
.. attribute:: url
A string or a callable which resolves to a url to be used as the link
target. You must either define the ``url`` attribute or override
the ``get_link_url`` method on the class.
.. attribute:: allowed_data_types
A list that contains the allowed data types of the action. If the
datum's type is in this list, the action will be shown on the row
for the datum.
Defaults to be an empty list (``[]``). When set to empty, the action
will accept any kind of data.
"""
# class attribute name is used for ordering of Actions in table
name = "link"
ajax = False
def __init__(self, attrs=None, **kwargs):
super(LinkAction, self).__init__(**kwargs)
self.method = kwargs.get('method', "GET")
self.bound_url = kwargs.get('bound_url', None)
self.name = kwargs.get('name', self.name)
self.verbose_name = kwargs.get('verbose_name', self.name.title())
self.url = kwargs.get('url', None)
self.allowed_data_types = kwargs.get('allowed_data_types', [])
self.icon = kwargs.get('icon', None)
self.kwargs = kwargs
if not kwargs.get('verbose_name', None):
raise NotImplementedError('A LinkAction object must have a '
'verbose_name attribute.')
if attrs:
self.attrs.update(attrs)
if self.ajax:
self.classes = list(self.classes) + ['ajax-update']
def get_ajax_update_url(self):
table_url = self.table.get_absolute_url()
params = urlencode(
SortedDict([("action", self.name), ("table", self.table.name)])
)
return "%s?%s" % (table_url, params)
def render(self):
return render_to_string(("horizon_lib/common/"
"_data_table_table_action.html"),
{"action": self})
def associate_with_table(self, table):
super(LinkAction, self).associate_with_table(table)
if self.ajax:
self.attrs['data-update-url'] = self.get_ajax_update_url()
def get_link_url(self, datum=None):
"""Returns the final URL based on the value of ``url``.
If ``url`` is callable it will call the function.
If not, it will then try to call ``reverse`` on ``url``.
Failing that, it will simply return the value of ``url`` as-is.
When called for a row action, the current row data object will be
passed as the first parameter.
"""
if not self.url:
raise NotImplementedError('A LinkAction class must have a '
'url attribute or define its own '
'get_link_url method.')
if callable(self.url):
return self.url(datum, **self.kwargs)
try:
if datum:
obj_id = self.table.get_object_id(datum)
return urlresolvers.reverse(self.url, args=(obj_id,))
else:
return urlresolvers.reverse(self.url)
except urlresolvers.NoReverseMatch as ex:
LOG.info('No reverse found for "%s": %s' % (self.url, ex))
return self.url
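# A minimal LinkAction sketch (the class and the URL pattern name are
# assumptions); the url attribute is resolved via reverse() in get_link_url above.
class _ExampleEditLink(LinkAction):
    name = "edit"
    verbose_name = "Edit"
    url = "horizon:project:instances:update"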
class FilterAction(BaseAction):
"""A base class representing a filter action for a table.
.. attribute:: name
The short name or "slug" representing this action. Defaults to
``"filter"``.
.. attribute:: verbose_name
A descriptive name used for display purposes. Defaults to the
value of ``name`` with the first letter of each word capitalized.
.. attribute:: param_name
A string representing the name of the request parameter used for the
search term. Default: ``"q"``.
.. attribute: filter_type
A string representing the type of this filter. If this is set to
``"server"`` then ``filter_choices`` must also be provided.
Default: ``"query"``.
.. attribute: filter_choices
Required for server type filters. A tuple of tuples representing the
filter options. Tuple composition should evaluate to (string, string,
boolean), representing the filter parameter, display value, and whether
or not it should be applied to the API request as an API query
attribute. API type filters do not need to be accounted for in the
filter method since the API will do the filtering. However, server
type filters in general will need to be performed in the filter method.
By default this attribute is not provided.
.. attribute: needs_preloading
If True, the filter function will be called for the initial
GET request with an empty ``filter_string``, regardless of the
value of ``method``.
"""
# TODO(gabriel): The method for a filter action should be a GET,
# but given the form structure of the table that's currently impossible.
# At some future date this needs to be reworked to get the filter action
# separated from the table's POST form.
# class attribute name is used for ordering of Actions in table
name = "filter"
def __init__(self, **kwargs):
super(FilterAction, self).__init__(**kwargs)
self.method = kwargs.get('method', "POST")
self.name = kwargs.get('name', self.name)
self.verbose_name = kwargs.get('verbose_name', _("Filter"))
self.filter_type = kwargs.get('filter_type', "query")
self.filter_choices = kwargs.get('filter_choices')
self.needs_preloading = kwargs.get('needs_preloading', False)
self.param_name = kwargs.get('param_name', 'q')
self.icon = "search"
if self.filter_type == 'server' and self.filter_choices is None:
raise NotImplementedError(
'A FilterAction object with the '
'filter_type attribute set to "server" must also have a '
'filter_choices attribute.')
def get_param_name(self):
"""Returns the full query parameter name for this action.
Defaults to
``{{ table.name }}__{{ action.name }}__{{ action.param_name }}``.
"""
return "__".join([self.table.name, self.name, self.param_name])
def assign_type_string(self, table, data, type_string):
for datum in data:
setattr(datum, table._meta.data_type_name, type_string)
def data_type_filter(self, table, data, filter_string):
filtered_data = []
for data_type in table._meta.data_types:
func_name = "filter_%s_data" % data_type
filter_func = getattr(self, func_name, None)
if not filter_func and not callable(filter_func):
# The check of filter function implementation should happen
# in the __init__. However, the current workflow of DataTable
# and actions won't allow it. Need to be fixed in the future.
cls_name = self.__class__.__name__
raise NotImplementedError("You must define a %s method "
"for %s data type in %s." %
(func_name, data_type, cls_name))
_data = filter_func(table, data, filter_string)
self.assign_type_string(table, _data, data_type)
filtered_data.extend(_data)
return filtered_data
def filter(self, table, data, filter_string):
"""Provides the actual filtering logic.
This method must be overridden by subclasses and return
the filtered data.
"""
return data
def is_api_filter(self, filter_field):
"""Determine if the given filter field should be used as an
API filter.
"""
if self.filter_type == 'server':
for choice in self.filter_choices:
if (choice[0] == filter_field and len(choice) > 2 and
choice[2] is True):
return True
return False
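# A sketch of a server-type filter as described in the docstring above (the
# field names are assumptions): the first choice is passed through as an API
# query attribute, the second must be handled in filter().
class _ExampleServerFilter(FilterAction):
    filter_type = "server"
    filter_choices = (("name", "Name", True),
                      ("status", "Status", False))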
class FixedFilterAction(FilterAction):
"""A filter action with fixed buttons."""
def __init__(self, **kwargs):
super(FixedFilterAction, self).__init__(**kwargs)
self.filter_type = kwargs.get('filter_type', "fixed")
self.needs_preloading = kwargs.get('needs_preloading', True)
self.fixed_buttons = self.get_fixed_buttons()
self.filter_string = ''
def filter(self, table, images, filter_string):
self.filter_string = filter_string
categories = self.categorize(table, images)
self.categories = defaultdict(list, categories)
for button in self.fixed_buttons:
button['count'] = len(self.categories[button['value']])
if not filter_string:
return images
return self.categories[filter_string]
def get_fixed_buttons(self):
"""Returns a list of dictionaries describing the fixed buttons
to use for filtering.
Each list item should be a dict with the following keys:
* ``text``: Text to display on the button
* ``icon``: Icon class for icon element (inserted before text).
* ``value``: Value returned when the button is clicked. This value is
passed to ``filter()`` as ``filter_string``.
"""
return []
def categorize(self, table, images):
"""Override to separate images into categories.
Return a dict with a key for the value of each fixed button,
and a value that is a list of images in that category.
"""
return {}
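# A fixed-filter sketch (the button values and the is_up attribute are
# assumptions used only for illustration):
class _ExampleFixedFilter(FixedFilterAction):
    def get_fixed_buttons(self):
        return [{'text': 'Up', 'icon': 'fa-arrow-up', 'value': 'up'},
                {'text': 'Down', 'icon': 'fa-arrow-down', 'value': 'down'}]
    def categorize(self, table, rows):
        return {'up': [r for r in rows if r.is_up],
                'down': [r for r in rows if not r.is_up]}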
class BatchAction(Action):
"""A table action which takes batch action on one or more
objects. This action should not require user input on a
per-object basis.
.. attribute:: name
An internal name for this action.
.. method:: action_present
Method accepting an integer/long parameter and returning the display
forms of the name properly pluralised (depending on the integer) and
translated in a string or tuple/list.
.. attribute:: action_present (PendingDeprecation)
String or tuple/list. The display forms of the name.
Should be a transitive verb, capitalized and translated. ("Delete",
"Rotate", etc.) If tuple or list - then setting
self.current_present_action = n will set the current active item
from the list(action_present[n])
You can pass a complete action name including 'data_type' by specifying
'%(data_type)s' substitution in action_present ("Delete %(data_type)s").
Otherwise a complete action name is a format of "<action> <data_type>".
<data_type> is determined based on the number of items.
By passing a complete action name you allow translators to control
the order of words as they want.
NOTE: action_present attribute is bad for translations and should be
avoided. Please use the action_present method instead.
This form is kept for legacy.
.. method:: action_past
Method accepting an integer/long parameter and returning the display
forms of the name properly pluralised (depending on the integer) and
translated in a string or tuple/list.
.. attribute:: action_past (PendingDeprecation)
String or tuple/list. The past tense of action_present. ("Deleted",
"Rotated", etc.) If tuple or list - then
setting self.current_past_action = n will set the current active item
from the list(action_past[n])
NOTE: action_past attribute is bad for translations and should be
avoided. Please use the action_past method instead.
This form is kept for legacy.
.. attribute:: data_type_singular
Optional display name (if the data_type method is not defined) for the
type of data that receives the action. ("Key Pair", "Floating IP", etc.)
.. attribute:: data_type_plural
Optional plural word (if the data_type method is not defined) for the
type of data being acted on. Defaults to appending 's'. Relying on the
default is bad for translations and should not be done, so it's absence
will raise a DeprecationWarning. It is currently kept as optional for
legacy code.
NOTE: data_type_singular and data_type_plural attributes are bad for
translations and should be avoided. Please use the action_present and
action_past methods. This form is kept for legacy.
.. attribute:: success_url
Optional location to redirect after completion of the delete
action. Defaults to the current page.
"""
def __init__(self, **kwargs):
super(BatchAction, self).__init__(**kwargs)
action_present_method = False
if hasattr(self, 'action_present'):
if callable(self.action_present):
action_present_method = True
else:
warnings.warn(PendingDeprecationWarning(
'The %s BatchAction class must have an action_present '
'method instead of attribute.' % self.__class__.__name__
))
action_past_method = False
if hasattr(self, 'action_past'):
if callable(self.action_past):
action_past_method = True
else:
warnings.warn(PendingDeprecationWarning(
'The %s BatchAction class must have an action_past '
'method instead of attribute.' % self.__class__.__name__
))
action_methods = action_present_method and action_past_method
has_action_method = action_present_method or action_past_method
if has_action_method and not action_methods:
raise NotImplementedError(
'The %s BatchAction class must have both action_past and '
'action_present methods.' % self.__class__.__name__
)
if not action_methods:
if not kwargs.get('data_type_singular'):
raise NotImplementedError(
'The %s BatchAction class must have a data_type_singular '
'attribute when action_past and action_present attributes '
'are used.' % self.__class__.__name__
)
self.data_type_singular = kwargs.get('data_type_singular')
self.data_type_plural = kwargs.get('data_type_plural',
self.data_type_singular + 's')
# TODO(ygbo): get rid of self.use_action_method once action_present and
# action_past are changed to methods handling plurals.
self.use_action_method = action_methods
self.success_url = kwargs.get('success_url', None)
# If setting a default name, don't initialize it too early
self.verbose_name = kwargs.get('verbose_name', self._get_action_name)
self.verbose_name_plural = kwargs.get(
'verbose_name_plural',
lambda: self._get_action_name('plural'))
self.current_present_action = 0
self.current_past_action = 0
# Keep record of successfully handled objects
self.success_ids = []
def _allowed(self, request, datum=None):
# Override the default internal action method to prevent batch
# actions from appearing on tables with no data.
if not self.table.data and not datum:
return False
return super(BatchAction, self)._allowed(request, datum)
def _get_action_name(self, items=None, past=False):
"""Builds combinations like 'Delete Object' and 'Deleted
Objects' based on the number of items and `past` flag.
:param items:
A list or tuple of items (or container with a __len__ method) to
count the number of concerned items for which this method is
called.
When this method is called for a single item (by the BatchAction
itself), this parameter can be omitted and the number of items
will be considered as "one".
If we want to evaluate to "zero" this parameter must not be omitted
(and should be an empty container).
:param past:
Boolean flag indicating if the action took place in the past.
By default a present action is considered.
"""
action_type = "past" if past else "present"
if items is None:
# Called without items parameter (by a single instance.)
count = 1
else:
count = len(items)
# TODO(ygbo): get rid of self.use_action_method once action_present and
# action_past are changed to methods handling plurals.
action_attr = getattr(self, "action_%s" % action_type)
if self.use_action_method:
action_attr = action_attr(count)
if isinstance(action_attr, (basestring, Promise)):
action = action_attr
else:
toggle_selection = getattr(self, "current_%s_action" % action_type)
action = action_attr[toggle_selection]
if self.use_action_method:
return action
# TODO(ygbo): get rid of all this bellow once action_present and
# action_past are changed to methods handling plurals.
data_type = ungettext_lazy(
self.data_type_singular,
self.data_type_plural,
count
)
if '%(data_type)s' in action:
# If full action string is specified, use action as format string.
msgstr = action
else:
if action_type == "past":
msgstr = pgettext_lazy("past", "%(action)s %(data_type)s")
else:
msgstr = pgettext_lazy("present", "%(action)s %(data_type)s")
return msgstr % {'action': action, 'data_type': data_type}
def action(self, request, datum_id):
"""Required. Accepts a single object id and performs the specific
action.
Return values are discarded, errors raised are caught and logged.
"""
def update(self, request, datum):
"""Switches the action verbose name, if needed."""
if getattr(self, 'action_present', False):
self.verbose_name = self._get_action_name()
self.verbose_name_plural = self._get_action_name('plural')
def get_success_url(self, request=None):
"""Returns the URL to redirect to after a successful action."""
if self.success_url:
return self.success_url
return request.get_full_path()
def handle(self, table, request, obj_ids):
action_success = []
action_failure = []
action_not_allowed = []
for datum_id in obj_ids:
datum = table.get_object_by_id(datum_id)
datum_display = table.get_object_display(datum) or _("N/A")
if not table._filter_action(self, request, datum):
action_not_allowed.append(datum_display)
LOG.info('Permission denied to %s: "%s"' %
(self._get_action_name(past=True).lower(),
datum_display))
continue
try:
self.action(request, datum_id)
# Call update to invoke changes if needed
self.update(request, datum)
action_success.append(datum_display)
self.success_ids.append(datum_id)
LOG.info('%s: "%s"' %
(self._get_action_name(past=True), datum_display))
except Exception as ex:
# Handle the exception but silence it since we'll display
# an aggregate error message later. Otherwise we'd get
# multiple error messages displayed to the user.
if getattr(ex, "_safe_message", None):
ignore = False
else:
ignore = True
action_failure.append(datum_display)
exceptions.handle(request, ignore=ignore)
# Begin with success message class, downgrade to info if problems.
success_message_level = messages.success
if action_not_allowed:
msg = _('You are not allowed to %(action)s: %(objs)s')
params = {"action":
self._get_action_name(action_not_allowed).lower(),
"objs": functions.lazy_join(", ", action_not_allowed)}
messages.error(request, msg % params)
success_message_level = messages.info
if action_failure:
msg = _('Unable to %(action)s: %(objs)s')
params = {"action": self._get_action_name(action_failure).lower(),
"objs": functions.lazy_join(", ", action_failure)}
messages.error(request, msg % params)
success_message_level = messages.info
if action_success:
msg = _('%(action)s: %(objs)s')
params = {"action":
self._get_action_name(action_success, past=True),
"objs": functions.lazy_join(", ", action_success)}
success_message_level(request, msg % params)
return shortcuts.redirect(self.get_success_url(request))
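# A BatchAction sketch using the action_present/action_past *methods* the
# docstring above recommends (the class and the omitted API call are
# assumptions):
class _ExampleRebootAction(BatchAction):
    name = "reboot"
    def action_present(self, count):
        return ungettext_lazy(u"Reboot Instance", u"Reboot Instances", count)
    def action_past(self, count):
        return ungettext_lazy(u"Rebooted Instance", u"Rebooted Instances", count)
    def action(self, request, obj_id):
        pass  # the real implementation would call the service's reboot API here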
class DeleteAction(BatchAction):
"""A table action used to perform delete operations on table data.
.. attribute:: name
A short name or "slug" representing this action.
Defaults to 'delete'
.. method:: action_present
Method accepting an integer/long parameter and returning the display
forms of the name properly pluralised (depending on the integer) and
translated in a string or tuple/list.
.. attribute:: action_present (PendingDeprecation)
A string containing the transitive verb describing the delete action.
Defaults to 'Delete'
NOTE: action_present attribute is bad for translations and should be
avoided. Please use the action_present method instead.
This form is kept for legacy.
.. method:: action_past
Method accepting an integer/long parameter and returning the display
forms of the name properly pluralised (depending on the integer) and
translated in a string or tuple/list.
.. attribute:: action_past (PendingDeprecation)
A string set to the past tense of action_present.
Defaults to 'Deleted'
NOTE: action_past attribute is bad for translations and should be
avoided. Please use the action_past method instead.
This form is kept for legacy.
.. attribute:: data_type_singular (PendingDeprecation)
A string used to name the data to be deleted.
.. attribute:: data_type_plural (PendingDeprecation)
Optional. Plural of ``data_type_singular``.
Defaults to ``data_type_singular`` appended with an 's'. Relying on
the default is bad for translations and should not be done, so it's
absence will raise a DeprecationWarning. It is currently kept as
optional for legacy code.
NOTE: data_type_singular and data_type_plural attributes are bad for
translations and should be avoided. Please use the action_present and
action_past methods. This form is kept for legacy.
"""
name = "delete"
def __init__(self, **kwargs):
super(DeleteAction, self).__init__(**kwargs)
self.name = kwargs.get('name', self.name)
if not hasattr(self, "action_present"):
self.action_present = kwargs.get('action_present', _("Delete"))
if not hasattr(self, "action_past"):
self.action_past = kwargs.get('action_past', _("Deleted"))
self.icon = "remove"
def action(self, request, obj_id):
"""Action entry point. Overrides base class' action method.
Accepts a single object id passing it over to the delete method
responsible for the object's destruction.
"""
return self.delete(request, obj_id)
def delete(self, request, obj_id):
"""Required. Deletes an object referenced by obj_id.
Override to provide delete functionality specific to your data.
"""
def get_default_classes(self):
"""Appends ``btn-danger`` to the action's default css classes.
This method ensures the corresponding button is highlighted
as a trigger for a potentially dangerous action.
"""
classes = super(DeleteAction, self).get_default_classes()
classes += ("btn-danger",)
return classes
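# Illustrative sketch (not part of the original module): the DeleteAction
# docstring above asks subclasses to supply ``action_present``/``action_past``
# methods that return pluralised, translated labels, plus a ``delete`` method.
# The class name, label strings and the delete body below are hypothetical
# placeholders only.
from django.utils.translation import ungettext_lazy
class _ExampleDeleteAction(DeleteAction):
    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete Item",
            u"Delete Items",
            count)
    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Deleted Item",
            u"Deleted Items",
            count)
    def delete(self, request, obj_id):
        # a real subclass would call the service API that removes obj_id;
        # nothing is deleted in this sketch
        pass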
class UpdateAction(object):
"""A table action for cell updates by inline editing."""
name = "update"
action_present = _("Update")
action_past = _("Updated")
data_type_singular = "update"
def action(self, request, datum, obj_id, cell_name, new_cell_value):
self.update_cell(request, datum, obj_id, cell_name, new_cell_value)
def update_cell(self, request, datum, obj_id, cell_name, new_cell_value):
"""Method for saving data of the cell.
        This method must implement the saving logic of the inline-edited table
        cell.
"""
def allowed(self, request, datum, cell):
"""Determine whether updating is allowed for the current request.
This method is meant to be overridden with more specific checks.
Data of the row and of the cell are passed to the method.
"""
return True
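# Illustrative sketch (not part of the original module): a concrete inline
# update action only has to fill in ``update_cell`` with whatever call
# persists the value; this sketch simply writes it back onto the row object
# and treats rows in a hypothetical "locked" status as non-editable.
class _ExampleUpdateNameAction(UpdateAction):
    def update_cell(self, request, datum, obj_id, cell_name, new_cell_value):
        # a real implementation would call the owning service's API instead
        setattr(datum, cell_name, new_cell_value)
    def allowed(self, request, datum, cell):
        return getattr(datum, "status", None) != "locked"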
| apache-2.0 | 7,165,437,380,666,320,000 | 38.906709 | 79 | 0.614168 | false |
marwoodandrew/superdesk-aap | server/aap/publish/formatters/anpa_formatter_test.py | 1 | 15888 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014, 2015 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from datetime import datetime
import io
from apps.publish import init_app
from superdesk.publish.subscribers import SUBSCRIBER_TYPES
from superdesk.tests import TestCase
from .aap_formatter_common import map_priority
from .anpa_formatter import AAPAnpaFormatter
class ANPAFormatterTest(TestCase):
subscribers = [{"_id": "1", "name": "notes", "subscriber_type": SUBSCRIBER_TYPES.WIRE, "media_type": "media",
"is_active": True, "sequence_num_settings": {"max": 10, "min": 1},
"destinations": [{"name": "ANPA", "delivery_type": "email", "format": "ANPA",
"config": {"recipients": "[email protected]"}
}]
}]
article = {
'source': 'AAP',
'_updated': datetime.strptime('2015-05-29 05:46', '%Y-%m-%d %H:%M'),
'anpa_category': [{'qcode': 'a'}],
'headline': 'This is a test headline',
'slugline': 'slugline',
'subject': [{'qcode': '02011001'}],
'anpa_take_key': 'take_key',
'urgency': 5,
'unique_id': '1',
'body_html': '<p>The story body</p>',
'type': 'text',
'word_count': '1',
'priority': 1,
'task': {'desk': 1},
'body_footer': '<p>call helpline 999 if you are planning<br>to quit smoking</p>'
}
desks = [{'_id': 1, 'name': 'National'},
{'_id': 2, 'name': 'Sports'},
{'_id': 3, 'name': 'Finance'}]
vocab = [{'_id': 'categories', 'items': [
{'is_active': True, 'name': 'Overseas Sport', 'qcode': 'S', 'subject': '15000000'},
{'is_active': True, 'name': 'Finance', 'qcode': 'F', 'subject': '04000000'},
{'is_active': True, 'name': 'General News', 'qcode': 'A'},
{'is_active': True, 'name': 'bogus', 'qcode': 'b'}]}]
def setUp(self):
self.app.data.insert('subscribers', self.subscribers)
self.app.data.insert('desks', self.desks)
self.app.data.insert('vocabularies', self.vocab)
init_app(self.app)
def testANPAFormatter(self):
subscriber = self.app.data.find('subscribers', None, None)[0]
f = AAPAnpaFormatter()
resp = f.format(self.article.copy(), subscriber, ['axx'])[0]
seq = resp['published_seq_num']
item = resp['encoded_item']
self.assertGreater(int(seq), 0)
lines = io.StringIO(item.decode())
line = lines.readline()
self.assertTrue('axx' in line[1:])
line = lines.readline()
self.assertEqual(line[:3], '') # Skip the sequence
line = lines.readline()
self.assertEqual(line[0:20], 'f a bc-slugline ') # skip the date
line = lines.readline()
self.assertEqual(line.strip(), 'This is a test headline')
line = lines.readline()
self.assertEqual(line.strip(), 'slugline take_key')
line = lines.readline()
self.assertEqual(line.strip(), 'The story body')
line = lines.readline()
self.assertEqual(line.strip(), 'call helpline 999 if you are planning')
line = lines.readline()
self.assertEqual(line.strip(), 'to quit smoking')
lines.readline()
line = lines.readline()
self.assertEqual(line.strip(), 'AAP')
def testANPAWithNoSelectorsFormatter(self):
subscriber = self.app.data.find('subscribers', None, None)[0]
subscriber['name'] = 'not notes'
f = AAPAnpaFormatter()
resp = f.format(self.article.copy(), subscriber)[0]
seq = resp['published_seq_num']
item = resp['encoded_item']
self.assertGreater(int(seq), 0)
lines = io.StringIO(item.decode())
line = lines.readline()
self.assertEqual(line[:3], '') # Skip the sequence
line = lines.readline()
self.assertEqual(line[0:20], 'f a bc-slugline ') # skip the date
line = lines.readline()
self.assertEqual(line.strip(), 'This is a test headline')
line = lines.readline()
self.assertEqual(line.strip(), 'slugline take_key')
line = lines.readline()
self.assertEqual(line.strip(), 'The story body')
line = lines.readline()
self.assertEqual(line.strip(), 'call helpline 999 if you are planning')
line = lines.readline()
self.assertEqual(line.strip(), 'to quit smoking')
lines.readline()
line = lines.readline()
self.assertEqual(line.strip(), 'AAP')
def testANPAWithBylineFormatter(self):
subscriber = self.app.data.find('subscribers', None, None)[0]
subscriber['name'] = 'not notes'
byline_article = dict(self.article.copy())
byline_article['byline'] = '<p>Joe Blogs</p>'
f = AAPAnpaFormatter()
resp = f.format(byline_article, subscriber)[0]
seq = resp['published_seq_num']
item = resp['encoded_item']
self.assertGreater(int(seq), 0)
lines = io.StringIO(item.decode())
line = lines.readline()
self.assertEqual(line[:3], '') # Skip the sequence
line = lines.readline()
self.assertEqual(line[0:20], 'f a bc-slugline ') # skip the date
line = lines.readline()
self.assertEqual(line.strip(), 'This is a test headline')
line = lines.readline()
self.assertEqual(line.strip(), 'slugline take_key')
line = lines.readline()
self.assertEqual(line.strip(), 'Joe Blogs')
line = lines.readline()
self.assertEqual(line.strip(), 'The story body')
line = lines.readline()
self.assertEqual(line.strip(), 'call helpline 999 if you are planning')
line = lines.readline()
self.assertEqual(line.strip(), 'to quit smoking')
lines.readline()
line = lines.readline()
self.assertEqual(line.strip(), 'AAP')
def testServiceLevelFormatter(self):
subscriber = self.app.data.find('subscribers', None, None)[0]
subscriber['name'] = 'not notes'
service_level_article = dict(self.article.copy())
service_level_article['genre'] = [{'qcode': 'Results (sport)'}]
service_level_article['anpa_category'] = [{'qcode': 'S'}]
f = AAPAnpaFormatter()
resp = f.format(service_level_article, subscriber)[0]
seq = resp['published_seq_num']
item = resp['encoded_item']
self.assertGreater(int(seq), 0)
lines = io.StringIO(item.decode())
line = lines.readline()
self.assertEqual(line[:3], '') # Skip the sequence
line = lines.readline()
self.assertEqual(line[0:20], 'f s bc-slugline ') # skip the date
def testMultipleCategoryFormatter(self):
subscriber = self.app.data.find('subscribers', None, None)[0]
multi_article = dict(self.article.copy())
multi_article.pop('anpa_category')
multi_article['anpa_category'] = [{'qcode': 'a'}, {'qcode': 'b'}]
f = AAPAnpaFormatter()
docs = f.format(multi_article, subscriber, ['Axy', 'Bkl'])
self.assertEqual(len(docs), 2)
cat = 'a'
for doc in docs:
item = doc['encoded_item']
lines = io.StringIO(item.decode())
line = lines.readline()
line = lines.readline()
line = lines.readline()
self.assertEqual(line[2:3], cat) # skip the date
cat = 'b'
def test_process_headline_empty_sequence_short_headline(self):
f = AAPAnpaFormatter()
article = {'headline': '1234567890' * 5}
anpa = []
f._process_headline(anpa, article, b'a')
self.assertEqual(anpa[0], b'12345678901234567890123456789012345678901234567890')
def test_headline_with_markup(self):
f = AAPAnpaFormatter()
article = {'headline': '<p>headline</p>'}
anpa = []
f._process_headline(anpa, article, b'a')
self.assertEqual(anpa[0], b'headline')
def test_process_headline_empty_sequence_long_headline(self):
f = AAPAnpaFormatter()
article = {'headline': '1234567890' * 7}
anpa = []
f._process_headline(anpa, article, b'a')
self.assertEqual(anpa[0], b'1234567890123456789012345678901234567890123456789012345678901234')
def test_process_headline_with_sequence_short_headline(self):
f = AAPAnpaFormatter()
article = {'headline': '1234567890=7', 'sequence': 7}
anpa = []
f._process_headline(anpa, article, b'a')
self.assertEqual(anpa[0], b'1234567890=7')
def test_process_headline_with_sequence_long_headline(self):
f = AAPAnpaFormatter()
article1 = {'headline': '1234567890' * 7 + '=7', 'sequence': 7}
anpa = []
f._process_headline(anpa, article1, b'a')
self.assertEqual(anpa[0], b'12345678901234567890123456789012345678901234567890123456789012=7')
article2 = {'headline': '1234567890' * 7 + '=7', 'sequence': 17}
anpa = []
f._process_headline(anpa, article2, b'a')
self.assertEqual(anpa[0], b'1234567890123456789012345678901234567890123456789012345678901=17')
def test_process_headline_locator_inject(self):
f = AAPAnpaFormatter()
article3 = {'headline': '1234567890' * 3, 'place': [{'qcode': 'VIC', 'name': 'VIC'}]}
anpa = []
f._process_headline(anpa, article3, b'a')
self.assertEqual(anpa[0], b'VIC:123456789012345678901234567890')
def test_map_priority(self):
self.assertEqual('f', map_priority(1))
self.assertEqual('u', map_priority(2))
self.assertEqual('b', map_priority(3))
self.assertEqual('r', map_priority(4))
self.assertEqual('r', map_priority(5))
self.assertEqual('r', map_priority(6))
self.assertEqual('r', map_priority(None))
self.assertEqual('r', map_priority(7))
self.assertEqual('r', map_priority(''))
def test_dateline_with_empty_text(self):
f = AAPAnpaFormatter()
subscriber = self.app.data.find('subscribers', None, None)[0]
item = self.article.copy()
item.update({'dateline': {'text': None}})
resp = f.format(item, subscriber)[0]
self.assertTrue('The story body' in resp['encoded_item'].decode('ascii'))
def test_dateline_injection(self):
f = AAPAnpaFormatter()
subscriber = self.app.data.find('subscribers', None, None)[0]
item = self.article.copy()
item.update({'dateline': {'text': 'SYDNEY, June 27 AAP -'}})
resp = f.format(item, subscriber)[0]
out = resp['encoded_item']
lines = io.StringIO(out.decode())
self.assertTrue(lines.getvalue().find('SYDNEY, June 27 AAP - The story body') > 0)
def test_ednote_injection(self):
f = AAPAnpaFormatter()
subscriber = self.app.data.find('subscribers', None, None)[0]
item = self.article.copy()
item.update({'ednote': 'Note this'})
resp = f.format(item, subscriber)[0]
out = resp['encoded_item']
lines = io.StringIO(out.decode())
self.assertTrue(lines.getvalue().find('Note this') > 0)
def test_div_body(self):
f = AAPAnpaFormatter()
subscriber = self.app.data.find('subscribers', None, None)[0]
item = self.article.copy()
item.update({
'body_html': '<div>Kathmandu Holdings has lodged a claim in the New Zealand High'
' Court for the recovery of costs associated with last year\'s takeover bid from Briscoe'
' Group.</div><div>Kathmandu Holdings has lodged a claim in the New Zealand High Court for '
'the recovery of costs associated with last year\'s takeover bid from Briscoe Group.'
'</div><div><br></div><div>Kathmandu incurred costs in relation to the takeover bid. '
'After an initial request for payment on November 20, 2015 and subsequent correspondence, '
'Briscoe made a payment of $637,711.65 on May 25, 2016 without prejudice to its position on '
'what sum Kathmandu is entitled to recover.</div><div><br></div><div>Kathmandu considers the '
'full amount claimed is recoverable and has issued legal proceedings for the balance of monies'
' owed.</div>'})
resp = f.format(item, subscriber)[0]
out = resp['encoded_item']
lines = io.StringIO(out.decode())
self.assertTrue(lines.getvalue().split('\n')[7].find(' Kathmandu incurred costs in relation') == 0)
def test_span_body(self):
f = AAPAnpaFormatter()
subscriber = self.app.data.find('subscribers', None, None)[0]
item = self.article.copy()
item.update({
'body_html': '<p>Dental materials maker and marketer SDI has boosted its shares after reporting a lift in'
' sales, with improvements across Europe, Brazil and North America.</p>'
'<p>SDI <span style=\"background-color: transparent;\">reported a 7.8 per cent lift in unaudited'
' sales to $74 million for the year to June 30, 2016 on Monday, up from $68.7 million a year '
'earlier.</span></p><p>The company said it expected to report a post-tax profit of between $7.2 million '
'and $7.8 million when it releases its full-year results on August 29.</p><p>Shares in SDI gained '
'6.5 cents - a 12.2 per cent increase - to close at 59.5 cents on Monday.</p>'})
resp = f.format(item, subscriber)[0]
out = resp['encoded_item']
lines = io.StringIO(out.decode())
self.assertTrue(lines.getvalue().split('\n')[5].find(' SDI reported a 7.8 per cent lift in unaudited') == 0)
def test_br_body(self):
f = AAPAnpaFormatter()
subscriber = self.app.data.find('subscribers', None, None)[0]
item = self.article.copy()
item.update({
'body_html': '<p>Dental materials maker and marketer SDI<br> has boosted its shares after '
'reporting a lift in'
' sales, with improvements across Europe, Brazil and North America.</p>'
'<p>SDI <span style=\"background-color: transparent;\">reported a 7.8 per cent lift in unaudited'
' sales to $74 million for the year to June 30, 2016 on Monday, up from $68.7 million a year '
'earlier.</span></p><p>The company said it expected to report a post-tax profit of between $7.2 million '
'and $7.8 million when it releases its full-year results on August 29.</p><p>Shares in SDI gained '
'6.5 cents - a 12.2 per cent increase - to close at 59.5 cents on Monday.</p>'})
resp = f.format(item, subscriber)[0]
out = resp['encoded_item']
lines = io.StringIO(out.decode())
self.assertTrue(lines.getvalue().split('\n')[4].find(' Dental materials maker and marketer SDI') == 0)
self.assertTrue(lines.getvalue().split('\n')[5].find(' has boosted its shares after reporting') == 0)
def test_none_body(self):
f = AAPAnpaFormatter()
subscriber = self.app.data.find('subscribers', None, None)[0]
item = self.article.copy()
item.update({
'anpa_take_key': None, 'byline': None, 'abstract': None})
resp = f.format(item, subscriber)[0]
self.assertTrue('encoded_item' in resp)
| agpl-3.0 | -7,921,156,450,565,238,000 | 41.368 | 120 | 0.592837 | false |
repotvsupertuga/repo | plugin.video.zen/resources/lib/indexers/navigator.py | 1 | 14704 | # -*- coding: utf-8 -*-
'''
zen Add-on
Copyright (C) 2016 zen
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os,sys,urlparse
from resources.lib.modules import control
from resources.lib.modules import trakt
sysaddon = sys.argv[0]
syshandle = int(sys.argv[1])
artPath = control.artPath()
addonFanart = control.addonFanart()
imdbCredentials = False if control.setting('imdb.user') == '' else True
traktCredentials = trakt.getTraktCredentialsInfo()
traktIndicators = trakt.getTraktIndicatorsInfo()
queueMenu = control.lang(32065).encode('utf-8')
movielist1 = control.setting('tmdb.movielist_name1')
movielist2 = control.setting('tmdb.movielist_name2')
movielist3 = control.setting('tmdb.movielist_name3')
movielist4 = control.setting('tmdb.movielist_name4')
movielist5 = control.setting('tmdb.movielist_name5')
movielist6 = control.setting('tmdb.movielist_name6')
movielist7 = control.setting('tmdb.movielist_name7')
movielist8 = control.setting('tmdb.movielist_name8')
movielist9 = control.setting('tmdb.movielist_name9')
movielist10 = control.setting('tmdb.movielist_name10')
tvlist1 = control.setting('tmdb.tvlist_name1')
tvlist2 = control.setting('tmdb.tvlist_name2')
tvlist3 = control.setting('tmdb.tvlist_name3')
tvlist4 = control.setting('tmdb.tvlist_name4')
tvlist5 = control.setting('tmdb.tvlist_name5')
tvlist6 = control.setting('tmdb.tvlist_name6')
tvlist7 = control.setting('tmdb.tvlist_name7')
tvlist8 = control.setting('tmdb.tvlist_name8')
tvlist9 = control.setting('tmdb.tvlist_name9')
tvlist10 = control.setting('tmdb.tvlist_name10')
class navigator:
def root(self):
self.addDirectoryItem(32001, 'movieNavigator', 'movies.png', 'DefaultMovies.png')
self.addDirectoryItem(32002, 'tvNavigator', 'channels.png', 'DefaultTVShows.png')
if not control.setting('movie.widget') == '0': self.addDirectoryItem('Spotlight', 'movieWidget', 'latest-movies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem('Movies Watchlist', 'movieFavourites', 'mymovies.png', 'DefaultMovies.png')
self.addDirectoryItem('TV Shows Watchlist', 'tvFavourites', 'mymovies.png', 'DefaultMovies.png')
self.addDirectoryItem('New Movies', 'movies&url=premiere', 'trending.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(32026, 'tvshows&url=premiere', 'years.png', 'DefaultTVShows.png')
self.addDirectoryItem('My Lists', 'lists_navigator', 'trending.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(32027, 'calendars', 'networks.png', 'DefaultRecentlyAddedEpisodes.png')
self.addDirectoryItem(32007, 'channels', 'channels.png', 'DefaultMovies.png')
self.addDirectoryItem(32008, 'toolNavigator', 'tools.png', 'DefaultAddonProgram.png')
downloads = True if control.setting('downloads') == 'true' and (len(control.listDir(control.setting('movie.download.path'))[0]) > 0) else False
if downloads == True: self.addDirectoryItem(32009, 'downloadNavigator', 'downloads.png', 'DefaultFolder.png')
self.addDirectoryItem(32010, 'searchNavigator', 'search.png', 'DefaultFolder.png')
self.addDirectoryItem('Changelog', 'ShowChangelog', 'icon.png', 'DefaultFolder.png')
self.endDirectory()
def movies(self, lite=False):
self.addDirectoryItem('Featured', 'movies&url=featured', 'featured.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem('Trending', 'movies&url=trending', 'trending.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem('Populars', 'movies&url=popular', 'populars.png', 'DefaultMovies.png')
self.addDirectoryItem('New Movies', 'movies&url=premiere', 'trending.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem('Top Rated', 'movies&url=views', 'most-viewed.png', 'DefaultMovies.png')
self.addDirectoryItem('In Theaters', 'movies&url=theaters', 'in-theaters.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem('Marvel Universe', 'movies&url=tmdbmarvel', 'marvel.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem('Oscar Winners', 'movies&url=tmdboscars', 'oscars.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem('Disney Collection', 'movies&url=tmdbdisney', 'disney.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem('Genres', 'movieGenres', 'genres.png', 'DefaultMovies.png')
self.addDirectoryItem('Years', 'movieYears', 'years.png', 'DefaultMovies.png')
self.addDirectoryItem('Persons', 'moviePersons', 'people.png', 'DefaultMovies.png')
self.addDirectoryItem('Certificates', 'movieCertificates', 'certificates.png', 'DefaultMovies.png')
self.addDirectoryItem(32028, 'moviePerson', 'people-search.png', 'DefaultMovies.png')
self.addDirectoryItem(32010, 'movieSearch', 'search.png', 'DefaultMovies.png')
self.endDirectory()
def lists_navigator(self):
self.addDirectoryItem('Movies', 'movielist', 'movies.png', 'DefaultMovies.png')
self.addDirectoryItem('Tv Shows', 'tvlist', 'channels.png', 'DefaultTVShows.png')
self.endDirectory()
def mymovies(self):
self.addDirectoryItem(movielist1, 'movies&url=mycustomlist1', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist2, 'movies&url=mycustomlist2', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist3, 'movies&url=mycustomlist3', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist4, 'movies&url=mycustomlist4', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist5, 'movies&url=mycustomlist5', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist6, 'movies&url=mycustomlist6', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist7, 'movies&url=mycustomlist7', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist8, 'movies&url=mycustomlist8', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist9, 'movies&url=mycustomlist9', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.addDirectoryItem(movielist10, 'movies&url=mycustomlist10', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.endDirectory()
def mytv(self):
self.addDirectoryItem(tvlist1, 'tvshows&url=mycustomlist1', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist2, 'tvshows&url=mycustomlist2', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist3, 'tvshows&url=mycustomlist3', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist4, 'tvshows&url=mycustomlist4', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist5, 'tvshows&url=mycustomlist5', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist6, 'tvshows&url=mycustomlist6', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist7, 'tvshows&url=mycustomlist7', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist8, 'tvshows&url=mycustomlist8', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist9, 'tvshows&url=mycustomlist9', 'channels.png', 'DefaultTVShows.png')
self.addDirectoryItem(tvlist10, 'tvshows&url=mycustomlist10', 'mymovies.png', 'DefaultRecentlyAddedMovies.png')
self.endDirectory()
def tvshows(self, lite=False):
self.addDirectoryItem('Featured', 'tvshows&url=featured', 'populars.png', 'DefaultRecentlyAddedEpisodes.png')
self.addDirectoryItem('Populars', 'tvshows&url=popular', 'most-viewed.png', 'DefaultTVShows.png')
self.addDirectoryItem(32019, 'tvshows&url=views', 'most-viewed.png', 'DefaultTVShows.png')
self.addDirectoryItem(32026, 'tvshows&url=premiere', 'years.png', 'DefaultTVShows.png')
self.addDirectoryItem(32025, 'tvshows&url=active', 'years.png', 'DefaultTVShows.png')
self.addDirectoryItem(32023, 'tvshows&url=rating', 'featured.png', 'DefaultTVShows.png')
self.addDirectoryItem(32011, 'tvGenres', 'genres.png', 'DefaultTVShows.png')
self.addDirectoryItem(32016, 'tvNetworks', 'networks.png', 'DefaultTVShows.png')
self.addDirectoryItem(32024, 'tvshows&url=airing', 'airing-today.png', 'DefaultTVShows.png')
self.addDirectoryItem(32027, 'calendars', 'networks.png', 'DefaultRecentlyAddedEpisodes.png')
self.addDirectoryItem(32010, 'tvSearch', 'search.png', 'DefaultTVShows.png')
self.endDirectory()
def tools(self):
self.addDirectoryItem('[B]URL RESOLVER[/B]: Settings', 'urlresolversettings', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem(32043, 'openSettings&query=0.0', 'tools.png', 'DefaultAddonProgram.png')
# self.addDirectoryItem(32044, 'openSettings&query=3.1', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem(32045, 'openSettings&query=1.0', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]SETTINGS[/B]: Accounts', 'openSettings&query=2.0', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem(32047, 'openSettings&query=3.0', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem(32046, 'openSettings&query=5.0', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]SETTINGS[/B]: Downloads', 'openSettings&query=4.0', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]SETTINGS[/B]: Watchlist', 'openSettings&query=6.0', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]SETTINGS[/B]: Lists', 'openSettings&query=7.0', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]ZEN[/B]: Views', 'viewsNavigator', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]ZEN[/B]: Clear Providers', 'clearSources', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]ZEN[/B]: Clear Cache', 'clearCache', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]BACKUP[/B]: Watchlist', 'backupwatchlist', 'tools.png', 'DefaultAddonProgram.png')
self.addDirectoryItem('[B]RESTORE[/B]: Watchlist', 'restorewatchlist', 'tools.png', 'DefaultAddonProgram.png')
self.endDirectory()
def downloads(self):
movie_downloads = control.setting('movie.download.path')
# tv_downloads = control.setting('tv.download.path')
if len(control.listDir(movie_downloads)[0]) > 0:
self.addDirectoryItem(32001, movie_downloads, 'movies.png', 'DefaultMovies.png', isAction=False)
self.endDirectory()
def search(self):
self.addDirectoryItem(32001, 'movieSearch', 'search.png', 'DefaultMovies.png')
self.addDirectoryItem(32002, 'tvSearch', 'search.png', 'DefaultTVShows.png')
# self.addDirectoryItem(32029, 'moviePerson', 'people-search.png', 'DefaultMovies.png')
# self.addDirectoryItem(32030, 'tvPerson', 'people-search.png', 'DefaultTVShows.png')
self.endDirectory()
def views(self):
try:
control.idle()
items = [ (control.lang(32001).encode('utf-8'), 'movies'), (control.lang(32002).encode('utf-8'), 'tvshows'), (control.lang(32054).encode('utf-8'), 'seasons'), (control.lang(32038).encode('utf-8'), 'episodes') ]
select = control.selectDialog([i[0] for i in items], control.lang(32049).encode('utf-8'))
if select == -1: return
content = items[select][1]
title = control.lang(32059).encode('utf-8')
url = '%s?action=addView&content=%s' % (sys.argv[0], content)
poster, banner, fanart = control.addonPoster(), control.addonBanner(), control.addonFanart()
item = control.item(label=title)
item.setInfo(type='Video', infoLabels = {'title': title})
item.setArt({'icon': poster, 'thumb': poster, 'poster': poster, 'tvshow.poster': poster, 'season.poster': poster, 'banner': banner, 'tvshow.banner': banner, 'season.banner': banner})
item.setProperty('Fanart_Image', fanart)
control.addItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=False)
control.content(int(sys.argv[1]), content)
control.directory(int(sys.argv[1]), cacheToDisc=True)
            from resources.lib.modules import views
views.setView(content, {})
except:
return
def accountCheck(self):
if traktCredentials == False and imdbCredentials == False:
control.idle()
control.infoDialog(control.lang(32042).encode('utf-8'), sound=True, icon='WARNING')
sys.exit()
def clearCache(self):
control.idle()
yes = control.yesnoDialog(control.lang(32056).encode('utf-8'), '', '')
if not yes: return
from resources.lib.modules import cache
cache.clear()
control.infoDialog(control.lang(32057).encode('utf-8'), sound=True, icon='INFO')
def addDirectoryItem(self, name, query, thumb, icon, queue=False, isAction=True, isFolder=True):
try: name = control.lang(name).encode('utf-8')
except: pass
url = '%s?action=%s' % (sysaddon, query) if isAction == True else query
thumb = os.path.join(artPath, thumb) if not artPath == None else icon
cm = []
if queue == True: cm.append((queueMenu, 'RunPlugin(%s?action=queueItem)' % sysaddon))
item = control.item(label=name)
item.addContextMenuItems(cm)
item.setArt({'icon': thumb, 'thumb': thumb})
if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
control.addItem(handle=syshandle, url=url, listitem=item, isFolder=isFolder)
def endDirectory(self):
# control.do_block_check(False)
control.directory(syshandle, cacheToDisc=True)
| gpl-2.0 | -5,556,857,216,013,222,000 | 55.122137 | 222 | 0.696477 | false |
crccheck/atx-bandc | bandc/apps/agenda/tests/test_utils.py | 1 | 3059 | import datetime
import os.path
from unittest import mock
from django.test import TestCase
from ..factories import BandCFactory
from ..utils import (
MeetingCancelled,
parse_date,
clean_text,
process_page,
get_number_of_pages,
_save_page,
)
from .. import scrape_logger
BASE_DIR = os.path.dirname(__file__)
class UtilsTests(TestCase):
def test_parse_date_works(self):
date = parse_date("January 13, 2014")
self.assertEqual(date, datetime.date(2014, 1, 13))
with self.assertRaises(MeetingCancelled):
date = parse_date("January 28, 2014 (Cancelled)")
def test_clean_test(self):
fixture = (
("", ""),
("test", "test"),
("- May 27, 2014 PARB Agenda", "May 27, 2014 PARB Agenda"),
)
for input, expected in fixture:
self.assertEqual(clean_text(input), expected)
def test_process_page_works(self):
html = open(os.path.join(BASE_DIR, "samples/music.html")).read()
meeting_data, doc_data = process_page(html)
self.assertEqual(len(doc_data), 9)
self.assertEqual(doc_data[0]["date"], datetime.date(2014, 6, 2))
def test_get_number_of_pages_works(self):
html = open(os.path.join(BASE_DIR, "samples/music.html")).read()
self.assertEqual(get_number_of_pages(html), 1)
html = open(os.path.join(BASE_DIR, "samples/parks.html")).read()
self.assertEqual(get_number_of_pages(html), 2)
@mock.patch("bandc.apps.agenda.models.Document.refresh")
def test_save_page_works(self, mock_task):
html = open(os.path.join(BASE_DIR, "samples/music.html")).read()
meeting_data, doc_data = process_page(html)
bandc = BandCFactory()
# Sanity check
self.assertEqual(bandc.latest_meeting, None)
process_next = _save_page(meeting_data, doc_data, bandc)
self.assertFalse(process_next)
self.assertEqual(bandc.latest_meeting.date.isoformat(), "2014-02-03")
self.assertEqual(bandc.latest_meeting.documents.all()[0].edims_id, 204789)
self.assertTrue(mock_task.called)
def test_save_page_handles_no_data(self):
meeting_data, doc_data = [], []
bandc = BandCFactory()
# Sanity check
self.assertEqual(bandc.latest_meeting, None)
process_next = _save_page(meeting_data, doc_data, bandc)
self.assertFalse(process_next)
self.assertEqual(bandc.latest_meeting, None)
@mock.patch("bandc.apps.agenda.models.Document.refresh")
def test_save_page_logs_to_scrape_logger(self, mock_task):
html = open(os.path.join(BASE_DIR, "samples/music.html")).read()
meeting_data, doc_data = process_page(html)
bandc = BandCFactory()
# Sanity check
self.assertEqual(bandc.latest_meeting, None)
with scrape_logger.init() as context:
_save_page(meeting_data, doc_data, bandc)
self.assertEqual(len(context.meetings), 4)
self.assertEqual(len(context.documents), 9)
| bsd-3-clause | -7,105,842,265,565,726,000 | 33.761364 | 82 | 0.632887 | false |
wyrdmeister/OnlineAnalysis | OAGui/src/GuiBase.py | 1 | 5421 | # -*- coding: utf-8 -*-
"""
Online Analysis Configuration Editor - Base class with logging functions
Version 2.0
Michele Devetta (c) 2013
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import argparse
import logging
from PyQt4 import QtCore
from PyQt4 import QtGui
def declare_trUtf8(name):
""" Declare a UTF-8 translation function with the given module name. """
def function(string):
try:
return unicode(QtGui.QApplication.translate(name, string, None, QtGui.QApplication.UnicodeUTF8))
except:
return unicode(string)
return function
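# Illustrative usage (assumption, not part of the original module): a module
# typically builds one translation helper for its own context name and wraps
# every user-visible string with it; the context name and label are examples.
def _declare_trUtf8_example():
    """ Example only: returns a translated (or passthrough) label. """
    tr = declare_trUtf8("OAGui")
    return tr("Online Analysis")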
class GuiBase(QtCore.QObject):
""" Base OA GUI class with logging facilities. """
def __init__(self, name='OAGui'):
""" Constructor. """
# Parent constructor
QtCore.QObject.__init__(self)
# Parse command line args
ap = argparse.ArgumentParser(prog=name, add_help=False)
ap.add_argument('-d, --debug', dest="debug", action="store_const", const=Logger.DEBUG, default=Logger.INFO)
out = ap.parse_args()
# Init logger
self.logger = Logger(name, out.debug)
class WidgetHandler(logging.Handler):
""" Logging handler that send formatted output to QListWidget. """
def __init__(self, signal, level=logging.NOTSET):
""" Constructor. """
logging.Handler.__init__(self, level)
self._signal = signal
def emit(self, record):
""" Stores a record. """
self._signal.emit(unicode(self.format(record)))
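# Illustrative usage (assumption, not part of the original module): a widget
# owning a pyqtSignal(unicode) connected to e.g. QListWidget.addItem can pass
# that signal to Logger.setupWidgetLogger(), which attaches a WidgetHandler;
# the logger name and message below are examples.
def _widget_logging_example(signal):
    """ Example only: route formatted log records into a Qt widget. """
    log = Logger("OAGui", Logger.DEBUG)
    log.setupWidgetLogger(signal)
    log.info("widget logging enabled")
    return log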
class Logger(object):
""" Logger class. """
def __init__(self, name="Default", level=logging.INFO):
""" Constructor. """
self._name = name
self._level = level
self._init_logger()
def _init_logger(self):
""" Initialize the logger object. """
# Setup logger
self._logger = logging.getLogger(self._name)
self._logger.setLevel(self._level)
# Add standard handler if not present
for h in self._logger.handlers:
try:
if h.name == self._name + "_handler":
return
except:
pass
_handler = logging.StreamHandler()
_handler.setFormatter(logging.Formatter('[%(asctime)s] %(name)s:%(levelname)s:%(message)s', '%b %d, %H:%M:%S'))
_handler.setLevel(self._level)
_handler.name = self._name + "_handler"
self._logger.addHandler(_handler)
def setupWidgetLogger(self, signal):
""" Add a widget handler to the current logger. """
for h in self._logger.handlers:
try:
if h.name == self._name + "_WidgetLogger":
return
except:
pass
handler = WidgetHandler(signal, self._level)
handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s:%(message)s', '%b %d, %H:%M:%S'))
handler.name = "OAEditor_WidgetLogger"
self._logger.addHandler(handler)
def __getstate__(self):
""" Enable the logger object to be pickled. """
odict = self.__dict__.copy() # copy the dict since we change it
del odict['_logger'] # remove logger entry
return odict
def __setstate__(self, idict):
""" Enable the logger object to be unpickled. """
self.__dict__.update(idict) # restore dict
self._init_logger()
def level(self):
""" Return logger level. """
return self._level
def critical(self, msg, *args, **kwargs):
""" Equivalent to logging.critical """
if 'exc_info' in kwargs and self._logger.level != logging.DEBUG:
kwargs['exc_info'] = False
self._logger.log(logging.CRITICAL, msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
""" Equivalent to logging.error """
if 'exc_info' in kwargs and self._logger.level != logging.DEBUG:
kwargs['exc_info'] = False
self._logger.log(logging.ERROR, msg, *args, **kwargs)
def warning(self, msg, *args, **kwargs):
""" Equivalent to logging.warning """
if 'exc_info' in kwargs and self._logger.level != logging.DEBUG:
kwargs['exc_info'] = False
self._logger.log(logging.WARN, msg, *args, **kwargs)
def info(self, msg, *args, **kwargs):
""" Equivalent to logging.info """
if 'exc_info' in kwargs and self._logger.level != logging.DEBUG:
kwargs['exc_info'] = False
self._logger.log(logging.INFO, msg, *args, **kwargs)
def debug(self, msg, *args, **kwargs):
""" Equivalent to logging.debug """
self._logger.log(logging.DEBUG, msg, *args, **kwargs)
# Log levels
DEBUG = logging.DEBUG
INFO = logging.INFO
WARN = logging.WARN
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL | gpl-3.0 | 5,279,471,035,327,872,000 | 32.060976 | 119 | 0.608006 | false |
gpodder/gpodder | share/gpodder/extensions/notification-win32.py | 1 | 6754 | # -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2018 The gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Notification implementation for Windows
# Sean Munkel; 2012-12-29
"""
Current state (2018/07/29 ELL):
- I can't get pywin32 to work in msys2 (the platform used for this python3/gtk3 installer)
so existing code using COM doesn't work.
- Gio.Notification is not implemented on windows yet.
see https://bugzilla.gnome.org/show_bug.cgi?id=776583
- Gtk.StatusIcon with a context menu works but is deprecated. Showing a balloon using set_tooltip_markup
doesn't work.
See https://github.com/afiskon/py-gtk-example
- hexchat have implemented a solid c++ solution.
See https://github.com/hexchat/hexchat/tree/master/src/fe-gtk/notifications
I've chosen to implement notifications by calling a PowerShell script invoking
Windows Toast Notification API or Balloon Notification as fallback.
It's tested on Win7 32bit and Win10 64bit VMs from modern.ie
So we have a working solution until Gio.Notification is implemented on Windows.
"""
import logging
import os
import os.path
import subprocess
import sys
import tempfile
import gpodder
import gi # isort:skip
gi.require_version('Gtk', '3.0') # isort:skip
from gi.repository import Gtk # isort:skip
logger = logging.getLogger(__name__)
_ = gpodder.gettext
__title__ = _('Notification Bubbles for Windows')
__description__ = _('Display notification bubbles for different events.')
__authors__ = 'Sean Munkel <[email protected]>'
__category__ = 'desktop-integration'
__mandatory_in__ = 'win32'
__only_for__ = 'win32'
class gPodderExtension(object):
def __init__(self, *args):
gpodder_script = sys.argv[0]
gpodder_script = os.path.realpath(gpodder_script)
self._icon = os.path.join(os.path.dirname(gpodder_script), "gpodder.ico")
def on_notification_show(self, title, message):
script = """
try {{
if ([Environment]::OSVersion.Version -ge (new-object 'Version' 10,0,10240)) {{
# use Windows 10 Toast notification
[Windows.UI.Notifications.ToastNotificationManager, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
[Windows.UI.Notifications.ToastNotification, Windows.UI.Notifications, ContentType = WindowsRuntime] | Out-Null
[Windows.Data.Xml.Dom.XmlDocument, Windows.Data.Xml.Dom.XmlDocument, ContentType = WindowsRuntime] | Out-Null
# Need a real AppID (see https://stackoverflow.com/q/46814858)
# use gPodder app id if it's the installed, otherwise use PowerShell's AppID
try {{
$gpo_appid = Get-StartApps -Name "gpodder"
}} catch {{
write-host "Get-StartApps not available"
$gpo_appid = $null
}}
if ($gpo_appid -ne $null) {{
$APP_ID = $gpo_appid[0].AppID
}} else {{
$APP_ID = '{{1AC14E77-02E7-4E5D-B744-2EB1AE5198B7}}\\WindowsPowerShell\\v1.0\\powershell.exe'
}}
$template = @"
<toast activationType="protocol" launch="" duration="long">
<visual>
<binding template="ToastGeneric">
<image placement="appLogoOverride" src="{icon}" />
<text><![CDATA[{title}]]></text>
<text><![CDATA[{message}]]></text>
</binding>
</visual>
<audio silent="true" />
</toast>
"@
$xml = New-Object Windows.Data.Xml.Dom.XmlDocument
$xml.LoadXml($template)
$toast = New-Object Windows.UI.Notifications.ToastNotification $xml
[Windows.UI.Notifications.ToastNotificationManager]::CreateToastNotifier($APP_ID).Show($toast)
Remove-Item -LiteralPath $MyInvocation.MyCommand.Path -Force # Delete this script temp file.
}} else {{
# use older Baloon notification when not on Windows 10
[System.Reflection.Assembly]::LoadWithPartialName("System.Windows.Forms")
$o = New-Object System.Windows.Forms.NotifyIcon
$o.Icon = "{icon}"
$o.BalloonTipIcon = "None"
$o.BalloonTipText = @"
{message}
"@
$o.BalloonTipTitle = @"
{title}
"@
$o.Visible = $True
$Delay = 10 # Delay value in seconds.
$o.ShowBalloonTip($Delay*1000)
Start-Sleep -s $Delay
$o.Dispose()
Remove-Item -LiteralPath $MyInvocation.MyCommand.Path -Force # Delete this script temp file.
}}
}} catch {{
write-host "Caught an exception:"
write-host "Exception Type: $($_.Exception.GetType().FullName)"
write-host "Exception Message: $($_.Exception.Message)"
exit 1
}}
""".format(icon=self._icon, message=message, title=title)
fh, path = tempfile.mkstemp(suffix=".ps1")
with open(fh, "w", encoding="utf_8_sig") as f:
f.write(script)
try:
# hide powershell command window using startupinfo
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags = subprocess.CREATE_NEW_CONSOLE | subprocess.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = subprocess.SW_HIDE
# to run 64bit powershell on Win10 64bit when running from 32bit gPodder
# (we need 64bit powershell on Win10 otherwise Get-StartApps is not available)
powershell = r"{}\sysnative\WindowsPowerShell\v1.0\powershell.exe".format(os.environ["SystemRoot"])
if not os.path.exists(powershell):
powershell = "powershell.exe"
subprocess.Popen([powershell,
"-ExecutionPolicy", "Bypass", "-File", path],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
startupinfo=startupinfo)
except subprocess.CalledProcessError as e:
logger.error("Error in on_notification_show(title=%r, message=%r):\n"
"\t%r exit code %i\n\tstdout=%s\n\tstderr=%s",
title, message, e.cmd, e.returncode, e.stdout, e.stderr)
except FileNotFoundError:
logger.error("Error in on_notification_show(title=%r, message=%r): %s not found",
title, message, powershell)
def on_unload(self):
pass
| gpl-3.0 | -7,643,252,481,788,382,000 | 40.950311 | 126 | 0.659165 | false |
bslatkin/8-bits | appengine-ndb/ndb/key_test.py | 1 | 16345 | """Tests for key.py."""
import base64
import pickle
import unittest
from .google_imports import datastore_errors
from .google_imports import datastore_types
from .google_imports import entity_pb
from . import eventloop
from . import key
from . import model
from . import tasklets
from . import test_utils
class KeyTests(test_utils.NDBTest):
the_module = key
def testShort(self):
k0 = key.Key('Kind', None)
self.assertEqual(k0.flat(), ('Kind', None))
k1 = key.Key('Kind', 1)
self.assertEqual(k1.flat(), ('Kind', 1))
k2 = key.Key('Parent', 42, 'Kind', 1)
self.assertEqual(k2.flat(), ('Parent', 42, 'Kind', 1))
def testFlat(self):
flat = ('Kind', 1)
pairs = tuple((flat[i], flat[i + 1]) for i in xrange(0, len(flat), 2))
k = key.Key(flat=flat)
self.assertEqual(k.pairs(), pairs)
self.assertEqual(k.flat(), flat)
self.assertEqual(k.kind(), 'Kind')
def testFlatLong(self):
flat = ('Kind', 1, 'Subkind', 'foobar')
pairs = tuple((flat[i], flat[i + 1]) for i in xrange(0, len(flat), 2))
k = key.Key(flat=flat)
self.assertEqual(k.pairs(), pairs)
self.assertEqual(k.flat(), flat)
self.assertEqual(k.kind(), 'Subkind')
def testSerialized(self):
flat = ['Kind', 1, 'Subkind', 'foobar']
r = entity_pb.Reference()
r.set_app('_')
e = r.mutable_path().add_element()
e.set_type(flat[0])
e.set_id(flat[1])
e = r.mutable_path().add_element()
e.set_type(flat[2])
e.set_name(flat[3])
serialized = r.Encode()
urlsafe = base64.urlsafe_b64encode(r.Encode()).rstrip('=')
k = key.Key(flat=flat)
self.assertEqual(k.serialized(), serialized)
self.assertEqual(k.urlsafe(), urlsafe)
self.assertEqual(k.reference(), r)
k = key.Key(urlsafe=urlsafe)
self.assertEqual(k.serialized(), serialized)
self.assertEqual(k.urlsafe(), urlsafe)
self.assertEqual(k.reference(), r)
k = key.Key(serialized=serialized)
self.assertEqual(k.serialized(), serialized)
self.assertEqual(k.urlsafe(), urlsafe)
self.assertEqual(k.reference(), r)
k = key.Key(reference=r)
self.assertTrue(k.reference() is not r)
self.assertEqual(k.serialized(), serialized)
self.assertEqual(k.urlsafe(), urlsafe)
self.assertEqual(k.reference(), r)
k = key.Key(reference=r, app=r.app(), namespace='')
self.assertTrue(k.reference() is not r)
self.assertEqual(k.serialized(), serialized)
self.assertEqual(k.urlsafe(), urlsafe)
self.assertEqual(k.reference(), r)
k1 = key.Key('A', 1)
self.assertEqual(k1.urlsafe(), 'agFfcgcLEgFBGAEM')
k2 = key.Key(urlsafe=k1.urlsafe())
self.assertEqual(k1, k2)
def testId(self):
k1 = key.Key('Kind', 'foo', app='app1', namespace='ns1')
self.assertEqual(k1.id(), 'foo')
k2 = key.Key('Subkind', 42, parent=k1)
self.assertEqual(k2.id(), 42)
k3 = key.Key('Subkind', 'bar', parent=k2)
self.assertEqual(k3.id(), 'bar')
# incomplete key
k4 = key.Key('Subkind', None, parent=k3)
self.assertEqual(k4.id(), None)
def testStringId(self):
k1 = key.Key('Kind', 'foo', app='app1', namespace='ns1')
self.assertEqual(k1.string_id(), 'foo')
k2 = key.Key('Subkind', 'bar', parent=k1)
self.assertEqual(k2.string_id(), 'bar')
k3 = key.Key('Subkind', 42, parent=k2)
self.assertEqual(k3.string_id(), None)
# incomplete key
k4 = key.Key('Subkind', None, parent=k3)
self.assertEqual(k4.string_id(), None)
def testIntegerId(self):
k1 = key.Key('Kind', 42, app='app1', namespace='ns1')
self.assertEqual(k1.integer_id(), 42)
k2 = key.Key('Subkind', 43, parent=k1)
self.assertEqual(k2.integer_id(), 43)
k3 = key.Key('Subkind', 'foobar', parent=k2)
self.assertEqual(k3.integer_id(), None)
# incomplete key
k4 = key.Key('Subkind', None, parent=k3)
self.assertEqual(k4.integer_id(), None)
def testParent(self):
p = key.Key('Kind', 1, app='app1', namespace='ns1')
self.assertEqual(p.parent(), None)
k = key.Key('Subkind', 'foobar', parent=p)
self.assertEqual(k.flat(), ('Kind', 1, 'Subkind', 'foobar'))
self.assertEqual(k.parent(), p)
k = key.Key('Subkind', 'foobar', parent=p,
app=p.app(), namespace=p.namespace())
self.assertEqual(k.flat(), ('Kind', 1, 'Subkind', 'foobar'))
self.assertEqual(k.parent(), p)
def testRoot(self):
p = key.Key('Kind', 1, app='app1', namespace='ns1')
self.assertEqual(p.root(), p)
k = key.Key('Subkind', 'foobar', parent=p)
self.assertEqual(k.flat(), ('Kind', 1, 'Subkind', 'foobar'))
self.assertEqual(k.root(), p)
k2 = key.Key('Subsubkind', 42, parent=k,
app=p.app(), namespace=p.namespace())
self.assertEqual(k2.flat(), ('Kind', 1,
'Subkind', 'foobar',
'Subsubkind', 42))
self.assertEqual(k2.root(), p)
def testRepr_Inferior(self):
k = key.Key('Kind', 1L, 'Subkind', 'foobar')
self.assertEqual(repr(k),
"Key('Kind', 1, 'Subkind', 'foobar')")
self.assertEqual(repr(k), str(k))
def testRepr_Toplevel(self):
k = key.Key('Kind', 1)
self.assertEqual(repr(k), "Key('Kind', 1)")
def testRepr_Incomplete(self):
k = key.Key('Kind', None)
self.assertEqual(repr(k), "Key('Kind', None)")
def testRepr_UnicodeKind(self):
k = key.Key(u'\u1234', 1)
self.assertEqual(repr(k), "Key('\\xe1\\x88\\xb4', 1)")
def testRepr_UnicodeId(self):
k = key.Key('Kind', u'\u1234')
self.assertEqual(repr(k), "Key('Kind', '\\xe1\\x88\\xb4')")
def testRepr_App(self):
k = key.Key('Kind', 1, app='foo')
self.assertEqual(repr(k), "Key('Kind', 1, app='foo')")
def testRepr_Namespace(self):
k = key.Key('Kind', 1, namespace='foo')
self.assertEqual(repr(k), "Key('Kind', 1, namespace='foo')")
def testUnicode(self):
flat_input = (u'Kind\u1234', 1, 'Subkind', u'foobar\u4321')
flat = (flat_input[0].encode('utf8'), flat_input[1],
flat_input[2], flat_input[3].encode('utf8'))
pairs = tuple((flat[i], flat[i + 1]) for i in xrange(0, len(flat), 2))
k = key.Key(flat=flat_input)
self.assertEqual(k.pairs(), pairs)
self.assertEqual(k.flat(), flat)
# TODO: test these more thoroughly
r = k.reference()
serialized = k.serialized()
urlsafe = k.urlsafe()
key.Key(urlsafe=urlsafe.decode('utf8'))
key.Key(serialized=serialized.decode('utf8'))
key.Key(reference=r)
# TODO: this may not make sense -- the protobuf utf8-encodes values
r = entity_pb.Reference()
r.set_app('_')
e = r.mutable_path().add_element()
e.set_type(flat_input[0])
e.set_name(flat_input[3])
k = key.Key(reference=r)
self.assertEqual(k.reference(), r)
def testHash(self):
flat = ['Kind', 1, 'Subkind', 'foobar']
pairs = [(flat[i], flat[i + 1]) for i in xrange(0, len(flat), 2)]
k = key.Key(flat=flat)
self.assertEqual(hash(k), hash(tuple(pairs)))
def testOrdering(self):
a = key.Key(app='app2', namespace='ns2', flat=('kind1', 1))
b = key.Key(app='app2', namespace='ns1', flat=('kind1', 1))
c = key.Key(app='app1', namespace='ns1', flat=('kind1', 1))
d = key.Key(app='app1', namespace='ns1', flat=('kind1', 2))
e = key.Key(app='app1', namespace='ns1', flat=('kind1', 'e'))
f = key.Key(app='app1', namespace='ns1', flat=('kind1', 'f'))
g = key.Key(app='app1', namespace='ns1', flat=('kind2', 'f', 'x', 1))
h = key.Key(app='app1', namespace='ns1', flat=('kind2', 'f', 'x', 2))
input = [a, b, c, d, e, f, g, h]
actual = sorted(input)
expected = sorted(
input,
key=lambda k: datastore_types.ReferenceToKeyValue(k.reference()))
self.assertEqual(actual, expected)
for i in range(len(actual)):
for j in range(len(actual)):
self.assertEqual(actual[i] < actual[j], i < j)
self.assertEqual(actual[i] <= actual[j], i <= j)
self.assertEqual(actual[i] > actual[j], i > j)
self.assertEqual(actual[i] >= actual[j], i >= j)
self.assertEqual(actual[i] == actual[j], i == j)
self.assertEqual(actual[i] != actual[j], i != j)
def testUniqueIncomplete(self):
p0 = None
p1 = key.Key('bar', 1)
for p in p0, p1:
a = key.Key('foo', 0, parent=p)
b = key.Key('foo', '', parent=p)
c = key.Key('foo', None, parent=p)
self.assertEqual(a, b)
self.assertEqual(b, c)
self.assertEqual(c, a)
for x in a, b, c:
self.assertEqual(x.id(), None)
self.assertEqual(x.string_id(), None)
self.assertEqual(x.integer_id(), None)
self.assertEqual(x.pairs()[-1], ('foo', None))
self.assertEqual(x.flat()[-1], None)
self.assertEqual(x.urlsafe(), c.urlsafe())
def testPickling(self):
flat = ['Kind', 1, 'Subkind', 'foobar']
k = key.Key(flat=flat)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(k, protocol=proto)
kk = pickle.loads(s)
self.assertEqual(k, kk)
def testIncomplete(self):
key.Key(flat=['Kind', None])
self.assertRaises(datastore_errors.BadArgumentError,
key.Key, flat=['Kind', None, 'Subkind', 1])
self.assertRaises(TypeError, key.Key, flat=['Kind', ()])
def testKindFromModel(self):
class M(model.Model):
pass
class N(model.Model):
@classmethod
def _get_kind(cls):
return 'NN'
k = key.Key(M, 1)
self.assertEqual(k, key.Key('M', 1))
k = key.Key('X', 1, N, 2, 'Y', 3)
self.assertEqual(k, key.Key('X', 1, 'NN', 2, 'Y', 3))
def testKindFromBadValue(self):
# TODO: BadArgumentError
self.assertRaises(Exception, key.Key, 42, 42)
def testDeleteHooksCalled(self):
test = self # Closure for inside hook
self.pre_counter = 0
self.post_counter = 0
class HatStand(model.Model):
@classmethod
def _pre_delete_hook(cls, key):
test.pre_counter += 1
if test.pre_counter == 1: # Cannot test for key in delete_multi
self.assertEqual(self.key, key)
@classmethod
def _post_delete_hook(cls, key, future):
test.post_counter += 1
self.assertEqual(self.key, key)
self.assertTrue(future.get_result() is None)
furniture = HatStand()
key = furniture.put()
self.key = key
self.assertEqual(self.pre_counter, 0, 'Pre delete hook called early')
future = key.delete_async()
self.assertEqual(self.pre_counter, 1, 'Pre delete hook not called')
self.assertEqual(self.post_counter, 0, 'Post delete hook called early')
future.get_result()
self.assertEqual(self.post_counter, 1, 'Post delete hook not called')
# All counters now read 1, calling delete_multi for 10 keys makes this 11
new_furniture = [HatStand() for _ in range(10)]
keys = [furniture.put() for furniture in new_furniture] # Sequential keys
multi_future = model.delete_multi_async(keys)
self.assertEqual(self.pre_counter, 11,
'Pre delete hooks not called on delete_multi')
self.assertEqual(self.post_counter, 1,
'Post delete hooks called early on delete_multi')
for fut, key in zip(multi_future, keys):
self.key = key
fut.get_result()
self.assertEqual(self.post_counter, 11,
'Post delete hooks not called on delete_multi')
def testNoDefaultDeleteCallback(self):
# See issue 58. http://goo.gl/hPN6j
ctx = tasklets.get_context()
ctx.set_cache_policy(False)
class EmptyModel(model.Model):
pass
entity = EmptyModel()
entity.put()
fut = entity.key.delete_async()
self.assertFalse(fut._immediate_callbacks,
'Delete hook queued default no-op.')
def testGetHooksCalled(self):
test = self # Closure for inside hook
self.pre_counter = 0
self.post_counter = 0
class HatStand(model.Model):
@classmethod
def _pre_get_hook(cls, key):
test.pre_counter += 1
if test.pre_counter == 1: # Cannot test for key in get_multi
self.assertEqual(key, self.key)
@classmethod
def _post_get_hook(cls, key, future):
test.post_counter += 1
self.assertEqual(key, self.key)
self.assertEqual(future.get_result(), self.entity)
furniture = HatStand()
self.entity = furniture
key = furniture.put()
self.key = key
self.assertEqual(self.pre_counter, 0, 'Pre get hook called early')
future = key.get_async()
self.assertEqual(self.pre_counter, 1, 'Pre get hook not called')
self.assertEqual(self.post_counter, 0, 'Post get hook called early')
future.get_result()
self.assertEqual(self.post_counter, 1, 'Post get hook not called')
# All counters now read 1, calling get for 10 keys should make this 11
new_furniture = [HatStand() for _ in range(10)]
keys = [furniture.put() for furniture in new_furniture] # Sequential keys
multi_future = model.get_multi_async(keys)
self.assertEqual(self.pre_counter, 11,
'Pre get hooks not called on get_multi')
self.assertEqual(self.post_counter, 1,
'Post get hooks called early on get_multi')
for fut, key, entity in zip(multi_future, keys, new_furniture):
self.key = key
self.entity = entity
fut.get_result()
self.assertEqual(self.post_counter, 11,
'Post get hooks not called on get_multi')
def testMonkeyPatchHooks(self):
hook_attr_names = ('_pre_get_hook', '_post_get_hook',
'_pre_delete_hook', '_post_delete_hook')
original_hooks = {}
# Backup the original hooks
for name in hook_attr_names:
original_hooks[name] = getattr(model.Model, name)
self.pre_get_flag = False
self.post_get_flag = False
self.pre_delete_flag = False
self.post_delete_flag = False
# TODO: Should the unused arguments to Monkey Patched tests be tested?
class HatStand(model.Model):
@classmethod
def _pre_get_hook(cls, unused_key):
self.pre_get_flag = True
@classmethod
def _post_get_hook(cls, unused_key, unused_future):
self.post_get_flag = True
@classmethod
def _pre_delete_hook(cls, unused_key):
self.pre_delete_flag = True
@classmethod
def _post_delete_hook(cls, unused_key, unused_future):
self.post_delete_flag = True
# Monkey patch the hooks
for name in hook_attr_names:
hook = getattr(HatStand, name)
setattr(model.Model, name, hook)
try:
key = HatStand().put()
key.get()
self.assertTrue(self.pre_get_flag,
'Pre get hook not called when model is monkey patched')
self.assertTrue(self.post_get_flag,
'Post get hook not called when model is monkey patched')
key.delete()
self.assertTrue(self.pre_delete_flag,
'Pre delete hook not called when model is monkey patched')
self.assertTrue(self.post_delete_flag,
'Post delete hook not called when model is monkey patched')
finally:
# Restore the original hooks
for name in hook_attr_names:
setattr(model.Model, name, original_hooks[name])
def testPreHooksCannotCancelRPC(self):
class Foo(model.Model):
@classmethod
def _pre_get_hook(cls, unused_key):
raise tasklets.Return()
@classmethod
def _pre_delete_hook(cls, unused_key):
raise tasklets.Return()
entity = Foo()
entity.put()
self.assertRaises(tasklets.Return, entity.key.get)
self.assertRaises(tasklets.Return, entity.key.delete)
def testNoDefaultGetCallback(self):
# See issue 58. http://goo.gl/hPN6j
ctx = tasklets.get_context()
ctx.set_cache_policy(False)
class EmptyModel(model.Model):
pass
entity = EmptyModel()
entity.put()
fut = entity.key.get_async()
self.assertFalse(fut._immediate_callbacks, 'Get hook queued default no-op.')
def testFromOldKey(self):
old_key = datastore_types.Key.from_path('TestKey', 1234)
new_key = key.Key.from_old_key(old_key)
self.assertEquals(str(old_key), new_key.urlsafe())
old_key2 = new_key.to_old_key()
self.assertEquals(old_key, old_key2)
def main():
unittest.main()
if __name__ == '__main__':
main()
| apache-2.0 | -8,650,296,654,672,698,000 | 33.194561 | 80 | 0.614561 | false |
jsafrane/openlmi-storage | test/unit/config/test_unit_settings.py | 1 | 7966 | # Copyright (C) 2012 Red Hat, Inc. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Jan Safranek <[email protected]>
# -*- coding: utf-8 -*-
from StorageConfiguration import StorageConfiguration
from SettingManager import SettingManager, Setting
import unittest
import os
import shutil
class TestSetting(unittest.TestCase):
def setUp(self):
self.directory = os.path.dirname(__file__)
if not self.directory:
self.directory = "."
StorageConfiguration.CONFIG_FILE = "/not/existing"
self.config = StorageConfiguration()
def test_missing(self):
"""
Test loading persistent and preconfigured setting when appropriate
directories are missing.
"""
self.config.CONFIG_PATH = self.directory + "/configs/missing/etc/"
self.config.PERSISTENT_PATH = self.directory + "/configs/missing/var/"
mgr = SettingManager(self.config)
mgr.load()
self.assertDictEqual(mgr.classes, {})
def test_empty(self):
"""
Test loading persistent and preconfigured setting when appropriate
directories are empty.
"""
self.config.CONFIG_PATH = self.directory + "/configs/empty/etc/"
self.config.PERSISTENT_PATH = self.directory + "/configs/empty/var/"
mgr = SettingManager(self.config)
mgr.load()
self.assertDictEqual(mgr.classes, {})
def test_full(self):
"""
        Test loading persistent and preconfigured settings when the
        appropriate directories are fully populated.
"""
self.config.CONFIG_PATH = self.directory + "/configs/full/etc/"
self.config.PERSISTENT_PATH = self.directory + "/configs/full/var/"
mgr = SettingManager(self.config)
mgr.load()
# check LMI_StorageSetting class loaded OK
self.assertTrue(mgr.classes.has_key("LMI_StorageSetting"))
# check it has all instances
settings = mgr.get_settings("LMI_StorageSetting")
self.assertIn("LMI:StorageSetting:preconfigured1", settings.keys())
self.assertIn("LMI:StorageSetting:preconfigured2", settings.keys())
self.assertIn("LMI:StorageSetting:persistent1", settings.keys())
self.assertIn("LMI:StorageSetting:persistent2", settings.keys())
self.assertEqual(len(settings.keys()), 4)
# check one preconfigured setting
s1 = settings['LMI:StorageSetting:preconfigured1']
self.assertEqual(s1.id, "LMI:StorageSetting:preconfigured1")
self.assertEqual(s1.type, Setting.TYPE_PRECONFIGURED)
self.assertEqual(s1['first'], "1")
self.assertEqual(s1['second'], "two")
self.assertEqual(s1['third'], "3.0")
# check one persistent setting
s2 = settings['LMI:StorageSetting:persistent2']
self.assertEqual(s2.id, "LMI:StorageSetting:persistent2")
self.assertEqual(s2.type, Setting.TYPE_PERSISTENT)
self.assertEqual(s2['first'], "1000")
self.assertEqual(s2['second'], "two thousand")
self.assertEqual(s2['third'], "3000.0")
def test_save_load(self):
""" Test saving a persistent settings and loading them back."""
# load the 'full' settings
self.config.CONFIG_PATH = self.directory + "/configs/full/etc/"
self.config.PERSISTENT_PATH = self.directory + "/configs/full/var/"
mgr = SettingManager(self.config)
mgr.load()
# dirty hack to save it to different directory...
self.config.PERSISTENT_PATH = self.directory + "/configs/save_load/var/"
# add one transient setting
s = Setting(Setting.TYPE_TRANSIENT, "LMI:StorageSetting:transient1")
s['first'] = "111"
s['second'] = "two two two"
s['third'] = "333.0"
mgr.set_setting("LMI_StorageSetting", s)
# add one preconfigured setting (this should not happen in reality,
# but let's test it).
s = Setting(Setting.TYPE_PRECONFIGURED, "LMI:StorageSetting:preconfigured3")
s['first'] = "1111"
s['second'] = "two two two two"
s['third'] = "3333.0"
mgr.set_setting("LMI_StorageSetting", s)
# add one persistent setting
s = Setting(Setting.TYPE_PERSISTENT, "LMI:StorageSetting:persistent3")
s['first'] = "11"
s['second'] = "two two"
s['third'] = "33.0"
mgr.set_setting("LMI_StorageSetting", s)
# the persistent setting should be saved
        # try to reload the config - it should remove the preconfigured one
mgr.load()
# check it has all instances and that the preconfigured is gone
settings = mgr.get_settings("LMI_StorageSetting")
self.assertIn("LMI:StorageSetting:preconfigured1", settings.keys())
self.assertIn("LMI:StorageSetting:preconfigured2", settings.keys())
self.assertIn("LMI:StorageSetting:persistent1", settings.keys())
self.assertIn("LMI:StorageSetting:persistent2", settings.keys())
self.assertIn("LMI:StorageSetting:persistent3", settings.keys())
self.assertIn("LMI:StorageSetting:transient1", settings.keys())
self.assertEqual(len(settings.keys()), 6)
# check the transient is ok
s1 = settings['LMI:StorageSetting:transient1']
self.assertEqual(s1.id, "LMI:StorageSetting:transient1")
self.assertEqual(s1.type, Setting.TYPE_TRANSIENT)
self.assertEqual(s1['first'], "111")
self.assertEqual(s1['second'], "two two two")
self.assertEqual(s1['third'], "333.0")
# check the persistent is there
s2 = settings['LMI:StorageSetting:persistent3']
self.assertEqual(s2.id, "LMI:StorageSetting:persistent3")
self.assertEqual(s2.type, Setting.TYPE_PERSISTENT)
self.assertEqual(s2['first'], "11")
self.assertEqual(s2['second'], "two two")
self.assertEqual(s2['third'], "33.0")
        # remove one persistent setting, the removal should be persisted immediately
mgr.delete_setting('LMI_StorageSetting', s2)
# check it is really removed
mgr = SettingManager(self.config)
mgr.load()
settings = mgr.get_settings("LMI_StorageSetting")
self.assertNotIn("LMI:StorageSetting:persistent3", settings.keys())
        # change one persistent setting, the change should be persisted immediately
s3 = settings['LMI:StorageSetting:persistent2']
s3['first'] = "-1"
s3['second'] = "minus one"
s3['third'] = "-3.0"
mgr.set_setting('LMI_StorageSetting', s3)
        # check the change is really persisted
mgr = SettingManager(self.config)
mgr.load()
settings = mgr.get_settings("LMI_StorageSetting")
s3 = settings['LMI:StorageSetting:persistent2']
self.assertEqual(s3.id, "LMI:StorageSetting:persistent2")
self.assertEqual(s3.type, Setting.TYPE_PERSISTENT)
self.assertEqual(s3['first'], "-1")
self.assertEqual(s3['second'], "minus one")
self.assertEqual(s3['third'], "-3.0")
def tearDown(self):
# remove any files in configs/save_load/var/
path = self.directory + "/configs/save_load/var/"
shutil.rmtree(path, ignore_errors=True)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | 4,726,737,873,234,810,000 | 39.436548 | 84 | 0.647878 | false |
DBrianKimmel/PyHouse | Project/src/Modules/Computer/Web/test/test_web_utils.py | 1 | 3750 | """
@name: PyHouse/src/Modules/Computer/Web/_test/test_web_utils.py
@author: D. Brian Kimmel
@contact: [email protected]>
@copyright: (c) 2014-2017 by D. Brian Kimmel
@license: MIT License
@note: Created on Jun 29, 2013
@Summary: Test web utilities module
Passed all 7 tests - DBK - 2017-01-12
"""
__updated__ = '2017-01-19'
# Import system type stuff
import xml.etree.ElementTree as ET
from twisted.trial import unittest
import jsonpickle
# Import PyMh files and modules.
from test.xml_data import XML_LONG
from test.testing_mixin import SetupPyHouseObj
from Modules.Computer.Web import web_utils
from Modules.Housing.rooms import Xml as roomsXML
from Modules.Core.Utilities import json_tools
from Modules.Housing.test.xml_housing import \
TESTING_HOUSE_NAME, \
TESTING_HOUSE_KEY, \
TESTING_HOUSE_ACTIVE
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
PY_DATA = [ { 'a123': u'A', 'b': (2, 4), 'c': 3.0 }, 'def D E F' ]
JS_DATA = '{' + '}'
class SetupMixin(object):
def setUp(self, p_root):
self.m_pyhouse_obj = SetupPyHouseObj().BuildPyHouseObj(p_root)
self.m_xml = SetupPyHouseObj().BuildXml(p_root)
def jsonPair(self, p_json, p_key):
""" Extract key, value from json
"""
l_json = json_tools.decode_json_unicode(p_json)
try:
l_val = l_json[p_key]
except (KeyError, ValueError) as e_err:
            l_val = 'Error on JsonPair for key "{}" {} {}'.format(p_key, e_err, l_json)
print(l_val)
return l_val
class Attribs(object):
def_attr = 'Hello World!'
class A0(unittest.TestCase):
def setUp(self):
pass
def test_00_Print(self):
print('Id: test_web_utils')
class C1_Rooms(SetupMixin, unittest.TestCase):
def setUp(self):
SetupMixin.setUp(self, ET.fromstring(XML_LONG))
self.m_api = roomsXML()
def test_01_Room(self):
l_rooms = self.m_api.read_rooms_xml(self.m_pyhouse_obj)
l_json = unicode(json_tools.encode_json(l_rooms))
# print(PrettyFormatAny.form(l_json, 'C1-01-A - Decode'))
# self.assertEqual(self.jsonPair(l_json, 0), l_rooms)
def test_02_Rooms(self):
l_rooms = self.m_api.read_rooms_xml(self.m_pyhouse_obj)
l_json = unicode(json_tools.encode_json(l_rooms))
# print(PrettyFormatAny.form(l_json, 'C1-02-A - Decode'))
class C2_House(SetupMixin, unittest.TestCase):
def setUp(self):
SetupMixin.setUp(self, ET.fromstring(XML_LONG))
def test_01_Before(self):
l_house = self.m_pyhouse_obj.House
# print(PrettyFormatAny.form(l_house, 'C2-01-A - House'))
l_house2 = {}
class D1_Json(SetupMixin, unittest.TestCase):
def setUp(self):
SetupMixin.setUp(self, ET.fromstring(XML_LONG))
def test_01_HouseInfo(self):
l_json = web_utils.GetJSONHouseInfo(self.m_pyhouse_obj)
l_obj = jsonpickle.decode(l_json)
# print(PrettyFormatAny.form(l_obj, 'D1-01-A - House'))
self.assertEquals(l_obj['Name'], TESTING_HOUSE_NAME)
self.assertEquals(l_obj['Key'], TESTING_HOUSE_KEY)
self.assertEquals(l_obj['Active'], TESTING_HOUSE_ACTIVE)
self.assertEquals(l_obj['Controllers'], {})
def test_02_ComputerInfo(self):
l_json = web_utils.GetJSONComputerInfo(self.m_pyhouse_obj)
_l_obj = jsonpickle.decode(l_json)
class E1_Json(SetupMixin, unittest.TestCase):
def setUp(self):
SetupMixin.setUp(self, ET.fromstring(XML_LONG))
def test_01_RoomInfo(self):
l_json = web_utils.GetJSONHouseInfo(self.m_pyhouse_obj)
l_obj = jsonpickle.decode(l_json)
# print(PrettyFormatAny.form(l_obj, 'E1-01-A - Decode'))
# ## END DBK
| mit | 5,935,139,643,977,492,000 | 29.241935 | 88 | 0.6464 | false |
chencoyote/owasp-pysec | pysec/load.py | 1 | 11062 | # Python Security Project (PySec) and its related class files.
#
# PySec is a set of tools for secure application development under Linux
#
# Copyright 2014 PySec development team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- coding: ascii -*-
"""Module to manage more efficently import of modules"""
# ASCII_LETTERS = <str>
# DIGITS = <str>
# HEXDIGITS = <str>
# ModuleType = <type>
# _CACHE = {(<str>,<tuple>): <module>}
# _FIRST_LETTERS = <str>
# _HASHES = {<str>: <built-in function>}
# _OTHER_LETTERS = <str>
# _TAB = {<str>: <dict>}
# base64 = <module base64>
# fd = <module pysec.io.fd>
# hashlib = <module hashlib>
# imp = <module imp>
# os = <module os>
import imp
import os
import hashlib
import base64
from types import ModuleType
from pysec.core import Object
from pysec.io import fd
from pysec import log
from pysec import lang
__name__ = 'pysec.load'
__all__ = 'load_tab', 'import_lib', 'make_line'
# set actions
log.register_actions('LOAD_TAB', 'IMPORT_LIB')
ASCII_LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
DIGITS = '0123456789'
HEXDIGITS = '0123456789abcdefABCDEF'
_HASHES = {
# raise NameError: undefined: getattr
'md5': getattr(hashlib, 'md5'),
# raise NameError: undefined: getattr
'sha1': getattr(hashlib, 'sha1'),
# raise NameError: undefined: getattr
'sha256': getattr(hashlib, 'sha256'),
# raise NameError: undefined: getattr
'sha512': getattr(hashlib, 'sha512'),
}
_FIRST_LETTERS = '_%s' % ASCII_LETTERS
_OTHER_LETTERS = '_%s%s' % (ASCII_LETTERS, DIGITS)
def is_hex(string):
"""Returns True if string is a valid hexadecimal number, otherwise False"""
# string = <str>
# ch = <str>
# return <bool>
return all(ch in HEXDIGITS for ch in string)
def check_libname(name):
"""Returns True if name is a valid string for a library, other False"""
# name = <str>
# ch = <str>
# return <int>|<bool>
name = str(name)
if not name:
return 0
return (name[0] in _FIRST_LETTERS and
all(ch in _OTHER_LETTERS for ch in name[1:]))
def parse_version(version):
"""Parse version string in a tuple, if version is an invalid string
returns None"""
# version = <str>
# vs = <str>
# return <NoneType>|(*<int>)
version = str(version).split('.')
if len(version) != 3:
return None
if all(vs.isdigit() for vs in version):
return tuple(int(vs) for vs in version)
return None
def parse_hashes(hashes):
"""Parse hashes' string in hashes' dict, if it's invalid returns None"""
# hashes = <str>
# _hashes = {<NoneType>: ?}
# hname = <str>
# hs_field = <str>
# hval = <str>
# return {<str>: <str>}|<NoneType>
_hashes = {}
if hashes:
for hs_field in hashes.split(' '):
hname, _, hval = hs_field.strip().partition(':')
hs_field = _HASHES.get(hname, None)
if hs_field is None:
return None
if not is_hex(hval):
return None
if hs_field in _hashes:
return None
_hashes[hs_field] = hval
return _hashes
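# A minimal sketch of the accepted format: space-separated 'name:hexdigest'
# pairs, e.g. (hypothetical digest value):
#
#   parse_hashes('md5:d41d8cd98f00b204e9800998ecf8427e')
#   # -> {hashlib.md5: 'd41d8cd98f00b204e9800998ecf8427e'}
#
# Unknown hash names, non-hex digests or duplicated names make it return None.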
def _hash(path, hs_obj):
"""Calculate the hash of path using hs_obj (a Hash Object)"""
# path = <str>
# hs_obj = <HASH object>
# chunk = <str>
# fmod = <file>
# return <NoneType>
with fd.File.open(path, fd.FO_READEX) as fmod:
chunk = fmod.read(4096)
while chunk:
hs_obj.update(chunk)
chunk = fmod.read(4096)
def get_hash(path, hs_maker):
"""Calculates the hash of module in path"""
# path = <str>
# hs_maker = <function>
# dirpath = <str>
# filenames = [<str>]
# files = [<str>]
# fname = <str>
# fpath = <str>
# hs_mod = <HASH object>
# return <str>
hs_mod = hs_maker()
if os.path.isfile(path):
files = [path]
elif os.path.isdir(path):
files = sorted([os.path.join(dirpath, fname)
for dirpath, _, filenames in os.walk(path)
for fname in filenames
if os.path.isfile(os.path.join(dirpath, fname))])
else:
# raise <instance ImportError>
raise ImportError("invalid file type %r" % path)
for fpath in files:
_hash(fpath, hs_mod)
return hs_mod.hexdigest()
_CACHE = {}
_TAB = {}
class _LazyModule(Object, ModuleType):
"""_LazyModule is a module that instances their attribute in lazy mode"""
# instance.module = <NoneType>|<module>
# instance.name = <str>
def __init__(self, name, version):
# self = <instance load._LazyModule>
# name = <str>
# version = (*<int>)
# return <NoneType>
        # bypass the overridden __setattr__ below, which would otherwise try
        # to resolve self.module before it exists and recurse endlessly
        object.__setattr__(self, 'name', str(name))
        object.__setattr__(self, 'version', version)
        object.__setattr__(self, 'module', None)
def __getattr__(self, name):
# self = <instance load._LazyModule>
# name = <str>
# return <module>
# raise NameError: undefined: getattr
return getattr(self.module or importlib(self.name, self.version), name)
def __setattr__(self, name, value):
# self = <instance load._LazyModule>
# name = <str>
# value = ?
# return <NoneType>
setattr(self.module or importlib(self.name, self.version), name, value)
def __delattr__(self, name):
# self = <instance load._LazyModule>
# name = <str>
# return <NoneType>
delattr(self.module or importlib(self.name, self.version), name)
@log.wrap(log.actions.LOAD_TAB, fields=('path',), lib=__name__)
def load_tab(path):
"""Updates internal tab of modules"""
# path = <str>
# _tab = {<str>: <dict>}
# fields = <str>
# ftab = <instance pysec.io.fd.File>
# hashes = {<str>: <str>}|<NoneType>
# line = <str>
# lineno = <int>
# mod_vers = {<str>: <dict>}
# name = <str>
# version = <NoneType>|(*<int>)
# return <NoneType>
path = os.path.abspath(str(path))
_tab = {}
with fd.File.open(path, fd.FO_READEX) as ftab:
for lineno, line in enumerate(ftab.lines()):
fields = line.strip().split(';')
# name, version, path, hashes
if len(fields) != 4:
# raise <instance ImportError>
raise ImportError(lang.LOAD_WRONG_FIELDS % lineno)
name, version, path, hashes = fields
# name
if not check_libname(name):
# raise <instance ImportError>
raise ImportError(lang.LOAD_WRONG_LIB_NAME % lineno)
# version
version = parse_version(version)
if version is None:
# raise <instance ImportError>
raise ImportError(lang.LOAD_WRONG_VERSION_FORMAT % lineno)
# path
path = os.path.abspath(base64.b64decode(path))
# hashes
hashes = parse_hashes(hashes)
if hashes is None:
# raise <instance ImportError>
raise ImportError(lang.LOAD_WRONG_HASH_FORMAT % lineno)
# update tab
mod_vers = _tab.setdefault(name, {})
if version in mod_vers:
# raise <instance ImportError>
raise ImportError(lang.LOAD_DUP_LIB
% (name, version[0], version[1], version[2]))
mod_vers[version] = {'path': path, 'hash': hashes}
_TAB.update(_tab)
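# Each line of the loader tab parsed above has four ';'-separated fields
# (hypothetical example; the path field is base64-encoded):
#
#   mylib;1.0.2;<base64 of /usr/lib/mylib>;md5:<hex> sha1:<hex>
#
# make_line() below can be used to build such lines.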
@log.wrap(log.actions.IMPORT_LIB,
fields=('name', 'version', 'lazy', '_reload'),
result='module', lib=__name__)
def importlib(name, version=None, lazy=0, _reload=0):
"""Load a library and return it.
name library's name
    version     if it's None, load the latest library, otherwise load the
                version specified
    lazy        if false it returns a normal module, otherwise it returns a
                module placeholder which is loaded the first time that
                it is used
    _reload     if false, search for the library in the cache and return it
                if it exists, otherwise load it. If _reload is true, load the
                library and save it in the cache
"""
# name = <str>
# version = <NoneType>
# lazy = <int>
# _reload = <int>
# desc = <tuple>
# fdir = <str>
# fname = <str>
# fobj = <file>
# hs_maker = <function>
# hval = <str>
# mod = <NoneType>
# mod_info = {<function>: <str>}
# path = <str>
# vers = <NoneType>
# return <instance load._LazyModule>
name = str(name)
vers = _TAB.get(name, None)
if vers is None:
# raise <instance ImportError>
raise ImportError(lang.LOAD_LIB_NOT_FOUND % name)
if version is None:
version = max(vers.iterkeys())
elif version not in vers:
# raise <instance ImportError>
raise ImportError(lang.LOAD_LIB_VER_NOT_FOUND % (name, version))
if not _reload and (name, version) in _CACHE:
return _CACHE[(name, version)]
mod_info = vers.get(version)
try:
imp.acquire_lock()
path = mod_info['path']
if lazy:
            return _LazyModule(name, version)
else:
fdir, fname = os.path.split(path)
for hs_maker, hval in mod_info['hash'].iteritems():
if get_hash(path, hs_maker) != hval:
# raise <instance ImportError>
raise ImportError(lang.LOAD_INVALID_HASH
% (name, version, path, hval))
# raise <instance ImportError>
fobj, path, desc = imp.find_module(os.path.splitext(fname)[0],
[fdir])
# raise <instance ImportError>
mod = imp.load_module(name, fobj, path, desc)
_CACHE[(name, version)] = mod
return mod
finally:
imp.release_lock()
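# Minimal usage sketch (hypothetical file names, library names and versions):
#
#   load_tab('/etc/pysec/load.tab')            # register the known libraries
#   mylib = importlib('mylib', (1, 0, 2))      # load an exact version
#   other = importlib('otherlib', lazy=1)      # placeholder, resolved on first use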
def make_line(path, name, version):
"""Makes a complete string for loader's file"""
# path = <str>
# name = <str>
# version = (*<int>)
# hashes = [<str>]
# hs_func = <function>
# hs_name = <str>
# path64 = <str>
# vs = <int>
# return <str>
path = os.path.abspath(path)
path64 = base64.b64encode(path)
name = str(name)
version = tuple(version)
hashes = []
for hs_name, hs_func in _HASHES.iteritems():
hashes.append('%s:%s' % (hs_name, get_hash(path, hs_func)))
return '%s;%s;%s;%s' % (str(name), '.'.join(str(vs) for vs in version),
path64, ' '.join(hashes))
| apache-2.0 | 2,443,859,515,778,118,700 | 30.696275 | 79 | 0.568975 | false |
skim1420/spinnaker | testing/citest/tests/aws_smoke_test.py | 1 | 19365 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Smoke test to see if Spinnaker can interoperate with Amazon Web Services.
See testable_service/integration_test.py and spinnaker_testing/spinnaker.py
for more details.
The smoke test will use ssh to peek at the spinnaker configuration
to determine the managed project it should verify, and to determine
the spinnaker account name to use when sending it commands.
Sample Usage:
Assuming you have created $PASSPHRASE_FILE (which you should chmod 400)
and $CITEST_ROOT points to the root directory of this repository
(which is . if you execute this from the root)
PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker \
python $CITEST_ROOT/spinnaker/spinnaker_system/smoke_test.py \
--gce_ssh_passphrase_file=$PASSPHRASE_FILE \
--gce_project=$PROJECT \
--gce_zone=$ZONE \
--gce_instance=$INSTANCE
--test_aws_zone=$AWS_ZONE \
--aws_profile=$AWS_PROFILE
or
PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker \
python $CITEST_ROOT/spinnaker/spinnaker_system/smoke_test.py \
--native_hostname=host-running-smoke-test
--test_aws_zone=$AWS_ZONE \
--aws_profile=$AWS_PROFILE
Note that the $AWS_ZONE is not directly used, rather it is a standard
parameter being used to infer the region. The test is going to pick
some different availability zones within the region in order to test kato.
These are currently hardcoded in.
"""
# Standard python modules.
import sys
# citest modules.
import citest.aws_testing as aws
import citest.json_contract as jc
import citest.json_predicate as jp
import citest.service_testing as st
import citest.base
from citest.json_contract import ObservationPredicateFactory
ov_factory = ObservationPredicateFactory()
# Spinnaker modules.
import spinnaker_testing as sk
import spinnaker_testing.gate as gate
from botocore.exceptions import (BotoCoreError, ClientError)
class AwsSmokeTestScenario(sk.SpinnakerTestScenario):
"""Defines the scenario for the smoke test.
The scenario remembers:
* The agent used to talk to gate.
* The name of the unique Spinnaker application we create for this test.
* The name of the load balancer we create used.
"""
@classmethod
def new_agent(cls, bindings):
"""Implements citest.service_testing.AgentTestScenario.new_agent."""
return gate.new_agent(bindings)
def __init__(self, bindings, agent=None):
"""Constructor.
Args:
bindings: [dict] The data bindings to use to configure the scenario.
agent: [GateAgent] The agent for invoking the test operations on Gate.
"""
super(AwsSmokeTestScenario, self).__init__(bindings, agent)
bindings = self.bindings
aws_observer = self.aws_observer
self.autoscaling_client = aws_observer.make_boto_client('autoscaling')
self.ec2_client = aws_observer.make_boto_client('ec2')
self.elb_client = aws_observer.make_boto_client('elb')
self.lb_detail = 'lb'
self.lb_name = '{app}-{stack}-{detail}'.format(
app=bindings['TEST_APP'], stack=bindings['TEST_STACK'],
detail=self.lb_detail)
# We'll call out the app name because it is widely used
# because it scopes the context of our activities.
# pylint: disable=invalid-name
self.TEST_APP = bindings['TEST_APP']
def create_app(self):
"""Creates OperationContract that creates a new Spinnaker Application."""
contract = jc.Contract()
return st.OperationContract(
self.agent.make_create_app_operation(
bindings=self.bindings, application=self.TEST_APP,
account_name=self.bindings['SPINNAKER_AWS_ACCOUNT']),
contract=contract)
def delete_app(self):
"""Creates OperationContract that deletes a new Spinnaker Application."""
contract = jc.Contract()
return st.OperationContract(
self.agent.make_delete_app_operation(
application=self.TEST_APP,
account_name=self.bindings['SPINNAKER_AWS_ACCOUNT']),
contract=contract)
def upsert_load_balancer(self, use_vpc):
"""Creates OperationContract for upsertLoadBalancer.
Calls Spinnaker's upsertLoadBalancer with a configuration, then verifies
that the expected resources and configurations are visible on AWS. See
the contract builder for more info on what the expectations are.
Args:
      use_vpc: [bool] if True configure a VPC, otherwise don't.
"""
bindings = self.bindings
context = citest.base.ExecutionContext()
load_balancer_name = self.lb_name
if use_vpc:
# TODO(ewiseblatt): 20160301
# We're hardcoding the VPC here, but not sure which we really want.
# I think this comes from the spinnaker.io installation instructions.
# What's interesting about this is that it is a 10.* CidrBlock,
# as opposed to the others, which are public IPs. All this is sensitive
# as to where the TEST_AWS_VPC_ID came from so this is going to be
# brittle. Ideally we only need to know the vpc_id and can figure the
# rest out based on what we have available.
subnet_type = 'internal (defaultvpc)'
vpc_id = bindings['TEST_AWS_VPC_ID']
# Not really sure how to determine this value in general.
security_groups = [bindings['TEST_AWS_SECURITY_GROUP_ID']]
security_groups = ['default']
# The resulting load balancer will only be available in the zone of
# the subnet we are using. We'll figure that out by looking up the
# subnet we want.
subnets_response = self.aws_observer.call_method(
context,
self.ec2_client.describe_subnets,
Filters=[{'Name': 'vpc-id', 'Values': [vpc_id]}])
subnet_details = subnets_response['Subnets']
try:
avail_zones = [detail['AvailabilityZone'] for detail in subnet_details]
region = avail_zones[0][:-1]
if len(avail_zones) > 2:
avail_zones = [avail_zones[0], avail_zones[-1]] # just keep two
except KeyError:
raise ValueError('vpc_id={0} appears to be unknown'.format(vpc_id))
else:
# We're assuming that the given region has 'A' and 'B' availability
# zones. This seems conservative but might be brittle since we permit
# any region.
region = bindings['TEST_AWS_REGION']
avail_zones = [region + 'a', region + 'b']
subnet_type = ""
vpc_id = None
security_groups = None
# This will be a second load balancer not used in other tests.
# Decorate the name so as not to confuse it.
load_balancer_name += '-pub'
listener = {
'Listener': {
'InstancePort':80,
'LoadBalancerPort':80
}
}
health_check = {
'HealthyThreshold': 8,
'UnhealthyThreshold': 3,
'Interval': 12,
'Timeout': 6,
'Target':'HTTP:%d/' % listener['Listener']['InstancePort']
}
payload = self.agent.make_json_payload_from_kwargs(
job=[{
'type': 'upsertLoadBalancer',
'cloudProvider': 'aws',
# 'loadBalancerName': load_balancer_name,
'credentials': bindings['SPINNAKER_AWS_ACCOUNT'],
'name': load_balancer_name,
'stack': bindings['TEST_STACK'],
'detail': self.lb_detail,
'region': bindings['TEST_AWS_REGION'],
'availabilityZones': {region: avail_zones},
'regionZones': avail_zones,
'listeners': [{
'internalProtocol': 'HTTP',
'internalPort': listener['Listener']['InstancePort'],
'externalProtocol': 'HTTP',
'externalPort': listener['Listener']['LoadBalancerPort']
}],
'healthCheck': health_check['Target'],
'healthCheckProtocol': 'HTTP',
'healthCheckPort': listener['Listener']['LoadBalancerPort'],
'healthCheckPath': '/',
'healthTimeout': health_check['Timeout'],
'healthInterval': health_check['Interval'],
'healthyThreshold': health_check['HealthyThreshold'],
'unhealthyThreshold': health_check['UnhealthyThreshold'],
'user': '[anonymous]',
'usePreferredZones': True,
'vpcId': vpc_id,
'subnetType': subnet_type,
# If I set security group to this then I get an error it is missing.
# bindings['TEST_AWS_SECURITY_GROUP_ID']],
'securityGroups': security_groups
}],
description='Create Load Balancer: ' + load_balancer_name,
application=self.TEST_APP)
builder = aws.AwsPythonContractBuilder(self.aws_observer)
(builder.new_clause_builder('Load Balancer Added', retryable_for_secs=10)
.call_method(
self.elb_client.describe_load_balancers,
LoadBalancerNames=[load_balancer_name])
.EXPECT(ov_factory.value_list_path_contains(
'LoadBalancerDescriptions',
jp.LIST_MATCHES([jp.DICT_MATCHES({
'HealthCheck':
jp.DICT_MATCHES({
key: jp.EQUIVALENT(value)
for key, value in health_check.items()}),
'AvailabilityZones': jp.LIST_SIMILAR(avail_zones),
'ListenerDescriptions/Listener':
jp.DICT_MATCHES({
key: jp.NUM_EQ(value)
for key, value in listener['Listener'].items()})
})]))
))
title_decorator = '_with_vpc' if use_vpc else '_without_vpc'
return st.OperationContract(
self.new_post_operation(
title='upsert_load_balancer' + title_decorator,
data=payload,
path='tasks'),
contract=builder.build())
def delete_load_balancer(self, use_vpc):
"""Creates OperationContract for deleteLoadBalancer.
To verify the operation, we just check that the AWS resources
created by upsert_load_balancer are no longer visible on AWS.
Args:
use_vpc: [bool] if True delete the VPC load balancer, otherwise
the non-VPC load balancer.
"""
load_balancer_name = self.lb_name
if not use_vpc:
# This is the second load balancer, where we decorated the name in upsert.
load_balancer_name += '-pub'
payload = self.agent.make_json_payload_from_kwargs(
job=[{
'type': 'deleteLoadBalancer',
'cloudProvider': 'aws',
'credentials': self.bindings['SPINNAKER_AWS_ACCOUNT'],
'regions': [self.bindings['TEST_AWS_REGION']],
'loadBalancerName': load_balancer_name
}],
description='Delete Load Balancer: {0} in {1}:{2}'.format(
load_balancer_name,
self.bindings['SPINNAKER_AWS_ACCOUNT'],
self.bindings['TEST_AWS_REGION']),
application=self.TEST_APP)
builder = aws.AwsPythonContractBuilder(self.aws_observer)
(builder.new_clause_builder('Load Balancer Removed')
.call_method(
self.elb_client.describe_load_balancers,
LoadBalancerNames=[load_balancer_name])
.EXPECT(
ov_factory.error_list_contains(
jp.ExceptionMatchesPredicate(
(BotoCoreError, ClientError), 'LoadBalancerNotFound'))))
title_decorator = '_with_vpc' if use_vpc else '_without_vpc'
return st.OperationContract(
self.new_post_operation(
title='delete_load_balancer' + title_decorator,
data=payload,
path='tasks'),
contract=builder.build())
def create_server_group(self):
"""Creates OperationContract for createServerGroup.
To verify the operation, we just check that the AWS Auto Scaling Group
for the server group was created.
"""
bindings = self.bindings
# Spinnaker determines the group name created,
# which will be the following:
group_name = '{app}-{stack}-v000'.format(
app=self.TEST_APP, stack=bindings['TEST_STACK'])
region = bindings['TEST_AWS_REGION']
avail_zones = [region + 'a', region + 'b']
test_security_group_id = bindings['TEST_AWS_SECURITY_GROUP_ID']
test_security_group_id = 'default'
payload = self.agent.make_json_payload_from_kwargs(
job=[{
'type': 'createServerGroup',
'cloudProvider': 'aws',
'application': self.TEST_APP,
'credentials': bindings['SPINNAKER_AWS_ACCOUNT'],
'strategy':'',
'capacity': {'min':2, 'max':2, 'desired':2},
'targetHealthyDeployPercentage': 100,
'loadBalancers': [self.lb_name],
'cooldown': 8,
'healthCheckType': 'EC2',
'healthCheckGracePeriod': 40,
'instanceMonitoring': False,
'ebsOptimized': False,
'iamRole': bindings['AWS_IAM_ROLE'],
'terminationPolicies': ['Default'],
'availabilityZones': {region: avail_zones},
'keyPair': bindings['TEST_AWS_KEYPAIR'],
'suspendedProcesses': [],
# TODO(ewiseblatt): Inquiring about how this value is determined.
# It seems to be the "Name" tag value of one of the VPCs
# but is not the default VPC, which is what we using as the VPC_ID.
# So I suspect something is out of whack. This name comes from
# spinnaker.io tutorial. But using the default vpc would probably
# be more adaptive to the particular deployment.
'subnetType': 'internal (defaultvpc)',
'securityGroups': [test_security_group_id],
'virtualizationType': 'paravirtual',
'stack': bindings['TEST_STACK'],
'freeFormDetails': '',
'amiName': bindings['TEST_AWS_AMI'],
'instanceType': 'm1.small',
'useSourceCapacity': False,
'account': bindings['SPINNAKER_AWS_ACCOUNT'],
'user': '[anonymous]'
}],
description='Create Server Group in ' + group_name,
application=self.TEST_APP)
builder = aws.AwsPythonContractBuilder(self.aws_observer)
(builder.new_clause_builder('Auto Server Group Added',
retryable_for_secs=30)
.call_method(
self.autoscaling_client.describe_auto_scaling_groups,
AutoScalingGroupNames=[group_name])
.EXPECT(
ov_factory.value_list_path_contains(
'AutoScalingGroups',
jp.LIST_MATCHES([jp.DICT_MATCHES({'MaxSize': jp.NUM_EQ(2)})]))
))
return st.OperationContract(
self.new_post_operation(
title='create_server_group', data=payload, path='tasks'),
contract=builder.build())
def delete_server_group(self):
"""Creates OperationContract for deleteServerGroup.
To verify the operation, we just check that the AWS Auto Scaling Group
is no longer visible on AWS (or is in the process of terminating).
"""
bindings = self.bindings
group_name = '{app}-{stack}-v000'.format(
app=self.TEST_APP, stack=bindings['TEST_STACK'])
payload = self.agent.make_json_payload_from_kwargs(
job=[{
'cloudProvider': 'aws',
'type': 'destroyServerGroup',
'serverGroupName': group_name,
'asgName': group_name,
'region': bindings['TEST_AWS_REGION'],
'regions': [bindings['TEST_AWS_REGION']],
'credentials': bindings['SPINNAKER_AWS_ACCOUNT'],
'user': '[anonymous]'
}],
application=self.TEST_APP,
description='DestroyServerGroup: ' + group_name)
builder = aws.AwsPythonContractBuilder(self.aws_observer)
(builder.new_clause_builder('Auto Scaling Group Removed')
.call_method(
self.autoscaling_client.describe_auto_scaling_groups,
AutoScalingGroupNames=[group_name])
.EXPECT(
ov_factory.error_list_contains(
jp.ExceptionMatchesPredicate(
(BotoCoreError, ClientError), 'AutoScalingGroupNotFound')))
.OR(
ov_factory.value_list_path_contains(
'AutoScalingGroups',
jp.LIST_MATCHES([])))
.OR(
ov_factory.value_list_path_contains(
'AutoScalingGroups',
jp.LIST_MATCHES([
jp.DICT_MATCHES({'Status': jp.STR_SUBSTR('Delete'),
'MaxSize': jp.NUM_EQ(0)})])))
)
return st.OperationContract(
self.new_post_operation(
title='delete_server_group', data=payload, path='tasks'),
contract=builder.build())
class AwsSmokeTest(st.AgentTestCase):
"""The test fixture for the SmokeTest.
This is implemented using citest OperationContract instances that are
created by the AwsSmokeTestScenario.
"""
# pylint: disable=missing-docstring
@property
def scenario(self):
return citest.base.TestRunner.global_runner().get_shared_data(
AwsSmokeTestScenario)
@property
def testing_agent(self):
return self.scenario.agent
def test_a_create_app(self):
self.run_test_case(self.scenario.create_app())
def test_b_upsert_load_balancer_public(self):
self.run_test_case(self.scenario.upsert_load_balancer(use_vpc=False))
def test_b_upsert_load_balancer_vpc(self):
self.run_test_case(self.scenario.upsert_load_balancer(use_vpc=True))
def test_c_create_server_group(self):
# We'll permit this to timeout for now
# because it might be waiting on confirmation
# but we'll continue anyway because side effects
# should have still taken place.
self.run_test_case(self.scenario.create_server_group(),
poll_every_secs=5, timeout_ok=True)
def test_x_delete_server_group(self):
self.run_test_case(self.scenario.delete_server_group(),
max_retries=5, poll_every_secs=5)
def test_y_delete_load_balancer_vpc(self):
self.run_test_case(self.scenario.delete_load_balancer(use_vpc=True),
max_retries=5)
def test_y_delete_load_balancer_pub(self):
self.run_test_case(self.scenario.delete_load_balancer(use_vpc=False),
max_retries=5)
def test_z_delete_app(self):
# Give a total of a minute because it might also need
# an internal cache update
self.run_test_case(self.scenario.delete_app(),
retry_interval_secs=8, max_retries=8)
def main():
"""Implements the main method running this smoke test."""
defaults = {
'TEST_STACK': str(AwsSmokeTestScenario.DEFAULT_TEST_ID),
'TEST_APP': 'smoketest' + AwsSmokeTestScenario.DEFAULT_TEST_ID
}
return citest.base.TestRunner.main(
parser_inits=[AwsSmokeTestScenario.initArgumentParser],
default_binding_overrides=defaults,
test_case_list=[AwsSmokeTest])
if __name__ == '__main__':
sys.exit(main())
| apache-2.0 | 5,472,275,676,813,450,000 | 36.240385 | 80 | 0.636871 | false |
kedz/cuttsum | old/python/cuttsum/readers.py | 1 | 2494 | import codecs
import numpy as np
from sklearn.feature_extraction import DictVectorizer
def gold_reader(bow_file, l_file, sim_idx, vector=u'latent'):
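    """Iterate (hour, labels, unicodes, sims, vectors) tuples, one per hour.

    Reads the bag-of-words file and the label file in parallel; `sims` is
    taken from column `sim_idx` of the label file, and `vectors` holds either
    the latent vectors (columns 7 onward) or a sparse bag-of-words matrix
    built with DictVectorizer, depending on the `vector` argument.
    """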
op = codecs.open
sims = []
vectors = []
labels = []
unicodes = []
last_hour = None
with op(bow_file, u'r', u'utf-8') as bf, op(l_file, u'r', u'utf-8') as lf:
header = lf.readline().strip()
b_line = bf.readline()
l_line = lf.readline()
while b_line and l_line:
b_datum = b_line.strip().split(u'\t')
b_hour, b_stream_id, b_sent_id, b_unicode = b_datum[0:4]
bow = {x:1 for x in b_datum[4].split(u' ')}
l_datum = l_line.strip().split(u'\t')
l_hour, l_stream_id, l_sent_id = l_datum[0:3]
sim = float(l_datum[sim_idx])
lvec = [float(x) for x in l_datum[6:]]
b_label = (b_hour, b_stream_id, b_sent_id)
l_label = (l_hour, l_stream_id, l_sent_id)
assert b_label == l_label
if b_hour != last_hour:
if last_hour is not None:
n_points = len(sims)
sims = np.array(sims)
if vector == u'latent':
vectors = np.array(vectors)
elif vector == u'bow':
vctr = DictVectorizer()
vectors = vctr.fit_transform(vectors)
unicodes = np.array(unicodes, dtype=(unicode, 1000))
yield (last_hour, labels, unicodes, sims, vectors)
sims = []
vectors = []
labels = []
unicodes = []
last_hour = b_hour
sims.append(sim)
if vector == u'latent':
vectors.append(lvec)
elif vector == u'bow':
vectors.append(bow)
labels.append(b_label)
unicodes.append(b_unicode)
b_line = bf.readline()
l_line = lf.readline()
if len(vectors) > 0:
n_points = len(sims)
sims = np.array(sims)
if vector == u'latent':
vectors = np.array(vectors)
elif vector == u'bow':
vctr = DictVectorizer()
vectors = vctr.fit_transform(vectors)
unicodes = np.array(unicodes, dtype=(unicode, 1000))
yield (last_hour, labels, unicodes, sims, vectors)
| apache-2.0 | -8,457,494,943,271,030,000 | 32.702703 | 78 | 0.468324 | false |
Karajlug/karajlug | dbot/views.py | 1 | 2174 | # -----------------------------------------------------------------------------
# Karajlug.org
# Copyright (C) 2010 Karajlug community
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# -----------------------------------------------------------------------------
import hashlib
import socket
from django.http import Http404, HttpResponse, HttpResponseForbidden
from django.contrib.auth.models import User
from django.views.decorators.csrf import csrf_exempt
from forms import WebServiceForm
@csrf_exempt
def webservice(request):
"""
Simple HTTP POST service.
"""
if not request.method == "POST":
raise Http404()
form = WebServiceForm(request.POST)
if form.is_valid():
try:
user = User.objects.get(username=form.cleaned_data["user"])
except User.DoesNotExist:
raise Http404()
m = hashlib.sha1()
m.update("%s%s" % (form.cleaned_data["msg"],
user.password))
hash_ = m.hexdigest()
if not hash_ == form.cleaned_data["hash"]:
return HttpResponseForbidden()
sock = socket.socket(socket.AF_UNIX,
socket.SOCK_STREAM)
try:
sock.connect("/tmp/socket")
sock.send(form.cleaned_data["msg"])
sock.recv(1024)
except socket.error:
pass
sock.close()
return HttpResponse("0")
else:
return HttpResponse(form.errors)
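# Rough client-side sketch (hypothetical URL and values; the view expects the
# form fields 'user', 'msg' and 'hash', where 'hash' is the SHA1 of the
# message concatenated with the user's stored password hash):
#
#   import hashlib, urllib, urllib2
#   msg = 'hello channel'
#   digest = hashlib.sha1('%s%s' % (msg, stored_password_hash)).hexdigest()
#   data = urllib.urlencode({'user': 'someuser', 'msg': msg, 'hash': digest})
#   urllib2.urlopen('http://example.org/dbot/webservice/', data)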
| gpl-2.0 | 284,063,036,326,837,600 | 33.507937 | 79 | 0.596136 | false |
tos-kamiya/agec2 | src/gen_ngram.py | 1 | 25223 | #!/usr/bin/env python
#coding: utf-8
__author__ = 'Toshihiro Kamiya <[email protected]>'
__status__ = 'experimental'
import collections
import os
import sys
import datetime
from _utilities import sort_uniq
import asm_manip as am
import ope_manip as om
import precomp_manip as pm
UNTRACKED_CLAZS = frozenset([
"java/lang/StringBuilder",
"java/util/Iterator"
])
UNDIGGED_METHODS = frozenset([
'getClass:()Ljava/lang/Class;',
'equals:(Ljava/lang/Object;)Z',
'hashCode:()I',
'compareTo:(Ljava/lang/Object;)I',
'toString:()Ljava/lang/String;',
'get:(Ljava/lang/Object;)Ljava/lang/Object;',
'put:(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;',
'getProperty:(Ljava/lang/Object;)Ljava/lang/Object;',
])
def to_ngram_tuples_iter(found_ngrams):
for head, tails in sorted(found_ngrams.iteritems(), key=lambda h_ts: h_ts[0]):
ngram_tuples = []
for tail in tails:
ngram = (head,) + tail
buf = []
for c_m, frame in ngram:
claz, method = frame.claz_method
c, m = c_m
s = m if c is None else "%s.%s" % c_m
buf.append((s, "%s.%s,%d" % (claz, method, frame.index), frame.depth))
ngram_tuple = tuple(buf)
ngram_tuples.append(ngram_tuple)
yield sort_uniq(ngram_tuples)
# Drop intermediate path data from each n-gram and merge the n-grams.
# Printed n-grams do not include such intermediate path data, so two n-grams
# which start at the same position and the same depth but along distinct paths
# cannot be distinguished in the output and would look like mere duplication.
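# Each output n-gram line produced in main() is a tab-separated triple
# (hypothetical values):
#
#   java/io/Writer.write:(Ljava/lang/String;)V   org/myapp/MyClass.run:()V,12   0
#
# i.e. the invoked class.method, the call site "class.method,index" and its call depth.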
def make_method2claz2code(sig2code):
method2claz2code = {}
for (claz, sig), code in sorted(sig2code.iteritems()):
method2claz2code.setdefault(sig, {})[claz] = code
return method2claz2code
def make_claz2methods(method2claz2code):
claz2methods = {}
for m, c2c in method2claz2code.iteritems():
for c in c2c.iterkeys():
claz2methods.setdefault(c, []).append(m)
return claz2methods
class StackFrame(object):
def __init__(self, claz_method, index, prev_frame):
self.claz_method = claz_method
self.index = index
self.prev_frame = prev_frame
self.depth = 0 if prev_frame is None else prev_frame.depth + 1
def __eq__(self, other):
return other is not None and \
self.claz_method == other.claz_method and self.index == other.index and \
self.depth == other.depth and self.prev_frame == other.prev_frame
def __lt__(self, other):
if other is None:
return False
if self.claz_method < other.claz_method:
return True
elif self.claz_method == other.claz_method:
if self.index < other.index:
return True
elif self.index == other.index:
if self.depth < other.depth:
return True
elif self.depth == other.depth:
return self.prev_frame < other.prev_frame
def __hash__(self):
return hash(self.claz_method) + hash(self.index) + hash(self.depth) # prev_frame is not used in hash computation
def copy(self, index=None):
return StackFrame(self.claz_method, index if index is not None else self.index, self.prev_frame)
def __repr__(self):
return "StackFrame(%s,%s,*,depth=%d)" % (repr(self.claz_method), repr(self.index), self.depth) # prev_frame is not printed
class CodeNgramGenerator:
def __init__(self, method2claz2precomp):
self.method2claz2precomp = method2claz2precomp
self.ngram_size = 6
self.max_call_depth = -1
self.allow_repetitive_ngram = False
self.no_branch_ngram = False
self.no_returning_execution_path = False
self.use_undigg_method_list = False
self.count_branch_in_surface_level = False
self.clear_temp()
def clear_temp(self):
self._claz_method0 = None
self._max_call_depth = None
self._stack_already_printed_on_raise = False
def _remove_repetition(self, cur_gram):
if self.allow_repetitive_ngram:
return 0
for replen in range(1, min(len(cur_gram), self.ngram_size) // 2 + 1):
for j in range(-1, -1 - replen, -1):
c, r = cur_gram[j], cur_gram[j - replen]
if c[0] != r[0]:
break # for j
else:
del cur_gram[-replen:]
return replen
return 0
def _setup_temp(self, claz, method):
self._claz_method0 = claz, method
self._max_call_depth = self.max_call_depth if self.max_call_depth >= 0 else \
self.ngram_size * (-self.max_call_depth)
self._stack_already_printed_on_raise = False
        self._found_grams = {}  # head item -> set of tuples of tail items
        # here, head is the first item of an n-gram and tail is the remaining items
def gen_ngrams(self, claz, method):
self._setup_temp(claz, method)
self._dig_method(self._max_call_depth , [], (claz, method), None, None)
self.clear_temp()
return self._found_grams
def _dig_method(self, dig_count, cur_gram, claz_method, prev_frame, prev_footmarks_frame,
start_cell=None, is_return_dig=False):
if not is_return_dig:
p = self.method2claz2precomp[claz_method[1]][claz_method[0]]
if start_cell is None:
start_cell = p.start_cell
cur_frame = StackFrame(claz_method, start_cell[0], prev_frame)
cur_footmarks_frame = [], prev_footmarks_frame
else:
assert claz_method is None
assert start_cell is None
cur_frame = prev_frame
claz_method = cur_frame.claz_method
p = self.method2claz2precomp[claz_method[1]][claz_method[0]]
start_cell = p.cells[cur_frame.index][1]
cur_footmarks_frame = prev_footmarks_frame[0][:], prev_footmarks_frame[1]
cur_block_entrance_cells = p.bent_cells
depth = cur_frame.depth
try:
branches = []
def dig_branch(dig_count, cur_gram, cur_cell, cur_footmarks_frame):
footmarks = cur_footmarks_frame[0]
while True:
index, next_cell, precomp_cmd, precomp_arg = cur_cell
if index in cur_block_entrance_cells:
if index in footmarks:
break # while True
footmarks.append(index)
                        # in levels deeper than the surface, branches are counted
                        # in order to avoid interpreting too-complex control dependencies
if self.count_branch_in_surface_level or depth > 0:
if dig_count <= 0:
break # while True
dig_count -= 1
if precomp_cmd == pm.INVOKE:
stk = cur_frame.copy(index)
c_m = c, m = precomp_arg
if cur_gram and dig_count > 0 and self._is_method_digg_target(c, m, cur_gram):
c2p = self.method2claz2precomp.get(m)
if c2p:
cs = sorted(c2p.iterkeys()) if c is None else \
[c] if c in c2p else \
[]
for c2 in cs:
c2_m = (c2, m)
if not CodeNgramGenerator.is_recursion(c2_m, cur_frame):
self._dig_method(dig_count - 1, cur_gram[-self.ngram_size:], c2_m, stk, cur_footmarks_frame)
cur_gram.append((c_m, stk))
if self._remove_repetition(cur_gram) == 0 and len(cur_gram) >= self.ngram_size:
if self._is_escaping(cur_gram[-self.ngram_size][1]):
break # while True
cand_gram = tuple(cur_gram[-self.ngram_size:])
if not self._store_if_new_ngram(cand_gram):
break # while True
elif precomp_cmd == pm.RETURN:
if self.no_returning_execution_path:
break # while True
if cur_frame.prev_frame is not None:
self._dig_method(dig_count, cur_gram, None,
cur_frame.prev_frame, cur_footmarks_frame[1], is_return_dig=True)
break # while True
elif precomp_cmd == pm.GOTO:
if not self.no_branch_ngram:
next_cell = precomp_arg
elif precomp_cmd == pm.BRANCHS:
if not self.no_branch_ngram:
branches.extend((dig_count, cur_gram[-self.ngram_size:], dc, (footmarks[:], prev_footmarks_frame)) \
for dc in precomp_arg)
elif precomp_cmd == pm.THROW:
break # while True
else:
assert False
cur_cell = next_cell
dig_branch(dig_count, cur_gram, start_cell, cur_footmarks_frame)
while branches:
b = branches.pop()
dig_branch(*b)
except:
self._print_stack(cur_frame)
raise
@staticmethod
def is_recursion(claz_method, frame):
method = claz_method[1]
while frame:
if method == frame.claz_method[1]:
return True
frame = frame.prev_frame
return False
def _store_if_new_ngram(self, cand_gram):
assert len(cand_gram) >= 1
tails = self._found_grams.setdefault(cand_gram[0], set())
tail = tuple(cand_gram[1:])
if tail in tails:
return False
tails.add(tail)
return True
def _is_method_digg_target(self, c, method, cur_gram):
assert method
if self.use_undigg_method_list and method in UNDIGGED_METHODS:
return False
if c is None and method.endswith(":()V"):
return False
for i in xrange(0, min(len(cur_gram), self.ngram_size - 1)):
if cur_gram[-i-1][0][1] == method:
return False
return True
def _is_escaping(self, head_frame):
return head_frame.depth != 0 # escaped from the original method?
        # a head item of an n-gram always comes from the original method.
        # if not (that is, the head item comes from some method called by the original method),
        # such a code fragment is not part of the original method, but part of the called method.
def _print_stack(self, frame):
if self._stack_already_printed_on_raise:
return
buf = []
while frame:
buf.append((frame.claz_method[0], frame.claz_method[1], frame.index))
frame = frame.prev_frame
sys.stderr.write("debug info> cur_call_stack = [%s]\n" % ", ".join("%s.%s:%d" % f for f in buf))
self._stack_already_printed_on_raise = True
class CodeNgramGeneratorWStartIndices(CodeNgramGenerator):
def clear_temp(self):
CodeNgramGenerator.clear_temp(self)
self._start_index = None
def gen_ngrams(self, claz, method, start_indices):
self._setup_temp(claz, method)
for start_index in start_indices:
self._start_index = start_index
claz2precomp = self.method2claz2precomp[method]
precomp_cells = claz2precomp[claz].cells
head_cell = precomp_cells[start_index]
self._dig_method(self._max_call_depth, [], (claz, method), None, None, head_cell)
self.clear_temp()
return self._found_grams
def _is_escaping(self, head_frame):
if self._start_index is not None:
if head_frame.claz_method == self._claz_method0 and head_frame.index != self._start_index:
return True
return CodeNgramGenerator._is_escaping(self, head_frame)
def gen_code_ngrams(claz, method, method2claz2precomp, ngram_size, start_indices=None,
max_call_depth=-1, allow_repetitive_ngram=False, no_branch_ngram=False,
no_returning_execution_path=False, use_undigg_method_list=False,
count_branch_in_surface_level=False):
if start_indices:
cng = CodeNgramGeneratorWStartIndices(method2claz2precomp)
else:
cng = CodeNgramGenerator(method2claz2precomp)
cng.ngram_size = ngram_size
cng.max_call_depth = max_call_depth
cng.allow_repetitive_ngram = allow_repetitive_ngram
cng.no_branch_ngram = no_branch_ngram
cng.no_returning_execution_path = no_returning_execution_path
cng.use_undigg_method_list = use_undigg_method_list
cng.count_branch_in_surface_level = count_branch_in_surface_level
if start_indices:
return cng.gen_ngrams(claz, method, start_indices)
else:
return cng.gen_ngrams(claz, method)
def identify_claz(method2claz2code, class_patterns):
exclude_clazs = frozenset([e for e in class_patterns if not e.endswith('/*')])
exclude_packages = frozenset([e[:-1] for e in class_patterns if e.endswith('/*')])
clazs = set()
for method, claz2code in method2claz2code.iteritems():
clazs.update(claz2code.iterkeys())
clazs_tobe_excluded = set()
for claz in sorted(clazs):
if claz in exclude_clazs:
clazs_tobe_excluded.add(claz)
else:
p = claz.rfind('/')
if p >= 0:
package = claz[:p + 1] # include trailing '/'
if package in exclude_packages:
clazs_tobe_excluded.add(claz)
return clazs_tobe_excluded
def identify_target_claz_method(method2claz2code,entry_class_patterns):
if entry_class_patterns:
claz_set = frozenset(identify_claz(method2claz2code, entry_class_patterns))
claz_method_list = sorted((claz, method) for method, claz2pre in method2claz2code.iteritems() \
for claz in claz2pre.iterkeys() if claz in claz_set)
else:
claz_method_list = sorted((claz, method) for method, claz2pre in method2claz2code.iteritems() \
for claz in claz2pre.iterkeys())
claz_method_count = collections.Counter()
for claz, method in claz_method_list:
claz_method_count[claz] += 1
return claz_method_list, claz_method_count
def exclude_clazs(method2claz2code, excludeded_class_patterns):
removed_clazs = identify_claz(method2claz2code, excludeded_class_patterns)
for method, claz2code in method2claz2code.items():
for c in removed_clazs.intersection(claz2code.iterkeys()):
del claz2code[c]
if len(claz2code) == 0:
del method2claz2code[method]
return removed_clazs
def exclude_ctors(method2claz2code):
ctors = [m for m in method2claz2code.iterkeys() \
if m.startswith('"<init>"') or m.startswith("access$")]
for m in ctors:
del method2claz2code[m]
return ctors
def remove_too_many_definition_methods(method2claz2code, max_method_definition):
assert max_method_definition > 0
too_many_definition_methods = [method \
for method, claz2code in method2claz2code.iteritems() \
if len(claz2code) > max_method_definition]
for m in too_many_definition_methods:
del method2claz2code[m]
return too_many_definition_methods
def do_filtering_clazs(write, method2claz2code, excluded_class_patterns):
if excluded_class_patterns:
removed_clazs = exclude_clazs(method2claz2code, excluded_class_patterns)
write("removed classes by --exclude option(s): %d\n" % \
len(removed_clazs))
def do_filtering_methods(write, method2claz2code, include_ctors, max_method_definition):
if not include_ctors:
ctors = exclude_ctors(method2claz2code)
write("removed ctors: %d\n" % len(ctors))
if max_method_definition > 0:
too_many_definition_methods = remove_too_many_definition_methods(
method2claz2code, max_method_definition)
write("removed methods by option --max-definition=%d: %d\n" % \
(max_method_definition, len(too_many_definition_methods)))
def gen_argpsr():
from argparse import ArgumentParser
from _version_data import VERSION
psr = ArgumentParser(description='Generate n-grams of method calls')
psr.add_argument('-a', '--asm-directory', action='store', required=True)
psr.add_argument('-n', '--ngram-size', action='store', type=int, default=6)
psr.add_argument('-v', '--verbose', action='store_true')
psr.add_argument('--max-call-depth', action='store', type=int, default=-2,
                     help='max depth in expanding method calls. negative number means scale factor to n-gram size. (default is -2, that is, 2 * n-gram size.)')
psr.add_argument('--max-method-definition', action='store', type=int, default=-1,
                     help='max method definitions for a signature. -1 means unlimited')
psr.add_argument('--allow-repetitive-ngram', action='store_true')
psr.add_argument('--no-branch-ngram', action='store_true')
psr.add_argument('-e', '--exclude', action='append',
help="specify class in fully-qualified name, e.g. org/myapp/MyClass$AInnerClass. a wildcard '*' can be used as class name, e.g. org/myapp/*")
psr.add_argument('--entry', action='append',
help="class to be a entry point of abstract interpretation. specify class in fully-qualified name. wildcard can be used.")
psr.add_argument('--include-ctors', action='store_true',
help='include "<init>" and access$... methods as targets')
grp = psr.add_mutually_exclusive_group(required=False)
grp.add_argument('--mode-diagnostic', action='store_true',
help='show bytecode info and the filtering results')
grp.add_argument('--mode-method-signature', action='store_true',
help='show method signatures')
grp.add_argument('--mode-method-body', action='store_true',
help='show method bodies (byte code)')
psr.add_argument('--debug-wo-leaf-class-dispatch-optimization', action='store_true')
psr.add_argument('--debug-no-returning-execution-path', action='store_true')
psr.add_argument('--debug-count-branch-in-surface-level', action='store_true')
psr.add_argument('--version', action='version', version='%(prog)s ' + VERSION)
return psr
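# Example invocation (hypothetical directory; the asm directory is expected to
# contain disassembly files readable by asm_manip.get_asm_info_iter):
#
#   python gen_ngram.py -a ./asm_dir -n 6 --max-method-definition 30 > ngrams.txt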
def is_untracked_method_call(c, m):
return c in UNTRACKED_CLAZS or m.find("access$") >= 0
def main(argv):
psr = gen_argpsr()
args = psr.parse_args(argv[1:])
max_method_definition = max(-1, args.max_method_definition)
excluded_class_patterns = frozenset(args.exclude if args.exclude else [])
entry_class_patterns = frozenset(args.entry if args.entry else [])
verbose = args.verbose
debug_wo_leaf_class_dispatch_optimization = args.debug_wo_leaf_class_dispatch_optimization
debug_no_returning_execution_path = args.debug_no_returning_execution_path
if verbose:
def verbose_write(mes): sys.stderr.write("> %s" % mes)
else:
def verbose_write(mes): pass
if not os.path.isdir(args.asm_directory):
sys.exit("error: fail to access asm_directory: %s" % args.asm_directory)
sig2code = {}
#sig2exceptiontable = {}
#sig2linenumbertable = {}
claz2deriving = {} # claz -> list of the clazs that inherit it
for typ, values in am.get_asm_info_iter(args.asm_directory):
if typ == am.METHOD_CODE:
claz_sig, code, etbl, ltbl = values
sig2code[claz_sig] = tuple(code)
#sig2exceptiontable[sig] = etbl
#sig2linenumbertable[sig] = ltbl
elif typ == am.INHERITANCE:
claz, imps, exts = values
for e in exts:
claz2deriving.setdefault(e, []).append(claz)
if args.mode_method_signature:
for claz_sig in sorted(sig2code.iterkeys()):
sys.stdout.write('%s.%s\n' % claz_sig.encode('utf-8'))
elif args.mode_method_body:
for claz_sig, method_body in sorted(sig2code.iteritems()):
ol = om.body_text_to_ope_list(method_body, claz_sig)
try:
om.verify_branch_ope(ol)
except om.InvalidOpe as e:
raise om.InvalidOpe("%s.%s: %s" % (claz_sig[0], claz_sig[1], str(e)))
sys.stdout.write('%s.%s\n' % claz_sig)
for L in om.format_ope_list(ol): #, fields=om.FORMAT_FIELD.OPE):
sys.stdout.write('%s\n' % L)
else:
sig2oplist = {}
for claz_sig, method_body in sorted(sig2code.iteritems()):
ol = om.body_text_to_ope_list(method_body, claz_sig)
sig2oplist[claz_sig] = ol
del sig2code
method2claz2code = make_method2claz2code(sig2oplist)
del sig2oplist
claz2methods = make_claz2methods(method2claz2code)
do_filtering_clazs(verbose_write, method2claz2code, excluded_class_patterns)
do_filtering_methods(verbose_write, method2claz2code, args.include_ctors, max_method_definition)
claz_method_list, claz_method_count = identify_target_claz_method(method2claz2code,entry_class_patterns)
if args.mode_diagnostic:
sys.stdout.write("classes: %d\n" % len(claz_method_count))
sys.stdout.write("method bodies: %d\n" % sum(claz_method_count.itervalues()))
m2ccount = collections.Counter()
for m, c2c in method2claz2code.iteritems():
m2ccount[m] += len(c2c)
mccounts = sorted(((m, c) for m, c in m2ccount.iteritems()), key=lambda m_c: m_c[1], reverse=True)
sys.stdout.write("method having many definitions:\n")
for m, c in mccounts:
if c < 50: break # for m, c
sys.stdout.write(" %4d %s\n" % (c, m))
return
if debug_wo_leaf_class_dispatch_optimization:
claz2methods = claz2deriving = None
method2claz2precomp = {}
claz_method_tables = pm.ClazMethodTables(claz2methods, claz2deriving, is_untracked_method_call)
for method, c2c in method2claz2code.iteritems():
for claz, ope_list in c2c.iteritems():
# if claz == "org/gjt/sp/jedit/bufferio/BufferSaveRequest" and method == "run:()V":
# assert True
precomp = pm.precompile_code(claz, ope_list,
claz_method_tables=claz_method_tables,
remove_repetition=not args.allow_repetitive_ngram)
method2claz2precomp.setdefault(method, {})[claz] = precomp
del claz_method_tables
del method2claz2code
sys.stdout.write("# --ngram-size=%d\n" % args.ngram_size)
sys.stdout.write("# --max-call-depth=%d\n" % args.max_call_depth)
sys.stdout.write("# --max-method-definition=%d\n" % max_method_definition)
if args.allow_repetitive_ngram:
sys.stdout.write("# --allow-repetitive-ngram\n")
if args.no_branch_ngram:
sys.stdout.write("# --no-branch-ngram\n")
if args.include_ctors:
sys.stdout.write("# --include-ctors\n")
for e in excluded_class_patterns:
sys.stdout.write("# --exclude=%s\n" % e)
for e in entry_class_patterns:
sys.stdout.write("# --entry=%s\n" % e)
sys.stdout.write('\n')
prev_claz = None
for i, (claz, method) in enumerate(claz_method_list):
code = method2claz2precomp[method][claz]
if verbose and claz != prev_claz:
t = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
s = '%s (%d-%d of %d) %s\n' % (t, i+1, i+1 + claz_method_count[claz] - 1, len(claz_method_list), claz)
verbose_write(s.encode('utf-8'))
prev_claz = claz
found_ngrams = gen_code_ngrams(claz, method, method2claz2precomp, args.ngram_size,
max_call_depth=args.max_call_depth, allow_repetitive_ngram=args.allow_repetitive_ngram,
no_branch_ngram=args.no_branch_ngram, no_returning_execution_path=debug_no_returning_execution_path,
use_undigg_method_list=debug_wo_leaf_class_dispatch_optimization,
count_branch_in_surface_level=args.debug_count_branch_in_surface_level)
for ngrams in to_ngram_tuples_iter(found_ngrams):
for ngram in ngrams:
sys.stdout.write(''.join("%s\t%s\t%d\n" % op_loc_dep for op_loc_dep in ngram))
sys.stdout.write('\n')
if __name__ == '__main__':
main(sys.argv)
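# Illustrative invocation (hedged sketch: the script name is a placeholder and
# the positional asm_directory / --ngram-size options are assumed to be defined
# in the part of gen_argpsr() that precedes this excerpt; --exclude and --entry
# are the options added above):
#
#     python ngram_extractor.py path/to/asm_dir --ngram-size 6 \
#         --exclude 'org/myapp/generated/*' --entry 'org/myapp/Main'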
| mit | 5,821,878,241,560,408,000 | 43.960784 | 153 | 0.587638 | false |
NaviNet/ImageFeeder | imagefeeder/imagefeeder.py | 1 | 19421 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014 NaviNet Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Watches directories and sends images to Eyes.
"""
import argparse
from distutils import dir_util
import logging
import os
import re
import Queue
import shutil
import threading
import time
from applitools import errors
from applitools import eyes
import glob
from watchdog import events
import eyeswrapper
import watchdir
_DONE_BASE_NAME = 'done'
_FAILURE_DIR_NAME = 'FAILED'
_SUCCESS_DIR_NAME = 'DONE'
_INDEX = None # A nonnegative integer, or None to disable indexing
_DEFAULT_SEP = '_'
_LOGGER = logging.getLogger(__name__)
_TIMEOUT = 300 # In seconds
# The Applitools Eyes Team License limits the number of concurrent
# tests to n + 1, where n is the number of team members. However,
# Applitools does not enforce this limit; until they do, we are free to
# test as much as we want.
_MAX_CONCURRENT_TESTS = 0
_CONCURRENT_TEST_QUEUE = None
def _make_empty_directory(path):
"""Clears a directory or deletes a regular file.
Deletes whatever the path refers to (if anything) and creates an
empty directory at that path.
Args:
path: The path to make point to an empty directory.
"""
_LOGGER.debug('Clearing directory: {}'.format(path))
if os.path.isdir(path):
shutil.rmtree(path)
elif os.path.exists(path):
os.remove(path)
dir_util.mkpath(path)
class DirectoryGlobEventHandler(events.FileSystemEventHandler):
"""Event handler for new directories matching a glob.
"""
def __init__(self, stop_event, **kwargs):
"""Initializes the event handler.
Args:
stop_event: An Event to set to stop watching.
batch_info: A BatchInfo or None.
base_path: The literal existing part of the watched
directory.
host_app: A browser name or None.
host_os: An OS name or None.
overwrite_baseline: Whether to overwrite the baseline.
patterns: An iterable of file name globs.
sep: The host information separator, set by --sep.
test_name: The name of the Eyes test.
"""
self._patterns = kwargs.pop('patterns', ['*'])
self._batch_info = kwargs.pop('batch_info', None)
self._host_app = kwargs.pop('host_app', None)
self._host_os = kwargs.pop('host_os', None)
self._sep = kwargs.pop('sep', _DEFAULT_SEP)
self._base_path = kwargs.pop('base_path')
self._stop_event = stop_event
processing_dir = os.path.join(os.path.dirname(self._base_path),
watchdir.PROCESSING_DIR_NAME)
if os.path.isfile(processing_dir):
os.remove(processing_dir)
_LOGGER.info('Processing directory: {}'.format(processing_dir))
super(self.__class__, self).__init__(**kwargs)
if (self._base_path == self._patterns[0] and
os.path.isdir(self._base_path)):
# Watch a non-glob immediately.
self._watch(self._base_path)
stop_event.set()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def on_created(self, event):
"""Handles the creation of a new file.
If the new file is a directory and it matches one of the event
handler's globs, it watches it for new images to send to Eyes.
Args:
event: The Event representing the creation of a new file.
"""
src_path = event.src_path
matched_pattern = _matches_any_pattern(src_path, self._patterns)
if matched_pattern:
_LOGGER.info('Created: {} (matching {})'.format(src_path,
matched_pattern))
if event.is_directory:
self._watch(src_path)
else:
_LOGGER.warn('Not a directory: {}'.format(src_path))
def _watch(self, src_path):
"""Watches a directory to send new images to Eyes.
Args:
src_path: The path to watch.
"""
host_os, host_app = _get_app_environment(src_path, self._sep)
watchdir.watch(src_path, WindowMatchingEventHandler,
base_path=self._base_path, batch_info=self._batch_info,
host_app=self._host_app or host_app,
host_os=self._host_os or host_os,
watched_path=src_path, test_name=src_path)
class _GrowingList(list):
"""List that grows when needed.
"""
def __setitem__(self, index, value):
"""Sets the value at an index.
If the index is out of bounds, grows the list to be long
enough, filling unspecified indexes with None.
Args:
index: An index.
value: A value.
"""
if index >= len(self):
self.extend([None] * (index + 1 - len(self)))
super(self.__class__, self).__setitem__(index, value)
class WindowMatchingEventHandler(eyeswrapper.EyesWrapper,
watchdir.CreationEventHandler):
"""Event handler for moving new files and uploading them to Eyes.
"""
def __init__(self, stop_event, **kwargs):
"""Initializes the event handler.
Args:
stop_event: An Event to set when it is time to stop
watching.
"""
# pylint: disable=super-init-not-called
self._next_index = _INDEX or 0
self._path_cache = _GrowingList()
self._stop_event = stop_event
self._timer = None
for base in self.__class__.__bases__:
base.__init__(self, **kwargs)
def _process(self):
"""Sends new files to Applitools.
Each image file may include an integer index somewhere in its
name. If enabled by --index, this method uploads them in order
of their indexes. If two files include the same integer, only
the first is used.
Stops watching when the "done" file (set by --done) appears in
the queue or when the time-out interval passes without a new
file appearing.
"""
while not self.driver:
# Wait for Eyes to have opened.
time.sleep(0.1)
_CONCURRENT_TEST_QUEUE.put(None)
while True:
path = self._backlog.get()
if self._timer:
self._timer.cancel()
basename = os.path.basename(path)
if basename == _DONE_BASE_NAME:
self._stop()
break
match = _INDEX is None or re.search(r'\d+', basename)
if match:
# The file has an index and should be uploaded, or
# indexing has been disabled.
if _INDEX is None:
matched_index = self._next_index
else:
matched_index = int(match.group())
if matched_index < self._next_index:
_LOGGER.warn(
'Ignoring file with repeated index: {}'.format(path))
else:
self._path_cache[matched_index] = path
# Upload as many files from the cache as possible
# without skipping any indexes.
try:
while self._path_cache[self._next_index]:
eyeswrapper.match(
self.eyes, self._path_cache[self._next_index])
self._next_index += 1
except IndexError:
# We have run off the end of the cache. This is
# expected when the cache has no holes in it.
pass
else:
_LOGGER.warn('No index in file name: {}'.format(path))
_LOGGER.debug('File cache, starting at index {}: {}'.format(
self._next_index + 1, self._path_cache[self._next_index + 1:]))
_LOGGER.debug('Setting timer for {} s'.format(_TIMEOUT))
self._timer = threading.Timer(_TIMEOUT, self._time_out)
self._timer.start()
def _time_out(self):
"""Stop watching because of a time-out.
"""
_LOGGER.debug('Timing out')
self._stop()
def _stop(self):
"""Stops watching.
"""
self._stop_event.set()
def __exit__(self, exc_type, exc_value, traceback):
"""Ends the Eyes test and moves files.
Moves files on completion of a test. The destination directory
depends on whether the Eyes test succeeded or failed.
Args:
exc_type: The type of the raised exception.
exc_value: The raised exception.
traceback: The traceback.
"""
try:
# Upload whatever files are left.
for path in self._path_cache[self._next_index:]:
if path:
eyeswrapper.match(self.eyes, path)
# Allow another path to be watched.
_CONCURRENT_TEST_QUEUE.get()
_CONCURRENT_TEST_QUEUE.task_done()
# Close Eyes.
super(self.__class__, self).__exit__(exc_type, exc_value,
traceback)
except errors.NewTestError as error:
_LOGGER.info(error)
final_dir_name = _SUCCESS_DIR_NAME
except errors.TestFailedError as error:
_LOGGER.info(error)
final_dir_name = _FAILURE_DIR_NAME
else:
final_dir_name = _SUCCESS_DIR_NAME
finally:
final_dir = os.path.join(os.path.dirname(self._base_path),
final_dir_name)
base_path_final_copy = os.path.join(
final_dir, os.path.basename(self._base_path))
watched_path_final_copy = os.path.join(
base_path_final_copy,
os.path.relpath(self._watched_path, self._base_path))
_make_empty_directory(watched_path_final_copy)
_LOGGER.debug('Moving {} to {}'.format(
self._watched_path_copy, watched_path_final_copy))
if os.path.isdir(watched_path_final_copy):
shutil.rmtree(watched_path_final_copy)
elif os.path.exists(watched_path_final_copy):
os.remove(watched_path_final_copy)
os.rename(self._watched_path_copy, watched_path_final_copy)
def _get_app_environment(path, sep):
"""Get the host OS and browser.
    Finds the nearest parent directory of the watched path with three or
more instances of sep and splits on it. The host OS and browser are
the last two fields but one.
Args:
path: The path in which to find the host information.
sep: The separator. If false, simply returns None for both.
Returns:
An iterable of two elements: the host OS and browser, which are
both strings or both None.
"""
prev_path = None
while path != prev_path and sep:
head, tail = os.path.split(path)
fields = tail.split(sep)
if len(fields) > 3:
return fields[-3:-1]
prev_path = path
path = head
return None, None
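# Illustrative behaviour (doctest-style sketch; the directory name below is
# made up):
#
#     >>> _get_app_environment('/drops/shots_Windows7_chrome_042/run1', '_')
#     ['Windows7', 'chrome']
#
# 'run1' contains no separator, so the nearest parent whose name has three or
# more '_' separators is 'shots_Windows7_chrome_042'; the host OS and browser
# are the last two fields but one.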
def _parse_args():
"""Parse command line arguments.
Returns:
A Namespace containing the parsed arguments.
"""
parser = argparse.ArgumentParser()
parser.add_argument(
'patterns', nargs='*', default=[os.curdir],
help='glob of paths to watch (default: current directory)',
metavar='GLOB')
baseline_group = parser.add_argument_group(
'Eyes session arguments',
'startInfo parameters for the new Eyes session')
baseline_group.add_argument(
'--batch', help='batch all directories together as BATCH')
baseline_group.add_argument(
'--app', default=eyeswrapper.APP_NAME,
help='run against the APP baseline (default: %(default)s)')
baseline_group.add_argument(
'--test', help='set the test name (default: the path to watch)')
baseline_group.add_argument(
'--sep', default=_DEFAULT_SEP,
help='find the nearest parent directory to the watched path with '
'three or more instances of PATTERN, split on it, and set the host '
'OS and browser to the last two fields but one (default: '
'%(default)s)', metavar='PATTERN')
baseline_group.add_argument('--browser',
help='set the host browser (overrides --sep)')
baseline_group.add_argument('--os',
help='set the host OS (overrides --sep)')
path_group = parser.add_argument_group(
'file and directory name arguments')
path_group.add_argument(
'--done', default=_DONE_BASE_NAME,
help='end a test when FILENAME is created (default: %(default)s)',
metavar='FILENAME')
path_group.add_argument('--failed', default=_FAILURE_DIR_NAME,
help='put files into DIRNAME when an Eyes test '
'fails (default: %(default)s)', metavar='DIRNAME')
path_group.add_argument(
'--in-progress', default=watchdir.PROCESSING_DIR_NAME,
help='put files into DIRNAME for processing (default: %(default)s)',
metavar='DIRNAME')
path_group.add_argument(
'--passed', default=_SUCCESS_DIR_NAME,
help='put files into DIRNAME when an Eyes test passes (default: '
'%(default)s)', metavar='DIRNAME')
parser.add_argument('-a', '--api-key', required=True,
help='set the Applitools Eyes API key')
parser.add_argument('-i', '--index', '--array-base', default=_INDEX,
type=int, help='start uploading images from index N '
'(by default, indexing is disabled)', metavar='N')
parser.add_argument('--log', default='WARNING', type=str.upper,
help='set the logging level (default: %(default)s)',
metavar='LEVEL')
parser.add_argument('-t', '--tests', default=_MAX_CONCURRENT_TESTS,
type=int, help='run N tests concurrently (N <= 0 '
'means unlimited; default: %(default)d)',
metavar='N')
    parser.add_argument('--timeout', default=_TIMEOUT, type=int, help='stop '
                        'watching after N seconds without a new file '
                        '(default: %(default)s seconds)', metavar='N')
return parser.parse_args()
def _literal_existing_part(pattern):
"""Returns the literal existing part of a glob.
The literal existing part is as many consecutive directories of the
glob as possible which do not include any glob metacharacters ('*',
'?', and '['). For example, the literal existing part of '/x/y/*/z/?'
is '/x/y'.
Args:
pattern: A file glob.
Returns:
The literal existing part of the glob.
"""
pattern += os.sep
while True:
dirname = os.path.dirname(pattern)
if glob.has_magic(dirname) or not os.path.exists(dirname):
pattern = dirname
else:
return dirname
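# Illustrative behaviour (doctest-style sketch; assumes '/tmp' exists and
# '/tmp/shots' does not):
#
#     >>> _literal_existing_part('/tmp/shots/*/run?')
#     '/tmp'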
def _matches_any_pattern(path, patterns):
"""Compares a path against a list of globs.
Args:
path: A path.
patterns: An iterable of file name globs.
Returns:
The first pattern the path matches, or False if none matches.
"""
normalized_path = os.path.normcase(os.path.normpath(path))
for pattern in patterns:
for matching_path in glob.glob(pattern):
if (os.path.normcase(os.path.normpath(matching_path)) ==
normalized_path):
return pattern
return False
def _set_up_logging(level):
"""Sets up logging.
Args:
level: The logging level.
"""
_LOGGER.setLevel(level)
handler = logging.StreamHandler()
handler.setLevel(level)
handler.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
_LOGGER.addHandler(handler)
_LOGGER.propagate = False
if _LOGGER.getEffectiveLevel() <= logging.DEBUG:
eyeswrapper.LOGGER = _LOGGER
watchdir.LOGGER = _LOGGER
from applitools import logger
eyes_logger = logger.StdoutLogger()
logger.set_logger(eyes_logger)
requests_logger = logging.getLogger('requests.packages.urllib3')
requests_logger.addHandler(handler)
requests_logger.setLevel(logging.DEBUG)
requests_logger.propagate = False
def main():
"""Watches directories and sends images to Eyes.
Use --help for full command line option documentation.
"""
# pylint: disable=global-statement
global _CONCURRENT_TEST_QUEUE
global _DONE_BASE_NAME
global _FAILURE_DIR_NAME
global _INDEX
global _MAX_CONCURRENT_TESTS
global _SUCCESS_DIR_NAME
global _TIMEOUT
args = _parse_args()
# Logging
_set_up_logging(args.log)
_LOGGER.debug('Args: {}'.format(args))
# Command line arguments
batch_info = None
if args.batch:
batch_info = eyes.BatchInfo(args.batch)
eyeswrapper.APP_NAME = args.app
if args.test:
eyeswrapper.TEST_NAME = args.test
_DONE_BASE_NAME = args.done
_FAILURE_DIR_NAME = args.failed
watchdir.PROCESSING_DIR_NAME = args.in_progress
_SUCCESS_DIR_NAME = args.passed
eyes.Eyes.api_key = args.api_key
_INDEX = args.index
if _INDEX and _INDEX < 0:
_LOGGER.warn(
'Invalid index {}; indexing will be disabled'.format(_INDEX))
_INDEX = None
_MAX_CONCURRENT_TESTS = args.tests
_CONCURRENT_TEST_QUEUE = Queue.Queue(_MAX_CONCURRENT_TESTS)
_TIMEOUT = args.timeout
# Watching
watched_paths = []
for pattern in args.patterns:
pattern = os.path.realpath(pattern)
path = _literal_existing_part(pattern)
normalized_path = os.path.normcase(path)
if normalized_path in watched_paths:
_LOGGER.info('Skipping {}: same as {}'.format(pattern,
normalized_path))
continue
watched_paths.append(normalized_path)
watchdir.watch(normalized_path, DirectoryGlobEventHandler,
base_path=normalized_path,
patterns=[os.path.normcase(pattern)],
batch_info=batch_info, host_app=args.browser,
host_os=args.os, sep=args.sep)
_LOGGER.info('Ready to start watching')
try:
while watchdir.is_running():
time.sleep(1)
except KeyboardInterrupt:
watchdir.stop_watching()
if __name__ == '__main__':
main()
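# Illustrative invocation (hedged sketch: the module file name, glob and API
# key are placeholders):
#
#     python imagefeeder.py '/screenshots/*_Windows7_chrome_*' \
#         -a YOUR_EYES_API_KEY --batch nightly --timeout 600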
| apache-2.0 | -32,061,546,921,340,204 | 35.574388 | 79 | 0.582359 | false |
mattclark/osf.io | addons/github/models.py | 1 | 16200 | # -*- coding: utf-8 -*-
import os
import urlparse
import markupsafe
from addons.base.models import (BaseOAuthNodeSettings, BaseOAuthUserSettings,
BaseStorageAddon)
from django.db import models
from framework.auth import Auth
from github3 import GitHubError
from osf.models.external import ExternalProvider
from osf.models.files import File, Folder, BaseFileNode
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
from website import settings
from addons.base import exceptions
from addons.github import settings as github_settings
from addons.github import utils
from addons.github.api import GitHubClient
from addons.github.exceptions import ApiError, NotFoundError
from addons.github.serializer import GitHubSerializer
from website.util import web_url_for
hook_domain = github_settings.HOOK_DOMAIN or settings.DOMAIN
class GithubFileNode(BaseFileNode):
_provider = 'github'
class GithubFolder(GithubFileNode, Folder):
pass
class GithubFile(GithubFileNode, File):
version_identifier = 'ref'
@property
def _hashes(self):
try:
return {'fileSha': self.history[-1]['extra']['hashes']['git']}
except (IndexError, KeyError):
return None
def touch(self, auth_header, revision=None, ref=None, branch=None, **kwargs):
revision = revision or ref or branch
return super(GithubFile, self).touch(auth_header, revision=revision, **kwargs)
class GitHubProvider(ExternalProvider):
name = 'GitHub'
short_name = 'github'
client_id = github_settings.CLIENT_ID
client_secret = github_settings.CLIENT_SECRET
auth_url_base = github_settings.OAUTH_AUTHORIZE_URL
callback_url = github_settings.OAUTH_ACCESS_TOKEN_URL
default_scopes = github_settings.SCOPE
def handle_callback(self, response):
"""View called when the OAuth flow is completed. Adds a new GitHubUserSettings
record to the user and saves the account info.
"""
client = GitHubClient(
access_token=response['access_token']
)
user_info = client.user()
return {
'provider_id': str(user_info.id),
'profile_url': user_info.html_url,
'display_name': user_info.login
}
class UserSettings(BaseOAuthUserSettings):
"""Stores user-specific github information
"""
oauth_provider = GitHubProvider
serializer = GitHubSerializer
def revoke_remote_oauth_access(self, external_account):
"""Overrides default behavior during external_account deactivation.
Tells GitHub to remove the grant for the OSF associated with this account.
"""
connection = GitHubClient(external_account=external_account)
try:
connection.revoke_token()
except GitHubError:
pass
# Required for importing username from social profile configuration page
# Assumes oldest connected account is primary.
@property
def public_id(self):
gh_accounts = self.owner.external_accounts.filter(provider=self.oauth_provider.short_name)
if gh_accounts:
return gh_accounts[0].display_name
return None
class NodeSettings(BaseOAuthNodeSettings, BaseStorageAddon):
oauth_provider = GitHubProvider
serializer = GitHubSerializer
user = models.TextField(blank=True, null=True)
repo = models.TextField(blank=True, null=True)
hook_id = models.TextField(blank=True, null=True)
hook_secret = models.TextField(blank=True, null=True)
registration_data = DateTimeAwareJSONField(default=dict, blank=True, null=True)
user_settings = models.ForeignKey(UserSettings, null=True, blank=True, on_delete=models.CASCADE)
@property
def folder_id(self):
return self.repo or None
@property
def folder_name(self):
if self.complete:
return '{}/{}'.format(self.user, self.repo)
return None
@property
def folder_path(self):
return self.repo or None
@property
def complete(self):
return self.has_auth and self.repo is not None and self.user is not None
def authorize(self, user_settings, save=False):
self.user_settings = user_settings
self.owner.add_log(
action='github_node_authorized',
params={
'project': self.owner.parent_id,
'node': self.owner._id,
},
auth=Auth(user_settings.owner),
)
if save:
self.save()
def clear_settings(self):
self.user = None
self.repo = None
self.hook_id = None
self.hook_secret = None
self.registration_data = None
def deauthorize(self, auth=None, log=True):
self.delete_hook(save=False)
self.clear_settings()
if log:
self.owner.add_log(
action='github_node_deauthorized',
params={
'project': self.owner.parent_id,
'node': self.owner._id,
},
auth=auth,
)
self.clear_auth()
def delete(self, save=False):
super(NodeSettings, self).delete(save=False)
self.deauthorize(log=False)
if save:
self.save()
@property
def repo_url(self):
if self.user and self.repo:
return 'https://github.com/{0}/{1}/'.format(
self.user, self.repo
)
@property
def short_url(self):
if self.user and self.repo:
return '/'.join([self.user, self.repo])
@property
def is_private(self):
connection = GitHubClient(external_account=self.external_account)
try:
return connection.repo(user=self.user, repo=self.repo).private
except GitHubError:
return
def get_folders(self, **kwargs):
if not self.has_auth:
raise exceptions.InvalidAuthError()
else:
connection = GitHubClient(external_account=self.external_account)
# Since /user/repos excludes organization repos to which the
# current user has push access, we have to make extra requests to
# find them
try:
repo_data = [
{
'addon': 'github',
'kind': 'repo',
'id': repo.id,
'name': repo.name,
'path': os.path.join(repo.owner.login, repo.name)
}
for repo in connection.repos()]
except GitHubError:
repo_data = []
return repo_data
# TODO: Delete me and replace with serialize_settings / Knockout
def to_json(self, user):
ret = super(NodeSettings, self).to_json(user)
user_settings = user.get_addon('github')
ret.update({
'user_has_auth': user_settings and user_settings.has_auth,
'is_registration': self.owner.is_registration,
})
if self.has_auth:
owner = self.user_settings.owner
if owner == user:
ret.update({'repo_names': self.get_folders()})
ret.update({
'node_has_auth': True,
'github_user': self.user or '',
'github_repo': self.repo or '',
'github_repo_full_name': '{0}/{1}'.format(self.user, self.repo) if (self.user and self.repo) else '',
'auth_osf_name': owner.fullname,
'auth_osf_url': owner.url,
'auth_osf_id': owner._id,
'github_user_name': self.external_account.display_name,
'github_user_url': self.external_account.profile_url,
'is_owner': owner == user,
'valid_credentials': GitHubClient(external_account=self.external_account).check_authorization(),
'addons_url': web_url_for('user_addons'),
'files_url': self.owner.web_url_for('collect_file_trees')
})
return ret
def serialize_waterbutler_credentials(self):
if not self.complete or not self.repo:
raise exceptions.AddonError('Addon is not authorized')
return {'token': self.external_account.oauth_key}
def serialize_waterbutler_settings(self):
if not self.complete:
raise exceptions.AddonError('Repo is not configured')
return {
'owner': self.user,
'repo': self.repo,
}
def create_waterbutler_log(self, auth, action, metadata):
path = metadata['path']
url = self.owner.web_url_for('addon_view_or_download_file', path=path, provider='github')
sha, urls = None, {}
try:
sha = metadata['extra']['commit']['sha']
urls = {
'view': '{0}?ref={1}'.format(url, sha),
'download': '{0}?action=download&ref={1}'.format(url, sha)
}
except KeyError:
pass
self.owner.add_log(
'github_{0}'.format(action),
auth=auth,
params={
'project': self.owner.parent_id,
'node': self.owner._id,
'path': path,
'urls': urls,
'github': {
'user': self.user,
'repo': self.repo,
'sha': sha,
},
},
)
#############
# Callbacks #
#############
def before_page_load(self, node, user):
"""
:param Node node:
:param User user:
:return str: Alert message
"""
messages = []
# Quit if not contributor
if not node.is_contributor_or_group_member(user):
return messages
# Quit if not configured
if self.user is None or self.repo is None:
return messages
# Quit if no user authorization
if self.user_settings is None:
return messages
connect = GitHubClient(external_account=self.external_account)
try:
repo = connect.repo(self.user, self.repo)
except (ApiError, GitHubError):
return
node_permissions = 'public' if node.is_public else 'private'
repo_permissions = 'private' if repo.private else 'public'
if repo_permissions != node_permissions:
message = (
'Warning: This OSF {category} is {node_perm}, but the GitHub '
'repo {user} / {repo} is {repo_perm}.'.format(
category=markupsafe.escape(node.project_or_component),
node_perm=markupsafe.escape(node_permissions),
repo_perm=markupsafe.escape(repo_permissions),
user=markupsafe.escape(self.user),
repo=markupsafe.escape(self.repo),
)
)
if repo_permissions == 'private':
message += (
' Users can view the contents of this private GitHub '
'repository through this public project.'
)
else:
message += (
' The files in this GitHub repo can be viewed on GitHub '
'<u><a href="https://github.com/{user}/{repo}/">here</a></u>.'
).format(
user=self.user,
repo=self.repo,
)
messages.append(message)
return messages
def before_remove_contributor_message(self, node, removed):
"""
:param Node node:
:param User removed:
:return str: Alert message
"""
try:
message = (super(NodeSettings, self).before_remove_contributor_message(node, removed) +
'You can download the contents of this repository before removing '
'this contributor <u><a href="{url}">here</a></u>.'.format(
url=node.api_url + 'github/tarball/'
))
except TypeError:
# super call returned None due to lack of user auth
return None
else:
return message
def after_remove_contributor(self, node, removed, auth=None):
"""
:param Node node:
:param User removed:
:return str: Alert message
"""
if self.user_settings and self.user_settings.owner == removed:
# Delete OAuth tokens
self.user_settings = None
self.save()
message = (
u'Because the GitHub add-on for {category} "{title}" was authenticated '
u'by {user}, authentication information has been deleted.'
).format(
category=markupsafe.escape(node.category_display),
title=markupsafe.escape(node.title),
user=markupsafe.escape(removed.fullname)
)
if not auth or auth.user != removed:
url = node.web_url_for('node_setting')
message += (
u' You can re-authenticate on the <u><a href="{url}">Settings</a></u> page.'
).format(url=url)
#
return message
def after_fork(self, node, fork, user, save=True):
"""
:param Node node: Original node
:param Node fork: Forked node
:param User user: User creating fork
:param bool save: Save settings after callback
:return the cloned settings
"""
clone = super(NodeSettings, self).after_fork(
node, fork, user, save=False
)
# Copy authentication if authenticated by forking user
if self.user_settings and self.user_settings.owner == user:
clone.user_settings = self.user_settings
if save:
clone.save()
return clone
def before_make_public(self, node):
try:
is_private = self.is_private
except NotFoundError:
return None
if is_private:
return (
'This {cat} is connected to a private GitHub repository. Users '
'(other than contributors) will not be able to see the '
'contents of this repo unless it is made public on GitHub.'
).format(
cat=node.project_or_component,
)
def after_delete(self, user):
self.deauthorize(Auth(user=user), log=True)
#########
# Hooks #
#########
# TODO: Should Events be added here?
# TODO: Move hook logic to service
def add_hook(self, save=True):
if self.user_settings:
connect = GitHubClient(external_account=self.external_account)
secret = utils.make_hook_secret()
hook = connect.add_hook(
self.user, self.repo,
'web',
{
'url': urlparse.urljoin(
hook_domain,
os.path.join(
self.owner.api_url, 'github', 'hook/'
)
),
'content_type': github_settings.HOOK_CONTENT_TYPE,
'secret': secret,
},
events=github_settings.HOOK_EVENTS,
)
if hook:
self.hook_id = hook.id
self.hook_secret = secret
if save:
self.save()
def delete_hook(self, save=True):
"""
:return bool: Hook was deleted
"""
if self.user_settings and self.hook_id:
connection = GitHubClient(external_account=self.external_account)
try:
response = connection.delete_hook(self.user, self.repo, self.hook_id)
except (GitHubError, NotFoundError):
return False
if response:
self.hook_id = None
if save:
self.save()
return True
return False
| apache-2.0 | 8,612,263,753,383,808,000 | 32.333333 | 117 | 0.553457 | false |
rhyolight/hypersearch | setup.py | 1 | 1371 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""TODO"""
from distutils.core import setup
setup(name="hypersearch",
version="0.0.1",
description=("A particle swarm optimization library created by Numenta "
"for hyperparameter optimization."),
author="Numenta",
author_email="[email protected]",
url="http://numenta.org",
packages=["hypersearch"],
)
| gpl-3.0 | 1,950,163,098,414,486,500 | 36.054054 | 78 | 0.644785 | false |
tboggs/fretboard | fretboard/displays/common.py | 1 | 1370 | from __future__ import division, print_function, unicode_literals
class Fret(object):
'''Represents an individual fret on a fretboard.'''
def __init__(self, string, fret, note):
self.string = string
self.number = fret
self.note = note
self.text = None
class FretboardDisplay(object):
'''Base class for fretboard displays.'''
def __init__(self, tuning='E A D G B E', nfrets=19):
'''
ARGUMENTS:
tuning (str):
The string tuning for the display. Should be a space-separated
string of note names.
nfrets (int):
Number of frets in the display.
'''
self.nfrets = nfrets
self.create_tuning(tuning)
self.create_strings()
def create_tuning(self, tuning_str):
from ..notes import Note
names = tuning_str.split()
tuning = [Note(n) for n in names]
# Adjust tone across octaves
for i in range(1, len(tuning)):
tuning[i] = tuning[i - 1] + tuning[i - 1].interval(tuning[i])
self.tuning = tuning
def create_strings(self):
tuning = list(reversed(self.tuning))
self.strings = [[Fret(i + 1, j, tuning[i] + j) for j in range(self.nfrets + 1)]
for i in range(len(tuning))]
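# Illustrative use (hedged sketch; assumes the Note class from ..notes supports
# the arithmetic used above):
#
#     display = FretboardDisplay()        # standard tuning, 19 frets
#     high_e = display.strings[0]         # strings are stored high-to-low
#     print(high_e[5].note)               # note at the 5th fret of string 1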
| gpl-3.0 | -8,315,247,631,235,190,000 | 29.444444 | 87 | 0.545985 | false |
omji/django-tabbed-admin | tabbed_admin/templatetags/tabbed_admin_tags.py | 1 | 1878 | # -*- coding: utf-8 -*-
from django import template
from django.contrib.admin.helpers import Fieldset
from django.template.loader import render_to_string
from django.core.exceptions import ImproperlyConfigured
register = template.Library()
@register.simple_tag(takes_context=True)
def render_tab_fieldsets_inlines(context, entry):
"""
Render the fieldsets and inlines for a tab.
"""
template = "admin/includes/fieldset.html"
admin_form = context['adminform']
if 'request' not in context:
raise ImproperlyConfigured(
'"request" missing from context. Add django.core.context'
            '_processors.request to your '
'TEMPLATE_CONTEXT_PROCESSORS')
request = context['request']
obj = context.get('original', None)
readonly_fields = admin_form.model_admin.get_readonly_fields(request, obj)
inline_matching = {}
if "inline_admin_formsets" in context:
inline_matching = dict((inline.opts.__class__.__name__, inline)
for inline in context["inline_admin_formsets"])
if entry['type'] == 'fieldset':
name = entry['name']
f = Fieldset(
admin_form.form,
name,
readonly_fields=readonly_fields,
model_admin=admin_form.model_admin,
**entry['config']
)
context["fieldset"] = f
return render_to_string(template, context.flatten(), request=request)
elif entry['type'] == 'inline':
try:
inline_admin_formset = inline_matching[entry["name"]]
context["inline_admin_formset"] = inline_admin_formset
return render_to_string(inline_admin_formset.opts.template,
context.flatten(), request=request)
except KeyError: # The user does not have the permission
pass
return ''
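# Illustrative tab entries this tag expects (a hedged sketch based on the two
# branches above; the fieldset and inline names are made up, and the 'config'
# keys follow the usual Fieldset keyword arguments):
#
#     {'type': 'fieldset', 'name': 'General', 'config': {'fields': ('title',)}}
#     {'type': 'inline', 'name': 'BookInline'}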
| bsd-3-clause | 6,588,136,163,030,727,000 | 37.326531 | 78 | 0.620873 | false |
fredgj/lazy | lazy/lazy.py | 1 | 1420 | class LazyRef(object):
def __init__(self, fget):
self.fget = fget
self.name = fget.__name__
def __get__(self, instance, cls):
value = self.fget(instance)
instance.__dict__[self.name] = value
return value
class LazyProperty(object):
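    """Property-like descriptor with optional getter, setter and deleter.
    The getter result is cached until the attribute is set or deleted; note
    that the cache lives on the descriptor itself, so it is shared by every
    instance of the owning class.
    """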
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
if doc is None and fget is not None and hasattr(fget, '__doc__'):
doc = fget.__doc__
self.fget = fget
self.fset = fset
self.fdel = fdel
self.__doc__ = doc
self.value = None
def __get__(self, instance, cls):
if self.fget is None:
raise AttributeError('unreadable attribute')
if self.value is None:
self.value = self.fget(instance)
return self.value
def __set__(self, instance, value):
if self.fset is None:
raise AttributeError('can\'t set attribute')
self.value = None
return self.fset(instance, value)
def __delete__(self, instance):
if self.fdel is None:
raise AttributeError('can\'t delete attribute')
self.value = None
return self.fdel(instance)
def getter(self, func):
self.fget = func
return self
def setter(self, func):
self.fset = func
return self
def deleter(self, func):
self.fdel = func
return self
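# Illustrative use (hedged sketch):
#
#     class Circle(object):
#         def __init__(self, radius):
#             self.radius = radius
#
#         @LazyRef
#         def area(self):
#             return 3.14159 * self.radius ** 2   # computed once per instance
#
#         @LazyProperty
#         def diameter(self):
#             return 2 * self.radius              # cached on the descriptor
#
# LazyRef caches the value in the instance __dict__, so each Circle gets its
# own cached area; LazyProperty's cache is shared by all instances until the
# attribute is set or deleted.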
| mit | -501,561,031,706,150,700 | 26.307692 | 73 | 0.557042 | false |
ifp-uiuc/an-analysis-of-unsupervised-pre-training-iclr-2015 | cifar10/1_to_1/cnn_a/train.py | 1 | 1904 | import os
import sys
sys.path.append('../..')
import numpy
from anna import util
from anna.datasets import supervised_dataset
from models import CNNModel
print('Start')
pid = os.getpid()
print('PID: {}'.format(pid))
f = open('pid', 'wb')
f.write(str(pid)+'\n')
f.close()
model = CNNModel('experiment', './', learning_rate=1e-2)
monitor = util.Monitor(model)
# Loading CIFAR-10 dataset
print('Loading Data')
train_data = numpy.load('/data/cifar10/train_X.npy')
train_labels = numpy.load('/data/cifar10/train_y.npy')
test_data = numpy.load('/data/cifar10/test_X.npy')
test_labels = numpy.load('/data/cifar10/test_y.npy')
train_dataset = supervised_dataset.SupervisedDataset(train_data, train_labels)
test_dataset = supervised_dataset.SupervisedDataset(test_data, test_labels)
train_iterator = train_dataset.iterator(
mode='random_uniform', batch_size=128, num_batches=100000)
test_iterator = test_dataset.iterator(mode='random_uniform', batch_size=128,
num_batches=100000)
normer = util.Normer2(filter_size=5, num_channels=3)
augmenter = util.DataAugmenter(2, (32, 32), flip=False)
print('Training Model')
for x_batch, y_batch in train_iterator:
x_batch = x_batch.transpose(1, 2, 3, 0)
x_batch = augmenter.run(x_batch)
x_batch = normer.run(x_batch)
#y_batch = numpy.int64(numpy.argmax(y_batch, axis=1))
monitor.start()
log_prob, accuracy = model.train(x_batch, y_batch)
monitor.stop(1-accuracy) # monitor takes error instead of accuracy
if monitor.test:
monitor.start()
x_test_batch, y_test_batch = test_iterator.next()
x_test_batch = x_test_batch.transpose(1, 2, 3, 0)
x_test_batch = normer.run(x_test_batch)
#y_test_batch = numpy.int64(numpy.argmax(y_test_batch, axis=1))
test_accuracy = model.eval(x_test_batch, y_test_batch)
monitor.stop_test(1-test_accuracy)
| bsd-3-clause | 2,796,604,632,929,201,700 | 32.403509 | 78 | 0.682248 | false |
tjsavage/djangononrel-starter | settings.py | 1 | 1733 | # Initialize App Engine and import the default settings (DB backend, etc.).
# If you want to use a different backend you have to remove all occurences
# of "djangoappengine" from this file.
from djangoappengine.settings_base import *
import os
SECRET_KEY = '=r-$b*8hglm+858&9t043hlm6-&6-3d3vfc4((7yd0dbrakhvi'
INSTALLED_APPS = (
# 'django.contrib.admin',
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.sites',
'djangotoolbox',
# djangoappengine should come last, so it can override a few manage.py commands
'djangoappengine',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.request',
'django.core.context_processors.media',
)
# This test runner captures stdout and associates tracebacks with their
# corresponding output. Helps a lot with print-debugging.
TEST_RUNNER = 'djangotoolbox.test.CapturingTestSuiteRunner'
ADMIN_MEDIA_PREFIX = '/media/admin/'
TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), 'templates'),)
ROOT_URLCONF = 'urls'
SITE_ID = 29
# Activate django-dbindexer if available
try:
import dbindexer
DATABASES['native'] = DATABASES['default']
DATABASES['default'] = {'ENGINE': 'dbindexer', 'TARGET': 'native'}
INSTALLED_APPS += ('dbindexer',)
DBINDEXER_SITECONF = 'dbindexes'
MIDDLEWARE_CLASSES = ('dbindexer.middleware.DBIndexerMiddleware',) + \
MIDDLEWARE_CLASSES
except ImportError:
pass
| bsd-3-clause | -3,507,845,645,340,688,000 | 30.509091 | 83 | 0.719561 | false |
frePPLe/frePPLe | contrib/odoo/addons_v8/frepple/res_company.py | 1 | 1239 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2014 by frePPLe bv
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from openerp.osv import osv
from openerp.osv import fields
class res_company(osv.osv):
_name = 'res.company'
_inherit = 'res.company'
_columns = {
        'manufacturing_warehouse': fields.many2one('stock.warehouse', 'Manufacturing warehouse', ondelete='set null'),
'calendar': fields.many2one('resource.calendar', 'Calendar', ondelete='set null'),
'cmdline': fields.char('Command line', size=128)
}
_defaults = {
'cmdline': lambda *a: 'frepplectl --env=odoo_read,odoo_write'
}
res_company()
| agpl-3.0 | -7,058,372,471,046,832,000 | 34.4 | 114 | 0.719935 | false |
cpcloud/numpy | numpy/distutils/command/autodist.py | 3 | 1650 | """This module implements additional tests ala autoconf which can be useful.
"""
from __future__ import division, absolute_import, print_function
# We put them here since they could be easily reused outside numpy.distutils
def check_inline(cmd):
"""Return the inline identifier (may be empty)."""
cmd._check_compiler()
body = """
#ifndef __cplusplus
static %(inline)s int static_func (void)
{
return 0;
}
%(inline)s int nostatic_func (void)
{
return 0;
}
#endif"""
for kw in ['inline', '__inline__', '__inline']:
st = cmd.try_compile(body % {'inline': kw}, None, None)
if st:
return kw
return ''
def check_compiler_gcc4(cmd):
"""Return True if the C compiler is GCC 4.x."""
cmd._check_compiler()
body = """
int
main()
{
#if (! defined __GNUC__) || (__GNUC__ < 4)
#error gcc >= 4 required
#endif
}
"""
return cmd.try_compile(body, None, None)
def check_gcc_function_attribute(cmd, attribute, name):
"""Return True if the given function attribute is supported."""
cmd._check_compiler()
body = """
#pragma GCC diagnostic error "-Wattributes"
#pragma clang diagnostic error "-Wattributes"
int %s %s(void*);
int
main()
{
}
""" % (attribute, name)
return cmd.try_compile(body, None, None) != 0
def check_gcc_variable_attribute(cmd, attribute):
"""Return True if the given variable attribute is supported."""
cmd._check_compiler()
body = """
#pragma GCC diagnostic error "-Wattributes"
#pragma clang diagnostic error "-Wattributes"
int %s foo;
int
main()
{
return 0;
}
""" % (attribute, )
return cmd.try_compile(body, None, None) != 0
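# Illustrative use (hedged sketch: 'cmd' is assumed to be a numpy.distutils
# config command instance, and the attribute below is only an example):
#
#     inline_kw = check_inline(cmd)            # '' if no inline support
#     if check_gcc_function_attribute(cmd, '__attribute__((unused))',
#                                     'attribute_unused'):
#         pass  # e.g. emit a config.h define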
| bsd-3-clause | 3,202,266,910,410,981,400 | 20.428571 | 76 | 0.638788 | false |
GeoMop/GeoMop | src/LayerEditor/ui/dialogs/layers/split_layer.py | 1 | 4224 | """
Dialog for appending new layer to the end.
"""
import PyQt5.QtWidgets as QtWidgets
import PyQt5.QtGui as QtGui
from .layers_helpers import LayersHelpers
from LayerEditor.leconfig import cfg
import gm_base.icon as icon
class SplitLayerDlg(QtWidgets.QDialog):
def __init__(self, min, max, copy_block, parent=None):
super(SplitLayerDlg, self).__init__(parent)
self.setWindowTitle("Split Layer")
grid = QtWidgets.QGridLayout(self)
d_layer_name = QtWidgets.QLabel("Layer Name:", self)
self.layer_name = QtWidgets.QLineEdit()
        self.layer_name.setToolTip("New Layer name (New layer is at the bottom)")
self.have_default_name = True
self.set_default_name()
self.layer_name.textChanged.connect(self.lay_name_changed)
self.image = QtWidgets.QLabel(self)
self.image.setMinimumWidth(self.layer_name.sizeHint().height())
self.image.setPixmap(icon.get_app_icon("sign-check").pixmap(self.layer_name.sizeHint().height()))
self.image.setToolTip('Layer name is unique, everything is fine.')
grid.addWidget(d_layer_name, 0, 0)
grid.addWidget(self.layer_name, 0, 1)
grid.addWidget(self.image, 0, 2)
d_split_type = QtWidgets.QLabel("Split Interface Type:", self)
self.split_type = LayersHelpers.split_type_combo(copy_block)
grid.addWidget(d_split_type, 1, 0)
grid.addWidget(self.split_type, 1, 1)
d_surface = QtWidgets.QLabel("Split in Surface:", self)
grid.addWidget(d_surface, 2, 0)
i = LayersHelpers.add_surface_to_grid(self, grid, 3)
self.validator = QtGui.QDoubleValidator()
self.validator.setBottom(min)
self.validator.setTop(max)
self.elevation.setValidator(self.validator)
self.elevation.setText(str((min+max)/2))
self._tranform_button = QtWidgets.QPushButton("Split", self)
self._tranform_button.clicked.connect(self.accept)
self._cancel_button = QtWidgets.QPushButton("Cancel", self)
self._cancel_button.clicked.connect(self.reject)
button_box = QtWidgets.QDialogButtonBox()
button_box.addButton(self._tranform_button, QtWidgets.QDialogButtonBox.AcceptRole)
button_box.addButton(self._cancel_button, QtWidgets.QDialogButtonBox.RejectRole)
grid.addWidget(button_box, i, 1, 1, 2)
self.setLayout(grid)
@classmethod
def is_unique_layer_name(self, lay_name):
""" Return False in the case of colision with an existing region name."""
for _, layer in cfg.diagram.regions.layers.items():
if lay_name == layer:
return False
return True
def lay_name_changed(self, name):
""" Called when Region Line Edit is changed."""
self.have_default_name = False
if self.is_unique_layer_name(name):
self.image.setPixmap(
icon.get_app_icon("sign-check").pixmap(self.layer_name.sizeHint().height())
)
self.image.setToolTip('Unique name is OK.')
self._tranform_button.setEnabled(True)
else:
self.image.setPixmap(
icon.get_app_icon("warning").pixmap(self.layer_name.sizeHint().height())
)
self.image.setToolTip('Name is not unique!')
self._tranform_button.setEnabled(False)
def set_default_name(self):
""" Set default name if it seems to be default name. """
if self.have_default_name:
lay_id = 0
name = cfg.diagram.regions.layers[0]
while not self.is_unique_layer_name(name):
lay_id += 1
name = "Layer_" + str(lay_id)
self.layer_name.setText(name)
self.have_default_name = True
def accept(self):
"""
Accepts the form if elevation data fields are valid.
:return: None
"""
if LayersHelpers.validate_depth(self.elevation, self.validator, self):
super(SplitLayerDlg, self).accept()
def fill_surface(self, interface):
"""Fill set surface"""
return LayersHelpers.fill_surface(self, interface)
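# Illustrative use (hedged sketch of a typical PyQt dialog flow; the elevation
# bounds and copy_block value are placeholders):
#
#     dlg = SplitLayerDlg(-500.0, 0.0, copy_block)
#     if dlg.exec_() == QtWidgets.QDialog.Accepted:
#         new_name = dlg.layer_name.text()
#         elevation = float(dlg.elevation.text())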
| gpl-3.0 | 2,672,201,235,913,936,000 | 38.111111 | 105 | 0.624763 | false |
SCPR/calif-earthquakes | tests.py | 1 | 6616 | # -*- coding: utf-8 -*-
from earthquakes import app, db
# from earthquakes import settings_production
from earthquakes.models import Earthquake, NearestCity
import os
import logging
import time
import datetime
import calendar
import pytz
from pytz import timezone
from datetime import tzinfo, date
import unittest
import tempfile
import types
import requests
# from requests.packages.urllib3.util.retry import Retry
# from requests.adapters import HTTPAdapter
logger = logging.getLogger("root")
logging.basicConfig(
format="\033[1;36m%(levelname)s: %(filename)s (def %(funcName)s %(lineno)s): \033[1;37m %(message)s",
level=logging.DEBUG
)
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
class TestCase(unittest.TestCase):
request_url = "http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_hour.geojson"
details_url = "http://earthquake.usgs.gov/earthquakes/feed/v1.0/detail/nc72138821.geojson"
cities_url = "http://earthquake.usgs.gov/product/nearby-cities/nc72138821/us/1389226110774/nearby-cities.json"
def setUp(self):
"""
creates a new test client
"""
app.config["TESTING"] = True
self.app = app.test_client()
def test_a_download_chain(self):
"""
        run the series of usgs api response checks in order
"""
self.Test_get_usgs_api_response(self.request_url)
self.Test_usgs_details_url_present(self.request_url)
self.Test_usgs_cities_url_present(self.request_url)
def Test_get_usgs_api_response(self, request_url):
"""
test response from usgs api
"""
response = requests.get(request_url)
response.raise_for_status()
self.assertEqual(200, response.status_code)
response_data = response.json()
self.assertIsNotNone(response_data)
def Test_usgs_details_url_present(self, request_url):
"""
test response from usgs api for details json
"""
response = requests.get(request_url)
response.raise_for_status()
self.assertEqual(200, response.status_code)
response_data = response.json()
self.assertIsNotNone(response_data)
for item in response_data["features"]:
details = item["properties"]["detail"]
self.assertIsNotNone(details)
def Test_usgs_cities_url_present(self, request_url):
"""
test response from usgs api for details json
"""
response = requests.get(request_url)
response.raise_for_status()
self.assertEqual(200, response.status_code)
response_data = response.json()
self.assertIsNotNone(response_data)
for item in response_data["features"]:
details_url = item["properties"]["detail"]
self.assertIsNotNone(details_url)
details = requests.get(details_url)
details.raise_for_status()
self.assertEqual(200, details.status_code)
details_data = details.json()
self.assertIsNotNone(details_data)
nearest_cities_url = details_data["properties"]["products"][
"nearby-cities"][0]["contents"]["nearby-cities.json"]["url"]
self.assertIsNotNone(nearest_cities_url)
def test_append_city_to_list(self):
"""
test_append_city_to_list
"""
list_of_nearest_cities = [
{u"distance": 31, u"direction": u"NE", u"name": u"Soledad, California",
u"longitude": -121.32632, u"latitude": 36.42469, u"population": 25738},
{u"distance": 36, u"direction": u"NNE", u"name": u"Greenfield, California",
u"longitude": -121.24381, u"latitude": 36.3208, u"population": 16330},
{u"distance": 39, u"direction": u"SE", u"name": u"Hollister, California",
u"longitude": -121.4016, u"latitude": 36.85245, u"population": 34928},
{u"distance": 45, u"direction": u"N", u"name": u"King City, California",
u"longitude": -121.12603, u"latitude": 36.21274, u"population": 12874},
{u"distance": 221, u"direction": u"S", u"name": u"Sacramento, California",
u"longitude": -121.4944, u"latitude": 38.58157, u"population": 466488}
]
self.assertIsNotNone(list_of_nearest_cities)
container_list = []
for nearby_city in list_of_nearest_cities:
city = NearestCity(
id=None,
distance=nearby_city["distance"],
direction=nearby_city["direction"],
name=nearby_city["name"],
latitude=nearby_city["latitude"],
longitude=nearby_city["longitude"],
population=nearby_city["population"],
earthquake_id=None
)
self.assertIsNotNone(city)
container_list.append(city)
self.assertIsNotNone(container_list)
def test_parsing_for_desired_string(self):
"""
test_parsing_for_desired_string
"""
list_of_places = [
"35km N of Road Town, British Virgin Islands",
"11km NNW of Jones, Oklahoma",
"10km WNW of Cobb, California",
"110km NW of Ensenada, Baja California",
]
for place in list_of_places:
if "Baja California" in place:
test_data = False
self.assertFalse(test_data)
elif "California" in place:
test_data = True
self.assertTrue(test_data)
else:
test_data = False
self.assertFalse(test_data)
def test_for_date_formatting(self):
"""
test_for_date_formatting
"""
# terminal shows datetime object as local time
# via http://www.epochconverter.com/ - 2013-12-07 6:10:23.060000
# local: 2013-12-07 10:10:23.060000
# the date/time from the api is a unix timestamp
date_time = 1386439823060
# the timezone from the api offset from UTC in minutes at the event
# epicenter
tz = -360
# convert the unix timestamp to utc datetime object
test_data = isinstance(datetime.datetime.utcfromtimestamp(date_time / 1e3), datetime.datetime)
self.assertTrue(test_data)
# test views
def test_index_page(self):
"""
retrieve index page view
"""
test_response = self.app.get("/")
self.assertEqual(200, test_response.status_code)
if __name__ == "__main__":
unittest.main()
| gpl-2.0 | -7,676,197,781,481,037,000 | 36.378531 | 114 | 0.606106 | false |
moreati/revelation | epiphany/test/test_asm.py | 1 | 11662 | from epiphany.sim import Epiphany
from epiphany.utils import float2bits
from epiphany.test.machine import StateChecker
import os.path
import pytest
elf_dir = os.path.join('epiphany', 'test', 'asm')
@pytest.mark.parametrize("elf,expected",
[('add.elf', StateChecker(rf1=110, rf2=7, rf3=105)),
('and.elf', StateChecker(rf0=0, rf1=1, rf2=1, rf3=0, rf4=0, rf5=0)),
('asr.elf', StateChecker(rf0=1, rf1=5, rf2=0, rf3=0)),
('bcond.elf', StateChecker(rf0=0, rf1=110)),
('bitr.elf', StateChecker(rf0=0x84c2a6e1)),
('bl.elf', StateChecker(rf0=15, rf1=0, rf2=15)),
pytest.mark.xfail(('dma_transfer.elf', StateChecker())),
('eor.elf', StateChecker(rf0=5, rf1=7, rf2=2)),
('fix.elf', StateChecker(rf0=5)),
('gid.elf', StateChecker(rfSTATUS=0b10)),
('gie.elf', StateChecker(rfSTATUS=0b00)),
pytest.mark.xfail(('hardware_loop.elf',
StateChecker(rf0=100, rf1=116, rf2=100, rf3=100,
rf4=100, rf5=100, rf6=100, rf7=100, rf8=100, rfSTATUS=0b00))),
('jalr.elf', StateChecker(rf3=100, rfLR=0x236)),
('jr.elf', StateChecker(rf0=3, rf1=1, rf2=2)),
('low_high.elf', StateChecker(rf3=0xFFFFFFFF)),
('lsl.elf', StateChecker(rf0=5, rf1=7, rf2=640, rf3=640)),
('lsr.elf', StateChecker(rf0=3, rf1=1, rf2=1, rf3=1)),
('mov_cond.elf', StateChecker(rf0=0, rf1=15, rf2=15, rf3=15)),
pytest.mark.xfail(('movfs.elf', StateChecker(rf0=7, rf63=7, rfIRET=7))),
('mov_imm.elf', StateChecker(rf0=25)),
('movt.elf', StateChecker(rf0=2415919104)),
('movts.elf', StateChecker(rf0=7, rfIRET=7)), # FIXME
('nop.elf', StateChecker(pc=564)),
('orr.elf', StateChecker(rf0=5, rf1=7, rf2=7)),
pytest.mark.xfail(('rti.elf', StateChecker())), # FIXME
('rts.elf', StateChecker(rf1=100, rf2=200, rf3=300, rfLR=562)),
('sub.elf', StateChecker(rf0=100, rf1=20, rf2=80, rf3=80)),
pytest.mark.xfail(('trap.elf', StateChecker())),
])
def test_elf(elf, expected):
"""Test ELF files that deal in unsigned integers (rather than floats).
"""
elf_filename = os.path.join(elf_dir, elf)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
expected.check(epiphany.state)
@pytest.mark.parametrize("elf,expected",
[('fabs.elf', StateChecker(rf0=float2bits(5.0),
rf1=float2bits(0.0),
rf2=float2bits(-5.0),
rf3=float2bits(5.0),
rf4=float2bits(0.0),
rf5=float2bits(5.0),
)),
('float.elf', StateChecker(rf1=float2bits(25.0))),
('fadd.elf', StateChecker(rf0=float2bits(15.0),
rf1=float2bits(5.0),
rf2=float2bits(5.0),
rf3=float2bits(10.0),
rf4=float2bits(0.0))),
('fsub.elf', StateChecker(rf0=float2bits(0.0),
rf1=float2bits(2.0),
rf2=float2bits(5.0),
rf3=float2bits(5.0),
rf4=float2bits(3.0),
rf5=float2bits(-3.0))),
('fmul.elf', StateChecker(rf0=float2bits(0.0),
rf1=float2bits(2.0),
rf2=float2bits(5.0),
rf3=float2bits(0.0),
rf4=float2bits(10.0))),
('fmadd.elf', StateChecker(rf0=float2bits(0.0),
rf1=float2bits(2.0),
rf2=float2bits(5.0),
rf3=float2bits(17.0),
rf4=float2bits(7.0))),
('fmsub.elf', StateChecker(rf0=float2bits(0.0),
rf1=float2bits(2.0),
rf2=float2bits(5.0),
rf3=float2bits(-3.0),
rf4=float2bits(7.0))),
('iadd.elf', StateChecker(rf0=float2bits(15.0),
rf1=float2bits(5.0),
rf2=float2bits(5.0),
rf3=float2bits(10.0),
rf4=float2bits(0.0))),
('isub.elf', StateChecker(rf0=float2bits(0.0),
rf1=float2bits(2.0),
rf2=float2bits(5.0),
rf3=float2bits(5.0),
rf4=float2bits(3.0),
rf5=float2bits(-3.0))),
('imul.elf', StateChecker(rf0=float2bits(0.0),
rf1=float2bits(2.0),
rf2=float2bits(5.0),
rf3=float2bits(0.0),
rf4=float2bits(10.0))),
('imadd.elf', StateChecker(rf0=float2bits(0.0),
rf1=float2bits(2.0),
rf2=float2bits(5.0),
rf3=float2bits(17.0),
rf4=float2bits(7.0))),
('imsub.elf', StateChecker(rf0=float2bits(0.0),
rf1=float2bits(2.0),
rf2=float2bits(5.0),
rf3=float2bits(-3.0),
rf4=float2bits(7.0))),
])
def test_fp_elf(elf, expected):
"""Check that floating point instructions operate correctly.
These ELF files are tested separately using the fp_check method.
This means that test failures will be reported so that the contents of
registers are printed as floats, rather than as unsigned integers.
"""
elf_filename = os.path.join(elf_dir, elf)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
expected.fp_check(epiphany.state)
@pytest.mark.parametrize("elf,expected",
[('ldr_disp.elf', StateChecker(rf0=0xFFFFFFFF, rf1=0x00100000)),
('ldr_disp_pm.elf', StateChecker(rf0=0xFFFFFFFF, rf1=0x00100004)),
('ldr_index.elf', StateChecker(rf0=0xFFFFFFFF, rf1=0x00100004, rf2=0)),
('ldr_pm.elf', StateChecker(rf0=0xFFFFFFFF, rf1=0x00100004,
rf2=0x80002)),
])
def test_load(elf, expected):
"""Test ELF files that load values from memory into a register.
"""
elf_filename = os.path.join(elf_dir, elf)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.state.mem.write(0x00100004, 4, 0xFFFFFFFF)
epiphany.max_insts = 10000
epiphany.run()
expected.check(epiphany.state)
@pytest.mark.parametrize("elf,expected",
[('str_disp.elf', StateChecker(rf0=0xFFFFFFFF, rf1=0x00100000)),
('str_disp_pm.elf', StateChecker(rf0=0xFFFFFFFF, rf1=0x00100004)),
('str_index.elf', StateChecker(rf0=0xFFFFFFFF, rf1=0x00100004, rf2=0)),
('str_pm.elf', StateChecker(rf0=0xFFFFFFFF, rf1=0x00100004, rf2=4)),
])
def test_store(elf, expected):
"""Test ELF files that transfer data from registers to memory.
"""
elf_filename = os.path.join(elf_dir, elf)
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 10000
epiphany.run()
expected.check(epiphany.state, memory=[(0x00100004, 4, 0xFFFFFFFF)])
def test_testset32():
elf_filename = os.path.join(elf_dir, 'testset.elf')
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.state.mem.write(0x100004, 4, 0x0)
epiphany.max_insts = 100000
epiphany.run()
expected = StateChecker(AZ=1, rf0=0, rf1=0x100000, rf2=0x4)
expected.check(epiphany.state, memory=[(0x100004, 4, 0xFFFF)])
def test_testset32_fail():
"""Check that the testset32 instruction fails if the memory address it
    is given is too low.
"""
elf_filename = os.path.join(elf_dir, 'testset_fail.elf')
expected_text = """testset32 has failed to write to address %s.
The absolute address used for the test and set instruction must be located
within the on-chip local memory and must be greater than 0x00100000 (2^20).
""" % str(hex(0x4))
with pytest.raises(ValueError) as expected_exn:
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.max_insts = 100000
epiphany.run()
assert expected_text == expected_exn.value.message
def test_execute_idle16(capsys):
"""Check that the idle16 prints out the correct warning on STDOUT.
"""
elf_filename = os.path.join(elf_dir, 'idle.elf')
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.state.rfSTATUS = 1
epiphany.max_insts = 10000
epiphany.run()
out, err = capsys.readouterr()
expected_state = StateChecker(rfSTATUS=0)
expected_text = ('IDLE16 does not wait in this simulator. ' +
'Moving to next instruction.')
expected_state.check(epiphany.state)
assert expected_text in out
assert err == ''
assert not epiphany.state.running # Set by bkpt16 instruction.
def test_unimpl():
"""Check that the unimpl instruction throws a NotImplementedError.
"""
elf_filename = os.path.join(elf_dir, 'unimpl.elf')
with pytest.raises(NotImplementedError):
epiphany = Epiphany()
with open(elf_filename, 'rb') as elf:
epiphany.init_state(elf, elf_filename, '', [], False, is_test=True)
epiphany.run()
| bsd-3-clause | 4,192,714,999,340,684,000 | 50.149123 | 84 | 0.483708 | false |
MartinThoma/hwrt | hwrt/features_plugin.py | 1 | 2007 | """Features in development."""
# Core Library modules
import os
import urllib.request
# Local modules
from . import handwritten_data, utils
class Bitmap:
"""n Γ n grayscale bitmap of the recording."""
normalize = True
def __init__(self, n=28):
self.n = n # Size of the bitmap (n x n)
def __repr__(self):
return ("Bitmap (n=%i)\n") % (self.n)
def __str__(self):
return repr(self)
def get_dimension(self):
"""Get the dimension of the returned feature. This equals the number
of elements in the returned list of numbers."""
return self.n ** 2
def __call__(self, hwr_obj):
assert isinstance(
hwr_obj, handwritten_data.HandwrittenData
), "handwritten data is not of type HandwrittenData, but of %r" % type(hwr_obj)
x = []
url = "http://localhost/write-math/website/raw-data/"
raw_data_id = hwr_obj.raw_data_id
project_root = utils.get_project_root()
        foldername = os.path.join(project_root, "bitmaps") + os.sep
f = urllib.request.urlopen(f"{url}{raw_data_id}.svg")
with open("%s%i.svg" % (foldername, raw_data_id), "wb") as imgFile:
imgFile.write(f.read())
command = (
"convert -size 28x28 {folder}{id}.svg -resize {n}x{n} "
"-gravity center -extent {n}x{n} "
"-monochrome {folder}{id}.png"
).format(
id=raw_data_id,
n=self.n,
folder=foldername,
)
os.system(command)
# Third party modules
from PIL import Image
im = Image.open("%s%i.png" % (foldername, raw_data_id))
pix = im.load()
for i in range(28):
for j in range(28):
x.append(pix[i, j])
assert self.get_dimension() == len(
x
), "Dimension of %s should be %i, but was %i" % (
str(self),
self.get_dimension(),
len(x),
)
return x
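# Example (sketch): compute the bitmap feature for one recording. The variable
# names are hypothetical; the feature assumes a local write-math server and
# ImageMagick's `convert` on the PATH.
#
#     feature = Bitmap(n=28)
#     pixels = feature(some_handwritten_data)  # 28*28 grayscale values
#     assert len(pixels) == feature.get_dimension()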
| mit | -5,350,278,061,378,514,000 | 28.072464 | 87 | 0.536391 | false |
openstack/heat | heat/policies/build_info.py | 1 | 1502 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import versionutils
from oslo_policy import policy
from heat.policies import base
DEPRECATED_REASON = """
The build API now supports system scope and default roles.
"""
POLICY_ROOT = 'build_info:%s'
deprecated_build_info = policy.DeprecatedRule(
name=POLICY_ROOT % 'build_info',
check_str=base.RULE_DENY_STACK_USER,
deprecated_reason=DEPRECATED_REASON,
deprecated_since=versionutils.deprecated.WALLABY
)
build_info_policies = [
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'build_info',
check_str=base.SYSTEM_OR_PROJECT_READER,
scope_types=['system', 'project'],
description='Show build information.',
operations=[
{
'path': '/v1/{tenant_id}/build_info',
'method': 'GET'
}
],
deprecated_rule=deprecated_build_info
)
]
def list_rules():
return build_info_policies
| apache-2.0 | 7,344,856,664,631,625,000 | 29.04 | 78 | 0.673768 | false |
levibostian/VSAS | VSAS system/VSAS/vsasGUI/EmailScreen.py | 1 | 4232 | """
Test of an Email Screen
Author: Kristen Nielsen [email protected]
Modeled after tkSimpleDialog.py from pythonware.com
"""
from Tkinter import *
import tkMessageBox as MsgBox
from multilistbox import MultiListbox
from emailInputNew import EmailInput
class EmailSettings(Toplevel):
def __init__(self, parent):
Toplevel.__init__(self, parent, height=400, width=700)
#self.pack_propagate(0)
self.transient(parent)
self.title("VSAS - Email Settings")
self._parent = parent
self.adminEmail=""
emailFile = open("vsasGUI/emailTester.txt","r")
self.emailList = emailFile.readlines()
emailFile.close()
body = Frame(self, bg="black")
self._initialFocus = self.body(body)
body.pack_propagate(0)
body.pack(padx=5,pady=5)
self.buttonBox()
self.current=None
self.grab_set()
self.bind("<F1>",self.displayHelp)
if not self._initialFocus:
self._initialFocus = self
self.protocol("WM_DELETE_WINDOW", self.cancel)
self.geometry("+%d+%d" % (parent.winfo_rootx()+50,
parent.winfo_rooty()+50))
self._initialFocus.focus_set()
self._parent.wait_window(self)
def body(self, master):
# create canvas to hold scrollbar and listbox objects
emailListCanvas = Canvas(master, width=350, height=400)
emailListCanvas.config(scrollregion=emailListCanvas.bbox(ALL))
emailListCanvas.grid(column=0, sticky=W)
# create multiListbox to hold email list
self._emailListbox = MultiListbox(emailListCanvas,
(('Email', 160),("",1)),
command = self.deleteEmail)
for item in self.emailList:
self._emailListbox.insert(END, (item,""))
self._emailListbox.grid(column = 0,columnspan=3, sticky=W)
addButton = Button(emailListCanvas, text="Add",command=self.addEmail)
addButton.grid(row=1,column=0)
deleteButton = Button(emailListCanvas, text="Delete",command=self.deleteEmail)
deleteButton.grid(row=1,column=1)
helpButton = Button(emailListCanvas, text="Help", command = self.displayHelp)
helpButton.grid(row=1,column=2)
#Label(master, text="The administrator email will receive\nall information regarding all alerts",
#fg="green",bg="black").grid(column=1, row=0)
#self.adminEmailDisplay = Label(master, text=self.adminEmail)
#self.adminEmailDisplay.grid(column=1, row=1)
def buttonBox(self):
pass
def addEmail(self):
email = EmailInput(self, title="Add Email").get()
if len(email)>0:
emailFile = open("vsasGUI/emailTester.txt","a")
#emailComposite = email.split(",")
#emailTuple = (emailComposite[0], emailComposite[1])
print email
email = email+"\n"
self.emailList.append(email)
emailFile.write(email)
emailFile.close()
self._emailListbox.insert(END, (email,""))
self.update()
def deleteEmail(self, event=None):
if MsgBox.askyesno("Delete Email?","Are you sure you want to delete selected email?"):
index = self.emailList[eval(self._emailListbox.curselection()[0])]
self.emailList.remove(index)
self._emailListbox.delete(0,END)
emailFile = open("vsasGUI/emailTester.txt","w")
for item in self.emailList:
emailFile.write(item)
self._emailListbox.insert(END, (item,""))
emailFile.close()
def displayHelp(self, event=None):
helpText = open("vsasGUI/EmailScreenHelp.txt","r").read()
MsgBox.showinfo(title="VSAS Email Settings - Help", message=helpText)
def cancel(self, event=None):
if MsgBox.askyesno("Done?",
"All changes have been saved.\nReturn to VSAS Main?"):
self._parent.focus_set()
self.destroy()
| mit | 3,505,993,686,349,974,500 | 34.17094 | 105 | 0.587902 | false |
mropert/conan | conans/test/integration/go_diamond_test.py | 1 | 3953 | import unittest
from conans.test.utils.tools import TestServer, TestClient
from conans.model.ref import ConanFileReference
import platform
import os
from conans.test.utils.context_manager import CustomEnvPath
from conans.test.utils.test_files import hello_conan_files
from nose.plugins.attrib import attr
@attr('golang')
class GoDiamondTest(unittest.TestCase):
def setUp(self):
test_server = TestServer()
self.servers = {"default": test_server}
self.conan = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
def _export_upload(self, ref_str, number=0, deps=None):
conan_reference = ConanFileReference.loads(ref_str)
files = hello_conan_files(conan_reference=conan_reference, number=number, deps=deps,
lang='go')
self.conan.save(files, clean_first=True)
self.conan.run("export lasote/stable")
self.conan.run("upload %s" % str(conan_reference))
def reuse_test(self):
self._export_upload("hello0/0.1@lasote/stable")
self._export_upload("hello1/0.1@lasote/stable", 1, [0])
self._export_upload("hello2/0.1@lasote/stable", 2, [0])
self._export_upload("hello3/0.1@lasote/stable", 3, [1, 2])
client = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
conan_reference = ConanFileReference.loads("hello4/0.2@lasote/stable")
files3 = hello_conan_files(conan_reference=conan_reference, number=4, deps=[3], lang='go')
client.save(files3)
client.run("install --build missing")
client.run("build")
command = os.sep.join([".", "bin", "say_hello"])
with CustomEnvPath(paths_to_add=['$GOPATH/bin'],
var_to_add=[('GOPATH', client.current_folder), ]):
client.runner('go install hello4_main', cwd=os.path.join(client.current_folder, 'src'))
if platform.system() == "Windows":
command = "hello4_main"
else:
command = './hello4_main'
client.runner(command, cwd=os.path.join(client.current_folder, 'bin'))
self.assertEqual(['Hello 4', 'Hello 3', 'Hello 1', 'Hello 0', 'Hello 2', 'Hello 0'],
str(client.user_io.out).splitlines()[-6:])
# Try to upload and reuse the binaries
client.run("upload hello3/0.1@lasote/stable --all")
self.assertEqual(str(client.user_io.out).count("Uploading package"), 1)
client.run("upload hello1/0.1@lasote/stable --all")
self.assertEqual(str(client.user_io.out).count("Uploading package"), 1)
client.run("upload hello2/0.1@lasote/stable --all")
self.assertEqual(str(client.user_io.out).count("Uploading package"), 1)
client.run("upload hello0/0.1@lasote/stable --all")
self.assertEqual(str(client.user_io.out).count("Uploading package"), 1)
#
client2 = TestClient(servers=self.servers, users={"default": [("lasote", "mypass")]})
conan_reference = ConanFileReference.loads("hello4/0.2@lasote/stable")
files3 = hello_conan_files(conan_reference=conan_reference, number=4, deps=[3], lang='go')
client2.save(files3)
client2.run("install --build missing")
command = os.sep.join([".", "bin", "say_hello"])
with CustomEnvPath(paths_to_add=['$GOPATH/bin'],
var_to_add=[('GOPATH', client2.current_folder), ]):
client2.runner('go install hello4_main',
cwd=os.path.join(client2.current_folder, 'src'))
if platform.system() == "Windows":
command = "hello4_main"
else:
command = './hello4_main'
client2.runner(command, cwd=os.path.join(client2.current_folder, 'bin'))
self.assertEqual(['Hello 4', 'Hello 3', 'Hello 1', 'Hello 0', 'Hello 2', 'Hello 0'],
str(client2.user_io.out).splitlines()[-6:])
| mit | -5,165,749,421,708,407,000 | 47.207317 | 99 | 0.616747 | false |
Lana-B/Pheno4T | madanalysis/enumeration/observable_type.py | 1 | 6721 | ################################################################################
#
# Copyright (C) 2012-2013 Eric Conte, Benjamin Fuks
# The MadAnalysis development team, email: <[email protected]>
#
# This file is part of MadAnalysis 5.
# Official website: <https://launchpad.net/madanalysis5>
#
# MadAnalysis 5 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MadAnalysis 5 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MadAnalysis 5. If not, see <http://www.gnu.org/licenses/>
#
################################################################################
from madanalysis.enumeration.ma5_running_type import MA5RunningType
import math
class ObservableType(object):
    # name : [accept_particles, parton-level code, hadron-level code,
    #         reco-level code, unit, nbins, xmin, xmax, cuttable, prefix]
values = { 'UNKNOWN' : [False,'','','','',0,0,0,False,False],\
'SQRTS' : [False,'PHYSICS->SqrtS(event.mc())','PHYSICS->SqrtS(event.mc())','','GeV',100,0.,1000., True, False],\
'TET' : [False,'PHYSICS->Transverse->EventTET(event.mc())','PHYSICS->Transverse->EventTET(event.mc())','PHYSICS->Transverse->EventTET(event.rec())','GeV',100,0.,1000., True,False],\
'MET' : [False,'PHYSICS->Transverse->EventMET(event.mc())','PHYSICS->Transverse->EventMET(event.mc())','PHYSICS->Transverse->EventMET(event.rec())','GeV',100,0.,1000., True,False],\
'THT' : [False,'PHYSICS->Transverse->EventTHT(event.mc())','PHYSICS->Transverse->EventTHT(event.mc())','PHYSICS->Transverse->EventTHT(event.rec())','GeV',100,0.,1000., True,False],\
'MHT' : [False,'PHYSICS->Transverse->EventMHT(event.mc())','PHYSICS->Transverse->EventMHT(event.mc())','PHYSICS->Transverse->EventMHT(event.rec())','GeV',100,0.,1000.,True,False],\
'NPID': [False,'NPID','NPID','NPID','',100,0.,100.,False,False],\
'NAPID': [False,'NAPID','NAPID','NAPID','',100,0.,100.,False,False],\
'E' : [True,'e()','e()','e()','GeV',100,0.,1000.,True,True],\
'M' : [True,'m()','m()','m()','GeV/c^{2}',100,0.,1000.,True,True],\
'P' : [True,'p()','p()','p()','GeV/c',100,0.,1000.,True,True],\
'ET' : [True,'et()','et()','et()','GeV',100,0.,1000.,True,True],\
'MT' : [True,'mt()','mt()','mt()','GeV/c^{2}',100,0.,1000.,True,True],\
'PT' : [True,'pt()','pt()','pt()','GeV/c',100,0.,1000.,True,True],\
'PX' : [True,'px()','px()','px()','GeV/c',100,-1000.,1000.,True,True],\
'PY' : [True,'py()','py()','py()','GeV/c',100,-1000.,1000.,True,True],\
'PZ' : [True,'pz()','pz()','pz()','GeV/c',100,-1000.,1000.,True,True],\
'R' : [True,'r()','r()','r()','',100,0.,1000.,True,True],\
'THETA' : [True,'theta()','theta()','theta()','',100,0.,2*math.pi+0.01,True,True],\
'ETA' : [True,'eta()','eta()','eta()','',100,-8.0,+8.0,True,True],\
'PHI' : [True,'phi()','phi()','phi()','',100,0.,2*math.pi+0.01,True,True],\
'Y' : [True,'y()','y()','y()','',100,-8.0,+8.0,True,True],\
'BETA' : [True,'beta()','beta()','beta()','',100,0.,1.,True,True],\
'GAMMA': [True,'gamma()','gamma()','gamma()','',100,1.,1000.,True,True],\
'N' : [True,'N()','N()','N()','',20,0.,20.,True,True],\
'ISOL' : [True,'','','isolated()','',2,0,1,True,False],\
'HE_EE': [True,'','','HEoverEE()','',100,0,100,True,False],\
'NTRACKS': [True,'','','ntracks()','',100,0,100,True,False] }
class __metaclass__(type):
def __getattr__(self, name):
if name in self.values.keys():
return self.values.keys().index(name)
else:
return self.values.keys().index('UNKNOWN')
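    # The metaclass __getattr__ turns attribute access into an index lookup,
    # e.g. ObservableType.PT is the position of 'PT' in the values dict, so
    # ObservableType.convert2unit(ObservableType.PT) returns 'GeV/c'.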
def accept_particles(self, index):
name = self.values.keys()[index]
return self.values[name][0]
def convert2string(self,index):
return self.values.keys()[index]
def convert2job_string(self,index,level):
name = self.values.keys()[index]
if level==MA5RunningType.PARTON:
return self.values[name][1]
elif level==MA5RunningType.HADRON:
return self.values[name][2]
elif level==MA5RunningType.RECO:
return self.values[name][3]
return ""
def convert2unit(self,index):
name = self.values.keys()[index]
return self.values[name][4]
def convert2nbins(self,index):
name = self.values.keys()[index]
return self.values[name][5]
def convert2xmin(self,index):
name = self.values.keys()[index]
return self.values[name][6]
def convert2xmax(self,index):
name = self.values.keys()[index]
return self.values[name][7]
def isCuttable(self,index):
name = self.values.keys()[index]
return self.values[name][8]
def prefix(self,index):
name = self.values.keys()[index]
return self.values[name][9]
def get_list(self,level=MA5RunningType.PARTON):
output = []
for item in self.values.keys():
x = ObservableType.convert2job_string(self.values.keys().index(item),level)
if x=="":
continue
output.append(item)
if self.values[item][0] and self.values[item][9]:
output.append('s'+item)
output.append('v'+item)
output.append('sd'+item)
output.append('ds'+item)
output.append('d'+item)
output.append('dv'+item)
output.append('vd'+item)
output.append('r'+item)
return output
def get_cutlist1(self,level=MA5RunningType.PARTON):
output = []
for item in self.values.keys():
if item=="N":
output.append(item)
continue
x = ObservableType.convert2job_string(self.values.keys().index(item),level)
if x=="":
continue
if not self.values[item][8]:
continue
if self.values[item][0]:
continue
output.append(item)
return output
def get_cutlist2(self,level=MA5RunningType.PARTON):
output = []
for item in self.values.keys():
x = ObservableType.convert2job_string(self.values.keys().index(item),level)
if item=="N":
continue
if x=="":
continue
if not self.values[item][8]:
continue
if not self.values[item][0]:
continue
output.append(item)
if not self.values[item][9]:
continue
output.append('s'+item)
output.append('v'+item)
output.append('sd'+item)
output.append('ds'+item)
output.append('d'+item)
output.append('dv'+item)
output.append('vd'+item)
output.append('r'+item)
return output
| gpl-3.0 | 7,029,142,806,390,245,000 | 37.626437 | 190 | 0.586371 | false |
mganeva/mantid | scripts/Muon/GUI/Common/context_example/context_example_widget.py | 1 | 1841 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
from Muon.GUI.Common.context_example.context_example_view import ContextExampleView
from Muon.GUI.Common.context_example.context_example_presenter import ContextExamplePresenter
from Muon.GUI.Common.context_example.context_example_model import ContextExampleModel
class ContextExampleWidget(object):
"""
An example of how to use the context with a widget class.
    The widget class exposes the MVP to the rest of the GUI.
"""
def __init__(self, context, parent=None):
model = ContextExampleModel(context)
sub_context = model.getSubContext()
view = ContextExampleView(sub_context, parent)
self._presenter = ContextExamplePresenter(view, model)
@property
def presenter(self):
return self._presenter
@property
def widget(self):
return self._presenter.widget
# interact with context
def setUpdateContext(self, slot):
"""
        Connect the given slot from the main GUI to the update
        signal emitted by this widget.
"""
view = self._presenter.widget
view.updateSignal.connect(slot)
def updateContext(self):
self._presenter.updateContext()
def loadFromContext(self):
# extract relevant info from context via model
model = self._presenter.model
sub_context = model.getSubContext()
# update the view with the subcontext
view = self._presenter.widget
view.loadFromContext(sub_context)
| gpl-3.0 | -1,136,197,275,128,112,800 | 31.875 | 93 | 0.689299 | false |
cumc-dbmi/pmi_sprint_reporter | file_transfer.py | 1 | 3848 | from base64 import b64encode
from io import BytesIO
from uuid import uuid4
import requests
import time
import wddx
import settings
TRANSFER_API_URL_FMT = 'https://transfer.nyp.org/seos/1000/%s.api'
TRANSFER_LOGIN_URL = TRANSFER_API_URL_FMT % 'login'
TRANSFER_FIND_URL = TRANSFER_API_URL_FMT % 'find'
TRANSFER_PUT_URL = TRANSFER_API_URL_FMT % 'put'
TRANSFER_SEND_URL = TRANSFER_API_URL_FMT % 'send'
SEND_DELAY_SECONDS = 1.5 # Accellion recommends 5 seconds, ain't nobody got time for that
UPLOAD_TIMEOUT_SECONDS = 60 * 2
def get_tokens():
"""
Retrieve Accellion API tokens
:return:
"""
data = {'auth_type': 'pwd',
'uid': settings.accellion['username'],
'pwd': settings.accellion['password'],
'api_token': 1,
'output': 'json'}
response = requests.post(TRANSFER_LOGIN_URL, data=data)
return response.json()
def parse_response(content):
items = wddx.loads(content)
return items[0]
def upload(filename, file_contents, recipients, mime_type='text/plain', subject=None, message=None, expire_days=21):
"""
Upload a file to the Accellion file system
:param filename: user-friendly filename
:param file_contents: binary data; this supports streamed data to prevent reading into memory
:param recipients: comma-separated list of e-mail addresses
:param subject: subject of e-mail
:param message: body of e-mail
:param mime_type: type of file
:param expire_days: number of days until link expires
:return: details from put and send api calls
"""
tokens = get_tokens()
uid = uuid4().__str__()
file_handle = '%s/files/%s/%s' % (tokens['client_id'], uid, filename)
data = {'token': tokens['put_token'], 'file_handle': file_handle}
put_response = requests.post(TRANSFER_PUT_URL, data=data, files={'file': (filename, file_contents, mime_type)})
put_details = parse_response(put_response.content)
# create e-mail package with links to file (short-url mode)
time.sleep(SEND_DELAY_SECONDS)
meta_file_handle = '%s/files/%s-list' % (tokens['client_id'], uid)
file_handle = put_details['file_handle']
file_size = put_details['file_size']
file_handle_hash = b64encode(file_handle)
file_list = '%s\n|%s\n|%s\n|\n' % (b64encode(filename), file_handle_hash, b64encode(file_size))
data = {'token': tokens['send_token'],
'short_token': 1,
'sender': b64encode(settings.accellion['username']),
'recipients': b64encode(recipients),
'meta_file_handle': meta_file_handle,
'file_list1': file_list,
'link_validity': expire_days,
'email_options1': 'vr'} # only allow the original recipient to download
if subject is not None:
data['subject'] = b64encode(subject)
if message is not None:
data['message'] = b64encode(message)
send_response = requests.post(TRANSFER_SEND_URL, data=data, timeout=UPLOAD_TIMEOUT_SECONDS)
send_details = parse_response(send_response.content)
response_details = dict(put=put_details, send=send_details)
return response_details
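# Example usage of upload() (a sketch; the filename, recipient and message
# below are hypothetical and not taken from this project):
#
#     with open('report.csv', 'rb') as f:
#         details = upload('report.csv', f, '[email protected]',
#                          mime_type='text/csv',
#                          subject='Weekly report',
#                          message='Download link expires in 21 days.')
#     print(details['send'])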
def download(url):
"""
    Download file from secure file transfer and return its contents
:param url: url of file in secure file transfer
:return: file contents as str
"""
tokens = get_tokens()
cookie_value = 'user&%s&cs&%s' % (settings.accellion['username'], tokens['inbox_cs'])
r = requests.get(url, cookies=dict(a1000c1s1=cookie_value))
bs = BytesIO(r.content)
return bs.read()
def inbox():
"""
Retrieve list of e-mail packages
:return:
"""
tokens = get_tokens()
data = dict(token=tokens['inbox_token'], mailbody=1)
inbox_response = requests.post(TRANSFER_FIND_URL, data=data)
return parse_response(inbox_response.content)
| mit | -8,684,232,296,754,300,000 | 35.301887 | 116 | 0.661123 | false |
funkring/fdoo | addons-funkring/shop_delivery/__openerp__.py | 1 | 1296 | # -*- coding: utf-8 -*-
#############################################################################
#
# Copyright (c) 2007 Martin Reisenhofer <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Shop Delivery Default",
"description":"""
Shop Delivery
=============
Define shop delivery defaults
""",
"version" : "1.0",
"author" : "oerp.at",
"website" : "http://oerp.at",
"depends" : ["delivery",
"at_purchase_sale"],
"data" : ["view/shop_view.xml"],
"auto_install" : False,
"installable": True
} | agpl-3.0 | -7,352,487,846,455,987,000 | 34.054054 | 78 | 0.574074 | false |
giraldeau/autovm | autovm/helpers.py | 1 | 3970 | #!/usr/bin/env python
# coding=utf-8
import os
from os.path import join, dirname, exists
from os import makedirs, utime, unlink, walk
from heapq import heappush, heappop, heappushpop
import platform
import sys
def default_dist():
(a, b, c) = platform.dist()
return c
def default_arch():
mapping = { 'x86_64': 'amd64', 'i386': 'i386' }
arch = platform.machine()
return mapping.get(arch, 'amd64')
class NullProgressMonitor(object):
def __init__(self, msg="progress"):
pass
def update(self, percent):
pass
class CmdProgressMonitor(object):
def __init__(self, msg="progress"):
self.msg = msg
self.width = 10
self.percent = 0.0
def update(self, percent):
if (percent - self.percent < 0.001):
return
self.percent = percent
ticks = ((int(percent * 100) + 5) / self.width)
blank = self.width - ticks
sys.stdout.write("%s [%s%s] %.1f%%\r" % (self.msg, '#' * ticks, ' ' * blank, self.percent * 100))
sys.stdout.flush()
null_progress = NullProgressMonitor()
def copyfileobj_progress(fsrc, fdst, size, length=16*1024, progress=null_progress):
"""copy data from file-like object fsrc to file-like object fdst"""
sum = 0.0
if size == 0:
size = 1
while 1:
progress.update(sum / size)
buf = fsrc.read(length)
sum += len(buf)
if not buf:
break
fdst.write(buf)
progress.update(1.0)
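# Example (sketch): stream one file into another while printing progress with
# the CmdProgressMonitor defined above. The paths are hypothetical.
#
#     src_path, dst_path = '/tmp/disk.img', '/tmp/disk-copy.img'
#     size = os.path.getsize(src_path)
#     with open(src_path, 'rb') as fsrc, open(dst_path, 'wb') as fdst:
#         copyfileobj_progress(fsrc, fdst, size,
#                              progress=CmdProgressMonitor('copying'))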
# http://stackoverflow.com/questions/12654772/create-empty-file-using-python
def touch(path):
d = dirname(path)
if not exists(d):
makedirs(d)
with open(path, 'a'):
utime(path, None)
class AbstractWalkerVisitor(object):
def visit_file(self, root, name):
pass
def visit_dir(self, root, name):
pass
class PrintWalkerVisitor(AbstractWalkerVisitor):
def visit_file(self, root, name):
print "f %s" % repr(join(root, name))
def visit_dir(self, root, name):
print "d %s" % repr(join(root, name))
class EntriesWalkerVisitor(AbstractWalkerVisitor):
def __init__(self):
self.entries = []
def visit_file(self, root, name):
self.entries.append(join(root, name))
class CountWalkerVisitor(AbstractWalkerVisitor):
def __init__(self):
self.files = 0
self.directories = 0
def visit_file(self, root, name):
self.files += 1
def visit_dir(self, root, name):
self.directories += 1
class FileEntry(object):
def __init__(self, path):
self.path = path
self.st = os.stat(path)
def __cmp__(self, other):
if (self.st.st_mtime < other.st.st_mtime):
return 1
elif (self.st.st_mtime == other.st.st_mtime):
return 0
return -1
def __repr__(self):
return "%s %s" % (str(self.st.st_mtime), self.path)
class LRUWalkerVisitor(AbstractWalkerVisitor):
'make the list of least used files'
def __init__(self, max_item=100):
self.heap = []
self.max_item = max_item
def visit_file(self, root, name):
item = FileEntry(join(root, name))
if len(self.heap) < self.max_item:
heappush(self.heap, item)
else:
heappushpop(self.heap, item)
def get_entries(self):
return [heappop(self.heap) for i in range(len(self.heap))]
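# Example (sketch): combine Walker (defined below) with visitors to count
# entries and list the 10 least recently modified files under a directory.
# The path is hypothetical.
#
#     counter = CountWalkerVisitor()
#     lru = LRUWalkerVisitor(max_item=10)
#     Walker().process('/var/cache/autovm', counter, lru)
#     print(counter.files, counter.directories)
#     for entry in lru.get_entries():
#         print(entry)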
class DeleteWalkerVisitor(AbstractWalkerVisitor):
def visit_file(self, root, name):
unlink(join(root, name))
def visit_dir(self, root, name):
unlink(join(root, name))
class Walker(object):
'Scan directory and feed visitor'
def process(self, path, *visitor):
for root, dirs, files in walk(path, topdown=False):
for name in files:
for v in visitor:
v.visit_file(root, name)
for name in dirs:
for v in visitor:
v.visit_dir(root, name) | gpl-3.0 | -6,377,656,115,338,019,000 | 28.857143 | 106 | 0.589673 | false |
endlessm/chromium-browser | chrome/android/java/src/PRESUBMIT.py | 1 | 8949 | # Copyright (c) 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for Android Java code.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
This presubmit checks for the following:
- No new calls to Notification.Builder or NotificationCompat.Builder
constructors. Callers should use ChromeNotificationBuilder instead.
- No new calls to AlertDialog.Builder. Callers should use ModalDialogView
instead.
"""
import re
NEW_NOTIFICATION_BUILDER_RE = re.compile(
r'\bnew\sNotification(Compat)?\.Builder\b')
IMPORT_APP_COMPAT_ALERTDIALOG_RE = re.compile(
r'\bimport\sandroid\.support\.v7\.app\.AlertDialog;')
NEW_COMPATIBLE_ALERTDIALOG_BUILDER_RE = re.compile(
r'\bnew\s+(UiUtils\s*\.)?CompatibleAlertDialogBuilder\b')
NEW_ALERTDIALOG_BUILDER_RE = re.compile(
r'\bnew\sAlertDialog\.Builder\b')
COMMENT_RE = re.compile(r'^\s*(//|/\*|\*)')
BROWSER_ROOT = 'chrome/android/java/src/org/chromium/chrome/browser/'
def CheckChangeOnUpload(input_api, output_api):
return _CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _CommonChecks(input_api, output_api)
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
result = []
result.extend(_CheckNotificationConstructors(input_api, output_api))
result.extend(_CheckAlertDialogBuilder(input_api, output_api))
result.extend(_CheckCompatibleAlertDialogBuilder(input_api, output_api))
# Add more checks here
return result
def _CheckNotificationConstructors(input_api, output_api):
# "Blacklist" because the following files are excluded from the check.
blacklist = (
'chrome/android/java/src/org/chromium/chrome/browser/notifications/'
'NotificationBuilder.java',
'chrome/android/java/src/org/chromium/chrome/browser/notifications/'
'NotificationCompatBuilder.java'
)
error_msg = '''
Android Notification Construction Check failed:
Your new code added one or more calls to the Notification.Builder and/or
NotificationCompat.Builder constructors, listed below.
This is banned, please construct notifications using
NotificationBuilderFactory.createChromeNotificationBuilder instead,
specifying a channel for use on Android O.
See https://crbug.com/678670 for more information.
'''
return _CheckReIgnoreComment(input_api, output_api, error_msg, blacklist,
NEW_NOTIFICATION_BUILDER_RE)
def _CheckAlertDialogBuilder(input_api, output_api):
# "Blacklist" because the following files are excluded from the check. In
# general, preference and FRE related UIs are not relevant to VR mode.
blacklist = (
BROWSER_ROOT + 'browserservices/ClearDataDialogActivity.java',
BROWSER_ROOT + 'browsing_data/ConfirmImportantSitesDialogFragment.java',
BROWSER_ROOT + 'browsing_data/OtherFormsOfHistoryDialogFragment.java',
BROWSER_ROOT + 'datareduction/settings/DataReductionStatsPreference.java',
BROWSER_ROOT + 'password_manager/AccountChooserDialog.java',
BROWSER_ROOT + 'password_manager/AutoSigninFirstRunDialog.java',
BROWSER_ROOT + r'settings[\\\/].*',
BROWSER_ROOT + 'signin/AccountPickerDialogFragment.java',
BROWSER_ROOT + 'signin/AccountSigninView.java',
BROWSER_ROOT + 'signin/ConfirmImportSyncDataDialog.java',
BROWSER_ROOT + 'signin/ConfirmManagedSyncDataDialog.java',
BROWSER_ROOT + 'signin/ConfirmSyncDataStateMachineDelegate.java',
BROWSER_ROOT + 'signin/SigninFragmentBase.java',
BROWSER_ROOT + 'signin/SignOutDialogFragment.java',
BROWSER_ROOT + 'site_settings/AddExceptionPreference.java',
BROWSER_ROOT + 'site_settings/ChosenObjectSettings.java',
BROWSER_ROOT + 'site_settings/ManageSpaceActivity.java',
BROWSER_ROOT + 'site_settings/ManageSpaceActivity.java',
BROWSER_ROOT + 'site_settings/SingleCategorySettings.java',
BROWSER_ROOT + 'site_settings/SingleWebsiteSettings.java',
BROWSER_ROOT + 'sync/settings/ManageSyncSettings.java',
BROWSER_ROOT + 'sync/settings/SyncAndServicesSettings.java',
BROWSER_ROOT + 'sync/ui/PassphraseCreationDialogFragment.java',
BROWSER_ROOT + 'sync/ui/PassphraseDialogFragment.java',
BROWSER_ROOT + 'sync/ui/PassphraseTypeDialogFragment.java',
)
error_msg = '''
AlertDialog.Builder Check failed:
Your new code added one or more calls to the AlertDialog.Builder, listed
below.
We recommend you use ModalDialogProperties to show a dialog whenever possible
to support VR mode. You could only keep the AlertDialog if you are certain
that your new AlertDialog is not used in VR mode (e.g. preference, FRE).
If you are in doubt, contact
//src/chrome/android/java/src/org/chromium/chrome/browser/vr/VR_JAVA_OWNERS
'''
error_files = []
result = _CheckReIgnoreComment(input_api, output_api, error_msg, blacklist,
NEW_ALERTDIALOG_BUILDER_RE, error_files)
wrong_builder_errors = []
wrong_builder_error_msg = '''
Android Use of AppCompat AlertDialog.Builder Check failed:
Your new code added one or more calls to the AppCompat AlertDialog.Builder,
files listed below.
If you are keeping the new AppCompat AlertDialog.Builder, please use
CompatibleAlertDialogBuilder instead to work around support library issues.
See https://crbug.com/966101 for more information.
'''
for f in error_files:
contents = input_api.ReadFile(f)
if IMPORT_APP_COMPAT_ALERTDIALOG_RE.search(contents):
wrong_builder_errors.append(' %s' % (f.LocalPath()))
if wrong_builder_errors:
result.extend([output_api.PresubmitError(
wrong_builder_error_msg, wrong_builder_errors)])
return result
def _CheckCompatibleAlertDialogBuilder(input_api, output_api):
# "Blacklist" because the following files are excluded from the check.
blacklist = (
BROWSER_ROOT + 'LoginPrompt.java',
BROWSER_ROOT + 'SSLClientCertificateRequest.java',
BROWSER_ROOT + 'autofill/AutofillPopupBridge.java',
BROWSER_ROOT + 'autofill/keyboard_accessory/'
'AutofillKeyboardAccessoryBridge.java',
BROWSER_ROOT + 'dom_distiller/DistilledPagePrefsView.java',
BROWSER_ROOT + 'dom_distiller/DomDistillerUIUtils.java',
BROWSER_ROOT + 'download/DownloadController.java',
BROWSER_ROOT + 'download/OMADownloadHandler.java',
BROWSER_ROOT + 'externalnav/ExternalNavigationDelegateImpl.java',
BROWSER_ROOT + 'payments/AndroidPaymentApp.java',
BROWSER_ROOT + 'permissions/AndroidPermissionRequester.java',
BROWSER_ROOT + 'share/ShareDelegateImpl.java',
BROWSER_ROOT + 'util/AccessibilityUtil.java',
BROWSER_ROOT + 'webapps/AddToHomescreenDialog.java',
BROWSER_ROOT + 'webapps/WebappOfflineDialog.java',
)
error_msg = '''
Android Use of CompatibleAlertDialogBuilder Check failed:
Your new code added one or more calls to the CompatibleAlertDialogBuilder
constructors, listed below.
We recommend you use ModalDialogProperties to show a dialog whenever possible
to support VR mode. You could only keep the AlertDialog if you are certain
that your new AlertDialog is not used in VR mode (e.g. preference, FRE).
If you are in doubt, contact
//src/chrome/android/java/src/org/chromium/chrome/browser/vr/VR_JAVA_OWNERS
'''
return _CheckReIgnoreComment(input_api, output_api, error_msg, blacklist,
NEW_COMPATIBLE_ALERTDIALOG_BUILDER_RE)
def _CheckReIgnoreComment(input_api, output_api, error_msg, blacklist,
regular_expression, error_files=None):
def CheckLine(current_file, line_number, line, problems, error_files):
"""Returns a boolean whether the line contains an error."""
if (regular_expression.search(line) and not COMMENT_RE.search(line)):
if error_files is not None:
error_files.append(current_file)
problems.append(
' %s:%d\n \t%s' %
(current_file.LocalPath(), line_number, line.strip()))
return True
return False
problems = []
sources = lambda x: input_api.FilterSourceFile(
x, white_list=(r'.*\.java$',), black_list=blacklist)
for f in input_api.AffectedFiles(include_deletes=False,
file_filter=sources):
previous_line = ''
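    # Remember the previous changed line so that a constructor call split
    # across two lines (e.g. "new" on one line, "AlertDialog.Builder(...)"
    # on the next) is still caught by re-checking the two lines joined.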
for line_number, line in f.ChangedContents():
if not CheckLine(f, line_number, line, problems, error_files):
if previous_line:
two_lines = '\n'.join([previous_line, line])
CheckLine(f, line_number, two_lines, problems, error_files)
previous_line = line
else:
previous_line = ''
if problems:
return [output_api.PresubmitError(error_msg, problems)]
return []
| bsd-3-clause | -43,752,571,225,410,950 | 41.614286 | 80 | 0.721086 | false |
Uli1/mapnik | scons/scons-local-2.4.0/SCons/Tool/ifort.py | 1 | 3327 | """SCons.Tool.ifort
Tool-specific initialization for newer versions of the Intel Fortran Compiler
for Linux/Windows (and possibly Mac OS X).
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/ifort.py rel_2.4.0:3365:9259ea1c13d7 2015/09/21 14:03:43 bdbaddog"
import SCons.Defaults
from SCons.Scanner.Fortran import FortranScan
from FortranCommon import add_all_to_env
def generate(env):
"""Add Builders and construction variables for ifort to an Environment."""
# ifort supports Fortran 90 and Fortran 95
# Additionally, ifort recognizes more file extensions.
fscan = FortranScan("FORTRANPATH")
SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
if 'FORTRANFILESUFFIXES' not in env:
env['FORTRANFILESUFFIXES'] = ['.i']
else:
env['FORTRANFILESUFFIXES'].append('.i')
if 'F90FILESUFFIXES' not in env:
env['F90FILESUFFIXES'] = ['.i90']
else:
env['F90FILESUFFIXES'].append('.i90')
add_all_to_env(env)
fc = 'ifort'
for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
env['%s' % dialect] = fc
env['SH%s' % dialect] = '$%s' % dialect
if env['PLATFORM'] == 'posix':
env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)
if env['PLATFORM'] == 'win32':
# On Windows, the ifort compiler specifies the object on the
# command line with -object:, not -o. Massage the necessary
# command-line construction variables.
for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
for var in ['%sCOM' % dialect, '%sPPCOM' % dialect,
'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]:
env[var] = env[var].replace('-o $TARGET', '-object:$TARGET')
env['FORTRANMODDIRPREFIX'] = "/module:"
else:
env['FORTRANMODDIRPREFIX'] = "-module "
def exists(env):
return env.Detect('ifort')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| lgpl-2.1 | -6,600,932,587,542,999,000 | 36.806818 | 104 | 0.685903 | false |
allenai/allennlp | allennlp/training/trainer.py | 1 | 56811 | import datetime
import logging
import math
import os
import re
import time
import traceback
from contextlib import contextmanager
from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union
from allennlp.common.util import int_to_device
import torch
import torch.distributed as dist
from torch.cuda import amp
import torch.optim.lr_scheduler
from torch.nn.parallel import DistributedDataParallel
from torch.nn.utils import clip_grad_norm_
from allennlp.common import Lazy, Registrable, Tqdm
from allennlp.common import util as common_util
from allennlp.common.checks import ConfigurationError, check_for_gpu
from allennlp.data import DataLoader
from allennlp.data.dataloader import TensorDict
from allennlp.models.model import Model
from allennlp.nn import util as nn_util
from allennlp.training import util as training_util
from allennlp.training.checkpointer import Checkpointer
from allennlp.training.learning_rate_schedulers import LearningRateScheduler
from allennlp.training.metric_tracker import MetricTracker
from allennlp.training.momentum_schedulers import MomentumScheduler
from allennlp.training.moving_average import MovingAverage
from allennlp.training.optimizers import Optimizer
from allennlp.training.tensorboard_writer import TensorboardWriter
logger = logging.getLogger(__name__)
class Trainer(Registrable):
"""
The base class for an AllenNLP trainer. It can do pretty much
anything you want. Your subclass should implement `train`
and also probably `from_params`.
"""
default_implementation = "gradient_descent"
def __init__(
self,
serialization_dir: str = None,
cuda_device: Optional[Union[int, torch.device]] = None,
distributed: bool = False,
local_rank: int = 0,
world_size: int = 1,
) -> None:
if cuda_device is None:
from torch import cuda
if cuda.device_count() > 0:
cuda_device = 0
else:
cuda_device = -1
check_for_gpu(cuda_device)
self._serialization_dir = serialization_dir
if isinstance(cuda_device, list):
raise ConfigurationError(
"In allennlp 1.0, the Trainer can only be assigned a single `cuda_device`. "
"Instead, we use torch's DistributedDataParallel at the command level, meaning "
"our Trainer always uses a single GPU per process."
)
if distributed and world_size <= 1:
raise ConfigurationError(
"Distributed training can be performed only with more than 1 device. Check "
"`cuda_device` key in the experiment configuration."
)
self.cuda_device = int_to_device(cuda_device)
self._distributed = distributed
self._rank = local_rank
self._master = self._rank == 0
self._world_size = world_size
def train(self) -> Dict[str, Any]:
"""
Train a model and return the results.
"""
raise NotImplementedError
@contextmanager
def get_checkpoint_state(self) -> Iterator[Tuple[Dict[str, Any], Dict[str, Any]]]:
"""
Returns a tuple of (model state, training state), where training state could have several
        internal components (e.g., an optimizer, a learning rate scheduler, etc.).
This is a context manager, and should be called as `with trainer.get_checkpoint_state() as
state:`, so that the trainer has the opportunity to change and restore its internal state
for checkpointing. This is used, e.g., for moving averages of model weights.
"""
raise NotImplementedError
class BatchCallback(Registrable):
"""
An optional callback that you can pass to the `GradientDescentTrainer` that will be called at
the end of every batch, during both training and validation. The default implementation
does nothing. You can implement your own callback and do whatever you want, such as saving
predictions to disk or extra logging.
"""
def __call__(
self,
trainer: "GradientDescentTrainer",
batch_inputs: List[List[TensorDict]],
batch_outputs: List[Dict[str, Any]],
batch_metrics: Dict[str, Any],
epoch: int,
batch_number: int,
is_training: bool,
is_master: bool,
) -> None:
pass
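# A minimal sketch of a custom BatchCallback (hypothetical, not part of the
# library): append the training loss of every batch to a text file.
#
#     @BatchCallback.register("loss-logger")
#     class LossLoggerCallback(BatchCallback):
#         def __call__(self, trainer, batch_inputs, batch_outputs, batch_metrics,
#                      epoch, batch_number, is_training, is_master) -> None:
#             if is_master and is_training and "loss" in batch_metrics:
#                 with open("loss_log.txt", "a") as log_file:
#                     log_file.write(f"{epoch}\t{batch_number}\t{batch_metrics['loss']}\n")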
@BatchCallback.register("tensorboard-memory-usage")
class TensoboardBatchMemoryUsage(BatchCallback):
"""
Logs the CPU and GPU memory usage to tensorboard on every batch.
This is mainly used for debugging as it can cause a significant slowdown in training.
"""
def __call__(
self,
trainer: "GradientDescentTrainer",
batch_inputs: List[List[TensorDict]],
batch_outputs: List[Dict[str, Any]],
batch_metrics: Dict[str, Any],
epoch: int,
batch_number: int,
is_training: bool,
is_master: bool,
) -> None:
# In the distributed case we need to call this from every worker, since every
# worker reports its own memory usage.
cpu_memory_usage = common_util.peak_cpu_memory()
gpu_memory_usage = common_util.peak_gpu_memory()
# But we only want to log from the master process.
if is_master:
trainer._tensorboard.log_memory_usage(cpu_memory_usage, gpu_memory_usage)
BatchCallback.register("null")(BatchCallback)
class EpochCallback(Registrable):
"""
An optional callback that you can pass to the `GradientDescentTrainer` that will be called at
the end of every epoch (and before the start of training, with `epoch=-1`). The default
implementation does nothing. You can implement your own callback and do whatever you want, such
as additional modifications of the trainer's state in between epochs.
"""
def __call__(
self,
trainer: "GradientDescentTrainer",
metrics: Dict[str, Any],
epoch: int,
is_master: bool,
) -> None:
pass
EpochCallback.register("null")(EpochCallback)
@EpochCallback.register("track_epoch_callback")
class TrackEpochCallback:
"""
A callback that you can pass to the `GradientDescentTrainer` to access the current epoch number
in your model during training. This callback sets `model.epoch`, which can be read inside of
`model.forward()`. Since the EpochCallback passes `epoch=-1`
at the start of the training, we set `model.epoch = epoch + 1` which now denotes the number of
completed epochs at a given training state.
"""
def __init__(self):
super().__init__()
def __call__(
self,
trainer: "GradientDescentTrainer",
metrics: Dict[str, Any],
epoch: int,
is_master: bool,
) -> None:
trainer.model.epoch = epoch + 1
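# Sketch of reading the epoch number set by TrackEpochCallback inside a model
# (hypothetical model code, not part of the library):
#
#     class MyModel(Model):
#         def __init__(self, vocab):
#             super().__init__(vocab)
#             self.epoch = 0  # overwritten by TrackEpochCallback after each epoch
#
#         def forward(self, **inputs):
#             if self.epoch < 2:
#                 pass  # e.g. keep an auxiliary loss enabled for the first epochs
#             ...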
_BasicCallback = Union[BatchCallback, EpochCallback]
class _TrainerCallbackMeta(type):
def __new__(cls, name, bases, dct):
"""
Add subclasses that wrap the `TrainerCallback` into other interfaces.
"""
subtype = super().__new__(cls, name, bases, dct)
# These subtypes wrap the `TrainerCallback` into the `_BasicCallback` interfaces.
subtype.Batch = cls._make_callback_type(BatchCallback, subtype.on_batch)
subtype.Epoch = cls._make_callback_type(EpochCallback, subtype.on_epoch)
subtype.End = cls._make_callback_type(EpochCallback, subtype.on_end)
return subtype
@classmethod
def _make_callback_type(
cls,
call_type: Type[_BasicCallback],
call: Callable[[], None],
) -> Type[_BasicCallback]: # type: ignore
class _Wrapper(call_type): # type: ignore
def __init__(self, trainer_callback: "TrainerCallback"):
self.trainer_callback = trainer_callback
def __call__(self, trainer: "GradientDescentTrainer", *args, **kwargs):
call(self.trainer_callback, trainer, *args, **kwargs) # type: ignore
return _Wrapper
class TrainerCallback(Registrable, metaclass=_TrainerCallbackMeta):
"""
A general callback object that wraps all three types of callbacks into one.
Rather than a `__call__` method, this class has `on_batch`, `on_epoch`, and `on_end` methods, corresponding to
each callback type. Each one receives the state of the wrapper object as `self`. This enables easier state
sharing between related callbacks.
Under the hood, this is a metaclass that creates wrapping subclasses each time a subclass is created.
"""
def on_batch(
self,
trainer: "GradientDescentTrainer",
batch_inputs: List[List[TensorDict]],
batch_outputs: List[Dict[str, Any]],
batch_metrics: Dict[str, Any],
epoch: int,
batch_number: int,
is_training: bool,
is_master: bool,
) -> None:
"""
This callback hook is called after the end of each batch. This is equivalent to `BatchCallback`.
"""
pass
def on_epoch(
self,
trainer: "GradientDescentTrainer",
metrics: Dict[str, Any],
epoch: int,
is_master: bool,
) -> None:
"""
This callback hook is called after the end of each epoch. This is equivalent to `EpochCallback`.
"""
pass
def on_end(
self,
trainer: "GradientDescentTrainer",
metrics: Dict[str, Any],
epoch: int,
is_master: bool,
) -> None:
"""
This callback hook is called after the final training epoch. The `epoch` is passed as an argument.
"""
pass
def batch(self):
"""
Construct a `BatchCallback` wrapper for this `TrainCallback`.
The `cls.Batch` type is created by the metaclass.
"""
return self.Batch(self)
def epoch(self):
"""
Construct an `EpochCallback` wrapper for this instance.
The `cls.Epoch` type is created by the metaclass.
"""
return self.Epoch(self)
def end(self):
"""
Construct an `EpochCallback` wrapping the `on_end` end-of-training hook.
The `cls.End` type is created by the metaclass.
"""
return self.End(self)
TrainerCallback.register("null")(TrainerCallback)
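# Sketch of a stateful TrainerCallback (hypothetical): one object receives the
# batch, epoch and end-of-training hooks, so sharing state between them is easy.
#
#     @TrainerCallback.register("batch-counter")
#     class BatchCounter(TrainerCallback):
#         def __init__(self):
#             self.batches_seen = 0
#
#         def on_batch(self, trainer, batch_inputs, batch_outputs, batch_metrics,
#                      epoch, batch_number, is_training, is_master) -> None:
#             if is_training:
#                 self.batches_seen += 1
#
#         def on_end(self, trainer, metrics, epoch, is_master) -> None:
#             if is_master:
#                 logger.info("Saw %d training batches in total", self.batches_seen)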
@Trainer.register("gradient_descent", constructor="from_partial_objects")
class GradientDescentTrainer(Trainer):
"""
A trainer for doing supervised learning with gradient descent. It just takes a labeled dataset
and a `DataLoader`, and uses the supplied `Optimizer` to learn the weights for your model over
some fixed number of epochs. You can also pass in a validation dataloader and enable early
stopping. There are many other bells and whistles as well.
Registered as a `Trainer` with the name "gradient_descent" (and is also the default `Trainer`).
The constructor that is registered is `from_partial_objects` - see the arguments to that
function for the exact keys that should be used, if you are using a configuration file. They
largely match the arguments to `__init__`, and we don't repeat their docstrings in
`from_partial_objects`.
[0]: https://tinyurl.com/y5mv44fw
# Parameters
model : `Model`, required.
An AllenNLP model to be optimized. Pytorch Modules can also be optimized if
their `forward` method returns a dictionary with a "loss" key, containing a
scalar tensor representing the loss function to be optimized.
If you are training your model using GPUs, your model should already be
on the correct device. (If you are using our `train` command this will be
handled for you.)
In a typical AllenNLP configuration file, this parameter does not get an entry under the
"trainer", it gets constructed separately.
optimizer : `torch.nn.Optimizer`, required.
An instance of a Pytorch Optimizer, instantiated with the parameters of the
model to be optimized.
data_loader : `DataLoader`, required.
A `DataLoader` containing your `Dataset`, yielding padded indexed batches.
In a typical AllenNLP configuration file, this parameter does not get an entry under the
"trainer", it gets constructed separately.
patience : `Optional[int] > 0`, optional (default=`None`)
Number of epochs to be patient before early stopping: the training is stopped
after `patience` epochs with no improvement. If given, it must be `> 0`.
If None, early stopping is disabled.
validation_metric : `str`, optional (default=`"-loss"`)
Validation metric to measure for whether to stop training using patience
and whether to serialize an `is_best` model each epoch. The metric name
must be prepended with either "+" or "-", which specifies whether the metric
is an increasing or decreasing function.
validation_data_loader : `DataLoader`, optional (default=`None`)
A `DataLoader` to use for the validation set. If `None`, then
use the training `DataLoader` with the validation data.
In a typical AllenNLP configuration file, this parameter does not get an entry under the
"trainer", it gets constructed separately.
num_epochs : `int`, optional (default = `20`)
Number of training epochs.
serialization_dir : `str`, optional (default=`None`)
Path to directory for saving and loading model files. Models will not be saved if
this parameter is not passed.
In a typical AllenNLP configuration file, this parameter does not get an entry under the
"trainer", it gets constructed separately.
checkpointer : `Checkpointer`, optional (default=`None`)
A `Checkpointer` is responsible for periodically saving model weights. If none is given
here, we will construct one with default parameters.
cuda_device : `int`, optional (default = `-1`)
An integer specifying the CUDA device(s) to use for this process. If -1, the CPU is used.
Data parallelism is controlled at the allennlp train level, so each trainer will have a single
GPU.
grad_norm : `float`, optional, (default = `None`).
If provided, gradient norms will be rescaled to have a maximum of this value.
grad_clipping : `float`, optional (default = `None`).
If provided, gradients will be clipped `during the backward pass` to have an (absolute)
maximum of this value. If you are getting `NaNs` in your gradients during training
that are not solved by using `grad_norm`, you may need this.
learning_rate_scheduler : `LearningRateScheduler`, optional (default = `None`)
If specified, the learning rate will be decayed with respect to
this schedule at the end of each epoch (or batch, if the scheduler implements
the `step_batch` method). If you use `torch.optim.lr_scheduler.ReduceLROnPlateau`,
this will use the `validation_metric` provided to determine if learning has plateaued.
To support updating the learning rate on every batch, this can optionally implement
`step_batch(batch_num_total)` which updates the learning rate given the batch number.
momentum_scheduler : `MomentumScheduler`, optional (default = `None`)
If specified, the momentum will be updated at the end of each batch or epoch
according to the schedule.
tensorboard_writer : `TensorboardWriter`, optional
If this is not provided, we will construct a `TensorboardWriter` with default
parameters and use that.
moving_average : `MovingAverage`, optional, (default = `None`)
If provided, we will maintain moving averages for all parameters. During training, we
employ a shadow variable for each parameter, which maintains the moving average. During
evaluation, we backup the original parameters and assign the moving averages to corresponding
parameters. Be careful that when saving the checkpoint, we will save the moving averages of
parameters. This is necessary because we want the saved model to perform as well as the validated
model if we load it later. But this may cause problems if you restart the training from checkpoint.
batch_callbacks : `List[BatchCallback]`, optional (default = `None`)
A list of callbacks that will be called at the end of every batch, during both train and
validation.
epoch_callbacks : `List[EpochCallback]`, optional (default = `None`)
A list of callbacks that will be called at the end of every epoch, and at the start of
training (with epoch = -1).
end_callbacks : `List[EpochCallback]`, optional (default = `None`)
A list of callbacks that will be called after the final epoch at the end of training. The type of the
callbacks is the same as `epoch_callbacks`.
trainer_callbacks : `List[TrainerCallback]`, optional (default = `None`)
A list of callbacks that will be called at each batch, epoch, and at the start and end of training.
distributed : `bool`, optional, (default = `False`)
If set, PyTorch's `DistributedDataParallel` is used to train the model in multiple GPUs. This also
requires `world_size` to be greater than 1.
In a typical AllenNLP configuration file, this parameter does not get an entry under the
"trainer", it gets constructed separately (you need a top-level "distributed" key, next to
the "trainer" entry, that specifies a list of "cuda_devices").
local_rank : `int`, optional, (default = `0`)
This is the unique identifier of the `Trainer` in a distributed process group. The GPU device id is
used as the rank.
In a typical AllenNLP configuration file, this parameter does not get an entry under the
"trainer", it gets constructed separately.
world_size : `int`, (default = `1`)
The number of `Trainer` workers participating in the distributed training.
In a typical AllenNLP configuration file, this parameter does not get an entry under the
"trainer", it gets constructed separately.
num_gradient_accumulation_steps : `int`, optional, (default = `1`)
Gradients are accumulated for the given number of steps before doing an optimizer step. This can
be useful to accommodate batches that are larger than the RAM size. Refer [Thomas Wolf's
post][0] for details on Gradient Accumulation.
use_amp : `bool`, optional, (default = `False`)
If `True`, we'll train using [Automatic Mixed Precision](https://pytorch.org/docs/stable/amp.html).
"""
def __init__(
self,
model: Model,
optimizer: torch.optim.Optimizer,
data_loader: DataLoader,
patience: Optional[int] = None,
validation_metric: str = "-loss",
validation_data_loader: DataLoader = None,
num_epochs: int = 20,
serialization_dir: Optional[str] = None,
checkpointer: Checkpointer = None,
cuda_device: Optional[Union[int, torch.device]] = None,
grad_norm: Optional[float] = None,
grad_clipping: Optional[float] = None,
learning_rate_scheduler: Optional[LearningRateScheduler] = None,
momentum_scheduler: Optional[MomentumScheduler] = None,
tensorboard_writer: TensorboardWriter = None,
moving_average: Optional[MovingAverage] = None,
batch_callbacks: List[BatchCallback] = None,
epoch_callbacks: List[EpochCallback] = None,
end_callbacks: List[EpochCallback] = None,
trainer_callbacks: List[TrainerCallback] = None,
distributed: bool = False,
local_rank: int = 0,
world_size: int = 1,
num_gradient_accumulation_steps: int = 1,
use_amp: bool = False,
) -> None:
super().__init__(serialization_dir, cuda_device, distributed, local_rank, world_size)
# I am not calling move_to_gpu here, because if the model is
# not already on the GPU then the optimizer is going to be wrong.
self.model = model
self.data_loader = data_loader
self._validation_data_loader = validation_data_loader
self.optimizer = optimizer
if patience is None: # no early stopping
if validation_data_loader is not None:
logger.warning(
"You provided a validation dataset but patience was set to None, "
"meaning that early stopping is disabled"
)
elif (not isinstance(patience, int)) or patience <= 0:
raise ConfigurationError(
'{} is an invalid value for "patience": it must be a positive integer '
"or None (if you want to disable early stopping)".format(patience)
)
# For tracking is_best_so_far and should_stop_early
self._metric_tracker = MetricTracker(patience, validation_metric)
# Get rid of + or -
self._validation_metric = validation_metric[1:]
self._num_epochs = num_epochs
self._checkpointer: Optional[Checkpointer] = checkpointer
if checkpointer is None and serialization_dir is not None:
self._checkpointer = Checkpointer(serialization_dir)
self._grad_norm = grad_norm
self._grad_clipping = grad_clipping
self._learning_rate_scheduler = learning_rate_scheduler
self._momentum_scheduler = momentum_scheduler
self._moving_average = moving_average
self._batch_callbacks = batch_callbacks or []
self._epoch_callbacks = epoch_callbacks or []
self._end_callbacks = end_callbacks or []
for callback in trainer_callbacks or []:
self._batch_callbacks.append(callback.batch())
self._epoch_callbacks.append(callback.epoch())
self._end_callbacks.append(callback.end())
# We keep the total batch number as an instance variable because it
# is used inside a closure for the hook which logs activations in
# `_enable_activation_logging`.
self._batch_num_total = 0
self._tensorboard = tensorboard_writer or TensorboardWriter(serialization_dir)
self._tensorboard.get_batch_num_total = lambda: self._batch_num_total
self._tensorboard.enable_activation_logging(self.model)
self._last_log = 0.0 # time of last logging
self._num_gradient_accumulation_steps = num_gradient_accumulation_steps
# Enable automatic mixed precision training.
self._scaler: Optional[amp.GradScaler] = None
self._use_amp = use_amp
if self._use_amp:
if self.cuda_device == torch.device("cpu"):
raise ValueError("Using AMP requires a cuda device")
self._scaler = amp.GradScaler()
# Using `DistributedDataParallel`(ddp) brings in a quirk wrt AllenNLP's `Model` interface and its
# usage. A `Model` object is wrapped by `ddp`, but assigning the wrapped model to `self.model`
# will break the usages such as `Model.get_regularization_penalty`, `Model.get_metrics`, etc.
#
# Hence a reference to Pytorch's object is maintained in the case of distributed training and in the
# normal case, reference to `Model` is retained. This reference is only used in
# these places: `model.__call__`, `model.train` and `model.eval`.
if self._distributed:
self._pytorch_model = DistributedDataParallel(
self.model,
device_ids=None if self.cuda_device == torch.device("cpu") else [self.cuda_device],
find_unused_parameters=True,
)
else:
self._pytorch_model = self.model
def rescale_gradients(self) -> float:
"""
Performs gradient rescaling. Is a no-op if gradient rescaling is not enabled.
Returns the norm of the gradients.
"""
parameters_to_clip = [p for p in self.model.parameters() if p.grad is not None]
if self._grad_norm:
if self._scaler is not None:
# Need to first unscale gradients in order to clip as usual.
self._scaler.unscale_(self.optimizer)
return clip_grad_norm_(parameters_to_clip, self._grad_norm)
else:
return torch.norm(
torch.stack([torch.norm(p.grad.detach()) for p in parameters_to_clip])
)
def batch_outputs(self, batch: TensorDict, for_training: bool) -> Dict[str, torch.Tensor]:
"""
Does a forward pass on the given batch and returns the output dictionary that the model
returns, after adding any specified regularization penalty to the loss (if training).
"""
batch = nn_util.move_to_device(batch, self.cuda_device)
output_dict = self._pytorch_model(**batch)
if for_training:
try:
assert "loss" in output_dict
regularization_penalty = self.model.get_regularization_penalty()
if regularization_penalty is not None:
output_dict["reg_loss"] = regularization_penalty
output_dict["loss"] += regularization_penalty
except AssertionError:
if for_training:
raise RuntimeError(
"The model you are trying to optimize does not contain a"
" 'loss' key in the output of model.forward(inputs)."
)
return output_dict
def _train_epoch(self, epoch: int) -> Dict[str, float]:
"""
Trains one epoch and returns metrics.
"""
logger.info("Epoch %d/%d", epoch, self._num_epochs - 1)
cpu_memory_usage = []
for worker, memory in common_util.peak_cpu_memory().items():
cpu_memory_usage.append((worker, memory))
logger.info(f"Worker {worker} memory usage: {common_util.format_size(memory)}")
gpu_memory_usage = []
for gpu, memory in common_util.peak_gpu_memory().items():
gpu_memory_usage.append((gpu, memory))
logger.info(f"GPU {gpu} memory usage: {common_util.format_size(memory)}")
regularization_penalty = self.model.get_regularization_penalty()
train_loss = 0.0
batch_loss = 0.0
train_reg_loss = None if regularization_penalty is None else 0.0
batch_reg_loss = None if regularization_penalty is None else 0.0
# Set the model to "train" mode.
self._pytorch_model.train()
# Get tqdm for the training batches
batch_generator = iter(self.data_loader)
batch_group_generator = common_util.lazy_groups_of(
batch_generator, self._num_gradient_accumulation_steps
)
logger.info("Training")
num_training_batches: Union[int, float]
try:
len_data_loader = len(self.data_loader)
num_training_batches = math.ceil(
len_data_loader / self._num_gradient_accumulation_steps
)
except TypeError:
num_training_batches = float("inf")
# Having multiple tqdm bars in case of distributed training will be a mess. Hence only the master's
# progress is shown
if self._master:
batch_group_generator_tqdm = Tqdm.tqdm(
batch_group_generator, total=num_training_batches
)
else:
batch_group_generator_tqdm = batch_group_generator
self._last_log = time.time()
batches_this_epoch = 0
if self._batch_num_total is None:
self._batch_num_total = 0
done_early = False
for batch_group in batch_group_generator_tqdm:
if self._distributed:
# Check whether the other workers have stopped already (due to differing amounts of
# data in each). If so, we can't proceed because we would hang when we hit the
                # barrier implicit in Model.forward. We use an IntTensor instead of a BoolTensor
# here because NCCL process groups apparently don't support BoolTensor.
done = torch.tensor(0, device=self.cuda_device)
torch.distributed.all_reduce(done, torch.distributed.ReduceOp.SUM)
if done.item() > 0:
done_early = True
logger.warning(
f"Worker {torch.distributed.get_rank()} finishing training early! "
"This implies that there is an imbalance in your training "
"data across the workers and that some amount of it will be "
"ignored. A small amount of this is fine, but a major imbalance "
"should be avoided. Note: This warning will appear unless your "
"data is perfectly balanced."
)
break
batches_this_epoch += 1
self._batch_num_total += 1
batch_num_total = self._batch_num_total
# Zero gradients.
# NOTE: this is actually more efficient than calling `self.optimizer.zero_grad()`
# because it avoids a read op when the gradients are first updated below.
for param_group in self.optimizer.param_groups:
for p in param_group["params"]:
p.grad = None
batch_loss = 0.0
batch_group_outputs = []
for batch in batch_group:
with amp.autocast(self._use_amp):
batch_outputs = self.batch_outputs(batch, for_training=True)
batch_group_outputs.append(batch_outputs)
loss = batch_outputs["loss"]
reg_loss = batch_outputs.get("reg_loss")
if torch.isnan(loss):
raise ValueError("nan loss encountered")
loss = loss / len(batch_group)
batch_loss += loss.item()
if reg_loss is not None:
reg_loss = reg_loss / len(batch_group)
batch_reg_loss = reg_loss.item()
train_reg_loss += batch_reg_loss # type: ignore
if self._scaler is not None:
self._scaler.scale(loss).backward()
else:
loss.backward()
train_loss += batch_loss
batch_grad_norm = self.rescale_gradients()
# This does nothing if batch_num_total is None or you are using a
# scheduler which doesn't update per batch.
if self._learning_rate_scheduler:
self._learning_rate_scheduler.step_batch(batch_num_total)
if self._momentum_scheduler:
self._momentum_scheduler.step_batch(batch_num_total)
param_updates = None
if self._tensorboard.should_log_histograms_this_batch() and self._master:
# Get the magnitude of parameter updates for logging. We need to do some
# computation before and after the optimizer step, and it's expensive because of
# GPU/CPU copies (necessary for large models, and for shipping to tensorboard), so
# we don't do this every batch, only when it's requested.
param_updates = {
name: param.detach().cpu().clone()
for name, param in self.model.named_parameters()
}
if self._scaler is not None:
self._scaler.step(self.optimizer)
self._scaler.update()
else:
self.optimizer.step()
for name, param in self.model.named_parameters():
param_updates[name].sub_(param.detach().cpu())
else:
if self._scaler is not None:
self._scaler.step(self.optimizer)
self._scaler.update()
else:
self.optimizer.step()
# Update moving averages
if self._moving_average is not None:
self._moving_average.apply(batch_num_total)
# Update the description with the latest metrics
metrics = training_util.get_metrics(
self.model,
train_loss,
train_reg_loss,
batch_loss,
batch_reg_loss,
batches_this_epoch,
world_size=self._world_size,
cuda_device=self.cuda_device,
)
if self._master:
# Updating tqdm only for the master as the trainers wouldn't have one
description = training_util.description_from_metrics(metrics)
batch_group_generator_tqdm.set_description(description, refresh=False)
self._tensorboard.log_batch(
self.model,
self.optimizer,
batch_grad_norm,
metrics,
batch_group,
param_updates,
)
if self._checkpointer is not None:
self._checkpointer.maybe_save_checkpoint(self, epoch, batches_this_epoch)
for callback in self._batch_callbacks:
callback(
self,
batch_group,
batch_group_outputs,
metrics,
epoch,
batches_this_epoch,
is_training=True,
is_master=self._master,
)
if self._distributed and not done_early:
logger.warning(
f"Worker {torch.distributed.get_rank()} completed its entire epoch (training)."
)
# Indicate that we're done so that any workers that have remaining data stop the epoch early.
done = torch.tensor(1, device=self.cuda_device)
torch.distributed.all_reduce(done, torch.distributed.ReduceOp.SUM)
assert done.item()
# Let all workers finish their epoch before computing
# the final statistics for the epoch.
if self._distributed:
dist.barrier()
metrics = training_util.get_metrics(
self.model,
train_loss,
train_reg_loss,
batch_loss=None,
batch_reg_loss=None,
num_batches=batches_this_epoch,
reset=True,
world_size=self._world_size,
cuda_device=self.cuda_device,
)
for (worker, memory) in cpu_memory_usage:
metrics["worker_" + str(worker) + "_memory_MB"] = memory / (1024 * 1024)
for (gpu_num, memory) in gpu_memory_usage:
metrics["gpu_" + str(gpu_num) + "_memory_MB"] = memory / (1024 * 1024)
return metrics
def _validation_loss(self, epoch: int) -> Tuple[float, Optional[float], int]:
"""
Computes the validation loss. Returns it and the number of batches.
"""
logger.info("Validating")
self._pytorch_model.eval()
# Replace parameter values with the shadow values from the moving averages.
if self._moving_average is not None:
self._moving_average.assign_average_value()
if self._validation_data_loader is not None:
validation_data_loader = self._validation_data_loader
else:
raise ConfigurationError(
"Validation results cannot be calculated without a validation_data_loader"
)
regularization_penalty = self.model.get_regularization_penalty()
# Having multiple tqdm bars in case of distributed training will be a mess. Hence only the master's
# progress is shown
if self._master:
val_generator_tqdm = Tqdm.tqdm(validation_data_loader)
else:
val_generator_tqdm = validation_data_loader
batches_this_epoch = 0
val_loss = 0.0
val_batch_loss = 0.0
val_reg_loss = None if regularization_penalty is None else 0.0
val_batch_reg_loss = None if regularization_penalty is None else 0.0
done_early = False
for batch in val_generator_tqdm:
if self._distributed:
# Check whether the other workers have stopped already (due to differing amounts of
# data in each). If so, we can't proceed because we would hang when we hit the
                # barrier implicit in Model.forward. We use an IntTensor instead of a BoolTensor
# here because NCCL process groups apparently don't support BoolTensor.
done = torch.tensor(0, device=self.cuda_device)
torch.distributed.all_reduce(done, torch.distributed.ReduceOp.SUM)
if done.item() > 0:
done_early = True
logger.warning(
f"Worker {torch.distributed.get_rank()} finishing validation early! "
"This implies that there is an imbalance in your validation "
"data across the workers and that some amount of it will be "
"ignored. A small amount of this is fine, but a major imbalance "
"should be avoided. Note: This warning will appear unless your "
"data is perfectly balanced."
)
break
with amp.autocast(self._use_amp):
batch_outputs = self.batch_outputs(batch, for_training=False)
loss = batch_outputs.get("loss")
reg_loss = batch_outputs.get("reg_loss")
if loss is not None:
# You shouldn't necessarily have to compute a loss for validation, so we allow for
# `loss` to be None. We need to be careful, though - `batches_this_epoch` is
# currently only used as the divisor for the loss function, so we can safely only
# count those batches for which we actually have a loss. If this variable ever
# gets used for something else, we might need to change things around a bit.
batches_this_epoch += 1
val_batch_loss = loss.item()
val_loss += val_batch_loss
if reg_loss is not None:
val_batch_reg_loss = reg_loss.item()
val_reg_loss += val_batch_reg_loss # type: ignore
# Update the description with the latest metrics
val_metrics = training_util.get_metrics(
self.model,
val_loss,
val_reg_loss,
val_batch_loss,
val_batch_reg_loss,
batches_this_epoch,
world_size=self._world_size,
cuda_device=self.cuda_device,
)
description = training_util.description_from_metrics(val_metrics)
if self._master:
val_generator_tqdm.set_description(description, refresh=False)
for callback in self._batch_callbacks:
callback(
self,
[batch],
[batch_outputs],
val_metrics,
epoch,
batches_this_epoch,
is_training=False,
is_master=self._master,
)
if self._distributed and not done_early:
logger.warning(
f"Worker {torch.distributed.get_rank()} completed its entire epoch (validation)."
)
# Indicate that we're done so that any workers that have remaining data stop validation early.
done = torch.tensor(1, device=self.cuda_device)
torch.distributed.all_reduce(done, torch.distributed.ReduceOp.SUM)
assert done.item()
# Now restore the original parameter values.
if self._moving_average is not None:
self._moving_average.restore()
return val_loss, val_reg_loss, batches_this_epoch
def train(self) -> Dict[str, Any]:
"""
Trains the supplied model with the supplied parameters.
"""
try:
return self._try_train()
finally:
# make sure pending events are flushed to disk and files are closed properly
self._tensorboard.close()
def _try_train(self) -> Dict[str, Any]:
try:
epoch_counter = self._restore_checkpoint()
except RuntimeError:
traceback.print_exc()
raise ConfigurationError(
"Could not recover training from the checkpoint. Did you mean to output to "
"a different serialization directory or delete the existing serialization "
"directory?"
)
training_util.enable_gradient_clipping(self.model, self._grad_clipping)
logger.info("Beginning training.")
val_metrics: Dict[str, float] = {}
this_epoch_val_metric: float = 0.0
metrics: Dict[str, Any] = {}
epochs_trained = 0
training_start_time = time.time()
metrics["best_epoch"] = self._metric_tracker.best_epoch
for key, value in self._metric_tracker.best_epoch_metrics.items():
metrics["best_validation_" + key] = value
for callback in self._epoch_callbacks:
callback(self, metrics={}, epoch=-1, is_master=self._master)
for epoch in range(epoch_counter, self._num_epochs):
epoch_start_time = time.time()
train_metrics = self._train_epoch(epoch)
if self._master and self._checkpointer is not None:
self._checkpointer.save_checkpoint(epoch, self, save_model_only=True)
# Wait for the master to finish saving the model checkpoint
if self._distributed:
dist.barrier()
# get peak of memory usage
for key, value in train_metrics.items():
if key.startswith("gpu_") and key.endswith("_memory_MB"):
metrics["peak_" + key] = max(metrics.get("peak_" + key, 0), value)
elif key.startswith("worker_") and key.endswith("_memory_MB"):
metrics["peak_" + key] = max(metrics.get("peak_" + key, 0), value)
if self._validation_data_loader is not None:
with torch.no_grad():
# We have a validation set, so compute all the metrics on it.
val_loss, val_reg_loss, num_batches = self._validation_loss(epoch)
# It is safe again to wait till the validation is done. This is
# important to get the metrics right.
if self._distributed:
dist.barrier()
val_metrics = training_util.get_metrics(
self.model,
val_loss,
val_reg_loss,
batch_loss=None,
batch_reg_loss=None,
num_batches=num_batches,
reset=True,
world_size=self._world_size,
cuda_device=self.cuda_device,
)
# Check validation metric for early stopping
this_epoch_val_metric = val_metrics[self._validation_metric]
self._metric_tracker.add_metric(this_epoch_val_metric)
if self._metric_tracker.should_stop_early():
logger.info("Ran out of patience. Stopping training.")
break
if self._master:
self._tensorboard.log_metrics(
train_metrics, val_metrics=val_metrics, log_to_console=True, epoch=epoch + 1
) # +1 because tensorboard doesn't like 0
# Create overall metrics dict
training_elapsed_time = time.time() - training_start_time
metrics["training_duration"] = str(datetime.timedelta(seconds=training_elapsed_time))
metrics["training_start_epoch"] = epoch_counter
metrics["training_epochs"] = epochs_trained
metrics["epoch"] = epoch
for key, value in train_metrics.items():
metrics["training_" + key] = value
for key, value in val_metrics.items():
metrics["validation_" + key] = value
if self._metric_tracker.is_best_so_far():
# Update all the best_ metrics.
# (Otherwise they just stay the same as they were.)
metrics["best_epoch"] = epoch
for key, value in val_metrics.items():
metrics["best_validation_" + key] = value
self._metric_tracker.best_epoch_metrics = val_metrics
if self._serialization_dir and self._master:
common_util.dump_metrics(
os.path.join(self._serialization_dir, f"metrics_epoch_{epoch}.json"),
metrics,
)
# The Scheduler API is agnostic to whether your schedule requires a validation metric -
# if it doesn't, the validation metric passed here is ignored.
if self._learning_rate_scheduler:
self._learning_rate_scheduler.step(this_epoch_val_metric)
if self._momentum_scheduler:
self._momentum_scheduler.step(this_epoch_val_metric)
if self._master and self._checkpointer is not None:
self._checkpointer.save_checkpoint(
epoch, self, is_best_so_far=self._metric_tracker.is_best_so_far()
)
# Wait for the master to finish saving the checkpoint
if self._distributed:
dist.barrier()
for callback in self._epoch_callbacks:
callback(self, metrics=metrics, epoch=epoch, is_master=self._master)
epoch_elapsed_time = time.time() - epoch_start_time
logger.info("Epoch duration: %s", datetime.timedelta(seconds=epoch_elapsed_time))
if epoch < self._num_epochs - 1:
training_elapsed_time = time.time() - training_start_time
estimated_time_remaining = training_elapsed_time * (
(self._num_epochs - epoch_counter) / float(epoch - epoch_counter + 1) - 1
)
formatted_time = str(datetime.timedelta(seconds=int(estimated_time_remaining)))
logger.info("Estimated training time remaining: %s", formatted_time)
epochs_trained += 1
for callback in self._end_callbacks:
callback(self, metrics=metrics, epoch=epoch, is_master=self._master)
# Load the best model state before returning
best_model_state = (
None if self._checkpointer is None else self._checkpointer.best_model_state()
)
if best_model_state:
self.model.load_state_dict(best_model_state)
return metrics
@contextmanager
def get_checkpoint_state(self) -> Iterator[Tuple[Dict[str, Any], Dict[str, Any]]]:
if self._moving_average is not None:
# Assigning average value to model parameters. The checkpointer will call
# `restore_state_after_checkpointing` when it is done to put this back to what it was.
self._moving_average.assign_average_value()
model_state = self.model.state_dict()
# These are the training states we need to persist.
training_states = {
"metric_tracker": self._metric_tracker.state_dict(),
"optimizer": self.optimizer.state_dict(),
"batch_num_total": self._batch_num_total,
}
# If we have a learning rate or momentum scheduler, we should persist them too.
if self._learning_rate_scheduler is not None:
training_states["learning_rate_scheduler"] = self._learning_rate_scheduler.state_dict()
if self._momentum_scheduler is not None:
training_states["momentum_scheduler"] = self._momentum_scheduler.state_dict()
try:
yield model_state, training_states
finally:
if self._moving_average is not None:
self._moving_average.restore()
def _restore_checkpoint(self) -> int:
"""
Restores the model and training state from the last saved checkpoint.
This includes an epoch count and optimizer state, which is serialized separately
from model parameters. This function should only be used to continue training -
if you wish to load a model for inference/load parts of a model into a new
computation graph, you should use the native Pytorch functions:
        `model.load_state_dict(torch.load("/path/to/model/weights.th"))`
If `self._serialization_dir` does not exist or does not contain any checkpointed weights,
this function will do nothing and return 0.
# Returns
epoch: `int`
The epoch at which to resume training, which should be one after the epoch
in the saved training state.
"""
if self._checkpointer is None:
return 0
model_state, training_state = self._checkpointer.restore_checkpoint()
if not training_state:
# No checkpoint to restore, start at 0
return 0
self.model.load_state_dict(model_state)
self.optimizer.load_state_dict(training_state["optimizer"])
if (
self._learning_rate_scheduler is not None
and "learning_rate_scheduler" in training_state
):
self._learning_rate_scheduler.load_state_dict(training_state["learning_rate_scheduler"])
if self._momentum_scheduler is not None and "momentum_scheduler" in training_state:
self._momentum_scheduler.load_state_dict(training_state["momentum_scheduler"])
training_util.move_optimizer_to_cuda(self.optimizer)
# Currently the `training_state` contains a serialized `MetricTracker`.
if "metric_tracker" in training_state:
self._metric_tracker.load_state_dict(training_state["metric_tracker"])
# It used to be the case that we tracked `val_metric_per_epoch`.
elif "val_metric_per_epoch" in training_state:
self._metric_tracker.clear()
self._metric_tracker.add_metrics(training_state["val_metric_per_epoch"])
# And before that we didn't track anything.
else:
self._metric_tracker.clear()
if isinstance(training_state["epoch"], int):
epoch_to_return = training_state["epoch"] + 1
else:
epoch_to_return = int(training_state["epoch"].split(".")[0]) + 1
# For older checkpoints with batch_num_total missing, default to old behavior where
# it is unchanged.
batch_num_total = training_state.get("batch_num_total")
if batch_num_total is not None:
self._batch_num_total = batch_num_total
return epoch_to_return
@classmethod
def from_partial_objects(
cls,
model: Model,
serialization_dir: str,
data_loader: DataLoader,
validation_data_loader: DataLoader = None,
local_rank: int = 0,
patience: int = None,
validation_metric: str = "-loss",
num_epochs: int = 20,
cuda_device: Optional[Union[int, torch.device]] = None,
grad_norm: float = None,
grad_clipping: float = None,
distributed: bool = False,
world_size: int = 1,
num_gradient_accumulation_steps: int = 1,
use_amp: bool = False,
no_grad: List[str] = None,
optimizer: Lazy[Optimizer] = Lazy(Optimizer.default),
learning_rate_scheduler: Lazy[LearningRateScheduler] = None,
momentum_scheduler: Lazy[MomentumScheduler] = None,
tensorboard_writer: Lazy[TensorboardWriter] = Lazy(TensorboardWriter),
moving_average: Lazy[MovingAverage] = None,
checkpointer: Lazy[Checkpointer] = Lazy(Checkpointer),
batch_callbacks: List[BatchCallback] = None,
epoch_callbacks: List[EpochCallback] = None,
end_callbacks: List[EpochCallback] = None,
trainer_callbacks: List[TrainerCallback] = None,
) -> "Trainer":
"""
This method exists so that we can have a documented method to construct this class using
`FromParams`. If you are not using `FromParams` or config files, you can safely ignore this
method.
The reason we can't just use `__init__` with `FromParams` here is because there are
sequential dependencies to this class's arguments. Anything that has a `Lazy[]` type
annotation needs something from one of the non-`Lazy` arguments. The `Optimizer` needs to
have the parameters from the `Model` before it's constructed, and the `Schedulers` need to
have the `Optimizer`. Because of this, the typical way we construct things `FromParams`
doesn't work, so we use `Lazy` to allow for constructing the objects sequentially.
If you're not using `FromParams`, you can just construct these arguments in the right order
yourself in your code and call the constructor directly.
"""
if cuda_device is None:
from torch import cuda
if cuda.device_count() > 0:
cuda_device = 0
else:
cuda_device = -1
check_for_gpu(cuda_device)
if cuda_device >= 0:
# Moving model to GPU here so that the optimizer state gets constructed on
# the right device.
model = model.cuda(cuda_device)
if no_grad:
for name, parameter in model.named_parameters():
if any(re.search(regex, name) for regex in no_grad):
parameter.requires_grad_(False)
parameters = [[n, p] for n, p in model.named_parameters() if p.requires_grad]
optimizer_ = optimizer.construct(model_parameters=parameters)
common_util.log_frozen_and_tunable_parameter_names(model)
batches_per_epoch: Optional[int]
try:
batches_per_epoch = len(data_loader)
batches_per_epoch = math.ceil(batches_per_epoch / num_gradient_accumulation_steps)
except TypeError:
batches_per_epoch = None
moving_average_ = (
None if moving_average is None else moving_average.construct(parameters=parameters)
)
learning_rate_scheduler_ = (
None
if learning_rate_scheduler is None
else learning_rate_scheduler.construct(
optimizer=optimizer_, num_epochs=num_epochs, num_steps_per_epoch=batches_per_epoch
)
)
momentum_scheduler_ = (
None
if momentum_scheduler is None
else momentum_scheduler.construct(optimizer=optimizer_)
)
checkpointer_ = checkpointer.construct(serialization_dir=serialization_dir)
tensorboard_writer_ = tensorboard_writer.construct(serialization_dir=serialization_dir)
return cls(
model,
optimizer_,
data_loader,
patience=patience,
validation_metric=validation_metric,
validation_data_loader=validation_data_loader,
num_epochs=num_epochs,
serialization_dir=serialization_dir,
cuda_device=cuda_device,
grad_norm=grad_norm,
grad_clipping=grad_clipping,
learning_rate_scheduler=learning_rate_scheduler_,
momentum_scheduler=momentum_scheduler_,
tensorboard_writer=tensorboard_writer_,
checkpointer=checkpointer_,
moving_average=moving_average_,
batch_callbacks=batch_callbacks,
epoch_callbacks=epoch_callbacks,
end_callbacks=end_callbacks,
trainer_callbacks=trainer_callbacks,
distributed=distributed,
local_rank=local_rank,
world_size=world_size,
num_gradient_accumulation_steps=num_gradient_accumulation_steps,
use_amp=use_amp,
)
| apache-2.0 | 2,535,458,135,862,169,600 | 41.618905 | 114 | 0.608544 | false |
christabor/codeReflector | tests/test_htmlreflector.py | 1 | 4205 | # -*- coding: utf-8 -*-
__author__ = """Chris Tabor ([email protected])"""
import unittest
from code_reflector import html_reflector
class SelectorOutputTestCase(unittest.TestCase):
def setUp(self):
self.ref = html_reflector.HTMLReflector()
def test_single_class(self):
res = self.ref.process_string('.foo {}').extract().make_html(
save_as_string=True)
self.assertEqual(res, '<div class="foo"></div>')
def test_single_id(self):
res = self.ref.process_string('#foo {}').extract().make_html(
save_as_string=True)
self.assertEqual(res, '<div id="foo"></div>')
def test_pseudoselector(self):
res = self.ref.process_string('#foo:hover {}').extract().make_html(
save_as_string=True)
self.assertEqual(res, '')
def test_pseudoselector_mixed(self):
res = self.ref.process_string(
'#foo:hover {} #bar {}').extract().make_html(
save_as_string=True)
self.assertEqual(res, '<div id="bar"></div>')
def test_nested_id(self):
res = self.ref.process_string('#foo #bar #bim {}').extract().make_html(
save_as_string=True)
expected = ('<div id="foo"><div id="bar"><div id="bim">'
'</div></div></div>')
self.assertEqual(res, expected)
def test_nested_class(self):
res = self.ref.process_string('.foo .bar .bim {}').extract().make_html(
save_as_string=True)
expected = ('<div class="foo"><div class="bar"><div class="bim">'
'</div></div></div>')
self.assertEqual(res, expected)
def test_compound_class_id(self):
res = self.ref.process_string('.foo#bar {}').extract().make_html(
save_as_string=True)
expected = ('<div id="bar" class="foo"></div>')
self.assertEqual(res, expected)
def test_compound_multiclass(self):
res = self.ref.process_string('.foo.bar.bim {}').extract().make_html(
save_as_string=True)
expected = ('<div class="foo bar bim"></div>')
self.assertEqual(res, expected)
def test_compound_id_multiclass(self):
res = self.ref.process_string('#foo.bar.bim {}').extract().make_html(
save_as_string=True)
expected = ('<div id="foo" class="bar bim"></div>')
self.assertEqual(res, expected)
def test_compound_id_class(self):
res = self.ref.process_string('#foo.bar {}').extract().make_html(
save_as_string=True)
expected = ('<div id="foo" class="bar"></div>')
self.assertEqual(res, expected)
def test_nested_simple_class(self):
res = self.ref.process_string('.foo>.bar {}').extract().make_html(
save_as_string=True)
expected = ('<div class="foo"><div class="bar"></div></div>')
self.assertEqual(res, expected)
def test_nested_simple_id(self):
res = self.ref.process_string('#foo>#bar {}').extract().make_html(
save_as_string=True)
expected = ('<div id="foo"><div id="bar"></div></div>')
self.assertEqual(res, expected)
def test_nested_simple_id_spaces(self):
res = self.ref.process_string('#foo > #bar {}').extract().make_html(
save_as_string=True)
expected = ('<div id="foo"><div id="bar"></div></div>')
self.assertEqual(res, expected)
def test_nested_multiid_multiclass_tag(self):
res = self.ref.process_string(
'.foo > .bar > section#bam section.quux {}').extract().make_html(
save_as_string=True)
expected = ('<div class="foo"><div class="bar"><section id="bam">'
'<section class="quux"></section></section></div></div>')
self.assertEqual(res, expected)
def test_nested_multiid_multiclass_tag_mixedspaces(self):
res = self.ref.process_string(
'.foo > .bar>section#bam section.quux {}').extract().make_html(
save_as_string=True)
expected = ('<div class="foo"><div class="bar"><section id="bam">'
'<section class="quux"></section></section></div></div>')
self.assertEqual(res, expected)
| apache-2.0 | 4,916,741,392,170,460,000 | 39.047619 | 79 | 0.571463 | false |
co-ment/comt | src/cm/tests/test_comment_positioning.py | 1 | 4722 | # -*- coding: utf-8 -*-
from django.test import TestCase
from BeautifulSoup import BeautifulSoup
from cm.models import *
from django.core.cache import cache
# python manage.py test
#
# python manage.py test cm.CommentPositioningTest
def create_comment(start_wrapper=0, end_wrapper=0, start_offset=0, end_offset=0, reply_to=None, user=None, state='pending'):
version = Text.objects.all()[0].get_latest_version()
co = Comment.objects.create(text_version=version,
title="tt",
content="tt",
start_wrapper=start_wrapper,
end_wrapper=end_wrapper,
start_offset=start_offset,
end_offset=end_offset,
reply_to=reply_to,
state=state,
user=user)
cache.clear()
return co
class CommentPositioningTest(TestCase):
def assert_comment(self, old_comment_id, start_wrapper, end_wrapper, start_offset, end_offset):
comment = Comment.objects.get(id=old_comment_id)
#print comment.start_wrapper, comment.end_wrapper, comment.start_offset, comment.end_offset
#print start_wrapper, end_wrapper, start_offset, end_offset
self.assertEqual(comment.start_wrapper, start_wrapper)
self.assertEqual(comment.end_wrapper, end_wrapper)
self.assertEqual(comment.start_offset, start_offset)
self.assertEqual(comment.end_offset, end_offset)
def preserve_comment_pos(self, content, new_content, comment_pos_list):
text = Text.objects.create_text("text", "html", content, "", "", "", None)
version = Text.objects.all()[0].get_latest_version()
res = {}
for old, new in comment_pos_list:
x, y, z, k = old
comment = create_comment(x, y, z, k)
res[comment.id] = new
version.edit("text", "html", new_content, keep_comments = True, cancel_modified_scopes=False)
for id, new in res.items():
if not new:
self.assertFalse(Comment.objects.filter(id=id))
else:
x, y , z, k = new
self.assert_comment(id, x, y, z, k)
def test_remove_comment(self):
content = u"""<html><body>This is a <b>test</b> text</body></html>"""
new_content = u"""<html><body>This is a <b>te</b>e<b>est</b> text</body></html>"""
text = Text.objects.create_text("text", "html", content, "", "", "", None)
comment1 = create_comment(2, 2, 2, 4)
comment2 = create_comment(2, 2, 2, 4)
version = Text.objects.all()[0].get_latest_version()
self.assertEqual(len(version.get_comments()), 2)
version.edit("text", "html", new_content, keep_comments = False, cancel_modified_scopes=False)
self.assertEqual(len(version.get_comments()), 0)
def test_wrapper_shifted(self):
content = u"""<html><body>This is a <b>test</b> text</body></html>"""
new_content = u"""<html><body>This is a <b>te</b>e<b>est</b> text</body></html>"""
self.preserve_comment_pos(content, new_content, [([2,2,2,4],[4,4,2,4]),])
def test_comment_removed(self):
content = u"""<html><body>This is a <b>test</b> text</body></html>"""
new_content = u"""<html><body>This is a <b>test</b> txt</body></html>"""
self.preserve_comment_pos(content, new_content, [([2,2,2,4],None),])
def test_offset_shifted(self):
content = u"""<html><body>This is a <b>test</b> text</body></html>"""
new_content = u"""<html><body>a <b>teXXXst</b>a text</body></html>"""
self.preserve_comment_pos(content, new_content, [([2,2,2,4],[2,2,3,5]),])
def test_insert_wrapper(self):
content = u"""<html><body>This is a <b>test</b> text</body></html>"""
new_content = u"""<html><body>This is a <b>test</b> te<b>x</b>t</body></html>"""
self.preserve_comment_pos(content, new_content, [([2,2,2,5],[2,4,2,1]),])
def test_multiwrapper(self):
content = u"""<html><body>This is a <b>test</b> text</body></html>"""
new_content = u"""<html><body>This is a <b>testXXX<b>X</b>XXXXXXX</b>X text</body></html>"""
self.preserve_comment_pos(content, new_content, [([0,2,2,4],None),])
def test_insert_wrapper2(self):
content = u"""<html><body>aa<b>test</b>bb</body></html>"""
new_content = u"""<html><body>aXa<b>test</b>bXb</body></html>"""
self.preserve_comment_pos(content, new_content, [([0,2,1,1],[0,2,2,1]),])
| agpl-3.0 | 716,923,079,325,918,800 | 43.942857 | 124 | 0.563255 | false |
google/playhvz | backend/constants.py | 1 | 1174 | #!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TODO: High-level file comment."""
import sys
def main(argv):
pass
if __name__ == '__main__':
main(sys.argv)
HUMAN = 'resistance'
ZOMBIE = 'horde'
UNDECLARED = 'undeclared'
ALLEGIANCES = (HUMAN, ZOMBIE, UNDECLARED)
TEST_ENDPOINT = 'http://localhost:8080'
PLAYER_VOLUNTEER_ARGS = (
'advertising', 'logistics', 'communications', 'moderator',
'cleric', 'sorcerer', 'admin', 'photographer', 'chronicler',
'server', 'client', 'android', 'ios')
ACCESS_USER = 'user'
ACCESS_ADMIN = 'admin'
ACCESS_ADMIN_OR_PLAYER = 'adminOrPlayer'
ACCESS_PLAYER = 'player'
| apache-2.0 | -48,341,932,827,967,384 | 27.634146 | 74 | 0.707836 | false |
williamdean/pywbem | testsuite/test_mof_compiler.py | 1 | 5847 | #!/usr/bin/env python
#
from comfychair import main, TestCase, NotRunError
from pywbem import *
from pywbem.mof_compiler import MOFCompiler, MOFWBEMConnection, MOFParseError
from urllib import urlretrieve, urlopen
from time import time
import os
import sys
from zipfile import ZipFile
from tempfile import TemporaryFile
ns = 'root/test'
cwd = os.getcwd()
# Change the mofurl when new schema is released.
mofurl = 'http://www.dmtf.org/standards/cim/cim_schema_v220/cim_schema_2.20.0Experimental-MOFs.zip'
class MOFTest(TestCase):
"""A base class that creates a MOF compiler instance"""
def setup(self):
"""Create the MOF compiler."""
def moflog(msg):
print >> self.logfile, msg
self.logfile = open('moflog.txt', 'w')
self.mofcomp = MOFCompiler(MOFWBEMConnection(),
search_paths=['schema'], verbose=False,
log_func=moflog)
os.chdir(cwd)
class TestFullSchema(MOFTest):
def runtest(self):
t = time()
self.mofcomp.compile_file('schema/cim_schema_2.20.0.mof', ns)
print 'elapsed: %f ' % (time() - t),
self.assert_equal(len(self.mofcomp.handle.qualifiers[ns]), 71)
self.assert_equal(len(self.mofcomp.handle.classes[ns]), 1644)
#print self.mofcomp.handle.classes[ns]['CIM_UnsignedCredential'].properties['OtherPublicKeyEncoding'].qualifiers['Description']
class TestAliases(MOFTest):
def runtest(self):
self.mofcomp.compile_file('test.mof', ns)
class TestSchemaError(MOFTest):
def runtest(self):
self.mofcomp.parser.search_paths = []
try:
self.mofcomp.compile_file('schema/System/CIM_ComputerSystem.mof', ns)
except CIMError, ce:
self.assert_equal(ce.args[0], CIM_ERR_FAILED)
self.assert_equal(ce.file_line[0], 'schema/System/CIM_ComputerSystem.mof')
self.assert_equal(ce.file_line[1], 21)
self.mofcomp.compile_file('schema/qualifiers.mof', ns)
try:
self.mofcomp.compile_file('schema/System/CIM_ComputerSystem.mof', ns)
except CIMError, ce:
self.assert_equal(ce.args[0], CIM_ERR_INVALID_SUPERCLASS)
self.assert_equal(ce.file_line[0], 'schema/System/CIM_ComputerSystem.mof')
self.assert_equal(ce.file_line[1], 177)
class TestSchemaSearch(MOFTest):
def runtest(self):
self.mofcomp.compile_file('schema/System/CIM_ComputerSystem.mof', ns)
ccs = self.mofcomp.handle.GetClass('CIM_ComputerSystem',
LocalOnly=False, IncludeQualifiers=True)
self.assert_equal(ccs.properties['RequestedState'].type, 'uint16')
self.assert_equal(ccs.properties['Dedicated'].type, 'uint16')
cele = self.mofcomp.handle.GetClass('CIM_EnabledLogicalElement',
LocalOnly=False, IncludeQualifiers=True)
self.assert_equal(cele.properties['RequestedState'].type, 'uint16')
class TestParseError(MOFTest):
def runtest(self):
file = 'testmofs/parse_error01.mof'
try:
self.mofcomp.compile_file(file, ns)
except MOFParseError, pe:
self.assert_equal(pe.file, file)
self.assert_equal(pe.lineno, 16)
self.assert_equal(pe.context[5][1:5], '^^^^')
self.assert_equal(pe.context[4][1:5], 'size')
file = 'testmofs/parse_error02.mof'
try:
self.mofcomp.compile_file(file, ns)
except MOFParseError, pe:
self.assert_equal(pe.file, file)
self.assert_equal(pe.lineno, 6)
self.assert_equal(pe.context[5][7:13], '^^^^^^')
self.assert_equal(pe.context[4][7:13], 'weight')
file = 'testmofs/parse_error03.mof'
try:
self.mofcomp.compile_file(file, ns)
except MOFParseError, pe:
self.assert_equal(pe.file, file)
self.assert_equal(pe.lineno, 24)
self.assert_equal(pe.context[5][53], '^')
self.assert_equal(pe.context[4][53], '}')
file = 'testmofs/parse_error04.mof'
try:
self.mofcomp.compile_file(file, ns)
except MOFParseError, pe:
self.assert_equal(str(pe), 'Unexpected end of file')
class TestRefs(MOFTest):
def runtest(self):
self.mofcomp.compile_file('testmofs/test_refs.mof', ns)
#################################################################
# Main function
#################################################################
tests = [
TestAliases,
TestRefs,
TestSchemaError,
TestSchemaSearch,
TestParseError,
TestFullSchema,
]
if __name__ == '__main__':
mofbname = mofurl.split('/')[-1]
if not os.path.isdir('schema'):
os.mkdir('schema')
tfo = TemporaryFile()
ufo = urlopen(mofurl)
clen = int(ufo.info().getheader('Content-Length'))
offset = 0
ppct = -1
for data in ufo:
        offset += len(data)
pct = 100*offset/clen
if pct > ppct:
ppct = pct
sys.stdout.write('\rDownloading %s: %d%% ' % (mofbname, pct))
sys.stdout.flush()
tfo.write(data)
tfo.seek(0)
print ''
zf = ZipFile(tfo, 'r')
nlist = zf.namelist()
for i in xrange(0, len(nlist)):
sys.stdout.write('\rUnpacking %s: %d%% ' % (mofbname,
100*(i+1)/len(nlist)))
sys.stdout.flush()
file = nlist[i]
dfile = 'schema/%s' % file
if dfile[-1] == '/':
if not os.path.exists(dfile):
os.mkdir(dfile)
else:
fo = open(dfile, 'w')
fo.write(zf.read(file))
fo.close()
tfo.close()
print ''
main(tests)
| lgpl-2.1 | 3,154,006,598,126,819,300 | 31.303867 | 135 | 0.575167 | false |
t3dev/odoo | addons/stock/tests/test_report.py | 2 | 1121 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import odoo
import odoo.tests
class TestReports(odoo.tests.TransactionCase):
def test_reports(self):
product1 = self.env['product.product'].create({
'name': 'Mellohi',
'default_code': 'C418',
'type': 'product',
'categ_id': self.env.ref('product.product_category_all').id,
'tracking': 'lot',
'barcode': 'scan_me'
})
lot1 = self.env['stock.production.lot'].create({
'name': 'Volume-Beta',
'product_id': product1.id,
})
report = self.env.ref('stock.label_lot_template')
target = b'\n\n\n^XA\n^FO100,50\n^A0N,44,33^FD[C418]Mellohi^FS\n^FO100,100\n^A0N,44,33^FDLN/SN:Volume-Beta^FS\n^FO100,150^BY3\n^BCN,100,Y,N,N\n^FDVolume-Beta^FS\n^XZ\n\n\n'
rendering, qweb_type = report.render_qweb_text(lot1.id)
self.assertEqual(target, rendering.replace(b' ', b''), 'The rendering is not good')
self.assertEqual(qweb_type, 'text', 'the report type is not good')
| gpl-3.0 | 436,866,183,249,691,840 | 42.115385 | 180 | 0.597681 | false |
scholer/cadnano2.5 | cadnano/color.py | 2 | 3229 | # -*- coding: utf-8 -*-
"""This allows the model to have a :class:`Color` object class without
the need for :class:`PyQt5.QtGui.QColor`
When running the Qt Application, :class:`QColor` will be used, otherwise an
API compatible class is used and exported as a :class:`Color` object
Currently :class:`Color` objects are unused in the model and colors are stored as
QColor-compatible hex strings in the format '#rrggbbaa', so this class is not
exposed in the API documentation
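
A quick usage sketch (illustrative values; it behaves the same with either backend,
except that the fallback's ``name()`` also includes the alpha channel)::

    c = Color(255, 0, 0)    # or Color('#ff0000')
    c.setAlpha(128)
    c.name()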
"""
try:
from PyQt5.QtGui import QColor as Color
except Exception:
class Color(object):
"""Overloaded constructor using *args to be compatible with :class:`QColor`
usage::
Color(r, g, b)
or::
Color('#rrggbb') for hex
"""
def __init__(self, *args):
largs = len(args)
if largs == 1:
# clip the `#`
arg = args[0]
                if not isinstance(arg, str):
raise ValueError("color doesn't support ints")
color_number = int(arg[1:], 16)
r = (color_number >> 16) & 0xFF
g = (color_number >> 8) & 0xFF
b = color_number & 0xFF
self.setRgb(r, g, b, 255)
elif largs == 3:
r, g, b = args
self.setRgb(r, g, b, 255)
else:
r, g, b, a = args
self.setRgb(r, g, b, a)
# end def
def __repr__(self) -> str:
return self.hex()
def setRgb(self, r: int, g: int, b: int, a: int = 255):
"""Set the r, g, b and alpha 8 bit values
Args:
r: 0 - 255
g: 0 - 255
b: 0 - 255
a: 0 - 255
"""
self.r = r
self.g = g
self.b = b
self.a = a
# end def
def setAlpha(self, a: int):
"""Set the alpha 8 bit value
Args:
a (int): 0 - 255
"""
self.a = a
def name(self) -> str:
"""The hex string name. For :class:`QColor` compatibility
Returns:
:class:`QColor` compatible hex string in format '#rrggbbaa'
"""
return self.hex()
def hex(self) -> str:
"""The hex string name.
Returns:
:class:`QColor` compatible hex string in format '#rrggbbaa'
"""
return "#{:02X}{:02X}{:02X}{:02X}".format(self.r, self.g, self.b, self.a)
# end def
def _intToColor(color_number: int) -> Color:
""" legacy color support for converting integers to color objects based on the
cadnano 2 file format
Args:
color_number: integer value of a RGB color
Returns:
the :class:`Color` object
"""
return Color('#%0.6x' % (color_number))
def intToColorHex(color_number: int) -> str:
"""Convert an integer to a hexadecimal string compatible with :class:`QColor`
Args:
color_number: integer value of a RGB color
Returns:
:class:`QColor` compatible hex string in format '#rrggbb'
"""
return '#%0.6x' % (color_number)
| mit | 8,278,896,522,655,440,000 | 27.324561 | 85 | 0.5048 | false |
jessicalucci/TaskManagement | taskflow/engines/dist_engine/dtclient.py | 1 | 7108 | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 Rackspace Hosting All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Client for Distributed System"""
import collections
import kombu
from kombu import mixins
import logging
import threading
import time
from taskflow import states
LOG = logging.getLogger(__name__)
TASK_EXCHANGE = kombu.Exchange('tasks', type='direct')
class DTClient(object):
def __init__(self, broker_uri=None):
self._connection = kombu.Connection(broker_uri)
self._listeners = collections.defaultdict(list)
self.requires = collections.defaultdict(set)
self.provides = collections.defaultdict(set)
self._is_provided = {}
self.mapper = {}
def _check_requires(self, results, callback):
"""Make sure all requires for a task are satisfied before
        kicking off the callback. Returns True once the callback has been
        dispatched, otherwise the list of requirements still being waited on.
"""
requires = callback.requires
if requires is None:
return results
waiting = []
accum_results = {}
for requirement in requires:
if not (requirement in self._is_provided.keys()):
waiting.append(requirement)
else:
accum_results[requirement] = self._is_provided[requirement]
if len(waiting) == 0:
res = callback.celery_task.delay(provides=list(callback.provides),
**accum_results)
self.mapper[res.id] = callback.task_id
return True
else:
LOG.info("Task %s still waiting on %s" %
(callback.task_id, waiting))
return waiting
def register_listener(self, data, callback):
"""Register callback as a listener for task or data
:param data: Data/Task ID that callback is listening for
:callback: Task to be executed upon data provided
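
        A hypothetical call, for illustration ('build_report' is the data/task id
        being waited on and report_task an already-registered task object)::

            client.register_listener('build_report', report_task)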
"""
listener = Listener(self._connection, data, callback,
self._check_requires)
listener_t = threading.Thread(target=listener.run)
listener_t.daemon = True
listener_t.start()
self._listeners[data].append((listener, callback))
def notify_listeners(self, provides, results):
"""notify listeners of certain data
:param provides: A set of what this task provides. The set
contains either data this task provides, the task id
(task provides itself) or both
:param results: A dict or other data structure of what this
task provides. If a dict is used, the client will attempt
to pass on provided data in a key/value manner
        (results['results'][provided] = provided_data)
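
        For illustration, a hypothetical call::

            provides = {'task-42'}
            results = {'status': states.SUCCESS, 'results': {'answer': 42}}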
"""
# persist all data
for provided in provides:
if results['status'] == states.SUCCESS:
# Is this data already provided?
if self._is_provided.get(provided):
res = self._is_provided[provided]
LOG.error("WARNING!! %s Data is already provided,"
" and has value %s. OVERWRITING to value"
" %s" % (provided, res, results))
self._is_provided[provided] = (results['results'])
elif results['status'] == states.ERROR:
LOG.error("Task has errored")
# Once we have preserved all new data, notify all listeners
for provided in provides:
if results['status'] == states.SUCCESS:
self._check_active(provided)
_send_task_results(self._connection, provided,
results['results'])
def _check_active(self, queuename):
# Make sure all consumers have had a chance to spin up
# TODO(Jessica): Won't be a problem in large flows.
# Maybe only activate loop for flows of certain length?
for listener in self._listeners[queuename]:
listener = listener[0]
try_interval = 1
while True:
try_interval *= 2
if try_interval >= 30:
raise Exception("Could not find Listener %s \
for data %s" % (listener, queuename))
if listener._consuming is False:
LOG.error("Listener %s for data %s is not active. \
Trying again in %s seconds"
% (listener, queuename, try_interval))
time.sleep(try_interval)
else:
break
return True
def get_listeners(self, data):
"""Return all listeners for given data"""
results = []
for (_listener, callback) in self._listeners[data]:
results.append(callback)
return results
def close_listeners(self):
for listeners in self._listeners.values():
for _listener, callback in listeners:
_listener.should_stop = True
class Listener(mixins.ConsumerMixin):
"""Created when a task is registered for notification"""
def __init__(self, connection, queuename, callback, check):
self._queue = kombu.Queue(queuename, exchange=TASK_EXCHANGE,
routing_key=queuename)
self.connection = connection
# TODO(Jessica): See if callback can be pulled from chain to
# prevent passing of callback around
self._callback = callback
self._check = check
self._consuming = False
def on_consume_ready(self, connection, channel, consumers, **kwargs):
self._consuming = True
def get_consumers(self, Consumer, channel):
return [Consumer(queues=[self._queue],
callbacks=[self._do_callback])]
def _do_callback(self, body, message):
self._check(body, self._callback)
message.ack()
def on_consume_end(self, connection, channel):
connection.release()
def _send_task_results(connection, queuename, results):
"""Send task results to task_id queue"""
payload = results
routing_key = queuename
with kombu.pools.producers[connection].acquire(block=True) as producer:
kombu.common.maybe_declare(TASK_EXCHANGE, producer.channel)
producer.publish(payload, serializer='json',
exchange=TASK_EXCHANGE,
routing_key=routing_key)
| apache-2.0 | -4,631,248,882,896,480,000 | 37.215054 | 78 | 0.589617 | false |
Dev-Cloud-Platform/Dev-Cloud | dev_cloud/cc1/src/clm/utils/decorators.py | 1 | 10248 | # -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
"""@package src.clm.utils.decorators
Here are placed decorators for CLM views functions targeted to specific CLM
role actors (and src.clm.utils.decorators.genericlog() called by all those).
@par Actor decorators
- src.clm.utils.decorators.guest_log
- src.clm.utils.decorators.user_log
- src.clm.utils.decorators.admin_clm_log
All those decorators call src.clm.utils.decorators.genericlog().
By default those decorators call src.clm.utils.decorators.genericlog
with logging disabled. You can enable it by passing the kwarg \c log=True
when decorating, e.g.:
@code
@admin_clm_log(log=True)
def get_by_id(cm_id, caller_id, id):
pass
@endcode
@author Tomasz Sośnicki <[email protected]>
"""
from clm.utils.cm import CM
from clm.utils import log
from clm.utils.exception import CLMException
from clm.models.user import User
from common.signature import Signature
from common import response
from common.states import user_active_states
from functools import wraps
import json
from django.http import HttpResponse
from django.db import transaction
# Set of functions decorated by actor decorators
# (clm.utils.decorators.guest_log(), src.clm.utils.decorators.user_log(),
# src.clm.utils.decorators.admin_clm_log())
from common.utils import json_convert
global decorated_functions
decorated_functions = set([])
def guest_log(*arg, **kw):
"""
    Decorator for functions requiring only \b guest's privileges.
src.clm.utils.decorators.genericlog() is called with parameters:
- \c is_user=False
- \c is_clm_superuser=False
- \c is_cm_superuser=False
@par Decorated function's declaration
@code
@guest_log[(log=<False|True>)]
function (**kw)
@endcode
@par Decorated function's call
@code
function (**kw)
@endcode
"""
def logwrapper(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
return genericlog(kw.get('log', False), kw.get('pack', True), False, False, False, fun, args, kwargs)
decorated_functions.add(wrapper)
return wrapper
return logwrapper
def user_log(*arg, **kw):
"""
    Decorator for functions requiring a logged-in \b user's privileges.
src.clm.utils.decorators.genericlog() is called with parameters:
- \c is_user=True
- \c is_clm_superuser=False
- \c is_cm_superuser=False
@par Decorated function's declaration
@code
@user_log[(log=<False|True>)]
function (cm_id, caller_id, **kw)
@endcode
@par Decorated function's call
@code
function (cm_id=<cm_id>, login=<login>, password=<password>, **kw)
@endcode
"""
def logwrapper(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
return genericlog(kw.get('log', False), kw.get('pack', True), True, False, False, fun, args, kwargs)
decorated_functions.add(wrapper)
return wrapper
return logwrapper
def admin_cm_log(*arg, **kw):
"""
    Decorator for functions requiring \b admin_cm's privileges.
src.clm.utils.decorators.genericlog is called with parameters:
- \c is_user=True
- \c is_clm_superuser=False
- \c is_cm_superuser=True
@par Decorated function's declaration
@code
    @admin_cm_log[(log=<False|True>)]
function (cm_id, caller_id, **kw)
@endcode
@par Decorated function's call
@code
function (cm_id=<cm_id>, login=<login>, password=<password>, **kw)
@endcode
\c password argument is removed by \c src.cm.utils.decorators.genericlog(),
so it doesn't appear in formal parameters of the function.
"""
def logwrapper(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
return genericlog(kw.get('log', False), kw.get('pack', True), True, False, True, fun, args, kwargs)
decorated_functions.add(wrapper)
return wrapper
return logwrapper
def admin_clm_log(*arg, **kw):
"""
Decorator for functions requiring \b admin_clm's privilidges.
src.clm.utils.decorators.genericlog is called with parameters:
- \c is_user=True
- \c is_clm_superuser=True
- \c is_cm_superuser=False
@par Decorated function's declaration
@code
@admin_clm_log[(log=<False|True>)]
function (cm_id, caller_id, *args, **kw)
@endcode
@par Decorated function's call
@code
function (cm_id, login, password, *arg, **kw)
@endcode
\c password argument is removed by \c src.cm.utils.decorators.genericlog(),
so it doesn't appear in formal parameters of the function.
"""
def logwrapper(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
return genericlog(kw.get('log', False), kw.get('pack', True), True, True, False, fun, args, kwargs)
decorated_functions.add(wrapper)
return wrapper
return logwrapper
def auth(is_user, is_clm_superuser, data):
if is_user:
login = data.pop('login')
password = data.get('password')
if password:
del data['password']
try:
user = User.objects.get(login=login)
except User.DoesNotExist:
raise CLMException('user_get')
if 'Signature' in data.keys():
if not Signature.checkSignature(user.password, data.pop('Signature'), data['parameters']):
raise CLMException('user_get')
del data['parameters']
elif user.password != password:
raise CLMException('user_get')
data['caller_id'] = user.id
if user.is_active != user_active_states['ok']:
raise CLMException('user_inactive')
if is_clm_superuser and not user.is_superuser:
raise CLMException('user_permission')
data['cm_id'] = data.pop('cm_id', None)
if not data['cm_id']:
if user.default_cluster_id is not None:
data['cm_id'] = user.default_cluster_id
return user.id
else:
return 0
def genericlog(log_enabled, pack_resp, is_user, is_clm_superuser, is_cm_superuser, fun, args, kwargs):
"""
Generic log is called by actor decorators defined in src.clm.utils.decorators :
- src.clm.utils.decorators.guest_log
- src.clm.utils.decorators.user_log
- src.clm.utils.decorators.admin_cm_log
- src.clm.utils.decorators.admin_clm_log
It calls decorated functions, additionally performing several tasks.
    Genericlog performs:
    -# <i>if decorated function requires user or admin privileges</i>: <b>authorization</b>;
-# <b>execution</b> of the decorated function;
-# <b>debug log</b> of the arguments <i>depending on \c log_enabled and function's success</i>;
-# <i>if exception is thrown</i>: <b>general exception log</b>.
@returns{dict} response; fields:
@dictkey{status,string} 'ok', if succeeded
@dictkey{data,dict} response data
"""
# ===========================================================================
    # AUTHORIZATION
# ===========================================================================
name = '%s.%s' % (fun.__module__.replace('clm.views.', ''), fun.__name__)
request = args[0]
data = json.loads(request.body)
# ===========================================================================
    # LOG ARGUMENTS
# ===========================================================================
gen_exception = False
with transaction.commit_manually():
try:
# Execute function
user_id = auth(is_user, is_clm_superuser, data)
resp = fun(**data)
if pack_resp and not hasattr(fun,
                                         'packed'):  # if function is decorated by cm_request, the 'packed' attribute will be set - response is already packed by cm
resp = response('ok', resp)
transaction.commit()
except CLMException, e:
transaction.rollback()
user_id = 0
resp = e.response
except Exception, e:
transaction.rollback()
gen_exception = True
user_id = 0
resp = response('clm_error', str(e))
if log_enabled or resp['status'] != 'ok':
log.debug(user_id, '=' * 100)
log.debug(user_id, 'Function: %s' % name)
log.debug(user_id, 'ARGS:\n%s' % json.dumps(data, indent=4))
if gen_exception:
log.exception(user_id, 'General exception')
log.debug(user_id, 'Response: %s' % resp or 'None')
return HttpResponse(json.dumps(resp, default=json_convert))
def cm_request(fun):
"""
Decorator for CM views functions that:
- either are fully transparent and just return CM response,
- or propagate request to CM and further postprocess its response.
Decorated function ought to be defined like:
@par Decorated function's declaration
@code
@cm_request
def function (cm_response, <kwargs>):
# postprocess cm_response
return cm_response
@endcode
@par Decorated function's call
@code
function (cm_id, <kwargs>) # `cm_id` is keyword arg as well, but it's required
@endcode
"""
url = r"%s/%s/" % (fun.__module__.replace("clm.views.", "").replace(".", "/"), fun.__name__)
@wraps(fun)
def wrapper(**data):
log.debug(0, "Forward request to CM: %s" % url)
cm_response = CM(data.pop('cm_id')).send_request(url, **data)
fun.packed = True # mark function response to not be packed by genericlog
return fun(cm_response, **data)
return wrapper
| apache-2.0 | 987,290,668,116,218,800 | 29.771772 | 160 | 0.620084 | false |
opmuse/opmuse | opmuse/sizeof.py | 1 | 1750 | # stolen from http://code.activestate.com/recipes/577504/ with some minor changes
# for pep8 compatibility
from __future__ import print_function
from sys import getsizeof, stderr
from itertools import chain
from collections import deque
try:
from reprlib import repr
except ImportError:
pass
def total_size(o, handlers={}, verbose=False):
""" Returns the approximate memory footprint an object and all of its contents.
Automatically finds the contents of the following builtin containers and
their subclasses: tuple, list, deque, dict, set and frozenset.
To search other containers, add handlers to iterate over their contents:
handlers = {SomeContainerClass: iter,
OtherContainerClass: OtherContainerClass.get_elements}
"""
def dict_handler(d):
return chain.from_iterable(d.items())
all_handlers = {tuple: iter,
list: iter,
deque: iter,
dict: dict_handler,
set: iter,
frozenset: iter}
all_handlers.update(handlers) # user handlers take precedence
seen = set() # track which object id's have already been seen
default_size = getsizeof(0) # estimate sizeof object without __sizeof__
def sizeof(o):
if id(o) in seen: # do not double count the same object
return 0
seen.add(id(o))
s = getsizeof(o, default_size)
if verbose:
print(s, type(o), repr(o), file=stderr)
for typ, handler in all_handlers.items():
if isinstance(o, typ):
s += sum(map(sizeof, handler(o)))
break
return s
return sizeof(o)
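# Illustrative call: total_size({'a': [1, 2, 3]}) returns the combined size in
# bytes of the dict, its keys and the nested list, visiting each contained
# object exactly once via the `seen` id set above.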
| agpl-3.0 | -667,687,790,504,563,000 | 31.407407 | 86 | 0.609143 | false |
sunlightlabs/read_FEC | fecreader/summary_data/management/commands/update_all_candidate_times.py | 1 | 3767 | from datetime import date
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Sum
from summary_data.utils.summary_utils import summarize_committee_periodic_webk
from summary_data.models import Candidate_Overlay, Authorized_Candidate_Committees, Committee_Time_Summary, Committee_Overlay
from shared_utils.cycle_utils import cycle_calendar
from django.conf import settings
try:
CURRENT_CYCLE = settings.CURRENT_CYCLE
except:
print "Missing current cycle list. Defaulting to 2016. "
CURRENT_CYCLE = '2016'
this_cycle_calendar = cycle_calendar[int(CURRENT_CYCLE)]
this_cycle_start = this_cycle_calendar['start']
this_cycle_end = this_cycle_calendar['end']
class Command(BaseCommand):
help = "Redo the summaries of *all candidates* - not just those that need it"
requires_model_validation = False
def handle(self, *args, **options):
candidates = Candidate_Overlay.objects.filter(cycle=CURRENT_CYCLE)
for candidate in candidates:
candidate_pcc = candidate.pcc
authorized_committee_list = Authorized_Candidate_Committees.objects.filter(candidate_id=candidate.fec_id, cycle=CURRENT_CYCLE).values('committee_id')
committee_list = [x.get('committee_id') for x in authorized_committee_list]
print "For candidate %s entering from list: %s" % (candidate.name, committee_list)
all_summaries = Committee_Time_Summary.objects.filter(com_id__in=committee_list, coverage_from_date__gte=this_cycle_start, coverage_through_date__lte=this_cycle_end).order_by('-coverage_through_date', '-coverage_from_date')
if all_summaries:
## Get most recent data from the time summary reports. But for totals that include recent stuff, use committee summaries.
most_recent_report = all_summaries[0]
recent_reports = all_summaries.filter(coverage_from_date=most_recent_report.coverage_from_date, coverage_through_date=most_recent_report.coverage_through_date)
# get data from the most recent report
recent_sums = recent_reports.aggregate( outstanding_loans=Sum('outstanding_loans'), cash_on_hand_end=Sum('cash_on_hand_end'))
for i in recent_sums:
if not recent_sums[i]:
recent_sums[i] = 0
# Independent expenditures are summarized separately.
candidate.cash_on_hand_date = most_recent_report.coverage_through_date
candidate.cash_on_hand = recent_sums['cash_on_hand_end']
candidate.outstanding_loans = recent_sums['outstanding_loans']
authorized_committees = Committee_Overlay.objects.filter(fec_id__in=committee_list,cycle=CURRENT_CYCLE)
sums = authorized_committees.aggregate(tot_contrib=Sum('total_contributions'), tot_disburse=Sum('total_disbursements'), tot_receipts=Sum('total_receipts'), tot_non_ite_contrib=Sum('total_unitemized'))
for i in sums:
if not sums[i]:
sums[i] = 0
candidate.total_contributions = sums['tot_contrib']
candidate.total_unitemized = sums['tot_non_ite_contrib']
candidate.total_disbursements = sums['tot_disburse']
candidate.total_receipts = sums['tot_receipts']
if not candidate.has_contributions and candidate.total_contributions > 0:
candidate.has_contributions = True
candidate.save()
| bsd-3-clause | 9,153,019,869,231,300,000 | 46.696203 | 235 | 0.63605 | false |
bertdecoensel/noysim | noysim/viewer.py | 1 | 28081 | # Noysim -- Noise simulation tools for Aimsun.
# Copyright (c) 2010-2011 by Bert De Coensel, Ghent University & Griffith University.
#
# Classes for sending and viewing noise levels in real-time
import os
import sys
import socket
import threading
import time
import random
import msvcrt
if not hasattr(sys, 'frozen'):
import wxversion
wxversion.select('2.8-msw-unicode') # version of wxPython
import wx
from wx.lib.agw.floatspin import FloatSpin, EVT_FLOATSPIN
import matplotlib
matplotlib.use('WXAgg')
from matplotlib.figure import Figure
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigCanvas, NavigationToolbar2WxAgg as NavigationToolbar
import numpy
import pylab
USERPYC = True # if set to False, low level sockets are used
if USERPYC:
try:
# check if rpyc is installed
import rpyc
from rpyc.utils.server import ThreadedServer
from rpyc.utils.classic import DEFAULT_SERVER_PORT
from rpyc.utils.registry import UDPRegistryClient
from rpyc.core import SlaveService
except:
# revert to using low level sockets
USERPYC = False
raise Exception('rpyc has to be installed')
import version
#---------------------------------------------------------------------------------------------------
# Parameters
#---------------------------------------------------------------------------------------------------
# general parameters
NAME = '%s %s Viewer' % (version.name.capitalize(), version.version)
ABOUT = NAME + '\n\n' + version.copyright.replace(', ', '\n') + '\n' + version.email
# communication with level viewer
RPYCTHREAD = None # global level thread variable (needed to circumvent the rpyc service factory)
HOST = 'localhost'
PORT = 50007
TIMEOUT = 0.01
SLEEP = 0.001
BUFSIZE = 4096
# timing parameters
REDRAWTIME = 100 # number of milliseconds between redraws
FLASHTIME = 1500 # duration of messages on the status bar, in milliseconds
# visualisation parameters
DPI = 100 # dots per inch for plotting and saving
FIGSIZE = (3.0, 3.0) # size of plotting canvas in inches (defaults to 300x300 pixels)
FONTSIZE = 8 # size of font of labels
BGCOLOR = 'black'
GRIDCOLOR = 'gray'
LINECOLOR = 'yellow'
LINEWIDTH = 1
# axes parameters
SPININC = 5.0 # increment of spin controls
XMIN = 10.0 # minimal x-axis range width
XWIDTH = 30.0 # initial value of x-axis range width
YMIN = (0.0, 10.0) # minimal y-axis low and height
YRANGE = (30.0, 60.0) # initial values of y-axis low and height
MARGIN = 1.0 # margin for auto range of levels
# test parameters
TESTDT = 0.5 # simulation timestep in seconds
TESTSLEEP = 0.2 # time between level updates
TESTLOCS = ['(1.00,2.00,3.00)', '(4.00,5.00,6.00)'] # locations of test receivers
randomLevel = lambda: 40.0 + 30.0*random.random() # function that generates a random sound level
#---------------------------------------------------------------------------------------------------
# Communication from plugin to viewer
#---------------------------------------------------------------------------------------------------
class LevelBuffer(object):
""" base interface for sending levels to the viewer, implementing the one-way communication protocol
types of messages:
- command: 'clear'
- levels: 't;loc:level;loc:level'
"""
def __init__(self, host = HOST, port = PORT, active = True, sleep = 0, verbose = False):
object.__init__(self)
self.host = host
self.port = port
self.queue = [] # queue of messages to send
self.active = active # if False, nothing is sent
self.sleep = sleep/1000.0 # time to sleep (in seconds) after sending levels (to slow down a simulation)
self.verbose = verbose # if True, debug code is printed
def sendLevels(self, t, levels):
""" send a series of levels at a particular time at different locations (dict of location:level) """
if self.active:
message = ('%.2f;' % t) + ';'.join([('%s:%.2f' % (str(loc), level)) for loc, level in levels.iteritems()])
self.queue.append(message)
self.flush()
if self.sleep > 0.0:
time.sleep(self.sleep)
def sendClear(self):
""" send a 'clear' message """
if self.active:
message = 'clear'
self.queue.append(message)
self.flush()
def send(self, message):
""" should send a single message string to the viewer (raise an error if not succesful) """
raise NotImplementedError
def flush(self):
""" try to send all message strings in the queue to the viewer """
while (len(self.queue) > 0) and (self.active == True):
message = self.queue[0]
try:
if self.verbose:
print 'trying to send message "%s"' % message
self.send(message)
# remove message from queue
del self.queue[0]
if self.verbose:
          print 'sending successful'
except:
if self.verbose:
print 'sending failed - aborting - length of queue: %d' % len(self.queue)
break
class SocketLevelBuffer(LevelBuffer):
""" implement the level buffer using low level sockets """
def __init__(self, *args, **kwargs):
LevelBuffer.__init__(self, *args, **kwargs)
def send(self, message):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((self.host, self.port))
s.sendall(message)
s.close()
class RPyCLevelBuffer(LevelBuffer):
""" implement the level buffer using Remote Python Calls (RPyC) """
def __init__(self, *args, **kwargs):
LevelBuffer.__init__(self, *args, **kwargs)
def send(self, message):
conn = rpyc.classic.connect('localhost')
conn.root.processMessage(message)
conn.close()
def createLevelBuffer(*args, **kwargs):
""" create a level buffer according to the defined protocol """
if USERPYC:
return RPyCLevelBuffer(*args, **kwargs)
else:
return SocketLevelBuffer(*args, **kwargs)
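# Minimal usage sketch (plugin side, illustrative): create a buffer once and
# push levels every simulation step; sendClear() resets the viewer between runs.
#   buffer = createLevelBuffer(verbose = False)
#   buffer.sendClear()
#   buffer.sendLevels(t = 0.5, levels = {'(1.00,2.00,3.00)': 55.3})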
#---------------------------------------------------------------------------------------------------
# Viewer thread for receiving levels
#---------------------------------------------------------------------------------------------------
VIEWERLOCK = threading.Lock()
class BaseLevelThread(threading.Thread):
""" base interface for a thread for receiving levels """
def __init__(self):
threading.Thread.__init__(self)
self.active = True # set this to false for the thread to stop
self.clear()
def clear(self):
""" clear all data """
VIEWERLOCK.acquire()
self.data = {} # dict with received levels, for each receiver location
self.times = [] # list with times
VIEWERLOCK.release()
def locations(self):
""" return the receiver locations """
VIEWERLOCK.acquire()
result = self.data.keys()[:]
VIEWERLOCK.release()
return result
def levels(self, loc):
""" return the times and levels at the given location """
VIEWERLOCK.acquire()
result = (numpy.asarray(self.times).copy(), numpy.asarray(self.data[loc]).copy())
VIEWERLOCK.release()
return result
class DummyLevelThread(BaseLevelThread):
""" dummy interface for receiving levels, which adds levels at regular instances in time """
def __init__(self, dt = TESTDT, sleep = TESTSLEEP, locs = TESTLOCS):
BaseLevelThread.__init__(self)
self.dt = dt
self.sleep = sleep
self.locs = locs
def run(self):
""" instantiate the server """
print 'thread started...'
t = 0.0
while self.active:
t += self.dt
VIEWERLOCK.acquire()
self.times.append(t)
for loc in self.locs:
if not loc in self.data:
self.data[loc] = []
level = randomLevel()
self.data[loc].append(level)
        print 'level received successfully: time %.2fs, %s, %.2f dB' % (t, loc, level)
VIEWERLOCK.release()
time.sleep(self.sleep)
class ViewerLevelThread(BaseLevelThread):
""" interface for receiving levels, as a thread that runs a server which listens to new levels """
def __init__(self, frame = None, host = HOST, port = PORT, verbose = False):
BaseLevelThread.__init__(self)
self.frame = frame # frame to which the thread is connected
self.host = host
self.port = port
self.verbose = verbose # if True, debug code is printed
def processMessage(self, message):
""" process an incoming message """
if message == '':
pass
elif message == 'clear':
self.clear()
# clear the frame if applicable
if self.frame != None:
self.frame.clear_choices()
self.frame.clear_plot()
if self.verbose:
print 'levels cleared'
else:
# parse the incoming message
tokens = message.split(';')
t = float(tokens[0])
levels = []
for token in tokens[1:]:
loc, level = token.split(':')
level = float(level)
levels.append((loc, level))
# when parsing is succesful, update the data
if (len(self.times) > 0) and (t < self.times[-1]):
if self.verbose:
print 'discarding non-chronological levels: %s' % message
else:
VIEWERLOCK.acquire()
self.times.append(t)
for loc, level in levels:
if not loc in self.data:
self.data[loc] = []
self.data[loc].append(level)
if self.verbose:
            print 'level received successfully: time %.2fs, %s, %.2f dB' % (t, loc, level)
VIEWERLOCK.release()
class SocketViewerLevelThread(ViewerLevelThread):
""" implementation of viewer level thread using low level sockets """
def __init__(self, *args, **kwargs):
ViewerLevelThread.__init__(self, *args, **kwargs)
def run(self):
""" instantiate the server """
if self.verbose:
print 'thread started...'
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((self.host, self.port))
s.listen(1)
while self.active:
# wait for a connection from the plugin
try:
s.settimeout(TIMEOUT)
conn, addr = s.accept()
s.settimeout(None)
except:
time.sleep(SLEEP)
continue
# when there is a connection, fetch the message
if self.verbose:
print 'connection established'
data = ''
try:
while True:
temp = conn.recv(BUFSIZE)
if not temp:
break
data += temp
conn.close()
except:
if self.verbose:
print 'socket error, so skipping message'
# update the levels
try:
self.processMessage(data)
except:
if self.verbose:
print 'error with received message: "%s"' % data
s.close()
if USERPYC:
class RPyCViewerService(SlaveService):
""" service for managing received messages using Remote Python Calls (RPyC) """
def __init__(self, conn):
SlaveService.__init__(self, conn)
def exposed_processMessage(self, message):
""" send a message to the parent thread for processing """
global RPYCTHREAD
RPYCTHREAD.processMessage(message)
class RPyCViewerLevelThread(ViewerLevelThread):
""" implementation of viewer level thread using Remote Python Calls (RPyC) """
def __init__(self, *args, **kwargs):
ViewerLevelThread.__init__(self, *args, **kwargs)
def run(self):
""" instantiate the server """
if self.verbose:
print 'thread started...'
global RPYCTHREAD
RPYCTHREAD = self
self.server = ThreadedServer(RPyCViewerService, port = DEFAULT_SERVER_PORT, auto_register = False, registrar = UDPRegistryClient())
self.server.start()
def join(self):
self.server.close()
ViewerLevelThread.join(self)
def createViewerLevelThread(*args, **kwargs):
""" create a viewer level thread according to the defined protocol """
if USERPYC:
return RPyCViewerLevelThread(*args, **kwargs)
else:
return SocketViewerLevelThread(*args, **kwargs)
#---------------------------------------------------------------------------------------------------
# Utility GUI controls
#---------------------------------------------------------------------------------------------------
class XAxisRangeBox(wx.Panel):
""" panel for adjusting x-axis range """
def __init__(self, parent, ID, minvalue = XMIN, initvalue = XWIDTH, increment = SPININC):
wx.Panel.__init__(self, parent, ID)
self.minvalue = minvalue
self.value = initvalue # initial x-axis range width (in sliding mode)
# controls
self.radio_full = wx.RadioButton(self, -1, label = 'Full range', style = wx.RB_GROUP)
self.radio_slide = wx.RadioButton(self, -1, label = 'Sliding')
self.slide_width = FloatSpin(self, -1, size = (50, -1), digits = 0, value = self.value, min_val = minvalue, increment = increment)
self.slide_width.GetTextCtrl().SetEditable(False)
# event bindings
self.Bind(wx.EVT_UPDATE_UI, self.on_update_radio_buttons, self.radio_full)
self.Bind(EVT_FLOATSPIN, self.on_float_spin, self.slide_width)
# layout
box = wx.StaticBox(self, -1, 'X-axis')
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
slide_box = wx.BoxSizer(wx.HORIZONTAL)
slide_box.Add(self.radio_slide, flag=wx.ALIGN_CENTER_VERTICAL)
slide_box.Add(self.slide_width, flag=wx.ALIGN_CENTER_VERTICAL)
sizer.Add(self.radio_full, 0, wx.ALL, 10)
sizer.Add(slide_box, 0, wx.ALL, 10)
self.SetSizer(sizer)
sizer.Fit(self)
def on_update_radio_buttons(self, event):
""" called when the radio buttons are toggled """
self.slide_width.Enable(self.radio_slide.GetValue())
def on_float_spin(self, event):
""" called when the sliding mode spinbox is changed """
self.value = self.slide_width.GetValue()
def is_full(self):
""" return True if full range is checked """
return self.radio_full.GetValue()
class YAxisRangeBox(wx.Panel):
""" panel for adjusting y-axis range """
def __init__(self, parent, ID, minvalue = YMIN, initvalue = YRANGE, increment = SPININC):
wx.Panel.__init__(self, parent, ID)
self.value = initvalue # initial y-axis range (in manual mode), i.e. (min, max-min)
# controls
self.radio_auto = wx.RadioButton(self, -1, label = 'Auto', style = wx.RB_GROUP)
self.radio_manual = wx.RadioButton(self, -1, label = 'Manual')
self.manual_min = FloatSpin(self, -1, size = (50, -1), digits = 0, value = self.value[0], min_val = minvalue[0], increment = increment)
self.manual_min.GetTextCtrl().SetEditable(False)
self.manual_width = FloatSpin(self, -1, size = (50, -1), digits = 0, value = self.value[1], min_val = minvalue[1], increment = increment)
self.manual_width.GetTextCtrl().SetEditable(False)
# event bindings
self.Bind(wx.EVT_UPDATE_UI, self.on_update_radio_buttons, self.radio_auto)
self.Bind(EVT_FLOATSPIN, self.on_float_spin, self.manual_min)
self.Bind(EVT_FLOATSPIN, self.on_float_spin, self.manual_width)
# layout
box = wx.StaticBox(self, -1, 'Y-axis')
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
manual_box = wx.BoxSizer(wx.HORIZONTAL)
manual_box.Add(self.radio_manual, flag=wx.ALIGN_CENTER_VERTICAL)
manual_box.Add(self.manual_min, flag=wx.ALIGN_CENTER_VERTICAL)
manual_box.Add(self.manual_width, flag=wx.ALIGN_CENTER_VERTICAL)
sizer.Add(self.radio_auto, 0, wx.ALL, 10)
sizer.Add(manual_box, 0, wx.ALL, 10)
self.SetSizer(sizer)
sizer.Fit(self)
def on_update_radio_buttons(self, event):
""" called when the radio buttons are toggled """
toggle = self.radio_manual.GetValue()
self.manual_min.Enable(toggle)
self.manual_width.Enable(toggle)
def on_float_spin(self, event):
""" called when one of the manual mode spinboxes is changed """
self.value = (self.manual_min.GetValue(), self.manual_width.GetValue())
def is_auto(self):
""" return True if auto range is checked """
return self.radio_auto.GetValue()
#---------------------------------------------------------------------------------------------------
# Viewer frame class
#---------------------------------------------------------------------------------------------------
class ViewerFrame(wx.Frame):
""" main frame of the viewer application """
def __init__(self, test = False):
wx.Frame.__init__(self, None, -1, NAME)
self.paused = False
self.locations = []
# creation of controls
self.create_menu()
self.create_status_bar()
self.create_main_panel()
# timer for redrawing
self.redraw_timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.on_redraw_timer, self.redraw_timer)
self.redraw_timer.Start(REDRAWTIME)
# handle closing the frame
self.Bind(wx.EVT_CLOSE, self.on_exit, self)
# manage window style (always on top or not)
self.wstyle = self.GetWindowStyle()
self.SetWindowStyle(self.wstyle | wx.STAY_ON_TOP)
# coordination with data server
if test:
self.thread = DummyLevelThread()
else:
self.thread = createViewerLevelThread(frame = self)
self.thread.start()
def create_menu(self):
""" construction of menu bar """
self.menubar = wx.MenuBar()
# File menu
menu_file = wx.Menu()
m_expt = menu_file.Append(-1, '&Save plot\tCtrl-S')
self.Bind(wx.EVT_MENU, self.on_save_plot, m_expt)
menu_file.AppendSeparator()
m_exit = menu_file.Append(-1, 'E&xit\tCtrl-X')
self.Bind(wx.EVT_MENU, self.on_exit, m_exit)
# View menu
menu_view = wx.Menu()
self.m_ontop = menu_view.Append(-1, '&Stay on top', kind = wx.ITEM_CHECK)
self.m_ontop.Check(True)
self.Bind(wx.EVT_MENU, self.on_ontop, self.m_ontop)
# Help menu
menu_help = wx.Menu()
m_about = menu_help.Append(-1, '&About...')
self.Bind(wx.EVT_MENU, self.on_about, m_about)
# construction of menu bar
self.menubar.Append(menu_file, '&File')
self.menubar.Append(menu_view, '&View')
self.menubar.Append(menu_help, '&Help')
self.SetMenuBar(self.menubar)
def create_status_bar(self):
""" construction of status bar """
self.statusbar = self.CreateStatusBar()
self.statusbar.SetFieldsCount(2)
self.statusbar.SetStatusWidths([50, -1])
def create_main_panel(self):
""" construction of the main controls """
self.panel = wx.Panel(self)
    # construct plotting area
self.fig = Figure(FIGSIZE, dpi = DPI)
# construct axes
self.axes = self.fig.add_subplot(111)
self.axes.set_axis_bgcolor(BGCOLOR)
# adjust font size of axes labels
pylab.setp(self.axes.get_xticklabels(), fontsize = FONTSIZE)
pylab.setp(self.axes.get_yticklabels(), fontsize = FONTSIZE)
# construct canvas with plotting area
self.plot_data = self.axes.plot([], linewidth = LINEWIDTH, color = LINECOLOR)[0]
self.canvas = FigCanvas(self.panel, -1, self.fig)
# construct location choice box
self.location_txt = wx.StaticText(self.panel, -1, label = ' Select location:')
self.location_box = wx.Choice(self.panel, -1, choices = [], size = (150,-1))
self.location_box.Enable(False)
self.Bind(wx.EVT_CHOICE, lambda event: self.draw_plot(), self.location_box)
# layout location choice box
self.hbox0 = wx.BoxSizer(wx.HORIZONTAL)
self.hbox0.Add(self.location_txt, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
self.hbox0.Add(self.location_box, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
# construct buttons
self.pause_button = wx.Button(self.panel, -1, 'Pause')
self.Bind(wx.EVT_BUTTON, self.on_pause_button, self.pause_button)
self.Bind(wx.EVT_UPDATE_UI, self.on_update_pause_button, self.pause_button)
self.clear_button = wx.Button(self.panel, -1, 'Clear')
self.Bind(wx.EVT_BUTTON, self.on_clear_button, self.clear_button)
self.cb_grid = wx.CheckBox(self.panel, -1, 'Show grid', style=wx.ALIGN_RIGHT)
self.Bind(wx.EVT_CHECKBOX, lambda event: self.draw_plot(), self.cb_grid)
self.cb_grid.SetValue(True)
self.cb_xlab = wx.CheckBox(self.panel, -1, 'X-labels', style=wx.ALIGN_RIGHT)
self.Bind(wx.EVT_CHECKBOX, lambda event: self.draw_plot(), self.cb_xlab)
self.cb_xlab.SetValue(True)
# layout buttons (add space using self.hbox1.AddSpacer(5))
self.hbox1 = wx.BoxSizer(wx.HORIZONTAL)
self.hbox1.Add(self.pause_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
self.hbox1.Add(self.clear_button, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
self.hbox1.Add(self.cb_grid, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
self.hbox1.Add(self.cb_xlab, border=5, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL)
# construct axis controls
self.xrange_control = XAxisRangeBox(self.panel, -1)
self.yrange_control = YAxisRangeBox(self.panel, -1)
# layout axis controls
self.hbox2 = wx.BoxSizer(wx.HORIZONTAL)
self.hbox2.Add(self.xrange_control, border=5, flag=wx.ALL)
self.hbox2.Add(self.yrange_control, border=5, flag=wx.ALL)
# finally, create layout of viewer frame
self.vbox = wx.BoxSizer(wx.VERTICAL)
self.vbox.Add(self.canvas, 1, flag=wx.LEFT | wx.TOP | wx.GROW)
self.vbox.Add(self.hbox0, 0, flag=wx.ALIGN_LEFT | wx.TOP)
self.vbox.Add(self.hbox1, 0, flag=wx.ALIGN_LEFT | wx.TOP)
self.vbox.Add(self.hbox2, 0, flag=wx.ALIGN_LEFT | wx.TOP)
self.panel.SetSizer(self.vbox)
self.vbox.Fit(self)
def draw_plot(self):
""" redraw the plot and update the gui if necessary """
if not self.paused:
# check if data is available
if len(self.locations) == 0:
self.locations = sorted(self.thread.locations())
if len(self.locations) > 0:
self.location_box.AppendItems(self.locations)
self.location_box.SetSelection(0)
self.location_box.Enable(True)
self.flash_status_message('Connection established')
if len(self.locations) > 0:
# fetch data at selected receiver location
loc = self.locations[self.location_box.GetSelection()]
times, levels = self.thread.levels(loc)
if (len(times) == len(levels)):
# calculate x-axis limits
if self.xrange_control.is_full():
# show the full range for the x-axis
xmin = times[0]
xmax = max(times[0] + self.xrange_control.minvalue, times[-1])
else:
# show a sliding window
xmax = times[-1]
xmin = xmax - self.xrange_control.value
# calculate y-axis limits
if self.yrange_control.is_auto():
# find the min and max values of the data and add a minimal margin
ymin = round(min(levels), 0) - MARGIN
ymax = round(max(levels), 0) + MARGIN
else:
# use manual interval
ymin = self.yrange_control.value[0]
ymax = ymin + self.yrange_control.value[1]
# set axis limits
self.axes.set_xbound(lower = xmin, upper = xmax)
self.axes.set_ybound(lower = ymin, upper = ymax)
# finally, plot the data and redraw the plot
self.plot_data.set_xdata(numpy.array(times))
self.plot_data.set_ydata(numpy.array(levels))
# draw grid
if self.cb_grid.IsChecked():
self.axes.grid(True, color = GRIDCOLOR)
else:
self.axes.grid(False)
# draw axis labels
pylab.setp(self.axes.get_xticklabels(), visible = self.cb_xlab.IsChecked())
self.canvas.draw()
def clear_plot(self):
""" clear the data on the plot """
self.plot_data.set_xdata([])
self.plot_data.set_ydata([])
self.canvas.draw()
def on_redraw_timer(self, event):
""" redraw the plot """
self.draw_plot()
def on_pause_button(self, event):
""" called when the pause button is clicked """
self.paused = not self.paused
if self.paused:
self.statusbar.SetStatusText('Paused', 0)
else:
self.statusbar.SetStatusText('', 0)
def on_update_pause_button(self, event):
""" called when the pause button is to be updated """
label = 'Resume' if self.paused else 'Pause'
self.pause_button.SetLabel(label)
def on_clear_button(self, event):
""" called when the clear butten is clicked """
self.thread.clear()
self.clear_choices()
self.clear_plot()
def clear_choices(self):
""" clear the choices box """
self.locations = []
self.location_box.Clear()
self.location_box.Enable(False)
self.flash_status_message('Cleared')
def on_save_plot(self, event):
""" show a window for saving a screenshot """
dlg = wx.FileDialog(self, message = 'Save plot as...', defaultDir = os.getcwd(), defaultFile = 'plot.png', wildcard = 'PNG (*.png)|*.png', style = wx.SAVE)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
self.canvas.print_figure(path, dpi = DPI)
self.flash_status_message('Saved to %s' % path)
def stop_thread(self):
""" stop the level thread """
self.thread.active = False
self.thread.join()
def on_exit(self, event):
""" called when the viewer is closed """
self.stop_thread()
self.Destroy()
def on_ontop(self, event):
""" toggles the stay on top modus """
if self.m_ontop.IsChecked():
self.SetWindowStyle(self.wstyle | wx.STAY_ON_TOP)
else:
self.SetWindowStyle(self.wstyle)
def on_about(self, event):
""" show an about box """
wx.MessageBox(ABOUT, 'About ' + NAME)
def flash_status_message(self, message):
""" flash a message on the status bar """
try:
self.statusbar.SetStatusText(message, 1)
self.timeroff = wx.Timer(self)
self.Bind(wx.EVT_TIMER, lambda event: self.statusbar.SetStatusText('', 1), self.timeroff)
self.timeroff.Start(FLASHTIME, oneShot = True)
except:
pass
#---------------------------------------------------------------------------------------------------
# Test code
#---------------------------------------------------------------------------------------------------
if __name__ == '__main__':
if len(sys.argv) <= 1:
# no command line argument, so run the viewer application
app = wx.PySimpleApp()
app.frame = ViewerFrame()
app.frame.Show()
app.MainLoop()
if (len(sys.argv) == 2) and (sys.argv[1] == 'test'):
# run the viewer in test mode, i.e. generating its own levels for display
app = wx.PySimpleApp()
app.frame = ViewerFrame(test = True)
app.frame.Show()
app.MainLoop()
if (len(sys.argv) == 2) and (sys.argv[1] == 'command'):
# run the viewer in command line mode, i.e. only receiving levels and printing them to the console
print 'Running viewer in command line mode - press any key to stop...'
thread = createViewerLevelThread(frame = None, verbose = True)
thread.start()
# wait until a key is pressed
stop = False
while not stop:
if msvcrt.kbhit():
c = msvcrt.getch()
stop = True
time.sleep(0.1)
# stop the thread
thread.active = False
thread.join()
if (len(sys.argv) == 2) and (sys.argv[1] == 'dummy'):
# run a dummy Aimsun/Noysim2 client that sends random levels (for use with viewer in normal or command line mode)
print 'Running dummy Aimsun/Noysim2 client - press any key to stop...'
client = createLevelBuffer(verbose = True, sleep = 1000*TESTSLEEP)
client.sendClear()
stop = False
(t, dt) = (0.0, TESTDT)
while not stop:
t += dt
client.sendLevels(t = t, levels = dict([(loc, randomLevel()) for loc in TESTLOCS]))
if msvcrt.kbhit():
c = msvcrt.getch()
stop = True
| mit | 800,746,014,258,861,400 | 35.794078 | 159 | 0.607493 | false |
GedheFoundation/sidesa2.0 | surat_masuk.py | 1 | 6371 | #Boa:Frame:surat_masuk
import wx
import wx.richtext
import wx.lib.buttons
import input_administrasi_surat
def create(parent):
return surat_masuk(parent)
[wxID_SURAT_MASUK, wxID_SURAT_MASUKDARI, wxID_SURAT_MASUKDATEPICKERCTRL1,
wxID_SURAT_MASUKINPUT_KETERANGAN, wxID_SURAT_MASUKINPUT_NOMOR_SURAT,
wxID_SURAT_MASUKKOTAK_SURAT_MASUK, wxID_SURAT_MASUKLABEL_ISI_SINGKAT_SURAT,
wxID_SURAT_MASUKLABEL_KETERANGAN, wxID_SURAT_MASUKLABEL_NOMOR_SURAT_MASUK,
wxID_SURAT_MASUKLABEL_TANGGAL_SURAT, wxID_SURAT_MASUKLABEL_TUJUAN_SURAT,
wxID_SURAT_MASUKPERIHAL, wxID_SURAT_MASUKRICHTEXTCTRL1,
wxID_SURAT_MASUKTEXTCTRL1, wxID_SURAT_MASUKTEXTCTRL2,
wxID_SURAT_MASUKTOMBOL_KE_MENU_SURAT, wxID_SURAT_MASUKTOMBOL_SIMPAN,
wxID_SURAT_MASUKTUJUAN,
] = [wx.NewId() for _init_ctrls in range(18)]
class surat_masuk(wx.Frame):
def _init_ctrls(self, prnt):
# generated method, don't edit
wx.Frame.__init__(self, id=wxID_SURAT_MASUK, name=u'surat_masuk',
parent=prnt, pos=wx.Point(438, 232), size=wx.Size(850, 370),
style=wx.DEFAULT_FRAME_STYLE, title=u'Surat Masuk')
self.SetClientSize(wx.Size(850, 370))
self.Center(wx.BOTH)
self.label_nomor_surat_masuk = wx.StaticText(id=wxID_SURAT_MASUKLABEL_NOMOR_SURAT_MASUK,
label=u'Nomor Surat Masuk', name=u'label_nomor_surat_masuk',
parent=self, pos=wx.Point(40, 48), size=wx.Size(122, 17),
style=0)
self.label_tanggal_surat = wx.StaticText(id=wxID_SURAT_MASUKLABEL_TANGGAL_SURAT,
label=u'Tanggal Surat', name=u'label_tanggal_surat', parent=self,
pos=wx.Point(40, 80), size=wx.Size(81, 17), style=0)
self.label_tujuan_surat = wx.StaticText(id=wxID_SURAT_MASUKLABEL_TUJUAN_SURAT,
label=u'Dari', name=u'label_tujuan_surat', parent=self,
pos=wx.Point(40, 112), size=wx.Size(76, 17), style=0)
self.label_isi_singkat_surat = wx.StaticText(id=wxID_SURAT_MASUKLABEL_ISI_SINGKAT_SURAT,
label=u'Isi Singkat Surat', name=u'label_isi_singkat_surat',
parent=self, pos=wx.Point(40, 144), size=wx.Size(97, 17),
style=0)
self.label_keterangan = wx.StaticText(id=wxID_SURAT_MASUKLABEL_KETERANGAN,
label=u'Disposisi', name=u'label_keterangan', parent=self,
pos=wx.Point(40, 280), size=wx.Size(88, 17), style=0)
self.input_nomor_surat = wx.TextCtrl(id=wxID_SURAT_MASUKINPUT_NOMOR_SURAT,
name=u'input_nomor_surat', parent=self, pos=wx.Point(168, 40),
size=wx.Size(312, 25), style=0, value=u'')
self.dari = wx.TextCtrl(id=wxID_SURAT_MASUKDARI, name=u'dari',
parent=self, pos=wx.Point(168, 104), size=wx.Size(312, 25),
style=0, value=u'')
self.input_keterangan = wx.TextCtrl(id=wxID_SURAT_MASUKINPUT_KETERANGAN,
name=u'input_keterangan', parent=self, pos=wx.Point(168, 280),
size=wx.Size(656, 25), style=0, value=u'')
self.kotak_surat_masuk = wx.StaticBox(id=wxID_SURAT_MASUKKOTAK_SURAT_MASUK,
label=u'Surat masuk Input', name=u'kotak_surat_masuk',
parent=self, pos=wx.Point(8, 16), size=wx.Size(832, 304),
style=0)
self.tombol_simpan = wx.lib.buttons.GenBitmapTextButton(bitmap=wx.NullBitmap,
id=wxID_SURAT_MASUKTOMBOL_SIMPAN, label=u'Simpan',
name=u'tombol_simpan', parent=self, pos=wx.Point(280, 328),
size=wx.Size(176, 31), style=0)
self.tombol_simpan.Bind(wx.EVT_BUTTON, self.OnTombol_simpanButton,
id=wxID_SURAT_MASUKTOMBOL_SIMPAN)
self.tombol_ke_menu_surat = wx.lib.buttons.GenBitmapTextButton(bitmap=wx.NullBitmap,
id=wxID_SURAT_MASUKTOMBOL_KE_MENU_SURAT,
label=u'Kembali Ke Menu Surat', name=u'tombol_ke_menu_surat',
parent=self, pos=wx.Point(464, 328), size=wx.Size(200, 31),
style=0)
self.tombol_ke_menu_surat.Bind(wx.EVT_BUTTON,
self.OnTombol_ke_menu_suratButton,
id=wxID_SURAT_MASUKTOMBOL_KE_MENU_SURAT)
self.datePickerCtrl1 = wx.DatePickerCtrl(id=wxID_SURAT_MASUKDATEPICKERCTRL1,
name='datePickerCtrl1', parent=self, pos=wx.Point(168, 72),
size=wx.Size(168, 26), style=wx.DP_SHOWCENTURY)
self.richTextCtrl1 = wx.richtext.RichTextCtrl(id=wxID_SURAT_MASUKRICHTEXTCTRL1,
parent=self, pos=wx.Point(168, 144), size=wx.Size(656, 128),
style=wx.richtext.RE_MULTILINE, value='')
self.tujuan = wx.StaticText(id=wxID_SURAT_MASUKTUJUAN, label=u'Kepada',
name=u'tujuan', parent=self, pos=wx.Point(488, 112),
size=wx.Size(48, 15), style=0)
self.textCtrl1 = wx.TextCtrl(id=wxID_SURAT_MASUKTEXTCTRL1,
name='textCtrl1', parent=self, pos=wx.Point(552, 104),
size=wx.Size(272, 25), style=0, value='')
self.perihal = wx.StaticText(id=wxID_SURAT_MASUKPERIHAL,
label=u'Perihal', name=u'perihal', parent=self, pos=wx.Point(488,
48), size=wx.Size(44, 15), style=0)
self.textCtrl2 = wx.TextCtrl(id=wxID_SURAT_MASUKTEXTCTRL2,
name='textCtrl2', parent=self, pos=wx.Point(552, 40),
size=wx.Size(272, 25), style=0, value='')
def __init__(self, parent):
self._init_ctrls(parent)
def OnTombol_ke_menu_suratButton(self, event):
self.main=input_administrasi_surat.create(None)
self.main.Show()
self.Close()
def OnTombol_simpanButton(self, event):
        # Read values from this form's own controls (the previous version referred
        # to controls from another form that do not exist here).
        inputnomorsurat = str(self.input_nomor_surat.GetValue())
        inputperihal = str(self.textCtrl2.GetValue())
        inputtanggal = self.datePickerCtrl1.GetValue().FormatISODate()
        inputdari = str(self.dari.GetValue())
        inputkepada = str(self.textCtrl1.GetValue())
        inputisi = str(self.richTextCtrl1.GetValue())
        inputdisposisi = str(self.input_keterangan.GetValue())
        # NOTE: the table and column names below are assumed for illustration;
        # `cur` and `db` are the module-level MySQL cursor and connection used
        # elsewhere in this project.
        add_surat_masuk = ("INSERT INTO suratmasuk (nomorsurat, perihal, tanggal, dari, kepada, isi, disposisi) "
                           "VALUES (%s, %s, %s, %s, %s, %s, %s)")
        cur.execute(add_surat_masuk, (inputnomorsurat, inputperihal, inputtanggal,
                                      inputdari, inputkepada, inputisi, inputdisposisi))
        db.commit()
        self.input_nomor_surat.Clear()
        self.textCtrl2.Clear()
        self.dari.Clear()
        self.textCtrl1.Clear()
        self.richTextCtrl1.Clear()
        self.input_keterangan.Clear()
        self.pesan = wx.MessageDialog(self, "Data Baru Surat Masuk Disimpan", "Konfirmasi", wx.OK)
        self.pesan.ShowModal()
| gpl-2.0 | -2,547,471,735,619,508,700 | 47.633588 | 189 | 0.638832 | false |
calamares/calamares | src/modules/dummypythonqt/main.py | 1 | 8246 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# === This file is part of Calamares - <https://calamares.io> ===
#
# SPDX-FileCopyrightText: 2016-2017 Teo Mrnjavac <[email protected]>
# SPDX-FileCopyrightText: 2017 Alf Gaida <[email protected]>
# SPDX-License-Identifier: GPL-3.0-or-later
#
# Calamares is Free Software: see the License-Identifier above.
#
import platform
from PythonQt.QtGui import *
import PythonQt.calamares as calamares
# WARNING: the Calamares PythonQt API is considered EXPERIMENTAL as of
# Calamares 2.5. It comes with no promise or commitment to API stability.
# Set up translations.
# You may skip this if your Calamares module has no user visible strings.
# DO NOT install _ into the builtin namespace because each module loads
# its own catalog.
# DO use the gettext class-based API and manually alias _ as described in:
# https://docs.python.org/3.5/library/gettext.html#localizing-your-module
import gettext
import inspect
import os
_filename = inspect.getframeinfo(inspect.currentframe()).filename
_path = os.path.dirname(os.path.abspath(_filename))
_ = gettext.gettext
# Example Python ViewModule.
# A Python ViewModule is a Python program which defines a ViewStep class.
# One UI module ==> one ViewStep.
# This class must be marked with the @calamares_module decorator. A
# ViewModule may define other classes, but only one may be decorated with
# @calamares_module. Such a class must conform to the Calamares ViewStep
# interface and functions as the entry point of the module.
# A ViewStep manages one or more "wizard pages" through methods like
# back/next, and reports its status through isNextEnabled/isBackEnabled/
# isAtBeginning/isAtEnd. The whole UI, including all the pages, must be
# exposed as a single QWidget, returned by the widget function.
#
# For convenience, both C++ and PythonQt ViewSteps are considered to be
# implementations of ViewStep.h. Additionally, the Calamares PythonQt API
# allows Python developers to keep their identifiers more Pythonic on the
# Python side. Thus, all of the following are considered valid method
# identifiers in a ViewStep implementation: isNextEnabled, isnextenabled,
# is_next_enabled.
@calamares_module
class DummyPythonQtViewStep:
def __init__(self):
# Importing PythonQt.QtGui provides access to most Qt widget classes.
self.main_widget = QFrame()
self.main_widget.setLayout(QVBoxLayout())
label = QLabel()
self.main_widget.layout().addWidget(label)
accumulator = "\nCalamares+PythonQt running embedded Python " +\
platform.python_version()
label.text = accumulator
btn = QPushButton()
# Python strings can be used wherever a method wants a QString. Python
# gettext translations can be used seamlessly as well.
btn.setText(_("Click me!"))
self.main_widget.layout().addWidget(btn)
# The syntax for signal-slot connections is very simple, though
# slightly different from the C++ equivalent. There are no SIGNAL and
# SLOT macros, and a signal can be connected to any Python method
# (without a special "slot" designation).
btn.connect("clicked(bool)", self.on_btn_clicked)
def on_btn_clicked(self):
self.main_widget.layout().addWidget(QLabel(_("A new QLabel.")))
def prettyName(self):
return _("Dummy PythonQt ViewStep")
def isNextEnabled(self):
return True # The "Next" button should be clickable
def isBackEnabled(self):
return True # The "Back" button should be clickable
def isAtBeginning(self):
# True means the currently shown UI page is the first page of this
# module, thus a "Back" button click will not be handled by this
# module and will cause a skip to the previous ViewStep instead
# (if any). False means that the present ViewStep provides other UI
# pages placed logically "before" the current one, thus a "Back" button
# click will be handled by this module instead of skipping to another
# ViewStep. A module (ViewStep) with only one page will always return
# True here.
return True
def isAtEnd(self):
# True means the currently shown UI page is the last page of this
# module, thus a "Next" button click will not be handled by this
# module and will cause a skip to the next ViewStep instead (if any).
# False means that the present ViewStep provides other UI pages placed
# logically "after" the current one, thus a "Next" button click will
# be handled by this module instead of skipping to another ViewStep.
# A module (ViewStep) with only one page will always return True here.
return True
def jobs(self):
# Returns a list of objects that implement Calamares::Job.
return [DummyPQJob("Dummy PythonQt job reporting for duty")]
def widget(self):
# Returns the base QWidget of this module's UI.
return self.main_widget
def retranslate(self, locale_name):
# This is where it gets slightly weird. In most desktop applications we
# shouldn't need this kind of mechanism, because we could assume that
# the operating environment is configured to use a certain language.
# Usually the user would change the system-wide language in a settings
# UI, restart the application, done.
# Alas, Calamares runs on an unconfigured live system, and one of the
# core features of Calamares is to allow the user to pick a language.
# Unfortunately, strings in the UI do not automatically react to a
# runtime language change. To get UI strings in a new language, all
# user-visible strings must be retranslated (by calling tr() in C++ or
# _() in Python) and reapplied on the relevant widgets.
# When the user picks a new UI translation language, Qt raises a QEvent
# of type LanguageChange, which propagates through the QObject
# hierarchy. By catching and reacting to this event, we can show
# user-visible strings in the new language at the right time.
# The C++ side of the Calamares PythonQt API catches the LanguageChange
# event and calls the present method. It is then up to the module
# developer to add here all the needed code to load the module's
# translation catalog for the new language (which is separate from the
# main Calamares strings catalog) and reapply any user-visible strings.
calamares.utils.debug("PythonQt retranslation event "
"for locale name: {}".format(locale_name))
# First we load the catalog file for the new language...
try:
global _
_t = gettext.translation('dummypythonqt',
localedir=os.path.join(_path, 'lang'),
languages=[locale_name])
_ = _t.gettext
except OSError as e:
calamares.utils.debug(e)
pass
# ... and then we can call setText(_("foo")) and similar methods on
# the relevant widgets here to reapply the strings.
# An example Job class. Implements Calamares::Job. For method identifiers, the
# same rules apply as for ViewStep. No decorators are necessary here, because
# only the ViewStep implementation is the unique entry point, and a module can
# have any number of jobs.
class DummyPQJob:
def __init__(self, my_msg):
self.my_msg = my_msg
def pretty_name(self):
return _("The Dummy PythonQt Job")
def pretty_description(self):
return _("This is the Dummy PythonQt Job. "
"The dummy job says: {}").format(self.my_msg)
def pretty_status_message(self):
return _("A status message for Dummy PythonQt Job.")
def exec(self):
# As an example, we touch a file in the target root filesystem.
rmp = calamares.global_storage['rootMountPoint']
os.system("touch {}/calamares_dpqt_was_here".format(rmp))
calamares.utils.debug("the dummy job says {}".format(self.my_msg))
return {'ok': True}
| gpl-3.0 | -682,205,914,237,107,000 | 42.861702 | 79 | 0.680936 | false |
ofavre/cellulart | utils.py | 1 | 1311 | # -*- coding: utf-8 -*-
# License: See LICENSE file.
import math
CLASSIC = 0
WRAP = 1
#MOBIUS = 2
def distance(world_shape, type=CLASSIC):
def distance_classic(a,b):
val = 0.0
for ad,bd in zip(a,b):
val += (ad-bd)**2
return math.sqrt(val)
def distance_wrap(a,b):
val = 0.0
i = 0
for ad,bd in zip(a,b):
di = world_shape[i]
ad %= di
bd %= di
dist = abs(ad-bd)
if ad < bd:
dist2 = abs(ad+di-bd)
if dist2 < dist:
dist = dist2
else:
dist2 = abs(ad-(bd+di))
if dist2 < dist:
dist = dist2
val += dist**2
i += 1
return math.sqrt(val)
if type == CLASSIC:
return distance_classic
elif type == WRAP:
return distance_wrap
else:
return None
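# Illustrative example: on a 10x10 wrapping world the points (9, 0) and (0, 9)
# are only sqrt(2) apart, because each coordinate wraps around the edge:
#   distance((10, 10), WRAP)((9, 0), (0, 9))  ->  ~1.414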
def weighted_sum_wrap(value1, weight1, value2, weight2, wrap_length):
if value1 < value2 and (value1+wrap_length)-value2 < value2-value1:
value1 += wrap_length
elif value2 < value1 and (value2+wrap_length)-value1 < value1-value2:
value2 += wrap_length
return ( (weight1*value1 + weight2*value2) / (weight1+weight2) ) % wrap_length
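# Illustrative example: averaging two headings across the wrap point,
#   weighted_sum_wrap(350.0, 1.0, 10.0, 1.0, 360.0)  ->  0.0
# because 10.0 is first shifted to 370.0 before taking the weighted mean.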
| bsd-3-clause | -2,513,885,121,454,406,000 | 23.735849 | 84 | 0.500381 | false |
nathanbjenx/cairis | cairis/core/colourcodes.py | 1 | 3560 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
__author__ = 'Shamal Faily'
def threatColourCode(valueId):
if (valueId == 9): return '359 1 .5'
elif (valueId > 9): return '359 1 .5'
elif (valueId == 8): return '359 1 .7'
elif (valueId == 7): return '6 .86 .44'
elif (valueId == 6): return '10 .7 .94'
elif (valueId == 5): return '19 .65 .99'
elif (valueId == 4): return '27 .48 .99'
elif (valueId == 3): return '34 .38 .99'
elif (valueId == 2): return '36 .21 1'
elif (valueId == 1): return '37 .07 1'
elif (valueId < 1): return '37 .07 1'
def threatLikelihoodColourCode(tlValue):
# Based on orrd5 color scheme
if tlValue == 'Incredible': return "#fef0d9"
elif tlValue == 'Improbable': return "#fdcc8a"
elif tlValue == 'Remote': return "#fc8d59"
elif tlValue == 'Occasional': return "#e34a33"
else: return "#b30000"
def vulnerabilitySeverityColourCode(vsValue):
# Based on orrd4 color scheme
if vsValue == 'Negligible': return '1'
elif vsValue == 'Marginal': return '2'
elif vsValue == 'Critical': return '3'
else: return '4'
def vulnerabilitySeverityTextColourCode(vsValue):
if (vsValue == 'Catastrophic'): return 'white'
else: return 'black'
def usabilityColourCode(valueId):
if (valueId <= 1): return '#F7FBFF'
elif (valueId == 2): return '#DEEBF7'
elif (valueId == 3): return '#C6DBEF'
elif (valueId == 4): return '#9ECAE1'
elif (valueId == 5): return '#6BAED6'
elif (valueId == 6): return '#4292C6'
elif (valueId == 7): return '#2171B5'
elif (valueId == 8): return '#08519C'
elif (valueId == 9): return '#08306B'
elif (valueId > 9): return '#08306B'
def obstacleColourCode(valueId):
if (valueId <= 0.2): return '1'
elif (valueId <= 0.3): return '2'
elif (valueId <= 0.4): return '3'
elif (valueId <= 0.5): return '4'
elif (valueId <= 0.6): return '5'
elif (valueId <= 0.7): return '6'
elif (valueId <= 0.8): return '7'
elif (valueId <= 0.9): return '8'
else: return '9'
def riskTextColourCode(valueId):
if (valueId >= 7): return 'white'
else: return 'black'
def usabilityTextColourCode(valueId):
if (valueId >= 7): return 'white'
else: return 'black'
def probabilityTextColourCode(valueId):
if (valueId >= 0.5): return 'white'
else: return 'black'
def surfaceTypeColourCode(valueId):
if (valueId == 9): return '359 1 .5'
elif (valueId > 9): return '359 1 .5'
elif (valueId == 8): return '359 1 .7'
elif (valueId == 7): return '6 .86 .44'
elif (valueId == 6): return '10 .7 .94'
elif (valueId == 5): return '19 .65 .99'
elif (valueId == 4): return '27 .48 .99'
elif (valueId == 3): return '34 .38 .99'
elif (valueId == 2): return '36 .21 1'
elif (valueId <= 1): return '37 .07 1'
def surfaceTypeTextColourCode(valueId):
if (valueId >= 7): return 'white'
else: return 'black'
| apache-2.0 | 3,056,037,936,750,514,000 | 33.901961 | 63 | 0.660674 | false |
operasoftware/twisted-apns | apns/notification.py | 1 | 5424 | from datetime import datetime
import binascii
import json
import struct
from apns.commands import NOTIFICATION
from apns.utils import datetime_to_timestamp
class NotificationError(Exception):
"""To be thrown upon failures on notification processing."""
pass
class NotificationInvalidPriorityError(NotificationError):
"""
Thrown while packing a notification, if the notification priority field is
invalid.
"""
pass
class NotificationPayloadNotSerializableError(NotificationError):
"""
Thrown while packing a notification, if the notification payload field
could not be serialized to JSON.
"""
pass
class NotificationTokenUnhexlifyError(NotificationError):
"""
Thrown while packing a notification, if the notification token field could
not be converted to binary from its hex representation.
"""
def __init__(self, msg):
super(NotificationTokenUnhexlifyError, self).__init__(msg)
class NotificationInvalidCommandError(NotificationError):
"""
Thrown while unpacking a notification, if the notification command field
    contains an invalid value.
"""
pass
class NotificationInvalidIdError(NotificationError):
"""
Thrown while unpacking a notification, if the notification structure is
invalid.
"""
pass
class Notification(object):
"""
A representation of the structure of a notification request, as defined in
the iOS documentation.
"""
COMMAND = NOTIFICATION
PRIORITY_NORMAL = 5
PRIORITY_IMMEDIATELY = 10
PRIORITIES = (PRIORITY_NORMAL, PRIORITY_IMMEDIATELY)
PAYLOAD = 2
TOKEN = 1
PRIORITY = 5
NOTIFICATION_ID = 3
EXPIRE = 4
EXPIRE_IMMEDIATELY = 0
def __init__(self, payload=None, token=None, expire=None,
priority=PRIORITY_NORMAL, iden=0):
"""
Init an instance of Notification.
:param payload: object containing structure of payload to be sent to
remote device.
:param token: string containing target device token in hex
:param expire: notification expire time as UNIX timestamp, 0 means that
notification expires immediately.
:param priority: notification priority, as described in iOS
documentation
:param iden: notification ID, as described in iOS documentation
"""
self.payload = payload
self.token = token
self.expire = expire
self.priority = priority
self.iden = iden
def __str__(self):
return '<Notification: %s>' % self.token
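    # Illustrative round trip (token and payload values are made up):
    #   n = Notification(payload={'aps': {'alert': 'Hi'}}, token='0123456789abcdef',
    #                    expire=Notification.EXPIRE_IMMEDIATELY)
    #   data = n.to_binary_string()
    #   Notification().from_binary_string(data)  # recovers payload, token, expire and priority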
def to_binary_string(self):
"""Pack the notification to binary form and return it as string."""
if self.priority not in self.PRIORITIES:
raise NotificationInvalidPriorityError()
try:
token = binascii.unhexlify(self.token)
except TypeError as error:
raise NotificationTokenUnhexlifyError(error)
try:
payload = json.dumps(self.payload)
except TypeError:
raise NotificationPayloadNotSerializableError()
fmt = ">BIBH{0}sBH{1}sBHIBHIBHB".format(len(token), len(payload))
expire = (0 if self.expire == self.EXPIRE_IMMEDIATELY else
datetime_to_timestamp(self.expire))
# |COMMAND|FRAME-LEN|{token}|{payload}|{id:4}|{expire:4}|{priority:1}
# 5 items, each 3 bytes prefix, then each item length
length = 3*5 + len(token) + len(payload) + 4 + 4 + 1
message = struct.pack(fmt, self.COMMAND, length,
self.TOKEN, len(token), token,
self.PAYLOAD, len(payload), payload,
self.NOTIFICATION_ID, 4, self.iden,
self.EXPIRE, 4, expire,
self.PRIORITY, 1, self.priority)
return message
def from_binary_string(self, notification):
"""Unpack the notification from binary string."""
command = struct.unpack('>B', notification[0])[0]
if command != self.COMMAND:
raise NotificationInvalidCommandError()
length = struct.unpack('>I', notification[1:5])[0]
notification = notification[5:]
offset = 0
def next_item(offset):
iden, length = struct.unpack('>BH', notification[offset:offset+3])
offset += 3
payload = notification[offset:offset+length]
offset += length
if iden == self.PAYLOAD:
payload = struct.unpack('>{0}s'.format(length), payload)[0]
self.payload = json.loads(payload)
elif iden == self.TOKEN:
payload = struct.unpack('>{0}s'.format(length), payload)[0]
self.token = binascii.hexlify(payload)
elif iden == self.PRIORITY:
self.priority = struct.unpack('>B', payload)[0]
elif iden == self.NOTIFICATION_ID:
self.iden = struct.unpack('>I', payload)[0]
elif iden == self.EXPIRE:
payload = struct.unpack('>I', payload)[0]
self.expire = (self.EXPIRE_IMMEDIATELY if payload == 0 else
datetime.fromtimestamp(payload))
else:
raise NotificationInvalidIdError()
return offset
while offset < length:
offset = next_item(offset)
| mit | 4,067,661,437,472,473,600 | 31.872727 | 79 | 0.615413 | false |
jahodfra/mrkev | mrkev/__init__.py | 1 | 1675 | __version__ = '0.1'
__license__ = '''
Copyright (c) 2012, Frantisek Jahoda
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the <ORGANIZATION> nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
from mrkev.interpreter import Template
from mrkev.parser import MarkupSyntaxError, Parser
__all__ = ['MarkupSyntaxError', 'Parser', 'Template']
| bsd-3-clause | 1,534,452,892,119,909,400 | 87.157895 | 755 | 0.798806 | false |
vessemer/concept-to-clinic | prediction/src/tests/test_segmentation.py | 1 | 2579 | import os
import time
import pylidc as pl
import pytest
from config import Config
from . import get_timeout
from ..algorithms.identify.prediction import load_patient_images
from ..algorithms.segment.trained_model import predict
from ..preprocess.lung_segmentation import save_lung_segments, get_z_range
def test_correct_paths(dicom_paths):
assert os.path.isdir(Config.SEGMENT_ASSETS_DIR)
for path in dicom_paths:
assert os.path.isdir(path)
def test_segment_predict_load(dicom_path):
predicted = predict(dicom_path, [])
assert predicted['volumes'] == []
def test_segment_dicom(dicom_path, nodule_locations):
predicted = predict(dicom_path, nodule_locations)
assert isinstance(predicted['binary_mask_path'], str)
assert predicted['volumes']
assert predicted['volumes'][0] > 0
def test_segment_luna(metaimage_path, luna_nodule):
predicted = predict(metaimage_path, [luna_nodule])
assert isinstance(predicted['binary_mask_path'], str)
assert predicted['volumes']
assert predicted['volumes'][0] > 0
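# Assumed result shape (not taken from the project docs): on success,
# predict() is expected to return something like
#     {'binary_mask_path': '/tmp/segmentation_mask.npy',  # hypothetical path
#      'volumes': [1234.5]}                               # one volume per nodule
# which is the structure the assertions in these tests rely on.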
@pytest.mark.stop_timeout
def test_nodule_segmentation(dicom_path, nodule_001):
predict(dicom_path, [nodule_001])
@pytest.mark.stop_timeout
def test_lung_segmentation(dicom_paths):
"""Test whether the annotations of the LIDC images are inside the segmented lung masks.
Iterate over all local LIDC images, fetch the annotations, compute their positions within the masks and check that
at this point the lung masks are set to 255."""
for path in dicom_paths:
min_z, max_z = get_z_range(path)
directories = path.split('/')
lidc_id = directories[2]
patient_id = directories[-1]
original, mask = save_lung_segments(path, patient_id)
original_shape, mask_shape = original.shape, mask.shape
scan = pl.query(pl.Scan).filter(pl.Scan.patient_id == lidc_id).first()
for annotation in scan.annotations:
centroid_x, centroid_y, centroid_z = annotation.centroid()
patient_mask = load_patient_images(patient_id, wildcard="*_m.png")
x_mask = int(mask_shape[1] / original_shape[1] * centroid_x)
y_mask = int(mask_shape[2] / original_shape[2] * centroid_y)
z_mask = int(abs(min_z) - abs(centroid_z))
mask_value = patient_mask[z_mask, x_mask, y_mask]
assert mask_value == 255
@pytest.mark.stop_timeout
def test_stop_timeout():
timeout = get_timeout()
if timeout > 0:
time.sleep(timeout + 1)
raise ValueError("This test should timeout")
| mit | -8,960,259,686,865,526,000 | 32.934211 | 118 | 0.68166 | false |
dabrahams/0install | zeroinstall/0launch-gui/main.py | 1 | 6132 | # Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
from __future__ import print_function
import os, sys
import logging
import warnings
from optparse import OptionParser
from zeroinstall import _, SafeException
from zeroinstall.injector import requirements
from zeroinstall.injector.driver import Driver
from zeroinstall.injector.config import load_config
from zeroinstall.support import tasks
_recalculate = tasks.Blocker('recalculate')
def recalculate():
"""Ask the mainloop to recalculate. If we're already recalculating, wait for that to finish
and then do it again."""
global _recalculate
_recalculate.trigger()
_recalculate = tasks.Blocker('recalculate')
def run_gui(args):
parser = OptionParser(usage=_("usage: %prog [options] interface"))
parser.add_option("", "--before", help=_("choose a version before this"), metavar='VERSION')
parser.add_option("", "--cpu", help=_("target CPU type"), metavar='CPU')
parser.add_option("", "--command", help=_("command to select"), metavar='COMMAND')
parser.add_option("-d", "--download-only", help=_("fetch but don't run"), action='store_true')
parser.add_option("-g", "--force-gui", help=_("display an error if there's no GUI"), action='store_true')
parser.add_option("", "--message", help=_("message to display when interacting with user"))
parser.add_option("", "--not-before", help=_("minimum version to choose"), metavar='VERSION')
parser.add_option("", "--os", help=_("target operation system type"), metavar='OS')
parser.add_option("-r", "--refresh", help=_("check for updates of all interfaces"), action='store_true')
parser.add_option("", "--select-only", help=_("only download the feeds"), action='store_true')
parser.add_option("-s", "--source", help=_("select source code"), action='store_true')
parser.add_option("", "--systray", help=_("download in the background"), action='store_true')
parser.add_option("-v", "--verbose", help=_("more verbose output"), action='count')
parser.add_option("-V", "--version", help=_("display version information"), action='store_true')
parser.add_option("", "--with-store", help=_("add an implementation cache"), action='append', metavar='DIR')
parser.disable_interspersed_args()
(options, args) = parser.parse_args(args)
if options.verbose:
logger = logging.getLogger()
if options.verbose == 1:
logger.setLevel(logging.INFO)
else:
logger.setLevel(logging.DEBUG)
if options.version:
import gui
print("0launch-gui (zero-install) " + gui.version)
print("Copyright (C) 2010 Thomas Leonard")
print(_("This program comes with ABSOLUTELY NO WARRANTY,"
"\nto the extent permitted by law."
"\nYou may redistribute copies of this program"
"\nunder the terms of the GNU Lesser General Public License."
"\nFor more information about these matters, see the file named COPYING."))
sys.exit(0)
def nogui(ex):
if options.force_gui:
fn = logging.warn
else:
fn = logging.info
fn("No GUI available", exc_info = ex)
sys.exit(100)
with warnings.catch_warnings():
if not options.force_gui:
warnings.filterwarnings("ignore")
if sys.version_info[0] < 3:
try:
import pygtk; pygtk.require('2.0')
except ImportError as ex:
nogui(ex)
import gui
try:
if sys.version_info[0] > 2:
from zeroinstall.gtkui import pygtkcompat
pygtkcompat.enable()
pygtkcompat.enable_gtk(version = '3.0')
import gtk
except (ImportError, ValueError) as ex:
nogui(ex)
if gtk.gdk.get_display() is None:
try:
raise SafeException("Failed to connect to display.")
except SafeException as ex:
nogui(ex) # logging needs this as a raised exception
handler = gui.GUIHandler()
config = load_config(handler)
if options.with_store:
from zeroinstall import zerostore
for x in options.with_store:
config.stores.stores.append(zerostore.Store(os.path.abspath(x)))
if len(args) < 1:
@tasks.async
def prefs_main():
import preferences
box = preferences.show_preferences(config)
done = tasks.Blocker('close preferences')
box.connect('destroy', lambda w: done.trigger())
yield done
tasks.wait_for_blocker(prefs_main())
sys.exit(0)
interface_uri = args[0]
if len(args) > 1:
parser.print_help()
sys.exit(1)
import mainwindow, dialog
r = requirements.Requirements(interface_uri)
r.parse_options(options)
widgets = dialog.Template('main')
driver = Driver(config = config, requirements = r)
root_iface = config.iface_cache.get_interface(interface_uri)
driver.solver.record_details = True
window = mainwindow.MainWindow(driver, widgets, download_only = bool(options.download_only), select_only = bool(options.select_only))
handler.mainwindow = window
if options.message:
window.set_message(options.message)
root = config.iface_cache.get_interface(r.interface_uri)
window.browser.set_root(root)
window.window.connect('destroy', lambda w: handler.abort_all_downloads())
if options.systray:
window.use_systray_icon()
@tasks.async
def main():
force_refresh = bool(options.refresh)
while True:
window.refresh_button.set_sensitive(False)
window.browser.set_update_icons(force_refresh)
solved = driver.solve_with_downloads(force = force_refresh, update_local = True)
if not window.systray_icon:
window.show()
yield solved
try:
window.refresh_button.set_sensitive(True)
window.browser.highlight_problems()
tasks.check(solved)
except Exception as ex:
window.report_exception(ex)
if window.systray_icon and window.systray_icon.get_visible() and \
window.systray_icon.is_embedded():
if driver.solver.ready:
window.systray_icon.set_tooltip(_('Downloading updates for %s') % root_iface.get_name())
window.run_button.set_active(True)
else:
# Should already be reporting an error, but
# blink it again just in case
window.systray_icon.set_blinking(True)
refresh_clicked = dialog.ButtonClickedBlocker(window.refresh_button)
yield refresh_clicked, _recalculate
if refresh_clicked.happened:
force_refresh = True
tasks.wait_for_blocker(main())
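# Illustrative sketch (not part of the original module): run_gui() is normally
# invoked by the 0launch front end with the remaining command-line arguments,
# for example something like
#     run_gui(['--download-only', 'http://example.com/prog.xml'])
# where the interface URI is only a placeholder.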
| lgpl-2.1 | 4,441,060,670,262,614,500 | 31.791444 | 134 | 0.708415 | false |
otov4its/django-walletone | walletone/views.py | 1 | 1337 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.http.response import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
from .forms import WalletOneConfirmForm
from . import signals
logger = logging.getLogger(__name__)
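# Hypothetical usage sketch (not part of the original module): a project using
# this view would typically connect a receiver to the payment_received signal
# sent below, for example:
#     from walletone import signals
#
#     def handle_payment(sender, payment, **kwargs):  # hypothetical receiver
#         ...  # e.g. mark the related order as paid
#
#     signals.payment_received.connect(handle_payment)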
@csrf_exempt
def payment_confirm(request):
if request.method == 'POST':
logger.info('Received a request from WalletOne')
confirm_form = WalletOneConfirmForm(request.POST)
if confirm_form.is_valid():
payment = confirm_form.save()
logger.info('Payment was created')
# Send signal with payment object as arguments
signals.payment_received.send(sender=type(payment), payment=payment)
logger.info('payment_received signal was sent')
return HttpResponse('WMI_RESULT=OK')
else:
errors_message = ''
for key, messages in confirm_form.errors.items():
errors_message += ' '.join(messages)
errors_message = 'Received form not valid: ' + errors_message
logger.warning(errors_message)
return HttpResponse(
'WMI_RESULT=OK&WMI_DESCRIPTION=%s' % errors_message
)
else:
return HttpResponseBadRequest("Expected POST request") | mit | 1,945,754,972,225,154,600 | 35.162162 | 80 | 0.646223 | false |
amrishparmar/mal_cl_interface | nl_interface/search.py | 1 | 4686 | import html
from enum import Enum
import click
import requests
from bs4 import BeautifulSoup
import agent
import network
import ui
class StatusCode(Enum):
"""An Enum represented the type of result of database searches"""
NO_RESULTS = 0
USER_CANCELLED = 1
def display_entry_details(entry):
"""Display all the details of a given entry
:param entry: an anime or manga entry as a Beautiful Soup Tag object
"""
for detail in entry.children:
# ignore newlines in children
if detail != "\n":
            # replace underscores in the tag name with spaces and convert to title case
detail_name = detail.name.replace("_", " ").title()
# set the string to be the detail.string by default
detail_string = detail.string
# check that the string contains something
if detail_string is not None:
# unescape html entities and remove break tags
detail_string = html.unescape(detail_string).replace("<br />", "")
detail_string = detail_string.replace("[i]", "").replace("[/i]", "")
click.echo("{}: {}".format(detail_name, detail_string))
def search(credentials, search_type, search_string, display_details=True):
"""Search for an anime or manga entry
:param credentials: A tuple containing valid MAL account details in the format (username, password)
:param search_type: A string denoting the media type to search for, should be either "anime" or "manga"
:param search_string: A string, the anime or manga to search for
:param display_details: A boolean, whether to print the details of the found entry or whether to just return it
:return: A beautiful soup tag, or a network status code if there was an error or the user quit
"""
if search_type not in ["anime", "manga"]:
raise ValueError("Invalid argument for {}, must be either {} or {}.".format(search_type, "anime", "manga"))
url = "https://myanimelist.net/api/{}/search.xml?q={}".format(search_type, search_string.replace(" ", "+"))
# send the async search request to the server
r = ui.threaded_action(network.make_request, "Searching for \"{}\"".format(search_string), request=requests.get,
url=url, auth=credentials, stream=True)
# check if there was an error with the user's internet connection
if r == network.StatusCode.CONNECTION_ERROR:
agent.print_connection_error_msg()
return r
if r.status_code == 204:
agent.print_msg("I'm sorry I could not find any results for \"{}\".".format(search_string))
return StatusCode.NO_RESULTS
elif r.status_code == 200:
# decode the raw content so beautiful soup can read it as xml not a string
r.raw.decode_content = True
soup = BeautifulSoup(r.raw, "xml")
# get all entries
matches = soup.find_all("entry")
        # store the length of the matches list since it is needed multiple times
num_results = len(matches)
if num_results == 1:
if display_details:
display_entry_details(matches[0])
else:
return matches[0]
else:
agent.print_msg("I found {} results. Did you mean:".format(num_results))
# iterate over the matches and print them out
for i in range(num_results):
# use a different layout for entries that don't have any synonyms
title_format = "{}> {} ({})" if matches[i].synonyms.get_text() != "" else "{}> {}"
click.echo(title_format.format(i + 1, matches[i].title.get_text(), matches[i].synonyms.get_text()))
click.echo("{}> [None of these]".format(num_results + 1))
# get a valid choice from the user
while True:
option = click.prompt("Please choose an option", type=int)
if 1 <= option <= num_results + 1:
break
else:
click.echo("You must enter a value between {} and {}".format(1, num_results + 1))
click.echo()
# check that the user didn't choose the none of these option before trying to display entry
if option != num_results + 1:
if display_details:
display_entry_details(matches[option - 1])
else:
return matches[option - 1]
else:
return StatusCode.USER_CANCELLED
else:
agent.print_msg("There was an error getting the entry on your list. Please try again.")
return network.StatusCode.OTHER_ERROR
| mit | 740,526,283,656,575,100 | 39.396552 | 116 | 0.607981 | false |
vlegoff/tsunami | src/secondaires/peche/commandes/banc/editer.py | 1 | 2649 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant le paramètre 'éditer' de la commande 'banc'."""
from primaires.interpreteur.masque.parametre import Parametre
class PrmEditer(Parametre):
"""Paramètre 'éditer de la commande 'banc'"""
def __init__(self):
"""Constructeur du paramètre."""
        Parametre.__init__(self, "éditer", "edit")
self.schema = "<cle>"
self.aide_courte = "ouvre l'Γ©diteur de banc de poisson"
self.aide_longue = \
"Cette commande permet d'accΓ©der Γ l'Γ©diteur " \
"du banc de poisson indiquΓ©."
def interpreter(self, personnage, dic_masques):
"""MΓ©thode d'interprΓ©tation de commande"""
cle = dic_masques["cle"].cle
if cle not in importeur.peche.bancs:
personnage << "|err|Ce banc n'existe pas.|ff|"
return
banc = importeur.peche.bancs[cle]
editeur = importeur.interpreteur.construire_editeur(
"schooledit", personnage, banc)
personnage.contextes.ajouter(editeur)
editeur.actualiser()
| bsd-3-clause | -2,259,236,170,578,934,000 | 43.677966 | 79 | 0.709408 | false |
reviewboard/rbtools | rbtools/clients/plastic.py | 1 | 13440 | """A client for Plastic SCM."""
from __future__ import unicode_literals
import logging
import os
import re
from rbtools.clients import SCMClient, RepositoryInfo
from rbtools.clients.errors import (InvalidRevisionSpecError,
TooManyRevisionsError,
SCMError)
from rbtools.utils.checks import check_install
from rbtools.utils.filesystem import make_tempfile
from rbtools.utils.process import execute
class PlasticClient(SCMClient):
"""A client for Plastic SCM.
This is a wrapper around the cm executable that fetches repository
information and generates compatible diffs.
"""
name = 'Plastic'
server_tool_names = 'Plastic SCM'
supports_changesets = True
supports_patch_revert = True
REVISION_CHANGESET_PREFIX = 'cs:'
def __init__(self, **kwargs):
"""Initialize the client.
Args:
**kwargs (dict):
Keyword arguments to pass through to the superclass.
"""
super(PlasticClient, self).__init__(**kwargs)
def get_local_path(self):
"""Return the local path to the working tree.
Returns:
unicode:
The filesystem path of the repository on the client system.
"""
if not check_install(['cm', 'version']):
logging.debug('Unable to execute "cm version": skipping Plastic')
return None
# Get the workspace directory, so we can strip it from the diff output
self.workspacedir = execute(['cm', 'gwp', '.', '--format={1}'],
split_lines=False,
ignore_errors=True).strip()
logging.debug('Workspace is %s', self.workspacedir)
# Get the repository that the current directory is from
split = execute(['cm', 'ls', self.workspacedir, '--format={8}'],
split_lines=True, ignore_errors=True)
# remove blank lines
split = [x for x in split if x]
m = re.search(r'^rep:(.+)$', split[0], re.M)
if not m:
return None
return m.group(1)
def get_repository_info(self):
"""Return repository information for the current working tree.
Returns:
rbtools.clients.RepositoryInfo:
The repository info structure.
"""
local_path = self.get_local_path()
if local_path:
return RepositoryInfo(path=local_path,
local_path=local_path)
return None
def parse_revision_spec(self, revisions=[]):
"""Parse the given revision spec.
Args:
revisions (list of unicode, optional):
A list of revisions as specified by the user. Items in the list
do not necessarily represent a single revision, since the user
can use SCM-native syntaxes such as ``r1..r2`` or ``r1:r2``.
SCMTool-specific overrides of this method are expected to deal
with such syntaxes.
Raises:
rbtools.clients.errors.InvalidRevisionSpecError:
The given revisions could not be parsed.
rbtools.clients.errors.TooManyRevisionsError:
The specified revisions list contained too many revisions.
Returns:
dict:
A dictionary with the following keys:
``base`` (:py:class:`NoneType`):
Always None.
``tip`` (:py:class:`unicode`):
A revision to use as the tip of the resulting diff.
These will be used to generate the diffs to upload to Review Board
(or print). The Plastic implementation requires that one and only
one revision is passed in. The diff for review will include the
changes in the given changeset or branch.
"""
n_revisions = len(revisions)
if n_revisions == 0:
raise InvalidRevisionSpecError(
'Either a changeset or a branch must be specified')
elif n_revisions == 1:
return {
'base': None,
'tip': revisions[0],
}
else:
raise TooManyRevisionsError
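    # Illustrative sketch (not part of the original client): for an invocation
    # such as `rbt post cs:42`, revisions would be ['cs:42'] and
    # parse_revision_spec() would return
    #     {'base': None, 'tip': 'cs:42'}
    # which diff() below turns into a diff of that changeset.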
def diff(self, revisions, include_files=[], exclude_patterns=[],
extra_args=[], **kwargs):
"""Perform a diff across all modified files in a Plastic workspace.
        Parent diffs are not supported.
Args:
revisions (dict):
A dictionary of revisions, as returned by
:py:meth:`parse_revision_spec`.
include_files (list of unicode, optional):
A list of files to whitelist during the diff generation.
exclude_patterns (list of unicode, optional):
A list of shell-style glob patterns to blacklist during diff
generation.
extra_args (list, unused):
Additional arguments to be passed to the diff generation.
**kwargs (dict, unused):
Unused keyword arguments.
Returns:
dict:
A dictionary containing the following keys:
``diff`` (:py:class:`bytes`):
The contents of the diff to upload.
``changenum`` (:py:class:`unicode`):
The number of the changeset being posted (if ``revisions``
represents a single changeset).
"""
# TODO: use 'files'
changenum = None
tip = revisions['tip']
if tip.startswith(self.REVISION_CHANGESET_PREFIX):
logging.debug('Doing a diff against changeset %s', tip)
try:
changenum = str(int(
tip[len(self.REVISION_CHANGESET_PREFIX):]))
except ValueError:
pass
else:
logging.debug('Doing a diff against branch %s', tip)
if not getattr(self.options, 'branch', None):
self.options.branch = tip
diff_entries = execute(
['cm', 'diff', tip, '--format={status} {path} rev:revid:{revid} '
'rev:revid:{parentrevid} src:{srccmpath} '
'dst:{dstcmpath}{newline}'],
results_unicode=False,
split_lines=True)
diff = self._process_diffs(diff_entries)
return {
'diff': diff,
'changenum': changenum,
}
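    # Assumed output sketch (not captured from a real run): with the --format
    # string used above, each `cm diff` line is expected to look roughly like
    #     C /ws/src/foo.c rev:revid:1234 rev:revid:1200 src:/src/foo.c dst:/src/foo.c
    # which is the shape the regular expression in _process_diffs() parses.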
def _process_diffs(self, diff_entries):
"""Process the given diff entries.
Args:
diff_entries (list):
The list of diff entries.
Returns:
bytes:
The processed diffs.
"""
diff_lines = []
empty_filename = make_tempfile()
tmp_diff_from_filename = make_tempfile()
tmp_diff_to_filename = make_tempfile()
for f in diff_entries:
f = f.strip()
if not f:
continue
m = re.search(br'(?P<type>[ACMD]) (?P<file>.*) '
br'(?P<revspec>rev:revid:[-\d]+) '
br'(?P<parentrevspec>rev:revid:[-\d]+) '
br'src:(?P<srcpath>.*) '
br'dst:(?P<dstpath>.*)$',
f)
if not m:
                raise SCMError('Could not parse "cm diff" response: %s' % f)
changetype = m.group('type')
filename = m.group('file')
if changetype == b'M':
# Handle moved files as a delete followed by an add.
# Clunky, but at least it works
oldfilename = m.group('srcpath')
oldspec = m.group('revspec')
newfilename = m.group('dstpath')
newspec = m.group('revspec')
self._write_file(oldfilename, oldspec, tmp_diff_from_filename)
dl = self._diff_files(tmp_diff_from_filename, empty_filename,
oldfilename, 'rev:revid:-1', oldspec,
changetype)
diff_lines += dl
self._write_file(newfilename, newspec, tmp_diff_to_filename)
dl = self._diff_files(empty_filename, tmp_diff_to_filename,
newfilename, newspec, 'rev:revid:-1',
changetype)
diff_lines += dl
else:
newrevspec = m.group('revspec')
parentrevspec = m.group('parentrevspec')
logging.debug('Type %s File %s Old %s New %s',
changetype, filename, parentrevspec, newrevspec)
old_file = new_file = empty_filename
if (changetype in [b'A'] or
(changetype in [b'C'] and
parentrevspec == b'rev:revid:-1')):
# There's only one content to show
self._write_file(filename, newrevspec,
tmp_diff_to_filename)
new_file = tmp_diff_to_filename
elif changetype in [b'C']:
self._write_file(filename, parentrevspec,
tmp_diff_from_filename)
old_file = tmp_diff_from_filename
self._write_file(filename, newrevspec,
tmp_diff_to_filename)
new_file = tmp_diff_to_filename
elif changetype in [b'D']:
self._write_file(filename, parentrevspec,
tmp_diff_from_filename)
old_file = tmp_diff_from_filename
else:
raise SCMError('Unknown change type "%s" for %s'
% (changetype, filename))
dl = self._diff_files(old_file, new_file, filename,
newrevspec, parentrevspec, changetype)
diff_lines += dl
os.unlink(empty_filename)
os.unlink(tmp_diff_from_filename)
os.unlink(tmp_diff_to_filename)
return b''.join(diff_lines)
def _diff_files(self, old_file, new_file, filename, newrevspec,
parentrevspec, changetype):
"""Do the work of producing a diff for Plastic.
Args:
old_file (bytes):
The absolute path to the old file.
new_file (bytes):
The absolute path to the new file.
filename (bytes):
The file in the Plastic workspace.
newrevspec (bytes):
The revid spec of the new file.
parentrevspec (bytes):
The revid spec of the old file.
changetype (bytes):
The change type as a single character string.
Returns:
list of bytes:
The computed diff.
"""
if filename.startswith(self.workspacedir):
filename = filename[len(self.workspacedir):]
# Diff returns "1" if differences were found.
dl = execute(['diff', '-urN', old_file, new_file],
extra_ignore_errors=(1, 2),
results_unicode=False)
# If the input file has ^M characters at end of line, lets ignore them.
dl = dl.replace(b'\r\r\n', b'\r\n')
dl = dl.splitlines(True)
# Special handling for the output of the diff tool on binary files:
# diff outputs "Files a and b differ"
# and the code below expects the output to start with
# "Binary files "
if (len(dl) == 1 and
dl[0].startswith(b'Files %s and %s differ'
% (old_file.encode('utf-8'),
new_file.encode('utf-8')))):
dl = [b'Binary files %s and %s differ\n'
% (old_file.encode('utf-8'),
new_file.encode('utf-8'))]
if dl == [] or dl[0].startswith(b'Binary files '):
if dl == []:
return []
dl.insert(0, b'==== %s (%s) ==%s==\n'
% (filename, newrevspec, changetype))
            dl.append(b'\n')
        else:
            dl[0] = b'--- %s\t%s\n' % (filename, parentrevspec)
            dl[1] = b'+++ %s\t%s\n' % (filename, newrevspec)
            # Not everybody has files that end in a newline. This ensures
            # that the resulting diff file isn't broken.
            if not dl[-1].endswith(b'\n'):
dl.append(b'\n')
return dl
def _write_file(self, filename, filespec, tmpfile):
"""Retrieve a file from Plastic and write it to a temp file.
Args:
filename (bytes):
The filename to fetch.
filespec (bytes):
The revision of the file to fetch.
tmpfile (unicode):
The name of the temporary file to write to.
"""
logging.debug('Writing "%s" (rev %s) to "%s"',
filename.decode('utf-8'),
filespec.decode('utf-8'),
tmpfile)
execute(['cm', 'cat', filespec, '--file=' + tmpfile])
| mit | 1,965,228,939,996,342,800 | 34.368421 | 79 | 0.510714 | false |
Statoil/SegyIO | python/examples/write.py | 1 | 2756 | import sys
import segyio
import numpy as np
def main():
if len( sys.argv ) < 2:
sys.exit("Usage: write.py [file]")
filename = sys.argv[1]
# the mode parameter is passed directly to C's fopen
    # opening the file for writing requires r+, not w, because w would
    # truncate (effectively destroy) the file, while r+ preserves the size
with segyio.open( filename, "r+" ) as src:
# read trace 0, then double all values
trace = src.trace[0]
trace *= 2
# write trace 0 back to disk
src.trace[0] = trace
# read trace 1, but re-use the memory for speed
trace = src.trace[1]
# square all values. the trace is a plain numpy array
trace = np.square(trace, trace)
# write the trace back to disk, but at trace 2
src.trace[2] = trace
# read every other trace, from 10 through 20
        # then write them to every third index from 2 through 12
        # i.e. 2, 5, 8, 11
# slices yield a generator, so only one numpy array is created
for tr, i in zip(src.trace[10:20:2], range(2,13,3)):
src.trace[i] = tr
# iterate over all traces in a file. this is a generator with a shared
# buffer, so it's quite efficient
tracesum = 0
for tr in src.trace:
# accumulate the traces' 30th value
tracesum += tr[30]
print("Trace sum: {}".format(tracesum))
        # copy the iline at 3 into the iline at 2
sum3 = np.sum(src.iline[3])
src.iline[2] = src.iline[3]
# flush to make sure our changes to the file are visible
src.flush()
sum2 = np.sum(src.iline[2])
print("Accumulates of inlines 2 and 3: {} -- {}".format(sum2, sum3))
# ilines too are plain numpy ndarrays, with trace-major addressing
# i.e. iline[2,40] would be yield trace#2's 40th value
iline = src.iline[2]
# since ilines are numpy arrays they also support numpy operations
iline = np.add(iline, src.iline[4])
# lines too have generator support, so we accumulate the 2nd trace's
# 22nd value.
linesum = 0
for line in src.iline:
linesum += line[2,22]
print("Inline sum: {}".format(linesum))
# xline access is identical to iline access
linesum = 0
for line in src.xline:
linesum += line[2,22]
print("Crossline sum: {}".format(linesum))
# accessing a non-existing inline will raise a KeyError
try:
_ = src.iline[5000]
sys.exit("Was able to access non-existing inline")
except KeyError as e:
print(str(e))
if __name__ == '__main__':
main()
| lgpl-3.0 | 8,186,660,944,869,912,000 | 31.809524 | 78 | 0.58164 | false |
oblalex/gnuplot.py-py3k | PlotItems.py | 1 | 26123 | # $Id: PlotItems.py 299 2007-03-30 12:52:17Z mhagger $
# Copyright (C) 1998-2003 Michael Haggerty <[email protected]>
#
# This file is licensed under the GNU Lesser General Public License
# (LGPL). See LICENSE.txt for details.
"""PlotItems.py -- Objects that can be plotted by Gnuplot.
This module contains several types of PlotItems. PlotItems can be
plotted by passing them to a Gnuplot.Gnuplot object. You can derive
your own classes from the PlotItem hierarchy to customize their
behavior.
"""
import os, string, tempfile, types
from io import StringIO
import numpy
import gp, utils, Errors
class _unset:
"""Used to represent unset keyword arguments."""
pass
class PlotItem:
"""Plotitem represents an item that can be plotted by gnuplot.
For the finest control over the output, you can create 'PlotItems'
yourself with additional keyword options, or derive new classes
from 'PlotItem'.
The handling of options is complicated by the attempt to allow
options and their setting mechanism to be inherited conveniently.
Note first that there are some options that can only be set in the
constructor then never modified, and others that can be set in the
constructor and/or modified using the 'set_option()' member
function. The former are always processed within '__init__'. The
latter are always processed within 'set_option', which is called
by the constructor.
'set_option' is driven by a class-wide dictionary called
'_option_list', which is a mapping '{ <option> : <setter> }' from
option name to the function object used to set or change the
option. <setter> is a function object that takes two parameters:
'self' (the 'PlotItem' instance) and the new value requested for
the option. If <setter> is 'None', then the option is not allowed
to be changed after construction and an exception is raised.
Any 'PlotItem' that needs to add options can add to this
dictionary within its class definition. Follow one of the
examples in this file. Alternatively it could override the
'set_option' member function if it needs to do wilder things.
Members:
'_basecommand' -- a string holding the elementary argument that
must be passed to gnuplot's `plot' command for this item;
e.g., 'sin(x)' or '"filename.dat"'.
'_options' -- a dictionary of (<option>,<string>) tuples
corresponding to the plot options that have been set for
this instance of the PlotItem. <option> is the option as
specified by the user; <string> is the string that needs to
be set in the command line to set that option (or None if no
string is needed). Example::
{'title' : ('Data', 'title "Data"'),
'with' : ('linespoints', 'with linespoints')}
"""
# For _option_list explanation, see docstring for PlotItem.
_option_list = {
'axes' : lambda self, axes: self.set_string_option(
'axes', axes, None, 'axes %s'),
'with' : lambda self, with_: self.set_string_option(
'with', with_, None, 'with %s'),
'title' : lambda self, title: self.set_string_option(
'title', title, 'notitle', 'title "%s"'),
}
_option_list['with_'] = _option_list['with']
# order in which options need to be passed to gnuplot:
_option_sequence = [
'binary',
'index', 'every', 'thru', 'using', 'smooth',
'axes', 'title', 'with'
]
def __init__(self, **keyw):
"""Construct a 'PlotItem'.
Keyword options:
'with_=<string>' -- choose how item will be plotted, e.g.,
with_='points 3 3'.
'title=<string>' -- set the title to be associated with the item
in the plot legend.
'title=None' -- choose 'notitle' option (omit item from legend).
Note that omitting the title option is different than setting
'title=None'; the former chooses gnuplot's default whereas the
latter chooses 'notitle'.
"""
self._options = {}
self.set_option(**keyw)
def get_option(self, name):
"""Return the setting of an option. May be overridden."""
try:
return self._options[name][0]
except:
raise KeyError('option %s is not set!' % name)
def set_option(self, **keyw):
"""Set or change a plot option for this PlotItem.
See documentation for '__init__' for information about allowed
options. This function can be overridden by derived classes
to allow additional options, in which case those options will
also be allowed by '__init__' for the derived class. However,
it is easier to define a new '_option_list' variable for the
derived class.
"""
for (option, value) in keyw.items():
try:
setter = self._option_list[option]
except KeyError:
raise Errors.OptionError('%s=%s' % (option,value))
if setter is None:
raise Errors.OptionError(
'Cannot modify %s option after construction!', option)
else:
setter(self, value)
def set_string_option(self, option, value, default, fmt):
"""Set an option that takes a string value."""
if value is None:
self._options[option] = (value, default)
elif type(value) is str:
self._options[option] = (value, fmt % value)
else:
            raise Errors.OptionError('%s=%s' % (option, value,))
def clear_option(self, name):
"""Clear (unset) a plot option. No error if option was not set."""
try:
del self._options[name]
except KeyError:
pass
def get_base_command_string(self):
raise NotImplementedError()
def get_command_option_string(self):
cmd = []
for opt in self._option_sequence:
(val,str) = self._options.get(opt, (None,None))
if str is not None:
cmd.append(str)
return " ".join(cmd)
def command(self):
"""Build the plot command to be sent to gnuplot.
Build and return the plot command, with options, necessary to
display this item. If anything else needs to be done once per
plot, it can be done here too.
"""
return " ".join([
self.get_base_command_string(),
self.get_command_option_string(),
])
def pipein(self, f):
"""Pipe necessary inline data to gnuplot.
If the plot command requires data to be put on stdin (i.e.,
'plot "-"'), this method should put that data there. Can be
overridden in derived classes.
"""
pass
class Func(PlotItem):
"""Represents a mathematical expression to plot.
Func represents a mathematical expression that is to be computed by
gnuplot itself, as if you would type for example::
gnuplot> plot sin(x)
into gnuplot itself. The argument to the contructor is a string
that should be a mathematical expression. Example::
g.plot(Func('sin(x)', with_='line 3'))
As shorthand, a string passed to the plot method of a Gnuplot
object is also treated as a Func::
g.plot('sin(x)')
"""
def __init__(self, function, **keyw):
PlotItem.__init__(self, **keyw)
self.function = function
def get_base_command_string(self):
return self.function
class _FileItem(PlotItem):
"""A PlotItem representing a file that contains gnuplot data.
This class is not meant for users but rather as a base class for
other types of FileItem.
"""
_option_list = PlotItem._option_list.copy()
_option_list.update({
'binary' : lambda self, binary: self.set_option_binary(binary),
'index' : lambda self, value: self.set_option_colonsep('index', value),
'every' : lambda self, value: self.set_option_colonsep('every', value),
'using' : lambda self, value: self.set_option_colonsep('using', value),
'smooth' : lambda self, smooth: self.set_string_option(
'smooth', smooth, None, 'smooth %s'
),
})
def __init__(self, filename, **keyw):
"""Represent a PlotItem that gnuplot treates as a file.
This class holds the information that is needed to construct
the plot command line, including options that are specific to
file-like gnuplot input.
<filename> is a string representing the filename to be passed
to gnuplot within quotes. It may be the name of an existing
file, '-' for inline data, or the name of a named pipe.
Keyword arguments:
'using=<int>' -- plot that column against line number
'using=<tuple>' -- plot using a:b:c:d etc. Elements in
the tuple that are None are output as the empty
string.
'using=<string>' -- plot `using <string>' (allows gnuplot's
arbitrary column arithmetic)
'every=<value>' -- plot 'every <value>'. <value> is
formatted as for 'using' option.
'index=<value>' -- plot 'index <value>'. <value> is
formatted as for 'using' option.
'binary=<boolean>' -- data in the file is in binary format
(this option is only allowed for grid data for splot).
'smooth=<string>' -- smooth the data. Option should be
'unique', 'csplines', 'acsplines', 'bezier', or
'sbezier'.
The keyword arguments recognized by 'PlotItem' can also be
used here.
Note that the 'using' option is interpreted by gnuplot, so
columns must be numbered starting with 1.
By default, gnuplot uses the name of the file plus any 'using'
option as the dataset title. If you want another title, set
it explicitly using the 'title' option.
"""
self.filename = filename
PlotItem.__init__(self, **keyw)
def get_base_command_string(self):
return gp.double_quote_string(self.filename)
def set_option_colonsep(self, name, value):
if value is None:
self.clear_option(name)
elif type(value) in [str, int]:
self._options[name] = (value, '%s %s' % (name, value,))
elif type(value) is tuple:
subopts = []
for subopt in value:
if subopt is None:
subopts.append('')
else:
subopts.append(str(subopt))
self._options[name] = (
value,
'%s %s' % (name, ":".join(subopts),),
)
else:
raise Errors.OptionError('%s=%s' % (name, value,))
def set_option_binary(self, binary):
if binary:
if not gp.GnuplotOpts.recognizes_binary_splot:
raise Errors.OptionError(
'Gnuplot.py is currently configured to reject binary data')
self._options['binary'] = (1, 'binary')
else:
self._options['binary'] = (0, None)
class _NewFileItem(_FileItem):
def __init__(self, content, filename=None, **keyw):
binary = keyw.get('binary', 0)
if binary:
mode = 'wb'
else:
mode = 'w'
if filename:
# This is a permanent file
self.temp = False
f = open(filename, mode)
else:
self.temp = True
if hasattr(tempfile, 'mkstemp'):
# Use the new secure method of creating temporary files:
(fd, filename,) = tempfile.mkstemp(
suffix='.gnuplot', text=(not binary)
)
f = os.fdopen(fd, mode)
else:
# for backwards compatibility to pre-2.3:
filename = tempfile.mktemp()
f = open(filename, mode)
f.write(content)
f.close()
# If the user hasn't specified a title, set it to None so
# that the name of the temporary file is not used:
if self.temp and 'title' not in keyw:
keyw['title'] = None
_FileItem.__init__(self, filename, **keyw)
def __del__(self):
if self.temp:
os.unlink(self.filename)
class _InlineFileItem(_FileItem):
"""A _FileItem that actually indicates inline data.
"""
def __init__(self, content, **keyw):
# If the user hasn't specified a title, set it to None so that
# '-' is not used:
if 'title' not in keyw:
keyw['title'] = None
if keyw.get('binary', 0):
raise Errors.OptionError('binary inline data is not supported')
_FileItem.__init__(self, '-', **keyw)
if content[-1] == '\n':
self.content = content
else:
self.content = content + '\n'
def pipein(self, f):
f.write(self.content + 'e\n')
if gp.GnuplotOpts.support_fifo:
import threading
class _FIFOWriter(threading.Thread):
"""Create a FIFO (named pipe), write to it, then delete it.
The writing takes place in a separate thread so that the main
thread is not blocked. The idea is that once the writing is
finished we know that gnuplot is done with the data that were in
the file so we can delete the file. This technique removes the
ambiguity about when the temporary files should be deleted.
Since the tempfile module does not provide an easy, secure way
to create a FIFO without race conditions, we instead create a
temporary directory using mkdtemp() then create the FIFO
within that directory. When the writer thread has written the
full information to the FIFO, it deletes both the FIFO and the
temporary directory that contained it.
"""
def __init__(self, content, mode='w'):
self.content = content
self.mode = mode
if hasattr(tempfile, 'mkdtemp'):
# Make the file within a temporary directory that is
# created securely:
self.dirname = tempfile.mkdtemp(suffix='.gnuplot')
self.filename = os.path.join(self.dirname, 'fifo')
else:
# For backwards compatibility pre-2.3, just use
# mktemp() to create filename:
self.dirname = None
self.filename = tempfile.mktemp()
threading.Thread.__init__(
self,
name=('FIFO Writer for %s' % (self.filename,)),
)
os.mkfifo(self.filename)
self.start()
def run(self):
f = open(self.filename, self.mode)
f.write(self.content)
f.close()
os.unlink(self.filename)
if self.dirname is not None:
os.rmdir(self.dirname)
class _FIFOFileItem(_FileItem):
"""A _FileItem based on a FIFO (named pipe).
This class depends on the availablity of os.mkfifo(), which only
exists under Unix.
"""
def __init__(self, content, **keyw):
# If the user hasn't specified a title, set it to None so that
# the name of the temporary FIFO is not used:
if 'title' not in keyw:
keyw['title'] = None
_FileItem.__init__(self, '', **keyw)
self.content = content
if keyw.get('binary', 0):
self.mode = 'wb'
else:
self.mode = 'w'
def get_base_command_string(self):
"""Create the gnuplot command for plotting this item.
The basecommand is different each time because each FIFOWriter
creates a new FIFO.
"""
# Create a new FIFO and a thread to write to it. Retrieve the
# filename of the FIFO to be used in the basecommand.
fifo = _FIFOWriter(self.content, self.mode)
return gp.double_quote_string(fifo.filename)
def File(filename, **keyw):
"""Construct a _FileItem object referring to an existing file.
This is a convenience function that just returns a _FileItem that
wraps the filename.
<filename> is a string holding the filename of an existing file.
The keyword arguments are the same as those of the _FileItem
constructor.
"""
if type(filename) is not str:
raise Errors.OptionError(
'Argument (%s) must be a filename' % (filename,)
)
return _FileItem(filename, **keyw)
def Data(*data, **keyw):
"""Create and return a _FileItem representing the data from *data.
Create a '_FileItem' object (which is a type of 'PlotItem') out of
one or more Float Python numpy arrays (or objects that can be
converted to a float numpy array). If the routine is passed a
    single array with multiple dimensions, then the last index ranges over
the values comprising a single data point (e.g., [<x>, <y>,
<sigma>]) and the rest of the indices select the data point. If
passed a single array with 1 dimension, then each point is
considered to have only one value (i.e., by default the values
will be plotted against their indices). If the routine is passed
more than one array, they must have identical shapes, and then
each data point is composed of one point from each array. E.g.,
'Data(x,x**2)' is a 'PlotItem' that represents x squared as a
function of x. For the output format, see the comments for
'write_array()'.
How the data are written to gnuplot depends on the 'inline'
argument and preference settings for the platform in use.
Keyword arguments:
'cols=<tuple>' -- write only the specified columns from each
data point to the file. Since cols is used by python, the
columns should be numbered in the python style (starting
from 0), not the gnuplot style (starting from 1).
'inline=<bool>' -- transmit the data to gnuplot 'inline'
rather than through a temporary file. The default is the
value of gp.GnuplotOpts.prefer_inline_data.
'filename=<string>' -- save data to a permanent file.
The keyword arguments recognized by '_FileItem' can also be used
here.
"""
if len(data) == 1:
# data was passed as a single structure
data = utils.float_array(data[0])
# As a special case, if passed a single 1-D array, then it is
# treated as one value per point (by default, plotted against
# its index):
if len(data.shape) == 1:
data = data[:,numpy.newaxis]
else:
# data was passed column by column (for example,
# Data(x,y)); pack it into one big array (this will test
# that sizes are all the same):
data = utils.float_array(data)
dims = len(data.shape)
# transpose so that the last index selects x vs. y:
data = numpy.transpose(data, (dims-1,) + tuple(range(dims-1)))
if 'cols' in keyw:
cols = keyw['cols']
del keyw['cols']
if isinstance(cols, int):
cols = (cols,)
data = numpy.take(data, cols, -1)
if 'filename' in keyw:
filename = keyw['filename'] or None
del keyw['filename']
else:
filename = None
if 'inline' in keyw:
inline = keyw['inline']
del keyw['inline']
if inline and filename:
raise Errors.OptionError(
'cannot pass data both inline and via a file'
)
else:
inline = (not filename) and gp.GnuplotOpts.prefer_inline_data
# Output the content into a string:
f = StringIO()
utils.write_array(f, data)
content = f.getvalue()
if inline:
return _InlineFileItem(content, **keyw)
elif filename:
return _NewFileItem(content, filename=filename, **keyw)
elif gp.GnuplotOpts.prefer_fifo_data:
return _FIFOFileItem(content, **keyw)
else:
return _NewFileItem(content, **keyw)
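# Usage sketch (mirrors the docstring above; adds no new features):
#     x = numpy.arange(10)
#     d = Data(x, x**2, title='squares', with_='lines')
#     g = Gnuplot.Gnuplot()
#     g.plot(d)
# would plot x squared against x, using whichever transport (inline data,
# temporary file or FIFO) the gp.GnuplotOpts preferences select.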
def GridData(
data, xvals=None, yvals=None, inline=_unset, filename=None, **keyw
):
"""Return a _FileItem representing a function of two variables.
'GridData' represents a function that has been tabulated on a
rectangular grid. The data are written to a file; no copy is kept
in memory.
Arguments:
'data' -- the data to plot: a 2-d array with dimensions
(numx,numy).
'xvals' -- a 1-d array with dimension 'numx'
'yvals' -- a 1-d array with dimension 'numy'
'binary=<bool>' -- send data to gnuplot in binary format?
'inline=<bool>' -- send data to gnuplot "inline"?
'filename=<string>' -- save data to a permanent file.
Note the unusual argument order! The data are specified *before*
the x and y values. (This inconsistency was probably a mistake;
after all, the default xvals and yvals are not very useful.)
'data' must be a data array holding the values of a function
f(x,y) tabulated on a grid of points, such that 'data[i,j] ==
f(xvals[i], yvals[j])'. If 'xvals' and/or 'yvals' are omitted,
integers (starting with 0) are used for that coordinate. The data
are written to a temporary file; no copy of the data is kept in
memory.
If 'binary=0' then the data are written to a datafile as 'x y
f(x,y)' triplets (y changes most rapidly) that can be used by
gnuplot's 'splot' command. Blank lines are included each time the
value of x changes so that gnuplot knows to plot a surface through
the data.
If 'binary=1' then the data are written to a file in a binary
format that 'splot' can understand. Binary format is faster and
usually saves disk space but is not human-readable. If your
version of gnuplot doesn't support binary format (it is a
recently-added feature), this behavior can be disabled by setting
the configuration variable
'gp.GnuplotOpts.recognizes_binary_splot=0' in the appropriate
gp*.py file.
Thus if you have three arrays in the above format and a Gnuplot
instance called g, you can plot your data by typing
'g.splot(Gnuplot.GridData(data,xvals,yvals))'.
"""
# Try to interpret data as an array:
data = utils.float_array(data)
try:
(numx, numy) = data.shape
except ValueError:
raise Errors.DataError('data array must be two-dimensional')
if xvals is None:
xvals = numpy.arange(numx)
else:
xvals = utils.float_array(xvals)
if xvals.shape != (numx,):
raise Errors.DataError(
'The size of xvals must be the same as the size of '
'the first dimension of the data array')
if yvals is None:
yvals = numpy.arange(numy)
else:
yvals = utils.float_array(yvals)
if yvals.shape != (numy,):
raise Errors.DataError(
'The size of yvals must be the same as the size of '
'the second dimension of the data array')
# Binary defaults to true if recognizes_binary_plot is set;
# otherwise it is forced to false.
binary = keyw.get('binary', 1) and gp.GnuplotOpts.recognizes_binary_splot
keyw['binary'] = binary
if inline is _unset:
inline = (
(not binary) and (not filename)
and gp.GnuplotOpts.prefer_inline_data
)
elif inline and filename:
raise Errors.OptionError(
'cannot pass data both inline and via a file'
)
# xvals, yvals, and data are now all filled with arrays of data.
if binary:
if inline:
raise Errors.OptionError('binary inline data not supported')
# write file in binary format
# It seems that the gnuplot documentation for binary mode
# disagrees with its actual behavior (as of v. 3.7). The
# documentation has the roles of x and y exchanged. We ignore
# the documentation and go with the code.
mout = numpy.zeros((numy + 1, numx + 1), numpy.float32)
mout[0,0] = numx
mout[0,1:] = xvals.astype(numpy.float32)
mout[1:,0] = yvals.astype(numpy.float32)
try:
# try copying without the additional copy implied by astype():
mout[1:,1:] = numpy.transpose(data)
except:
# if that didn't work then downcasting from double
# must be necessary:
mout[1:,1:] = numpy.transpose(data.astype(numpy.float32))
content = mout.tostring()
if (not filename) and gp.GnuplotOpts.prefer_fifo_data:
return _FIFOFileItem(content, **keyw)
else:
return _NewFileItem(content, filename=filename, **keyw)
else:
# output data to file as "x y f(x)" triplets. This
# requires numy copies of each x value and numx copies of
# each y value. First reformat the data:
set = numpy.transpose(
numpy.array(
(numpy.transpose(numpy.resize(xvals, (numy, numx))),
numpy.resize(yvals, (numx, numy)),
data)), (1,2,0))
# Now output the data with the usual routine. This will
# produce data properly formatted in blocks separated by blank
# lines so that gnuplot can connect the points into a grid.
f = StringIO()
utils.write_array(f, set)
content = f.getvalue()
if inline:
return _InlineFileItem(content, **keyw)
elif filename:
return _NewFileItem(content, filename=filename, **keyw)
elif gp.GnuplotOpts.prefer_fifo_data:
return _FIFOFileItem(content, **keyw)
else:
return _NewFileItem(content, **keyw)
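# Usage sketch (follows the docstring above; the sample arrays are
# hypothetical):
#     x = numpy.arange(20)
#     y = numpy.arange(30)
#     z = numpy.outer(numpy.sin(x / 5.0), numpy.cos(y / 5.0))  # shape (20, 30)
#     g.splot(GridData(z, x, y, binary=0))
# tabulates z[i, j] == f(x[i], y[j]) and hands the grid to gnuplot's splot.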
| lgpl-2.1 | 6,933,519,582,789,603,000 | 33.87717 | 79 | 0.598515 | false |
wtpayne/hiai | a3_src/h70_internal/da/lwc/env.py | 1 | 8714 | # -*- coding: utf-8 -*-
"""
Local working copy runtime environment control.
---
type:
python_module
validation_level:
v00_minimum
protection:
k00_public
copyright:
"Copyright 2016 High Integrity Artificial Intelligence Systems"
license:
"Licensed under the Apache License, Version 2.0 (the License);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an AS IS BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."
...
"""
import os
import platform
import da.lwc.discover
import da.register
# -----------------------------------------------------------------------------
def api_path(dependency_id,
iface_name = 'lib_python3',
register = None,
dirpath_lwc_root = None):
"""
Return the path to the specified api.
"""
return _iface_path(
dependency_id = dependency_id,
iface_type = 'api',
iface_name = iface_name,
register = register,
dirpath_lwc_root = dirpath_lwc_root)
# -----------------------------------------------------------------------------
def cli_path(dependency_id,
application_name,
register = None,
dirpath_lwc_root = None):
"""
Return the path to the specified cli binary.
"""
return _iface_path(
dependency_id = dependency_id,
iface_type = 'cli',
iface_name = application_name,
register = register,
dirpath_lwc_root = dirpath_lwc_root)
# -----------------------------------------------------------------------------
def gui_path(dependency_id,
application_name,
register = None,
dirpath_lwc_root = None):
"""
Return the path to the specified gui binary.
"""
return _iface_path(
dependency_id = dependency_id,
iface_type = 'gui',
iface_name = application_name,
register = register,
dirpath_lwc_root = dirpath_lwc_root)
# -----------------------------------------------------------------------------
def _iface_path(dependency_id,
iface_type,
iface_name,
register = None,
dirpath_lwc_root = None):
"""
Return the path for the specified interface type and dependency id.
"""
if register is None:
register = dependencies_register(
dirpath_lwc_root = dirpath_lwc_root)
try:
dependency_data = register[dependency_id]
except KeyError:
raise RuntimeError(
'Could not identify dependency: "{dep}".'.format(
dep = dependency_id))
dirpath_env = da.lwc.discover.path('current_env',
dirpath_lwc_root = dirpath_lwc_root)
try:
relpath_cli = dependency_data[iface_type][iface_name]
except KeyError:
raise RuntimeError(
'Dependency "{dep}" has no {type} with "{name}".'.format(
dep = dependency_id,
type = iface_type,
name = iface_name))
return os.path.normpath(os.path.join(dirpath_env,
dependency_data['dirname'],
dependency_data['policy'],
relpath_cli))
# -----------------------------------------------------------------------------
@da.memo.var
def dependencies_register(dirpath_lwc_root = None):
"""
Return information about the location of dependencies.
"""
# Add some calculated file-paths to the dependency map.
dirpath_curr_env = da.lwc.discover.path('current_env', dirpath_lwc_root)
rootpath_env = da.lwc.discover.path('env', dirpath_lwc_root)
rootpath_env_src = os.path.join(rootpath_env, 'src')
register = da.register.load('dependencies')
for (key, dep) in register.items():
dirname_dep = dep['dirname']
dirname_pol = dep['policy']
dirpath_src = os.path.join(rootpath_env_src, dirname_dep, dirname_pol)
dirpath_dep = os.path.join(dirpath_curr_env, dirname_dep, dirname_pol)
register[key]['name'] = key
register[key]['dirpath_src'] = dirpath_src
register[key]['dirpath_dep'] = dirpath_dep
return register
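# Hypothetical entry sketch (the real keys come from the dependencies
# register file): after the loop above, register['some_lib'] might look like
#     {'name':        'some_lib',
#      'dirname':     'some_lib',
#      'policy':      'some_lib_1.0.0',
#      'api':         {'lib_python3': 'lib/python3'},
#      'dirpath_src': '<lwc>/env/src/some_lib/some_lib_1.0.0',
#      'dirpath_dep': '<lwc>/env/<current_env>/some_lib/some_lib_1.0.0'}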
# -----------------------------------------------------------------------------
# TODO: Refactor to reduce number of branches.
# (Rule disabled to facilitate tightening of the threshold)
@da.memo.var
def python_import_path(iface_name = None, # pylint: disable=R0912
dirpath_lwc_root = None):
"""
Return a list of Python import paths configured for the local working copy.
Dependency information for the current local
working copy is stored in the dependency map
file. Different directories are used to store
python slibraries for python2 and python3.
"""
if iface_name is None:
iface_name = _iface_for_current_python_rt()
dirpath_env = da.lwc.discover.path(
'current_env',
dirpath_lwc_root = dirpath_lwc_root)
register = dependencies_register(
dirpath_lwc_root = dirpath_lwc_root)
# python_path for the specified iface.
# Replicates some of the logic in function
# addpackage in site.py
#
python_path = []
for (_, dependency_data) in register.items():
try:
relpath_iface = dependency_data['api'][iface_name]
except KeyError:
continue
dirpath_package = os.path.normpath(
os.path.join(
dirpath_env,
dependency_data['dirname'],
dependency_data['policy'],
relpath_iface))
if not os.path.isdir(dirpath_package):
continue
eggs = [os.path.join(dirpath_package, name)
for name in os.listdir(dirpath_package)
if name.endswith('.egg')]
if eggs:
python_path.extend(eggs)
else:
python_path.append(dirpath_package)
# All top level directories from src are added to the python_path
dirpath_src = da.lwc.discover.path(
key = 'src',
dirpath_lwc_root = dirpath_lwc_root)
for (_, dir_list, _) in os.walk(dirpath_src):
for name in dir_list:
if name.startswith('.'):
continue
python_path.append(os.path.join(dirpath_src, name))
break
# Add system python as well.
#
# TODO: !WARNING! !DANGEROUS! !REMOVE AS SOON AS POSSIBLE!
#
if iface_name == 'lib_python3':
python_path.append('/usr/lib/python3.4')
python_path.append('/usr/lib/python3.4/plat-x86_64-linux-gnu')
python_path.append('/usr/lib/python3.4/lib-dynload')
python_path.append('/usr/local/lib/python3.4/dist-packages')
python_path.append('/usr/lib/python3/dist-packages')
return python_path
# -----------------------------------------------------------------------------
def _iface_for_current_python_rt():
"""
Return a library interface id compatible with the current Python runtime.
The interface id is used to determine which
library version to import, so we can switch
between python 2.x and python 3.x if required.
"""
(major, minor, _) = platform.python_version_tuple()
try:
return {
'2': 'lib_python2',
'3': 'lib_python3'
}[major]
except KeyError:
raise RuntimeError(
'Python {major}.{minor} not supported.'.format(major = major,
minor = minor))
| apache-2.0 | -3,498,644,829,901,860,400 | 33.995984 | 79 | 0.506197 | false |
mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/config/cr/crvserver_binding.py | 1 | 4725 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class crvserver_binding(base_resource):
""" Binding class showing the resources that can be bound to crvserver_binding.
"""
def __init__(self) :
self._name = ""
self.crvserver_filterpolicy_binding = []
self.crvserver_cmppolicy_binding = []
self.crvserver_lbvserver_binding = []
self.crvserver_policymap_binding = []
self.crvserver_cspolicy_binding = []
self.crvserver_crpolicy_binding = []
@property
def name(self) :
"""Name of a cache redirection virtual server about which to display detailed information.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of a cache redirection virtual server about which to display detailed information.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def crvserver_policymap_bindings(self) :
"""policymap that can be bound to crvserver.
"""
try :
return self._crvserver_policymap_binding
except Exception as e:
raise e
@property
def crvserver_lbvserver_bindings(self) :
"""lbvserver that can be bound to crvserver.
"""
try :
return self._crvserver_lbvserver_binding
except Exception as e:
raise e
@property
def crvserver_filterpolicy_bindings(self) :
"""filterpolicy that can be bound to crvserver.
"""
try :
return self._crvserver_filterpolicy_binding
except Exception as e:
raise e
@property
def crvserver_cmppolicy_bindings(self) :
"""cmppolicy that can be bound to crvserver.
"""
try :
return self._crvserver_cmppolicy_binding
except Exception as e:
raise e
@property
def crvserver_cspolicy_bindings(self) :
"""cspolicy that can be bound to crvserver.
"""
try :
return self._crvserver_cspolicy_binding
except Exception as e:
raise e
@property
def crvserver_crpolicy_bindings(self) :
"""crpolicy that can be bound to crvserver.
"""
try :
return self._crvserver_crpolicy_binding
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(crvserver_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.crvserver_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(self, service, name) :
""" Use this API to fetch crvserver_binding resource.
"""
try :
if type(name) is not list :
obj = crvserver_binding()
obj.name = name
response = obj.get_resource(service)
			else :
				response = [None] * len(name)
				if name and len(name) > 0 :
obj = [crvserver_binding() for _ in range(len(name))]
for i in range(len(name)) :
obj[i].name = name[i];
response[i] = obj[i].get_resource(service)
return response
except Exception as e:
raise e
class crvserver_binding_response(base_response) :
def __init__(self, length=1) :
self.crvserver_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.crvserver_binding = [crvserver_binding() for _ in range(length)]
| apache-2.0 | -5,411,408,159,442,030,000 | 27.810976 | 119 | 0.702646 | false |
jfouca/confucius | confucius/models/conference.py | 1 | 7188 | from django.db import models
from django.db.models import Q
from django.db.models.signals import pre_save, post_delete
from django.dispatch import receiver
from datetime import datetime
from confucius.models import ConfuciusModel, User
class Action(ConfuciusModel):
name = models.CharField(max_length=155, verbose_name='Action')
def __unicode__(self):
return self.name
class Alert(ConfuciusModel):
title = models.CharField(max_length=100, default=None)
content = models.TextField(default=None)
conference = models.ForeignKey('Conference')
trigger_date = models.DateField(verbose_name='trigger date', blank=True, null=True)
reminder = models.ForeignKey('Reminder', blank=True, null=True)
event = models.ForeignKey('Event', blank=True, null=True)
action = models.ForeignKey('Action', blank=True, null=True)
roles = models.ManyToManyField('Role', blank=True)
class Meta(ConfuciusModel.Meta):
unique_together = ('title', 'conference',)
def __unicode__(self):
return self.title
def is_trigger(self):
return self.action is None and self.reminder is None
def is_reminder(self):
return self.action is None and self.trigger_date is None
def is_action(self):
return self.reminder is None and self.trigger_date is None
class Conference(ConfuciusModel):
title = models.CharField(max_length=100, unique=True)
is_open = models.BooleanField(default=False)
has_finalize_paper_selections = models.BooleanField(default=False)
start_date = models.DateField()
submissions_start_date = models.DateField()
submissions_end_date = models.DateField()
reviews_start_date = models.DateField()
reviews_end_date = models.DateField()
url = models.URLField(blank=True)
members = models.ManyToManyField(User, through='Membership')
domains = models.ManyToManyField('Domain', related_name='conferences')
access_key = models.CharField(max_length=8)
maximum_score = models.IntegerField(default=10)
minimum_reviews = models.IntegerField(default=2)
enable_reviewer_confidence = models.BooleanField(default=True)
def __unicode__(self):
return self.title
@models.permalink
def get_absolute_url(self):
return ('confucius.views.conference_access', (),
{'conference_pk': self.pk, 'access_key': self.access_key})
def save(self, *args, **kwargs):
from confucius.utils import random_string
if self.pk is None:
self.access_key = random_string(8)
super(Conference, self).save(*args, **kwargs)
def are_submissions_over(self):
return datetime.now().date() > self.submissions_end_date
def are_submissions_notstarted(self):
return datetime.now().date() < self.submissions_start_date
def are_reviews_notstarted(self):
return datetime.now().date() < self.reviews_start_date
def are_reviews_over(self):
return self.has_finalize_paper_selections or datetime.now().date() > self.reviews_end_date
def is_started(self):
return datetime.now().date() > self.start_date
class Domain(ConfuciusModel):
name = models.CharField(max_length=50, unique=True)
def __unicode__(self):
return self.name
class Event(ConfuciusModel):
name = models.CharField(max_length=155, verbose_name='linked to')
def __unicode__(self):
return self.name
class Membership(ConfuciusModel):
user = models.ForeignKey(User, related_name='memberships')
conference = models.ForeignKey(Conference)
roles = models.ManyToManyField('Role')
domains = models.ManyToManyField(Domain)
last_accessed = models.BooleanField(default=False)
class Meta(ConfuciusModel.Meta):
unique_together = ('user', 'conference')
def set_last_accessed(self):
Membership.objects.filter(user=self.user).update(last_accessed=False)
self.last_accessed = True
self.save()
def _has_role(self, code):
try:
self.roles.get(code=code)
return True
except:
return False
def has_chair_role(self):
return self._has_role('C')
def has_reviewer_role(self):
return self._has_role('R')
def has_submitter_role(self):
return self._has_role('S')
@receiver(pre_save, sender=Membership, dispatch_uid="Membership_identifier")
def my_user_handler(sender, instance, **kwargs):
conference = instance.conference
user_pre_save = instance.user
alerts = Alert.objects.filter((Q(action=1) | Q(action=2)))
for alert in alerts:
if alert.action.pk == 1 and instance.pk is None:
try:
Membership.objects.get(conference=conference, user=user_pre_save)
except:
my_send_mail(alert, conference)
elif alert.action.pk == 2 and instance.pk is not None:
try:
Membership.objects.get(conference=alert.conference, user=user_pre_save)
except:
my_send_mail(alert, conference)
def my_send_mail(alert, conference):
from django.core.mail import send_mail
for role in alert.roles.all():
memberships_list = Membership.objects.filter(roles=role, conference=conference).all()
users_email = [unicode(membership.user.email) for membership in memberships_list]
try:
send_mail("[Confucius Alert] " + alert.title, alert.content, '[email protected]', users_email, fail_silently=False)
except:
print "Error occured during email sending process. Please check your SMTP settings"
class MessageTemplate(ConfuciusModel):
title = models.CharField(max_length=100, default=None)
content = models.TextField(default=None)
conference = models.ForeignKey(Conference, related_name="messages_templates")
class Meta(ConfuciusModel.Meta):
unique_together = ('title', 'conference')
def __unicode__(self):
return self.title
class Reminder(ConfuciusModel):
value = models.PositiveIntegerField()
name = models.CharField(max_length=155, verbose_name='reminder')
class Meta(ConfuciusModel.Meta):
unique_together = ('value', 'name')
def __unicode__(self):
return self.name
class Role(ConfuciusModel):
code = models.CharField(max_length=1)
name = models.CharField(max_length=9)
def __unicode__(self):
return self.name
class Invitation(ConfuciusModel):
user = models.ForeignKey(User)
conference = models.ForeignKey(Conference)
roles = models.ManyToManyField(Role)
decision = models.CharField(max_length=1, choices=(
('A', 'Accepted'),
('R', 'Refused'),
('W', 'Waiting for response')
), default='W')
key = models.CharField(max_length=64, unique=True)
class Meta(ConfuciusModel.Meta):
unique_together = ('user', 'conference')
def _decision(self, code):
self.decision = code
self.save()
def pending(self):
return self.decision == 'W'
def refuse(self):
self._decision('R')
def accept(self):
self._decision('A')
| bsd-3-clause | -5,912,912,759,637,472,000 | 31.378378 | 139 | 0.664719 | false |
rajpushkar83/cloudmesh | cloudmesh/management/project.py | 1 | 12236 | from mongoengine import *
from mongoengine.context_managers import switch_db
from datetime import datetime
import hashlib
import uuid
from user import User, Users
# from comittee import Committee
from pprint import pprint
from cloudmeshobject import CloudmeshObject
from cloudmesh_base.ConfigDict import ConfigDict
from cloudmesh_base.locations import config_file
from cloudmesh.config.cm_config import get_mongo_db, get_mongo_dbname_from_collection, DBConnFactory
def IMPLEMENT():
print "IMPLEMENT ME"
STATUS = ('pending',
'approved',
'completed',
'denied')
CATEGORY = ('Database', 'FutureGrid', 'other')
DISCIPLINE = ('other',)
# see https://ncsesdata.nsf.gov/nsf/srs/webcasp/data/gradstud.htm
# put in discipline.txt and initialize from there through reading the file and codes
#
INSTITUTE_ROLE = ('graduate student',
'undergraduate student',
'staff',
'faculty',
'visitor',
'other')
CLUSTERS = ('india',
'bravo',
'echo',
'delta',
'other', 'None')
SERVICES = ('eucalyptus',
'openstack',
'mpi',
'hadoop',
'mapreduce',
'docker',
'other',
'None')
SOFTWARE = ('HPC', 'other')
PROVISIONING = ('vm',
'baremetal',
'container',
'iaas',
'paas',
'other', 'None')
GRANT_ORG = ('NSF',
'DOE',
'DoD',
'NIH',
'other', 'None')
REQUIRED = False
class Project(CloudmeshObject):
# named connection (not 'default')
dbname = get_mongo_dbname_from_collection("manage")
if dbname:
meta = {'db_alias': dbname}
'''
The project object with its fields. The current fields include
Attributes:
title
abstract
intellectual_merit
broader_impact
use_of_fg
scale_of_use
categories
keywords
primary_discipline
orientation
contact
url
comment
active
projectid
status
lead
managers
members
alumnis
        grant_organization
        grant_id
        grant_url
        results
        agreement_use
        agreement_slides
        agreement_support
        agreement_software
        agreement_documentation
comments
join_open
join_notification
resources_services
resources_software
resources_clusters
resources_provision
'''
# -------------------------------------------------------------------
# Project Information
# -------------------------------------------------------------------
title = StringField(required=REQUIRED)
# -------------------------------------------------------------------
# Project Vocabulary
# -------------------------------------------------------------------
categories = ListField(StringField(choices=CATEGORY), required=REQUIRED)
keywords = ListField(StringField(), required=REQUIRED)
# -------------------------------------------------------------------
# Project Contact
# -------------------------------------------------------------------
# lead_institutional_role = StringField(choices=INSTITUTE_ROLE, required=REQUIRED)
lead = ReferenceField(User)
managers = ListField(StringField())
members = ListField(ReferenceField(User))
alumnis = ListField(StringField())
contact = StringField(required=REQUIRED)
# active_members = lead u managers u members - alumnis
# if not active : active_members = None
# -------------------------------------------------------------------
# Project Details
# -------------------------------------------------------------------
orientation = StringField(required=REQUIRED)
primary_discipline = StringField(choices=DISCIPLINE, required=REQUIRED)
abstract = StringField(required=REQUIRED)
intellectual_merit = StringField(required=REQUIRED)
broader_impact = StringField(required=REQUIRED)
url = URLField(required=REQUIRED)
results = StringField()
# -------------------------------------------------------------------
# Agreements
# -------------------------------------------------------------------
agreement_use = BooleanField()
agreement_slides = BooleanField()
agreement_support = BooleanField()
agreement_software = BooleanField()
agreement_documentation = BooleanField()
# -------------------------------------------------------------------
# Grant Information
# -------------------------------------------------------------------
grant_organization = StringField(choices=GRANT_ORG)
grant_id = StringField()
grant_url = URLField()
# -------------------------------------------------------------------
# Resources
# -------------------------------------------------------------------
resources_services = ListField(
StringField(choices=SERVICES), required=REQUIRED)
resources_software = ListField(
StringField(choices=SOFTWARE), required=REQUIRED)
resources_clusters = ListField(
StringField(choices=CLUSTERS), required=REQUIRED)
resources_provision = ListField(
StringField(choices=PROVISIONING), required=REQUIRED)
comment = StringField()
use_of_fg = StringField(required=REQUIRED)
scale_of_use = StringField(required=REQUIRED)
# -------------------------------------------------------------------
# Other
# -------------------------------------------------------------------
comments = StringField()
# -------------------------------------------------------------------
# Project Membership Management
# -------------------------------------------------------------------
join_open = BooleanField()
join_notification = BooleanField()
# -------------------------------------------------------------------
# Location
# -------------------------------------------------------------------
loc_name = StringField()
loc_street = StringField()
loc_additional = StringField()
loc_state = StringField()
loc_country = StringField()
# example search in a list field
# Project.objects(categories__contains='education')
active = BooleanField(required=REQUIRED)
projectid = UUIDField()
status = StringField(choices=STATUS, required=REQUIRED)
# maybe we do not need active as this may be covered in status
# -------------------------------------------------------------------
# Project Comittee: contains all the information about the projects committee
# -------------------------------------------------------------------
# comittee = ReferenceField(Committee)
# BUG how can we add also arbitray info in case of other, mabe ommit
# choices
def to_json(self):
"""prints the project as a json object"""
d = {
"title": self.title,
"abstract": self.abstract,
"intellectual_merit": self.intellectual_merit,
"broader_impact": self.broader_impact,
"use_of_fg": self.use_of_fg,
"scale_of_use": self.scale_of_use,
"categories": self.categories,
"keywords": self.keywords,
"primary_discipline": self.primary_discipline,
"orientation": self.orientation,
"contact": self.contact,
"url": self.url,
"active": self.active,
"status": self.status,
"lead": self.lead,
"members": self.members,
"resources_services": self.resources_services,
"resources_software": self.resources_software,
"resources_clusters": self.resources_clusters,
"resources_provision": self.resources_provision
}
return d
def __str__(self):
'''
printing the object as a string
'''
d = self.to_json()
return str(d)
class Projects(object):
'''
    convenience object to manage multiple projects
'''
def __init__(self):
get_mongo_db("manage", DBConnFactory.TYPE_MONGOENGINE)
self.projects = Project.objects()
self.users = User.objects()
def __str__(self):
'''
not implemented
'''
IMPLEMENT()
def find(self):
return Project.objects()
def objects(self):
'''
returns the projects
'''
return Project.objects()
def save(self, project):
        '''adds a project to the database but only after it has been verified
:param project: the project id
:type project: uuid
'''
project.save()
def add_user(self, user_name, project, role):
'''
Adds a member to the project.
:param role: the role of the user
:type role: String
:param user_name: the username
:type user_name: String
:param project: the project id
:type project: uuid
'''
"""adds members to a particular project"""
users = User.objects(user_name=user_name)
        if users.count() == 1:
            user = users[0]
            if role == "member":
                project.members.append(user)
            elif role == "lead":
                project.lead = user
            elif role == "alumni":
                project.alumnis.append(user_name)
else:
print "ERROR: The user `{0}` has not registered with FutureGrid".format(user_name)
def find_users(self, project, role):
'''returns all the members of a particular project
:param role: the role of the user
:type role: String
:param project: the project id
:type project: uuid
'''
        if role == "member":
            return project.members
        elif role == "lead":
            return project.lead
        elif role == "alumni":
            return project.alumnis
def find_by_id(self, id):
'''
finds projects by if
:param id: the project id
:type id: uuid
'''
"""Finds a project by the given id"""
found = Project.objects(projectid=id)
if found.count() > 0:
return found[0].to_json()
else:
return None
# User ID or project ID
def find_by_category(self, category):
'''
find the project by category
:param category: the category
:type category: String
'''
"""Finds and returns all project in that category"""
found = Project.objects(categories=category)
if found.count() > 0:
return found[0].to_json()
else:
return None
def find_by_keyword(self, keyword):
'''
finds a projects matching a keyword
:param keyword: a keyword
:type keyword: String
'''
"""Finds and returns all projects with the entered keyword"""
        found = Project.objects(keywords=keyword)
if found.count() > 0:
return found[0].to_json()
else:
return None
def add(self, project):
'''
adds a project
:param project: the username
:type project: String
'''
print "PPPPPP", project
if not project.status:
project.status = 'pending'
if (project.projectid is None) or (project.projectid == ""):
found = False
proposedid = None
# while not found:
# proposedid = uuid.uuid4()
# result = Project.objects(projectid=proposedid)
# print "PPPPP", result
# found = result.count() > 0
# print result.count()
project.projectid = proposedid
else:
print "UUUUUU -{0}-".format(project.projectid)
print "UUID", project.projectid
project.save()
def clear(self):
"""removes all projects from the database"""
for project in Project.objects:
project.delete()
| apache-2.0 | -4,249,167,181,425,727,000 | 28.990196 | 100 | 0.505721 | false |
berkeley-stat159/project-epsilon | code/utils/scripts/t_test_plot_script.py | 1 | 3981 | """
Purpose:
-----------------------------------------------------------
This script creates graphs for t-test for 4 conditions
For each subject each run each condition, plot the t statistics
-----------------------------------------------------------
"""
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), "../functions/"))
from t_stat import *
from smoothing import *
from matplotlib import colors
from plot_mosaic import *
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import matplotlib
# Create the necessary directories if they do not exist
dirs = ['../../../fig','../../../fig/t-test']
for d in dirs:
if not os.path.exists(d):
os.makedirs(d)
# locate the different paths
project_path = '../../../'
data_path = project_path + 'data/'
txt_path = project_path + 'txt_output/conv_high_res/'
#txt_path = project_path + 'txt_output/conv_normal/'
path_dict = {'data_filtered':{
'folder' : 'ds005/',
'bold_img_name' : 'filtered_func_data_mni.nii.gz',
'run_path' : 'model/model001/',
'feat' : '.feat/'
},
'data_original':{
'folder' : 'ds005/',
'bold_img_name' : 'bold.nii.gz',
'run_path' : 'BOLD/',
'feat' : '/'
}}
# TODO: uncomment for final version
#subject_list = [str(i) for i in range(1,17)]
subject_list = ['1','5']
run_list = [str(i) for i in range(1,2)]
cond_list = [str(i) for i in range(1,5)]
#TODO: Change to relevant path for data or other thing
d = path_dict['data_original']
#OR
#d = path_dict['data_filtered']
images_paths = [('ds005' +'_sub' + s.zfill(3) + '_t1r' + r, \
data_path + d['folder'] + 'sub%s/'%(s.zfill(3)) + d['run_path'] \
+ 'task001_run%s'%(r.zfill(3))+d['feat']+'%s'%( d['bold_img_name'])) \
for r in run_list \
for s in subject_list]
print("\n=====================================================")
thres = 375 #from analysis of the histograms
for image_path in images_paths:
name = image_path[0]
print("Starting t-test analysis and plot for subject "+name[9:12])
img = nib.load(image_path[1])
data_int = img.get_data()
data = data_int.astype(float)
vol_shape = data.shape[:-1]
n_trs = data.shape[-1]
#get the mean value
mean_data = np.mean(data, axis = -1)
#build the mask
    in_brain_mask = mean_data > thres
#smooth the data set
smooth_data = smoothing(data, 1, range(n_trs))
#initialize design matrix for t test
p = 7
X_matrix = np.ones((data.shape[-1], p))
#build our design matrix
for cond in range(1,5):
convolved = np.loadtxt(txt_path + name + '_conv_' + str(cond).zfill(3) + '_high_res.txt')
#convolved = np.loadtxt(txt_path + name + '_conv_' + str(cond).zfill(3) + '_canonical.txt')
X_matrix[:,cond] = convolved
linear_drift = np.linspace(-1, 1, n_trs)
X_matrix[:,5] = linear_drift
quadratic_drift = linear_drift ** 2
quadratic_drift -= np.mean(quadratic_drift)
X_matrix[:,6] = quadratic_drift
beta, t, df, p = t_stat(smooth_data, X_matrix)
for cond in range(0,4):
print("Starting test for condition " + str(cond+1))
t_newshape = np.reshape(t[cond,:],vol_shape)
t_newshape[~in_brain_mask]=np.nan
t_T = np.zeros(vol_shape)
for z in range(vol_shape[2]):
t_T[:, :, z] = t_newshape[:,:, z].T
t_plot = plot_mosaic(t_T)
plt.imshow(t_plot,interpolation='nearest', cmap='seismic')
zero_out=max(abs(np.nanmin(t_T)),np.nanmax(t_T))
plt.title(name+'_t_statistics'+'_cond_'+'_%s'%(cond+1))
plt.clim(-zero_out,zero_out)
plt.colorbar()
plt.savefig(dirs[1]+'/'+ name +'_t-test_'+'cond'+str(cond+1)+'.png')
plt.close()
print("\nT-test analysis and plots done for selected subjects")
print("See mosaic plots in project-epsilon/fig/t-test/")
| bsd-3-clause | 6,409,189,543,104,317,000 | 34.544643 | 97 | 0.558905 | false |
basho/riak-python-client | riak/test_server.py | 1 | 10342 | # Copyright 2010-present Basho Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os.path
import threading
import string
import re
import random
import shutil
import socket
import time
import stat
from subprocess import Popen, PIPE
from riak.util import deep_merge
from six import string_types
try:
bytes
except NameError:
bytes = str
class Atom(object):
def __init__(self, s):
self.str = s
def __str__(self):
return str(self.str)
def __repr__(self):
return repr(self.str)
def __eq__(self, other):
return self.str == other
def __lt__(self, other):
return self.str < other
def erlang_config(hash, depth=1):
def printable(item):
k, v = item
if isinstance(v, string_types):
p = '"%s"' % v
elif isinstance(v, dict):
p = erlang_config(v, depth + 1)
elif isinstance(v, bool):
p = ("%s" % v).lower()
else:
p = "%s" % v
return "{%s, %s}" % (k, p)
padding = ' ' * depth
parent_padding = ' ' * (depth - 1)
values = (",\n%s" % padding).join(map(printable, hash.items()))
return "[\n%s%s\n%s]" % (padding, values, parent_padding)
class TestServer(object):
VM_ARGS_DEFAULTS = {
"-name": "riaktest%[email protected]" % random.randint(0, 100000),
"-setcookie": "%d_%d" % (random.randint(0, 100000),
random.randint(0, 100000)),
"+K": "true",
"+A": 64,
"-smp": "enable",
"-env ERL_MAX_PORTS": 4096,
"-env ERL_FULLSWEEP_AFTER": 10,
"-pa": os.path.abspath(os.path.join(os.path.dirname(__file__),
"erl_src"))
}
APP_CONFIG_DEFAULTS = {
"riak_core": {
"web_ip": "127.0.0.1",
"web_port": 9000,
"handoff_port": 9001,
"ring_creation_size": 64
},
"riak_kv": {
"storage_backend": Atom("riak_kv_test_backend"),
"pb_ip": "127.0.0.1",
"pb_port": 9002,
"js_vm_count": 8,
"js_max_vm_mem": 8,
"js_thread_stack": 16,
"riak_kv_stat": True,
"map_cache_size": 0,
"vnode_cache_entries": 0,
"test": True,
"memory_backend": {
"test": True,
},
},
"riak_search": {
"enabled": True,
"search_backend": Atom("riak_search_test_backend")
},
}
DEFAULT_BASE_DIR = "RUNNER_BASE_DIR=${RUNNER_SCRIPT_DIR%/*}"
_temp_bin = None
_temp_etc = None
_temp_log = None
_temp_pipe = None
def __init__(self, tmp_dir="/tmp/riak/test_server",
bin_dir=os.path.expanduser("~/.riak/install/riak-0.14.2/bin"),
vm_args=None, **options):
self._lock = threading.Lock()
self.temp_dir = tmp_dir
self.bin_dir = bin_dir
self._prepared = False
self._started = False
self.vm_args = self.VM_ARGS_DEFAULTS.copy()
if vm_args is not None:
self.vm_args = deep_merge(self.vm_args, vm_args)
self.app_config = self.APP_CONFIG_DEFAULTS.copy()
for key, value in options.items():
if key in self.app_config:
self.app_config[key] = deep_merge(self.app_config[key], value)
ring_dir = os.path.join(self.temp_dir, "data", "ring")
crash_log = os.path.join(self.temp_dir, "log", "crash.log")
self.app_config["riak_core"]["ring_state_dir"] = ring_dir
self.app_config["riak_core"]["platform_data_dir"] = self.temp_dir
self.app_config["lager"] = {"crash_log": crash_log}
def prepare(self):
if not self._prepared:
self.touch_ssl_distribution_args()
self.create_temp_directories()
self._riak_script = os.path.join(self._temp_bin, "riak")
self.write_riak_script()
self.write_vm_args()
self.write_app_config()
self._prepared = True
def create_temp_directories(self):
directories = ["bin", "etc", "log", "data", "pipe"]
for directory in directories:
dir = os.path.normpath(os.path.join(self.temp_dir, directory))
if not os.path.exists(dir):
os.makedirs(dir)
setattr(self, "_temp_%s" % directory, dir)
def start(self):
if self._prepared and not self._started:
with self._lock:
self._server = Popen([self._riak_script, "console"],
stdin=PIPE, stdout=PIPE, stderr=PIPE)
self._server.stdin.write("\n")
self._server.stdin.flush()
self.wait_for_erlang_prompt()
self._started = True
def stop(self):
if self._started:
with self._lock:
self._server.stdin.write("init:stop().\n")
self._server.stdin.flush()
self._server.wait()
self._started = False
def cleanup(self):
if self._started:
self.stop()
shutil.rmtree(self.temp_dir, True)
self._prepared = False
def recycle(self):
if self._started:
with self._lock:
stdin = self._server.stdin
if self._kv_backend() == "riak_kv_test_backend":
stdin.write("riak_kv_test_backend:reset().\n")
stdin.flush()
self.wait_for_erlang_prompt()
if self.app_config["riak_search"]["enabled"]:
stdin.write("riak_search_test_backend:reset().\n")
stdin.flush()
self.wait_for_erlang_prompt()
else:
stdin.write("init:restart().\n")
stdin.flush()
self.wait_for_erlang_prompt()
self.wait_for_startup()
def wait_for_startup(self):
listening = False
while not listening:
try:
socket.create_connection((self._http_ip(), self._http_port()),
1.0)
except IOError:
pass
else:
listening = True
def wait_for_erlang_prompt(self):
prompted = False
buffer = ""
while not prompted:
line = self._server.stdout.readline()
if len(line) > 0:
buffer += line
if re.search(r"\(%s\)\d+>" % self.vm_args["-name"], buffer):
prompted = True
if re.search(r'"Kernel pid terminated".*\n', buffer):
raise Exception("Riak test server failed to start.")
def write_riak_script(self):
with open(self._riak_script, "wb") as temp_bin_file:
with open(os.path.join(self.bin_dir, "riak"), "r") as riak_file:
for line in riak_file.readlines():
line = re.sub("(RUNNER_SCRIPT_DIR=)(.*)", r'\1%s' %
self._temp_bin,
line)
line = re.sub("(RUNNER_ETC_DIR=)(.*)", r'\1%s' %
self._temp_etc, line)
line = re.sub("(RUNNER_USER=)(.*)", r'\1', line)
line = re.sub("(RUNNER_LOG_DIR=)(.*)", r'\1%s' %
self._temp_log, line)
line = re.sub("(PIPE_DIR=)(.*)", r'\1%s' %
self._temp_pipe, line)
line = re.sub("(PLATFORM_DATA_DIR=)(.*)", r'\1%s' %
self.temp_dir, line)
if (string.strip(line) == self.DEFAULT_BASE_DIR):
line = ("RUNNER_BASE_DIR=%s\n" %
os.path.normpath(os.path.join(self.bin_dir,
"..")))
temp_bin_file.write(line)
os.fchmod(temp_bin_file.fileno(),
stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
def write_vm_args(self):
with open(self._vm_args_path(), 'wb') as vm_args:
for arg, value in self.vm_args.items():
vm_args.write("%s %s\n" % (arg, value))
def write_app_config(self):
with open(self._app_config_path(), "wb") as app_config:
app_config.write(erlang_config(self.app_config))
app_config.write(".")
def touch_ssl_distribution_args(self):
# To make sure that the ssl_distribution.args file is present,
# the control script in the source node has to have been run at
# least once. Running the `chkconfig` command is innocuous
# enough to accomplish this without other side-effects.
script = os.path.join(self.bin_dir, "riak")
Popen([script, "chkconfig"],
stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate()
def _kv_backend(self):
return self.app_config["riak_kv"]["storage_backend"]
def _http_ip(self):
return self.app_config["riak_core"]["web_ip"]
def _http_port(self):
return self.app_config["riak_core"]["web_port"]
def _app_config_path(self):
return os.path.join(self._temp_etc, "app.config")
def _vm_args_path(self):
return os.path.join(self._temp_etc, "vm.args")
if __name__ == "__main__":
server = TestServer()
server.prepare()
server.start()
print("Started...")
time.sleep(20)
print("Recycling...")
server.recycle()
time.sleep(20)
server.stop()
server.cleanup()
| apache-2.0 | 4,965,672,027,850,435,000 | 33.358804 | 79 | 0.508219 | false |
bkold/CarbonCopy | ribbit_app/tests.py | 1 | 6847 | from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.conf.urls import url
from ribbit_app.models import Ribbit
from . import views
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
from django.contrib.auth.models import User
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException
import unittest, time, re
from time import time
class TestAccess(TestCase):
def setUp(self):
self.c = Client()
def test_entries_access(self):
#no account
response = self.c.get('/')
self.assertEqual(response.status_code,200)
response = self.c.get('/login')
self.assertEqual(response.status_code,302)
#redirect to '/'
response = self.c.get('/logout')
self.assertEqual(response.status_code,302)
#redirect to '/'
response = self.c.get('/signup')
self.assertEqual(response.status_code,302)
#redirect to '/'
response = self.c.get('/public')
self.assertEqual(response.status_code,302)
#redirect to '/'
response = self.c.get('/submit')
self.assertEqual(response.status_code,302)
#redirect to '/'
response = self.c.get('/users')
self.assertEqual(response.status_code,301)
#redirect to '/'
response = self.c.get('/follow')
self.assertEqual(response.status_code,302)
#redirect to '/'
class TestLogedInAccess(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(username="testMan", email="[email protected]", password="123")
def test_entry_created(self):
#####not loged in
response = self.c.get(reverse('b'))
self.assertEqual(response.status_code, 200)
response = self.c.get(reverse('logn'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
response = self.c.get(reverse('logot'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
response = self.c.get(reverse('sign'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
response = self.c.get(reverse('pub'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
response = self.c.get(reverse('us'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
response = self.c.get(reverse('fol'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
#####login
self.c.login(username='testMan', password='123')
response = self.c.get(reverse('b'))
self.assertEqual(response.status_code, 200)
self.c.login(username='testMan', password='123')
response = self.c.get(reverse('logn'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
self.c.login(username='testMan', password='123')
response = self.c.get(reverse('logot'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
self.c.login(username='testMan', password='123')
response = self.c.get(reverse('sign'))
self.assertEqual(response.status_code, 302)
#redirect to '/'
self.c.login(username='testMan', password='123')
response = self.c.get(reverse('pub'))
self.assertEqual(response.status_code, 200)
self.c.login(username='testMan', password='123')
response = self.c.get(reverse('us'))
self.assertEqual(response.status_code, 200)
self.c.login(username='testMan', password='123')
response = self.c.get(reverse('fol'))
self.assertEqual(response.status_code, 302)
#redirect to '/users'
def test_entries_template_context(self):
#####upload test
Ribbit.objects.create(content='test post 2', pic='{{MEDIA_URL}}uploaded_files/test.jpg', brightness='20', user=self.user)
response = self.c.get(reverse('sub'))
class TestWebdriver(LiveServerTestCase):
def setUp(self):
self.driver = webdriver.Firefox()
User.objects.create_superuser(
username='admin',
password='admin',
email='[email protected]'
)
def tearDown(self):
# Call tearDown to close the web browser
self.driver.quit()
def test_auth_user(self):
self.driver.get('http://127.0.0.1:8000/')
self.driver.implicitly_wait(10)
username = self.driver.find_element_by_xpath('//input[@placeholder="Username"]')
username.send_keys("test_new_1") # This needs to change evertime
password1 = self.driver.find_element_by_id("id_email")
password1.send_keys("[email protected]")
password1 = self.driver.find_element_by_id("id_password1")
password1.send_keys("123")
password2 = self.driver.find_element_by_id("id_password2")
password2.send_keys("123")
self.driver.find_element_by_xpath('//input[@value="Create Account"]').click()
self.driver.implicitly_wait(10)
#picture input
brightness = self.driver.find_element_by_id("id_brightness")
brightness.send_keys("10")
content = self.driver.find_element_by_id("id_content")
content.send_keys("test")
pic = self.driver.find_element_by_id("id_pic")
pic.send_keys("/home/brian/Desktop/CarbonCopy/CC/ribbit/pictures/uploaded_files/test.jpg") #This should be the addresses of your picture
self.driver.find_element_by_xpath('//input[@value="Post!"]').click()
self.driver.implicitly_wait(10)
self.driver.find_element_by_link_text("Public Profiles").click()
self.driver.implicitly_wait(10)
self.driver.find_element_by_link_text("My Profile").click()
self.driver.implicitly_wait(10)
self.driver.find_element_by_link_text("Public Posts").click()
self.driver.find_element_by_xpath('//input[@value="Log Out"]').click()
def test_login_user(self):
self.driver.get('http://127.0.0.1:8000/')
username = self.driver.find_element_by_id("id_username")
username.send_keys("test_new_1") #this needs to be a vaild user
password = self.driver.find_element_by_id("id_password")
password.send_keys("123")
self.driver.implicitly_wait(10)
self.driver.find_element_by_xpath('//input[@value="Log In"]').click()
self.driver.implicitly_wait(10)
self.driver.find_element_by_link_text("Home").click()
#picture input
brightness = self.driver.find_element_by_id("id_brightness")
brightness.send_keys("10")
content = self.driver.find_element_by_id("id_content")
content.send_keys("test")
pic = self.driver.find_element_by_id("id_pic")
pic.send_keys("/home/brian/Desktop/CarbonCopy/CC/ribbit/pictures/uploaded_files/test.jpg") #This should be the addresses of your picture
self.driver.find_element_by_xpath('//input[@value="Post!"]').click()
self.driver.implicitly_wait(10)
self.driver.find_element_by_link_text("Public Profiles").click()
self.driver.implicitly_wait(10)
self.driver.find_element_by_link_text("My Profile").click()
self.driver.implicitly_wait(10)
self.driver.find_element_by_link_text("Public Posts").click()
self.driver.find_element_by_xpath('//input[@value="Log Out"]').click()
| mit | 3,674,270,257,799,125,000 | 32.89604 | 138 | 0.701913 | false |
dezgeg/debbindiff | setup.py | 1 | 1468 | #!/usr/bin/env python2
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import debbindiff
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
setup(name='debbindiff',
version=debbindiff.VERSION,
description='display differences between files',
long_description=open('README').read(),
author='Lunar',
author_email='[email protected]',
url='https://wiki.debian.org/ReproducibleBuilds',
packages=find_packages(),
tests_require=['pytest'],
cmdclass = {'test': PyTest},
scripts=['debbindiff.py'],
install_requires=[
'python-debian',
'magic',
'rpm',
],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Operating System :: POSIX',
'Topic :: Utilities',
],
)
| gpl-3.0 | 7,436,780,751,955,286,000 | 29.583333 | 87 | 0.606267 | false |
Micronaet/micronaet-mx8 | mx_stock_status_multicompany/__init__.py | 1 | 1084 | # -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from . import stock_status
from . import report
from . import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 7,192,910,538,212,100,000 | 42.36 | 79 | 0.617159 | false |
keans/dstools | dstools/utils.py | 1 | 1422 | import itertools
def pairwise(iterable):
"""
s -> (s0,s1), (s1,s2), (s2, s3), ...
"""
a, b = itertools.tee(iterable)
next(b, None)
return list(zip(a, b))
def str2bool(v):
"""
converts a string to a boolean
"""
return v.lower() in ("yes", "true", "t", "1")
def chunks(li, size):
"""
returns the given list in chunks of given size
"""
for i in range(0, len(li), size):
yield li[i:i+size]
def ngram(text, n=3):
"""
return ngrams of the given text
"""
for i in range(len(text) - n + 1):
yield text[i:i+n]
def sizeof_fmt(no_bytes, unit=None, kibi=True):
"""
returns a human friendly output of the given number of bytes
in the given unit (or selecting an auto unit, if not provided)
"""
units = list("kMGTPEZY")
assert(not unit or (unit in units))
if kibi:
# kilo binary: 2**x
base, suffix = (1024.0, "iB")
else:
# kilo: 10**x
base, suffix = (1000.0, "B")
if unit in units:
# calculate size in the target unit
no_bytes = no_bytes / (base ** (units.index(unit) + 1))
else:
# find a useful representation
for no, unit in enumerate(units):
if -base < no_bytes < base:
unit = units[no - 1]
break
no_bytes /= base
return "{:3.2f} {}{}".format(no_bytes, unit, suffix)
| mit | -6,534,625,633,302,136,000 | 21.571429 | 66 | 0.530942 | false |
archesproject/arches | arches/app/models/migrations/0005_4_0_1.py | 1 | 6158 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2017-04-25 11:36
import os
from django.db import migrations, models
from django.core import management
from arches.app.models.system_settings import settings
from arches.app.models.concept import Concept
from arches.app.search.search_engine_factory import SearchEngineFactory
from arches.app.search.mappings import CONCEPTS_INDEX
from arches.app.search.elasticsearch_dsl_builder import Term, Query
from rdflib import Graph, RDF, RDFS
def forwards_func(apps, schema_editor):
# We get the model from the versioned app registry;
# if we directly import it, it'll be the wrong version
# index base Arches concept
arches_concept = Concept().get(id="00000000-0000-0000-0000-000000000001", include=["label"])
arches_concept.index()
DValueType = apps.get_model("models", "DValueType")
DValueType.objects.create(valuetype="identifier", category="identifiers", namespace="dcterms", datatype="text")
def reverse_func(apps, schema_editor):
Node = apps.get_model("models", "Node")
Edge = apps.get_model("models", "Edge")
for node in Node.objects.all():
node.ontologyclass = str(node.ontologyclass).split("/")[-1]
node.save()
for edge in Edge.objects.all():
edge.ontologyproperty = str(edge.ontologyproperty).split("/")[-1]
edge.save()
# remove index for base Arches concept
se = SearchEngineFactory().create()
query = Query(se, start=0, limit=10000)
query.add_query(Term(field="conceptid", term="00000000-0000-0000-0000-000000000001"))
query.delete(index=CONCEPTS_INDEX)
try:
DValueType = apps.get_model("models", "DValueType")
DValueType.objects.get(valuetype="identifier").delete()
except Exception:
pass
class Migration(migrations.Migration):
dependencies = [
("models", "0004_4_0_1"),
]
operations = [
migrations.RunSQL(
"""
INSERT INTO widgets(
widgetid,
name,
component,
datatype,
defaultconfig
) VALUES (
'31f3728c-7613-11e7-a139-784f435179ea',
'resource-instance-select-widget',
'views/components/widgets/resource-instance-select',
'resource-instance',
'{
"placeholder": ""
}'
);
INSERT INTO d_data_types(
datatype, iconclass, modulename,
classname, defaultconfig, configcomponent,
configname, isgeometric, defaultwidget,
issearchable
) VALUES (
'resource-instance',
'fa fa-external-link-o',
'datatypes.py',
'ResourceInstanceDataType',
'{
"graphid": null
}',
'views/components/datatypes/resource-instance',
'resource-instance-datatype-config',
FALSE,
'31f3728c-7613-11e7-a139-784f435179ea',
TRUE
);
INSERT INTO widgets(
widgetid,
name,
component,
datatype,
defaultconfig
) VALUES (
'ff3c400a-76ec-11e7-a793-784f435179ea',
'resource-instance-multiselect-widget',
'views/components/widgets/resource-instance-multiselect',
'resource-instance-list',
'{
"placeholder": ""
}'
);
INSERT INTO d_data_types(
datatype, iconclass, modulename,
classname, defaultconfig, configcomponent,
configname, isgeometric, defaultwidget,
issearchable
) VALUES (
'resource-instance-list',
'fa fa-external-link-square',
'datatypes.py',
'ResourceInstanceDataType',
'{
"graphid": null
}',
'views/components/datatypes/resource-instance',
'resource-instance-datatype-config',
FALSE,
'ff3c400a-76ec-11e7-a793-784f435179ea',
TRUE
);
""",
"""
DELETE FROM d_data_types
WHERE datatype = 'resource-instance';
DELETE from widgets
WHERE widgetid = '31f3728c-7613-11e7-a139-784f435179ea';
DELETE FROM d_data_types
WHERE datatype = 'resource-instance-list';
DELETE from widgets
WHERE widgetid = 'ff3c400a-76ec-11e7-a793-784f435179ea';
""",
),
migrations.RunSQL(
"""
UPDATE public.nodes
SET ontologyclass='E93_Presence'
WHERE ontologyclass='E94_Space';
UPDATE public.edges
SET ontologyproperty='P167i_was_place_of'
WHERE ontologyproperty='P167i_was_place_at';
UPDATE public.edges
SET ontologyproperty='P167i_was_place_of'
WHERE ontologyproperty='P168_place_is_defined_by';
UPDATE public.edges
SET ontologyproperty='P167_was_at'
WHERE ontologyproperty='P168i_defines_place';
""",
"""
UPDATE public.nodes
SET ontologyclass='E94_Space'
WHERE ontologyclass='E93_Presence';
UPDATE public.edges
SET ontologyproperty='P168_place_is_defined_by'
WHERE ontologyproperty='P167i_was_place_of';
UPDATE public.edges
SET ontologyproperty='P168i_defines_place'
WHERE ontologyproperty='P167_was_at';
UPDATE public.edges
SET ontologyproperty='P167i_was_place_at'
WHERE ontologyproperty='P167i_was_place_of';
""",
),
migrations.RunPython(forwards_func, reverse_func),
]
| agpl-3.0 | 4,566,194,626,997,596,000 | 32.107527 | 115 | 0.546606 | false |
theblindr/blindr-backend | tests/models/test_user.py | 1 | 2530 | from datetime import datetime
from freezegun import freeze_time
from unittest import mock
from tests.blindrtest import BlindrTest
from tests.factory_boy.user_factory import UserFactory
from blindr.models.user import User
class UserTests(BlindrTest):
@freeze_time('2015-01-01')
@mock.patch('blindr.models.user.name_generator.generate_name')
@mock.patch('blindr.models.user.facebook.GraphAPI')
def test_from_facebook_new_user(self, mock_graphapi, mock_generate_name):
graph_instance = mock_graphapi.return_value
graph_instance.get_object.return_value = {
'id': '123123',
'name': 'bob',
'gender': 'male'
}
mock_generate_name.return_value = 'Fake Name'
user = User.query.get('123123')
self.assertIsNone(user)
User.from_facebook('oauth_234234')
mock_graphapi.assert_called_with(access_token='oauth_234234')
graph_instance.get_object.assert_called_with(id='me')
user = User.query.get('123123')
self.assertIsNotNone(user)
self.assertEqual('123123', user.id)
self.assertEqual('oauth_234234', user.OAuth)
self.assertEqual('m', user.gender)
self.assertEqual(datetime(2015, 1, 1), user.last_poll)
self.assertEqual('bob', user.real_name)
self.assertEqual('Fake Name', user.fake_name)
self.assertEqual('', user.facebook_urls)
@freeze_time('2015-01-01')
@mock.patch('blindr.models.user.facebook.GraphAPI')
def test_from_facebook_update_user(self, mock_graphapi):
graph_instance = mock_graphapi.return_value
graph_instance.get_object.return_value = {
'id': '123123',
'name': 'bob',
'gender': 'male'
}
UserFactory(
id='123123',
OAuth='oauth_old',
gender='f',
last_poll=datetime(2000,1,1),
real_name='Alice'
)
user = User.query.get('123123')
self.assertIsNotNone(user)
User.from_facebook('oauth_234234')
mock_graphapi.assert_called_with(access_token='oauth_234234')
graph_instance.get_object.assert_called_with(id='me')
user = User.query.get('123123')
self.assertIsNotNone(user)
self.assertEqual('123123', user.id)
self.assertEqual('oauth_234234', user.OAuth)
self.assertEqual('m', user.gender)
self.assertEqual(datetime(2015, 1, 1), user.last_poll)
self.assertEqual('bob', user.real_name)
| gpl-3.0 | -527,985,798,835,686,300 | 31.857143 | 77 | 0.624506 | false |
OpenTrons/opentrons_sdk | api/src/opentrons/deck_calibration/endpoints.py | 1 | 24629 | from uuid import uuid1
from typing import Dict, Tuple, Optional, NamedTuple
import logging
from enum import Enum
try:
from opentrons import instruments
except ImportError:
pass
from opentrons.config import pipette_config, robot_configs, feature_flags
from opentrons.types import Mount, Point
from opentrons.hardware_control.types import CriticalPoint
from opentrons.deck_calibration import jog, position, dots_set, z_pos
from opentrons.util.linal import add_z, solve, identity_deck_transform
mount_by_name = {'left': Mount.LEFT, 'right': Mount.RIGHT}
log = logging.getLogger(__name__)
class SessionWrapper:
"""Wrapper for single instance of SessionManager"""
def __init__(self):
self._session = None
@property
def session(self) -> Optional['SessionManager']:
"""Get access to the session manager"""
return self._session
@session.setter
def session(self, s: Optional['SessionManager']):
"""Update the session manager"""
self._session = s
session_wrapper = SessionWrapper()
class DeckCalibrationPoint(str, Enum):
"""
The name of a point relative to deck calibration. The number points are
calibration crosses ("1" in slot 1, "2" in slot 3, "3" in slot 7); "safeZ"
is a safe height above the deck, "attachTip" is a good place to go for the
user to attach a tip.
"""
one = "1"
two = "2"
three = "3"
safeZ = "safeZ"
attachTip = "attachTip"
def expected_points():
slot_1_lower_left,\
slot_3_lower_right,\
slot_7_upper_left = dots_set()
return {
DeckCalibrationPoint.one: slot_1_lower_left,
DeckCalibrationPoint.two: slot_3_lower_right,
DeckCalibrationPoint.three: slot_7_upper_left}
def safe_points() -> Dict[str, Tuple[float, float, float]]:
# Safe points are defined as 5mm toward the center of the deck in x, y and
# 10mm above the deck. User is expect to jog to the critical point from the
# corresponding safe point, to avoid collision depending on direction of
# misalignment between the deck and the gantry.
slot_1_lower_left, \
slot_3_lower_right, \
slot_7_upper_left = expected_points().values()
slot_1_safe_point = (
slot_1_lower_left[0] + 5, slot_1_lower_left[1] + 5, 10)
slot_3_safe_point = (
slot_3_lower_right[0] - 5, slot_3_lower_right[1] + 5, 10)
slot_7_safe_point = (
slot_7_upper_left[0] + 5, slot_7_upper_left[1] - 5, 10)
attach_tip_point = (200, 90, 130)
return {
DeckCalibrationPoint.one: slot_1_safe_point,
DeckCalibrationPoint.two: slot_3_safe_point,
DeckCalibrationPoint.three: slot_7_safe_point,
DeckCalibrationPoint.safeZ: z_pos,
DeckCalibrationPoint.attachTip: attach_tip_point
}
def _get_uuid() -> str:
return str(uuid1())
class SessionManager:
"""
Creates a session manager to handle all commands required for factory
calibration.
Before issuing a movement command, the following must be done:
1. Create a session manager
2. Initialize a pipette
3. Select the current pipette
"""
def __init__(self, hardware):
self.id = _get_uuid()
self.pipettes = {}
self.current_mount = None
self.current_model = None
self.tip_length = None
self.points = {k: None for k in expected_points().keys()}
self.z_value = None
self.cp = None
self.pipette_id = None
self.adapter = hardware.sync
self.current_transform = identity_deck_transform()
self.backup_gantry_cal = self.adapter.config.gantry_calibration
robot_configs.backup_configuration(self.adapter.config)
# Start from fresh identity matrix every calibration session
self.adapter.update_config(gantry_calibration=list(
map(lambda i: list(i), self.current_transform)))
def init_pipette(session):
"""
Finds pipettes attached to the robot currently and chooses the correct one
to add to the session.
:return: The pipette type and mount chosen for deck calibration
"""
pipette_info = set_current_mount(session)
pipette = pipette_info['pipette']
res = {}
if pipette:
session.current_model = pipette_info['model']
if not feature_flags.use_protocol_api_v2():
mount = pipette.mount
session.current_mount = mount
else:
mount = pipette.get('mount')
session.current_mount = mount_by_name[mount]
session.pipettes[mount] = pipette
res = {'mount': mount, 'model': pipette_info['model']}
log.info("Pipette info {}".format(session.pipettes))
return res
def get_pipettes(sess: SessionManager):
if not feature_flags.use_protocol_api_v2():
attached_pipettes = sess.adapter.get_attached_pipettes()
left_pipette = None
right_pipette = None
left = attached_pipettes.get('left')
right = attached_pipettes.get('right')
if left['model'] in pipette_config.config_models:
left_pipette = instruments.pipette_by_name(
'left', left['name'])
if right['model'] in pipette_config.config_models:
right_pipette = instruments.pipette_by_name(
'right', right['name'])
else:
attached_pipettes = sess.adapter.attached_instruments
left_pipette = attached_pipettes.get(Mount.LEFT)
right_pipette = attached_pipettes.get(Mount.RIGHT)
return right_pipette, left_pipette
def set_current_mount(session: SessionManager):
"""
Choose the pipette in which to execute commands. If there is no pipette,
or it is uncommissioned, the pipette is not mounted.
:attached_pipettes attached_pipettes: Information obtained from the current
pipettes attached to the robot. This looks like the following:
:dict with keys 'left' and 'right' and a model string for each
mount, or 'uncommissioned' if no model string available
:return: The selected pipette
"""
pipette = None
right_channel = None
left_channel = None
right_pipette, left_pipette = get_pipettes(session)
if right_pipette:
if not feature_flags.use_protocol_api_v2():
right_channel = right_pipette.channels
else:
right_channel = right_pipette.get('channels')
right_pipette['mount'] = 'right'
if left_pipette:
if not feature_flags.use_protocol_api_v2():
left_channel = left_pipette.channels
else:
left_channel = left_pipette.get('channels')
left_pipette['mount'] = 'left'
if right_channel == 1:
pipette = right_pipette
session.cp = CriticalPoint.NOZZLE
elif left_channel == 1:
pipette = left_pipette
session.cp = CriticalPoint.NOZZLE
elif right_pipette:
pipette = right_pipette
session.cp = CriticalPoint.FRONT_NOZZLE
elif left_pipette:
pipette = left_pipette
session.cp = CriticalPoint.FRONT_NOZZLE
model, pip_id = _get_model_name(pipette, session.adapter)
session.pipette_id = pip_id
return {'pipette': pipette, 'model': model}
def _get_model_name(pipette, adapter):
model = None
pip_id = None
if pipette:
if not feature_flags.use_protocol_api_v2():
model = pipette.model
pip_info = adapter.get_attached_pipettes()[pipette.mount]
pip_id = pip_info['id']
else:
model = pipette.get('model')
mount = Mount.LEFT if pipette['mount'] == 'left' else Mount.RIGHT
pip_info = adapter.attached_instruments[mount]
pip_id = pip_info['pipette_id']
return model, pip_id
# -------------- Route Fns -----------------------------------------------
# Note: endpoints should not call these functions directly, to ensure that
# session protections are applied--should be called through the dispatch
# endpoint
# ------------------------------------------------------------------------
class CommandResult(NamedTuple):
success: bool
message: str
async def attach_tip(data) -> CommandResult:
"""
Attach a tip to the current pipette
    :param data: a dict with schema:
{
'tipLength': a float representing how much the length of a pipette
increases when a tip is added
}
"""
if not session_wrapper.session:
raise NoSessionInProgress()
tip_length = data.get('tipLength')
if not tip_length:
message = 'Error: "tipLength" must be specified in request'
status = False
else:
if not feature_flags.use_protocol_api_v2():
pipette = session_wrapper.session.pipettes[
session_wrapper.session.current_mount]
if pipette.tip_attached:
log.warning('attach tip called while tip already attached')
pipette._remove_tip(pipette._tip_length)
pipette._add_tip(tip_length)
else:
session_wrapper.session.adapter.add_tip(
session_wrapper.session.current_mount, tip_length)
if session_wrapper.session.cp == CriticalPoint.NOZZLE:
session_wrapper.session.cp = CriticalPoint.TIP
session_wrapper.session.tip_length = tip_length
message = "Tip length set: {}".format(tip_length)
status = True
return CommandResult(success=status, message=message)
async def detach_tip(data) -> CommandResult:
"""
Detach the tip from the current pipette
:param data: unused
"""
if not session_wrapper.session:
raise NoSessionInProgress()
if not feature_flags.use_protocol_api_v2():
pipette = session_wrapper.session.pipettes[
session_wrapper.session.current_mount]
if not pipette.tip_attached:
log.warning('detach tip called with no tip')
pipette._remove_tip(session_wrapper.session.tip_length)
else:
session_wrapper.session.adapter.remove_tip(
session_wrapper.session.current_mount)
if session_wrapper.session.cp == CriticalPoint.TIP:
session_wrapper.session.cp = CriticalPoint.NOZZLE
session_wrapper.session.tip_length = None
return CommandResult(success=True, message="Tip removed")
async def run_jog(data: dict) -> CommandResult:
"""
Allow the user to jog the selected pipette around the deck map
:param data: a dict with schema:
{
'axis': The current axis you wish to move
'direction': The direction you wish to move (+ or -)
'step': The increment you wish to move
}
:return: The position moved to based on axis, direction, step
given by the user.
"""
if not session_wrapper.session:
raise NoSessionInProgress()
axis = data.get('axis')
direction = data.get('direction')
step = data.get('step')
if axis not in {'x', 'y', 'z'}:
message = '"axis" must be "x", "y", or "z"'
status = False
elif direction not in {-1, 1}:
message = '"direction" must be -1 or 1'
status = False
elif step is None:
message = '"step" must be specified'
status = False
else:
position = jog(
axis,
direction,
step,
session_wrapper.session.adapter,
session_wrapper.session.current_mount,
session_wrapper.session.cp)
message = 'Jogged to {}'.format(position)
status = True
return CommandResult(success=status, message=message)
async def move(data) -> CommandResult:
"""
Allow the user to move the selected pipette to a specific point
:param data: a dict with schema:
{
'point': The name of the point to move to. Must be one of
["1", "2", "3", "safeZ", "attachTip"]
}
:return: The position you are moving to
"""
if not session_wrapper.session:
raise NoSessionInProgress()
point_name = data.get('point')
point = safe_points().get(point_name)
if point and len(point) == 3:
if not feature_flags.use_protocol_api_v2():
pipette = session_wrapper.session.pipettes[
session_wrapper.session.current_mount]
channels = pipette.channels
# For multichannel pipettes in the V1 session, we use the tip closest
# to the front of the robot rather than the back (this is the tip that
# would go into well H1 of a plate when pipetting from the first row of
# a 96 well plate, for instance). Since moves are issued for the A1 tip
# we have to adjust the target point by 2 * Y_OFFSET_MULTI (where the
# offset value is the distance from the axial center of the pipette to
# the A1 tip). By sending the A1 tip to to the adjusted target, the H1
# tip should go to the desired point. Y_OFFSET_MULT must then be backed
# out of xy positions saved in the `save_xy` handler
# (not 2 * Y_OFFSET_MULTI, because the axial center of the pipette
# will only be off by 1* Y_OFFSET_MULTI).
if not channels == 1:
x = point[0]
y = point[1] + pipette_config.Y_OFFSET_MULTI * 2
z = point[2]
point = (x, y, z)
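# Worked example of the adjustment above (numbers are illustrative, not the
# real pipette_config values): if Y_OFFSET_MULTI were 35 mm and the target
# point were (100, 200, 50), a multichannel pipette would be sent to
# (100, 270, 50); save_xy() below then subtracts a single Y_OFFSET_MULTI
# (35 mm) from the measured Y when recording the point.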
# hack: z=150mm is not a safe point for a gen2 pipette with a tip
# attached, since their home location is z=+172mm and both 300ul
# and 1000ul tips are more than 22mm long. This isn't an issue for
# apiv2 because it can select the NOZZLE critical point.
if pipette.tip_attached and point_name == 'attachTip':
point = (point[0],
point[1],
point[2]-pipette._tip_length)
pipette.move_to((session_wrapper.session.adapter.deck, point),
strategy='arc')
else:
if not point_name == 'attachTip':
intermediate_pos = position(
session_wrapper.session.current_mount,
session_wrapper.session.adapter,
session_wrapper.session.cp)
session_wrapper.session.adapter.move_to(
session_wrapper.session.current_mount,
Point(
x=intermediate_pos[0],
y=intermediate_pos[1],
z=session_wrapper.session.tip_length),
critical_point=session_wrapper.session.cp)
session_wrapper.session.adapter.move_to(
session_wrapper.session.current_mount,
Point(x=point[0],
y=point[1],
z=session_wrapper.session.tip_length),
critical_point=session_wrapper.session.cp)
session_wrapper.session.adapter.move_to(
session_wrapper.session.current_mount,
Point(x=point[0], y=point[1], z=point[2]),
critical_point=session_wrapper.session.cp)
else:
if session_wrapper.session.cp == CriticalPoint.TIP:
session_wrapper.session.cp = CriticalPoint.NOZZLE
session_wrapper.session.adapter.move_to(
session_wrapper.session.current_mount,
Point(x=point[0], y=point[1], z=point[2]),
critical_point=session_wrapper.session.cp)
message = 'Moved to {}'.format(point)
status = True
else:
message = '"point" must be one of "1", "2", "3", "safeZ", "attachTip"'
status = False
return CommandResult(success=status, message=message)
async def save_xy(data) -> CommandResult:
"""
Save the current XY values for the calibration data
:param data: a dict with schema:
{
'point': a string ID ['1', '2', or '3'] of the calibration point to save
}
"""
if not session_wrapper.session:
raise NoSessionInProgress()
valid_points = list(session_wrapper.session.points.keys())
point = data.get('point')
if point not in valid_points:
message = 'point must be one of {}'.format(valid_points)
status = False
elif not session_wrapper.session.current_mount:
message = "Mount must be set before calibrating"
status = False
else:
if not feature_flags.use_protocol_api_v2():
mount = 'Z' if session_wrapper.session.current_mount == 'left'\
else 'A'
x, y, _ = position(mount, session_wrapper.session.adapter)
if session_wrapper.session.pipettes[
session_wrapper.session.current_mount].channels != 1:
# See note in `move`
y = y - pipette_config.Y_OFFSET_MULTI
if session_wrapper.session.current_mount == 'left':
dx, dy, _ = session_wrapper.session.adapter.config.mount_offset
x = x + dx
y = y + dy
else:
x, y, _ = position(
session_wrapper.session.current_mount,
session_wrapper.session.adapter,
session_wrapper.session.cp)
session_wrapper.session.points[point] = (x, y)
message = "Saved point {} value: {}".format(
point, session_wrapper.session.points[point])
status = True
return CommandResult(success=status, message=message)
async def save_z(data) -> CommandResult:
"""
Save the current Z height value for the calibration data
:param data: unused
"""
if not session_wrapper.session:
raise NoSessionInProgress()
if not session_wrapper.session.tip_length:
message = "Tip length must be set before calibrating"
status = False
else:
if not feature_flags.use_protocol_api_v2():
mount = 'Z' if session_wrapper.session.current_mount == 'left' \
else 'A'
actual_z = position(
mount, session_wrapper.session.adapter)[-1]
length_offset = pipette_config.load(
session_wrapper.session.current_model,
session_wrapper.session.pipette_id).model_offset[-1]
session_wrapper.session.z_value =\
actual_z - session_wrapper.session.tip_length + length_offset
else:
session_wrapper.session.z_value = position(
session_wrapper.session.current_mount,
session_wrapper.session.adapter,
session_wrapper.session.cp)[-1]
session_wrapper.session.current_transform[2][3] =\
session_wrapper.session.z_value
session_wrapper.session.adapter.update_config(
gantry_calibration=list(
list(i) for i in session_wrapper.session.current_transform
)
)
message = "Saved z: {}".format(session_wrapper.session.z_value)
status = True
return CommandResult(success=status, message=message)
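# Worked example of the v1 branch above (all numbers illustrative): with the
# gantry at actual_z = 20.0 mm, tip_length = 51.7 mm and a model_offset z of
# 12.0 mm, the saved deck height would be
# z_value = 20.0 - 51.7 + 12.0 = -19.7 mm.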
async def save_transform(data) -> CommandResult:
"""
Calculate the transformation matrix that calibrates the gantry to the deck
:param data: unused
"""
if not session_wrapper.session:
raise NoSessionInProgress()
if any([v is None for v in session_wrapper.session.points.values()]):
message = "Not all points have been saved"
status = False
else:
# expected values based on mechanical drawings of the robot
expected_pos = expected_points()
expected = [
expected_pos[p] for p in expected_pos.keys()]
# measured data
actual = [session_wrapper.session.points[p] for p in
sorted(session_wrapper.session.points.keys())]
# Generate a 2-dimensional transform matrix from the two matrices
flat_matrix = solve(expected, actual).round(4)
# replace relevant X, Y and angular components
# [[cos_x, sin_y, const_zero, delta_x___],
# [-sin_x, cos_y, const_zero, delta_y___],
# [const_zero, const_zero, const_one_, delta_z___],
# [const_zero, const_zero, const_zero, const_one_]]
session_wrapper.session.current_transform = \
add_z(flat_matrix, session_wrapper.session.z_value)
session_wrapper.session.adapter.update_config(
gantry_calibration=list(
list(i) for i in session_wrapper.session.current_transform)
)
new_gantry_cal =\
session_wrapper.session.adapter.config.gantry_calibration
session_wrapper.session.backup_gantry_cal = new_gantry_cal
robot_configs.save_deck_calibration(
session_wrapper.session.adapter.config)
message = "Config file saved and backed up"
status = True
return CommandResult(success=status, message=message)
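# For reference, the gantry calibration written above is a 4 x 4 affine
# matrix with the layout sketched in the comment above, e.g. (illustrative
# numbers only):
#
#     [[ 0.9998,  0.0034, 0.0,  -3.21],
#      [-0.0034,  0.9998, 0.0,   1.05],
#      [ 0.0,     0.0,    1.0, -25.30],
#      [ 0.0,     0.0,    0.0,   1.00]]
#
# where the last column holds the X/Y/Z offsets and the upper-left block the
# rotation/scale solved from the three measured points.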
async def release(data) -> CommandResult:
"""
Release a session
:param data: unused
"""
if not session_wrapper.session:
raise NoSessionInProgress()
if not feature_flags.use_protocol_api_v2():
session_wrapper.session.adapter.remove_instrument('left')
session_wrapper.session.adapter.remove_instrument('right')
else:
session_wrapper.session.adapter.cache_instruments()
full_gantry_cal = session_wrapper.session.backup_gantry_cal
session_wrapper.session.adapter.update_config(
gantry_calibration=full_gantry_cal)
session_wrapper.session = None
return CommandResult(success=True, message="calibration session released")
# ---------------------- End Route Fns -------------------------
# The description of the routes
class CalibrationCommand(str, Enum):
run_jog = "jog"
move = "move"
save_xy = "save xy"
attach_tip = "attach tip"
detach_tip = "detach tip"
save_z = "save z"
save_transform = "save transform"
release = "release"
# Router must be defined after all route functions
router = {
CalibrationCommand.run_jog: run_jog,
CalibrationCommand.move: move,
CalibrationCommand.save_xy: save_xy,
CalibrationCommand.attach_tip: attach_tip,
CalibrationCommand.detach_tip: detach_tip,
CalibrationCommand.save_z: save_z,
CalibrationCommand.save_transform: save_transform,
CalibrationCommand.release: release
}
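# Example of how the router above is used (this mirrors dispatch() below):
# the raw command string is normalised through the CalibrationCommand enum
# and the matching coroutine is awaited with the request data, e.g.
#
#     handler = router[CalibrationCommand("save xy")]
#     result = await handler(data={'point': '1'})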
class SessionInProgress(Exception):
pass
class NoSessionInProgress(Exception):
pass
class SessionForbidden(Exception):
pass
class CreateSessionResult(NamedTuple):
token: str
pipette: Dict
async def create_session(force: bool, hardware) -> CreateSessionResult:
"""
Begins the session manager for factory calibration, if a session is not
already in progress, or if the "force" key is specified in the request.
:param force: force creation of a session
:param hardware: hardware instance
:return: The current session ID token or an error message
"""
if session_wrapper.session and not force:
raise SessionInProgress(
'Error, session in progress. Use "force" key in request '
'body to override')
if force and session_wrapper.session:
await release({})
session_wrapper.session = SessionManager(hardware)
res = init_pipette(session_wrapper.session)
if not res:
session_wrapper.session = None
raise SessionForbidden('Error, pipette not recognized')
return CreateSessionResult(token=session_wrapper.session.id,
pipette=res)
async def dispatch(token: str, command: str, command_data) -> CommandResult:
"""
Routes commands to subhandlers based on the command field in the body.
:param token: The session token. Must match the current session
:param command: The calibration command
:param command_data: The data to pass to command router
"""
if not session_wrapper.session:
raise NoSessionInProgress("Session must be started before "
"issuing commands")
log.info("Dispatching token=%s, command=%s, command_data=%s",
token, command, command_data)
if token != session_wrapper.session.id:
raise SessionForbidden(f"Invalid token: {token}")
try:
command = CalibrationCommand(command)
res = await router[command](data=command_data)
except (ValueError, KeyError):
raise SessionForbidden(
f"Command \"{command}\" is unknown and cannot be executed")
return res
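# Minimal end-to-end sketch of the calibration flow (assumes a configured
# `hardware` instance supplied by the robot server; not part of the original
# module, and the tip length is an example value):
#
#     async def example_calibration(hardware):
#         created = await create_session(force=False, hardware=hardware)
#         await dispatch(created.token, "attach tip", {'tipLength': 51.7})
#         await dispatch(created.token, "jog",
#                        {'axis': 'z', 'direction': -1, 'step': 0.1})
#         await dispatch(created.token, "save z", {})
#         await dispatch(created.token, "release", {})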
| apache-2.0 | 6,045,012,831,655,699,000 | 34.335725 | 79 | 0.618336 | false |
RyanSkraba/beam | sdks/python/apache_beam/metrics/metric_test.py | 1 | 5846 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import unittest
from builtins import object
from apache_beam.metrics.cells import DistributionData
from apache_beam.metrics.execution import MetricKey
from apache_beam.metrics.execution import MetricsContainer
from apache_beam.metrics.execution import MetricsEnvironment
from apache_beam.metrics.metric import MetricResults
from apache_beam.metrics.metric import Metrics
from apache_beam.metrics.metric import MetricsFilter
from apache_beam.metrics.metricbase import MetricName
from apache_beam.runners.worker import statesampler
from apache_beam.utils import counters
class NameTest(unittest.TestCase):
def test_basic_metric_name(self):
name = MetricName('namespace1', 'name1')
self.assertEqual(name.namespace, 'namespace1')
self.assertEqual(name.name, 'name1')
self.assertEqual(name, MetricName('namespace1', 'name1'))
key = MetricKey('step1', name)
self.assertEqual(key.step, 'step1')
self.assertEqual(key.metric.namespace, 'namespace1')
self.assertEqual(key.metric.name, 'name1')
self.assertEqual(key, MetricKey('step1', MetricName('namespace1', 'name1')))
class MetricResultsTest(unittest.TestCase):
def test_metric_filter_namespace_matching(self):
filter = MetricsFilter().with_namespace('ns1')
name = MetricName('ns1', 'name1')
key = MetricKey('step1', name)
self.assertTrue(MetricResults.matches(filter, key))
def test_metric_filter_name_matching(self):
filter = MetricsFilter().with_name('name1').with_namespace('ns1')
name = MetricName('ns1', 'name1')
key = MetricKey('step1', name)
self.assertTrue(MetricResults.matches(filter, key))
filter = MetricsFilter().with_name('name1')
name = MetricName('ns1', 'name1')
key = MetricKey('step1', name)
self.assertTrue(MetricResults.matches(filter, key))
def test_metric_filter_step_matching(self):
name = MetricName('ns1', 'name1')
filter = MetricsFilter().with_step('Step1')
key = MetricKey('Step1', name)
self.assertTrue(MetricResults.matches(filter, key))
key = MetricKey('Step10', name)
self.assertFalse(MetricResults.matches(filter, key))
key = MetricKey('Step10/Step1', name)
self.assertTrue(MetricResults.matches(filter, key))
key = MetricKey('Top1/Outer1/Inner1', name)
filter = MetricsFilter().with_step('Top1/Outer1/Inner1')
self.assertTrue(MetricResults.matches(filter, key))
filter = MetricsFilter().with_step('Top1/Outer1')
self.assertTrue(MetricResults.matches(filter, key))
filter = MetricsFilter().with_step('Outer1/Inner1')
self.assertTrue(MetricResults.matches(filter, key))
filter = MetricsFilter().with_step('Top1/Inner1')
self.assertFalse(MetricResults.matches(filter, key))
class MetricsTest(unittest.TestCase):
def test_get_namespace_class(self):
class MyClass(object):
pass
self.assertEqual('{}.{}'.format(MyClass.__module__, MyClass.__name__),
Metrics.get_namespace(MyClass))
def test_get_namespace_string(self):
namespace = 'MyNamespace'
self.assertEqual(namespace, Metrics.get_namespace(namespace))
def test_get_namespace_error(self):
with self.assertRaises(ValueError):
Metrics.get_namespace(object())
def test_counter_empty_name(self):
with self.assertRaises(ValueError):
Metrics.counter("namespace", "")
def test_counter_empty_namespace(self):
with self.assertRaises(ValueError):
Metrics.counter("", "names")
def test_distribution_empty_name(self):
with self.assertRaises(ValueError):
Metrics.distribution("namespace", "")
def test_distribution_empty_namespace(self):
with self.assertRaises(ValueError):
Metrics.distribution("", "names")
def test_create_counter_distribution(self):
sampler = statesampler.StateSampler('', counters.CounterFactory())
statesampler.set_current_tracker(sampler)
state1 = sampler.scoped_state('mystep', 'myState',
metrics_container=MetricsContainer('mystep'))
try:
sampler.start()
with state1:
counter_ns = 'aCounterNamespace'
distro_ns = 'aDistributionNamespace'
name = 'a_name'
counter = Metrics.counter(counter_ns, name)
distro = Metrics.distribution(distro_ns, name)
counter.inc(10)
counter.dec(3)
distro.update(10)
distro.update(2)
self.assertTrue(isinstance(counter, Metrics.DelegatingCounter))
self.assertTrue(isinstance(distro, Metrics.DelegatingDistribution))
del distro
del counter
container = MetricsEnvironment.current_container()
self.assertEqual(
container.get_counter(
MetricName(counter_ns, name)).get_cumulative(),
7)
self.assertEqual(
container.get_distribution(
MetricName(distro_ns, name)).get_cumulative(),
DistributionData(12, 2, 2, 10))
finally:
sampler.stop()
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 4,210,012,796,646,365,000 | 34.216867 | 80 | 0.7039 | false |
benitesf/Skin-Lesion-Analysis-Towards-Melanoma-Detection | main.py | 1 | 9476 | # Import methods of features extraction
from features_extraction.feature_extraction import FeatureExtraction
# Import methods of learning
from learning.learning import neural_network
# Import methods of classification
from classification.classification import classify, confusion_matrix, total_error, local_error
#
from skimage import io
from PIL import Image
# Import util methods
from sklearn.model_selection import train_test_split
import util.dirhandler as dh
import config as cfg
import numpy as np
import time
import sys
"""
Get train and test set
----------------------
"""
all_melanoma = sorted(dh.get_file_name_dir(cfg.melanoma_path, cfg.melanoma_extension))
all_ground = sorted(dh.get_file_name_dir(cfg.ground_path, cfg.ground_extension))
melanoma_train, melanoma_test, ground_train, ground_test = train_test_split(all_melanoma, all_ground, test_size=0.25,
random_state=25)
"""
----------------------
"""
"""
Feature Extraction
------------------
"""
feature = FeatureExtraction()
start_t = time.time()
X, y = feature.second_method(melanoma_train, ground_train)
feature_t = (time.time() - start_t)/60 # minutes
"""
------------------
"""
"""
Training Neural Network
-----------------------
"""
# Training the neural network with 83.3% of the feature array
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.16666)
classifier = neural_network()
start_t = time.time()
classifier.fit(X_train, y_train)
classifier_t = (time.time() - start_t)/60 # minutes
score_test = classifier.score(X_test, y_test)
score_train = classifier.score(X_train, y_train)
"""
-----------------------
"""
"""
Classify test images
---------------
"""
melanoma_list = melanoma_test
ground_list = ground_test
seg, tim, dim = classify(melanoma_list, ground_list, feature, classifier, block=True)
"""
---------------
"""
"""
Accuracy
---------
"""
confmat = confusion_matrix(seg, ground_list)
local_err = local_error(confmat)
sensitivity, specificity, accuracy = total_error(local_err)
"""
---------
"""
"""
Measure of times of execution
-----------------------------
"""
tim = np.array(tim) # sec
dim = np.array(dim)
dim = dim[0:,0] * dim[0:,1]
t_by_pix = (tim*(10**6)) / dim # microsec / pix
tim /= 60 # min
total_time = (tim/60).sum() # total hours
mean_time = tim.mean() # mean minutes
std_time = tim.std() # std minutes
"""
-----------------------------
"""
"""
Saving values
-------------
"""
files = [f.split('.')[0]+'_classified.jpg' for f in melanoma_list]
path_save = 'resultados/red3/preprocesado/test/'
for s, f in zip(seg, files):
img = Image.fromarray(s)
img.convert('L').save(path_save + f)
with open(path_save + 'Measures.txt', 'w') as output:
output.write('---------------\n')
output.write('---- RED 3 ----\n')
output.write('---------------\n\n')
output.write('Data Base: ' + cfg.melanoma_path + '\n')
output.write('Number of images: ' + str(cfg.nImage) + '\n')
output.write('Number of fields: ' + str(cfg.nCells) + '\n')
output.write('Number of images to train: ' + str(len(melanoma_train)) + '\n')
output.write('Number of image to test: ' + str(len(melanoma_test)) + '\n')
output.write('Size of Train from Train_Images: ' + str(X_train.shape) + '\n')
output.write('Size of Test from Train_Images: ' + str(X_test.shape) + '\n')
output.write('Type of segmentation: block\n\n')
output.write(classifier.__str__()+'\n\n')
output.write('Final function value: ' + str(classifier.loss_)+'\n\n')
output.write('-------------------------------------------------------------------------\n')
output.write('Time of execution: \n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Feature Extraction: \n')
output.write('\tTime: ' + str(feature_t) + ' min\n')
output.write('Neural Network Training:\n')
output.write('\tTime: ' + str(classifier_t) + ' min\n')
output.write('Segmentation by image:\n')
output.write('\tTotal: ' + str(total_time) + ' hrs\n')
output.write('\tMean: ' + str(mean_time) + '+-' + str(std_time) + ' min\n')
output.write('Segmentation by pixel:\n')
output.write('\tMean: ' + str(t_by_pix.mean()) + '+-' + str(t_by_pix.std()) + ' mircosec/pix\n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Score:\n')
output.write('\tX_train: ' + str(score_train) + '\n')
output.write('\tX_test: ' + str(score_test) + '\n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Total error\n')
output.write('\tSensitivity: ' + str(sensitivity[0]) + '+-' + str(sensitivity[1]) + '\n')
output.write('\tSpecificity: ' + str(specificity[0]) + '+-' + str(specificity[1]) + '\n')
output.write('\tAccuracy: ' + str(accuracy[0]) + '+-' + str(accuracy[1]) + '\n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Numero total de pixeles: ' + str(dim.sum()) + '\n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Local error: \n')
output.write('\t[TP\tFP\tFN\tTN]|[sensitivity, specificity, accuracy]\t\n')
for a, g, l, t, d in zip(confmat, ground_list, local_err, tim, dim):
output.write(str(a) + '\t' + g + '\t' + str(l) + '\t' + str(t) + ' min' + '\t' + str(d) + ' pix\n')
"""
-------------
"""
"""
Classify train images
---------------------
"""
melanoma_list = melanoma_train
ground_list = ground_train
seg, tim, dim = classify(melanoma_list, ground_list, feature, classifier, block=True)
"""
---------------------
"""
"""
Accuracy
---------
"""
confmat = confusion_matrix(seg, ground_list)
local_err = local_error(confmat)
sensitivity, specificity, accuracy = total_error(local_err)
"""
---------
"""
"""
Measure of times of execution
-----------------------------
"""
tim = np.array(tim) # sec
dim = np.array(dim)
dim = dim[0:,0] * dim[0:,1]
t_by_pix = (tim*(10**6)) / dim # microsec / pix
tim /= 60 # min
total_time = (tim/60).sum() # total hours
mean_time = tim.mean() # mean minutes
std_time = tim.std() # std minutes
"""
-----------------------------
"""
"""
Saving values
-------------
"""
files = [f.split('.')[0]+'_classified.jpg' for f in melanoma_list]
path_save = 'resultados/red3/preprocesado/train/'
for s, f in zip(seg, files):
img = Image.fromarray(s)
img.convert('L').save(path_save + f)
with open(path_save + 'Measures.txt', 'w') as output:
output.write('---------------\n')
output.write('---- RED 3 ----\n')
output.write('---------------\n\n')
output.write('Data Base: ' + cfg.melanoma_path + '\n')
output.write('Number of images: ' + str(cfg.nImage) + '\n')
output.write('Number of fields: ' + str(cfg.nCells) + '\n')
output.write('Number of images to train: ' + str(len(melanoma_train)) + '\n')
output.write('Number of image to test: ' + str(len(melanoma_test)) + '\n')
output.write('Size of Train from Train_Images: ' + str(X_train.shape) + '\n')
output.write('Size of Test from Train_Images: ' + str(X_test.shape) + '\n')
output.write('Type of segmentation: block\n\n')
output.write(classifier.__str__()+'\n\n')
output.write('Final function value: ' + str(classifier.loss_)+'\n\n')
output.write('-------------------------------------------------------------------------\n')
output.write('Time of execution: \n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Feature Extraction: \n')
output.write('\tTime: ' + str(feature_t) + ' min\n')
output.write('Neural Network Training:\n')
output.write('\tTime: ' + str(classifier_t) + ' min\n')
output.write('Segmentation by image:\n')
output.write('\tTotal: ' + str(total_time) + ' hrs\n')
output.write('\tMean: ' + str(mean_time) + '+-' + str(std_time) + ' min\n')
output.write('Segmentation by pixel:\n')
output.write('\tMean: ' + str(t_by_pix.mean()) + '+-' + str(t_by_pix.std()) + ' mircosec/pix\n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Score:\n')
output.write('\tX_train: ' + str(score_train) + '\n')
output.write('\tX_test: ' + str(score_test) + '\n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Total error\n')
output.write('\tSensitivity: ' + str(sensitivity[0]) + '+-' + str(sensitivity[1]) + '\n')
output.write('\tSpecificity: ' + str(specificity[0]) + '+-' + str(specificity[1]) + '\n')
output.write('\tAccuracy: ' + str(accuracy[0]) + '+-' + str(accuracy[1]) + '\n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Numero total de pixeles: ' + str(dim.sum()) + '\n')
output.write('-------------------------------------------------------------------------\n\n')
output.write('Local error: \n')
output.write('\t[TP\tFP\tFN\tTN]|[sensitivity, specificity, accuracy]\t\n')
for a, g, l, t, d in zip(confmat, ground_list, local_err, tim, dim):
output.write(str(a) + '\t' + g + '\t' + str(l) + '\t' + str(t) + ' min' + '\t' + str(d) + ' pix\n')
"""
-------------
"""
| mit | -516,867,449,965,981,630 | 32.132867 | 117 | 0.529232 | false |
snakeleon/YouCompleteMe-x64 | third_party/ycmd/third_party/watchdog_deps/watchdog/src/watchdog/events.py | 2 | 16163 | # coding: utf-8
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc & contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.events
:synopsis: File system events and event handlers.
:author: [email protected] (Yesudeep Mangalapilly)
:author: [email protected] (Mickaël Schoentgen)
Event Classes
-------------
.. autoclass:: FileSystemEvent
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: FileSystemMovedEvent
:members:
:show-inheritance:
.. autoclass:: FileMovedEvent
:members:
:show-inheritance:
.. autoclass:: DirMovedEvent
:members:
:show-inheritance:
.. autoclass:: FileModifiedEvent
:members:
:show-inheritance:
.. autoclass:: DirModifiedEvent
:members:
:show-inheritance:
.. autoclass:: FileCreatedEvent
:members:
:show-inheritance:
.. autoclass:: FileClosedEvent
:members:
:show-inheritance:
.. autoclass:: DirCreatedEvent
:members:
:show-inheritance:
.. autoclass:: FileDeletedEvent
:members:
:show-inheritance:
.. autoclass:: DirDeletedEvent
:members:
:show-inheritance:
Event Handler Classes
---------------------
.. autoclass:: FileSystemEventHandler
:members:
:show-inheritance:
.. autoclass:: PatternMatchingEventHandler
:members:
:show-inheritance:
.. autoclass:: RegexMatchingEventHandler
:members:
:show-inheritance:
.. autoclass:: LoggingEventHandler
:members:
:show-inheritance:
"""
import os.path
import logging
import re
from watchdog.utils.patterns import match_any_paths
EVENT_TYPE_MOVED = 'moved'
EVENT_TYPE_DELETED = 'deleted'
EVENT_TYPE_CREATED = 'created'
EVENT_TYPE_MODIFIED = 'modified'
EVENT_TYPE_CLOSED = 'closed'
class FileSystemEvent:
"""
Immutable type that represents a file system event that is triggered
when a change occurs on the monitored file system.
All FileSystemEvent objects are required to be immutable and hence
can be used as keys in dictionaries or be added to sets.
"""
event_type = None
"""The type of the event as a string."""
is_directory = False
"""True if event was emitted for a directory; False otherwise."""
is_synthetic = False
"""
True if event was synthesized; False otherwise.
These are events that weren't actually broadcast by the OS, but
are presumed to have happened based on other, actual events.
"""
def __init__(self, src_path):
self._src_path = src_path
@property
def src_path(self):
"""Source path of the file system object that triggered this event."""
return self._src_path
def __str__(self):
return self.__repr__()
def __repr__(self):
return ("<%(class_name)s: event_type=%(event_type)s, "
"src_path=%(src_path)r, "
"is_directory=%(is_directory)s>"
) % (dict(
class_name=self.__class__.__name__,
event_type=self.event_type,
src_path=self.src_path,
is_directory=self.is_directory))
# Used for comparison of events.
@property
def key(self):
return (self.event_type, self.src_path, self.is_directory)
def __eq__(self, event):
return self.key == event.key
def __ne__(self, event):
return self.key != event.key
def __hash__(self):
return hash(self.key)
class FileSystemMovedEvent(FileSystemEvent):
"""
File system event representing any kind of file system movement.
"""
event_type = EVENT_TYPE_MOVED
def __init__(self, src_path, dest_path):
super().__init__(src_path)
self._dest_path = dest_path
@property
def dest_path(self):
"""The destination path of the move event."""
return self._dest_path
# Used for hashing this as an immutable object.
@property
def key(self):
return (self.event_type, self.src_path, self.dest_path, self.is_directory)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r, "
"dest_path=%(dest_path)r, "
"is_directory=%(is_directory)s>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path,
dest_path=self.dest_path,
is_directory=self.is_directory))
# File events.
class FileDeletedEvent(FileSystemEvent):
"""File system event representing file deletion on the file system."""
event_type = EVENT_TYPE_DELETED
class FileModifiedEvent(FileSystemEvent):
"""File system event representing file modification on the file system."""
event_type = EVENT_TYPE_MODIFIED
class FileCreatedEvent(FileSystemEvent):
"""File system event representing file creation on the file system."""
event_type = EVENT_TYPE_CREATED
class FileMovedEvent(FileSystemMovedEvent):
"""File system event representing file movement on the file system."""
class FileClosedEvent(FileSystemEvent):
"""File system event representing file close on the file system."""
event_type = EVENT_TYPE_CLOSED
# Directory events.
class DirDeletedEvent(FileSystemEvent):
"""File system event representing directory deletion on the file system."""
event_type = EVENT_TYPE_DELETED
is_directory = True
class DirModifiedEvent(FileSystemEvent):
"""
File system event representing directory modification on the file system.
"""
event_type = EVENT_TYPE_MODIFIED
is_directory = True
class DirCreatedEvent(FileSystemEvent):
"""File system event representing directory creation on the file system."""
event_type = EVENT_TYPE_CREATED
is_directory = True
class DirMovedEvent(FileSystemMovedEvent):
"""File system event representing directory movement on the file system."""
is_directory = True
class FileSystemEventHandler:
"""
Base file system event handler that you can override methods from.
"""
def dispatch(self, event):
"""Dispatches events to the appropriate methods.
:param event:
The event object representing the file system event.
:type event:
:class:`FileSystemEvent`
"""
self.on_any_event(event)
{
EVENT_TYPE_CREATED: self.on_created,
EVENT_TYPE_DELETED: self.on_deleted,
EVENT_TYPE_MODIFIED: self.on_modified,
EVENT_TYPE_MOVED: self.on_moved,
EVENT_TYPE_CLOSED: self.on_closed,
}[event.event_type](event)
def on_any_event(self, event):
"""Catch-all event handler.
:param event:
The event object representing the file system event.
:type event:
:class:`FileSystemEvent`
"""
def on_moved(self, event):
"""Called when a file or a directory is moved or renamed.
:param event:
Event representing file/directory movement.
:type event:
:class:`DirMovedEvent` or :class:`FileMovedEvent`
"""
def on_created(self, event):
"""Called when a file or directory is created.
:param event:
Event representing file/directory creation.
:type event:
:class:`DirCreatedEvent` or :class:`FileCreatedEvent`
"""
def on_deleted(self, event):
"""Called when a file or directory is deleted.
:param event:
Event representing file/directory deletion.
:type event:
:class:`DirDeletedEvent` or :class:`FileDeletedEvent`
"""
def on_modified(self, event):
"""Called when a file or directory is modified.
:param event:
Event representing file/directory modification.
:type event:
:class:`DirModifiedEvent` or :class:`FileModifiedEvent`
"""
def on_closed(self, event):
"""Called when a file opened for writing is closed.
:param event:
Event representing file closing.
:type event:
:class:`FileClosedEvent`
"""
class PatternMatchingEventHandler(FileSystemEventHandler):
"""
Matches given patterns with file paths associated with occurring events.
"""
def __init__(self, patterns=None, ignore_patterns=None,
ignore_directories=False, case_sensitive=False):
super().__init__()
self._patterns = patterns
self._ignore_patterns = ignore_patterns
self._ignore_directories = ignore_directories
self._case_sensitive = case_sensitive
@property
def patterns(self):
"""
(Read-only)
Patterns to allow matching event paths.
"""
return self._patterns
@property
def ignore_patterns(self):
"""
(Read-only)
Patterns to ignore matching event paths.
"""
return self._ignore_patterns
@property
def ignore_directories(self):
"""
(Read-only)
``True`` if directories should be ignored; ``False`` otherwise.
"""
return self._ignore_directories
@property
def case_sensitive(self):
"""
(Read-only)
``True`` if path names should be matched sensitive to case; ``False``
otherwise.
"""
return self._case_sensitive
def dispatch(self, event):
"""Dispatches events to the appropriate methods.
:param event:
The event object representing the file system event.
:type event:
:class:`FileSystemEvent`
"""
if self.ignore_directories and event.is_directory:
return
paths = []
if hasattr(event, 'dest_path'):
paths.append(os.fsdecode(event.dest_path))
if event.src_path:
paths.append(os.fsdecode(event.src_path))
if match_any_paths(paths,
included_patterns=self.patterns,
excluded_patterns=self.ignore_patterns,
case_sensitive=self.case_sensitive):
super().dispatch(event)
class RegexMatchingEventHandler(FileSystemEventHandler):
"""
Matches given regexes with file paths associated with occurring events.
"""
def __init__(self, regexes=None, ignore_regexes=None,
ignore_directories=False, case_sensitive=False):
super().__init__()
if regexes is None:
regexes = [r".*"]
if ignore_regexes is None:
ignore_regexes = []
if case_sensitive:
self._regexes = [re.compile(r) for r in regexes]
self._ignore_regexes = [re.compile(r) for r in ignore_regexes]
else:
self._regexes = [re.compile(r, re.I) for r in regexes]
self._ignore_regexes = [re.compile(r, re.I) for r in ignore_regexes]
self._ignore_directories = ignore_directories
self._case_sensitive = case_sensitive
@property
def regexes(self):
"""
(Read-only)
Regexes to allow matching event paths.
"""
return self._regexes
@property
def ignore_regexes(self):
"""
(Read-only)
Regexes to ignore matching event paths.
"""
return self._ignore_regexes
@property
def ignore_directories(self):
"""
(Read-only)
``True`` if directories should be ignored; ``False`` otherwise.
"""
return self._ignore_directories
@property
def case_sensitive(self):
"""
(Read-only)
``True`` if path names should be matched sensitive to case; ``False``
otherwise.
"""
return self._case_sensitive
def dispatch(self, event):
"""Dispatches events to the appropriate methods.
:param event:
The event object representing the file system event.
:type event:
:class:`FileSystemEvent`
"""
if self.ignore_directories and event.is_directory:
return
paths = []
if hasattr(event, 'dest_path'):
paths.append(os.fsdecode(event.dest_path))
if event.src_path:
paths.append(os.fsdecode(event.src_path))
if any(r.match(p) for r in self.ignore_regexes for p in paths):
return
if any(r.match(p) for r in self.regexes for p in paths):
super().dispatch(event)
class LoggingEventHandler(FileSystemEventHandler):
"""Logs all the events captured."""
def __init__(self, logger=None):
super().__init__()
self.logger = logger or logging.root
def on_moved(self, event):
super().on_moved(event)
what = 'directory' if event.is_directory else 'file'
self.logger.info("Moved %s: from %s to %s", what, event.src_path,
event.dest_path)
def on_created(self, event):
super().on_created(event)
what = 'directory' if event.is_directory else 'file'
self.logger.info("Created %s: %s", what, event.src_path)
def on_deleted(self, event):
super().on_deleted(event)
what = 'directory' if event.is_directory else 'file'
self.logger.info("Deleted %s: %s", what, event.src_path)
def on_modified(self, event):
super().on_modified(event)
what = 'directory' if event.is_directory else 'file'
self.logger.info("Modified %s: %s", what, event.src_path)
def generate_sub_moved_events(src_dir_path, dest_dir_path):
"""Generates an event list of :class:`DirMovedEvent` and
:class:`FileMovedEvent` objects for all the files and directories within
the given moved directory that were moved along with the directory.
:param src_dir_path:
The source path of the moved directory.
:param dest_dir_path:
The destination path of the moved directory.
:returns:
An iterable of file system events of type :class:`DirMovedEvent` and
:class:`FileMovedEvent`.
"""
for root, directories, filenames in os.walk(dest_dir_path):
for directory in directories:
full_path = os.path.join(root, directory)
renamed_path = full_path.replace(dest_dir_path, src_dir_path) if src_dir_path else None
event = DirMovedEvent(renamed_path, full_path)
event.is_synthetic = True
yield event
for filename in filenames:
full_path = os.path.join(root, filename)
renamed_path = full_path.replace(dest_dir_path, src_dir_path) if src_dir_path else None
event = FileMovedEvent(renamed_path, full_path)
event.is_synthetic = True
yield event
def generate_sub_created_events(src_dir_path):
"""Generates an event list of :class:`DirCreatedEvent` and
:class:`FileCreatedEvent` objects for all the files and directories within
the given moved directory that were moved along with the directory.
:param src_dir_path:
The source path of the created directory.
:returns:
An iterable of file system events of type :class:`DirCreatedEvent` and
:class:`FileCreatedEvent`.
"""
for root, directories, filenames in os.walk(src_dir_path):
for directory in directories:
event = DirCreatedEvent(os.path.join(root, directory))
event.is_synthetic = True
yield event
for filename in filenames:
event = FileCreatedEvent(os.path.join(root, filename))
event.is_synthetic = True
yield event
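# Example usage of the handlers defined above (kept as a comment because
# importing watchdog.observers from this module would be circular; run it
# from application code instead):
#
#     import logging
#     import time
#     from watchdog.observers import Observer
#     from watchdog.events import LoggingEventHandler
#
#     logging.basicConfig(level=logging.INFO)
#     observer = Observer()
#     observer.schedule(LoggingEventHandler(), path=".", recursive=True)
#     observer.start()
#     try:
#         while True:
#             time.sleep(1)
#     finally:
#         observer.stop()
#         observer.join()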
| gpl-3.0 | -7,516,226,166,804,324,000 | 27.55477 | 99 | 0.619602 | false |
igorgai/django-custom-user | custom_user/forms.py | 1 | 3880 | """EmailUser forms."""
import django
from django import forms
from django.contrib.auth import get_user_model
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import ugettext_lazy as _
class EmailUserCreationForm(forms.ModelForm):
"""A form for creating new users.
Includes all the required fields, plus a repeated password.
"""
error_messages = {
'duplicate_email': _("A user with that email already exists."),
'password_mismatch': _("The two password fields didn't match."),
}
password1 = forms.CharField(
label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(
label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above, for verification."))
class Meta:
model = get_user_model()
fields = ('email',)
def clean_email(self):
"""Clean form email.
:return str email: cleaned email
:raise forms.ValidationError: Email is duplicated
"""
# Since EmailUser.email is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
email = self.cleaned_data["email"]
try:
get_user_model()._default_manager.get(email=email)
except get_user_model().DoesNotExist:
return email
raise forms.ValidationError(
self.error_messages['duplicate_email'],
code='duplicate_email',
)
def clean_password2(self):
"""Check that the two password entries match.
:return str password2: cleaned password2
:raise forms.ValidationError: password2 != password1
"""
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
"""Save user.
Save the provided password in hashed format.
:return custom_user.models.EmailUser: user
"""
user = super(EmailUserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
# Different password reset link in Django 1.9
if django.VERSION[:2] < (1, 9):
password_reset_link = "password"
else:
password_reset_link = "../password"
class EmailUserChangeForm(forms.ModelForm):
"""A form for updating users.
Includes all the fields on the user, but replaces the password field
with admin's password hash display field.
"""
password = ReadOnlyPasswordHashField(
label=_("Password"),
help_text=_(
"Raw passwords are not stored, so there is no way to see this "
"user's password, but you can change the password using "
"<a href=\"{0}/\">this form</a>.".format(password_reset_link)
),
)
class Meta:
model = get_user_model()
exclude = ()
def __init__(self, *args, **kwargs):
"""Init the form."""
super(EmailUserChangeForm, self).__init__(*args, **kwargs)
f = self.fields.get('user_permissions')
if f is not None:
f.queryset = f.queryset.select_related('content_type')
def clean_password(self):
"""Clean password.
Regardless of what the user provides, return the initial value.
This is done here, rather than on the field, because the
field does not have access to the initial value.
:return str password:
"""
return self.initial["password"]
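# Minimal usage sketch of EmailUserCreationForm above (the data values are
# examples; view/template wiring is left to the host project):
#
#     form = EmailUserCreationForm(data={
#         'email': '[email protected]',
#         'password1': 'a-strong-example-password',
#         'password2': 'a-strong-example-password',
#     })
#     if form.is_valid():
#         user = form.save()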
| bsd-3-clause | -1,206,752,125,892,193,000 | 28.846154 | 75 | 0.617268 | false |
quater/calico-containers | calicoctl/calico_ctl/status.py | 1 | 9262 | # Copyright (c) 2015-2016 Tigera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Usage:
calicoctl status [--runtime=<RUNTIME>]
Description:
Print current status information regarding calico-node container
and the BIRD routing daemon.
Options:
--runtime=<RUNTIME> Specify the runtime used to run the calico/node
container, either "docker" or "rkt".
[default: docker]
"""
import re
import sys
import subprocess32
from prettytable import PrettyTable
from pycalico.datastore_errors import DataStoreError
from requests import ConnectionError
from subprocess32 import Popen, PIPE
from connectors import docker_client, client
from utils import hostname, RKT_CONTAINER_RE, enforce_root
def status(arguments):
"""
Main dispatcher for status commands. Calls the corresponding helper
function.
:param arguments: A dictionary of arguments already processed through
this file's docstring with docopt
:return: None
"""
# Check runtime.
runtime = arguments.get("--runtime")
if not runtime in ["docker", "rkt"]:
print "Invalid runtime specified: '%s'" % runtime
sys.exit(1)
# Start by locating the calico-node container and querying the package
# summary file.
if runtime == "rkt":
enforce_root()
check_container_status_rkt()
else:
check_container_status_docker()
# Now query the host BGP details. If the AS number is not specified on the
# host then it must be inheriting the default.
try:
bgp_ipv4, bgp_ipv6 = client.get_host_bgp_ips(hostname)
bgp_as = client.get_host_as(hostname)
if bgp_as is None:
bgp_as = client.get_default_node_as()
bgp_as += " (inherited)"
except DataStoreError:
print "Error connecting to etcd. Ensure ETCD_ENDPOINTS or ETCD_AUTHORITY is set properly."
bgp_ipv4 = bgp_ipv6 = "unknown"
bgp_as = "unknown"
# TODO: Add additional information to the BIRD section:
# TODO: - Include AS numbers of peers
# TODO: - Include host name of peers when the peer is a calico-node
# TODO: - Include details of peers configured multiple times
print "\nIPv4 BGP status"
if bgp_ipv4:
print "IP: %s AS Number: %s" % (bgp_ipv4, bgp_as)
pprint_bird_protocols(4)
else:
print "No IPv4 address configured.\n"
print "IPv6 BGP status"
if bgp_ipv6:
print "IP: %s AS Number: %s" % (bgp_ipv6, bgp_as)
pprint_bird_protocols(6)
else:
print "No IPv6 address configured.\n"
def check_container_status_docker():
"""
Checks and prints the calico/node container status when running in Docker.
"""
try:
calico_node_info = filter(lambda container: "/calico-node" in
container["Names"],
docker_client.containers())
if len(calico_node_info) == 0:
print "calico-node container not running"
sys.exit(1)
else:
print "calico-node container is running. Status: %s" % \
calico_node_info[0]["Status"]
libraries_cmd = docker_client.exec_create("calico-node",
["sh", "-c",
"cat libraries.txt"])
libraries_out = docker_client.exec_start(libraries_cmd)
result = re.search(r"^calico\s*\((.*)\)\s*$", libraries_out,
re.MULTILINE)
if result is not None:
print "Running felix version %s" % result.group(1)
except ConnectionError:
print "Docker is not running"
sys.exit(1)
def check_container_status_rkt():
"""
Checks and prints the calico/node container status when running in rkt.
"""
list_cmd = ["sudo", "rkt", "list"]
p = Popen(list_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
containers = RKT_CONTAINER_RE.findall(stdout)
if p.returncode:
print "Unable to list rkt containers: '%s'" % stderr.strip()
sys.exit(1)
if len(containers) == 0:
print "calico-node container not running"
sys.exit(1)
else:
# Get statuses for all calico/node containers, and determine
# if any are running.
statuses = [c[2] for c in containers]
running = "running" in statuses
# If one is running, status is "running". Else, use the status of
# the first container.
status = "running" if running else statuses[0]
# Print status. If it at least one is running, this will display
# "running" status.
print "calico-node container status: %s" % status
def pprint_bird_protocols(version):
"""
Pretty print the output from the BIRD "show protocols". This parses the
existing output and lays it out in pretty printed table.
:param version: The IP version (4 or 6).
:return: None.
"""
# This needs to be run as root to access the bird data in /var/run/calico
enforce_root()
# Based on the IP version, run the appropriate BIRD command, and select
# the appropriate separator char for an IP address.
if getattr(sys, 'frozen', False):
# We're running under pyinstaller
birdcl = sys._MEIPASS + "/birdcl"
else:
birdcl = "birdcl"
try:
if version == 4:
results = subprocess32.check_output(
"echo show protocols | %s -s /var/run/calico/bird.ctl" % birdcl,
shell=True)
ip_sep = "."
else:
results = subprocess32.check_output(
"echo show protocols | %s -s /var/run/calico/bird6.ctl" % birdcl,
shell=True)
ip_sep = ":"
except subprocess32.CalledProcessError:
print "Couldn't connect to bird."
return
# Parse the output from BIRD to extract the values in the protocol status
# table. We'll further parse the name since that includes details about
# the type of peer and the peer IP address.
x = PrettyTable(["Peer address", "Peer type", "State",
"Since", "Info"])
lines = results.split("\n")
found_table = False
for line in lines:
# When BIRD displays its protocol table, it prints the bird> prompt and
# then shifts the cursor to print back over the prompt. However, this
# means that we get rogue prompts when parsing the output. For this
# processing just remove the prompt if it is present.
if line.startswith("bird>"):
line = line[5:]
# Skip blank lines.
line = line.strip()
if not line:
continue
# Split the line into columns based on whitespace separators. We split
# a maximum of 5 times because the 6th "info" column may contain a
# string that itself includes whitespace that should be maintained.
columns = re.split("\s+", line.strip(), 5)
# Loop until we find the table heading.
if columns == ["name", "proto", "table", "state", "since", "info"]:
found_table = True
continue
elif not found_table:
continue
# We expect either 5 or 6 columns depending on whether there was a
# value in the info column. Anything else is not handled, so revert
# to displaying the raw BIRD output.
if not (5 <= len(columns) <= 6):
found_table = False
break
# Parse the name, we name our BGP peers as "Mesh", "Node" or "Global"
# followed by the IP address. Extract the info so we can pretty
# print it.
combined = columns[0]
if combined.startswith("Mesh_"):
name = combined[5:].replace("_", ip_sep)
ptype = "node-to-node mesh"
elif combined.startswith("Node_"):
name = combined[5:].replace("_", ip_sep)
ptype = "node specific"
elif combined.startswith("Global_"):
name = combined[7:].replace("_", ip_sep)
ptype = "global"
else:
# This is not a BGP Peer, so do not include in the output.
continue
x.add_row([name, ptype, columns[3], columns[4],
columns[5] if len(columns) == 6 else ""])
# If we parsed the table then pretty print the table, otherwise just output
# the BIRD output directly. The first line of the BIRD output provides an
# overall BIRD status.
if found_table:
print str(x) + "\n"
else:
print results + "\n"
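# For reference, the BIRD "show protocols" table parsed above typically looks
# like the following (illustrative values):
#
#   name               proto    table    state  since       info
#   Mesh_172_17_8_102  BGP      master   up     2016-06-10  Established
#
# which the loop renders as the row:
#   172.17.8.102 | node-to-node mesh | up | 2016-06-10 | Established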
| apache-2.0 | -3,644,788,646,197,634,000 | 35.464567 | 99 | 0.603865 | false |
niwinz/Green-Mine | src/greenmine/wiki/views.py | 1 | 4981 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.template.defaultfilters import slugify
from django.shortcuts import get_object_or_404
from ..core.utils.slug import slugify_uniquely
from ..core.generic import GenericView
from ..core.decorators import login_required
from ..scrum.models import Project
from .models import WikiPage, WikiPageHistory
from .forms import WikiPageEditForm
class WikiPageView(GenericView):
menu = ['wiki']
template_path = 'wiki-page.html'
@login_required
def get(self, request, pslug, wslug):
project = get_object_or_404(Project, slug=pslug)
self.check_role(request.user, project, [
('project', 'view'),
('wiki', 'view'),
])
try:
wikipage = project.wiki_pages.get(slug=slugify(wslug))
except WikiPage.DoesNotExist:
return self.render_redirect(reverse('wiki-page-edit',
args=[project.slug, slugify(wslug)]))
context = {
'project': project,
'wikipage': wikipage,
}
return self.render_to_response(self.template_path, context)
class WikiPageEditView(GenericView):
menu = ['wiki']
template_path = 'wiki-page-edit.html'
@login_required
def get(self, request, pslug, wslug):
project = get_object_or_404(Project, slug=pslug)
self.check_role(request.user, project, [
('project', 'view'),
('wiki', ('view', 'create', 'edit')),
])
try:
wikipage = project.wiki_pages.get(slug=slugify(wslug))
except WikiPage.DoesNotExist:
wikipage = None
form = WikiPageEditForm(instance=wikipage)
context = {
'form': form,
'project': project,
}
return self.render_to_response(self.template_path, context)
@login_required
def post(self, request, pslug, wslug):
project = get_object_or_404(Project, slug=pslug)
self.check_role(request.user, project, [
('project', 'view'),
('wiki', ('view', 'create', 'edit')),
])
try:
wikipage = project.wiki_pages.get(slug=slugify(wslug))
except WikiPage.DoesNotExist:
wikipage = None
form = WikiPageEditForm(request.POST, instance=wikipage)
if not form.is_valid():
return self.render_json_errors(form.errors)
wikipage_new = form.save(commit=False)
if wikipage is not None:
old_wikipage = WikiPage.objects.get(pk=wikipage.pk)
history_entry = WikiPageHistory(
wikipage = old_wikipage,
content = old_wikipage.content,
owner = old_wikipage.owner,
created_date = old_wikipage.created_date,
)
history_entry.save()
if not wikipage_new.slug:
wikipage_new.slug = slugify_uniquely(wslug, wikipage_new.__class__)
if not wikipage_new.project_id:
wikipage_new.project = project
wikipage_new.owner = request.user
wikipage_new.save()
return self.render_json({'redirect_to': wikipage_new.get_view_url()})
class WikiPageHistoryView(GenericView):
menu = ['wiki']
template_path = 'wiki-page-history-view.html'
@login_required
def get(self, request, pslug, wslug, hpk):
project = get_object_or_404(Project, slug=pslug)
self.check_role(request.user, project, [
('project', 'view'),
('wiki', 'view'),
])
wikipage = get_object_or_404(project.wiki_pages, slug=wslug)
history_entry = get_object_or_404(wikipage.history_entries, pk=hpk)
context = {
'project': project,
'wikipage': wikipage,
'history_entry': history_entry,
}
return self.render_to_response(self.template_path, context)
class WikipageDeleteView(GenericView):
template_path = 'wiki-page-delete.html'
def get_context(self):
project = get_object_or_404(Project, slug=self.kwargs['pslug'])
self.check_role(self.request.user, project, [
('project', 'view'),
('wiki', ('view', 'delete')),
])
wikipage = get_object_or_404(project.wiki_pages, slug=self.kwargs['wslug'])
context = {
'project': project,
'wikipage': wikipage,
}
return context
@login_required
def get(self, request, **kwargs):
context = self.get_context()
return self.render_to_response(self.template_path, context)
@login_required
def post(self, request, **kwargs):
context = self.get_context()
context['wikipage'].history_entries.all().delete()
context['wikipage'].delete()
return self.render_redirect(reverse('wiki-page',
args = [context['project'].slug, 'home']))
| bsd-3-clause | 2,264,128,126,756,378,400 | 28.64881 | 83 | 0.592853 | false |
akesterson/dpath-python | tests/test_util_set.py | 1 | 1710 | import dpath.util
def test_set_existing_separator():
dict = {
"a": {
"b": 0,
},
}
dpath.util.set(dict, ';a;b', 1, separator=";")
assert(dict['a']['b'] == 1)
dict['a']['b'] = 0
dpath.util.set(dict, ['a', 'b'], 1, separator=";")
assert(dict['a']['b'] == 1)
def test_set_existing_dict():
dict = {
"a": {
"b": 0,
},
}
dpath.util.set(dict, '/a/b', 1)
assert(dict['a']['b'] == 1)
dict['a']['b'] = 0
dpath.util.set(dict, ['a', 'b'], 1)
assert(dict['a']['b'] == 1)
def test_set_existing_list():
dict = {
"a": [
0,
],
}
dpath.util.set(dict, '/a/0', 1)
assert(dict['a'][0] == 1)
dict['a'][0] = 0
dpath.util.set(dict, ['a', '0'], 1)
assert(dict['a'][0] == 1)
def test_set_filter():
def afilter(x):
if int(x) == 31:
return True
return False
dict = {
"a": {
"b": 0,
"c": 1,
"d": 31,
}
}
dpath.util.set(dict, '/a/*', 31337, afilter=afilter)
assert (dict['a']['b'] == 0)
assert (dict['a']['c'] == 1)
assert (dict['a']['d'] == 31337)
dict = {
"a": {
"b": 0,
"c": 1,
"d": 31,
}
}
dpath.util.set(dict, ['a', '*'], 31337, afilter=afilter)
assert (dict['a']['b'] == 0)
assert (dict['a']['c'] == 1)
assert (dict['a']['d'] == 31337)
def test_set_existing_path_with_separator():
dict = {
"a": {
'b/c/d': 0,
},
}
dpath.util.set(dict, ['a', 'b/c/d'], 1)
assert(len(dict['a']) == 1)
assert(dict['a']['b/c/d'] == 1)
| mit | 6,502,466,740,530,720,000 | 17.791209 | 60 | 0.391813 | false |
openlawlibrary/pygls | examples/json-extension/server/tests/unit/test_features.py | 1 | 3751 | ############################################################################
# Copyright(c) Open Law Library. All rights reserved. #
# See ThirdPartyNotices.txt in the project root for additional notices. #
# #
# Licensed under the Apache License, Version 2.0 (the "License") #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http: // www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
############################################################################
import json
from typing import Optional
import pytest
from mock import Mock
from pygls.lsp.types import (DidCloseTextDocumentParams,
DidOpenTextDocumentParams, TextDocumentIdentifier,
TextDocumentItem)
from pygls.workspace import Document, Workspace
from ...server import completions, did_close, did_open
class FakeServer():
"""We don't need real server to unit test features."""
publish_diagnostics = None
show_message = None
show_message_log = None
def __init__(self):
self.workspace = Workspace('', None)
fake_document_uri = 'file://fake_doc.txt'
fake_document_content = 'text'
fake_document = Document(fake_document_uri, fake_document_content)
server = FakeServer()
server.publish_diagnostics = Mock()
server.show_message = Mock()
server.show_message_log = Mock()
server.workspace.get_document = Mock(return_value=fake_document)
def _reset_mocks():
server.publish_diagnostics.reset_mock()
server.show_message.reset_mock()
server.show_message_log.reset_mock()
def test_completions():
completion_list = completions()
labels = [i.label for i in completion_list.items]
assert '"' in labels
assert '[' in labels
assert ']' in labels
assert '{' in labels
assert '}' in labels
def test_did_close():
_reset_mocks()
params = DidCloseTextDocumentParams(
text_document=TextDocumentIdentifier(uri=fake_document_uri))
did_close(server, params)
# Check if show message is called
server.show_message.assert_called_once()
@pytest.mark.asyncio
async def test_did_open():
_reset_mocks()
expected_msg = None
# Get expected error message
try:
json.loads(fake_document_content)
except json.JSONDecodeError as err:
expected_msg = err.msg
params = DidOpenTextDocumentParams(
text_document=TextDocumentItem(uri=fake_document_uri,
language_id='json',
version=1,
text=fake_document_content))
await did_open(server, params)
# Check publish diagnostics is called
server.publish_diagnostics.assert_called_once()
# Check publish diagnostics args message
args = server.publish_diagnostics.call_args
assert args[0][1][0].message is expected_msg
# Check other methods are called
server.show_message.assert_called_once()
server.show_message_log.assert_called_once()
| apache-2.0 | 6,875,330,818,496,659,000 | 33.1 | 79 | 0.582245 | false |
ustuehler/git-cvs | cvsgit/command/clone.py | 1 | 3887 | """Command to clone a CVS repository or module as a Git repository."""
import os.path
import shutil
from cvsgit.main import Command, Conduit
from cvsgit.i18n import _
from cvsgit.command.verify import Verify
class Clone(Command):
__doc__ = _(
"""Clone a CVS repository or module into a Git repository.
Usage: %prog [options] <repository> [<directory>]
Clones an entire CVS repository or a module into a Git repository.
The source argument <repository> must be a local path pointing at
the CVS repository root or a module directory within. The
destination argument <directory> is selected automatically, based
on the last component of the source path.
""")
def initialize_options(self):
self.repository = None
self.directory = None
self.add_option('--bare', action='store_true', help=\
_("Create a bare Git repository without work tree."))
self.add_option('--limit', type='int', metavar='COUNT', help=\
_("Stop importing after COUNT new commits."))
self.add_option('--domain', metavar='DOMAIN', help=\
_("Set the e-mail domain to use for unknown authors."))
self.add_option('--verify', action='store_true', help=\
_("Run the verify command after cloning (does not work "
"with --bare)."))
self.add_option('--no-repack', action='store_true', help=\
_("Don't run \"git repack -adF\" after cloning (so you "
"end up with an uncompressed pack file)."))
self.add_quiet_option()
self.add_verbose_option()
self.add_no_skip_latest_option()
self.add_authors_option()
self.add_stop_on_unknown_author_option()
def finalize_options(self):
if len(self.args) < 1:
self.usage_error(_('missing CVS repository path'))
elif len(self.args) == 1:
self.repository = os.path.abspath(self.args[0])
self.directory = os.path.basename(self.repository)
elif len(self.args) == 2:
self.repository, self.directory = self.args
else:
self.usage_error(_('too many arguments'))
self.finalize_authors_option()
def run(self):
if os.path.exists(self.directory):
self.fatal(_("destination path '%s' already exists") % \
self.directory)
conduit = Conduit(self.directory)
conduit.init(self.repository,
bare=self.options.bare,
domain=self.options.domain,
quiet=self.options.quiet)
try:
conduit.fetch(limit=self.options.limit,
quiet=self.options.quiet,
verbose=self.options.verbose,
flush=self.options.no_skip_latest,
authors=self.options.authors,
stop_on_unknown_author=\
self.options.stop_on_unknown_author)
git = conduit.git
if not self.options.no_repack:
git.check_command('repack', '-adF')
head_branch = git.symbolic_ref('HEAD')
if head_branch == 'refs/heads/master':
if self.options.bare:
git.check_command('branch', '-f', 'master', conduit.branch)
else:
git.check_command('reset', '-q', '--hard', conduit.branch)
except:
shutil.rmtree(self.directory)
raise
# Verify after the above rmtree, because someone likely wants
# to inspect the repository if the verification fails.
if self.options.verify:
try:
olddir = os.getcwd()
os.chdir(git.git_work_tree)
Verify().eval()
finally:
os.chdir(olddir)
| isc | 4,724,368,370,476,854,000 | 38.262626 | 79 | 0.565475 | false |
Clinical-Genomics/scout | scout/server/blueprints/dashboard/controllers.py | 1 | 12437 | import logging
from flask import flash, redirect, request, url_for
from flask_login import current_user
from scout.server.extensions import store
from scout.server.utils import user_institutes
from .forms import DashboardFilterForm
LOG = logging.getLogger(__name__)
def institute_select_choices():
"""Return a list of tuples with institute _id, institute names to populate a form select.
Returns:
institute_choices(list). Example:[(cust000, "Institute 1"), ..]
"""
institute_choices = [("All", "All institutes")] if current_user.is_admin else []
# Collect only institutes available to the user
institute_objs = user_institutes(store, current_user)
for inst in institute_objs:
institute_choices.append((inst["_id"], inst["display_name"]))
return institute_choices
def dashboard_form(request_form=None):
"""Retrieve data to be displayed on dashboard page"""
form = DashboardFilterForm(request_form)
form.search_institute.choices = institute_select_choices()
return form
def compose_slice_query(search_type, search_term):
"""Extract a filter query given a form search term and search type
Args:
search_type(str): example -> "case:"
search_term(str): example -> "17867"
Returns:
slice_query(str): example case:17867
"""
slice_query = None
if search_term and search_type:
slice_query = "".join([search_type, search_term])
return slice_query
def populate_dashboard_data(request):
"""Prepate data display object to be returned to the view
Args:
request(flask.rquest): request received by the view
Returns:
data(dict): data to be diplayed in the template
"""
data = {"dashboard_form": dashboard_form(request.form)}
if request.method == "GET":
return data
allowed_insititutes = [inst[0] for inst in institute_select_choices()]
institute_id = request.form.get(
"search_institute", allowed_insititutes[0]
) # GET request has no institute, select the first option of the select
if institute_id and institute_id not in allowed_insititutes:
flash("Your user is not allowed to visualize this data", "warning")
redirect(url_for("dashboard.index"))
if institute_id == "All":
institute_id = None
slice_query = compose_slice_query(
request.form.get("search_type"), request.form.get("search_term")
)
get_dashboard_info(store, data, institute_id, slice_query)
return data
def get_dashboard_info(adapter, data={}, institute_id=None, slice_query=None):
"""Append case data stats to data display object
Args:
adapter(adapter.MongoAdapter)
data(dict): data dictionary to be passed to template
institute_id(str): institute id
slice_query(str): example case:55888
Returns:
        data(dict): data to be displayed in the template
"""
# If a slice_query is present then numbers in "General statistics" and "Case statistics" will
# reflect the data available for the query
general_sliced_info = get_general_case_info(
adapter, institute_id=institute_id, slice_query=slice_query
)
total_sliced_cases = general_sliced_info["total_cases"]
data["total_cases"] = total_sliced_cases
if total_sliced_cases == 0:
return data
data["pedigree"] = []
for ped_info in general_sliced_info["pedigree"].values():
ped_info["percent"] = ped_info["count"] / total_sliced_cases
data["pedigree"].append(ped_info)
data["cases"] = get_case_groups(
adapter, total_sliced_cases, institute_id=institute_id, slice_query=slice_query
)
data["analysis_types"] = get_analysis_types(
adapter, total_sliced_cases, institute_id=institute_id, slice_query=slice_query
)
overview = [
{
"title": "Phenotype terms",
"count": general_sliced_info["phenotype_cases"],
"percent": general_sliced_info["phenotype_cases"] / total_sliced_cases,
},
{
"title": "Causative variants",
"count": general_sliced_info["causative_cases"],
"percent": general_sliced_info["causative_cases"] / total_sliced_cases,
},
{
"title": "Pinned variants",
"count": general_sliced_info["pinned_cases"],
"percent": general_sliced_info["pinned_cases"] / total_sliced_cases,
},
{
"title": "Cohort tag",
"count": general_sliced_info["cohort_cases"],
"percent": general_sliced_info["cohort_cases"] / total_sliced_cases,
},
]
# Data from "Variant statistics tab" is not filtered by slice_query and numbers will
# reflect verified variants in all available cases for an institute
general_info = get_general_case_info(adapter, institute_id=institute_id)
total_cases = general_info["total_cases"]
sliced_case_ids = general_sliced_info["case_ids"]
verified_query = {
"verb": {"$in": ["validate", "sanger"]},
}
    if institute_id:  # filter by institute if the user wishes so
verified_query["institute"] = institute_id
# Case level information
sliced_validation_cases = set()
sliced_validated_cases = set()
# Variant level information
validated_tp = set()
validated_fp = set()
var_valid_orders = (
0 # use this counter to count 'True Positive', 'False positive' and 'Not validated' vars
)
validate_events = adapter.event_collection.find(verified_query)
for validate_event in list(validate_events):
case_id = validate_event.get("case")
var_obj = adapter.variant(case_id=case_id, document_id=validate_event["variant_id"])
if var_obj: # Don't take into account variants which have been removed from db
var_valid_orders += 1
if case_id in sliced_case_ids:
sliced_validation_cases.add(
case_id
                )  # add to the set. Can't add same id twice since it's a set
validation = var_obj.get("validation")
if validation and validation in ["True positive", "False positive"]:
if case_id in sliced_case_ids:
sliced_validated_cases.add(case_id)
if validation == "True positive":
validated_tp.add(var_obj["_id"])
elif validation == "False positive":
validated_fp.add(var_obj["_id"])
n_validation_cases = len(sliced_validation_cases)
n_validated_cases = len(sliced_validated_cases)
# append
overview.append(
{
"title": "Validation ordered",
"count": n_validation_cases,
"percent": n_validation_cases / total_sliced_cases,
}
)
overview.append(
{
"title": "Validated cases (TP + FP)",
"count": n_validated_cases,
"percent": n_validated_cases / total_sliced_cases,
}
)
data["overview"] = overview
variants = []
nr_validated = len(validated_tp) + len(validated_fp)
variants.append({"title": "Validation ordered", "count": var_valid_orders, "percent": 1})
# taking into account that var_valid_orders might be 0:
percent_validated_tp = 0
percent_validated_fp = 0
if var_valid_orders:
percent_validated_tp = len(validated_tp) / var_valid_orders
percent_validated_fp = len(validated_fp) / var_valid_orders
variants.append(
{
"title": "Validated True Positive",
"count": len(validated_tp),
"percent": percent_validated_tp,
}
)
variants.append(
{
"title": "Validated False Positive",
"count": len(validated_fp),
"percent": percent_validated_fp,
}
)
data["variants"] = variants
return data
def get_general_case_info(adapter, institute_id=None, slice_query=None):
"""Return general information about cases
Args:
adapter(adapter.MongoAdapter)
institute_id(str)
slice_query(str): Query to filter cases to obtain statistics for.
Returns:
general(dict)
"""
general = {}
# Potentially sensitive slice queries are assumed allowed if we have got this far
name_query = slice_query
cases = adapter.cases(owner=institute_id, name_query=name_query)
phenotype_cases = 0
causative_cases = 0
pinned_cases = 0
cohort_cases = 0
pedigree = {
1: {"title": "Single", "count": 0},
2: {"title": "Duo", "count": 0},
3: {"title": "Trio", "count": 0},
"many": {"title": "Many", "count": 0},
}
case_ids = set()
total_cases = 0
for total_cases, case in enumerate(cases, 1):
case_ids.add(case["_id"])
if case.get("phenotype_terms"):
phenotype_cases += 1
if case.get("causatives"):
causative_cases += 1
if case.get("suspects"):
pinned_cases += 1
if case.get("cohorts"):
cohort_cases += 1
nr_individuals = len(case.get("individuals", []))
if nr_individuals == 0:
continue
if nr_individuals > 3:
pedigree["many"]["count"] += 1
else:
pedigree[nr_individuals]["count"] += 1
general["total_cases"] = total_cases
general["phenotype_cases"] = phenotype_cases
general["causative_cases"] = causative_cases
general["pinned_cases"] = pinned_cases
general["cohort_cases"] = cohort_cases
general["pedigree"] = pedigree
general["case_ids"] = case_ids
return general
def get_case_groups(adapter, total_cases, institute_id=None, slice_query=None):
"""Return the information about case groups
Args:
store(adapter.MongoAdapter)
total_cases(int): Total number of cases
slice_query(str): Query to filter cases to obtain statistics for.
Returns:
cases(dict):
"""
# Create a group with all cases in the database
cases = [{"status": "all", "count": total_cases, "percent": 1}]
# Group the cases based on their status
pipeline = []
group = {"$group": {"_id": "$status", "count": {"$sum": 1}}}
subquery = {}
if institute_id and slice_query:
subquery = adapter.cases(owner=institute_id, name_query=slice_query, yield_query=True)
elif institute_id:
subquery = adapter.cases(owner=institute_id, yield_query=True)
elif slice_query:
subquery = adapter.cases(name_query=slice_query, yield_query=True)
query = {"$match": subquery} if subquery else {}
if query:
pipeline.append(query)
pipeline.append(group)
res = adapter.case_collection.aggregate(pipeline)
for status_group in res:
cases.append(
{
"status": status_group["_id"],
"count": status_group["count"],
"percent": status_group["count"] / total_cases,
}
)
return cases
def get_analysis_types(adapter, total_cases, institute_id=None, slice_query=None):
"""Return information about analysis types.
Group cases based on analysis type for the individuals.
Args:
adapter(adapter.MongoAdapter)
total_cases(int): Total number of cases
institute_id(str)
slice_query(str): Query to filter cases to obtain statistics for.
Returns:
analysis_types array of hashes with name: analysis_type(str), count: count(int)
"""
# Group cases based on analysis type of the individuals
query = {}
subquery = {}
if institute_id and slice_query:
subquery = adapter.cases(owner=institute_id, name_query=slice_query, yield_query=True)
elif institute_id:
subquery = adapter.cases(owner=institute_id, yield_query=True)
elif slice_query:
subquery = adapter.cases(name_query=slice_query, yield_query=True)
query = {"$match": subquery}
pipeline = []
if query:
pipeline.append(query)
pipeline.append({"$unwind": "$individuals"})
pipeline.append({"$group": {"_id": "$individuals.analysis_type", "count": {"$sum": 1}}})
analysis_query = adapter.case_collection.aggregate(pipeline)
analysis_types = [{"name": group["_id"], "count": group["count"]} for group in analysis_query]
return analysis_types
| bsd-3-clause | -1,192,204,301,845,954,000 | 31.643045 | 98 | 0.617512 | false |
HenningOp/ACS | GIS-Tools/Q_prePro1/ACS_preProcessing1/acs_preProcessing1_dialog.py | 1 | 1828 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
ACS_preProcessing1Dialog
A QGIS plugin
Transfer stream data
-------------------
begin : 2017-07-26
git sha : $Format:%H$
copyright : (C) 2017 by Laura Bienstein
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
from PyQt4 import QtGui, uic
FORM_CLASS, _ = uic.loadUiType(os.path.join(
os.path.dirname(__file__), 'acs_preProcessing1_dialog_base.ui'))
class ACS_preProcessing1Dialog(QtGui.QDialog, FORM_CLASS):
def __init__(self, parent=None):
"""Constructor."""
super(ACS_preProcessing1Dialog, self).__init__(parent)
# Set up the user interface from Designer.
# After setupUI you can access any designer object by doing
# self.<objectname>, and you can use autoconnect slots - see
# http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html
# #widgets-and-dialogs-with-auto-connect
self.setupUi(self)
| gpl-3.0 | 7,588,516,538,750,990,000 | 43.585366 | 77 | 0.429978 | false |
cgarrard/osgeopy-code | Chapter3/chapter3.py | 1 | 17766 | # I use the print function in this code, even though I don't in the book text,
# so that you can run it as a regular script and still get the output. You only
# get output without using print if you're using the interactive window.
# Set this variable to your osgeopy-data directory so that the following
# examples will work without editing. We'll use the os.path.join() function
# to combine this directory and the filenames to make a complete path. Of
# course, you can type the full path to the file for each example if you'd
# prefer.
import os
import sys
data_dir = r'D:\osgeopy-data'
# data_dir =
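# For example, later snippets combine the directory and file names like this
# (illustrative only; the actual file used varies from example to example):
# fn = os.path.join(data_dir, 'global', 'ne_50m_populated_places.shp')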
########################## 3.2 Introduction to OGR ##########################
# Import the module.
from osgeo import ogr
# Get the GeoJSON driver.
driver = ogr.GetDriverByName('GeoJSON')
print(driver)
# It's not case sensitive, so this also works.
driver = ogr.GetDriverByName('geojson')
print(driver)
# This does not work because the real name is 'Esri shapefile'.
driver = ogr.GetDriverByName('shapefile')
print(driver)
# Print out a list of drivers.
import ospybook as pb
pb.print_drivers()
########################### 3.3 Reading vector data #########################
#################### 3.3.1 Accessing specific features ######################
# Open the data source for the examples.
fn = os.path.join(data_dir, 'global', 'ne_50m_populated_places.shp')
ds = ogr.Open(fn, 0)
if ds is None:
sys.exit('Could not open {0}.'.format(fn))
lyr = ds.GetLayer(0)
# Get the total number of features and the last one.
num_features = lyr.GetFeatureCount()
last_feature = lyr.GetFeature(num_features - 1)
print(last_feature.NAME)
# Test what happens if you try to loop through a layer twice. The second
# loop should not print anything. (This is actually why in later examples we
# reopen the data source and get the layer for each little code snippet.
# If you ran them all at once without doing that, they wouldn't work.)
fn = os.path.join(data_dir, 'Washington', 'large_cities.geojson')
ds = ogr.Open(fn, 0)
lyr = ds.GetLayer(0)
print('First loop')
for feat in lyr:
print(feat.GetField('Name'), feat.GetField('Population'))
print('Second loop')
for feat in lyr:
pt = feat.geometry()
print(feat.GetField('Name'), pt.GetX(), pt.GetY())
# # But it will if you reset reading first.
ds = ogr.Open(fn, 0)
lyr = ds.GetLayer(0)
print('First loop')
for feat in lyr:
print(feat.GetField('Name'), feat.GetField('Population'))
print('Second loop')
lyr.ResetReading() # This is the important line.
for feat in lyr:
pt = feat.geometry()
print(feat.GetField('Name'), pt.GetX(), pt.GetY())
######################### 3.3.2 Viewing your data ###########################
# Print name and population attributes.
import ospybook as pb
fn = os.path.join(data_dir, 'global', 'ne_50m_populated_places.shp')
pb.print_attributes(fn, 3, ['NAME', 'POP_MAX'])
# Turn off geometries but skip field list parameters that come before the
# "geom" one.
pb.print_attributes(fn, 3, geom=False)
# If you want to see what happens without the "geom" keyword in the last
# example, try this:
pb.print_attributes(fn, 3, False)
# Import VectorPlotter and change directories
from ospybook.vectorplotter import VectorPlotter
os.chdir(os.path.join(data_dir, 'global'))
# Plot populated places on top of countries from an interactive session.
vp = VectorPlotter(True)
vp.plot('ne_50m_admin_0_countries.shp', fill=False)
vp.plot('ne_50m_populated_places.shp', 'bo')
# Plot populated places on top of countries non-interactively. Delete the vp
# variable if you tried the interactive one first.
del vp
vp = VectorPlotter(False)
vp.plot('ne_50m_admin_0_countries.shp', fill=False)
vp.plot('ne_50m_populated_places.shp', 'bo')
vp.draw()
######################### 3.4 Getting metadata ##############################
# Open the large_cities data source.
fn = os.path.join(data_dir, 'Washington', 'large_cities.geojson')
ds = ogr.Open(fn)
if ds is None:
sys.exit('Could not open {0}.'.format(fn))
# Get the spatial extent.
lyr = ds.GetLayer(0)
extent = lyr.GetExtent()
print(extent)
print('Upper left corner: {}, {}'.format(extent[0], extent[3]))
print('Lower right corner: {}, {}'.format(extent[1], extent[2]))
# Get geometry type
print(lyr.GetGeomType())
print(lyr.GetGeomType() == ogr.wkbPoint)
print(lyr.GetGeomType() == ogr.wkbPolygon)
# Get geometry type as human-readable string.
feat = lyr.GetFeature(0)
print(feat.geometry().GetGeometryName())
# Get spatial reference system. The output is also in listing3_2.py.
print(lyr.GetSpatialRef())
# Get field names and types
for field in lyr.schema:
print(field.name, field.GetTypeName())
######################## 3.5 Writing vector data ############################
# Check the results from listing 3.2.
os.chdir(os.path.join(data_dir, 'global'))
vp = VectorPlotter(True)
vp.plot('ne_50m_admin_0_countries.shp', fill=False)
vp.plot('capital_cities.shp', 'bo')
###################### 3.5.1 Creating new data sources ######################
# Get the same driver as an existing data source
ds = ogr.Open(os.path.join(data_dir, 'global', 'ne_50m_admin_0_countries.shp'))
driver = ds.GetDriver()
print(driver.name)
# Get a driver by name
json_driver = ogr.GetDriverByName('GeoJSON')
print(json_driver.name)
# Create a GeoJSON file
json_fn = os.path.join(data_dir, 'output', 'example.geojson')
json_ds = json_driver.CreateDataSource(json_fn)
if json_ds is None:
sys.exit('Could not create {0}.'.format(json_fn))
# Create a SpatiaLite database. This will fail if your version of OGR wasn't
# built with SpatiaLite support.
driver = ogr.GetDriverByName('SQLite')
ds = driver.CreateDataSource(os.path.join(data_dir, 'output', 'earth.sqlite'),
['SPATIALITE=yes'])
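# Not part of the original example: without OGR exceptions enabled, a failed
# CreateDataSource() call comes back as None, so the same kind of guard used
# for the other data sources in this chapter makes a missing-SpatiaLite build
# obvious instead of failing later.
if ds is None:
    print('Could not create the SpatiaLite data source; skipping this example.')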
# Delete a data source if it exists instead of trying to overwrite it.
if os.path.exists(json_fn):
json_driver.DeleteDataSource(json_fn)
json_ds = json_driver.CreateDataSource(json_fn)
if json_ds is None:
sys.exit('Could not create {0}.'.format(json_fn))
############################ Using OGR exceptions ###########################
# Try running this when output/africa.geojson already exists in order to raise
# the error.
# Turn on OGR exceptions. Try commenting this out to see how the behavior
# changes.
ogr.UseExceptions()
fn = os.path.join(data_dir, 'output', 'africa.geojson')
driver = ogr.GetDriverByName('GeoJSON')
print('Doing some preliminary analysis...')
try:
# This will fail if the file already exists
ds = driver.CreateDataSource(fn)
lyr = ds.CreateLayer('layer')
# Do more stuff, like fields and save data
except RuntimeError as e:
# This runs if the data source already exists and an error was raised
print(e)
print('Doing some more analysis...')
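# Not in the original listing: the bindings also provide the inverse call if
# you want to return to the default return-code behavior after this example.
# ogr.DontUseExceptions()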
######################### 3.5.2 Creating new fields #########################
# Create a shapefile that changes the precision for an attribute field. Also
# notice how the name field is not kept at 6 characters. If it were, many of
# the names would be truncated, but you shouldn't see that if you look at the
# attributes for the file created here. For the example, we'll create x and y
# fields for the Washington large_cities dataset.
# Much of this code is not in the book text.
# Open the input shapefile.
in_fn = os.path.join(data_dir, 'Washington', 'large_cities.shp')
in_ds = ogr.Open(in_fn, 0)
if in_ds is None:
sys.exit('Could not open {0}.'.format(in_fn))
in_lyr = in_ds.GetLayer(0)
# Create the output shapefile.
driver = in_ds.GetDriver()
out_fn = os.path.join(data_dir, 'output', 'precision_test.shp')
if os.path.exists(out_fn):
driver.DeleteDataSource(out_fn)
out_ds = driver.CreateDataSource(out_fn)
if out_ds is None:
sys.exit('Could not create {0}.'.format(out_fn))
# Create the shapefile layer.
out_lyr = out_ds.CreateLayer('precision_test',
in_lyr.GetSpatialRef(),
ogr.wkbPoint)
# Set the name field to have a width of 6, but it will be expanded.
name_fld = ogr.FieldDefn('Name', ogr.OFTString)
name_fld.SetWidth(6)
out_lyr.CreateField(name_fld)
# Create two attribute fields using default precision.
coord_fld = ogr.FieldDefn('X_default', ogr.OFTReal)
out_lyr.CreateField(coord_fld)
coord_fld.SetName('Y_default')
out_lyr.CreateField(coord_fld)
# Create two attribute fields using a smaller precision. THIS IS THE
# EXAMPLE IN THE BOOK.
coord_fld = ogr.FieldDefn('X_short', ogr.OFTReal)
coord_fld.SetWidth(8)
coord_fld.SetPrecision(3)
out_lyr.CreateField(coord_fld)
coord_fld.SetName('Y_short')
out_lyr.CreateField(coord_fld)
# Copy data. After doing this, look at the attributes for your new shapefile
# and see the difference between the default and short fields.
out_feat = ogr.Feature(out_lyr.GetLayerDefn())
for in_feat in in_lyr:
pt = in_feat.geometry()
name = in_feat.GetField('NAME')
out_feat.SetGeometry(in_feat.geometry())
out_feat.SetField('Name', name)
out_feat.SetField('X_default', pt.GetX())
out_feat.SetField('Y_default', pt.GetY())
out_feat.SetField('X_short', pt.GetX())
out_feat.SetField('Y_short', pt.GetY())
out_lyr.CreateFeature(out_feat)
######################## 3.6 Updating existing data #########################
# Set things up for the following examples.
original_fn = os.path.join(data_dir, 'Washington', 'large_cities.shp')
new_fn = os.path.join(data_dir, 'output', 'large_cities2.shp')
# First make a copy of a shapefile so you have something to test things on.
pb.copy_datasource(original_fn, new_fn)
# Open the copied shapefile for writing.
ds = ogr.Open(new_fn, 1)
if ds is None:
sys.exit('Could not open {0}.'.format(new_fn))
lyr = ds.GetLayer(0)
# Take a look at the attributes before you change anything.
print('Original attributes')
pb.print_attributes(lyr, geom=False)
#################### 3.6.1 Changing the layer definition ####################
# Change the name of the "Name" attribute field by creating a new field
# definition and using it to alter the existing field.
i = lyr.GetLayerDefn().GetFieldIndex('Name')
fld_defn = ogr.FieldDefn('City_Name', ogr.OFTString)
lyr.AlterFieldDefn(i, fld_defn, ogr.ALTER_NAME_FLAG)
# Change the name of the POINT_X field to X_coord and the precision to 4
# decimal places. Need to make sure that the width is big enough or things
# don't work right, so set it to the original width to be safe.
lyr_defn = lyr.GetLayerDefn()
i = lyr_defn.GetFieldIndex('X')
width = lyr_defn.GetFieldDefn(i).GetWidth()
fld_defn = ogr.FieldDefn('X_coord', ogr.OFTReal)
fld_defn.SetWidth(width)
fld_defn.SetPrecision(4)
flag = ogr.ALTER_NAME_FLAG + ogr.ALTER_WIDTH_PRECISION_FLAG
lyr.AlterFieldDefn(i, fld_defn, flag)
# A slightly different method to change the name of the POINT_X field to
# X_coord and the precision to 4 decimal places. Copy the original field
# definition and use it. This uses the built-in Python copy module. If you
# do not copy the FieldDefn and instead try to use the original, you will
# probably get weird results.
import copy
lyr_defn = lyr.GetLayerDefn()
i = lyr_defn.GetFieldIndex('X')
fld_defn = copy.copy(lyr_defn.GetFieldDefn(i))
fld_defn.SetName('X_coord')
fld_defn.SetPrecision(4)
flag = ogr.ALTER_NAME_FLAG + ogr.ALTER_WIDTH_PRECISION_FLAG
lyr.AlterFieldDefn(i, fld_defn, flag)
# Take a look at the attributes now. The precision won't be affected yet,
# but the field names should be changed and there should be a blank ID
# field.
print('\nNew field names and empty ID field')
pb.print_attributes(lyr, geom=False)
############### 3.6.2 Adding, updating, and deleting features ###############
# Add a unique ID to each feature.
lyr.ResetReading()
lyr.CreateField(ogr.FieldDefn('ID', ogr.OFTInteger))
n = 1
for feat in lyr:
feat.SetField('ID', n)
lyr.SetFeature(feat)
n += 1
print('\nID has been added and precision has taken effect')
pb.print_attributes(lyr, geom=False)
# Delete Seattle. Notice that although it doesn't print the record for Seattle,
# it still thinks there are 14 features.
lyr.ResetReading()
for feat in lyr:
if feat.GetField('City_Name') == 'Seattle':
lyr.DeleteFeature(feat.GetFID())
print('\nSeattle deleted')
pb.print_attributes(lyr, geom=False)
# Pack the database in order to get rid of that ghost feature, and recompute
# the spatial extent.
ds.ExecuteSQL('REPACK ' + lyr.GetName())
ds.ExecuteSQL('RECOMPUTE EXTENT ON ' + lyr.GetName())
print('\nDatabase packed')
pb.print_attributes(lyr, geom=False)
################## Bonus examples for creating new layers ###################
# Just set stuff up for the examples. Note that these examples do not copy any
# attribute data into the json files.
shp_fn = os.path.join(data_dir, 'global', 'ne_50m_admin_0_countries.shp')
shp_ds = ogr.Open(shp_fn, 0)
if shp_ds is None:
sys.exit('Could not open {0}'.format(shp_fn))
shp_lyr = shp_ds.GetLayer(0)
json_driver = ogr.GetDriverByName('GeoJSON')
######################### Example 1: Default precision
# Create a json file using the default precision. Use a text editor to comapare
# the file created here with the files created in the next two examples.
# Create the data source.
json_fn = os.path.join(data_dir, 'output', 'africa-default.geojson')
if os.path.exists(json_fn):
json_driver.DeleteDataSource(json_fn)
json_ds = json_driver.CreateDataSource(json_fn)
if json_ds is None:
sys.exit('Could not create {0}.'.format(json_fn))
# Create the layer with no options.
json_lyr = json_ds.CreateLayer('africa',
shp_lyr.GetSpatialRef(),
ogr.wkbMultiPolygon)
# Write some data.
shp_lyr.ResetReading()
json_feat = ogr.Feature(json_lyr.GetLayerDefn())
for shp_feat in shp_lyr:
if shp_feat.GetField('CONTINENT') == 'Africa':
json_feat.SetGeometry(shp_feat.geometry())
json_lyr.CreateFeature(json_feat)
del json_ds
######################### Example 2: 6-digit precision
# Create a json file using the optional COORDINATE_PRECISION creation option
# and set the precision to 6 digits.
json_fn = os.path.join(data_dir, 'output', 'africa-6digit.geojson')
if os.path.exists(json_fn):
json_driver.DeleteDataSource(json_fn)
json_ds = json_driver.CreateDataSource(json_fn)
if json_ds is None:
sys.exit('Could not create {0}.'.format(json_fn))
lyr_options = ['COORDINATE_PRECISION=6']
json_lyr = json_ds.CreateLayer('africa',
shp_lyr.GetSpatialRef(),
ogr.wkbMultiPolygon,
lyr_options)
# Write some data.
shp_lyr.ResetReading()
json_feat = ogr.Feature(json_lyr.GetLayerDefn())
for shp_feat in shp_lyr:
if shp_feat.GetField('CONTINENT') == 'Africa':
json_feat.SetGeometry(shp_feat.geometry())
json_lyr.CreateFeature(json_feat)
del json_ds
########################### Example 3: Bounding box
# Create a json file using the optional COORDINATE_PRECISION and WRITE_BBOX
# creation options.
json_fn = os.path.join(data_dir, 'output', 'africa-bbox.geojson')
if os.path.exists(json_fn):
json_driver.DeleteDataSource(json_fn)
json_ds = json_driver.CreateDataSource(json_fn)
if json_ds is None:
sys.exit('Could not create {0}.'.format(json_fn))
lyr_options = ['COORDINATE_PRECISION=6', 'WRITE_BBOX=YES']
json_lyr = json_ds.CreateLayer('africa',
shp_lyr.GetSpatialRef(),
ogr.wkbMultiPolygon,
lyr_options)
# Write some data.
shp_lyr.ResetReading()
json_feat = ogr.Feature(json_lyr.GetLayerDefn())
for shp_feat in shp_lyr:
if shp_feat.GetField('CONTINENT') == 'Africa':
json_feat.SetGeometry(shp_feat.geometry())
json_lyr.CreateFeature(json_feat)
del json_ds
################# Bonus examples for creating new features ##################
shp_fn = os.path.join(data_dir, 'global', 'ne_50m_admin_0_countries.shp')
json_fn = os.path.join(data_dir, 'output', 'africa.geojson')
# Open input.
shp_ds = ogr.Open(shp_fn, 0)
if shp_ds is None:
sys.exit('Could not open {0}'.format(shp_fn))
shp_lyr = shp_ds.GetLayer(0)
# Create output file.
json_driver = ogr.GetDriverByName('GeoJSON')
if os.path.exists(json_fn):
json_driver.DeleteDataSource(json_fn)
json_ds = json_driver.CreateDataSource(json_fn)
if json_ds is None:
sys.exit('Could not create {0}.'.format(json_fn))
lyr_options = ['COORDINATE_PRECISION=6']
json_lyr = json_ds.CreateLayer('africa',
shp_lyr.GetSpatialRef(),
ogr.wkbMultiPolygon,
lyr_options)
# Add attribute fields to the layer.
name_fld = ogr.FieldDefn('Name', ogr.OFTString)
json_lyr.CreateField(name_fld)
pop_fld = ogr.FieldDefn('Population', ogr.OFTInteger)
json_lyr.CreateField(pop_fld)
# For the fun of it, let's also add an integer field but "mistakenly" put a
# string in it to see what happens.
test_fld = ogr.FieldDefn('Test_field', ogr.OFTInteger)
json_lyr.CreateField(test_fld)
# Get the feature definition.
feat_defn = json_lyr.GetLayerDefn()
# Create an output feature to use repeatedly.
json_feat = ogr.Feature(feat_defn)
for shp_feat in shp_lyr:
if shp_feat.GetField('CONTINENT') == 'Africa':
# Copy attribute values if in Africa.
name = shp_feat.GetField('NAME')
pop = shp_feat.GetField('POP_EST')
json_feat.SetField('Name', name)
json_feat.SetField('Population', pop)
# Put a string in an integer field.
json_feat.SetField('Test_field', name)
# Copy the geometry.
json_feat.SetGeometry(shp_feat.geometry())
# Insert the data into the GeoJSON file.
json_lyr.CreateFeature(json_feat)
del json_ds, shp_ds
| mit | 3,989,574,481,614,995,500 | 32.969407 | 79 | 0.675335 | false |
Hitachi-Data-Systems/org-chart-builder | openpyxl/worksheet/dimensions.py | 1 | 6673 | from __future__ import absolute_import
# Copyright (c) 2010-2014 openpyxl
from openpyxl.compat import safe_string
from openpyxl.cell import get_column_interval, column_index_from_string
from openpyxl.descriptors import Integer, Float, Bool, Strict, String, Alias
from openpyxl.compat import OrderedDict
class Dimension(Strict):
"""Information about the display properties of a row or column."""
__fields__ = ('index',
'hidden',
'outlineLevel',
'collapsed',)
index = Integer()
hidden = Bool()
outlineLevel = Integer(allow_none=True)
outline_level = Alias('outlineLevel')
collapsed = Bool()
_style = None
def __init__(self, index, hidden, outlineLevel,
collapsed, worksheet, visible=True, style=None):
self.index = index
self.hidden = hidden
self.outlineLevel = outlineLevel
self.collapsed = collapsed
self.worksheet = worksheet
if style is not None: # accept pointer when parsing
self._style = int(style)
def __iter__(self):
for key in self.__fields__[1:]:
value = getattr(self, key)
if value:
yield key, safe_string(value)
@property
def visible(self):
return not self.hidden
@property
def style(self):
if self._style is not None:
return self.worksheet.parent.shared_styles[self._style]
@style.setter
def style(self, style):
if style is not None:
self._style = self.worksheet.parent.shared_styles.add(style)
class RowDimension(Dimension):
"""Information about the display properties of a row."""
__fields__ = Dimension.__fields__ + ('ht', 'customFormat', 'customHeight', 's')
r = Alias('index')
ht = Float(allow_none=True)
height = Alias('ht')
thickBot = Bool()
thickTop = Bool()
def __init__(self,
index=0,
ht=None,
customHeight=None, # do not write
s=None,
customFormat=None, # do not write
hidden=False,
outlineLevel=0,
outline_level=None,
collapsed=False,
visible=None,
height=None,
r=None,
spans=None,
thickBot=None,
thickTop=None,
worksheet=None):
if r is not None:
index = r
if height is not None:
ht = height
self.ht = ht
if visible is not None:
hidden = not visible
if outline_level is not None:
            outlineLevel = outline_level
super(RowDimension, self).__init__(index, hidden, outlineLevel,
collapsed, worksheet, style=s)
@property
def customFormat(self):
"""Always true if there is a style for the row"""
return self._style is not None
@property
def customHeight(self):
"""Always true if there is a height for the row"""
return self.ht is not None
@property
def s(self):
return self.style
@s.setter
def s(self, style):
self.style = style
def __iter__(self):
for key in self.__fields__[1:]:
if key == 's':
value = getattr(self, '_style')
else:
value = getattr(self, key)
if value:
yield key, safe_string(value)
class ColumnDimension(Dimension):
"""Information about the display properties of a column."""
width = Float(allow_none=True)
bestFit = Bool()
auto_size = Alias('bestFit')
index = String()
min = Integer(allow_none=True)
max = Integer(allow_none=True)
collapsed = Bool()
__fields__ = Dimension.__fields__ + ('width', 'bestFit', 'customWidth', 'style',
'min', 'max')
def __init__(self,
index='A',
width=None,
bestFit=False,
hidden=False,
outlineLevel=0,
outline_level=None,
collapsed=False,
style=None,
min=None,
max=None,
customWidth=False, # do not write
visible=None,
auto_size=None,
worksheet=None):
self.width = width
self.min = min
self.max = max
if visible is not None:
hidden = not visible
if auto_size is not None:
bestFit = auto_size
self.bestFit = bestFit
if outline_level is not None:
outlineLevel = outline_level
self.collapsed = collapsed
super(ColumnDimension, self).__init__(index, hidden, outlineLevel,
collapsed, worksheet, style=style)
@property
def customWidth(self):
"""Always true if there is a width for the column"""
return self.width is not None
def __iter__(self):
for key in self.__fields__[1:]:
if key == 'style':
value = getattr(self, '_style')
else:
value = getattr(self, key)
if value:
yield key, safe_string(value)
#@property
# def col_label(self):
# return get_column_letter(self.index)
class DimensionHolder(OrderedDict):
"hold (row|column)dimensions and allow operations over them"
def __init__(self, direction, *args, **kwargs):
self.direction = direction
super(DimensionHolder, self).__init__(*args, **kwargs)
def group(self, start, end=None, outline_level=1, hidden=False):
"""allow grouping a range of consecutive columns together
:param start: first column to be grouped (mandatory)
:param end: last column to be grouped (optional, default to start)
:param outline_level: outline level
:param hidden: should the group be hidden on workbook open or not
"""
if end is None:
end = start
if start in self:
new_dim = self.pop(start)
else:
new_dim = ColumnDimension(index=start)
work_sequence = get_column_interval(start, end)
for column_letter in work_sequence:
if column_letter in self:
del self[column_letter]
new_dim.min, new_dim.max = map(column_index_from_string, (start, end))
new_dim.outline_level = outline_level
new_dim.hidden = hidden
self[start] = new_dim
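# Illustrative usage, assuming a worksheet whose column_dimensions is a
# DimensionHolder (as in openpyxl worksheets); not part of the original module:
# ws.column_dimensions.group('A', 'D', outline_level=1, hidden=True)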
| apache-2.0 | 5,091,376,572,354,088,000 | 30.476415 | 84 | 0.542335 | false |
timberline-secondary/hackerspace | src/portfolios/views.py | 1 | 8722 | import os
from django.urls import reverse
from django.http import Http404
from django.contrib import messages
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.messages.views import SuccessMessageMixin
from django.shortcuts import render, get_object_or_404, redirect
from django.views.generic import ListView, CreateView, DetailView, UpdateView, DeleteView
from comments.models import Document
from portfolios.models import Portfolio, Artwork
from tenant.views import allow_non_public_view, AllowNonPublicViewMixin
from portfolios.forms import PortfolioForm, ArtworkForm
class PortfolioList(AllowNonPublicViewMixin, LoginRequiredMixin, ListView):
model = Portfolio
template_name = 'portfolios/list.html'
class PortfolioCreate(AllowNonPublicViewMixin, LoginRequiredMixin, CreateView):
model = Portfolio
form_class = PortfolioForm
template_name = 'portfolios/form.html'
def form_valid(self, form):
data = form.save(commit=False)
data.user = self.request.user
data.save()
return super(PortfolioCreate, self).form_valid(form)
def get_context_data(self, **kwargs):
# Call the base implementation first to get a context
context = super(PortfolioCreate, self).get_context_data(**kwargs)
context['heading'] = "Create " + self.request.user.get_username() + "'s Portfolio"
context['submit_btn_value'] = "Create"
return context
class PortfolioDetail(AllowNonPublicViewMixin, LoginRequiredMixin, DetailView):
model = Portfolio
def dispatch(self, *args, **kwargs):
# only allow admins or the users to see their own portfolios, unless they are shared
portfolio = get_object_or_404(Portfolio, pk=self.kwargs.get('pk'))
if portfolio.listed_locally or portfolio.user == self.request.user or self.request.user.is_staff:
return super(PortfolioDetail, self).dispatch(*args, **kwargs)
else:
raise Http404("Sorry, this portfolio isn't shared!")
@allow_non_public_view
@login_required
def detail(request, pk=None):
if pk is None:
pk = request.user.id
user = get_object_or_404(User, id=pk)
p, created = Portfolio.objects.get_or_create(user=user)
# only allow admins or the users to see their own portfolios, unless they are shared
if request.user.is_staff or p.pk == request.user.id or p.listed_locally:
context = {
"p": p,
}
return render(request, 'portfolios/detail.html', context)
else:
raise Http404("Sorry, this portfolio isn't shared!")
def public_list(request):
public_portfolios = Portfolio.objects.all().filter(listed_publicly=True)
return render(request, 'portfolios/public_list.html', {"portfolios": public_portfolios})
def public(request, uuid):
p = get_object_or_404(Portfolio, uuid=uuid)
return render(request, 'portfolios/public.html', {"p": p})
@allow_non_public_view
@login_required
def edit(request, pk=None):
# portfolio pk is portfolio.user.id
if pk is None:
pk = request.user.id
user = get_object_or_404(User, id=pk)
p = get_object_or_404(Portfolio, user=user)
# if user submitted the Portfolio form to make changes:
form = PortfolioForm(request.POST or None, instance=p)
if form.is_valid():
form.save()
messages.success(request, "Portfolio updated.")
# only allow admins or the users to edit their own portfolios
if request.user.is_staff or request.user == p.user:
context = {
"p": p,
"form": form,
}
return render(request, 'portfolios/edit.html', context)
else:
raise Http404("Sorry, this portfolio isn't yours!")
######################################
#
# ARTWORK VIEWS
#
######################################
class ArtworkCreate(AllowNonPublicViewMixin, LoginRequiredMixin, SuccessMessageMixin, CreateView):
model = Artwork
form_class = ArtworkForm
template_name = 'portfolios/art_form.html'
success_message = "The art was added to the Portfolio"
def get_success_url(self):
return reverse('portfolios:edit', kwargs={'pk': self.object.portfolio.pk})
def form_valid(self, form):
data = form.save(commit=False)
data.portfolio = get_object_or_404(Portfolio, pk=self.kwargs.get('pk'))
data.save()
return super(ArtworkCreate, self).form_valid(form)
def get_context_data(self, **kwargs):
context = super(ArtworkCreate, self).get_context_data(**kwargs)
portfolio = get_object_or_404(Portfolio, pk=self.kwargs.get('pk'))
context['heading'] = "Add Art to " + portfolio.user.get_username() + "'s Portfolio"
context['submit_btn_value'] = "Create"
context['portfolio'] = portfolio
return context
def dispatch(self, *args, **kwargs):
portfolio = get_object_or_404(Portfolio, pk=self.kwargs.get('pk'))
# only allow the user or staff to edit
if portfolio.user == self.request.user or self.request.user.is_staff:
return super(ArtworkCreate, self).dispatch(*args, **kwargs)
else:
raise Http404("Sorry, this isn't your portfolio!")
class ArtworkUpdate(AllowNonPublicViewMixin, LoginRequiredMixin, SuccessMessageMixin, UpdateView):
model = Artwork
form_class = ArtworkForm
template_name = 'portfolios/art_form.html'
success_message = "Art updated!"
def get_success_url(self):
return reverse('portfolios:edit', kwargs={'pk': self.object.portfolio.pk})
def get_context_data(self, **kwargs):
# Call the base implementation first to get a context
context = super(ArtworkUpdate, self).get_context_data(**kwargs)
context['heading'] = "Edit " + self.object.portfolio.user.get_username() + "'s Portfolio Art"
context['submit_btn_value'] = "Update"
context['portfolio'] = self.object.portfolio
return context
def dispatch(self, *args, **kwargs):
art = get_object_or_404(Artwork, pk=self.kwargs.get('pk'))
# only allow the user or staff to edit
if art.portfolio.user == self.request.user or self.request.user.is_staff:
return super(ArtworkUpdate, self).dispatch(*args, **kwargs)
else:
raise Http404("Sorry, this isn't your art!")
class ArtworkDelete(AllowNonPublicViewMixin, LoginRequiredMixin, DeleteView):
model = Artwork
def get_success_url(self):
return reverse('portfolios:edit', kwargs={'pk': self.object.portfolio.pk})
# @login_required
# def art_detail(request, pk):
# art = get_object_or_404(Artwork, pk=pk)
# # only allow admins or the users to view
# if request.user.is_staff or art.portfolio.user == request.user:
# context = {
# "art": art,
# }
# return render(request, 'portfolios/art_detail.html', context)
# else:
# raise Http404("Sorry, this isn't your art!")
def is_acceptable_image_type(filename):
# Get extension from filename to determine filetype...very hacky...
# TODO use MIMETYPES
name, ext = os.path.splitext(filename)
img_ext_list = [".png", ".gif", ".jpg"]
return ext in img_ext_list
def is_acceptable_vid_type(filename):
# Get extension from filename to determine filetype...very hacky...
name, ext = os.path.splitext(filename)
vid_ext_list = [".ogg", ".avi", ".mp4", ".mkv", ".webm", ".ogv"]
return ext in vid_ext_list
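# A sketch of the mimetypes-based check hinted at by the TODO above (shown for
# illustration only and not wired into the views; is_image_by_mimetype is a
# hypothetical helper name):
# import mimetypes
# def is_image_by_mimetype(filename):
#     mtype = mimetypes.guess_type(filename)[0]
#     return mtype is not None and mtype.startswith('image/')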
@allow_non_public_view
@login_required
def art_add(request, doc_id):
doc = get_object_or_404(Document, id=doc_id)
doc_user = doc.comment.user
if request.user.is_staff or doc_user == request.user:
filename = os.path.basename(doc.docfile.name)
if is_acceptable_image_type(filename):
image_file = doc.docfile
video_file = None
elif is_acceptable_vid_type(filename):
image_file = None
video_file = doc.docfile
else:
raise Http404("Unsupported image or video format. See your teacher if"
" you think this format should be supported.")
portfolio, created = Portfolio.objects.get_or_create(user=doc_user)
Artwork.create(
title=os.path.splitext(filename)[0],
image_file=image_file,
video_file=video_file,
portfolio=portfolio,
date=doc.comment.timestamp.date(),
)
return redirect('portfolios:detail', pk=portfolio.pk)
else:
raise Http404("I don't think you're supposed to be here....")
| gpl-3.0 | -2,756,887,367,504,752,600 | 35.957627 | 105 | 0.658794 | false |
bzamecnik/sms-tools | tests/sprModel_test.py | 1 | 1710 | import math
import numpy as np
from scipy.signal import get_window
from smst.utils.math import rmse
from smst.utils import audio
from smst.models import spr
from .common import sound_path
# TODO: the test needs fixing after the model is fixed
def test_reconstruct_sound():
fs, x = audio.read_wav(sound_path("sax-phrase-short.wav"))
window_size, fft_size, hop_size = 2001, 2048, 128
window = get_window('hamming', window_size)
# fix the random seed for reproducibility
np.random.seed(42)
xtfreq, xtmag, xtphase, x_residual = spr.from_audio(
x, fs, window, fft_size, hop_size,
t=-80, maxnSines=100, minSineDur=.01, freqDevOffset=20, freqDevSlope=0.01)
x_reconstructed, x_sine = spr.to_audio(xtfreq, xtmag, xtphase, x_residual, 512, hop_size, fs)
assert 138746 == len(x)
assert len(x) == len(x_residual)
expected_frame_count = int(math.ceil(float(len(x)) / hop_size))
assert expected_frame_count == len(xtfreq)
assert expected_frame_count == len(xtmag)
assert expected_frame_count == len(xtphase)
assert xtfreq.shape[1] <= 100
# statistics of the model for regression testing without explicitly storing the whole data
assert np.allclose(799.3384358567838, xtfreq.mean())
assert np.allclose(-24.080251067421795, xtmag.mean())
assert np.allclose(1.0900513921895467, xtphase.mean())
# TODO: this is completely off, it should be equal to len(x)!
assert 1083 * 128 == len(x_reconstructed)
assert 1083 * 128 == len(x_sine)
assert np.allclose(2.1079553110776107e-17, rmse(x[:len(x_reconstructed)], x_reconstructed))
assert np.allclose(0.0043912712540510645, rmse(x[:len(x_reconstructed)], x_sine))
| agpl-3.0 | -4,352,714,113,328,351,000 | 34.625 | 97 | 0.70117 | false |
hojel/epubia | markdown2pdf.py | 1 | 3489 | # -*- coding: utf-8 -*-
import markdown
import ho.pisa as pisa
import StringIO
import os
import re
from Cheetah.Template import Template
from tempfile import NamedTemporaryFile
debug = False
def markdown2pdf(text, pdffile, cssfile='xhtml2pdf.css', src_dir='.',
fontfile='arial.ttf', skipTo1st=False):
global debug
md = markdown.Markdown(extensions=['meta','footnotes'])
html = md.convert(text)
# post-process unofficial markup
# 1) <p>*</p> --> <p class="blankpara"> </p>
# 2) quotation mark
html = html.replace('<p>*</p>', '<p class="blankpara"> </p>')
html = re.sub(u'β ?', "“", html)
html = html.replace(u'β',"”")
html = re.sub(u"β ?", "‘", html)
html = html.replace(u"β","’")
if debug:
open('test.html','w').write(html.encode('utf-8'))
htmline = []
#-- Cover & Title Page
cover_file = None
title = None
author = None
cif = None
if 'cover_url' in md.Meta:
cover_url = md.Meta['cover_url'][0]
if cover_url.startswith('http://'):
import urllib
cif = NamedTemporaryFile(delete=False)
cif.write( urllib.urlopen(cover_url).read() )
cif.close()
cover_file = cif.name
else:
cover_file = cover_url
if cover_url.startswith('file://'):
cover_file = cover_url[7:]
if 'title' in md.Meta:
title = md.Meta['title'][0].replace(', ','<br />')
if 'author' in md.Meta:
author = md.Meta['author'][0].replace(', ','<br />')
cover_tmpl = open(os.path.join('template','pdf','coverpage.html'), 'r').read().decode('utf-8')
coverpg_htm = str( Template(cover_tmpl, searchList=[ {'cover_url':cover_file,'title':title,'author':author} ]) )
htmline.append( unicode(coverpg_htm,'utf-8') )
#-- Body
# correct image path
for url in re.compile('<img [^>]*src="(.*?)"').findall(html):
if url.startswith('http://') or os.path.isabs(url):
pass
else:
html = html.replace(url, os.path.normpath(src_dir+'/'+url))
if skipTo1st:
html = html[ html.find('<h1'): ]
html = html.replace('<h1 />','<h1></h1>')
htmline.append(html)
#-- PDF generation
css_tmpl = open(os.path.join('template','pdf',cssfile), 'r').read().decode('utf-8')
target_css = str( Template(css_tmpl, searchList=[ {'font':'fonts/'+fontfile} ]) )
fp = file(pdffile,'wb')
pdf = pisa.pisaDocument(
StringIO.StringIO('\n'.join(htmline).encode('utf-8')),
fp,
#path=src_dir, # not working!
#link_callback=fetch_resources,
default_css=target_css,
#xhtml=True,
encoding='utf-8')
fp.close()
if cif and os.path.exists(cif.name):
os.remove(cif.name)
#if debug and not pdf.err:
# pisa.startViewer(pdffile)
# suppress ho.pisa loggin message
import logging
class PisaNullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger("ho.pisa").addHandler(PisaNullHandler())
if __name__ == "__main__":
debug = True
import os, sys
outfile = os.path.splitext(sys.argv[1])[0] + ".pdf"
text = unicode(open(sys.argv[1],'r'),'utf-8')[1:]
markdown2pdf(text, outfile, fontfile='SeoulHangang.ttf')
# vim:sw=4:ts=4:et
| mit | -3,050,844,844,239,022,000 | 34.642105 | 116 | 0.554726 | false |