repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
shipci/boto
refs/heads/develop
tests/unit/dynamodb/test_batch.py
136
#!/usr/bin/env python # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # from tests.unit import unittest from boto.dynamodb.batch import Batch from boto.dynamodb.table import Table from boto.dynamodb.layer2 import Layer2 from boto.dynamodb.batch import BatchList DESCRIBE_TABLE_1 = { 'Table': { 'CreationDateTime': 1349910554.478, 'ItemCount': 1, 'KeySchema': {'HashKeyElement': {'AttributeName': u'foo', 'AttributeType': u'S'}}, 'ProvisionedThroughput': {'ReadCapacityUnits': 10, 'WriteCapacityUnits': 10}, 'TableName': 'testtable', 'TableSizeBytes': 54, 'TableStatus': 'ACTIVE'} } DESCRIBE_TABLE_2 = { 'Table': { 'CreationDateTime': 1349910554.478, 'ItemCount': 1, 'KeySchema': {'HashKeyElement': {'AttributeName': u'baz', 'AttributeType': u'S'}, 'RangeKeyElement': {'AttributeName': 'myrange', 'AttributeType': 'N'}}, 'ProvisionedThroughput': {'ReadCapacityUnits': 10, 'WriteCapacityUnits': 10}, 'TableName': 'testtable2', 'TableSizeBytes': 54, 'TableStatus': 'ACTIVE'} } class TestBatchObjects(unittest.TestCase): maxDiff = None def setUp(self): self.layer2 = Layer2('access_key', 'secret_key') self.table = Table(self.layer2, DESCRIBE_TABLE_1) self.table2 = Table(self.layer2, DESCRIBE_TABLE_2) def test_batch_to_dict(self): b = Batch(self.table, ['k1', 'k2'], attributes_to_get=['foo'], consistent_read=True) self.assertDictEqual( b.to_dict(), {'AttributesToGet': ['foo'], 'Keys': [{'HashKeyElement': {'S': 'k1'}}, {'HashKeyElement': {'S': 'k2'}}], 'ConsistentRead': True} ) def test_batch_consistent_read_defaults_to_false(self): b = Batch(self.table, ['k1']) self.assertDictEqual( b.to_dict(), {'Keys': [{'HashKeyElement': {'S': 'k1'}}], 'ConsistentRead': False} ) def test_batch_list_consistent_read(self): b = BatchList(self.layer2) b.add_batch(self.table, ['k1'], ['foo'], consistent_read=True) b.add_batch(self.table2, [('k2', 54)], ['bar'], consistent_read=False) self.assertDictEqual( b.to_dict(), {'testtable': {'AttributesToGet': ['foo'], 'Keys': [{'HashKeyElement': {'S': 'k1'}}], 'ConsistentRead': True}, 'testtable2': {'AttributesToGet': ['bar'], 'Keys': [{'HashKeyElement': {'S': 'k2'}, 'RangeKeyElement': {'N': '54'}}], 'ConsistentRead': False}}) if __name__ == '__main__': unittest.main()
rebearteta/social-ideation
refs/heads/master
app/migrations/0004_socialnetworkapp_last_real_time_update_sig.py
2
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('app', '0003_auto_20150817_1530'), ] operations = [ migrations.AddField( model_name='socialnetworkapp', name='last_real_time_update_sig', field=models.CharField(max_length=100, null=True, editable=False), ), ]
infobloxopen/neutron
refs/heads/master
neutron/db/external_net_db.py
23
# Copyright (c) 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.orm import exc from sqlalchemy.sql import expression as expr from neutron.api.v2 import attributes from neutron.common import constants as l3_constants from neutron.common import exceptions as n_exc from neutron.db import db_base_plugin_v2 from neutron.db import model_base from neutron.db import models_v2 from neutron.extensions import external_net from neutron import manager from neutron.plugins.common import constants as service_constants DEVICE_OWNER_ROUTER_GW = l3_constants.DEVICE_OWNER_ROUTER_GW class ExternalNetwork(model_base.BASEV2): network_id = sa.Column(sa.String(36), sa.ForeignKey('networks.id', ondelete="CASCADE"), primary_key=True) # Add a relationship to the Network model in order to instruct # SQLAlchemy to eagerly load this association network = orm.relationship( models_v2.Network, backref=orm.backref("external", lazy='joined', uselist=False, cascade='delete')) class External_net_db_mixin(object): """Mixin class to add external network methods to db_base_plugin_v2.""" def _network_model_hook(self, context, original_model, query): query = query.outerjoin(ExternalNetwork, (original_model.id == ExternalNetwork.network_id)) return query def _network_filter_hook(self, context, original_model, conditions): if conditions is not None and not hasattr(conditions, '__iter__'): conditions = (conditions, ) # Apply the external network filter only in non-admin and non-advsvc # context if self.model_query_scope(context, original_model): conditions = expr.or_(ExternalNetwork.network_id != expr.null(), *conditions) return conditions def _network_result_filter_hook(self, query, filters): vals = filters and filters.get(external_net.EXTERNAL, []) if not vals: return query if vals[0]: return query.filter((ExternalNetwork.network_id != expr.null())) return query.filter((ExternalNetwork.network_id == expr.null())) # TODO(salvatore-orlando): Perform this operation without explicitly # referring to db_base_plugin_v2, as plugins that do not extend from it # might exist in the future db_base_plugin_v2.NeutronDbPluginV2.register_model_query_hook( models_v2.Network, "external_net", '_network_model_hook', '_network_filter_hook', '_network_result_filter_hook') def _network_is_external(self, context, net_id): try: context.session.query(ExternalNetwork).filter_by( network_id=net_id).one() return True except exc.NoResultFound: return False def _extend_network_dict_l3(self, network_res, network_db): # Comparing with None for converting uuid into bool network_res[external_net.EXTERNAL] = network_db.external is not None return network_res # Register dict extend functions for networks db_base_plugin_v2.NeutronDbPluginV2.register_dict_extend_funcs( attributes.NETWORKS, ['_extend_network_dict_l3']) def _process_l3_create(self, context, net_data, req_data): external = req_data.get(external_net.EXTERNAL) external_set = attributes.is_attr_set(external) if not external_set: return if external: # expects to be called within a plugin's session context.session.add(ExternalNetwork(network_id=net_data['id'])) net_data[external_net.EXTERNAL] = external def _process_l3_update(self, context, net_data, req_data): new_value = req_data.get(external_net.EXTERNAL) net_id = net_data['id'] if not attributes.is_attr_set(new_value): return if net_data.get(external_net.EXTERNAL) == new_value: return if new_value: context.session.add(ExternalNetwork(network_id=net_id)) net_data[external_net.EXTERNAL] = True else: # must make sure we do not have any external gateway ports # (and thus, possible floating IPs) on this network before # allow it to be update to external=False port = context.session.query(models_v2.Port).filter_by( device_owner=DEVICE_OWNER_ROUTER_GW, network_id=net_data['id']).first() if port: raise external_net.ExternalNetworkInUse(net_id=net_id) context.session.query(ExternalNetwork).filter_by( network_id=net_id).delete() net_data[external_net.EXTERNAL] = False def _process_l3_delete(self, context, network_id): l3plugin = manager.NeutronManager.get_service_plugins().get( service_constants.L3_ROUTER_NAT) if l3plugin: l3plugin.delete_disassociated_floatingips(context, network_id) def _filter_nets_l3(self, context, nets, filters): vals = filters and filters.get(external_net.EXTERNAL, []) if not vals: return nets ext_nets = set(en['network_id'] for en in context.session.query(ExternalNetwork)) if vals[0]: return [n for n in nets if n['id'] in ext_nets] else: return [n for n in nets if n['id'] not in ext_nets] def get_external_network_id(self, context): nets = self.get_networks(context, {external_net.EXTERNAL: [True]}) if len(nets) > 1: raise n_exc.TooManyExternalNetworks() else: return nets[0]['id'] if nets else None
maropu/spark
refs/heads/master
examples/src/main/python/mllib/correlations_example.py
27
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import numpy as np from pyspark import SparkContext # $example on$ from pyspark.mllib.stat import Statistics # $example off$ if __name__ == "__main__": sc = SparkContext(appName="CorrelationsExample") # SparkContext # $example on$ seriesX = sc.parallelize([1.0, 2.0, 3.0, 3.0, 5.0]) # a series # seriesY must have the same number of partitions and cardinality as seriesX seriesY = sc.parallelize([11.0, 22.0, 33.0, 33.0, 555.0]) # Compute the correlation using Pearson's method. Enter "spearman" for Spearman's method. # If a method is not specified, Pearson's method will be used by default. print("Correlation is: " + str(Statistics.corr(seriesX, seriesY, method="pearson"))) data = sc.parallelize( [np.array([1.0, 10.0, 100.0]), np.array([2.0, 20.0, 200.0]), np.array([5.0, 33.0, 366.0])] ) # an RDD of Vectors # calculate the correlation matrix using Pearson's method. Use "spearman" for Spearman's method. # If a method is not specified, Pearson's method will be used by default. print(Statistics.corr(data, method="pearson")) # $example off$ sc.stop()
rettigs/rob421-applied-robotics
refs/heads/master
combined_robot/app.py
1
#!/usr/bin/env python from __future__ import division import cv import cv2 import video from common import draw_str, RectSelector from mosse import MOSSE # Constants LAUNCH = 0 CARRIAGE = 1 class App: def __init__(self, video_src, robotq, appq, launchspeed, swatted): self.cap = video.create_capture(video_src) _, self.frame = self.cap.read() cv2.namedWindow('frame') self.row = 0 self.bounceshot = 0 cv2.createTrackbar('row', 'frame', 0, 2, self.onrow) cv2.createTrackbar('speed', 'frame', 0, 512, self.onspeed) cv2.createTrackbar('bounceshot', 'frame', 0, 1, self.onbounceshot) cv2.imshow('frame', self.frame) self.rect_sel = RectSelector('frame', self.onrect) self.trackers = [] self.robotq = robotq self.appq = appq self.launchspeed = launchspeed self.swatted = swatted def nothing(*arg): pass def onrow(self, row): '''When the row is changed, update the speed.''' self.row = row if self.bounceshot: if row == 0: speed = 160 elif row == 1: speed = 165 elif row == 2: speed = 170 else: if row == 0: speed = 230 elif row == 1: speed = 235 elif row == 2: speed = 240 cv2.setTrackbarPos('speed', 'frame', speed) def onspeed(self, speed): '''When the speed is changed, send it to the robot.''' self.robotq.put((0, speed)) def onbounceshot(self, bounceshot): '''When we toggle bounce shots, update the speed.''' self.bounceshot = bounceshot self.onrow(self.row) def onrect(self, rect): frame_gray = cv2.cvtColor(self.frame, cv2.COLOR_BGR2GRAY) tracker = MOSSE(frame_gray, rect) self.trackers = [tracker] def drawcrosshairs(self, img, width, height, color=(0, 255, 255), thickness=1): p0 = int(width // 2), 0 p1 = int(width // 2), int(height) cv2.line(img, p0, p1, color, thickness) p0 = int(width // 2) - int(width // 10), int(height // 2) p1 = int(width // 2) + int(width // 10), int(height // 2) cv2.line(img, p0, p1, color, thickness) def run(self): direction = 0 while True: ret, self.frame = self.cap.read() self.frame = cv2.flip(self.frame, -1) if not ret: break frame_gray = cv2.cvtColor(self.frame, cv2.COLOR_BGR2GRAY) for tracker in self.trackers: tracker.update(frame_gray) vis = self.frame.copy() width = self.cap.get(cv.CV_CAP_PROP_FRAME_WIDTH) height = self.cap.get(cv.CV_CAP_PROP_FRAME_HEIGHT) if len(self.trackers) > 0: x, _ = self.trackers[0].draw_state(vis) x = int(x) # Make the robot move toward the object if x < width // 2: if direction >= 0: print "Going left" self.robotq.put((1, 100000, 1)) direction = -1 elif x > width // 2: if direction <= 0: print "Going right" self.robotq.put((1, 100000, 0)) direction = 1 else: print "Cup targeting complete" self.robotq.put((1, 0, 0)) direction = 0 elif direction != 0: self.robotq.put((1, 0, 0)) direction = 0 self.drawcrosshairs(vis, width, height) self.rect_sel.draw(vis) draw_str(vis, (5, 15), "Launch speed: {}".format(self.launchspeed.value)) draw_str(vis, (5, 30), "Swatted: {}".format(self.swatted.value)) cv2.imshow('frame', vis) ch = cv2.waitKey(10) if ch == 27: break if ch == ord('d'): print "Manually going right" self.robotq.put((1, 50, 0)) if ch == ord('a'): print "Manually going left" self.robotq.put((1, 50, 1)) if ch == ord(' '): print "Shooting" self.robotq.put('shoot') if ch == ord('s'): print "Swatting" self.robotq.put('swat') if ch == ord('c'): self.trackers = [] cv2.destroyAllWindows() self.robotq.put('exit')
misli/softwarecollections
refs/heads/master
softwarecollections/tests.py
24123
from django.test import TestCase # Create your tests here.
catmaid/CATMAID
refs/heads/master
django/applications/catmaid/management/commands/catmaid_get_auth_token.py
2
# -*- coding: utf-8 -*- import getpass import logging from django.contrib.auth import authenticate from rest_framework.authtoken.models import Token from django.core.management.base import BaseCommand, CommandError logger = logging.getLogger(__name__) class Command(BaseCommand): help = "Get an API token for a given user." def add_arguments(self, parser): parser.add_argument( 'username', default=None, nargs='?', const=None, help='Username of existing account to get API token for. If not given, user will be given prompt.' ) parser.add_argument( '--password', '-p', nargs='?', default=None, const=None, help='Password of existing account to get API token for. If not given, user will be given secure prompt.' ) def handle(self, *args, **options): # full names for user input functions needed for mocking in unit tests username = options.get('username') or input('Enter CATMAID username: ') password = options.get('password') or getpass.getpass('Enter CATMAID password: ') user = authenticate(username=username, password=password) if user is None: raise CommandError('Incorrect credentials.') if not user.is_active: raise CommandError('User account is disabled.') token, created = Token.objects.get_or_create(user=user) message = '{} API token for user {}\n\tToken {}'.format( 'Created new' if created else 'Got existing', username, token.key ) self.stdout.write(self.style.SUCCESS(message))
jrper/fluidity
refs/heads/master
python/vtktools.py
4
#!/usr/bin/env python import math import sys import numpy import vtk # All returned arrays are cast into either numpy or numarray arrays arr=numpy.array class vtu: """Unstructured grid object to deal with VTK unstructured grids.""" def __init__(self, filename = None): """Creates a vtu object by reading the specified file.""" if filename is None: self.ugrid = vtk.vtkUnstructuredGrid() else: self.gridreader = None if filename[-4:] == ".vtu": self.gridreader=vtk.vtkXMLUnstructuredGridReader() elif filename[-5:] == ".pvtu": self.gridreader=vtk.vtkXMLPUnstructuredGridReader() else: raise Exception("ERROR: don't recognise file extension" + filename) self.gridreader.SetFileName(filename) self.gridreader.Update() self.ugrid=self.gridreader.GetOutput() if self.ugrid.GetNumberOfPoints() + self.ugrid.GetNumberOfCells() == 0: raise Exception("ERROR: No points or cells found after loading vtu " + filename) self.filename=filename def GetScalarField(self, name): """Returns an array with the values of the specified scalar field.""" try: pointdata=self.ugrid.GetPointData() vtkdata=pointdata.GetScalars(name) vtkdata.GetNumberOfTuples() except: try: celldata=self.ugrid.GetCellData() vtkdata=celldata.GetScalars(name) vtkdata.GetNumberOfTuples() except: raise Exception("ERROR: couldn't find point or cell scalar field data with name "+name+" in file "+self.filename+".") return arr([vtkdata.GetTuple1(i) for i in range(vtkdata.GetNumberOfTuples())]) def GetScalarRange(self, name): """Returns the range (min, max) of the specified scalar field.""" try: pointdata=self.ugrid.GetPointData() vtkdata=pointdata.GetScalars(name) vtkdata.GetRange() except: try: celldata=self.ugrid.GetCellData() vtkdata=celldata.GetScalars(name) vtkdata.GetRange() except: raise Exception("ERROR: couldn't find point or cell scalar field data with name "+name+" in file "+self.filename+".") return vtkdata.GetRange() def GetVectorField(self, name): """Returns an array with the values of the specified vector field.""" try: pointdata=self.ugrid.GetPointData() vtkdata=pointdata.GetScalars(name) vtkdata.GetNumberOfTuples() except: try: celldata=self.ugrid.GetCellData() vtkdata=celldata.GetScalars(name) vtkdata.GetNumberOfTuples() except: raise Exception("ERROR: couldn't find point or cell vector field data with name "+name+" in file "+self.filename+".") return arr([vtkdata.GetTuple3(i) for i in range(vtkdata.GetNumberOfTuples())]) def GetVectorNorm(self, name): """Return the field with the norm of the specified vector field.""" v = self.GetVectorField(name) n = [] try: from scipy.linalg import norm except ImportError: def norm(v): r = 0.0 for x in v: r = r + x**2 r = math.sqrt(r) return r for node in range(self.ugrid.GetNumberOfPoints()): n.append(norm(v[node])) return arr(n) def GetField(self,name): """Returns an array with the values of the specified field.""" try: pointdata=self.ugrid.GetPointData() vtkdata=pointdata.GetArray(name) vtkdata.GetNumberOfTuples() except: try: celldata=self.ugrid.GetCellData() vtkdata=celldata.GetArray(name) vtkdata.GetNumberOfTuples() except: raise Exception("ERROR: couldn't find point or cell field data with name "+name+" in file "+self.filename+".") nc=vtkdata.GetNumberOfComponents() nt=vtkdata.GetNumberOfTuples() array=arr([vtkdata.GetValue(i) for i in range(nc*nt)]) if nc==9: return array.reshape(nt,3,3) elif nc==4: return array.reshape(nt,2,2) else: return array.reshape(nt,nc) def GetFieldRank(self, name): """ Returns the rank of the supplied field. """ try: pointdata=self.ugrid.GetPointData() vtkdata=pointdata.GetArray(name) vtkdata.GetNumberOfTuples() except: try: celldata=self.ugrid.GetCellData() vtkdata=celldata.GetArray(name) vtkdata.GetNumberOfTuples() except: raise Exception("ERROR: couldn't find point or cell field data with name "+name+" in file "+self.filename+".") comps = vtkdata.GetNumberOfComponents() if comps == 1: return 0 elif comps in [2, 3]: return 1 elif comps in [4, 9]: return 2 else: raise Exception("Field rank > 2 encountered") def Write(self, filename=[]): """Writes the grid to a vtu file. If no filename is specified it will use the name of the file originally read in, thus overwriting it! """ if filename==[]: filename=self.filename if filename is None: raise Exception("No file supplied") if filename.endswith('pvtu'): gridwriter=vtk.vtkXMLPUnstructuredGridWriter() else: gridwriter=vtk.vtkXMLUnstructuredGridWriter() gridwriter.SetFileName(filename) if vtk.vtkVersion.GetVTKMajorVersion() <= 5: gridwriter.SetInput(self.ugrid) else: gridwriter.SetInputData(self.ugrid) gridwriter.Write() def AddScalarField(self, name, array): """Adds a scalar field with the specified name using the values from the array.""" data = vtk.vtkDoubleArray() data.SetNumberOfValues(len(array)) data.SetName(name) for i in range(len(array)): data.SetValue(i, array[i]) if len(array) == self.ugrid.GetNumberOfPoints(): pointdata=self.ugrid.GetPointData() pointdata.AddArray(data) pointdata.SetActiveScalars(name) elif len(array) == self.ugrid.GetNumberOfCells(): celldata=self.ugrid.GetCellData() celldata.AddArray(data) celldata.SetActiveScalars(name) else: raise Exception("Length neither number of nodes nor number of cells") def AddVectorField(self, name, array): """Adds a vector field with the specified name using the values from the array.""" n=array.size data = vtk.vtkDoubleArray() data.SetNumberOfComponents(array.shape[1]) data.SetNumberOfValues(n) data.SetName(name) for i in range(n): data.SetValue(i, array.reshape(n)[i]) if array.shape[0]==self.ugrid.GetNumberOfPoints(): pointdata=self.ugrid.GetPointData() pointdata.AddArray(data) pointdata.SetActiveVectors(name) elif array.shape[0]==self.ugrid.GetNumberOfCells(): celldata=self.ugrid.GetCellData() celldata.AddArray(data) else: raise Exception("Length neither number of nodes nor number of cells") def AddField(self, name, array): """Adds a field with arbitrary number of components under the specified name using.""" n=array.size sh=arr(array.shape) data = vtk.vtkDoubleArray() # number of tuples is sh[0] # number of components is the product of the rest of sh data.SetNumberOfComponents(sh[1:].prod()) data.SetNumberOfValues(n) data.SetName(name) flatarray=array.reshape(n) for i in range(n): data.SetValue(i, flatarray[i]) if sh[0]==self.ugrid.GetNumberOfPoints(): pointdata=self.ugrid.GetPointData() pointdata.AddArray(data) elif sh[0]==self.ugrid.GetNumberOfCells(): celldata=self.ugrid.GetCellData() celldata.AddArray(data) else: raise Exception("Length neither number of nodes nor number of cells") def ApplyProjection(self, projection_x, projection_y, projection_z): """Applys a projection to the grid coordinates. This overwrites the existing values.""" npoints = self.ugrid.GetNumberOfPoints () for i in range (npoints): (x,y,z) = self.ugrid.GetPoint (i) new_x = eval (projection_x) new_y = eval (projection_y) new_z = eval (projection_z) self.ugrid.GetPoints ().SetPoint (i, new_x, new_y, new_z) def ApplyCoordinateTransformation(self,f): """Applys a coordinate transformation to the grid coordinates. This overwrites the existing values.""" npoints = self.ugrid.GetNumberOfPoints () for i in range (npoints): (x,y,z) = self.ugrid.GetPoint (i) newX = f(arr([x,y,z]),t=0) self.ugrid.GetPoints ().SetPoint (i, newX[0], newX[1], newX[2]) def ApplyEarthProjection(self): """ Assume the input geometry is the Earth in Cartesian geometry and project to longatude, latitude, depth.""" npoints = self.ugrid.GetNumberOfPoints () earth_radius = 6378000.0 rad_to_deg = 180.0/math.pi deg_to_rad = math.pi/180.0 for i in range (npoints): (x,y,z) = self.ugrid.GetPoint (i) r = math.sqrt(x*x+y*y+z*z) depth = r - earth_radius longitude = rad_to_deg*math.atan2(y, x) latitude = 90.0 - rad_to_deg*math.acos(z/r) self.ugrid.GetPoints ().SetPoint (i, longitude, latitude, depth) def ProbeData(self, coordinates, name): """Interpolate field values at these coordinates.""" probe = VTU_Probe(self.ugrid, coordinates) return probe.GetField(name) def RemoveField(self, name): """Removes said field from the unstructured grid.""" pointdata=self.ugrid.GetPointData() pointdata.RemoveArray(name) def GetLocations(self): """Returns an array with the locations of the nodes.""" vtkPoints = self.ugrid.GetPoints() if vtkPoints is None: vtkData = vtk.vtkDoubleArray() else: vtkData = vtkPoints.GetData() return arr([vtkData.GetTuple3(i) for i in range(vtkData.GetNumberOfTuples())]) def GetCellPoints(self, id): """Returns an array with the node numbers of each cell (ndglno).""" idlist=vtk.vtkIdList() self.ugrid.GetCellPoints(id, idlist) return arr([idlist.GetId(i) for i in range(idlist.GetNumberOfIds())]) def GetFieldNames(self): """Returns the names of the available fields.""" vtkdata=self.ugrid.GetPointData() return [vtkdata.GetArrayName(i) for i in range(vtkdata.GetNumberOfArrays())] def GetPointCells(self, id): """Return an array with the elements which contain a node.""" idlist=vtk.vtkIdList() self.ugrid.GetPointCells(id, idlist) return arr([idlist.GetId(i) for i in range(idlist.GetNumberOfIds())]) def GetPointPoints(self, id): """Return the nodes connecting to a given node.""" cells = self.GetPointCells(id) lst = [] for cell in cells: lst = lst + list(self.GetCellPoints(cell)) s = set(lst) # remove duplicates return arr(list(s)) # make into a list again def GetDistance(self, x, y): """Return the distance in physical space between x and y.""" posx = self.ugrid.GetPoint(x) posy = self.ugrid.GetPoint(y) return math.sqrt(sum([(posx[i] - posy[i])**2 for i in range(len(posx))])) def Crop(self, min_x, max_x, min_y, max_y, min_z, max_z): """Trim off the edges defined by a bounding box.""" trimmer = vtk.vtkExtractUnstructuredGrid() if vtk.vtkVersion.GetVTKMajorVersion() <= 5: trimmer.SetInput(self.ugrid) else: trimmer.SetInputData(self.ugrid) trimmer.SetExtent(min_x, max_x, min_y, max_y, min_z, max_z) trimmer.Update() trimmed_ug = trimmer.GetOutput() self.ugrid = trimmed_ug def IntegrateField(self, field): """ Integrate the supplied scalar field, assuming a linear representation on a tetrahedral mesh. Needs numpy-izing for speed. """ assert field[0].shape in [(), (1,)] integral = 0.0 n_cells = self.ugrid.GetNumberOfCells() vtkGhostLevels = self.ugrid.GetCellData().GetArray("vtkGhostLevels") for cell_no in range(n_cells): integrate_cell = True if vtkGhostLevels: integrate_cell = (vtkGhostLevels.GetTuple1(cell_no) == 0) if integrate_cell: Cell = self.ugrid.GetCell(cell_no) Cell_points = Cell.GetPoints () nCell_points = Cell.GetNumberOfPoints() if nCell_points == 4: Volume = abs(Cell.ComputeVolume(Cell_points.GetPoint(0), \ Cell_points.GetPoint(1), \ Cell_points.GetPoint(2), \ Cell_points.GetPoint(3))) elif nCell_points == 3: Volume = abs(Cell.TriangleArea(Cell_points.GetPoint(0), \ Cell_points.GetPoint(1), \ Cell_points.GetPoint(2))) else: raise Exception("Unexpected number of points: " + str(nCell_points)) Cell_ids = Cell.GetPointIds() for point in range(Cell_ids.GetNumberOfIds()): PointId = Cell_ids.GetId(point) integral = integral + (Volume*field[PointId] / float(nCell_points)) return integral def GetCellVolume(self, id): cell = self.ugrid.GetCell(id) pts = cell.GetPoints() if isinstance(cell, vtk.vtkTriangle): return cell.TriangleArea(pts.GetPoint(0), pts.GetPoint(1), pts.GetPoint(2)) elif cell.GetNumberOfPoints() == 4: return abs(cell.ComputeVolume(pts.GetPoint(0), pts.GetPoint(1), pts.GetPoint(2), pts.GetPoint(3))) elif cell.GetNumberOfPoints() == 3: return abs(cell.ComputeVolume(pts.GetPoint(0), pts.GetPoint(1), pts.GetPoint(2))) else: raise Exception("Unexpected number of points") def GetFieldIntegral(self, name): """ Integrate the named field. """ return self.IntegrateField(self.GetField(name)) def GetFieldRms(self, name): """ Return the rms of the supplied scalar or vector field. """ field = self.GetField(name) rank = self.GetFieldRank(name) if rank == 0: normField = arr([field[i] ** 2.0 for i in range(len(field))]) elif rank == 1: normField = self.GetVectorNorm(name) else: raise Exception("Cannot calculate norm field for field rank > 1") volField = arr([1.0 for i in range(len(field))]) rms = self.IntegrateField(normField) rms /= self.IntegrateField(volField) rms = numpy.sqrt(rms) return float(rms) def StructuredPointProbe(self, nx, ny, nz, bounding_box=None): """ Probe the unstructured grid dataset using a structured points dataset. """ probe = vtk.vtkProbeFilter () if vtk.vtkVersion.GetVTKMajorVersion() <= 5: probe.SetSource(self.ugrid) else: probe.SetSourceData(self.ugrid) sgrid = vtk.vtkStructuredPoints() bbox = [0.0,0.0, 0.0,0.0, 0.0,0.0] if bounding_box==None: bbox = self.ugrid.GetBounds() else: bbox = bounding_box sgrid.SetOrigin([bbox[0], bbox[2], bbox[4]]) sgrid.SetDimensions(nx, ny, nz) spacing = [0.0, 0.0, 0.0] if nx>1: spacing[0] = (bbox[1]-bbox[0])/(nx-1.0) if ny>1: spacing[1] = (bbox[3]-bbox[2])/(ny-1.0) if nz>1: spacing[2] = (bbox[5]-bbox[4])/(nz-1.0) sgrid.SetSpacing(spacing) if vtk.vtkVersion.GetVTKMajorVersion() <= 5: probe.SetInput(sgrid) else: probe.SetInputData(sgrid) probe.Update () return probe.GetOutput() def GetDerivative(self, name): """ Returns the derivative of field 'name', a vector field if 'name' is scalar, and a tensor field if 'name' is a vector. The field 'name' has to be point-wise data. The returned array gives a cell-wise derivative. """ cd=vtk.vtkCellDerivatives() if vtk.vtkVersion.GetVTKMajorVersion() <= 5: cd.SetInput(sgrid) else: cd.SetInputData(sgrid) pointdata=self.ugrid.GetPointData() nc=pointdata.GetArray(name).GetNumberOfComponents() if nc==1: cd.SetVectorModeToComputeGradient() cd.SetTensorModeToPassTensors() pointdata.SetActiveScalars(name) cd.Update() vtkdata=cd.GetUnstructuredGridOutput().GetCellData().GetArray('ScalarGradient') return arr([vtkdata.GetTuple3(i) for i in range(vtkdata.GetNumberOfTuples())]) else: cd.SetTensorModeToComputeGradient() cd.SetVectorModeToPassVectors() pointdata.SetActiveVectors(name) cd.Update() vtkdata=cd.GetUnstructuredGridOutput().GetCellData().GetArray('VectorGradient') return arr([vtkdata.GetTuple9(i) for i in range(vtkdata.GetNumberOfTuples())]) def GetVorticity(self, name): """ Returns the vorticity of vectorfield 'name'. The field 'name' has to be point-wise data. The returned array gives a cell-wise derivative. """ cd=vtk.vtkCellDerivatives() if vtk.vtkVersion.GetVTKMajorVersion() <= 5: cd.SetInput(self.ugrid) else: cd.SetInputData(self.ugrid) pointdata=self.ugrid.GetPointData() cd.SetVectorModeToComputeVorticity() cd.SetTensorModeToPassTensors() pointdata.SetActiveVectors(name) cd.Update() vtkdata=cd.GetUnstructuredGridOutput().GetCellData().GetArray('VectorGradient') return arr([vtkdata.GetTuple3(i) for i in range(vtkdata.GetNumberOfTuples())]) def CellDataToPointData(self): """ Transforms all cell-wise fields in the vtu to point-wise fields. All existing fields will remain. """ cdtpd=vtk.vtkCellDataToPointData() if vtk.vtkVersion.GetVTKMajorVersion() <= 5: cdtpd.SetInput(self.ugrid) else: cdtpd.SetInputData(self.ugrid) cdtpd.PassCellDataOn() cdtpd.Update() self.ugrid=cdtpd.GetUnstructuredGridOutput() class VTU_Probe(object): """A class that combines a vtkProbeFilter with a list of invalid points (points that it failed to probe where we take the value of the nearest point)""" def __init__(self, ugrid, coordinates): # Initialise locator locator = vtk.vtkPointLocator() locator.SetDataSet(ugrid) locator.SetTolerance(10.0) locator.Update() # Initialise probe points = vtk.vtkPoints() points.SetDataTypeToDouble() ilen, jlen = coordinates.shape for i in range(ilen): points.InsertNextPoint(coordinates[i][0], coordinates[i][1], coordinates[i][2]) polydata = vtk.vtkPolyData() polydata.SetPoints(points) self.probe = vtk.vtkProbeFilter() if vtk.vtkVersion.GetVTKMajorVersion() <= 5: self.probe.SetInput(polydata) self.probe.SetSource(ugrid) else: self.probe.SetInputData(polydata) self.probe.SetSourceData(ugrid) self.probe.Update() # Generate a list invalidNodes, containing a map from invalid nodes in the # result to their closest nodes in the input valid_ids = self.probe.GetValidPoints() valid_loc = 0 self.invalidNodes = [] for i in range(ilen): if valid_ids.GetTuple1(valid_loc) == i: valid_loc += 1 else: nearest = locator.FindClosestPoint([coordinates[i][0], coordinates[i][1], coordinates[i][2]]) self.invalidNodes.append((i, nearest)) self.ugrid = ugrid def GetField(self, name): # Get final updated values pointdata = self.probe.GetOutput().GetPointData() vtkdata=pointdata.GetArray(name) nc=vtkdata.GetNumberOfComponents() nt=vtkdata.GetNumberOfTuples() array = arr([vtkdata.GetValue(i) for i in range(nt * nc)]) # Fix the point data at invalid nodes if len(self.invalidNodes) > 0: oldField = self.ugrid.GetPointData().GetArray(name) if oldField is None: oldField = self.ugrid.GetCellData().GetArray(name) if oldField is None: raise Exception("ERROR: couldn't find point or cell field data with name "+name+".") components = oldField.GetNumberOfComponents() for invalidNode, nearest in self.invalidNodes: for comp in range(nc): array[invalidNode * nc + comp] = oldField.GetValue(nearest * nc + comp) # this is a copy and paster from vtu.GetField above: if nc==9: return array.reshape(nt,3,3) elif nc==4: return array.reshape(nt,2,2) else: return array.reshape(nt,nc) return array def VtuMatchLocations(vtu1, vtu2, tolerance = 1.0e-6): """ Check that the locations in the supplied vtus match exactly, returning True if they match and False otherwise. The locations must be in the same order. """ locations1 = vtu1.GetLocations().tolist() locations2 = vtu2.GetLocations() if not len(locations1) == len(locations2): return False for i in range(len(locations1)): if not len(locations1[i]) == len(locations2[i]): return False for j in range(len(locations1[i])): if abs(locations1[i][j] - locations2[i][j]) > tolerance: return False return True def VtuMatchLocationsArbitrary(vtu1, vtu2, tolerance = 1.0e-6): """ Check that the locations in the supplied vtus match, returning True if they match and False otherwise. The locations may be in a different order. """ locations1 = vtu1.GetLocations() locations2 = vtu2.GetLocations() if not locations1.shape == locations2.shape: return False for j in range(locations1.shape[1]): # compute the smallest possible precision given the range of this coordinate epsilon = numpy.finfo(numpy.float).eps * numpy.abs(locations1[:,j]).max() if tolerance<epsilon: # the specified tolerance is smaller than possible machine precision # (or something else went wrong) raise Exception("ERROR: specified tolerance is smaller than machine precision of given locations") # ensure epsilon doesn't get too small (might be for zero for instance) epsilon=max(epsilon,tolerance/100.0) # round to that many decimal places (-2 to be sure) so that # we don't get rounding issues with lexsort locations1[:,j]=numpy.around(locations1[:,j], int(-numpy.log10(epsilon))-2) locations2[:,j]=numpy.around(locations2[:,j], int(-numpy.log10(epsilon))-2) # lexical sort on x,y and z coordinates resp. of locations1 and locations2 sort_index1=numpy.lexsort(locations1.T) sort_index2=numpy.lexsort(locations2.T) # should now be in same order, so we can check for its biggest difference return numpy.allclose(locations1[sort_index1],locations2[sort_index2], atol=tolerance) def VtuDiff(vtu1, vtu2, filename = None): """ Generate a vtu with fields generated by taking the difference between the field values in the two supplied vtus. Fields that are not common between the two vtus are neglected. If probe is True, the fields of vtu2 are projected onto the cell points of vtu1. Otherwise, the cell points of vtu1 and vtu2 must match. """ # Generate empty output vtu resultVtu = vtu() resultVtu.filename = filename # If the input vtu point locations match, do not use probe useProbe = not VtuMatchLocations(vtu1, vtu2) if useProbe: probe = VTU_Probe(vtu2.ugrid, vtu1.GetLocations()) # Copy the grid from the first input vtu into the output vtu resultVtu.ugrid.DeepCopy(vtu1.ugrid) # Find common field names between the input vtus and generate corresponding # difference fields fieldNames1 = vtu1.GetFieldNames() fieldNames2 = vtu2.GetFieldNames() for fieldName in fieldNames1: field1 = vtu1.GetField(fieldName) if fieldName in fieldNames2: if useProbe: field2 = probe.GetField(fieldName) else: field2 = vtu2.GetField(fieldName) resultVtu.AddField(fieldName, field1-field2) else: resultVtu.RemoveField(fieldName) # Also look for cell-based fields. This only works if we don't have # to interpolate (both meshes are the same) vtkdata=vtu1.ugrid.GetCellData() fieldNames1 = [vtkdata.GetArrayName(i) for i in range(vtkdata.GetNumberOfArrays())] vtkdata=vtu2.ugrid.GetCellData() fieldNames2 = [vtkdata.GetArrayName(i) for i in range(vtkdata.GetNumberOfArrays())] if useProbe: # meshes are different - we can't interpolate cell-based fields so let's just remove them from the output for fieldName in fieldNames1: if fieldName=='vtkGhostLevels': # this field should just be passed on unchanged continue resultVtu.RemoveField(fieldName) else: # meshes are the same - we can simply subtract for fieldName in fieldNames1: if fieldName=='vtkGhostLevels': # this field should just be passed on unchanged continue elif fieldName in fieldNames2: field1 = vtu1.GetField(fieldName) field2 = vtu2.GetField(fieldName) resultVtu.AddField(fieldName, field1-field2) else: resultVtu.RemoveField(fieldName) return resultVtu
MGautier/security-sensor
refs/heads/master
trunk/Documentacion/Memoria/trozos-codigo/codigo-9-events-test-events-source.py
2
def test_events_source(self): """ Comprobacion de que la fuente de seguridad, a la que pertenece, es igual a la asociada Returns: """ log_sources = LogSources.objects.get(Type="Iptables") events = Events.objects.get(ID_Source=log_sources) self.assertEqual(events.get_source(), log_sources)
blueboxgroup/horizon
refs/heads/master
horizon/test/tests/middleware.py
61
# Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import time from django.conf import settings from django.http import HttpResponseRedirect # noqa from horizon import exceptions from horizon import middleware from horizon.test import helpers as test class MiddlewareTests(test.TestCase): def test_redirect_login_fail_to_login(self): url = settings.LOGIN_URL request = self.factory.post(url) mw = middleware.HorizonMiddleware() resp = mw.process_exception(request, exceptions.NotAuthorized()) resp.client = self.client self.assertRedirects(resp, url) def test_session_timeout(self): requested_url = '/project/instances/' request = self.factory.get(requested_url) try: timeout = settings.SESSION_TIMEOUT except AttributeError: timeout = 1800 request.session['last_activity'] = int(time.time()) - (timeout + 10) mw = middleware.HorizonMiddleware() resp = mw.process_request(request) self.assertEqual(302, resp.status_code) self.assertEqual(requested_url, resp.get('Location')) def test_process_response_redirect_on_ajax_request(self): url = settings.LOGIN_URL mw = middleware.HorizonMiddleware() request = self.factory.post(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest') request.META['HTTP_X_REQUESTED_WITH'] = 'XMLHttpRequest' request.horizon = {'async_messages': [('error', 'error_msg', 'extra_tag')]} response = HttpResponseRedirect(url) response.client = self.client resp = mw.process_response(request, response) self.assertEqual(200, resp.status_code) self.assertEqual(url, resp['X-Horizon-Location'])
mbareta/edx-platform-ft
refs/heads/open-release/eucalyptus.master
openedx/core/lib/api/view_utils.py
29
""" Utilities related to API views """ import functools from django.core.exceptions import NON_FIELD_ERRORS, ValidationError, ObjectDoesNotExist from django.http import Http404 from django.utils.translation import ugettext as _ from rest_framework import status, response from rest_framework.exceptions import APIException from rest_framework.permissions import IsAuthenticated from rest_framework.request import clone_request from rest_framework.response import Response from rest_framework.mixins import RetrieveModelMixin, UpdateModelMixin from rest_framework.generics import GenericAPIView from lms.djangoapps.courseware.courses import get_course_with_access from lms.djangoapps.courseware.courseware_access_exception import CoursewareAccessException from opaque_keys.edx.keys import CourseKey from xmodule.modulestore.django import modulestore from openedx.core.lib.api.authentication import ( SessionAuthenticationAllowInactiveUser, OAuth2AuthenticationAllowInactiveUser, ) from openedx.core.lib.api.permissions import IsUserInUrl class DeveloperErrorViewMixin(object): """ A view mixin to handle common error cases other than validation failure (auth failure, method not allowed, etc.) by generating an error response conforming to our API conventions with a developer message. """ def make_error_response(self, status_code, developer_message): """ Build an error response with the given status code and developer_message """ return Response({"developer_message": developer_message}, status=status_code) def make_validation_error_response(self, validation_error): """ Build a 400 error response from the given ValidationError """ if hasattr(validation_error, "message_dict"): response_obj = {} message_dict = dict(validation_error.message_dict) # Extract both Django form and DRF serializer non-field errors non_field_error_list = ( message_dict.pop(NON_FIELD_ERRORS, []) + message_dict.pop("non_field_errors", []) ) if non_field_error_list: response_obj["developer_message"] = non_field_error_list[0] if message_dict: response_obj["field_errors"] = { field: {"developer_message": message_dict[field][0]} for field in message_dict } return Response(response_obj, status=400) else: return self.make_error_response(400, validation_error.messages[0]) def handle_exception(self, exc): """ Generalized helper method for managing specific API exception workflows """ if isinstance(exc, APIException): return self.make_error_response(exc.status_code, exc.detail) elif isinstance(exc, Http404) or isinstance(exc, ObjectDoesNotExist): return self.make_error_response(404, exc.message or "Not found.") elif isinstance(exc, ValidationError): return self.make_validation_error_response(exc) else: raise class ExpandableFieldViewMixin(object): """A view mixin to add expansion information to the serializer context for later use by an ExpandableField.""" def get_serializer_context(self): """Adds expand information from query parameters to the serializer context to support expandable fields.""" result = super(ExpandableFieldViewMixin, self).get_serializer_context() result['expand'] = [x for x in self.request.query_params.get('expand', '').split(',') if x] return result def view_course_access(depth=0, access_action='load', check_for_milestones=False): """ Method decorator for an API endpoint that verifies the user has access to the course. """ def _decorator(func): """Outer method decorator.""" @functools.wraps(func) def _wrapper(self, request, *args, **kwargs): """ Expects kwargs to contain 'course_id'. Passes the course descriptor to the given decorated function. Raises 404 if access to course is disallowed. """ course_id = CourseKey.from_string(kwargs.pop('course_id')) with modulestore().bulk_operations(course_id): try: course = get_course_with_access( request.user, access_action, course_id, depth=depth, check_if_enrolled=True, ) except CoursewareAccessException as error: return response.Response(data=error.to_json(), status=status.HTTP_404_NOT_FOUND) return func(self, request, course=course, *args, **kwargs) return _wrapper return _decorator def view_auth_classes(is_user=False, is_authenticated=True): """ Function and class decorator that abstracts the authentication and permission checks for api views. """ def _decorator(func_or_class): """ Requires either OAuth2 or Session-based authentication. If is_user is True, also requires username in URL matches the request user. """ func_or_class.authentication_classes = ( OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser ) func_or_class.permission_classes = () if is_authenticated: func_or_class.permission_classes += (IsAuthenticated,) if is_user: func_or_class.permission_classes += (IsUserInUrl,) return func_or_class return _decorator def add_serializer_errors(serializer, data, field_errors): """Adds errors from serializer validation to field_errors. data is the original data to deserialize.""" if not serializer.is_valid(): errors = serializer.errors for key, error in errors.iteritems(): field_errors[key] = { 'developer_message': u"Value '{field_value}' is not valid for field '{field_name}': {error}".format( field_value=data.get(key, ''), field_name=key, error=error ), 'user_message': _(u"This value is invalid."), } return field_errors def build_api_error(message, **kwargs): """Build an error dict corresponding to edX API conventions. Args: message (string): The string to use for developer and user messages. The user message will be translated, but for this to work message must have already been scraped. ugettext_noop is useful for this. **kwargs: format parameters for message """ return { 'developer_message': message.format(**kwargs), 'user_message': _(message).format(**kwargs), # pylint: disable=translation-of-non-string } class RetrievePatchAPIView(RetrieveModelMixin, UpdateModelMixin, GenericAPIView): """Concrete view for retrieving and updating a model instance. Like DRF's RetrieveUpdateAPIView, but without PUT and with automatic validation errors in the edX format. """ def get(self, request, *args, **kwargs): """Retrieves the specified resource using the RetrieveModelMixin.""" return self.retrieve(request, *args, **kwargs) def patch(self, request, *args, **kwargs): """Checks for validation errors, then updates the model using the UpdateModelMixin.""" field_errors = self._validate_patch(request.data) if field_errors: return Response({'field_errors': field_errors}, status=status.HTTP_400_BAD_REQUEST) return self.partial_update(request, *args, **kwargs) def _validate_patch(self, patch): """Validates a JSON merge patch. Captures DRF serializer errors and converts them to edX's standard format.""" field_errors = {} serializer = self.get_serializer(self.get_object_or_none(), data=patch, partial=True) fields = self.get_serializer().get_fields() for key in patch: if key in fields and fields[key].read_only: field_errors[key] = { 'developer_message': "This field is not editable", 'user_message': _("This field is not editable"), } add_serializer_errors(serializer, patch, field_errors) return field_errors def get_object_or_none(self): """ Retrieve an object or return None if the object can't be found. NOTE: This replaces functionality that was removed in Django Rest Framework v3.1. """ try: return self.get_object() except Http404: if self.request.method == 'PUT': # For PUT-as-create operation, we need to ensure that we have # relevant permissions, as if this was a POST request. This # will either raise a PermissionDenied exception, or simply # return None. self.check_permissions(clone_request(self.request, 'POST')) else: # PATCH requests where the object does not exist should still # return a 404 response. raise
NEricN/RobotCSimulator
refs/heads/master
Python/App/Lib/lib2to3/tests/test_refactor.py
38
""" Unit tests for refactor.py. """ from __future__ import with_statement import sys import os import codecs import operator import StringIO import tempfile import shutil import unittest import warnings from lib2to3 import refactor, pygram, fixer_base from lib2to3.pgen2 import token from . import support TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data") FIXER_DIR = os.path.join(TEST_DATA_DIR, "fixers") sys.path.append(FIXER_DIR) try: _DEFAULT_FIXERS = refactor.get_fixers_from_package("myfixes") finally: sys.path.pop() _2TO3_FIXERS = refactor.get_fixers_from_package("lib2to3.fixes") class TestRefactoringTool(unittest.TestCase): def setUp(self): sys.path.append(FIXER_DIR) def tearDown(self): sys.path.pop() def check_instances(self, instances, classes): for inst, cls in zip(instances, classes): if not isinstance(inst, cls): self.fail("%s are not instances of %s" % instances, classes) def rt(self, options=None, fixers=_DEFAULT_FIXERS, explicit=None): return refactor.RefactoringTool(fixers, options, explicit) def test_print_function_option(self): rt = self.rt({"print_function" : True}) self.assertIs(rt.grammar, pygram.python_grammar_no_print_statement) self.assertIs(rt.driver.grammar, pygram.python_grammar_no_print_statement) def test_write_unchanged_files_option(self): rt = self.rt() self.assertFalse(rt.write_unchanged_files) rt = self.rt({"write_unchanged_files" : True}) self.assertTrue(rt.write_unchanged_files) def test_fixer_loading_helpers(self): contents = ["explicit", "first", "last", "parrot", "preorder"] non_prefixed = refactor.get_all_fix_names("myfixes") prefixed = refactor.get_all_fix_names("myfixes", False) full_names = refactor.get_fixers_from_package("myfixes") self.assertEqual(prefixed, ["fix_" + name for name in contents]) self.assertEqual(non_prefixed, contents) self.assertEqual(full_names, ["myfixes.fix_" + name for name in contents]) def test_detect_future_features(self): run = refactor._detect_future_features fs = frozenset empty = fs() self.assertEqual(run(""), empty) self.assertEqual(run("from __future__ import print_function"), fs(("print_function",))) self.assertEqual(run("from __future__ import generators"), fs(("generators",))) self.assertEqual(run("from __future__ import generators, feature"), fs(("generators", "feature"))) inp = "from __future__ import generators, print_function" self.assertEqual(run(inp), fs(("generators", "print_function"))) inp ="from __future__ import print_function, generators" self.assertEqual(run(inp), fs(("print_function", "generators"))) inp = "from __future__ import (print_function,)" self.assertEqual(run(inp), fs(("print_function",))) inp = "from __future__ import (generators, print_function)" self.assertEqual(run(inp), fs(("generators", "print_function"))) inp = "from __future__ import (generators, nested_scopes)" self.assertEqual(run(inp), fs(("generators", "nested_scopes"))) inp = """from __future__ import generators from __future__ import print_function""" self.assertEqual(run(inp), fs(("generators", "print_function"))) invalid = ("from", "from 4", "from x", "from x 5", "from x im", "from x import", "from x import 4", ) for inp in invalid: self.assertEqual(run(inp), empty) inp = "'docstring'\nfrom __future__ import print_function" self.assertEqual(run(inp), fs(("print_function",))) inp = "'docstring'\n'somng'\nfrom __future__ import print_function" self.assertEqual(run(inp), empty) inp = "# comment\nfrom __future__ import print_function" self.assertEqual(run(inp), fs(("print_function",))) inp = "# comment\n'doc'\nfrom __future__ import print_function" self.assertEqual(run(inp), fs(("print_function",))) inp = "class x: pass\nfrom __future__ import print_function" self.assertEqual(run(inp), empty) def test_get_headnode_dict(self): class NoneFix(fixer_base.BaseFix): pass class FileInputFix(fixer_base.BaseFix): PATTERN = "file_input< any * >" class SimpleFix(fixer_base.BaseFix): PATTERN = "'name'" no_head = NoneFix({}, []) with_head = FileInputFix({}, []) simple = SimpleFix({}, []) d = refactor._get_headnode_dict([no_head, with_head, simple]) top_fixes = d.pop(pygram.python_symbols.file_input) self.assertEqual(top_fixes, [with_head, no_head]) name_fixes = d.pop(token.NAME) self.assertEqual(name_fixes, [simple, no_head]) for fixes in d.itervalues(): self.assertEqual(fixes, [no_head]) def test_fixer_loading(self): from myfixes.fix_first import FixFirst from myfixes.fix_last import FixLast from myfixes.fix_parrot import FixParrot from myfixes.fix_preorder import FixPreorder rt = self.rt() pre, post = rt.get_fixers() self.check_instances(pre, [FixPreorder]) self.check_instances(post, [FixFirst, FixParrot, FixLast]) def test_naughty_fixers(self): self.assertRaises(ImportError, self.rt, fixers=["not_here"]) self.assertRaises(refactor.FixerError, self.rt, fixers=["no_fixer_cls"]) self.assertRaises(refactor.FixerError, self.rt, fixers=["bad_order"]) def test_refactor_string(self): rt = self.rt() input = "def parrot(): pass\n\n" tree = rt.refactor_string(input, "<test>") self.assertNotEqual(str(tree), input) input = "def f(): pass\n\n" tree = rt.refactor_string(input, "<test>") self.assertEqual(str(tree), input) def test_refactor_stdin(self): class MyRT(refactor.RefactoringTool): def print_output(self, old_text, new_text, filename, equal): results.extend([old_text, new_text, filename, equal]) results = [] rt = MyRT(_DEFAULT_FIXERS) save = sys.stdin sys.stdin = StringIO.StringIO("def parrot(): pass\n\n") try: rt.refactor_stdin() finally: sys.stdin = save expected = ["def parrot(): pass\n\n", "def cheese(): pass\n\n", "<stdin>", False] self.assertEqual(results, expected) def check_file_refactoring(self, test_file, fixers=_2TO3_FIXERS, options=None, mock_log_debug=None, actually_write=True): tmpdir = tempfile.mkdtemp(prefix="2to3-test_refactor") self.addCleanup(shutil.rmtree, tmpdir) # make a copy of the tested file that we can write to shutil.copy(test_file, tmpdir) test_file = os.path.join(tmpdir, os.path.basename(test_file)) os.chmod(test_file, 0o644) def read_file(): with open(test_file, "rb") as fp: return fp.read() old_contents = read_file() rt = self.rt(fixers=fixers, options=options) if mock_log_debug: rt.log_debug = mock_log_debug rt.refactor_file(test_file) self.assertEqual(old_contents, read_file()) if not actually_write: return rt.refactor_file(test_file, True) new_contents = read_file() self.assertNotEqual(old_contents, new_contents) return new_contents def test_refactor_file(self): test_file = os.path.join(FIXER_DIR, "parrot_example.py") self.check_file_refactoring(test_file, _DEFAULT_FIXERS) def test_refactor_file_write_unchanged_file(self): test_file = os.path.join(FIXER_DIR, "parrot_example.py") debug_messages = [] def recording_log_debug(msg, *args): debug_messages.append(msg % args) self.check_file_refactoring(test_file, fixers=(), options={"write_unchanged_files": True}, mock_log_debug=recording_log_debug, actually_write=False) # Testing that it logged this message when write=False was passed is # sufficient to see that it did not bail early after "No changes". message_regex = r"Not writing changes to .*%s%s" % ( os.sep, os.path.basename(test_file)) for message in debug_messages: if "Not writing changes" in message: self.assertRegexpMatches(message, message_regex) break else: self.fail("%r not matched in %r" % (message_regex, debug_messages)) def test_refactor_dir(self): def check(structure, expected): def mock_refactor_file(self, f, *args): got.append(f) save_func = refactor.RefactoringTool.refactor_file refactor.RefactoringTool.refactor_file = mock_refactor_file rt = self.rt() got = [] dir = tempfile.mkdtemp(prefix="2to3-test_refactor") try: os.mkdir(os.path.join(dir, "a_dir")) for fn in structure: open(os.path.join(dir, fn), "wb").close() rt.refactor_dir(dir) finally: refactor.RefactoringTool.refactor_file = save_func shutil.rmtree(dir) self.assertEqual(got, [os.path.join(dir, path) for path in expected]) check([], []) tree = ["nothing", "hi.py", ".dumb", ".after.py", "notpy.npy", "sappy"] expected = ["hi.py"] check(tree, expected) tree = ["hi.py", os.path.join("a_dir", "stuff.py")] check(tree, tree) def test_file_encoding(self): fn = os.path.join(TEST_DATA_DIR, "different_encoding.py") self.check_file_refactoring(fn) def test_false_file_encoding(self): fn = os.path.join(TEST_DATA_DIR, "false_encoding.py") data = self.check_file_refactoring(fn) def test_bom(self): fn = os.path.join(TEST_DATA_DIR, "bom.py") data = self.check_file_refactoring(fn) self.assertTrue(data.startswith(codecs.BOM_UTF8)) def test_crlf_newlines(self): old_sep = os.linesep os.linesep = "\r\n" try: fn = os.path.join(TEST_DATA_DIR, "crlf.py") fixes = refactor.get_fixers_from_package("lib2to3.fixes") self.check_file_refactoring(fn, fixes) finally: os.linesep = old_sep def test_refactor_docstring(self): rt = self.rt() doc = """ >>> example() 42 """ out = rt.refactor_docstring(doc, "<test>") self.assertEqual(out, doc) doc = """ >>> def parrot(): ... return 43 """ out = rt.refactor_docstring(doc, "<test>") self.assertNotEqual(out, doc) def test_explicit(self): from myfixes.fix_explicit import FixExplicit rt = self.rt(fixers=["myfixes.fix_explicit"]) self.assertEqual(len(rt.post_order), 0) rt = self.rt(explicit=["myfixes.fix_explicit"]) for fix in rt.post_order: if isinstance(fix, FixExplicit): break else: self.fail("explicit fixer not loaded")
tknapen/RL_7T_experiment
refs/heads/RL
exp_tools/EyeLinkCoreGraphicsPsychopy.py
1
import pylink RIGHT_EYE = 1 LEFT_EYE = 0 BINOCULAR = 2 HIGH = 1 LOW = 0 WHITE = (255,255,255) GRAY = GREY = (128,128,128) BLACK = (0,0,0) buttons =(0, 0); class EyeLinkCoreGraphicsPsychopy(pylink.EyeLinkCustomDisplay): def __init__(self, tracker, display, displaySize): '''Initialize a Custom EyeLinkCoreGraphics for Psychopy tracker: the TRACKER() object display: the Psychopy display window ''' pylink.EyeLinkCustomDisplay.__init__(self) self.display = display self.displaySize = displaySize self.tracker = tracker print("Finished initializing custom graphics") class Tracker_EyeLink(): def __init__(self, win, clock, sj = "TEST", autoCalibration=True, saccadeSensitivity = HIGH, calibrationType = 'HV9', calibrationTargetColor = WHITE, calibrationBgColor = BLACK, CalibrationSounds = False ): ''' win: psychopy visual window used for the experiment clock: psychopy time clock recording time for whole experiment sj: Subject identifier string (affects EDF filename) cf plus bas autoCalibration: True: enable auto-pacing during calibration saccadeSensitivity: HIGH: Pursuit and neurological work LOW: Cognitive research calibrationType: H3: Horizontal 3-point HV3: 3-point calibration, poor linearization HV5: 5-point calibration, poor at corners HV9: 9-point calibration, best overall calibrationTargetColor and calibrationBgColor: RGB tuple, i.e., (255,0,0) for Red One of: BLACK, WHITE, GRAY calibrationSounds: True: enable feedback sounds when calibrating ''' self.edfFileName = str(sj)+".edf" ##nom du .edf print(self.edfFileName) self.screenSize = (1024,768) self.units = 'deg'#inf['windowUnits']# 'deg' self.monitorName = "ViewSonic" #inf['windowMonitor.name']# monitor de l'INT print("Connecting to eyetracker.") self.tracker = pylink.EyeLink("100.1.1.1")######### CONNECTION A L'EYELINK ######################## print("Loading custom graphics") genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, self.screenSize) self.tracker.openDataFile(self.edfFileName)#### pylink.flushGetkeyQueue() #It may be called at any time to get rid any of old keys from the queue. self.tracker.setOfflineMode();###Places EyeLink tracker in off-line mode. Wait till the traker has finished the mode transition. self.tracker.sendCommand("screen_pixel_coords = 0 0 %d %d" %( tuple(self.screenSize) )) self.tracker.setCalibrationType(calibrationType) self.tracker.sendMessage("DISPLAY_COORDS 0 0 %d %d" %( tuple(self.screenSize) )) ###EYElink version eyelink_ver = self.tracker.getTrackerVersion() if eyelink_ver == 3: tvstr = self.tracker.getTrackerVersionString() vindex = tvstr.find("EYELINK CL") tracker_software_ver = int(float(tvstr[(vindex + len("EYELINK CL")):].strip())) else: tracker_software_ver = 0 if eyelink_ver>=2: self.tracker.sendCommand("select_parser_configuration %d" %saccadeSensitivity) else: if saccadeSensitivity == HIGH: svt, sat = 22, 5000 else: svt, sat = 35, 9500 self.tracker.sendCommand("saccade_velocity_threshold = %d" %svt) self.tracker.sendCommand("saccade_acceleration_threshold = %d" %sat) if eyelink_ver == 2: #turn off scenelink camera stuff self.tracker.sendCommand("scene_camera_gazemap = NO") # set EDF file contents self.tracker.sendCommand("file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON") if tracker_software_ver>=4: self.tracker.sendCommand("file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET") else: self.tracker.sendCommand("file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS") # set link data (used for gaze cursor) self.tracker.sendCommand("link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON") if tracker_software_ver>=4: self.tracker.sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET") else: self.tracker.sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS") #Set the calibration settings: pylink.setCalibrationColors( calibrationTargetColor, calibrationBgColor) if CalibrationSounds: pylink.setCalibrationSounds("", "", "") pylink.setDriftCorrectSounds("", "off", "off") else: pylink.setCalibrationSounds("off", "off", "off") pylink.setDriftCorrectSounds("off", "off", "off") if autoCalibration: self.tracker.enableAutoCalibration else: self.tracker.disableAutoCalibration win.flip() print("Opening graphics") pylink.openGraphicsEx(genv) print("Begining tracker setup") self.tracker.doTrackerSetup() win.flip() def sendMessage(self, msg): '''Record a message to the tracker''' print(msg) self.tracker.sendMessage(msg) def sendCommand(self, msg): '''Send command to the tracker''' print(msg) self.tracker.sendCommand(msg) def resetEventQue(self): '''Reset the eyetracker event cue usage: use this prior to a loop calling recordFixation() so that old fixations or other events are cleared from the buffer. ''' self.tracker.resetData() def getStatus(self):###virer? """Return the status of the connection to the eye tracker""" if self.tracker.breakPressed(): return("ABORT_EXPT") if self.tracker.escapePressed(): return("SKIP_TRIAL") if self.tracker.isRecording()==0: return("RECORDING") if self.tracker.isConnected(): return("ONLINE") else: return("OFFLINE") return("UNKNOWN STATUS: " + str(self.tracker.getStatus()) ) #================================================================ def endTrial(self): '''Ends recording: adds 100 msec of data to catch final events''' pylink.endRealTimeMode() #Returns the application to a priority slightly above normal, to end realtime mode pylink.pumpDelay(100)# ??????? self.tracker.stopRecording()#Stops recording, resets EyeLink data mode. Call 50 to 100 msec after an event occurs that ends the trial. ##################################################################### # Eyetracker set up and take-down ##################################################################### def preTrial(self, trial, win, calibTrial=False): '''Set up each trial with the eye tracker ''' if calibTrial: cond = "Test/Calibration Trial" else: cond = "Non-test/no calibration trial" message ="record_status_message 'Trial %d %s'"%(trial, cond)####rajouter le run? self.tracker.sendCommand(message) msg = "TRIALID %s"%trial self.tracker.sendMessage(msg) #Do drift correction if necissary if calibTrial: win.flip() while True: try: error = self.tracker.doDriftCorrect(self.screenSize[0]/2,self.screenSize[1]/2,1,1) if error != 27: self.tracker.applyDriftCorrect break else: #self.tracker.doTrackerSetup() win.flip()#####refaire une calib? except: print("Exception")######sert a rien break win.flip() ##################################################################################enlever au dessus + print("Switching to record mode") error = self.tracker.startRecording(1,1,1,1) pylink.beginRealTimeMode(100) if error: return error#### if not self.tracker.waitForBlockStart(1000, 1, 0):###### self.tracker.sendMessage ("TRIAL ERROR") self.endTrial()#arreter l'enregistrement car il y a erreur print "ERROR: No link samples received!" return "TRIAL_ERROR" self.eye_used = self.tracker.eyeAvailable(); #determine which eye(s) are available if self.eye_used == RIGHT_EYE: self.tracker.sendMessage("EYE_USED 1 RIGHT") elif self.eye_used == LEFT_EYE or self.eye_used == BINOCULAR: self.tracker.sendMessage("EYE_USED 0 LEFT") self.eye_used = LEFT_EYE else: print "Error in getting the eye information!" return "TRIAL_ERROR" self.tracker.sendMessage ("TRIAL ERROR") self.tracker.flushKeybuttons(0) def closeConnection(self): '''Clean everything up, save data and close connection to tracker''' if self.tracker != None: # File transfer and cleanup! self.tracker.setOfflineMode(); pylink.msecDelay (600) #Close the file and transfer it to Display PC self.tracker.closeDataFile() self.tracker.receiveDataFile(self.edfFileName, self.edfFileName) self.tracker.close(); return "Eyelink connection closed successfully" else: return "Eyelink not available, not closed properly"
idea4bsd/idea4bsd
refs/heads/idea4bsd-master
python/testData/highlighting/awaitInSetPy35.py
23
async def f11(x): y = {await<error descr="expression expected"> </error>for await<error descr="expression expected"> </error>in []} # fail await x def f12(x): y = {await for await in []} return x async def f21(x): y = {mapper(await<error descr="expression expected">)</error> for await<error descr="expression expected"> </error>in []} # fail await x def f22(x): y = {mapper(await) for await in []} return x async def f31(x): await<error descr="expression expected"> </error>= [] # fail y = {i for i in await<error descr="expression expected">}</error> # fail await x def f32(x): await = [] y = {i for i in await} return x async def f41(x): y = {<error descr="Python version 3.5 does not support 'await' inside comprehensions">await</error> z for z in []} # fail await x async def f42(x): y = {mapper(<error descr="Python version 3.5 does not support 'await' inside comprehensions">await</error> z) for z in []} # fail await x async def f43(x): y = {z for <error descr="can't assign to await expression">await z</error> in []} # fail await x async def f44(x): y = {z for z in await x} await x async def f51(): await<error descr="expression expected"> </error>= 5 # fail return {await<error descr="expression expected">}</error> # fail def f52(): await = 5 return {await} async def f61(): await<error descr="expression expected"> </error>= 5 # fail return {"a", await<error descr="expression expected">,</error> "b"} # fail def f62(): await = 5 return {"a", await, "b"} async def f71(x): return {await x} async def f72(x): return {"a", await x, "b"} async def f81(x): {<error descr="Python version 3.5 does not support 'await' inside comprehensions">await</error> fun() for fun in funcs if <error descr="Python version 3.5 does not support 'await' inside comprehensions">await</error> smth} {<error descr="Python version 3.5 does not support 'await' inside comprehensions">await</error> fun() <error descr="Python version 3.5 does not support 'async' inside comprehensions and generator expressions">async</error> for fun in funcs if <error descr="Python version 3.5 does not support 'await' inside comprehensions">await</error> smth}
vmindru/ansible
refs/heads/devel
lib/ansible/modules/remote_management/oneview/oneview_network_set_facts.py
125
#!/usr/bin/python # Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: oneview_network_set_facts short_description: Retrieve facts about the OneView Network Sets description: - Retrieve facts about the Network Sets from OneView. version_added: "2.4" requirements: - hpOneView >= 2.0.1 author: - Felipe Bulsoni (@fgbulsoni) - Thiago Miotto (@tmiotto) - Adriane Cardozo (@adriane-cardozo) options: name: description: - Network Set name. options: description: - "List with options to gather facts about Network Set. Option allowed: C(withoutEthernet). The option C(withoutEthernet) retrieves the list of network_sets excluding Ethernet networks." extends_documentation_fragment: - oneview - oneview.factsparams ''' EXAMPLES = ''' - name: Gather facts about all Network Sets oneview_network_set_facts: hostname: 172.16.101.48 username: administrator password: my_password api_version: 500 no_log: true delegate_to: localhost - debug: var=network_sets - name: Gather paginated, filtered, and sorted facts about Network Sets oneview_network_set_facts: hostname: 172.16.101.48 username: administrator password: my_password api_version: 500 params: start: 0 count: 3 sort: 'name:descending' filter: name='netset001' no_log: true delegate_to: localhost - debug: var=network_sets - name: Gather facts about all Network Sets, excluding Ethernet networks oneview_network_set_facts: hostname: 172.16.101.48 username: administrator password: my_password api_version: 500 options: - withoutEthernet no_log: true delegate_to: localhost - debug: var=network_sets - name: Gather facts about a Network Set by name oneview_network_set_facts: hostname: 172.16.101.48 username: administrator password: my_password api_version: 500 name: Name of the Network Set no_log: true delegate_to: localhost - debug: var=network_sets - name: Gather facts about a Network Set by name, excluding Ethernet networks oneview_network_set_facts: hostname: 172.16.101.48 username: administrator password: my_password api_version: 500 name: Name of the Network Set options: - withoutEthernet no_log: true delegate_to: localhost - debug: var=network_sets ''' RETURN = ''' network_sets: description: Has all the OneView facts about the Network Sets. returned: Always, but can be empty. type: dict ''' from ansible.module_utils.oneview import OneViewModuleBase class NetworkSetFactsModule(OneViewModuleBase): argument_spec = dict( name=dict(type='str'), options=dict(type='list'), params=dict(type='dict'), ) def __init__(self): super(NetworkSetFactsModule, self).__init__(additional_arg_spec=self.argument_spec) def execute_module(self): name = self.module.params.get('name') if 'withoutEthernet' in self.options: filter_by_name = ("\"'name'='%s'\"" % name) if name else '' network_sets = self.oneview_client.network_sets.get_all_without_ethernet(filter=filter_by_name) elif name: network_sets = self.oneview_client.network_sets.get_by('name', name) else: network_sets = self.oneview_client.network_sets.get_all(**self.facts_params) return dict(changed=False, ansible_facts=dict(network_sets=network_sets)) def main(): NetworkSetFactsModule().run() if __name__ == '__main__': main()
missionpinball/mpf-examples
refs/heads/dev
tutorial/step_2/tests/test_step.py
7
"""Contains tests for the MPF tutorial.""" from mpf.tests.MpfMachineTestCase import MpfMachineTestCase class TestTutorialMachine(MpfMachineTestCase): """Contains tests for the MPF machine config""" def test_attract(self): """Test that machine starts attract.""" self.assertModeRunning('attract')
USGSDenverPychron/pychron
refs/heads/develop
launchers/pyview.py
1
# =============================================================================== # Copyright 2014 Jake Ross # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # ============= enthought library imports ======================= # ============= standard library imports ======================== # ============= local library imports ========================== from helpers import entry_point entry_point('pyview', 'PyView') # ============= EOF =============================================
hyperized/ansible
refs/heads/devel
lib/ansible/modules/network/fortios/fortios_icap_profile.py
13
#!/usr/bin/python from __future__ import (absolute_import, division, print_function) # Copyright 2019 Fortinet, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. __metaclass__ = type ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'} DOCUMENTATION = ''' --- module: fortios_icap_profile short_description: Configure ICAP profiles in Fortinet's FortiOS and FortiGate. description: - This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the user to set and modify icap feature and profile category. Examples include all parameters and values need to be adjusted to datasources before usage. Tested with FOS v6.0.5 version_added: "2.8" author: - Miguel Angel Munoz (@mamunozgonzalez) - Nicolas Thomas (@thomnico) notes: - Requires fortiosapi library developed by Fortinet - Run as a local_action in your playbook requirements: - fortiosapi>=0.9.8 options: host: description: - FortiOS or FortiGate IP address. type: str required: false username: description: - FortiOS or FortiGate username. type: str required: false password: description: - FortiOS or FortiGate password. type: str default: "" vdom: description: - Virtual domain, among those defined previously. A vdom is a virtual instance of the FortiGate that can be configured and used as a different unit. type: str default: root https: description: - Indicates if the requests towards FortiGate must use HTTPS protocol. type: bool default: true ssl_verify: description: - Ensures FortiGate certificate must be verified by a proper CA. type: bool default: true version_added: 2.9 state: description: - Indicates whether to create or remove the object. This attribute was present already in previous version in a deeper level. It has been moved out to this outer level. type: str required: false choices: - present - absent version_added: 2.9 icap_profile: description: - Configure ICAP profiles. default: null type: dict suboptions: state: description: - B(Deprecated) - Starting with Ansible 2.9 we recommend using the top-level 'state' parameter. - HORIZONTALLINE - Indicates whether to create or remove the object. type: str required: false choices: - present - absent methods: description: - The allowed HTTP methods that will be sent to ICAP server for further processing. type: str choices: - delete - get - head - options - post - put - trace - other name: description: - ICAP profile name. required: true type: str replacemsg_group: description: - Replacement message group. Source system.replacemsg-group.name. type: str request: description: - Enable/disable whether an HTTP request is passed to an ICAP server. type: str choices: - disable - enable request_failure: description: - Action to take if the ICAP server cannot be contacted when processing an HTTP request. type: str choices: - error - bypass request_path: description: - Path component of the ICAP URI that identifies the HTTP request processing service. type: str request_server: description: - ICAP server to use for an HTTP request. Source icap.server.name. type: str response: description: - Enable/disable whether an HTTP response is passed to an ICAP server. type: str choices: - disable - enable response_failure: description: - Action to take if the ICAP server cannot be contacted when processing an HTTP response. type: str choices: - error - bypass response_path: description: - Path component of the ICAP URI that identifies the HTTP response processing service. type: str response_server: description: - ICAP server to use for an HTTP response. Source icap.server.name. type: str streaming_content_bypass: description: - Enable/disable bypassing of ICAP server for streaming content. type: str choices: - disable - enable ''' EXAMPLES = ''' - hosts: localhost vars: host: "192.168.122.40" username: "admin" password: "" vdom: "root" ssl_verify: "False" tasks: - name: Configure ICAP profiles. fortios_icap_profile: host: "{{ host }}" username: "{{ username }}" password: "{{ password }}" vdom: "{{ vdom }}" https: "False" state: "present" icap_profile: methods: "delete" name: "default_name_4" replacemsg_group: "<your_own_value> (source system.replacemsg-group.name)" request: "disable" request_failure: "error" request_path: "<your_own_value>" request_server: "<your_own_value> (source icap.server.name)" response: "disable" response_failure: "error" response_path: "<your_own_value>" response_server: "<your_own_value> (source icap.server.name)" streaming_content_bypass: "disable" ''' RETURN = ''' build: description: Build number of the fortigate image returned: always type: str sample: '1547' http_method: description: Last method used to provision the content into FortiGate returned: always type: str sample: 'PUT' http_status: description: Last result given by FortiGate on last operation applied returned: always type: str sample: "200" mkey: description: Master key (id) used in the last call to FortiGate returned: success type: str sample: "id" name: description: Name of the table used to fulfill the request returned: always type: str sample: "urlfilter" path: description: Path of the table used to fulfill the request returned: always type: str sample: "webfilter" revision: description: Internal revision number returned: always type: str sample: "17.0.2.10658" serial: description: Serial number of the unit returned: always type: str sample: "FGVMEVYYQT3AB5352" status: description: Indication of the operation's result returned: always type: str sample: "success" vdom: description: Virtual domain used returned: always type: str sample: "root" version: description: Version of the FortiGate returned: always type: str sample: "v5.6.3" ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.connection import Connection from ansible.module_utils.network.fortios.fortios import FortiOSHandler from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG def login(data, fos): host = data['host'] username = data['username'] password = data['password'] ssl_verify = data['ssl_verify'] fos.debug('on') if 'https' in data and not data['https']: fos.https('off') else: fos.https('on') fos.login(host, username, password, verify=ssl_verify) def filter_icap_profile_data(json): option_list = ['methods', 'name', 'replacemsg_group', 'request', 'request_failure', 'request_path', 'request_server', 'response', 'response_failure', 'response_path', 'response_server', 'streaming_content_bypass'] dictionary = {} for attribute in option_list: if attribute in json and json[attribute] is not None: dictionary[attribute] = json[attribute] return dictionary def underscore_to_hyphen(data): if isinstance(data, list): for elem in data: elem = underscore_to_hyphen(elem) elif isinstance(data, dict): new_data = {} for k, v in data.items(): new_data[k.replace('_', '-')] = underscore_to_hyphen(v) data = new_data return data def icap_profile(data, fos): vdom = data['vdom'] if 'state' in data and data['state']: state = data['state'] elif 'state' in data['icap_profile'] and data['icap_profile']: state = data['icap_profile']['state'] else: state = True icap_profile_data = data['icap_profile'] filtered_data = underscore_to_hyphen(filter_icap_profile_data(icap_profile_data)) if state == "present": return fos.set('icap', 'profile', data=filtered_data, vdom=vdom) elif state == "absent": return fos.delete('icap', 'profile', mkey=filtered_data['name'], vdom=vdom) def is_successful_status(status): return status['status'] == "success" or \ status['http_method'] == "DELETE" and status['http_status'] == 404 def fortios_icap(data, fos): if data['icap_profile']: resp = icap_profile(data, fos) return not is_successful_status(resp), \ resp['status'] == "success", \ resp def main(): fields = { "host": {"required": False, "type": "str"}, "username": {"required": False, "type": "str"}, "password": {"required": False, "type": "str", "default": "", "no_log": True}, "vdom": {"required": False, "type": "str", "default": "root"}, "https": {"required": False, "type": "bool", "default": True}, "ssl_verify": {"required": False, "type": "bool", "default": True}, "state": {"required": False, "type": "str", "choices": ["present", "absent"]}, "icap_profile": { "required": False, "type": "dict", "default": None, "options": { "state": {"required": False, "type": "str", "choices": ["present", "absent"]}, "methods": {"required": False, "type": "str", "choices": ["delete", "get", "head", "options", "post", "put", "trace", "other"]}, "name": {"required": True, "type": "str"}, "replacemsg_group": {"required": False, "type": "str"}, "request": {"required": False, "type": "str", "choices": ["disable", "enable"]}, "request_failure": {"required": False, "type": "str", "choices": ["error", "bypass"]}, "request_path": {"required": False, "type": "str"}, "request_server": {"required": False, "type": "str"}, "response": {"required": False, "type": "str", "choices": ["disable", "enable"]}, "response_failure": {"required": False, "type": "str", "choices": ["error", "bypass"]}, "response_path": {"required": False, "type": "str"}, "response_server": {"required": False, "type": "str"}, "streaming_content_bypass": {"required": False, "type": "str", "choices": ["disable", "enable"]} } } } module = AnsibleModule(argument_spec=fields, supports_check_mode=False) # legacy_mode refers to using fortiosapi instead of HTTPAPI legacy_mode = 'host' in module.params and module.params['host'] is not None and \ 'username' in module.params and module.params['username'] is not None and \ 'password' in module.params and module.params['password'] is not None if not legacy_mode: if module._socket_path: connection = Connection(module._socket_path) fos = FortiOSHandler(connection) is_error, has_changed, result = fortios_icap(module.params, fos) else: module.fail_json(**FAIL_SOCKET_MSG) else: try: from fortiosapi import FortiOSAPI except ImportError: module.fail_json(msg="fortiosapi module is required") fos = FortiOSAPI() login(module.params, fos) is_error, has_changed, result = fortios_icap(module.params, fos) fos.logout() if not is_error: module.exit_json(changed=has_changed, meta=result) else: module.fail_json(msg="Error in repo", meta=result) if __name__ == '__main__': main()
nju520/django
refs/heads/master
tests/view_tests/default_urls.py
405
from django.conf.urls import url from django.contrib import admin urlpatterns = [ # This is the same as in the default project template url(r'^admin/', admin.site.urls), ]
Russell-IO/ansible
refs/heads/devel
lib/ansible/modules/network/netscaler/netscaler_gslb_service.py
72
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2017 Citrix Systems # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: netscaler_gslb_service short_description: Manage gslb service entities in Netscaler. description: - Manage gslb service entities in Netscaler. version_added: "2.4" author: George Nikolopoulos (@giorgos-nikolopoulos) options: servicename: description: - >- Name for the GSLB service. Must begin with an ASCII alphanumeric or underscore C(_) character, and must contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.), space, colon C(:), at C(@), equals C(=), and hyphen C(-) characters. Can be changed after the GSLB service is created. - >- - "Minimum length = 1" cnameentry: description: - "Canonical name of the GSLB service. Used in CNAME-based GSLB." - "Minimum length = 1" servername: description: - "Name of the server hosting the GSLB service." - "Minimum length = 1" servicetype: choices: - 'HTTP' - 'FTP' - 'TCP' - 'UDP' - 'SSL' - 'SSL_BRIDGE' - 'SSL_TCP' - 'NNTP' - 'ANY' - 'SIP_UDP' - 'SIP_TCP' - 'SIP_SSL' - 'RADIUS' - 'RDP' - 'RTSP' - 'MYSQL' - 'MSSQL' - 'ORACLE' description: - "Type of service to create." port: description: - "Port on which the load balancing entity represented by this GSLB service listens." - "Minimum value = 1" - "Range 1 - 65535" - "* in CLI is represented as 65535 in NITRO API" publicip: description: - >- The public IP address that a NAT device translates to the GSLB service's private IP address. Optional. publicport: description: - >- The public port associated with the GSLB service's public IP address. The port is mapped to the service's private port number. Applicable to the local GSLB service. Optional. maxclient: description: - >- The maximum number of open connections that the service can support at any given time. A GSLB service whose connection count reaches the maximum is not considered when a GSLB decision is made, until the connection count drops below the maximum. - "Minimum value = C(0)" - "Maximum value = C(4294967294)" healthmonitor: description: - "Monitor the health of the GSLB service." type: bool sitename: description: - "Name of the GSLB site to which the service belongs." - "Minimum length = 1" cip: choices: - 'enabled' - 'disabled' description: - >- In the request that is forwarded to the GSLB service, insert a header that stores the client's IP address. Client IP header insertion is used in connection-proxy based site persistence. cipheader: description: - >- Name for the HTTP header that stores the client's IP address. Used with the Client IP option. If client IP header insertion is enabled on the service and a name is not specified for the header, the NetScaler appliance uses the name specified by the cipHeader parameter in the set ns param command or, in the GUI, the Client IP Header parameter in the Configure HTTP Parameters dialog box. - "Minimum length = 1" sitepersistence: choices: - 'ConnectionProxy' - 'HTTPRedirect' - 'NONE' description: - "Use cookie-based site persistence. Applicable only to C(HTTP) and C(SSL) GSLB services." siteprefix: description: - >- The site's prefix string. When the service is bound to a GSLB virtual server, a GSLB site domain is generated internally for each bound service-domain pair by concatenating the site prefix of the service and the name of the domain. If the special string NONE is specified, the site-prefix string is unset. When implementing HTTP redirect site persistence, the NetScaler appliance redirects GSLB requests to GSLB services by using their site domains. clttimeout: description: - >- Idle time, in seconds, after which a client connection is terminated. Applicable if connection proxy based site persistence is used. - "Minimum value = 0" - "Maximum value = 31536000" maxbandwidth: description: - >- Integer specifying the maximum bandwidth allowed for the service. A GSLB service whose bandwidth reaches the maximum is not considered when a GSLB decision is made, until its bandwidth consumption drops below the maximum. downstateflush: choices: - 'enabled' - 'disabled' description: - >- Flush all active transactions associated with the GSLB service when its state transitions from UP to DOWN. Do not enable this option for services that must complete their transactions. Applicable if connection proxy based site persistence is used. maxaaausers: description: - >- Maximum number of SSL VPN users that can be logged on concurrently to the VPN virtual server that is represented by this GSLB service. A GSLB service whose user count reaches the maximum is not considered when a GSLB decision is made, until the count drops below the maximum. - "Minimum value = C(0)" - "Maximum value = C(65535)" monthreshold: description: - >- Monitoring threshold value for the GSLB service. If the sum of the weights of the monitors that are bound to this GSLB service and are in the UP state is not equal to or greater than this threshold value, the service is marked as DOWN. - "Minimum value = C(0)" - "Maximum value = C(65535)" hashid: description: - "Unique hash identifier for the GSLB service, used by hash based load balancing methods." - "Minimum value = C(1)" comment: description: - "Any comments that you might want to associate with the GSLB service." appflowlog: choices: - 'enabled' - 'disabled' description: - "Enable logging appflow flow information." ipaddress: description: - >- IP address for the GSLB service. Should represent a load balancing, content switching, or VPN virtual server on the NetScaler appliance, or the IP address of another load balancing device. monitor_bindings: description: - Bind monitors to this gslb service suboptions: weight: description: - Weight to assign to the monitor-service binding. - A larger number specifies a greater weight. - Contributes to the monitoring threshold, which determines the state of the service. - Minimum value = C(1) - Maximum value = C(100) monitor_name: description: - Monitor name. extends_documentation_fragment: netscaler requirements: - nitro python sdk ''' EXAMPLES = ''' - name: Setup gslb service 2 delegate_to: localhost register: result check_mode: "{{ check_mode }}" netscaler_gslb_service: operation: present servicename: gslb-service-2 cnameentry: example.com sitename: gslb-site-1 ''' RETURN = ''' loglines: description: list of logged messages by the module returned: always type: list sample: "['message 1', 'message 2']" msg: description: Message detailing the failure reason returned: failure type: string sample: "Action does not exist" diff: description: List of differences between the actual configured object and the configuration specified in the module returned: failure type: dictionary sample: "{ 'targetlbvserver': 'difference. ours: (str) server1 other: (str) server2' }" ''' import copy from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.netscaler.netscaler import ( ConfigProxy, get_nitro_client, netscaler_common_arguments, log, loglines, ensure_feature_is_enabled, monkey_patch_nitro_api, get_immutables_intersection, ) try: monkey_patch_nitro_api() from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbservice import gslbservice from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbservice_lbmonitor_binding import gslbservice_lbmonitor_binding from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception PYTHON_SDK_IMPORTED = True except ImportError as e: PYTHON_SDK_IMPORTED = False def gslb_service_exists(client, module): if gslbservice.count_filtered(client, 'servicename:%s' % module.params['servicename']) > 0: return True else: return False def gslb_service_identical(client, module, gslb_service_proxy): gslb_service_list = gslbservice.get_filtered(client, 'servicename:%s' % module.params['servicename']) diff_dict = gslb_service_proxy.diff_object(gslb_service_list[0]) # Ignore ip attribute missing if 'ip' in diff_dict: del diff_dict['ip'] if len(diff_dict) == 0: return True else: return False def get_actual_monitor_bindings(client, module): log('get_actual_monitor_bindings') # Get actual monitor bindings and index them by monitor_name actual_monitor_bindings = {} if gslbservice_lbmonitor_binding.count(client, servicename=module.params['servicename']) != 0: # Get all monitor bindings associated with the named gslb vserver fetched_bindings = gslbservice_lbmonitor_binding.get(client, servicename=module.params['servicename']) # index by monitor name for binding in fetched_bindings: # complete_missing_attributes(binding, gslbservice_lbmonitor_binding_rw_attrs, fill_value=None) actual_monitor_bindings[binding.monitor_name] = binding return actual_monitor_bindings def get_configured_monitor_bindings(client, module): log('get_configured_monitor_bindings') configured_monitor_proxys = {} gslbservice_lbmonitor_binding_rw_attrs = [ 'weight', 'servicename', 'monitor_name', ] # Get configured monitor bindings and index them by monitor_name if module.params['monitor_bindings'] is not None: for configured_monitor_bindings in module.params['monitor_bindings']: binding_values = copy.deepcopy(configured_monitor_bindings) binding_values['servicename'] = module.params['servicename'] proxy = ConfigProxy( actual=gslbservice_lbmonitor_binding(), client=client, attribute_values_dict=binding_values, readwrite_attrs=gslbservice_lbmonitor_binding_rw_attrs, readonly_attrs=[], ) configured_monitor_proxys[configured_monitor_bindings['monitor_name']] = proxy return configured_monitor_proxys def monitor_bindings_identical(client, module): log('monitor_bindings_identical') actual_bindings = get_actual_monitor_bindings(client, module) configured_proxys = get_configured_monitor_bindings(client, module) actual_keyset = set(actual_bindings.keys()) configured_keyset = set(configured_proxys.keys()) symmetric_difference = actual_keyset ^ configured_keyset if len(symmetric_difference) != 0: log('Symmetric difference %s' % symmetric_difference) return False # Item for item equality test for key, proxy in configured_proxys.items(): if not proxy.has_equal_attributes(actual_bindings[key]): log('monitor binding difference %s' % proxy.diff_object(actual_bindings[key])) return False # Fallthrough to True result return True def sync_monitor_bindings(client, module): log('sync_monitor_bindings') actual_monitor_bindings = get_actual_monitor_bindings(client, module) configured_monitor_proxys = get_configured_monitor_bindings(client, module) # Delete actual bindings not in configured bindings for monitor_name, actual_binding in actual_monitor_bindings.items(): if monitor_name not in configured_monitor_proxys.keys(): log('Deleting absent binding for monitor %s' % monitor_name) log('dir is %s' % dir(actual_binding)) gslbservice_lbmonitor_binding.delete(client, actual_binding) # Delete and re-add actual bindings that differ from configured for proxy_key, binding_proxy in configured_monitor_proxys.items(): if proxy_key in actual_monitor_bindings: actual_binding = actual_monitor_bindings[proxy_key] if not binding_proxy.has_equal_attributes(actual_binding): log('Deleting differing binding for monitor %s' % actual_binding.monitor_name) log('dir %s' % dir(actual_binding)) log('attribute monitor_name %s' % getattr(actual_binding, 'monitor_name')) log('attribute monitorname %s' % getattr(actual_binding, 'monitorname', None)) gslbservice_lbmonitor_binding.delete(client, actual_binding) log('Adding anew binding for monitor %s' % binding_proxy.monitor_name) binding_proxy.add() # Add configured monitors that are missing from actual for proxy_key, binding_proxy in configured_monitor_proxys.items(): if proxy_key not in actual_monitor_bindings.keys(): log('Adding monitor binding for monitor %s' % binding_proxy.monitor_name) binding_proxy.add() def diff_list(client, module, gslb_service_proxy): gslb_service_list = gslbservice.get_filtered(client, 'servicename:%s' % module.params['servicename']) diff_list = gslb_service_proxy.diff_object(gslb_service_list[0]) if 'ip' in diff_list: del diff_list['ip'] return diff_list def all_identical(client, module, gslb_service_proxy): return gslb_service_identical(client, module, gslb_service_proxy) and monitor_bindings_identical(client, module) def main(): module_specific_arguments = dict( servicename=dict(type='str'), cnameentry=dict(type='str'), servername=dict(type='str'), servicetype=dict( type='str', choices=[ 'HTTP', 'FTP', 'TCP', 'UDP', 'SSL', 'SSL_BRIDGE', 'SSL_TCP', 'NNTP', 'ANY', 'SIP_UDP', 'SIP_TCP', 'SIP_SSL', 'RADIUS', 'RDP', 'RTSP', 'MYSQL', 'MSSQL', 'ORACLE', ] ), port=dict(type='int'), publicip=dict(type='str'), publicport=dict(type='int'), maxclient=dict(type='float'), healthmonitor=dict(type='bool'), sitename=dict(type='str'), cip=dict( type='str', choices=[ 'enabled', 'disabled', ] ), cipheader=dict(type='str'), sitepersistence=dict( type='str', choices=[ 'ConnectionProxy', 'HTTPRedirect', 'NONE', ] ), siteprefix=dict(type='str'), clttimeout=dict(type='float'), maxbandwidth=dict(type='float'), downstateflush=dict( type='str', choices=[ 'enabled', 'disabled', ] ), maxaaausers=dict(type='float'), monthreshold=dict(type='float'), hashid=dict(type='float'), comment=dict(type='str'), appflowlog=dict( type='str', choices=[ 'enabled', 'disabled', ] ), ipaddress=dict(type='str'), ) hand_inserted_arguments = dict( monitor_bindings=dict(type='list'), ) argument_spec = dict() argument_spec.update(netscaler_common_arguments) argument_spec.update(module_specific_arguments) argument_spec.update(hand_inserted_arguments) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) module_result = dict( changed=False, failed=False, loglines=loglines, ) # Fail the module if imports failed if not PYTHON_SDK_IMPORTED: module.fail_json(msg='Could not load nitro python sdk') # Fallthrough to rest of execution client = get_nitro_client(module) try: client.login() except nitro_exception as e: msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message) module.fail_json(msg=msg) except Exception as e: if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>": module.fail_json(msg='Connection error %s' % str(e)) elif str(type(e)) == "<class 'requests.exceptions.SSLError'>": module.fail_json(msg='SSL Error %s' % str(e)) else: module.fail_json(msg='Unexpected error during login %s' % str(e)) readwrite_attrs = [ 'servicename', 'cnameentry', 'ip', 'servername', 'servicetype', 'port', 'publicip', 'publicport', 'maxclient', 'healthmonitor', 'sitename', 'cip', 'cipheader', 'sitepersistence', 'siteprefix', 'clttimeout', 'maxbandwidth', 'downstateflush', 'maxaaausers', 'monthreshold', 'hashid', 'comment', 'appflowlog', 'ipaddress', ] readonly_attrs = [ 'gslb', 'svrstate', 'svreffgslbstate', 'gslbthreshold', 'gslbsvcstats', 'monstate', 'preferredlocation', 'monitor_state', 'statechangetimesec', 'tickssincelaststatechange', 'threshold', 'clmonowner', 'clmonview', '__count', ] immutable_attrs = [ 'servicename', 'cnameentry', 'ip', 'servername', 'servicetype', 'port', 'sitename', 'state', 'cipheader', 'cookietimeout', 'clttimeout', 'svrtimeout', 'viewip', 'monitor_name_svc', 'newname', ] transforms = { 'healthmonitor': ['bool_yes_no'], 'cip': [lambda v: v.upper()], 'downstateflush': [lambda v: v.upper()], 'appflowlog': [lambda v: v.upper()], } # params = copy.deepcopy(module.params) module.params['ip'] = module.params['ipaddress'] # Instantiate config proxy gslb_service_proxy = ConfigProxy( actual=gslbservice(), client=client, attribute_values_dict=module.params, transforms=transforms, readwrite_attrs=readwrite_attrs, readonly_attrs=readonly_attrs, immutable_attrs=immutable_attrs, ) try: ensure_feature_is_enabled(client, 'GSLB') # Apply appropriate state if module.params['state'] == 'present': if not gslb_service_exists(client, module): if not module.check_mode: gslb_service_proxy.add() sync_monitor_bindings(client, module) if module.params['save_config']: client.save_config() module_result['changed'] = True elif not all_identical(client, module, gslb_service_proxy): # Check if we try to change value of immutable attributes immutables_changed = get_immutables_intersection(gslb_service_proxy, diff_list(client, module, gslb_service_proxy).keys()) if immutables_changed != []: module.fail_json( msg='Cannot update immutable attributes %s' % (immutables_changed,), diff=diff_list(client, module, gslb_service_proxy), **module_result ) # Update main configuration object if not gslb_service_identical(client, module, gslb_service_proxy): if not module.check_mode: gslb_service_proxy.update() # Update monitor bindigns if not monitor_bindings_identical(client, module): if not module.check_mode: sync_monitor_bindings(client, module) # Fallthrough to save and change status update module_result['changed'] = True if module.params['save_config']: client.save_config() else: module_result['changed'] = False # Sanity check for state if not module.check_mode: if not gslb_service_exists(client, module): module.fail_json(msg='GSLB service does not exist', **module_result) if not gslb_service_identical(client, module, gslb_service_proxy): module.fail_json( msg='GSLB service differs from configured', diff=diff_list(client, module, gslb_service_proxy), **module_result ) if not monitor_bindings_identical(client, module): module.fail_json( msg='Monitor bindings differ from configured', diff=diff_list(client, module, gslb_service_proxy), **module_result ) elif module.params['state'] == 'absent': if gslb_service_exists(client, module): if not module.check_mode: gslb_service_proxy.delete() if module.params['save_config']: client.save_config() module_result['changed'] = True else: module_result['changed'] = False # Sanity check for state if not module.check_mode: if gslb_service_exists(client, module): module.fail_json(msg='GSLB service still exists', **module_result) except nitro_exception as e: msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message) module.fail_json(msg=msg, **module_result) client.logout() module.exit_json(**module_result) if __name__ == "__main__": main()
haiyangd/server-watch-client-
refs/heads/master
modules/local.py
2
# -*- coding: utf-8 -*- from modules.base_module import BaseModule from monitoring.services import service_is_running from monitoring.system import system_load, system_uptime, system_resources_summary class UptimeMonitor(BaseModule): def run(self): self.log(system_uptime()) class LoadMonitor(BaseModule): def run(self): self.log('{avg1} {avg5} {avg15} {runnable}/{existing}', **system_load()) class ServiceMonitor(BaseModule): def run(self): ups = [] downs = [] for srv in self.params or []: if service_is_running(srv): ups.append(srv) else: downs.append(srv) self.log('up: {0}, down: {1}', ups, downs) class ResourceMonitor(BaseModule): def run(self): self.log('RAM: {ram}, SWAP: {swap}, DISK: {disk}, NET: -{net-recv}/+{net-sent}', **system_resources_summary())
takeshineshiro/cinder
refs/heads/master
cinder/tests/unit/test_api_urlmap.py
17
# Copyright (c) 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Tests for cinder.api.urlmap.py """ from cinder.api import urlmap from cinder import test class TestParseFunctions(test.TestCase): def test_unquote_header_value_without_quotes(self): arg = 'TestString' result = urlmap.unquote_header_value(arg) self.assertEqual(arg, result) def test_unquote_header_value_with_quotes(self): result = urlmap.unquote_header_value('"TestString"') self.assertEqual('TestString', result) def test_parse_list_header(self): arg = 'token, "quoted value"' result = urlmap.parse_list_header(arg) self.assertEqual(['token', 'quoted value'], result) def test_parse_options_header(self): result = urlmap.parse_options_header('Content-Type: text/html;' ' mimetype=text/html') self.assertEqual(('Content-Type:', {'mimetype': 'text/html'}), result) def test_parse_options_header_without_value(self): result = urlmap.parse_options_header(None) self.assertEqual(('', {}), result) class TestAccept(test.TestCase): def test_best_match_ValueError(self): arg = 'text/html; q=some_invalud_value' accept = urlmap.Accept(arg) self.assertEqual((None, {}), accept.best_match(['text/html'])) def test_best_match(self): arg = '*/*; q=0.7, application/json; q=0.7, text/html; q=-0.8' accept = urlmap.Accept(arg) self.assertEqual(('application/json', {'q': '0.7'}), accept.best_match(['application/json', 'application/xml', 'text/html'])) def test_match_mask_one_asterisk(self): arg = 'text/*; q=0.7' accept = urlmap.Accept(arg) self.assertEqual(('text/html', {'q': '0.7'}), accept.best_match(['text/html'])) def test_match_mask_two_asterisk(self): arg = '*/*; q=0.7' accept = urlmap.Accept(arg) self.assertEqual(('text/html', {'q': '0.7'}), accept.best_match(['text/html'])) def test_match_mask_no_asterisk(self): arg = 'application/json; q=0.7' accept = urlmap.Accept(arg) self.assertEqual((None, {}), accept.best_match(['text/html'])) def test_content_type_params(self): arg = "application/xml; q=0.1, application/json; q=0.2," \ " text/html; q=0.3" accept = urlmap.Accept(arg) self.assertEqual({'q': '0.2'}, accept.content_type_params('application/json')) def test_content_type_params_wrong_content_type(self): arg = 'application/xml; q=0.1, text/html; q=0.1' accept = urlmap.Accept(arg) self.assertEqual({}, accept.content_type_params('application/json')) class TestUrlMapFactory(test.TestCase): def setUp(self): super(TestUrlMapFactory, self).setUp() self.global_conf = {'not_found_app': 'app_global', 'domain hoobar.com port 10 /': 'some_app_global'} self.loader = self.mox.CreateMockAnything() def test_not_found_app_in_local_conf(self): local_conf = {'not_found_app': 'app_local', 'domain foobar.com port 20 /': 'some_app_local'} self.loader.get_app('app_local', global_conf=self.global_conf).\ AndReturn('app_local_loader') self.loader.get_app('some_app_local', global_conf=self.global_conf).\ AndReturn('some_app_loader') self.mox.ReplayAll() expected_urlmap = urlmap.URLMap(not_found_app='app_local_loader') expected_urlmap['http://foobar.com:20'] = 'some_app_loader' self.assertEqual(expected_urlmap, urlmap.urlmap_factory(self.loader, self.global_conf, **local_conf)) def test_not_found_app_not_in_local_conf(self): local_conf = {'domain foobar.com port 20 /': 'some_app_local'} self.loader.get_app('app_global', global_conf=self.global_conf).\ AndReturn('app_global_loader') self.loader.get_app('some_app_local', global_conf=self.global_conf).\ AndReturn('some_app_returned_by_loader') self.mox.ReplayAll() expected_urlmap = urlmap.URLMap(not_found_app='app_global_loader') expected_urlmap['http://foobar.com:20'] = 'some_app_returned'\ '_by_loader' self.assertEqual(expected_urlmap, urlmap.urlmap_factory(self.loader, self.global_conf, **local_conf)) def test_not_found_app_is_none(self): local_conf = {'not_found_app': None, 'domain foobar.com port 20 /': 'some_app_local'} self.loader.get_app('some_app_local', global_conf=self.global_conf).\ AndReturn('some_app_returned_by_loader') self.mox.ReplayAll() expected_urlmap = urlmap.URLMap(not_found_app=None) expected_urlmap['http://foobar.com:20'] = 'some_app_returned'\ '_by_loader' self.assertEqual(expected_urlmap, urlmap.urlmap_factory(self.loader, self.global_conf, **local_conf)) class TestURLMap(test.TestCase): def setUp(self): super(TestURLMap, self).setUp() self.urlmap = urlmap.URLMap() self.input_environ = {'HTTP_ACCEPT': "application/json;" "version=9.0", 'REQUEST_METHOD': "GET", 'CONTENT_TYPE': 'application/xml', 'SCRIPT_NAME': '/scriptname', 'PATH_INFO': "/resource.xml"} self.environ = {'HTTP_ACCEPT': "application/json;" "version=9.0", 'REQUEST_METHOD': "GET", 'CONTENT_TYPE': 'application/xml', 'SCRIPT_NAME': '/scriptname/app_url', 'PATH_INFO': "/resource.xml"} def test_match_with_applications(self): self.urlmap[('http://10.20.30.40:50', '/path/somepath')] = 'app' self.assertEqual((None, None), self.urlmap._match('20.30.40.50', '20', 'path/somepath')) def test_match_without_applications(self): self.assertEqual((None, None), self.urlmap._match('host', 20, 'app_url/somepath')) def test_match_path_info_equals_app_url(self): self.urlmap[('http://20.30.40.50:60', '/app_url/somepath')] = 'app' self.assertEqual(('app', '/app_url/somepath'), self.urlmap._match('http://20.30.40.50', '60', '/app_url/somepath')) def test_match_path_info_equals_app_url_many_app(self): self.urlmap[('http://20.30.40.50:60', '/path')] = 'app1' self.urlmap[('http://20.30.40.50:60', '/path/somepath')] = 'app2' self.urlmap[('http://20.30.40.50:60', '/path/somepath/elsepath')] = \ 'app3' self.assertEqual(('app3', '/path/somepath/elsepath'), self.urlmap._match('http://20.30.40.50', '60', '/path/somepath/elsepath')) def test_set_script_name(self): app = self.mox.CreateMockAnything() start_response = self.mox.CreateMockAnything() app.__call__(self.environ, start_response).AndReturn('value') self.mox.ReplayAll() wrap = self.urlmap._set_script_name(app, '/app_url') self.assertEqual('value', wrap(self.input_environ, start_response)) def test_munge_path(self): app = self.mox.CreateMockAnything() start_response = self.mox.CreateMockAnything() app.__call__(self.environ, start_response).AndReturn('value') self.mox.ReplayAll() wrap = self.urlmap._munge_path(app, '/app_url/resource.xml', '/app_url') self.assertEqual('value', wrap(self.input_environ, start_response)) def test_content_type_strategy_without_version(self): self.assertEqual(None, self.urlmap._content_type_strategy('host', 20, self.environ)) def test_content_type_strategy_with_version(self): environ = {'HTTP_ACCEPT': "application/vnd.openstack.melange+xml;" "version=9.0", 'REQUEST_METHOD': "GET", 'PATH_INFO': "/resource.xml", 'CONTENT_TYPE': 'application/xml; version=2.0'} self.urlmap[('http://10.20.30.40:50', '/v2.0')] = 'app' self.mox.StubOutWithMock(self.urlmap, '_set_script_name') self.urlmap._set_script_name('app', '/v2.0').AndReturn('value') self.mox.ReplayAll() self.assertEqual('value', self.urlmap._content_type_strategy( 'http://10.20.30.40', '50', environ)) def test_path_strategy_wrong_path_info(self): self.assertEqual((None, None, None), self.urlmap._path_strategy('http://10.20.30.40', '50', '/resource')) def test_path_strategy_mime_type_only(self): self.assertEqual(('application/xml', None, None), self.urlmap._path_strategy('http://10.20.30.40', '50', '/resource.xml')) def test_path_strategy(self): self.urlmap[('http://10.20.30.40:50', '/path/elsepath/')] = 'app' self.mox.StubOutWithMock(self.urlmap, '_munge_path') self.urlmap._munge_path('app', '/path/elsepath/resource.xml', '/path/elsepath').AndReturn('value') self.mox.ReplayAll() self.assertEqual( ('application/xml', 'value', '/path/elsepath'), self.urlmap._path_strategy('http://10.20.30.40', '50', '/path/elsepath/resource.xml')) def test_path_strategy_wrong_mime_type(self): self.urlmap[('http://10.20.30.40:50', '/path/elsepath/')] = 'app' self.mox.StubOutWithMock(self.urlmap, '_munge_path') self.urlmap._munge_path('app', '/path/elsepath/resource.abc', '/path/elsepath').AndReturn('value') self.mox.ReplayAll() self.assertEqual( (None, 'value', '/path/elsepath'), self.urlmap._path_strategy('http://10.20.30.40', '50', '/path/elsepath/resource.abc')) def test_accept_strategy_version_not_in_params(self): environ = {'HTTP_ACCEPT': "application/xml; q=0.1, application/json; " "q=0.2", 'REQUEST_METHOD': "GET", 'PATH_INFO': "/resource.xml", 'CONTENT_TYPE': 'application/xml; version=2.0'} self.assertEqual(('application/xml', None), self.urlmap._accept_strategy('http://10.20.30.40', '50', environ, ['application/xml'])) def test_accept_strategy_version(self): environ = {'HTTP_ACCEPT': "application/xml; q=0.1; version=1.0," "application/json; q=0.2; version=2.0", 'REQUEST_METHOD': "GET", 'PATH_INFO': "/resource.xml", 'CONTENT_TYPE': 'application/xml; version=2.0'} self.urlmap[('http://10.20.30.40:50', '/v1.0')] = 'app' self.mox.StubOutWithMock(self.urlmap, '_set_script_name') self.urlmap._set_script_name('app', '/v1.0').AndReturn('value') self.mox.ReplayAll() self.assertEqual(('application/xml', 'value'), self.urlmap._accept_strategy('http://10.20.30.40', '50', environ, ['application/xml']))
danduggan/hltd
refs/heads/master
lib/elasticsearch-py-1.4/elasticsearch/connection/http_urllib3.py
2
import time import urllib3_hltd as urllib3 from urllib3_hltd.exceptions import ReadTimeoutError, SSLError as UrllibSSLError import warnings from .base import Connection from ..exceptions import ConnectionError, ImproperlyConfigured, ConnectionTimeout, SSLError from ..compat import urlencode class Urllib3HttpConnection(Connection): """ Default connection class using the `urllib3` library and the http protocol. :arg http_auth: optional http auth information as either ':' separated string or a tuple :arg use_ssl: use ssl for the connection if `True` :arg verify_certs: whether to verify SSL certificates :arg ca_certs: optional path to CA bundle. See http://urllib3.readthedocs.org/en/latest/security.html#using-certifi-with-urllib3 for instructions how to get default set :arg client_cert: path to the file containing the private key and the certificate :arg maxsize: the maximum number of connections which will be kept open to this host. """ def __init__(self, host='localhost', port=9200, http_auth=None, use_ssl=False, verify_certs=False, ca_certs=None, client_cert=None, maxsize=10, **kwargs): super(Urllib3HttpConnection, self).__init__(host=host, port=port, **kwargs) self.headers = {} if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ':'.join(http_auth) self.headers = urllib3.make_headers(basic_auth=http_auth) pool_class = urllib3.HTTPConnectionPool kw = {} if use_ssl: pool_class = urllib3.HTTPSConnectionPool if verify_certs: kw['cert_reqs'] = 'CERT_REQUIRED' kw['ca_certs'] = ca_certs kw['cert_file'] = client_cert elif ca_certs: raise ImproperlyConfigured("You cannot pass CA certificates when verify SSL is off.") else: warnings.warn( 'Connecting to %s using SSL with verify_certs=False is insecure.' % host) self.pool = pool_class(host, port=port, timeout=self.timeout, maxsize=maxsize, **kw) def perform_request(self, method, url, params=None, body=None, timeout=None, ignore=()): url = self.url_prefix + url if params: url = '%s?%s' % (url, urlencode(params)) full_url = self.host + url start = time.time() try: kw = {} if timeout: kw['timeout'] = timeout # in python2 we need to make sure the url is not unicode. Otherwise # the body will be decoded into unicode too and that will fail (#133). if not isinstance(url, str): url = url.encode('utf-8') response = self.pool.urlopen(method, url, body, retries=False, headers=self.headers, **kw) duration = time.time() - start raw_data = response.data.decode('utf-8') except UrllibSSLError as e: self.log_request_fail(method, full_url, body, time.time() - start, exception=e) raise SSLError('N/A', str(e), e) except ReadTimeoutError as e: self.log_request_fail(method, full_url, body, time.time() - start, exception=e) raise ConnectionTimeout('TIMEOUT', str(e), e) except Exception as e: self.log_request_fail(method, full_url, body, time.time() - start, exception=e) raise ConnectionError('N/A', str(e), e) if not (200 <= response.status < 300) and response.status not in ignore: self.log_request_fail(method, url, body, duration, response.status) self._raise_error(response.status, raw_data) self.log_request_success(method, full_url, url, body, response.status, raw_data, duration) return response.status, response.getheaders(), raw_data
allenp/odoo
refs/heads/9.0
addons/payment_buckaroo/controllers/main.py
10
# -*- coding: utf-8 -*- import json import logging import pprint import werkzeug from openerp import http, SUPERUSER_ID from openerp.http import request _logger = logging.getLogger(__name__) class BuckarooController(http.Controller): _return_url = '/payment/buckaroo/return' _cancel_url = '/payment/buckaroo/cancel' _exception_url = '/payment/buckaroo/error' _reject_url = '/payment/buckaroo/reject' @http.route([ '/payment/buckaroo/return', '/payment/buckaroo/cancel', '/payment/buckaroo/error', '/payment/buckaroo/reject', ], type='http', auth='none') def buckaroo_return(self, **post): """ Buckaroo.""" _logger.info('Buckaroo: entering form_feedback with post data %s', pprint.pformat(post)) # debug request.registry['payment.transaction'].form_feedback(request.cr, SUPERUSER_ID, post, 'buckaroo', context=request.context) return_url = post.pop('return_url', '') if not return_url: data ='' + post.pop('ADD_RETURNDATA', '{}').replace("'", "\"") custom = json.loads(data) return_url = custom.pop('return_url', '/') return werkzeug.utils.redirect(return_url)
rmcgibbo/mongo-task
refs/heads/master
mongo_task/__init__.py
12133432
mammique/django
refs/heads/tp_alpha
django/contrib/gis/db/backends/__init__.py
12133432
eduNEXT/edx-platform
refs/heads/master
openedx/core/djangoapps/user_authn/__init__.py
12133432
nesdis/djongo
refs/heads/master
tests/django_tests/tests/v21/tests/check_framework/tests.py
56
import sys from io import StringIO from django.apps import apps from django.core import checks from django.core.checks import Error, Warning from django.core.checks.registry import CheckRegistry from django.core.management import call_command from django.core.management.base import CommandError from django.db import models from django.test import SimpleTestCase from django.test.utils import ( isolate_apps, override_settings, override_system_checks, ) from .models import SimpleModel, my_check class DummyObj: def __repr__(self): return "obj" class SystemCheckFrameworkTests(SimpleTestCase): def test_register_and_run_checks(self): def f(**kwargs): calls[0] += 1 return [1, 2, 3] def f2(**kwargs): return [4] def f3(**kwargs): return [5] calls = [0] # test register as decorator registry = CheckRegistry() registry.register()(f) registry.register("tag1", "tag2")(f2) registry.register("tag2", deploy=True)(f3) # test register as function registry2 = CheckRegistry() registry2.register(f) registry2.register(f2, "tag1", "tag2") registry2.register(f3, "tag2", deploy=True) # check results errors = registry.run_checks() errors2 = registry2.run_checks() self.assertEqual(errors, errors2) self.assertEqual(sorted(errors), [1, 2, 3, 4]) self.assertEqual(calls[0], 2) errors = registry.run_checks(tags=["tag1"]) errors2 = registry2.run_checks(tags=["tag1"]) self.assertEqual(errors, errors2) self.assertEqual(sorted(errors), [4]) errors = registry.run_checks(tags=["tag1", "tag2"], include_deployment_checks=True) errors2 = registry2.run_checks(tags=["tag1", "tag2"], include_deployment_checks=True) self.assertEqual(errors, errors2) self.assertEqual(sorted(errors), [4, 5]) class MessageTests(SimpleTestCase): def test_printing(self): e = Error("Message", hint="Hint", obj=DummyObj()) expected = "obj: Message\n\tHINT: Hint" self.assertEqual(str(e), expected) def test_printing_no_hint(self): e = Error("Message", obj=DummyObj()) expected = "obj: Message" self.assertEqual(str(e), expected) def test_printing_no_object(self): e = Error("Message", hint="Hint") expected = "?: Message\n\tHINT: Hint" self.assertEqual(str(e), expected) def test_printing_with_given_id(self): e = Error("Message", hint="Hint", obj=DummyObj(), id="ID") expected = "obj: (ID) Message\n\tHINT: Hint" self.assertEqual(str(e), expected) def test_printing_field_error(self): field = SimpleModel._meta.get_field('field') e = Error("Error", obj=field) expected = "check_framework.SimpleModel.field: Error" self.assertEqual(str(e), expected) def test_printing_model_error(self): e = Error("Error", obj=SimpleModel) expected = "check_framework.SimpleModel: Error" self.assertEqual(str(e), expected) def test_printing_manager_error(self): manager = SimpleModel.manager e = Error("Error", obj=manager) expected = "check_framework.SimpleModel.manager: Error" self.assertEqual(str(e), expected) def test_equal_to_self(self): e = Error("Error", obj=SimpleModel) self.assertEqual(e, e) def test_equal_to_same_constructed_check(self): e1 = Error("Error", obj=SimpleModel) e2 = Error("Error", obj=SimpleModel) self.assertEqual(e1, e2) def test_not_equal_to_different_constructed_check(self): e1 = Error("Error", obj=SimpleModel) e2 = Error("Error2", obj=SimpleModel) self.assertNotEqual(e1, e2) def test_not_equal_to_non_check(self): e = Error("Error", obj=DummyObj()) self.assertNotEqual(e, 'a string') def simple_system_check(**kwargs): simple_system_check.kwargs = kwargs return [] def tagged_system_check(**kwargs): tagged_system_check.kwargs = kwargs return [checks.Warning('System Check')] tagged_system_check.tags = ['simpletag'] def deployment_system_check(**kwargs): deployment_system_check.kwargs = kwargs return [checks.Warning('Deployment Check')] deployment_system_check.tags = ['deploymenttag'] class CheckCommandTests(SimpleTestCase): def setUp(self): simple_system_check.kwargs = None tagged_system_check.kwargs = None self.old_stdout, self.old_stderr = sys.stdout, sys.stderr sys.stdout, sys.stderr = StringIO(), StringIO() def tearDown(self): sys.stdout, sys.stderr = self.old_stdout, self.old_stderr @override_system_checks([simple_system_check, tagged_system_check]) def test_simple_call(self): call_command('check') self.assertEqual(simple_system_check.kwargs, {'app_configs': None}) self.assertEqual(tagged_system_check.kwargs, {'app_configs': None}) @override_system_checks([simple_system_check, tagged_system_check]) def test_given_app(self): call_command('check', 'auth', 'admin') auth_config = apps.get_app_config('auth') admin_config = apps.get_app_config('admin') self.assertEqual(simple_system_check.kwargs, {'app_configs': [auth_config, admin_config]}) self.assertEqual(tagged_system_check.kwargs, {'app_configs': [auth_config, admin_config]}) @override_system_checks([simple_system_check, tagged_system_check]) def test_given_tag(self): call_command('check', tags=['simpletag']) self.assertIsNone(simple_system_check.kwargs) self.assertEqual(tagged_system_check.kwargs, {'app_configs': None}) @override_system_checks([simple_system_check, tagged_system_check]) def test_invalid_tag(self): msg = 'There is no system check with the "missingtag" tag.' with self.assertRaisesMessage(CommandError, msg): call_command('check', tags=['missingtag']) @override_system_checks([simple_system_check]) def test_list_tags_empty(self): call_command('check', list_tags=True) self.assertEqual('\n', sys.stdout.getvalue()) @override_system_checks([tagged_system_check]) def test_list_tags(self): call_command('check', list_tags=True) self.assertEqual('simpletag\n', sys.stdout.getvalue()) @override_system_checks([tagged_system_check], deployment_checks=[deployment_system_check]) def test_list_deployment_check_omitted(self): call_command('check', list_tags=True) self.assertEqual('simpletag\n', sys.stdout.getvalue()) @override_system_checks([tagged_system_check], deployment_checks=[deployment_system_check]) def test_list_deployment_check_included(self): call_command('check', deploy=True, list_tags=True) self.assertEqual('deploymenttag\nsimpletag\n', sys.stdout.getvalue()) @override_system_checks([tagged_system_check], deployment_checks=[deployment_system_check]) def test_tags_deployment_check_omitted(self): msg = 'There is no system check with the "deploymenttag" tag.' with self.assertRaisesMessage(CommandError, msg): call_command('check', tags=['deploymenttag']) @override_system_checks([tagged_system_check], deployment_checks=[deployment_system_check]) def test_tags_deployment_check_included(self): call_command('check', deploy=True, tags=['deploymenttag']) self.assertIn('Deployment Check', sys.stderr.getvalue()) @override_system_checks([tagged_system_check]) def test_fail_level(self): with self.assertRaises(CommandError): call_command('check', fail_level='WARNING') def custom_error_system_check(app_configs, **kwargs): return [Error('Error', id='myerrorcheck.E001')] def custom_warning_system_check(app_configs, **kwargs): return [Warning('Warning', id='mywarningcheck.E001')] class SilencingCheckTests(SimpleTestCase): def setUp(self): self.old_stdout, self.old_stderr = sys.stdout, sys.stderr self.stdout, self.stderr = StringIO(), StringIO() sys.stdout, sys.stderr = self.stdout, self.stderr def tearDown(self): sys.stdout, sys.stderr = self.old_stdout, self.old_stderr @override_settings(SILENCED_SYSTEM_CHECKS=['myerrorcheck.E001']) @override_system_checks([custom_error_system_check]) def test_silenced_error(self): out = StringIO() err = StringIO() call_command('check', stdout=out, stderr=err) self.assertEqual(out.getvalue(), 'System check identified no issues (1 silenced).\n') self.assertEqual(err.getvalue(), '') @override_settings(SILENCED_SYSTEM_CHECKS=['mywarningcheck.E001']) @override_system_checks([custom_warning_system_check]) def test_silenced_warning(self): out = StringIO() err = StringIO() call_command('check', stdout=out, stderr=err) self.assertEqual(out.getvalue(), 'System check identified no issues (1 silenced).\n') self.assertEqual(err.getvalue(), '') class CheckFrameworkReservedNamesTests(SimpleTestCase): @isolate_apps('check_framework', kwarg_name='apps') @override_system_checks([checks.model_checks.check_all_models]) def test_model_check_method_not_shadowed(self, apps): class ModelWithAttributeCalledCheck(models.Model): check = 42 class ModelWithFieldCalledCheck(models.Model): check = models.IntegerField() class ModelWithRelatedManagerCalledCheck(models.Model): pass class ModelWithDescriptorCalledCheck(models.Model): check = models.ForeignKey(ModelWithRelatedManagerCalledCheck, models.CASCADE) article = models.ForeignKey( ModelWithRelatedManagerCalledCheck, models.CASCADE, related_name='check', ) errors = checks.run_checks(app_configs=apps.get_app_configs()) expected = [ Error( "The 'ModelWithAttributeCalledCheck.check()' class method is " "currently overridden by 42.", obj=ModelWithAttributeCalledCheck, id='models.E020' ), Error( "The 'ModelWithRelatedManagerCalledCheck.check()' class method is " "currently overridden by %r." % ModelWithRelatedManagerCalledCheck.check, obj=ModelWithRelatedManagerCalledCheck, id='models.E020' ), Error( "The 'ModelWithDescriptorCalledCheck.check()' class method is " "currently overridden by %r." % ModelWithDescriptorCalledCheck.check, obj=ModelWithDescriptorCalledCheck, id='models.E020' ), ] self.assertEqual(errors, expected) class ChecksRunDuringTests(SimpleTestCase): def test_registered_check_did_run(self): self.assertTrue(my_check.did_run)
feynmanliang/bachbot
refs/heads/master
Chorale-Beat-Count.py
1
from music21 import * #Filter for every Bach chorale that is in 4/4: catalog = stream.Opus() for workName in corpus.getBachChorales(): work = converter.parse(workName) firstTS = work.flat.getTimeSignatures()[0] if firstTS.ratioString == '4/4': catalog.append(work) #Count usages allbeats = list(); for p in catalog.flat.notes: for counter in range(len(p.pitches)): allbeats.append(p.beat) #Create the histogram import matplotlib.pyplot as plt import numpy as np plt.hist(allbeats, bins=32, range=(1,5)) #plt.title("Metrical Position Usage") plt.xlabel("Metrical Position ('crotchet', or '1/4 note')", fontsize=20) plt.ylabel("Frequency of Usage", fontsize=20) plt.xlim(1,5) plt.xticks(np.arange(1, 5, 1.0)) plt.savefig('Chorale position usage.png', facecolor='w', edgecolor='w', format='png') plt.show()
omgbebebe/warzone2100
refs/heads/master
tools/conversion/ini2json/ini2json_challenge.py
9
import ConfigParser import json import sys if len(sys.argv) < 2: print 'Need file parameter' sys.exit(1) config = ConfigParser.ConfigParser() config.optionxform = str # stop making keys lowercase config.read(sys.argv[1]) def is_number(s): try: int(s) return True except ValueError: return False data = {} for section in config.sections(): entry = {} for opt in config.items(section): key = opt[0] value = opt[1] if value.startswith('"') and value.endswith('"'): value = value[1:-1] accum = [] if is_number(value): entry[key] = int(value) else: entry[key] = value assert not section in data, '%s conflicts' % section data[section] = entry print json.dumps(data, indent=4, separators=(',', ': '), sort_keys=True)
hobbeswalsh/tensor
refs/heads/master
tensor/outputs/riemann.py
1
import time from twisted.internet import reactor, defer, task from twisted.python import log try: from OpenSSL import SSL from twisted.internet import ssl except: SSL=None from tensor.protocol import riemann from tensor.objects import Output if SSL: class ClientTLSContext(ssl.ClientContextFactory): def __init__(self, key, cert): self.key = key self.cert = cert def getContext(self): self.method = SSL.TLSv1_METHOD ctx = ssl.ClientContextFactory.getContext(self) ctx.use_certificate_file(self.cert) ctx.use_privatekey_file(self.key) return ctx class RiemannTCP(Output): """Riemann TCP output **Configuration arguments:** :param server: Riemann server hostname (default: localhost) :type server: str. :param port: Riemann server port (default: 5555) :type port: int. :param maxrate: Maximum de-queue rate (0 is no limit) :type maxrate: int. :param maxsize: Maximum queue size (0 is no limit, default is 250000) :type maxsize: int. :param interval: De-queue interval in seconds (default: 1.0) :type interval: float. :param pressure: Maximum backpressure (-1 is no limit) :type pressure: int. :param tls: Use TLS (default false) :type tls: bool. :param cert: Host certificate path :type cert: str. :param key: Host private key path :type key: str. :param allow_nan: Send events with None metric value (default true) :type allow_nan: bool """ def __init__(self, *a): Output.__init__(self, *a) self.events = [] self.t = task.LoopingCall(self.tick) self.inter = float(self.config.get('interval', 1.0)) # tick interval self.pressure = int(self.config.get('pressure', -1)) self.maxsize = int(self.config.get('maxsize', 250000)) self.expire = self.config.get('expire', False) self.allow_nan = self.config.get('allow_nan', True) maxrate = int(self.config.get('maxrate', 0)) if maxrate > 0: self.queueDepth = int(maxrate * self.inter) else: self.queueDepth = None self.tls = self.config.get('tls', False) if self.tls: self.cert = self.config['cert'] self.key = self.config['key'] def createClient(self): """Create a TCP connection to Riemann with automatic reconnection """ self.factory = riemann.RiemannClientFactory() server = self.config.get('server', 'localhost') port = self.config.get('port', 5555) if self.tls: if SSL: self.connector = reactor.connectSSL(server, port, self.factory, ClientTLSContext(self.key, self.cert)) else: log.msg('[FATAL] SSL support not available!' \ ' Please install PyOpenSSL. Exiting now') reactor.stop() else: self.connector = reactor.connectTCP(server, port, self.factory) d = defer.Deferred() def cb(): # Wait until we have a useful proto object if hasattr(self.factory, 'proto') and self.factory.proto: self.t.start(self.inter) d.callback(None) else: reactor.callLater(0.01, cb) cb() return d def stop(self): """Stop this client. """ self.t.stop() self.factory.stopTrying() self.connector.disconnect() def tick(self): """Clock tick called every self.inter """ if self.factory.proto: # Check backpressure if (self.pressure < 0) or (self.factory.proto.pressure <= self.pressure): self.emptyQueue() elif self.expire: # Check queue age and expire stale events for i, e in enumerate(self.events): if (time.time() - e.time) > e.ttl: self.events.pop(i) def emptyQueue(self): """Remove all or self.queueDepth events from the queue """ if self.events: if self.queueDepth and (len(self.events) > self.queueDepth): # Remove maximum of self.queueDepth items from queue events = self.events[:self.queueDepth] self.events = self.events[self.queueDepth:] else: events = self.events self.events = [] if self.allow_nan: self.factory.proto.sendEvents(events) else: self.factory.proto.sendEvents([e for e in events if e.metric is not None]) def eventsReceived(self, events): """Receives a list of events and transmits them to Riemann Arguments: events -- list of `tensor.objects.Event` """ # Make sure queue isn't oversized if (self.maxsize < 1) or (len(self.events) < self.maxsize): self.events.extend(events) class RiemannUDP(Output): """Riemann UDP output (spray-and-pray mode) **Configuration arguments:** :param server: Riemann server IP address (default: 127.0.0.1) :type server: str. :param port: Riemann server port (default: 5555) :type port: int. """ def __init__(self, *a): Output.__init__(self, *a) self.protocol = None def createClient(self): """Create a UDP connection to Riemann""" server = self.config.get('server', '127.0.0.1') port = self.config.get('port', 5555) def connect(ip): self.protocol = riemann.RiemannUDP(ip, port) self.endpoint = reactor.listenUDP(0, self.protocol) d = reactor.resolve(server) d.addCallback(connect) return d def eventsReceived(self, events): """Receives a list of events and transmits them to Riemann Arguments: events -- list of `tensor.objects.Event` """ if self.protocol: self.protocol.sendEvents(events)
yangdw/PyRepo
refs/heads/master
src/annotation/Firefly/gfirefly/management/commands/reloadmodule.py
16
#coding:utf8 ''' Created on 2013-8-12 @author: lan (www.9miao.com) ''' import urllib,sys def execute(*args): """ """ if not args: masterport =9998 else: masterport = int(args[0]) url = "http://localhost:%s/reloadmodule"%masterport try: response = urllib.urlopen(url) except: response = None if response: sys.stdout.write("reload module success \n") else: sys.stdout.write("reload module failed \n")
lulandco/SickRage
refs/heads/develop
lib/mako/ext/extract.py
76
import re from mako import compat from mako import lexer from mako import parsetree class MessageExtractor(object): def process_file(self, fileobj): template_node = lexer.Lexer( fileobj.read(), input_encoding=self.config['encoding']).parse() for extracted in self.extract_nodes(template_node.get_children()): yield extracted def extract_nodes(self, nodes): translator_comments = [] in_translator_comments = False input_encoding = self.config['encoding'] or 'ascii' comment_tags = list( filter(None, re.split(r'\s+', self.config['comment-tags']))) for node in nodes: child_nodes = None if in_translator_comments and \ isinstance(node, parsetree.Text) and \ not node.content.strip(): # Ignore whitespace within translator comments continue if isinstance(node, parsetree.Comment): value = node.text.strip() if in_translator_comments: translator_comments.extend( self._split_comment(node.lineno, value)) continue for comment_tag in comment_tags: if value.startswith(comment_tag): in_translator_comments = True translator_comments.extend( self._split_comment(node.lineno, value)) continue if isinstance(node, parsetree.DefTag): code = node.function_decl.code child_nodes = node.nodes elif isinstance(node, parsetree.BlockTag): code = node.body_decl.code child_nodes = node.nodes elif isinstance(node, parsetree.CallTag): code = node.code.code child_nodes = node.nodes elif isinstance(node, parsetree.PageTag): code = node.body_decl.code elif isinstance(node, parsetree.CallNamespaceTag): code = node.expression child_nodes = node.nodes elif isinstance(node, parsetree.ControlLine): if node.isend: in_translator_comments = False continue code = node.text elif isinstance(node, parsetree.Code): in_translator_comments = False code = node.code.code elif isinstance(node, parsetree.Expression): code = node.code.code else: continue # Comments don't apply unless they immediately preceed the message if translator_comments and \ translator_comments[-1][0] < node.lineno - 1: translator_comments = [] translator_strings = [ comment[1] for comment in translator_comments] if isinstance(code, compat.text_type): code = code.encode(input_encoding, 'backslashreplace') used_translator_comments = False # We add extra newline to work around a pybabel bug # (see python-babel/babel#274, parse_encoding dies if the first # input string of the input is non-ascii) # Also, because we added it, we have to subtract one from # node.lineno code = compat.byte_buffer(compat.b('\n') + code) for message in self.process_python( code, node.lineno - 1, translator_strings): yield message used_translator_comments = True if used_translator_comments: translator_comments = [] in_translator_comments = False if child_nodes: for extracted in self.extract_nodes(child_nodes): yield extracted @staticmethod def _split_comment(lineno, comment): """Return the multiline comment at lineno split into a list of comment line numbers and the accompanying comment line""" return [(lineno + index, line) for index, line in enumerate(comment.splitlines())]
SuperDARNCanada/borealis
refs/heads/master
experiments/testing_archive/test_wavetype_not_defined.py
4
#!/usr/bin/python # write an experiment that raises an exception import sys import os BOREALISPATH = os.environ['BOREALISPATH'] sys.path.append(BOREALISPATH) import experiments.superdarn_common_fields as scf from experiment_prototype.experiment_prototype import ExperimentPrototype class TestExperiment(ExperimentPrototype): def __init__(self): cpid = 1 super(TestExperiment, self).__init__(cpid) if scf.IS_FORWARD_RADAR: beams_to_use = scf.STD_16_FORWARD_BEAM_ORDER else: beams_to_use = scf.STD_16_REVERSE_BEAM_ORDER if scf.opts.site_id in ["cly", "rkn", "inv"]: num_ranges = scf.POLARDARN_NUM_RANGES if scf.opts.site_id in ["sas", "pgr"]: num_ranges = scf.STD_NUM_RANGES slice_1 = { # slice_id = 0, there is only one slice. "pulse_sequence": scf.SEQUENCE_7P, "tau_spacing": scf.TAU_SPACING_7P, "pulse_len": scf.PULSE_LEN_45KM, "num_ranges": num_ranges, "first_range": scf.STD_FIRST_RANGE, "intt": 3500, # duration of an integration, in ms "beam_angle": scf.STD_16_BEAM_ANGLE, "beam_order": beams_to_use, "scanbound": [i * 3.5 for i in range(len(beams_to_use))], #1 min scan "txfreq" : scf.COMMON_MODE_FREQ_1, #kHz "acf": True, "xcf": True, # cross-correlation processing "acfint": True, # interferometer acfs "wavetype": 'SIGN', # if you can't read the signs, you're gonna fail } self.add_slice(slice_1)
mfherbst/spack
refs/heads/develop
var/spack/repos/builtin/packages/sqlitebrowser/package.py
4
############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class Sqlitebrowser(CMakePackage): """DB Browser for SQLite (DB4S) is a high quality, visual, open source tool to create, design, and edit database files compatible with SQLite.""" homepage = "https://sqlitebrowser.org" url = "https://github.com/sqlitebrowser/sqlitebrowser/archive/v3.10.1.tar.gz" version('3.10.1', '66cbe41f9da5be80067942ed3816576c') msg = 'sqlitebrowser requires C++11 support' conflicts('%gcc@:4.8.0', msg=msg) conflicts('%clang@:3.2', msg=msg) conflicts('%intel@:12', msg=msg) conflicts('%xl@:13.0', msg=msg) conflicts('%xl_r@:13.0', msg=msg) depends_on('sqlite@3:+functions') depends_on('[email protected]:')
metacloud/python-neutronclient
refs/heads/mc/2013.1.7
neutronclient/neutron/v2_0/quota.py
5
# Copyright 2012 OpenStack LLC. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # vim: tabstop=4 shiftwidth=4 softtabstop=4 import argparse import logging from cliff import lister from cliff import show from neutronclient.common import exceptions from neutronclient.common import utils from neutronclient.neutron import v2_0 as neutronV20 from neutronclient.openstack.common.gettextutils import _ def get_tenant_id(tenant_id, client): return (tenant_id if tenant_id else client.get_quotas_tenant()['tenant']['tenant_id']) class DeleteQuota(neutronV20.NeutronCommand): """Delete defined quotas of a given tenant.""" api = 'network' resource = 'quota' log = logging.getLogger(__name__ + '.DeleteQuota') def get_parser(self, prog_name): parser = super(DeleteQuota, self).get_parser(prog_name) parser.add_argument( '--tenant-id', metavar='tenant-id', help='the owner tenant ID') parser.add_argument( '--tenant_id', help=argparse.SUPPRESS) return parser def run(self, parsed_args): self.log.debug('run(%s)' % parsed_args) neutron_client = self.get_client() neutron_client.format = parsed_args.request_format tenant_id = get_tenant_id(parsed_args.tenant_id, neutron_client) obj_deleter = getattr(neutron_client, "delete_%s" % self.resource) obj_deleter(tenant_id) print >>self.app.stdout, (_('Deleted %(resource)s: %(tenant_id)s') % {'tenant_id': tenant_id, 'resource': self.resource}) return class ListQuota(neutronV20.NeutronCommand, lister.Lister): """List defined quotas of all tenants.""" api = 'network' resource = 'quota' log = logging.getLogger(__name__ + '.ListQuota') def get_parser(self, prog_name): parser = super(ListQuota, self).get_parser(prog_name) return parser def get_data(self, parsed_args): self.log.debug('get_data(%s)' % parsed_args) neutron_client = self.get_client() search_opts = {} self.log.debug('search options: %s', search_opts) neutron_client.format = parsed_args.request_format obj_lister = getattr(neutron_client, "list_%ss" % self.resource) data = obj_lister(**search_opts) info = [] collection = self.resource + "s" if collection in data: info = data[collection] _columns = len(info) > 0 and sorted(info[0].keys()) or [] return (_columns, (utils.get_item_properties(s, _columns) for s in info)) class ShowQuota(neutronV20.NeutronCommand, show.ShowOne): """Show quotas of a given tenant """ api = 'network' resource = "quota" log = logging.getLogger(__name__ + '.ShowQuota') def get_parser(self, prog_name): parser = super(ShowQuota, self).get_parser(prog_name) parser.add_argument( '--tenant-id', metavar='tenant-id', help='the owner tenant ID') parser.add_argument( '--tenant_id', help=argparse.SUPPRESS) return parser def get_data(self, parsed_args): self.log.debug('get_data(%s)' % parsed_args) neutron_client = self.get_client() neutron_client.format = parsed_args.request_format tenant_id = get_tenant_id(parsed_args.tenant_id, neutron_client) params = {} obj_shower = getattr(neutron_client, "show_%s" % self.resource) data = obj_shower(tenant_id, **params) if self.resource in data: for k, v in data[self.resource].iteritems(): if isinstance(v, list): value = "" for _item in v: if value: value += "\n" if isinstance(_item, dict): value += utils.dumps(_item) else: value += str(_item) data[self.resource][k] = value elif v is None: data[self.resource][k] = '' return zip(*sorted(data[self.resource].iteritems())) else: return None class UpdateQuota(neutronV20.NeutronCommand, show.ShowOne): """Define tenant's quotas not to use defaults.""" resource = 'quota' log = logging.getLogger(__name__ + '.UpdateQuota') def get_parser(self, prog_name): parser = super(UpdateQuota, self).get_parser(prog_name) parser.add_argument( '--tenant-id', metavar='tenant-id', help='the owner tenant ID') parser.add_argument( '--tenant_id', help=argparse.SUPPRESS) parser.add_argument( '--network', metavar='networks', help='the limit of networks') parser.add_argument( '--subnet', metavar='subnets', help='the limit of subnets') parser.add_argument( '--port', metavar='ports', help='the limit of ports') parser.add_argument( '--router', metavar='routers', help='the limit of routers') parser.add_argument( '--floatingip', metavar='floatingips', help='the limit of floating IPs') parser.add_argument( '--security-group', metavar='security_groups', help='the limit of security groups') parser.add_argument( '--security-group-rule', metavar='security_group_rules', help='the limit of security groups rules') return parser def _validate_int(self, name, value): try: return_value = int(value) except Exception: message = (_('quota limit for %(name)s must be an integer') % {'name': name}) raise exceptions.NeutronClientException(message=message) return return_value def args2body(self, parsed_args): quota = {} for resource in ('network', 'subnet', 'port', 'router', 'floatingip', 'security_group', 'security_group_rule'): if getattr(parsed_args, resource): quota[resource] = self._validate_int( resource, getattr(parsed_args, resource)) return {self.resource: quota} def get_data(self, parsed_args): self.log.debug('run(%s)' % parsed_args) neutron_client = self.get_client() neutron_client.format = parsed_args.request_format _extra_values = neutronV20.parse_args_to_dict(self.values_specs) neutronV20._merge_args(self, parsed_args, _extra_values, self.values_specs) body = self.args2body(parsed_args) if self.resource in body: body[self.resource].update(_extra_values) else: body[self.resource] = _extra_values obj_updator = getattr(neutron_client, "update_%s" % self.resource) tenant_id = get_tenant_id(parsed_args.tenant_id, neutron_client) data = obj_updator(tenant_id, body) if self.resource in data: for k, v in data[self.resource].iteritems(): if isinstance(v, list): value = "" for _item in v: if value: value += "\n" if isinstance(_item, dict): value += utils.dumps(_item) else: value += str(_item) data[self.resource][k] = value elif v is None: data[self.resource][k] = '' return zip(*sorted(data[self.resource].iteritems())) else: return None
earsneyes/ardupilot
refs/heads/master
Tools/autotest/common.py
142
import util, pexpect, time, math from pymavlink import mavwp # a list of pexpect objects to read while waiting for # messages. This keeps the output to stdout flowing expect_list = [] def expect_list_clear(): '''clear the expect list''' global expect_list for p in expect_list[:]: expect_list.remove(p) def expect_list_extend(list): '''extend the expect list''' global expect_list expect_list.extend(list) def idle_hook(mav): '''called when waiting for a mavlink message''' global expect_list for p in expect_list: util.pexpect_drain(p) def message_hook(mav, msg): '''called as each mavlink msg is received''' idle_hook(mav) def expect_callback(e): '''called when waiting for a expect pattern''' global expect_list for p in expect_list: if p == e: continue util.pexpect_drain(p) def get_distance(loc1, loc2): '''get ground distance between two locations''' dlat = loc2.lat - loc1.lat dlong = loc2.lng - loc1.lng return math.sqrt((dlat*dlat) + (dlong*dlong)) * 1.113195e5 def get_bearing(loc1, loc2): '''get bearing from loc1 to loc2''' off_x = loc2.lng - loc1.lng off_y = loc2.lat - loc1.lat bearing = 90.00 + math.atan2(-off_y, off_x) * 57.2957795 if bearing < 0: bearing += 360.00 return bearing; def wait_seconds(mav, seconds_to_wait): tstart = get_sim_time(mav) tnow = tstart while tstart + seconds_to_wait > tnow: tnow = get_sim_time(mav) def get_sim_time(mav): m = mav.recv_match(type='SYSTEM_TIME', blocking=True) return m.time_boot_ms * 1.0e-3 def wait_altitude(mav, alt_min, alt_max, timeout=30): climb_rate = 0 previous_alt = 0 '''wait for a given altitude range''' tstart = get_sim_time(mav) print("Waiting for altitude between %u and %u" % (alt_min, alt_max)) while get_sim_time(mav) < tstart + timeout: m = mav.recv_match(type='VFR_HUD', blocking=True) climb_rate = m.alt - previous_alt previous_alt = m.alt print("Wait Altitude: Cur:%u, min_alt:%u, climb_rate: %u" % (m.alt, alt_min , climb_rate)) if m.alt >= alt_min and m.alt <= alt_max: print("Altitude OK") return True print("Failed to attain altitude range") return False def wait_groundspeed(mav, gs_min, gs_max, timeout=30): '''wait for a given ground speed range''' tstart = get_sim_time(mav) print("Waiting for groundspeed between %.1f and %.1f" % (gs_min, gs_max)) while get_sim_time(mav) < tstart + timeout: m = mav.recv_match(type='VFR_HUD', blocking=True) print("Wait groundspeed %.1f, target:%.1f" % (m.groundspeed, gs_min)) if m.groundspeed >= gs_min and m.groundspeed <= gs_max: return True print("Failed to attain groundspeed range") return False def wait_roll(mav, roll, accuracy, timeout=30): '''wait for a given roll in degrees''' tstart = get_sim_time(mav) print("Waiting for roll of %d at %s" % (roll, time.ctime())) while get_sim_time(mav) < tstart + timeout: m = mav.recv_match(type='ATTITUDE', blocking=True) p = math.degrees(m.pitch) r = math.degrees(m.roll) print("Roll %d Pitch %d" % (r, p)) if math.fabs(r - roll) <= accuracy: print("Attained roll %d" % roll) return True print("Failed to attain roll %d" % roll) return False def wait_pitch(mav, pitch, accuracy, timeout=30): '''wait for a given pitch in degrees''' tstart = get_sim_time(mav) print("Waiting for pitch of %u at %s" % (pitch, time.ctime())) while get_sim_time(mav) < tstart + timeout: m = mav.recv_match(type='ATTITUDE', blocking=True) p = math.degrees(m.pitch) r = math.degrees(m.roll) print("Pitch %d Roll %d" % (p, r)) if math.fabs(p - pitch) <= accuracy: print("Attained pitch %d" % pitch) return True print("Failed to attain pitch %d" % pitch) return False def wait_heading(mav, heading, accuracy=5, timeout=30): '''wait for a given heading''' tstart = get_sim_time(mav) print("Waiting for heading %u with accuracy %u" % (heading, accuracy)) while get_sim_time(mav) < tstart + timeout: m = mav.recv_match(type='VFR_HUD', blocking=True) print("Heading %u" % m.heading) if math.fabs(m.heading - heading) <= accuracy: print("Attained heading %u" % heading) return True print("Failed to attain heading %u" % heading) return False def wait_distance(mav, distance, accuracy=5, timeout=30): '''wait for flight of a given distance''' tstart = get_sim_time(mav) start = mav.location() while get_sim_time(mav) < tstart + timeout: pos = mav.location() delta = get_distance(start, pos) print("Distance %.2f meters" % delta) if math.fabs(delta - distance) <= accuracy: print("Attained distance %.2f meters OK" % delta) return True if delta > (distance + accuracy): print("Failed distance - overshoot delta=%f distance=%f" % (delta, distance)) return False print("Failed to attain distance %u" % distance) return False def wait_location(mav, loc, accuracy=5, timeout=30, target_altitude=None, height_accuracy=-1): '''wait for arrival at a location''' tstart = get_sim_time(mav) if target_altitude is None: target_altitude = loc.alt print("Waiting for location %.4f,%.4f at altitude %.1f height_accuracy=%.1f" % ( loc.lat, loc.lng, target_altitude, height_accuracy)) while get_sim_time(mav) < tstart + timeout: pos = mav.location() delta = get_distance(loc, pos) print("Distance %.2f meters alt %.1f" % (delta, pos.alt)) if delta <= accuracy: if height_accuracy != -1 and math.fabs(pos.alt - target_altitude) > height_accuracy: continue print("Reached location (%.2f meters)" % delta) return True print("Failed to attain location") return False def wait_waypoint(mav, wpnum_start, wpnum_end, allow_skip=True, max_dist=2, timeout=400, mode=None): '''wait for waypoint ranges''' tstart = get_sim_time(mav) # this message arrives after we set the current WP start_wp = mav.waypoint_current() current_wp = start_wp print("\ntest: wait for waypoint ranges start=%u end=%u\n\n" % (wpnum_start, wpnum_end)) # if start_wp != wpnum_start: # print("test: Expected start waypoint %u but got %u" % (wpnum_start, start_wp)) # return False while get_sim_time(mav) < tstart + timeout: seq = mav.waypoint_current() m = mav.recv_match(type='NAV_CONTROLLER_OUTPUT', blocking=True) wp_dist = m.wp_dist m = mav.recv_match(type='VFR_HUD', blocking=True) # if we exited the required mode, finish if mode is not None and mav.flightmode != mode: print('Exited %s mode' % mode) return True print("test: WP %u (wp_dist=%u Alt=%d), current_wp: %u, wpnum_end: %u" % (seq, wp_dist, m.alt, current_wp, wpnum_end)) if seq == current_wp+1 or (seq > current_wp+1 and allow_skip): print("test: Starting new waypoint %u" % seq) tstart = get_sim_time(mav) current_wp = seq # the wp_dist check is a hack until we can sort out the right seqnum # for end of mission #if current_wp == wpnum_end or (current_wp == wpnum_end-1 and wp_dist < 2): if (current_wp == wpnum_end and wp_dist < max_dist): print("Reached final waypoint %u" % seq) return True if (seq >= 255): print("Reached final waypoint %u" % seq) return True if seq > current_wp+1: print("Failed: Skipped waypoint! Got wp %u expected %u" % (seq, current_wp+1)) return False print("Failed: Timed out waiting for waypoint %u of %u" % (wpnum_end, wpnum_end)) return False def save_wp(mavproxy, mav): mavproxy.send('rc 7 1000\n') mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True) wait_seconds(mav, 1) mavproxy.send('rc 7 2000\n') mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==2000', blocking=True) wait_seconds(mav, 1) mavproxy.send('rc 7 1000\n') mav.recv_match(condition='RC_CHANNELS_RAW.chan7_raw==1000', blocking=True) wait_seconds(mav, 1) def wait_mode(mav, mode, timeout=None): print("Waiting for mode %s" % mode) mav.recv_match(condition='MAV.flightmode.upper()=="%s".upper()' % mode, timeout=timeout, blocking=True) print("Got mode %s" % mode) return mav.flightmode def mission_count(filename): '''load a mission from a file and return number of waypoints''' wploader = mavwp.MAVWPLoader() wploader.load(filename) num_wp = wploader.count() return num_wp def sim_location(mav): '''return current simulator location''' from pymavlink import mavutil m = mav.recv_match(type='SIMSTATE', blocking=True) return mavutil.location(m.lat*1.0e-7, m.lng*1.0e-7, 0, math.degrees(m.yaw)) def log_download(mavproxy, mav, filename, timeout=360): '''download latest log''' mavproxy.send("log list\n") mavproxy.expect("numLogs") mav.wait_heartbeat() mav.wait_heartbeat() mavproxy.send("log download latest %s\n" % filename) mavproxy.expect("Finished downloading", timeout=timeout) mav.wait_heartbeat() mav.wait_heartbeat() return True
micropython/micropython
refs/heads/master
examples/rp2/pio_uart_rx.py
5
# Example using PIO to create a UART RX interface. # # To make it work you'll need a wire connecting GPIO4 and GPIO3. # # Demonstrates: # - PIO shifting in data on a pin # - PIO jmp(pin) instruction # - PIO irq handler # - using the second core via _thread import _thread from machine import Pin, UART from rp2 import PIO, StateMachine, asm_pio UART_BAUD = 9600 HARD_UART_TX_PIN = Pin(4, Pin.OUT) PIO_RX_PIN = Pin(3, Pin.IN, Pin.PULL_UP) @asm_pio( autopush=True, push_thresh=8, in_shiftdir=rp2.PIO.SHIFT_RIGHT, fifo_join=PIO.JOIN_RX, ) def uart_rx_mini(): # fmt: off # Wait for start bit wait(0, pin, 0) # Preload bit counter, delay until eye of first data bit set(x, 7) [10] # Loop 8 times label("bitloop") # Sample data in_(pins, 1) # Each iteration is 8 cycles jmp(x_dec, "bitloop") [6] # fmt: on @asm_pio( in_shiftdir=rp2.PIO.SHIFT_RIGHT, ) def uart_rx(): # fmt: off label("start") # Stall until start bit is asserted wait(0, pin, 0) # Preload bit counter, then delay until halfway through # the first data bit (12 cycles incl wait, set). set(x, 7) [10] label("bitloop") # Shift data bit into ISR in_(pins, 1) # Loop 8 times, each loop iteration is 8 cycles jmp(x_dec, "bitloop") [6] # Check stop bit (should be high) jmp(pin, "good_stop") # Either a framing error or a break. Set a sticky flag # and wait for line to return to idle state. irq(block, 4) wait(1, pin, 0) # Don't push data if we didn't see good framing. jmp("start") # No delay before returning to start; a little slack is # important in case the TX clock is slightly too fast. label("good_stop") push(block) # fmt: on # The handler for a UART break detected by the PIO. def handler(sm): print("break", time.ticks_ms(), end=" ") # Function for core1 to execute to write to the given UART. def core1_task(uart, text): uart.write(text) # Set up the hard UART we're going to use to print characters. uart = UART(1, UART_BAUD, tx=HARD_UART_TX_PIN) for pio_prog in ("uart_rx_mini", "uart_rx"): # Set up the state machine we're going to use to receive the characters. sm = StateMachine( 0, globals()[pio_prog], freq=8 * UART_BAUD, in_base=PIO_RX_PIN, # For WAIT, IN jmp_pin=PIO_RX_PIN, # For JMP ) sm.irq(handler) sm.active(1) # Tell core 1 to print some text to UART 1 text = "Hello, world from PIO, using {}!".format(pio_prog) _thread.start_new_thread(core1_task, (uart, text)) # Echo characters received from PIO to the console. for i in range(len(text)): print(chr(sm.get() >> 24), end="") print()
arnavd96/Cinemiezer
refs/heads/master
myvenv/lib/python3.4/site-packages/botocore/vendored/requests/packages/chardet/jisfreq.py
3130
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Sampling from about 20M text materials include literature and computer technology # # Japanese frequency table, applied to both S-JIS and EUC-JP # They are sorted in order. # 128 --> 0.77094 # 256 --> 0.85710 # 512 --> 0.92635 # 1024 --> 0.97130 # 2048 --> 0.99431 # # Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 # Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 # # Typical Distribution Ratio, 25% of IDR JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 # Char to FreqOrder table , JIS_TABLE_SIZE = 4368 JISCharToFreqOrder = ( 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872 1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 #Everything below is of no interest for detection purpose 2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384 6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400 6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416 6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432 6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448 4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464 4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480 3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496 3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512 4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528 3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544 6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560 4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576 6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592 6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608 6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624 6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640 6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656 6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672 3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688 3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704 6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720 2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736 4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752 4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768 4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784 6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800 3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816 4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832 4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848 6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864 4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880 6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896 3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912 2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928 4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944 2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960 6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976 4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992 6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008 6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024 6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040 4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056 6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072 2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088 6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104 4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120 6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136 4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152 4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168 6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184 6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200 6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216 3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232 1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248 3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264 3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280 4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296 6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312 3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328 6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344 3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360 3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376 2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392 6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408 6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424 3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440 6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456 3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472 6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488 6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504 6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520 4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536 6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552 4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568 3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584 3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600 6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616 6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632 4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648 6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664 6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680 6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696 6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712 6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728 6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744 4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760 4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776 3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792 6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808 4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824 2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840 6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856 6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872 4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888 2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904 4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920 2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936 4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952 4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968 4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984 6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000 3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016 6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032 3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048 6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064 2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080 3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096 7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112 2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128 3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144 3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160 3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176 3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192 7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208 7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224 7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240 7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256 7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272 4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288 3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304 3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320 4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336 3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352 3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368 7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384 4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400 7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416 7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432 7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448 7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464 7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480 4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496 4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512 7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528 3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544 4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560 7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576 7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592 4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608 3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624 3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640 7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656 4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672 4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688 4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704 4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720 4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736 4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752 7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768 7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784 7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800 7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816 7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832 2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848 3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864 7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880 7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896 3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912 4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928 3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944 3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960 2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976 7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992 7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008 4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024 3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040 3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056 7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072 7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088 7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104 4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120 7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136 2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152 3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168 4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184 7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200 4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216 4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232 7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248 7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264 5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280 7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296 7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312 7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328 7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344 7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360 5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376 5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392 7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408 3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424 7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440 7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456 3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472 7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488 7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504 1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520 3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536 4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552 2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568 3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584 2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600 5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616 4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632 4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648 5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664 7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680 7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696 7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712 7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728 3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744 7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760 3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776 7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792 4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808 7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824 7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840 7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856 7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872 7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888 7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904 7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920 7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936 7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952 7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968 7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984 7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000 8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016 8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032 8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048 8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064 8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080 8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096 8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112 8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128 8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144 8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160 8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176 8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192 8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208 8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224 8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240 8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256 8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272 # flake8: noqa
apache/airflow
refs/heads/main
airflow/providers/amazon/aws/hooks/dynamodb.py
3
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """This module contains the AWS DynamoDB hook""" from typing import Iterable, List, Optional from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook class AwsDynamoDBHook(AwsBaseHook): """ Interact with AWS DynamoDB. Additional arguments (such as ``aws_conn_id``) may be specified and are passed down to the underlying AwsBaseHook. .. seealso:: :class:`~airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook` :param table_keys: partition key and sort key :type table_keys: list :param table_name: target DynamoDB table :type table_name: str """ def __init__( self, *args, table_keys: Optional[List] = None, table_name: Optional[str] = None, **kwargs ) -> None: self.table_keys = table_keys self.table_name = table_name kwargs["resource_type"] = "dynamodb" super().__init__(*args, **kwargs) def write_batch_data(self, items: Iterable) -> bool: """Write batch items to DynamoDB table with provisioned throughout capacity.""" try: table = self.get_conn().Table(self.table_name) with table.batch_writer(overwrite_by_pkeys=self.table_keys) as batch: for item in items: batch.put_item(Item=item) return True except Exception as general_error: raise AirflowException(f"Failed to insert items in dynamodb, error: {str(general_error)}")
glwu/python-for-android
refs/heads/master
python-build/python-libs/xmpppy/doc/examples/xsend.py
87
#!/usr/bin/python # $Id: xsend.py,v 1.8 2006/10/06 12:30:42 normanr Exp $ import sys,os,xmpp,time if len(sys.argv) < 2: print "Syntax: xsend JID text" sys.exit(0) tojid=sys.argv[1] text=' '.join(sys.argv[2:]) jidparams={} if os.access(os.environ['HOME']+'/.xsend',os.R_OK): for ln in open(os.environ['HOME']+'/.xsend').readlines(): if not ln[0] in ('#',';'): key,val=ln.strip().split('=',1) jidparams[key.lower()]=val for mandatory in ['jid','password']: if mandatory not in jidparams.keys(): open(os.environ['HOME']+'/.xsend','w').write('#Uncomment fields before use and type in correct credentials.\n#[email protected]/resource (/resource is optional)\n#PASSWORD=juliet\n') print 'Please point ~/.xsend config file to valid JID for sending messages.' sys.exit(0) jid=xmpp.protocol.JID(jidparams['jid']) cl=xmpp.Client(jid.getDomain(),debug=[]) con=cl.connect() if not con: print 'could not connect!' sys.exit() print 'connected with',con auth=cl.auth(jid.getNode(),jidparams['password'],resource=jid.getResource()) if not auth: print 'could not authenticate!' sys.exit() print 'authenticated using',auth #cl.SendInitPresence(requestRoster=0) # you may need to uncomment this for old server id=cl.send(xmpp.protocol.Message(tojid,text)) print 'sent message with id',id time.sleep(1) # some older servers will not send the message if you disconnect immediately after sending #cl.disconnect()
asurve/incubator-systemml
refs/heads/master
scripts/perftest/python/run_perftest.py
13
#!/usr/bin/env python3 # ------------------------------------------------------------- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # # ------------------------------------------------------------- import sys import time import argparse from functools import reduce import os from os.path import join import logging from datetime import datetime from datagen import config_packets_datagen from train import config_packets_train from predict import config_packets_predict from utils_misc import get_families, config_reader, \ exec_dml_and_parse_time, exec_test_data, check_predict, get_folder_metrics, split_config_args, \ get_default_dir from utils_fs import create_dir_local, write_success, check_SUCCESS_file_exists # A packet is a dictionary # with key as the algorithm # value as the list with configuration json files ML_ALGO = {'binomial': ['MultiLogReg', 'l2-svm', 'm-svm'], 'clustering': ['Kmeans'], 'multinomial': ['naive-bayes', 'MultiLogReg', 'm-svm'], 'regression1': ['LinearRegDS', 'LinearRegCG'], 'regression2': ['GLM_poisson', 'GLM_gamma', 'GLM_binomial'], 'stats1': ['Univar-Stats', 'bivar-stats'], 'stats2': ['stratstats'], 'dimreduction': ['PCA']} ML_GENDATA = {'binomial': 'genRandData4LogisticRegression', 'clustering': 'genRandData4Kmeans', 'multinomial': 'genRandData4Multinomial', 'regression1': 'genRandData4LogisticRegression', 'regression2': 'genRandData4LogisticRegression', 'stats1': 'genRandData4DescriptiveStats', 'stats2': 'genRandData4StratStats', 'dimreduction': 'genRandData4PCA'} ML_TRAIN = {'GLM_poisson': 'GLM', 'GLM_gamma': 'GLM', 'GLM_binomial': 'GLM', 'LinearRegCG': 'LinearRegCG', 'LinearRegDS': 'LinearRegDS', 'stratstats': 'stratstats', 'Univar-Stats': 'Univar-Stats', 'bivar-stats': 'bivar-stats', 'Kmeans': 'Kmeans', 'm-svm': 'm-svm', 'l2-svm': 'l2-svm', 'MultiLogReg': 'MultiLogReg', 'naive-bayes': 'naive-bayes', 'PCA': 'PCA'} ML_PREDICT = {'Kmeans': 'Kmeans-predict', 'LinearRegCG': 'GLM-predict', 'LinearRegDS': 'GLM-predict', 'm-svm': 'm-svm-predict', 'l2-svm': 'l2-svm-predict', 'MultiLogReg': 'GLM-predict', 'naive-bayes': 'naive-bayes-predict', 'GLM_poisson': 'GLM-predict', 'GLM_gamma': 'GLM-predict', 'GLM_binomial': 'GLM-predict'} DENSE_TYPE_ALGOS = ['clustering', 'stats1', 'stats2', 'dimreduction'] # Responsible for execution and metric logging def algorithm_workflow(algo, exec_type, config_path, dml_file_name, action_mode, current_dir): """ This function is responsible for overall workflow. This does the following actions Check if the input is key value argument or list of positional args Execution and time Logging Metrics algo: String Input algorithm specified exec_type: String Contains the execution type singlenode / hybrid_spark config_path: String Path to read the json file from dml_file_name: String DML file name to be used while processing the arguments give action_mode: String Type of action data-gen, train ... current_dir: String Current location of hdfs / local temp being processed """ config_data = config_reader(config_path + '.json') if isinstance(config_data, dict): dict_args = ' '.join([str(key) + '=' + str(val) for key, val in config_data.items()]) args = {'-nvargs': dict_args} if isinstance(config_data, list): list_args = ' '.join(config_data) args = {'-args': list_args} config_file_name = config_path.split('/')[-1] mat_type, mat_shape, intercept = get_folder_metrics(config_file_name, action_mode) temp_cwd = join(current_dir, config_file_name) # temp_dir_exist exit_flag_success = check_SUCCESS_file_exists(temp_cwd) if exit_flag_success: time = 'data_exists' else: time = exec_dml_and_parse_time(exec_type, dml_file_name, args, backend_args_dict, systemml_args_dict, config_path) write_success(time, temp_cwd) print('{},{},{},{},{},{}'.format(algo, action_mode, intercept, mat_type, mat_shape, time)) current_metrics = [algo, action_mode, intercept, mat_type, mat_shape, time] logging.info(','.join(current_metrics)) return exit_flag_success def perf_test_entry(family, algo, exec_type, mat_type, mat_shape, config_dir, mode, temp_dir, file_system_type): """ This function is the entry point for performance testing family: List A family may contain one or more algorithm based on data generation script used algo: List Input algorithms exec_type: String Contains the execution type singlenode / hybrid_spark mat_type: List Type of matrix to generate dense, sparse, all mat_shape: List Dimensions of the input matrix with rows and columns config_dir: String Location to store all configuration mode: List Type of workload to run. data-gen, train ... temp_dir: String Location to store all output files created during perf test file_system_type: String """ # algos to run is a list of tuples with # [(m-svm, binomial), (m-svm, multinomial)...] # Basic block for execution of scripts algos_to_run = [] # Sections below build algos_to_run in our performance test # Handles algorithms like m-svm and MultiLogReg which have multiple # data generation scripts (dual datagen) # --family is taken into consideration only when there are multiple datagen for an algo if family is not None and algo is not None: for current_algo in algo: family_list = get_families(current_algo, ML_ALGO) if len(family_list) == 1: algos_to_run.append((current_algo, family_list[0])) else: intersection = set(family).intersection(family_list) for valid_family in intersection: algos_to_run.append((current_algo, valid_family)) # When the user inputs just algorithms to run elif algo is not None: for current_algo in algo: family_list = get_families(current_algo, ML_ALGO) for f in family_list: algos_to_run.append((current_algo, f)) # When the user just specifies only families to run elif family is not None: for current_family in family: algos = ML_ALGO[current_family] for current_algo in algos: algos_to_run.append((current_algo, current_family)) if 'data-gen' in mode: # Create config directories data_gen_config_dir = join(config_dir, 'data-gen') create_dir_local(data_gen_config_dir) # Create output path data_gen_dir = join(temp_dir, 'data-gen') conf_packet = config_packets_datagen(algos_to_run, mat_type, mat_shape, data_gen_dir, DENSE_TYPE_ALGOS, data_gen_config_dir) for family_name, config_folders in conf_packet.items(): for config in config_folders: file_name = ML_GENDATA[family_name] success_file = algorithm_workflow(family_name, exec_type, config, file_name, 'data-gen', data_gen_dir) # Statistic family do not require to be split if family_name not in ['stats1', 'stats2']: if not success_file: exec_test_data(exec_type, backend_args_dict, systemml_args_dict, data_gen_dir, config) if 'train' in mode: # Create config directories train_config_dir = join(config_dir, 'train') create_dir_local(train_config_dir) # Create output path data_gen_dir = join(temp_dir, 'data-gen') train_dir = join(temp_dir, 'train') conf_packet = config_packets_train(algos_to_run, mat_type, mat_shape, data_gen_dir, train_dir, DENSE_TYPE_ALGOS, train_config_dir) for algo_family_name, config_files in conf_packet.items(): for config in config_files: algo_name = algo_family_name.split('.')[0] file_name = ML_TRAIN[algo_name] algorithm_workflow(algo_family_name, exec_type, config, file_name, 'train', train_dir) if 'predict' in mode: # Create config directories predict_config_dir = join(config_dir, 'predict') create_dir_local(predict_config_dir) # Create output path data_gen_dir = join(temp_dir, 'data-gen') train_dir = join(temp_dir, 'train') predict_dir = join(temp_dir, 'predict') algos_to_run = list(filter(lambda algo: check_predict(algo[0], ML_PREDICT), algos_to_run)) if len(algos_to_run) < 1: # No algorithms with predict found pass conf_packet = config_packets_predict(algos_to_run, mat_type, mat_shape, data_gen_dir, train_dir, predict_dir, DENSE_TYPE_ALGOS, predict_config_dir) for algo_family_name, config_files in conf_packet.items(): for config in config_files: algo_name = algo_family_name.split('.')[0] file_name = ML_PREDICT[algo_name] algorithm_workflow(algo_family_name, exec_type, config, file_name, 'predict', predict_dir) if __name__ == '__main__': # sys ml env set and error handling systemml_home = os.environ.get('SYSTEMML_HOME') if systemml_home is None: print('SYSTEMML_HOME not found') sys.exit() # Supported Arguments mat_type = ['dense', 'sparse', 'all'] workload = ['data-gen', 'train', 'predict'] execution_mode = ['hybrid_spark', 'singlenode'] file_system_type = ['hdfs', 'local'] # Default Arguments default_mat_shape = ['10k_100'] # Default temp directory, contains everything generated in perftest default_config_dir = join(systemml_home, 'temp_perftest') # Initialize time start_time = time.time() # Default Date Time time_now = str(datetime.now()) # Remove duplicates algorithms and used as default inputs all_algos = set(reduce(lambda x, y: x + y, ML_ALGO.values())) # Families all_families = ML_ALGO.keys() # Default Conf default_conf = 'spark.driver.maxResultSize=0 ' \ 'spark.network.timeout=6000s ' \ 'spark.rpc.askTimeout=6000s ' \ 'spark.memory.useLegacyMode=true ' \ 'spark.files.useFetchCache=false' \ default_conf_big_job = 'spark.executor.extraJavaOptions=\"-Xmn5500m\" ' \ 'spark.executor.memory=\"-Xms50g\" ' \ 'spark.yarn.executor.memoryOverhead=8250 ' \ 'spark.driver.extraJavaOptions=\"-Xms20g -Xmn2g\"' # Argparse Module cparser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter, description='SystemML Performance Test Script') cparser.add_argument('--family', help='space separated list of classes of algorithms ' '(available : ' + ', '.join(sorted(all_families)) + ')', metavar='', choices=all_families, nargs='+', default=all_families) cparser.add_argument('--algo', help='space separated list of algorithm to run ' '(Overrides --family, available : ' + ', '.join(sorted(all_algos)) + ')', metavar='', choices=all_algos, nargs='+') cparser.add_argument('--exec-type', default='hybrid_spark', help='System-ML backend ' 'available : ' + ','.join(execution_mode), metavar='', choices=execution_mode) cparser.add_argument('--mat-type', default=['all'], help='space separated list of types of matrix to generate ' 'available : ' + ','.join(mat_type), metavar='', choices=mat_type, nargs='+') cparser.add_argument('--mat-shape', default=default_mat_shape, help='space separated list of shapes of matrices ' 'to generate (e.g 10k_1k, 20M_4k)', metavar='', nargs='+') cparser.add_argument('--config-dir', default=default_config_dir, help='temporary directory ' 'where generated, training and prediction data is put', metavar='') cparser.add_argument('--filename', default='perf_test', help='name of the output file for the perf' ' metrics', metavar='') cparser.add_argument('--mode', default=workload, help='space separated list of types of workloads to run (available: data-gen, train, predict)', metavar='', choices=workload, nargs='+') cparser.add_argument('--temp-dir', help='the path on the file system to place the working temporary directory at', metavar='') cparser.add_argument('--file-system-type', choices=file_system_type, metavar='', help='file system for temp directory, ' 'supported types are \'hdfs\' for hybrid_spark and \'local\' for standalone;' 'default for hybrid_spark is \'hdfs\' and for standalone is \'local\'') # Configuration Options cparser.add_argument('-stats', help='Monitor and report caching/recompilation statistics, ' 'heavy hitter <count> is 10 unless overridden', nargs='?', const='10', metavar='') cparser.add_argument('-explain', help='explains plan levels can be hops, runtime, ' 'recompile_hops, recompile_runtime', nargs='?', const='runtime', metavar='') cparser.add_argument('-config', help='System-ML configuration file (e.g SystemML-config.xml)', metavar='') cparser.add_argument('-gpu', help='uses CUDA instructions when reasonable, ' 'set <force> option to skip conservative memory estimates ' 'and use GPU wherever possible', nargs='?', const='no_option') # Spark Configuration Option cparser.add_argument('--master', help='local, yarn', metavar='') cparser.add_argument('--deploy-mode', help='client, cluster', metavar='') cparser.add_argument('--driver-memory', help='Memory for driver (e.g. 512M)', metavar='') cparser.add_argument('--num-executors', help='Number of executors to launch', metavar='') cparser.add_argument('--executor-memory', help='Memory per executor', metavar='') cparser.add_argument('--executor-cores', help='Number of cores', metavar='') cparser.add_argument('--conf', help='Spark configuration parameters, please use these ' 'parameters for large performance tests ' + default_conf_big_job, default=default_conf, nargs='+', metavar='') # Single node execution mode options cparser.add_argument('-heapmem', help='maximum JVM heap memory', metavar='', default='8g') # Args is a namespace args = cparser.parse_args() all_arg_dict = vars(args) create_dir_local(args.config_dir) # Global variables perftest_args_dict, systemml_args_dict, backend_args_dict = split_config_args(all_arg_dict) # temp_dir hdfs / local path check if args.file_system_type is None: if args.exec_type == 'hybrid_spark': args.file_system_type = 'hdfs' else: args.file_system_type = 'local' perftest_args_dict['temp_dir'] = get_default_dir(args.file_system_type, args.temp_dir, args.exec_type, default_config_dir) # default_mat_type validity if len(args.mat_type) > 2: print('length of --mat-type argument cannot be greater than two') sys.exit() if args.algo is not None: # This section check the validity of dual datagen algorithms like m-svm algo_families = {} for current_algo in args.algo: algo_families[current_algo] = get_families(current_algo, ML_ALGO) if len(algo_families[current_algo]) > 1: if args.family is None: print('family should be present for {}'.format(current_algo)) sys.exit() valid_families = set(algo_families[current_algo]) input_families = set(args.family) common_families = input_families.intersection(valid_families) if len(common_families) == 0: sys.exit('Please specify a valid family for {} and the ' 'valid families are {}'.format(current_algo, ' '.join(valid_families))) # Set level to 0 -> debug mode # Set level to 20 -> Plain metrics log_filename = args.filename + '_' + args.exec_type + '.out' logging.basicConfig(filename=join(args.config_dir, log_filename), level=20) logging.info('New performance test started at {}'.format(time_now)) logging.info('algorithm,run_type,intercept,matrix_type,data_shape,time_sec') # Remove filename item from dictionary as its already used to create the log above del perftest_args_dict['filename'] perf_test_entry(**perftest_args_dict) total_time = (time.time() - start_time) logging.info('total_time,none,none,none,none,{}'.format(total_time)) logging.info('Performance tests complete')
spensmith/ops-devbox
refs/heads/master
action_plugins/copyv.py
5
# (c) 2012-2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. import os from ansible import utils import ansible.constants as C import ansible.utils.template as template from ansible.utils.vault import VaultLib from ansible import errors from ansible.runner.return_data import ReturnData import base64 import json import stat import tempfile import pipes ## fixes https://github.com/ansible/ansible/issues/3518 # http://mypy.pythonblogs.com/12_mypy/archive/1253_workaround_for_python_bug_ascii_codec_cant_encode_character_uxa0_in_position_111_ordinal_not_in_range128.html import sys reload(sys) sys.setdefaultencoding("utf8") class ActionModule(object): def __init__(self, runner): self.runner = runner def run(self, conn, tmp_path, module_name, module_args, inject, complex_args=None, **kwargs): ''' handler for file transfer operations ''' # load up options options = {} if complex_args: options.update(complex_args) options.update(utils.parse_kv(module_args)) source = options.get('src', None) content = options.get('content', None) dest = options.get('dest', None) raw = utils.boolean(options.get('raw', 'no')) force = utils.boolean(options.get('force', 'yes')) # content with newlines is going to be escaped to safely load in yaml # now we need to unescape it so that the newlines are evaluated properly # when writing the file to disk if content: if isinstance(content, unicode): try: content = content.decode('unicode-escape') except UnicodeDecodeError: pass if (source is None and content is None and not 'first_available_file' in inject) or dest is None: result=dict(failed=True, msg="src (or content) and dest are required") return ReturnData(conn=conn, result=result) elif (source is not None or 'first_available_file' in inject) and content is not None: result=dict(failed=True, msg="src and content are mutually exclusive") return ReturnData(conn=conn, result=result) # Check if the source ends with a "/" source_trailing_slash = False if source: source_trailing_slash = source.endswith("/") # Define content_tempfile in case we set it after finding content populated. content_tempfile = None # If content is defined make a temp file and write the content into it. if content is not None: try: # If content comes to us as a dict it should be decoded json. # We need to encode it back into a string to write it out. if type(content) is dict: content_tempfile = self._create_content_tempfile(json.dumps(content)) else: content_tempfile = self._create_content_tempfile(content) source = content_tempfile except Exception, err: result = dict(failed=True, msg="could not write content temp file: %s" % err) return ReturnData(conn=conn, result=result) # if we have first_available_file in our vars # look up the files and use the first one we find as src elif 'first_available_file' in inject: found = False for fn in inject.get('first_available_file'): fn_orig = fn fnt = template.template(self.runner.basedir, fn, inject) fnd = utils.path_dwim(self.runner.basedir, fnt) if not os.path.exists(fnd) and '_original_file' in inject: fnd = utils.path_dwim_relative(inject['_original_file'], 'files', fnt, self.runner.basedir, check=False) if os.path.exists(fnd): source = fnd found = True break if not found: results = dict(failed=True, msg="could not find src in first_available_file list") return ReturnData(conn=conn, result=results) else: source = template.template(self.runner.basedir, source, inject) if '_original_file' in inject: source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir) else: source = utils.path_dwim(self.runner.basedir, source) # A list of source file tuples (full_path, relative_path) which will try to copy to the destination source_files = [] # If source is a directory populate our list else source is a file and translate it to a tuple. if os.path.isdir(source): # Get the amount of spaces to remove to get the relative path. if source_trailing_slash: sz = len(source) + 1 else: sz = len(source.rsplit('/', 1)[0]) + 1 # Walk the directory and append the file tuples to source_files. for base_path, sub_folders, files in os.walk(source): for file in files: full_path = os.path.join(base_path, file) rel_path = full_path[sz:] source_files.append((full_path, rel_path)) # If it's recursive copy, destination is always a dir, # explicitly mark it so (note - copy module relies on this). if not conn.shell.path_has_trailing_slash(dest): dest = conn.shell.join_path(dest, '') else: source_files.append((source, os.path.basename(source))) changed = False diffs = [] module_result = {"changed": False} # A register for if we executed a module. # Used to cut down on command calls when not recursive. module_executed = False # Tell _execute_module to delete the file if there is one file. delete_remote_tmp = (len(source_files) == 1) # If this is a recursive action create a tmp_path that we can share as the _exec_module create is too late. if not delete_remote_tmp: if "-tmp-" not in tmp_path: tmp_path = self.runner._make_tmp_path(conn) # expand any user home dir specifier dest = self.runner._remote_expand_user(conn, dest, tmp_path) vault = VaultLib(password=self.runner.vault_pass) for source_full, source_rel in source_files: vault_temp_file = None data = None try: data = open(source_full).read() except IOError: raise errors.AnsibleError("file could not read: %s" % source_full) if vault.is_encrypted(data): # if the file is encrypted and no password was specified, # the decrypt call would throw an error, but we check first # since the decrypt function doesn't know the file name if self.runner.vault_pass is None: raise errors.AnsibleError("A vault password must be specified to decrypt %s" % source_full) data = vault.decrypt(data) # Make a temp file vault_temp_file = self._create_content_tempfile(data) source_full = vault_temp_file; # Generate a hash of the local file. local_checksum = utils.checksum(source_full) # If local_checksum is not defined we can't find the file so we should fail out. if local_checksum is None: result = dict(failed=True, msg="could not find src=%s" % source_full) return ReturnData(conn=conn, result=result) # This is kind of optimization - if user told us destination is # dir, do path manipulation right away, otherwise we still check # for dest being a dir via remote call below. if conn.shell.path_has_trailing_slash(dest): dest_file = conn.shell.join_path(dest, source_rel) else: dest_file = conn.shell.join_path(dest) # Attempt to get the remote checksum remote_checksum = self.runner._remote_checksum(conn, tmp_path, dest_file, inject) if remote_checksum == '3': # The remote_checksum was executed on a directory. if content is not None: # If source was defined as content remove the temporary file and fail out. self._remove_tempfile_if_content_defined(content, content_tempfile) result = dict(failed=True, msg="can not use content with a dir as dest") return ReturnData(conn=conn, result=result) else: # Append the relative source location to the destination and retry remote_checksum dest_file = conn.shell.join_path(dest, source_rel) remote_checksum = self.runner._remote_checksum(conn, tmp_path, dest_file, inject) if remote_checksum == '4': result = dict(msg="python isn't present on the system. Unable to compute checksum", failed=True) return ReturnData(conn=conn, result=result) if remote_checksum != '1' and not force: # remote_file exists so continue to next iteration. continue if local_checksum != remote_checksum: # The checksums don't match and we will change or error out. changed = True # Create a tmp_path if missing only if this is not recursive. # If this is recursive we already have a tmp_path. if delete_remote_tmp: if "-tmp-" not in tmp_path: tmp_path = self.runner._make_tmp_path(conn) if self.runner.diff and not raw: diff = self._get_diff_data(conn, tmp_path, inject, dest_file, source_full) else: diff = {} if self.runner.noop_on_check(inject): self._remove_tempfile_if_content_defined(content, content_tempfile) diffs.append(diff) changed = True module_result = dict(changed=True) continue # Define a remote directory that we will copy the file to. tmp_src = tmp_path + 'source' if not raw: conn.put_file(source_full, tmp_src) else: conn.put_file(source_full, dest_file) # We have copied the file remotely and no longer require our content_tempfile self._remove_tempfile_if_content_defined(content, content_tempfile) # Remove the vault tempfile if we have one if vault_temp_file: os.remove(vault_temp_file); vault_temp_file = None # fix file permissions when the copy is done as a different user if self.runner.become and self.runner.become_user != 'root' and not raw: self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp_path) if raw: # Continue to next iteration if raw is defined. continue # Run the copy module # src and dest here come after original and override them # we pass dest only to make sure it includes trailing slash in case of recursive copy new_module_args = dict( src=tmp_src, dest=dest, original_basename=source_rel ) if self.runner.noop_on_check(inject): new_module_args['CHECKMODE'] = True if self.runner.no_log: new_module_args['NO_LOG'] = True module_args_tmp = utils.merge_module_args(module_args, new_module_args) module_return = self.runner._execute_module(conn, tmp_path, 'copy', module_args_tmp, inject=inject, complex_args=complex_args, delete_remote_tmp=delete_remote_tmp) module_executed = True else: # no need to transfer the file, already correct hash, but still need to call # the file module in case we want to change attributes self._remove_tempfile_if_content_defined(content, content_tempfile) # Remove the vault tempfile if we have one if vault_temp_file: os.remove(vault_temp_file); vault_temp_file = None if raw: # Continue to next iteration if raw is defined. # self.runner._remove_tmp_path(conn, tmp_path) continue tmp_src = tmp_path + source_rel # Build temporary module_args. new_module_args = dict( src=tmp_src, dest=dest, original_basename=source_rel ) if self.runner.noop_on_check(inject): new_module_args['CHECKMODE'] = True if self.runner.no_log: new_module_args['NO_LOG'] = True module_args_tmp = utils.merge_module_args(module_args, new_module_args) # Execute the file module. module_return = self.runner._execute_module(conn, tmp_path, 'file', module_args_tmp, inject=inject, complex_args=complex_args, delete_remote_tmp=delete_remote_tmp) module_executed = True module_result = module_return.result if not module_result.get('checksum'): module_result['checksum'] = local_checksum if module_result.get('failed') == True: return module_return if module_result.get('changed') == True: changed = True # Delete tmp_path if we were recursive or if we did not execute a module. if (not C.DEFAULT_KEEP_REMOTE_FILES and not delete_remote_tmp) \ or (not C.DEFAULT_KEEP_REMOTE_FILES and delete_remote_tmp and not module_executed): self.runner._remove_tmp_path(conn, tmp_path) # the file module returns the file path as 'path', but # the copy module uses 'dest', so add it if it's not there if 'path' in module_result and 'dest' not in module_result: module_result['dest'] = module_result['path'] # TODO: Support detailed status/diff for multiple files if len(source_files) == 1: result = module_result else: result = dict(dest=dest, src=source, changed=changed) if len(diffs) == 1: return ReturnData(conn=conn, result=result, diff=diffs[0]) else: return ReturnData(conn=conn, result=result) def _create_content_tempfile(self, content): ''' Create a tempfile containing defined content ''' fd, content_tempfile = tempfile.mkstemp() f = os.fdopen(fd, 'w') try: f.write(content) except Exception, err: os.remove(content_tempfile) raise Exception(err) finally: f.close() return content_tempfile def _get_diff_data(self, conn, tmp, inject, destination, source): peek_result = self.runner._execute_module(conn, tmp, 'file', "path=%s diff_peek=1" % destination, inject=inject, persist_files=True) if not peek_result.is_successful(): return {} diff = {} if peek_result.result['state'] == 'absent': diff['before'] = '' elif peek_result.result['appears_binary']: diff['dst_binary'] = 1 elif peek_result.result['size'] > utils.MAX_FILE_SIZE_FOR_DIFF: diff['dst_larger'] = utils.MAX_FILE_SIZE_FOR_DIFF else: dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % destination, inject=inject, persist_files=True) if 'content' in dest_result.result: dest_contents = dest_result.result['content'] if dest_result.result['encoding'] == 'base64': dest_contents = base64.b64decode(dest_contents) else: raise Exception("unknown encoding, failed: %s" % dest_result.result) diff['before_header'] = destination diff['before'] = dest_contents src = open(source) src_contents = src.read(8192) st = os.stat(source) if "\x00" in src_contents: diff['src_binary'] = 1 elif st[stat.ST_SIZE] > utils.MAX_FILE_SIZE_FOR_DIFF: diff['src_larger'] = utils.MAX_FILE_SIZE_FOR_DIFF else: src.seek(0) diff['after_header'] = source diff['after'] = src.read() return diff def _remove_tempfile_if_content_defined(self, content, content_tempfile): if content is not None: os.remove(content_tempfile) def _result_key_merge(self, options, results): # add keys to file module results to mimic copy if 'path' in results.result and 'dest' not in results.result: results.result['dest'] = results.result['path'] del results.result['path'] return results
zedr/django
refs/heads/master
tests/comment_tests/custom_comments/views.py
182
from django.http import HttpResponse def custom_submit_comment(request): return HttpResponse("Hello from the custom submit comment view.") def custom_flag_comment(request, comment_id): return HttpResponse("Hello from the custom flag view.") def custom_delete_comment(request, comment_id): return HttpResponse("Hello from the custom delete view.") def custom_approve_comment(request, comment_id): return HttpResponse("Hello from the custom approve view.")
jangorecki/h2o-3
refs/heads/master
h2o-py/tests/testdir_hdfs/pyunit_INTERNAL_HDFS_airlines_glrm_profile.py
4
from __future__ import print_function import sys sys.path.insert(1,"../../") import h2o import time from tests import pyunit_utils from h2o.estimators.glrm import H2OGeneralizedLowRankEstimator #---------------------------------------------------------------------- # Purpose: This test is to run GLRM on airline data and measure # how fast it can run with the various optimization methods that we # are looking at. #---------------------------------------------------------------------- def hdfs_orc_parser(): # Check if we are running inside the H2O network by seeing if we can touch # the namenode. hadoop_namenode_is_accessible = pyunit_utils.hadoop_namenode_is_accessible() if hadoop_namenode_is_accessible: numElements2Compare = 10 tol_time = 200 tol_numeric = 1e-5 hdfs_name_node = pyunit_utils.hadoop_namenode() hdfs_csv_file = "/datasets/air_csv_part" col_types = ['real', 'real', 'real', 'real', 'real', 'real', 'real', 'real', 'enum', 'real', 'enum', 'real', 'real', 'enum', 'real', 'real', 'enum', 'enum', 'real', 'enum', 'enum', 'real', 'real', 'real', 'enum', 'enum', 'enum', 'enum', 'enum', 'enum', 'enum'] # import CSV file print("Import airlines 116M dataset in original csv format from HDFS") url_csv = "hdfs://{0}{1}".format(hdfs_name_node, hdfs_csv_file) acs_orig = h2o.import_file(url_csv, na_strings=['\\N'], col_types=col_types) print("Data size number of rows: {0}, number of columns: {1}".format(acs_orig.nrow, acs_orig.ncol)) seeds = [2297378124, 3849570216, 6733652048, 8915337442, 8344418400, 9416580152, 2598632624, 4977008454, 8273228579, 8185554539, 3219125000, 2998879373, 7707012513, 5786923379, 5029788935, 935945790, 7092607078, 9305834745, 6173975590, 5397294255] run_time_ms = [] iterations = [] objective = [] num_runs = 1 # number of times to repeat experiments for ind in range(num_runs): acs_model = H2OGeneralizedLowRankEstimator(k = 10, transform = 'STANDARDIZE', loss = 'Quadratic', multi_loss="Categorical", model_id="clients_core_glrm", regularization_x="L2", regularization_y="L1", gamma_x=0.2, gamma_y=0.5, init="SVD", max_iterations = 200, seed=seeds[ind % len(seeds)]) acs_model.train(x = acs_orig.names, training_frame= acs_orig, seed=seeds[ind % len(seeds)]) run_time_ms.append(acs_model._model_json['output']['end_time'] - acs_model._model_json['output']['start_time']) iterations.append(acs_model._model_json['output']['iterations']) objective.append(acs_model._model_json['output']['objective']) print("Run time in ms: {0}".format(run_time_ms)) print("number of iterations: {0}".format(iterations)) print("objective function value: {0}".format(objective)) sys.stdout.flush() else: raise EnvironmentError if __name__ == "__main__": pyunit_utils.standalone_test(hdfs_orc_parser) else: hdfs_orc_parser()
suneeshtr/persona
refs/heads/master
node_modules/l/node_modules/hook.io/node_modules/npm/node_modules/node-gyp/gyp/test/generator-output/gyptest-actions.py
74
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verifies --generator-output= behavior when using actions. """ import TestGyp # Ninja and Android don't support --generator-output. test = TestGyp.TestGyp(formats=['!ninja', '!android']) # All the generated files should go under 'gypfiles'. The source directory # ('actions') should be untouched. test.writable(test.workpath('actions'), False) test.run_gyp('actions.gyp', '--generator-output=' + test.workpath('gypfiles'), chdir='actions') test.writable(test.workpath('actions'), True) test.relocate('actions', 'relocate/actions') test.relocate('gypfiles', 'relocate/gypfiles') test.writable(test.workpath('relocate/actions'), False) # Some of the action outputs use "pure" relative paths (i.e. without prefixes # like <(INTERMEDIATE_DIR) or <(PROGRAM_DIR)). Even though we are building under # 'gypfiles', such outputs will still be created relative to the original .gyp # sources. Projects probably wouldn't normally do this, since it kind of defeats # the purpose of '--generator-output', but it is supported behaviour. test.writable(test.workpath('relocate/actions/build'), True) test.writable(test.workpath('relocate/actions/subdir1/build'), True) test.writable(test.workpath('relocate/actions/subdir1/actions-out'), True) test.writable(test.workpath('relocate/actions/subdir2/build'), True) test.writable(test.workpath('relocate/actions/subdir2/actions-out'), True) test.build('actions.gyp', test.ALL, chdir='relocate/gypfiles') expect = """\ Hello from program.c Hello from make-prog1.py Hello from make-prog2.py """ if test.format == 'xcode': chdir = 'relocate/actions/subdir1' else: chdir = 'relocate/gypfiles' test.run_built_executable('program', chdir=chdir, stdout=expect) test.must_match('relocate/actions/subdir2/actions-out/file.out', "Hello from make-file.py\n") test.pass_test()
rfguri/vimfiles
refs/heads/master
bundle/ycm/third_party/ycmd/third_party/requests/requests/packages/chardet/eucjpprober.py
2918
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is mozilla.org code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### import sys from . import constants from .mbcharsetprober import MultiByteCharSetProber from .codingstatemachine import CodingStateMachine from .chardistribution import EUCJPDistributionAnalysis from .jpcntx import EUCJPContextAnalysis from .mbcssm import EUCJPSMModel class EUCJPProber(MultiByteCharSetProber): def __init__(self): MultiByteCharSetProber.__init__(self) self._mCodingSM = CodingStateMachine(EUCJPSMModel) self._mDistributionAnalyzer = EUCJPDistributionAnalysis() self._mContextAnalyzer = EUCJPContextAnalysis() self.reset() def reset(self): MultiByteCharSetProber.reset(self) self._mContextAnalyzer.reset() def get_charset_name(self): return "EUC-JP" def feed(self, aBuf): aLen = len(aBuf) for i in range(0, aLen): # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte codingState = self._mCodingSM.next_state(aBuf[i]) if codingState == constants.eError: if constants._debug: sys.stderr.write(self.get_charset_name() + ' prober hit error at byte ' + str(i) + '\n') self._mState = constants.eNotMe break elif codingState == constants.eItsMe: self._mState = constants.eFoundIt break elif codingState == constants.eStart: charLen = self._mCodingSM.get_current_charlen() if i == 0: self._mLastChar[1] = aBuf[0] self._mContextAnalyzer.feed(self._mLastChar, charLen) self._mDistributionAnalyzer.feed(self._mLastChar, charLen) else: self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen) self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], charLen) self._mLastChar[0] = aBuf[aLen - 1] if self.get_state() == constants.eDetecting: if (self._mContextAnalyzer.got_enough_data() and (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): self._mState = constants.eFoundIt return self.get_state() def get_confidence(self): contxtCf = self._mContextAnalyzer.get_confidence() distribCf = self._mDistributionAnalyzer.get_confidence() return max(contxtCf, distribCf)
prmdsharma/PredictionIO
refs/heads/develop
examples/experimental/scala-parallel-recommendation-entitymap/data/import_eventserver.py
48
""" Import sample data for recommendation engine """ import predictionio import argparse import random SEED = 3 def import_events(client): random.seed(SEED) count = 0 print "Importing data..." # generate 10 users, with user uid1,2,....,10 # with some random attributes user_ids = [ ("uid"+str(i)) for i in range(1, 11)] for user_id in user_ids: print "Set user", user_id client.create_event( event="$set", entity_type="user", entity_id=user_id, properties={ "attr0" : float(random.randint(0, 4)), "attr1" : random.randint(10, 14), "attr2" : random.randint(20, 24) } ) count += 1 # generate 50 items, with iid1,2,....,50 # with some randome attributes item_ids = [ ("iid"+str(i)) for i in range(1, 51)] for item_id in item_ids: print "Set item", item_id client.create_event( event="$set", entity_type="item", entity_id=item_id, properties={ "attrA" : random.choice(["something1", "something2", "valueX"]), "attrB" : random.randint(10, 30), "attrC" : random.choice([True, False]) } ) count += 1 # each user randomly rate or buy 10 items for user_id in user_ids: for viewed_item in random.sample(item_ids, 10): if (random.randint(0, 1) == 1): print "User", user_id ,"rates item", viewed_item client.create_event( event="rate", entity_type="user", entity_id=user_id, target_entity_type="item", target_entity_id=item_id, properties= { "rating" : float(random.randint(1, 6)) } ) else: print "User", user_id ,"buys item", viewed_item client.create_event( event="buy", entity_type="user", entity_id=user_id, target_entity_type="item", target_entity_id=item_id ) count += 1 print "%s events are imported." % count if __name__ == '__main__': parser = argparse.ArgumentParser( description="Import sample data for recommendation engine") parser.add_argument('--access_key', default='invald_access_key') parser.add_argument('--url', default="http://localhost:7070") args = parser.parse_args() print args client = predictionio.EventClient( access_key=args.access_key, url=args.url, threads=5, qsize=500) import_events(client)
UOMx/edx-platform
refs/heads/master
cms/djangoapps/contentstore/__init__.py
12133432
IRI-Research/django
refs/heads/master
tests/many_to_one_regress/__init__.py
12133432
crateio/crate.pypi
refs/heads/master
crate/pypi/simple/__init__.py
12133432
mig5/felicity
refs/heads/master
fabfile.py
1
from fabric.api import * import ConfigParser def deploy(server): env.user = 'root' # Fetch some values from the config file # @TODO fix this so that fabfile doesn't depend on ConfigParser at all config = ConfigParser.RawConfigParser() config.read('config/felicity.ini') # The Duplicity passphrase, for decrypting during restore passphrase = config.get('Felicity', 'passphrase') # Where should we send the report to? email = config.get('Felicity', 'email') # Where are the backups? backupprovider = config.get('Felicity', 'backupprovider') # Upload scripts scripts = ['backup_list_buckets', 'backup_list_bucket_keys', 'backup_list_containers', 'backup_restore_wrapper', 'backup_restore', 'firewall'] for script in scripts: put('scripts/' + script, '/usr/local/bin/' + script, mode=0755) # Store creds in a file for using by the backup restore script creds = [] if backupprovider == "Amazon": creds.append("export AWS_ACCESS_KEY_ID=%s\n" % config.get('Amazon', 'user')) creds.append("export AWS_SECRET_ACCESS_KEY=%s\n" % config.get('Amazon', 'key')) if backupprovider == "Rackspace": creds.append("export CLOUDFILES_USERNAME=%s\n" % config.get('Rackspace', 'user')) creds.append("export CLOUDFILES_APIKEY=%s\n" % config.get('Rackspace','key')) creds.append("export PASSPHRASE=%s\n" % passphrase) creds_file = open('scripts/backup_restore_creds', 'w') creds_file.writelines(creds) creds_file.close() put('scripts/backup_restore_creds', '/usr/local/etc/backup_restore_creds', mode=0755) # Grab python-cloudfiles run('git clone git://github.com/rackspace/python-cloudfiles.git /opt/python-cloudfiles', pty=True) with cd('/opt/python-cloudfiles/'): run('python setup.py install') # Setting self-destruct for 48 hours run('echo "halt" | at now + 2 days', pty=True) # Disabling password authentication in SSH run('sed -i -r -e "s/^[ #]*(PasswordAuthentication).*/PasswordAuthentication no/" /etc/ssh/sshd_config', pty=True) run('/etc/init.d/ssh restart', pty=True) # Setting a firewall run('/usr/local/bin/firewall start', pty=True) # Preparing restore script to run run('/usr/local/bin/backup_restore_wrapper %s %s %s' % (server, email, backupprovider), pty=True)
geraldinepascal/FROGS
refs/heads/master
assessment/bin/qiime_completTax.py
1
#!/usr/bin/env python2.7 # # Copyright (C) 2016 INRA # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # __author__ = 'Sigenae INRA Jouy en Josas' __copyright__ = 'Copyright (C) 2016 INRA' __license__ = 'GNU General Public License' __version__ = '1.0.0' __email__ = '[email protected]' __status__ = 'prod' import argparse def completTax(in_tax,out_tax): utax_ranks = ["d:", "p:", "c:", "o:", "f:", "g:"] max_rank=0 tmp_lines=list() FH_in = open(in_tax) FH_out = open(out_tax,"w") for line in FH_in: taxonomy=line.split("\t")[1] new_tax = taxonomy if not taxonomy.endswith(";") else taxonomy[:-1] # incomplet taxonomy if not taxonomy == "Unassigned" and taxonomy.startswith("d:") : #utax database max_rank=6 if len(new_tax.split(";")) < 6: last_rank = taxonomy.split(";")[-1][0:2] idx_rank = utax_ranks.index(last_rank) for i in xrange(idx_rank+1, len(utax_ranks)): new_tax += ";"+utax_ranks[i]+"unknown_taxa" elif not taxonomy == "Unassigned" : # silva database max_rank=7 idx_rank = len(taxonomy.split(";")) for i in xrange(idx_rank, 7): new_tax += ";"+"unknown_taxa" elif taxonomy=="Unassigned": if max_rank >0: if tmp_lines != []: for l in tmp_lines : tmp_taxonomy = line.split("\t")[1] tmp_new_tax = tmp_taxonomy+";" tmp_new_tax *= max_rank tmp_new_tax = tmp_new_tax[:-1] l = l.replace(tmp_taxonomy,tmp_new_tax) FH_out.write(l) tmp_lines = [] new_tax += ";" new_tax *= max_rank new_tax = new_tax[:-1] else: tmp_lines.append(line) continue line=line.replace(taxonomy,new_tax) FH_out.write(line) if tmp_lines != []: for l in tmp_lines : tmp_taxonomy = line.split("\t")[1] tmp_new_tax = tmp_taxonomy+";" tmp_new_tax *= max_rank tmp_new_tax = tmp_new_tax[:-1] l = l.replace(tmp_taxonomy,tmp_new_tax) FH_out.write(l) FH_in.close() FH_out.close() if __name__ == "__main__": # Manage parameters parser = argparse.ArgumentParser(description="Complete qiime taxonomy to have always 6 ranks (for utax) or 7 ranks (for silva).") group_input = parser.add_argument_group('Inputs') group_input.add_argument('-i', '--input-tax', required=True, help='The Qiime output taxonomy file') group_output = parser.add_argument_group('Outputs') group_output.add_argument('-o', '--output-tax', required=True, help='The completed taxonomy output file') args = parser.parse_args() completTax(args.input_tax,args.output_tax)
lazy404/ansible-for-devops
refs/heads/master
dynamic-inventory/digitalocean/digital_ocean.py
63
#!/usr/bin/env python ''' DigitalOcean external inventory script ====================================== Generates Ansible inventory of DigitalOcean Droplets. In addition to the --list and --host options used by Ansible, there are options for generating JSON of other DigitalOcean data. This is useful when creating droplets. For example, --regions will return all the DigitalOcean Regions. This information can also be easily found in the cache file, whose default location is /tmp/ansible-digital_ocean.cache). The --pretty (-p) option pretty-prints the output for better human readability. ---- Although the cache stores all the information received from DigitalOcean, the cache is not used for current droplet information (in --list, --host, --all, and --droplets). This is so that accurate droplet information is always found. You can force this script to use the cache with --force-cache. ---- Configuration is read from `digital_ocean.ini`, then from environment variables, then and command-line arguments. Most notably, the DigitalOcean Client ID and API Key must be specified. They can be specified in the INI file or with the following environment variables: export DO_CLIENT_ID='DO123' DO_API_KEY='abc123' Alternatively, they can be passed on the command-line with --client-id and --api-key. If you specify DigitalOcean credentials in the INI file, a handy way to get them into your environment (e.g., to use the digital_ocean module) is to use the output of the --env option with export: export $(digital_ocean.py --env) ---- The following groups are generated from --list: - ID (droplet ID) - NAME (droplet NAME) - image_ID - image_NAME - distro_NAME (distribution NAME from image) - region_ID - region_NAME - size_ID - size_NAME - status_STATUS When run against a specific host, this script returns the following variables: - do_created_at - do_distroy - do_id - do_image - do_image_id - do_ip_address - do_name - do_region - do_region_id - do_size - do_size_id - do_status ----- ``` usage: digital_ocean.py [-h] [--list] [--host HOST] [--all] [--droplets] [--regions] [--images] [--sizes] [--ssh-keys] [--domains] [--pretty] [--cache-path CACHE_PATH] [--cache-max_age CACHE_MAX_AGE] [--refresh-cache] [--client-id CLIENT_ID] [--api-key API_KEY] Produce an Ansible Inventory file based on DigitalOcean credentials optional arguments: -h, --help show this help message and exit --list List all active Droplets as Ansible inventory (default: True) --host HOST Get all Ansible inventory variables about a specific Droplet --all List all DigitalOcean information as JSON --droplets List Droplets as JSON --regions List Regions as JSON --images List Images as JSON --sizes List Sizes as JSON --ssh-keys List SSH keys as JSON --domains List Domains as JSON --pretty, -p Pretty-print results --cache-path CACHE_PATH Path to the cache files (default: .) --cache-max_age CACHE_MAX_AGE Maximum age of the cached items (default: 0) --refresh-cache Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files) --client-id CLIENT_ID, -c CLIENT_ID DigitalOcean Client ID --api-key API_KEY, -a API_KEY DigitalOcean API Key ``` ''' # (c) 2013, Evan Wies <[email protected]> # # Inspired by the EC2 inventory plugin: # https://github.com/ansible/ansible/blob/devel/plugins/inventory/ec2.py # # This file is part of Ansible, # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ###################################################################### import os import sys import re import argparse from time import time import ConfigParser try: import json except ImportError: import simplejson as json try: from dopy.manager import DoError, DoManager except ImportError, e: print "failed=True msg='`dopy` library required for this script'" sys.exit(1) class DigitalOceanInventory(object): ########################################################################### # Main execution path ########################################################################### def __init__(self): ''' Main execution path ''' # DigitalOceanInventory data self.data = {} # All DigitalOcean data self.inventory = {} # Ansible Inventory self.index = {} # Various indices of Droplet metadata # Define defaults self.cache_path = '.' self.cache_max_age = 0 # Read settings, environment variables, and CLI arguments self.read_settings() self.read_environment() self.read_cli_args() # Verify credentials were set if not hasattr(self, 'client_id') or not hasattr(self, 'api_key'): print '''Could not find values for DigitalOcean client_id and api_key. They must be specified via either ini file, command line argument (--client-id and --api-key), or environment variables (DO_CLIENT_ID and DO_API_KEY)''' sys.exit(-1) # env command, show DigitalOcean credentials if self.args.env: print "DO_CLIENT_ID=%s DO_API_KEY=%s" % (self.client_id, self.api_key) sys.exit(0) # Manage cache self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache" self.cache_refreshed = False if not self.args.force_cache and self.args.refresh_cache or not self.is_cache_valid(): self.load_all_data_from_digital_ocean() else: self.load_from_cache() if len(self.data) == 0: if self.args.force_cache: print '''Cache is empty and --force-cache was specified''' sys.exit(-1) self.load_all_data_from_digital_ocean() else: # We always get fresh droplets for --list, --host, --all, and --droplets # unless --force-cache is specified if not self.args.force_cache and ( self.args.list or self.args.host or self.args.all or self.args.droplets): self.load_droplets_from_digital_ocean() # Pick the json_data to print based on the CLI command if self.args.droplets: json_data = { 'droplets': self.data['droplets'] } elif self.args.regions: json_data = { 'regions': self.data['regions'] } elif self.args.images: json_data = { 'images': self.data['images'] } elif self.args.sizes: json_data = { 'sizes': self.data['sizes'] } elif self.args.ssh_keys: json_data = { 'ssh_keys': self.data['ssh_keys'] } elif self.args.domains: json_data = { 'domains': self.data['domains'] } elif self.args.all: json_data = self.data elif self.args.host: json_data = self.load_droplet_variables_for_host() else: # '--list' this is last to make it default json_data = self.inventory if self.args.pretty: print json.dumps(json_data, sort_keys=True, indent=2) else: print json.dumps(json_data) # That's all she wrote... ########################################################################### # Script configuration ########################################################################### def read_settings(self): ''' Reads the settings from the digital_ocean.ini file ''' config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini') # Credentials if config.has_option('digital_ocean', 'client_id'): self.client_id = config.get('digital_ocean', 'client_id') if config.has_option('digital_ocean', 'api_key'): self.api_key = config.get('digital_ocean', 'api_key') # Cache related if config.has_option('digital_ocean', 'cache_path'): self.cache_path = config.get('digital_ocean', 'cache_path') if config.has_option('digital_ocean', 'cache_max_age'): self.cache_max_age = config.getint('digital_ocean', 'cache_max_age') def read_environment(self): ''' Reads the settings from environment variables ''' # Setup credentials if os.getenv("DO_CLIENT_ID"): self.client_id = os.getenv("DO_CLIENT_ID") if os.getenv("DO_API_KEY"): self.api_key = os.getenv("DO_API_KEY") def read_cli_args(self): ''' Command line argument processing ''' parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials') parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)') parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet') parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON') parser.add_argument('--droplets','-d', action='store_true', help='List Droplets as JSON') parser.add_argument('--regions', action='store_true', help='List Regions as JSON') parser.add_argument('--images', action='store_true', help='List Images as JSON') parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON') parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON') parser.add_argument('--domains', action='store_true',help='List Domains as JSON') parser.add_argument('--pretty','-p', action='store_true', help='Pretty-print results') parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)') parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)') parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache') parser.add_argument('--refresh-cache','-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)') parser.add_argument('--env','-e', action='store_true', help='Display DO_CLIENT_ID and DO_API_KEY') parser.add_argument('--client-id','-c', action='store', help='DigitalOcean Client ID') parser.add_argument('--api-key','-a', action='store', help='DigitalOcean API Key') self.args = parser.parse_args() if self.args.client_id: self.client_id = self.args.client_id if self.args.api_key: self.api_key = self.args.api_key if self.args.cache_path: self.cache_path = self.args.cache_path if self.args.cache_max_age: self.cache_max_age = self.args.cache_max_age # Make --list default if none of the other commands are specified if (not self.args.droplets and not self.args.regions and not self.args.images and not self.args.sizes and not self.args.ssh_keys and not self.args.domains and not self.args.all and not self.args.host): self.args.list = True ########################################################################### # Data Management ########################################################################### def load_all_data_from_digital_ocean(self): ''' Use dopy to get all the information from DigitalOcean and save data in cache files ''' manager = DoManager(self.client_id, self.api_key) self.data = {} self.data['droplets'] = self.sanitize_list(manager.all_active_droplets()) self.data['regions'] = self.sanitize_list(manager.all_regions()) self.data['images'] = self.sanitize_list(manager.all_images(filter=None)) self.data['sizes'] = self.sanitize_list(manager.sizes()) self.data['ssh_keys'] = self.sanitize_list(manager.all_ssh_keys()) self.data['domains'] = self.sanitize_list(manager.all_domains()) self.index = {} self.index['region_to_name'] = self.build_index(self.data['regions'], 'id', 'name') self.index['size_to_name'] = self.build_index(self.data['sizes'], 'id', 'name') self.index['image_to_name'] = self.build_index(self.data['images'], 'id', 'name') self.index['image_to_distro'] = self.build_index(self.data['images'], 'id', 'distribution') self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False) self.build_inventory() self.write_to_cache() def load_droplets_from_digital_ocean(self): ''' Use dopy to get droplet information from DigitalOcean and save data in cache files ''' manager = DoManager(self.client_id, self.api_key) self.data['droplets'] = self.sanitize_list(manager.all_active_droplets()) self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False) self.build_inventory() self.write_to_cache() def build_index(self, source_seq, key_from, key_to, use_slug=True): dest_dict = {} for item in source_seq: name = (use_slug and item.has_key('slug')) and item['slug'] or item[key_to] key = item[key_from] dest_dict[key] = name return dest_dict def build_inventory(self): '''Build Ansible inventory of droplets''' self.inventory = {} # add all droplets by id and name for droplet in self.data['droplets']: dest = droplet['ip_address'] self.inventory[droplet['id']] = [dest] self.push(self.inventory, droplet['name'], dest) self.push(self.inventory, 'region_'+droplet['region_id'], dest) self.push(self.inventory, 'image_' +droplet['image_id'], dest) self.push(self.inventory, 'size_' +droplet['size_id'], dest) self.push(self.inventory, 'status_'+droplet['status'], dest) region_name = self.index['region_to_name'].get(droplet['region_id']) if region_name: self.push(self.inventory, 'region_'+region_name, dest) size_name = self.index['size_to_name'].get(droplet['size_id']) if size_name: self.push(self.inventory, 'size_'+size_name, dest) image_name = self.index['image_to_name'].get(droplet['image_id']) if image_name: self.push(self.inventory, 'image_'+image_name, dest) distro_name = self.index['image_to_distro'].get(droplet['image_id']) if distro_name: self.push(self.inventory, 'distro_'+distro_name, dest) def load_droplet_variables_for_host(self): '''Generate a JSON response to a --host call''' host = self.to_safe(str(self.args.host)) if not host in self.index['host_to_droplet']: # try updating cache if not self.args.force_cache: self.load_all_data_from_digital_ocean() if not host in self.index['host_to_droplet']: # host might not exist anymore return {} droplet = None if self.cache_refreshed: for drop in self.data['droplets']: if drop['ip_address'] == host: droplet = self.sanitize_dict(drop) break else: # Cache wasn't refreshed this run, so hit DigitalOcean API manager = DoManager(self.client_id, self.api_key) droplet_id = self.index['host_to_droplet'][host] droplet = self.sanitize_dict(manager.show_droplet(droplet_id)) if not droplet: return {} # Put all the information in a 'do_' namespace info = {} for k, v in droplet.items(): info['do_'+k] = v # Generate user-friendly variables (i.e. not the ID's) if droplet.has_key('region_id'): info['do_region'] = self.index['region_to_name'].get(droplet['region_id']) if droplet.has_key('size_id'): info['do_size'] = self.index['size_to_name'].get(droplet['size_id']) if droplet.has_key('image_id'): info['do_image'] = self.index['image_to_name'].get(droplet['image_id']) info['do_distro'] = self.index['image_to_distro'].get(droplet['image_id']) return info ########################################################################### # Cache Management ########################################################################### def is_cache_valid(self): ''' Determines if the cache files have expired, or if it is still valid ''' if os.path.isfile(self.cache_filename): mod_time = os.path.getmtime(self.cache_filename) current_time = time() if (mod_time + self.cache_max_age) > current_time: return True return False def load_from_cache(self): ''' Reads the data from the cache file and assigns it to member variables as Python Objects''' cache = open(self.cache_filename, 'r') json_data = cache.read() cache.close() data = json.loads(json_data) self.data = data['data'] self.inventory = data['inventory'] self.index = data['index'] def write_to_cache(self): ''' Writes data in JSON format to a file ''' data = { 'data': self.data, 'index': self.index, 'inventory': self.inventory } json_data = json.dumps(data, sort_keys=True, indent=2) cache = open(self.cache_filename, 'w') cache.write(json_data) cache.close() ########################################################################### # Utilities ########################################################################### def push(self, my_dict, key, element): ''' Pushed an element onto an array that may not have been defined in the dict ''' if key in my_dict: my_dict[key].append(element); else: my_dict[key] = [element] def to_safe(self, word): ''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups ''' return re.sub("[^A-Za-z0-9\-\.]", "_", word) def sanitize_dict(self, d): new_dict = {} for k, v in d.items(): if v != None: new_dict[self.to_safe(str(k))] = self.to_safe(str(v)) return new_dict def sanitize_list(self, seq): new_seq = [] for d in seq: new_seq.append(self.sanitize_dict(d)) return new_seq ########################################################################### # Run the script DigitalOceanInventory()
lemieuxl/pyGenClean
refs/heads/master
docs/conf.py
1
# -*- coding: utf-8 -*- # # pyGenClean documentation build configuration file, created by # sphinx-quickstart on Wed Dec 12 15:08:13 2012. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.mathjax', 'sphinx.ext.viewcode', 'sphinx.ext.doctest'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'pyGenClean' copyright = u'2012, Louis-Philippe Lemieux Perreault' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. import pyGenClean version = ".".join(pyGenClean.__version__.split(".")[:-1]) # The full version, including alpha/beta/rc tags. release = pyGenClean.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'friendly' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinxdoc' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. ## html_theme_options = { ## "full_logo": True ## } # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = "_static/logo.png" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'pyGenCleandoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'pyGenClean.tex', u'pyGenClean Documentation', u'Louis-Philippe Lemieux Perreault', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. latex_logo = "_static/logo.png" # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'statgendatacleanup', u'pyGenClean Documentation', [u'Louis-Philippe Lemieux Perreault'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'pyGenClean', u'pyGenClean Documentation', u'Louis-Philippe Lemieux Perreault', 'pyGenClean', 'Fast and efficient genetic data clean up.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { 'http://docs.python.org/': None, 'numpy': ('http://docs.scipy.org/doc/numpy/', None), }
OpenDMM/bitbake
refs/heads/master
lib/bb/event.py
2
# ex:ts=4:sw=4:sts=4:et # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- """ BitBake 'Event' implementation Classes and functions for manipulating 'events' in the BitBake build tools. """ # Copyright (C) 2003, 2004 Chris Larson # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. import os, sys import warnings try: import cPickle as pickle except ImportError: import pickle import logging import atexit import traceback import bb.utils import bb.compat import bb.exceptions # This is the pid for which we should generate the event. This is set when # the runqueue forks off. worker_pid = 0 worker_fire = None logger = logging.getLogger('BitBake.Event') class Event(object): """Base class for events""" def __init__(self): self.pid = worker_pid Registered = 10 AlreadyRegistered = 14 def get_class_handlers(): return _handlers def set_class_handlers(h): _handlers = h def clean_class_handlers(): return bb.compat.OrderedDict() # Internal _handlers = clean_class_handlers() _ui_handlers = {} _ui_logfilters = {} _ui_handler_seq = 0 _event_handler_map = {} _catchall_handlers = {} def execute_handler(name, handler, event, d): event.data = d try: ret = handler(event) except bb.parse.SkipPackage: raise except Exception: etype, value, tb = sys.exc_info() logger.error("Execution of event handler '%s' failed" % name, exc_info=(etype, value, tb.tb_next)) raise except SystemExit as exc: if exc.code != 0: logger.error("Execution of event handler '%s' failed" % name) raise finally: del event.data def fire_class_handlers(event, d): if isinstance(event, logging.LogRecord): return eid = str(event.__class__)[8:-2] evt_hmap = _event_handler_map.get(eid, {}) for name, handler in _handlers.iteritems(): if name in _catchall_handlers or name in evt_hmap: try: execute_handler(name, handler, event, d) except Exception: continue ui_queue = [] @atexit.register def print_ui_queue(): """If we're exiting before a UI has been spawned, display any queued LogRecords to the console.""" logger = logging.getLogger("BitBake") if not _ui_handlers: from bb.msg import BBLogFormatter console = logging.StreamHandler(sys.stdout) console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s")) logger.handlers = [console] # First check to see if we have any proper messages msgprint = False for event in ui_queue: if isinstance(event, logging.LogRecord): if event.levelno > logging.DEBUG: logger.handle(event) msgprint = True if msgprint: return # Nope, so just print all of the messages we have (including debug messages) for event in ui_queue: if isinstance(event, logging.LogRecord): logger.handle(event) def fire_ui_handlers(event, d): if not _ui_handlers: # No UI handlers registered yet, queue up the messages ui_queue.append(event) return errors = [] for h in _ui_handlers: #print "Sending event %s" % event try: if not _ui_logfilters[h].filter(event): continue # We use pickle here since it better handles object instances # which xmlrpc's marshaller does not. Events *must* be serializable # by pickle. if hasattr(_ui_handlers[h].event, "sendpickle"): _ui_handlers[h].event.sendpickle((pickle.dumps(event))) else: _ui_handlers[h].event.send(event) except: errors.append(h) for h in errors: del _ui_handlers[h] def fire(event, d): """Fire off an Event""" # We can fire class handlers in the worker process context and this is # desired so they get the task based datastore. # UI handlers need to be fired in the server context so we defer this. They # don't have a datastore so the datastore context isn't a problem. fire_class_handlers(event, d) if worker_fire: worker_fire(event, d) else: fire_ui_handlers(event, d) def fire_from_worker(event, d): fire_ui_handlers(event, d) noop = lambda _: None def register(name, handler, mask=[]): """Register an Event handler""" # already registered if name in _handlers: return AlreadyRegistered if handler is not None: # handle string containing python code if isinstance(handler, basestring): tmp = "def %s(e):\n%s" % (name, handler) try: code = compile(tmp, "%s(e)" % name, "exec") except SyntaxError: logger.error("Unable to register event handler '%s':\n%s", name, ''.join(traceback.format_exc(limit=0))) _handlers[name] = noop return env = {} bb.utils.better_exec(code, env) func = bb.utils.better_eval(name, env) _handlers[name] = func else: _handlers[name] = handler if not mask or '*' in mask: _catchall_handlers[name] = True else: for m in mask: if _event_handler_map.get(m, None) is None: _event_handler_map[m] = {} _event_handler_map[m][name] = True return Registered def remove(name, handler): """Remove an Event handler""" _handlers.pop(name) def register_UIHhandler(handler): bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1 _ui_handlers[_ui_handler_seq] = handler level, debug_domains = bb.msg.constructLogOptions() _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains) return _ui_handler_seq def unregister_UIHhandler(handlerNum): if handlerNum in _ui_handlers: del _ui_handlers[handlerNum] return # Class to allow filtering of events and specific filtering of LogRecords *before* we put them over the IPC class UIEventFilter(object): def __init__(self, level, debug_domains): self.update(None, level, debug_domains) def update(self, eventmask, level, debug_domains): self.eventmask = eventmask self.stdlevel = level self.debug_domains = debug_domains def filter(self, event): if isinstance(event, logging.LogRecord): if event.levelno >= self.stdlevel: return True if event.name in self.debug_domains and event.levelno >= self.debug_domains[event.name]: return True return False eid = str(event.__class__)[8:-2] if self.eventmask and eid not in self.eventmask: return False return True def set_UIHmask(handlerNum, level, debug_domains, mask): if not handlerNum in _ui_handlers: return False if '*' in mask: _ui_logfilters[handlerNum].update(None, level, debug_domains) else: _ui_logfilters[handlerNum].update(mask, level, debug_domains) return True def getName(e): """Returns the name of a class or class instance""" if getattr(e, "__name__", None) == None: return e.__class__.__name__ else: return e.__name__ class OperationStarted(Event): """An operation has begun""" def __init__(self, msg = "Operation Started"): Event.__init__(self) self.msg = msg class OperationCompleted(Event): """An operation has completed""" def __init__(self, total, msg = "Operation Completed"): Event.__init__(self) self.total = total self.msg = msg class OperationProgress(Event): """An operation is in progress""" def __init__(self, current, total, msg = "Operation in Progress"): Event.__init__(self) self.current = current self.total = total self.msg = msg + ": %s/%s" % (current, total); class ConfigParsed(Event): """Configuration Parsing Complete""" class RecipeEvent(Event): def __init__(self, fn): self.fn = fn Event.__init__(self) class RecipePreFinalise(RecipeEvent): """ Recipe Parsing Complete but not yet finialised""" class RecipeParsed(RecipeEvent): """ Recipe Parsing Complete """ class StampUpdate(Event): """Trigger for any adjustment of the stamp files to happen""" def __init__(self, targets, stampfns): self._targets = targets self._stampfns = stampfns Event.__init__(self) def getStampPrefix(self): return self._stampfns def getTargets(self): return self._targets stampPrefix = property(getStampPrefix) targets = property(getTargets) class BuildBase(Event): """Base class for bbmake run events""" def __init__(self, n, p, failures = 0): self._name = n self._pkgs = p Event.__init__(self) self._failures = failures def getPkgs(self): return self._pkgs def setPkgs(self, pkgs): self._pkgs = pkgs def getName(self): return self._name def setName(self, name): self._name = name def getCfg(self): return self.data def setCfg(self, cfg): self.data = cfg def getFailures(self): """ Return the number of failed packages """ return self._failures pkgs = property(getPkgs, setPkgs, None, "pkgs property") name = property(getName, setName, None, "name property") cfg = property(getCfg, setCfg, None, "cfg property") class BuildStarted(BuildBase, OperationStarted): """bbmake build run started""" def __init__(self, n, p, failures = 0): OperationStarted.__init__(self, "Building Started") BuildBase.__init__(self, n, p, failures) class BuildCompleted(BuildBase, OperationCompleted): """bbmake build run completed""" def __init__(self, total, n, p, failures = 0): if not failures: OperationCompleted.__init__(self, total, "Building Succeeded") else: OperationCompleted.__init__(self, total, "Building Failed") BuildBase.__init__(self, n, p, failures) class DiskFull(Event): """Disk full case build aborted""" def __init__(self, dev, type, freespace, mountpoint): Event.__init__(self) self._dev = dev self._type = type self._free = freespace self._mountpoint = mountpoint class NoProvider(Event): """No Provider for an Event""" def __init__(self, item, runtime=False, dependees=None, reasons=[], close_matches=[]): Event.__init__(self) self._item = item self._runtime = runtime self._dependees = dependees self._reasons = reasons self._close_matches = close_matches def getItem(self): return self._item def isRuntime(self): return self._runtime class MultipleProviders(Event): """Multiple Providers""" def __init__(self, item, candidates, runtime = False): Event.__init__(self) self._item = item self._candidates = candidates self._is_runtime = runtime def isRuntime(self): """ Is this a runtime issue? """ return self._is_runtime def getItem(self): """ The name for the to be build item """ return self._item def getCandidates(self): """ Get the possible Candidates for a PROVIDER. """ return self._candidates class ParseStarted(OperationStarted): """Recipe parsing for the runqueue has begun""" def __init__(self, total): OperationStarted.__init__(self, "Recipe parsing Started") self.total = total class ParseCompleted(OperationCompleted): """Recipe parsing for the runqueue has completed""" def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total): OperationCompleted.__init__(self, total, "Recipe parsing Completed") self.cached = cached self.parsed = parsed self.skipped = skipped self.virtuals = virtuals self.masked = masked self.errors = errors self.sofar = cached + parsed class ParseProgress(OperationProgress): """Recipe parsing progress""" def __init__(self, current, total): OperationProgress.__init__(self, current, total, "Recipe parsing") class CacheLoadStarted(OperationStarted): """Loading of the dependency cache has begun""" def __init__(self, total): OperationStarted.__init__(self, "Loading cache Started") self.total = total class CacheLoadProgress(OperationProgress): """Cache loading progress""" def __init__(self, current, total): OperationProgress.__init__(self, current, total, "Loading cache") class CacheLoadCompleted(OperationCompleted): """Cache loading is complete""" def __init__(self, total, num_entries): OperationCompleted.__init__(self, total, "Loading cache Completed") self.num_entries = num_entries class TreeDataPreparationStarted(OperationStarted): """Tree data preparation started""" def __init__(self): OperationStarted.__init__(self, "Preparing tree data Started") class TreeDataPreparationProgress(OperationProgress): """Tree data preparation is in progress""" def __init__(self, current, total): OperationProgress.__init__(self, current, total, "Preparing tree data") class TreeDataPreparationCompleted(OperationCompleted): """Tree data preparation completed""" def __init__(self, total): OperationCompleted.__init__(self, total, "Preparing tree data Completed") class DepTreeGenerated(Event): """ Event when a dependency tree has been generated """ def __init__(self, depgraph): Event.__init__(self) self._depgraph = depgraph class TargetsTreeGenerated(Event): """ Event when a set of buildable targets has been generated """ def __init__(self, model): Event.__init__(self) self._model = model class FilesMatchingFound(Event): """ Event when a list of files matching the supplied pattern has been generated """ def __init__(self, pattern, matches): Event.__init__(self) self._pattern = pattern self._matches = matches class CoreBaseFilesFound(Event): """ Event when a list of appropriate config files has been generated """ def __init__(self, paths): Event.__init__(self) self._paths = paths class ConfigFilesFound(Event): """ Event when a list of appropriate config files has been generated """ def __init__(self, variable, values): Event.__init__(self) self._variable = variable self._values = values class ConfigFilePathFound(Event): """ Event when a path for a config file has been found """ def __init__(self, path): Event.__init__(self) self._path = path class MsgBase(Event): """Base class for messages""" def __init__(self, msg): self._message = msg Event.__init__(self) class MsgDebug(MsgBase): """Debug Message""" class MsgNote(MsgBase): """Note Message""" class MsgWarn(MsgBase): """Warning Message""" class MsgError(MsgBase): """Error Message""" class MsgFatal(MsgBase): """Fatal Message""" class MsgPlain(MsgBase): """General output""" class LogExecTTY(Event): """Send event containing program to spawn on tty of the logger""" def __init__(self, msg, prog, sleep_delay, retries): Event.__init__(self) self.msg = msg self.prog = prog self.sleep_delay = sleep_delay self.retries = retries class LogHandler(logging.Handler): """Dispatch logging messages as bitbake events""" def emit(self, record): if record.exc_info: etype, value, tb = record.exc_info if hasattr(tb, 'tb_next'): tb = list(bb.exceptions.extract_traceback(tb, context=3)) record.bb_exc_info = (etype, value, tb) record.exc_info = None fire(record, None) def filter(self, record): record.taskpid = worker_pid return True class RequestPackageInfo(Event): """ Event to request package information """ class PackageInfo(Event): """ Package information for GUI """ def __init__(self, pkginfolist): Event.__init__(self) self._pkginfolist = pkginfolist class MetadataEvent(Event): """ Generic event that target for OE-Core classes to report information during asynchrous execution """ def __init__(self, eventtype, eventdata): Event.__init__(self) self.type = eventtype self.data = eventdata class SanityCheck(Event): """ Event to issue sanity check """ class SanityCheckPassed(Event): """ Event to indicate sanity check is passed """ class SanityCheckFailed(Event): """ Event to indicate sanity check has failed """ def __init__(self, msg, network_error=False): Event.__init__(self) self._msg = msg self._network_error = network_error class NetworkTest(Event): """ Event to start network test """ class NetworkTestPassed(Event): """ Event to indicate network test has passed """ class NetworkTestFailed(Event): """ Event to indicate network test has failed """
gauravbose/digital-menu
refs/heads/master
django/core/cache/backends/locmem.py
586
"Thread-safe in-memory cache backend." import time from contextlib import contextmanager from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache from django.utils.synch import RWLock try: from django.utils.six.moves import cPickle as pickle except ImportError: import pickle # Global in-memory store of cache data. Keyed by name, to provide # multiple named local memory caches. _caches = {} _expire_info = {} _locks = {} @contextmanager def dummy(): """A context manager that does nothing special.""" yield class LocMemCache(BaseCache): def __init__(self, name, params): BaseCache.__init__(self, params) self._cache = _caches.setdefault(name, {}) self._expire_info = _expire_info.setdefault(name, {}) self._lock = _locks.setdefault(name, RWLock()) def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) with self._lock.writer(): if self._has_expired(key): self._set(key, pickled, timeout) return True return False def get(self, key, default=None, version=None, acquire_lock=True): key = self.make_key(key, version=version) self.validate_key(key) pickled = None with (self._lock.reader() if acquire_lock else dummy()): if not self._has_expired(key): pickled = self._cache[key] if pickled is not None: try: return pickle.loads(pickled) except pickle.PickleError: return default with (self._lock.writer() if acquire_lock else dummy()): try: del self._cache[key] del self._expire_info[key] except KeyError: pass return default def _set(self, key, value, timeout=DEFAULT_TIMEOUT): if len(self._cache) >= self._max_entries: self._cull() self._cache[key] = value self._expire_info[key] = self.get_backend_timeout(timeout) def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL) with self._lock.writer(): self._set(key, pickled, timeout) def incr(self, key, delta=1, version=None): with self._lock.writer(): value = self.get(key, version=version, acquire_lock=False) if value is None: raise ValueError("Key '%s' not found" % key) new_value = value + delta key = self.make_key(key, version=version) pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL) self._cache[key] = pickled return new_value def has_key(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) with self._lock.reader(): if not self._has_expired(key): return True with self._lock.writer(): try: del self._cache[key] del self._expire_info[key] except KeyError: pass return False def _has_expired(self, key): exp = self._expire_info.get(key, -1) if exp is None or exp > time.time(): return False return True def _cull(self): if self._cull_frequency == 0: self.clear() else: doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0] for k in doomed: self._delete(k) def _delete(self, key): try: del self._cache[key] except KeyError: pass try: del self._expire_info[key] except KeyError: pass def delete(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) with self._lock.writer(): self._delete(key) def clear(self): self._cache.clear() self._expire_info.clear()
bak1an/django
refs/heads/master
tests/decorators/tests.py
53
from functools import update_wrapper, wraps from unittest import TestCase from django.contrib.admin.views.decorators import staff_member_required from django.contrib.auth.decorators import ( login_required, permission_required, user_passes_test, ) from django.http import HttpRequest, HttpResponse, HttpResponseNotAllowed from django.middleware.clickjacking import XFrameOptionsMiddleware from django.test import SimpleTestCase from django.utils.decorators import method_decorator from django.utils.functional import keep_lazy, keep_lazy_text, lazy from django.utils.safestring import mark_safe from django.views.decorators.cache import ( cache_control, cache_page, never_cache, ) from django.views.decorators.clickjacking import ( xframe_options_deny, xframe_options_exempt, xframe_options_sameorigin, ) from django.views.decorators.http import ( condition, require_GET, require_http_methods, require_POST, require_safe, ) from django.views.decorators.vary import vary_on_cookie, vary_on_headers def fully_decorated(request): """Expected __doc__""" return HttpResponse('<html><body>dummy</body></html>') fully_decorated.anything = "Expected __dict__" def compose(*functions): # compose(f, g)(*args, **kwargs) == f(g(*args, **kwargs)) functions = list(reversed(functions)) def _inner(*args, **kwargs): result = functions[0](*args, **kwargs) for f in functions[1:]: result = f(result) return result return _inner full_decorator = compose( # django.views.decorators.http require_http_methods(["GET"]), require_GET, require_POST, require_safe, condition(lambda r: None, lambda r: None), # django.views.decorators.vary vary_on_headers('Accept-language'), vary_on_cookie, # django.views.decorators.cache cache_page(60 * 15), cache_control(private=True), never_cache, # django.contrib.auth.decorators # Apply user_passes_test twice to check #9474 user_passes_test(lambda u: True), login_required, permission_required('change_world'), # django.contrib.admin.views.decorators staff_member_required, # django.utils.functional keep_lazy(HttpResponse), keep_lazy_text, lazy, # django.utils.safestring mark_safe, ) fully_decorated = full_decorator(fully_decorated) class DecoratorsTest(TestCase): def test_attributes(self): """ Built-in decorators set certain attributes of the wrapped function. """ self.assertEqual(fully_decorated.__name__, 'fully_decorated') self.assertEqual(fully_decorated.__doc__, 'Expected __doc__') self.assertEqual(fully_decorated.__dict__['anything'], 'Expected __dict__') def test_user_passes_test_composition(self): """ The user_passes_test decorator can be applied multiple times (#9474). """ def test1(user): user.decorators_applied.append('test1') return True def test2(user): user.decorators_applied.append('test2') return True def callback(request): return request.user.decorators_applied callback = user_passes_test(test1)(callback) callback = user_passes_test(test2)(callback) class DummyUser: pass class DummyRequest: pass request = DummyRequest() request.user = DummyUser() request.user.decorators_applied = [] response = callback(request) self.assertEqual(response, ['test2', 'test1']) def test_cache_page(self): def my_view(request): return "response" my_view_cached = cache_page(123)(my_view) self.assertEqual(my_view_cached(HttpRequest()), "response") my_view_cached2 = cache_page(123, key_prefix="test")(my_view) self.assertEqual(my_view_cached2(HttpRequest()), "response") def test_require_safe_accepts_only_safe_methods(self): """ Test for the require_safe decorator. A view returns either a response or an exception. Refs #15637. """ def my_view(request): return HttpResponse("OK") my_safe_view = require_safe(my_view) request = HttpRequest() request.method = 'GET' self.assertIsInstance(my_safe_view(request), HttpResponse) request.method = 'HEAD' self.assertIsInstance(my_safe_view(request), HttpResponse) request.method = 'POST' self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed) request.method = 'PUT' self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed) request.method = 'DELETE' self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed) # For testing method_decorator, a decorator that assumes a single argument. # We will get type arguments if there is a mismatch in the number of arguments. def simple_dec(func): def wrapper(arg): return func("test:" + arg) return wraps(func)(wrapper) simple_dec_m = method_decorator(simple_dec) # For testing method_decorator, two decorators that add an attribute to the function def myattr_dec(func): def wrapper(*args, **kwargs): return func(*args, **kwargs) wrapper.myattr = True return wraps(func)(wrapper) myattr_dec_m = method_decorator(myattr_dec) def myattr2_dec(func): def wrapper(*args, **kwargs): return func(*args, **kwargs) wrapper.myattr2 = True return wraps(func)(wrapper) myattr2_dec_m = method_decorator(myattr2_dec) class ClsDec: def __init__(self, myattr): self.myattr = myattr def __call__(self, f): def wrapped(): return f() and self.myattr return update_wrapper(wrapped, f) class MethodDecoratorTests(SimpleTestCase): """ Tests for method_decorator """ def test_preserve_signature(self): class Test: @simple_dec_m def say(self, arg): return arg self.assertEqual("test:hello", Test().say("hello")) def test_preserve_attributes(self): # Sanity check myattr_dec and myattr2_dec @myattr_dec @myattr2_dec def func(): pass self.assertIs(getattr(func, 'myattr', False), True) self.assertIs(getattr(func, 'myattr2', False), True) # Decorate using method_decorator() on the method. class TestPlain: @myattr_dec_m @myattr2_dec_m def method(self): "A method" pass # Decorate using method_decorator() on both the class and the method. # The decorators applied to the methods are applied before the ones # applied to the class. @method_decorator(myattr_dec_m, "method") class TestMethodAndClass: @method_decorator(myattr2_dec_m) def method(self): "A method" pass # Decorate using an iterable of decorators. decorators = (myattr_dec_m, myattr2_dec_m) @method_decorator(decorators, "method") class TestIterable: def method(self): "A method" pass for Test in (TestPlain, TestMethodAndClass, TestIterable): self.assertIs(getattr(Test().method, 'myattr', False), True) self.assertIs(getattr(Test().method, 'myattr2', False), True) self.assertIs(getattr(Test.method, 'myattr', False), True) self.assertIs(getattr(Test.method, 'myattr2', False), True) self.assertEqual(Test.method.__doc__, 'A method') self.assertEqual(Test.method.__name__, 'method') def test_bad_iterable(self): decorators = {myattr_dec_m, myattr2_dec_m} msg = "'set' object is not subscriptable" with self.assertRaisesMessage(TypeError, msg): @method_decorator(decorators, "method") class TestIterable: def method(self): "A method" pass # Test for argumented decorator def test_argumented(self): class Test: @method_decorator(ClsDec(False)) def method(self): return True self.assertIs(Test().method(), False) def test_descriptors(self): def original_dec(wrapped): def _wrapped(arg): return wrapped(arg) return _wrapped method_dec = method_decorator(original_dec) class bound_wrapper: def __init__(self, wrapped): self.wrapped = wrapped self.__name__ = wrapped.__name__ def __call__(self, arg): return self.wrapped(arg) def __get__(self, instance, cls=None): return self class descriptor_wrapper: def __init__(self, wrapped): self.wrapped = wrapped self.__name__ = wrapped.__name__ def __get__(self, instance, cls=None): return bound_wrapper(self.wrapped.__get__(instance, cls)) class Test: @method_dec @descriptor_wrapper def method(self, arg): return arg self.assertEqual(Test().method(1), 1) def test_class_decoration(self): """ @method_decorator can be used to decorate a class and its methods. """ def deco(func): def _wrapper(*args, **kwargs): return True return _wrapper @method_decorator(deco, name="method") class Test: def method(self): return False self.assertTrue(Test().method()) def test_tuple_of_decorators(self): """ @method_decorator can accept a tuple of decorators. """ def add_question_mark(func): def _wrapper(*args, **kwargs): return func(*args, **kwargs) + "?" return _wrapper def add_exclamation_mark(func): def _wrapper(*args, **kwargs): return func(*args, **kwargs) + "!" return _wrapper # The order should be consistent with the usual order in which # decorators are applied, e.g. # @add_exclamation_mark # @add_question_mark # def func(): # ... decorators = (add_exclamation_mark, add_question_mark) @method_decorator(decorators, name="method") class TestFirst: def method(self): return "hello world" class TestSecond: @method_decorator(decorators) def method(self): return "hello world" self.assertEqual(TestFirst().method(), "hello world?!") self.assertEqual(TestSecond().method(), "hello world?!") def test_invalid_non_callable_attribute_decoration(self): """ @method_decorator on a non-callable attribute raises an error. """ msg = ( "Cannot decorate 'prop' as it isn't a callable attribute of " "<class 'Test'> (1)" ) with self.assertRaisesMessage(TypeError, msg): @method_decorator(lambda: None, name="prop") class Test: prop = 1 @classmethod def __module__(cls): return "tests" def test_invalid_method_name_to_decorate(self): """ @method_decorator on a nonexistent method raises an error. """ msg = ( "The keyword argument `name` must be the name of a method of the " "decorated class: <class 'Test'>. Got 'nonexistent_method' instead" ) with self.assertRaisesMessage(ValueError, msg): @method_decorator(lambda: None, name='nonexistent_method') class Test: @classmethod def __module__(cls): return "tests" class XFrameOptionsDecoratorsTests(TestCase): """ Tests for the X-Frame-Options decorators. """ def test_deny_decorator(self): """ Ensures @xframe_options_deny properly sets the X-Frame-Options header. """ @xframe_options_deny def a_view(request): return HttpResponse() r = a_view(HttpRequest()) self.assertEqual(r['X-Frame-Options'], 'DENY') def test_sameorigin_decorator(self): """ Ensures @xframe_options_sameorigin properly sets the X-Frame-Options header. """ @xframe_options_sameorigin def a_view(request): return HttpResponse() r = a_view(HttpRequest()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') def test_exempt_decorator(self): """ Ensures @xframe_options_exempt properly instructs the XFrameOptionsMiddleware to NOT set the header. """ @xframe_options_exempt def a_view(request): return HttpResponse() req = HttpRequest() resp = a_view(req) self.assertIsNone(resp.get('X-Frame-Options', None)) self.assertTrue(resp.xframe_options_exempt) # Since the real purpose of the exempt decorator is to suppress # the middleware's functionality, let's make sure it actually works... r = XFrameOptionsMiddleware().process_response(req, resp) self.assertIsNone(r.get('X-Frame-Options', None)) class NeverCacheDecoratorTest(TestCase): def test_never_cache_decorator(self): @never_cache def a_view(request): return HttpResponse() r = a_view(HttpRequest()) self.assertEqual( set(r['Cache-Control'].split(', ')), {'max-age=0', 'no-cache', 'no-store', 'must-revalidate'}, )
QuLogic/python-future
refs/heads/master
src/future/moves/dbm/ndbm.py
83
from __future__ import absolute_import from future.utils import PY3 if PY3: from dbm.ndbm import * else: __future_module__ = True from dbm import *
ettm2012/MissionPlanner
refs/heads/master
Lib/macpath.py
49
"""Pathname and path-related operations for the Macintosh.""" import os import warnings from stat import * import genericpath from genericpath import * __all__ = ["normcase","isabs","join","splitdrive","split","splitext", "basename","dirname","commonprefix","getsize","getmtime", "getatime","getctime", "islink","exists","lexists","isdir","isfile", "walk","expanduser","expandvars","normpath","abspath", "curdir","pardir","sep","pathsep","defpath","altsep","extsep", "devnull","realpath","supports_unicode_filenames"] # strings representing various path-related bits and pieces curdir = ':' pardir = '::' extsep = '.' sep = ':' pathsep = '\n' defpath = ':' altsep = None devnull = 'Dev:Null' # Normalize the case of a pathname. Dummy in Posix, but <s>.lower() here. def normcase(path): return path.lower() def isabs(s): """Return true if a path is absolute. On the Mac, relative paths begin with a colon, but as a special case, paths with no colons at all are also relative. Anything else is absolute (the string up to the first colon is the volume name).""" return ':' in s and s[0] != ':' def join(s, *p): path = s for t in p: if (not s) or isabs(t): path = t continue if t[:1] == ':': t = t[1:] if ':' not in path: path = ':' + path if path[-1:] != ':': path = path + ':' path = path + t return path def split(s): """Split a pathname into two parts: the directory leading up to the final bit, and the basename (the filename, without colons, in that directory). The result (s, t) is such that join(s, t) yields the original argument.""" if ':' not in s: return '', s colon = 0 for i in range(len(s)): if s[i] == ':': colon = i + 1 path, file = s[:colon-1], s[colon:] if path and not ':' in path: path = path + ':' return path, file def splitext(p): return genericpath._splitext(p, sep, altsep, extsep) splitext.__doc__ = genericpath._splitext.__doc__ def splitdrive(p): """Split a pathname into a drive specification and the rest of the path. Useful on DOS/Windows/NT; on the Mac, the drive is always empty (don't use the volume name -- it doesn't have the same syntactic and semantic oddities as DOS drive letters, such as there being a separate current directory per drive).""" return '', p # Short interfaces to split() def dirname(s): return split(s)[0] def basename(s): return split(s)[1] def ismount(s): if not isabs(s): return False components = split(s) return len(components) == 2 and components[1] == '' def islink(s): """Return true if the pathname refers to a symbolic link.""" try: import Carbon.File return Carbon.File.ResolveAliasFile(s, 0)[2] except: return False # Is `stat`/`lstat` a meaningful difference on the Mac? This is safe in any # case. def lexists(path): """Test whether a path exists. Returns True for broken symbolic links""" try: st = os.lstat(path) except os.error: return False return True def expandvars(path): """Dummy to retain interface-compatibility with other operating systems.""" return path def expanduser(path): """Dummy to retain interface-compatibility with other operating systems.""" return path class norm_error(Exception): """Path cannot be normalized""" def normpath(s): """Normalize a pathname. Will return the same result for equivalent paths.""" if ":" not in s: return ":"+s comps = s.split(":") i = 1 while i < len(comps)-1: if comps[i] == "" and comps[i-1] != "": if i > 1: del comps[i-1:i+1] i = i - 1 else: # best way to handle this is to raise an exception raise norm_error, 'Cannot use :: immediately after volume name' else: i = i + 1 s = ":".join(comps) # remove trailing ":" except for ":" and "Volume:" if s[-1] == ":" and len(comps) > 2 and s != ":"*len(s): s = s[:-1] return s def walk(top, func, arg): """Directory tree walk with callback function. For each directory in the directory tree rooted at top (including top itself, but excluding '.' and '..'), call func(arg, dirname, fnames). dirname is the name of the directory, and fnames a list of the names of the files and subdirectories in dirname (excluding '.' and '..'). func may modify the fnames list in-place (e.g. via del or slice assignment), and walk will only recurse into the subdirectories whose names remain in fnames; this can be used to implement a filter, or to impose a specific order of visiting. No semantics are defined for, or required of, arg, beyond that arg is always passed to func. It can be used, e.g., to pass a filename pattern, or a mutable object designed to accumulate statistics. Passing None for arg is common.""" warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", stacklevel=2) try: names = os.listdir(top) except os.error: return func(arg, top, names) for name in names: name = join(top, name) if isdir(name) and not islink(name): walk(name, func, arg) def abspath(path): """Return an absolute path.""" if not isabs(path): if isinstance(path, unicode): cwd = os.getcwdu() else: cwd = os.getcwd() path = join(cwd, path) return normpath(path) # realpath is a no-op on systems without islink support def realpath(path): path = abspath(path) try: import Carbon.File except ImportError: return path if not path: return path components = path.split(':') path = components[0] + ':' for c in components[1:]: path = join(path, c) try: path = Carbon.File.FSResolveAliasFile(path, 1)[0].as_pathname() except Carbon.File.Error: pass return path supports_unicode_filenames = True
nicolargo/intellij-community
refs/heads/master
python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_except.py
326
"""Fixer for except statements with named exceptions. The following cases will be converted: - "except E, T:" where T is a name: except E as T: - "except E, T:" where T is not a name, tuple or list: except E as t: T = t This is done because the target of an "except" clause must be a name. - "except E, T:" where T is a tuple or list literal: except E as t: T = t.args """ # Author: Collin Winter # Local imports from .. import pytree from ..pgen2 import token from .. import fixer_base from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms def find_excepts(nodes): for i, n in enumerate(nodes): if n.type == syms.except_clause: if n.children[0].value == u'except': yield (n, nodes[i+2]) class FixExcept(fixer_base.BaseFix): BM_compatible = True PATTERN = """ try_stmt< 'try' ':' (simple_stmt | suite) cleanup=(except_clause ':' (simple_stmt | suite))+ tail=(['except' ':' (simple_stmt | suite)] ['else' ':' (simple_stmt | suite)] ['finally' ':' (simple_stmt | suite)]) > """ def transform(self, node, results): syms = self.syms tail = [n.clone() for n in results["tail"]] try_cleanup = [ch.clone() for ch in results["cleanup"]] for except_clause, e_suite in find_excepts(try_cleanup): if len(except_clause.children) == 4: (E, comma, N) = except_clause.children[1:4] comma.replace(Name(u"as", prefix=u" ")) if N.type != token.NAME: # Generate a new N for the except clause new_N = Name(self.new_name(), prefix=u" ") target = N.clone() target.prefix = u"" N.replace(new_N) new_N = new_N.clone() # Insert "old_N = new_N" as the first statement in # the except body. This loop skips leading whitespace # and indents #TODO(cwinter) suite-cleanup suite_stmts = e_suite.children for i, stmt in enumerate(suite_stmts): if isinstance(stmt, pytree.Node): break # The assignment is different if old_N is a tuple or list # In that case, the assignment is old_N = new_N.args if is_tuple(N) or is_list(N): assign = Assign(target, Attr(new_N, Name(u'args'))) else: assign = Assign(target, new_N) #TODO(cwinter) stopgap until children becomes a smart list for child in reversed(suite_stmts[:i]): e_suite.insert_child(0, child) e_suite.insert_child(i, assign) elif N.prefix == u"": # No space after a comma is legal; no space after "as", # not so much. N.prefix = u" " #TODO(cwinter) fix this when children becomes a smart list children = [c.clone() for c in node.children[:3]] + try_cleanup + tail return pytree.Node(node.type, children)
davidwaroquiers/pymatgen
refs/heads/master
pymatgen/util/sequence.py
5
# coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. """ This module provides utilities to chunk large sequences and display progress bars during processing. """ import math def get_chunks(sequence, size=1): """ Args: sequence (): size (): Returns: """ chunks = int(math.ceil(len(sequence) / float(size))) return [sequence[i * size : (i + 1) * size] for i in range(chunks)] class PBarSafe: """ Progress bar. """ def __init__(self, total): """ Args: total (): Total value. """ self.total = total self.done = 0 self.report() def update(self, amount): """ Update progress bar by amount. Args: amount (float): """ self.done += amount self.report() def report(self): """ Print progress. """ print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total)) try: # noinspection PyUnresolvedReferences if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore from tqdm import tqdm_notebook as PBar else: # likely 'TerminalInteractiveShell' from tqdm import tqdm as PBar except NameError: try: from tqdm import tqdm as PBar except ImportError: PBar = PBarSafe except ImportError: PBar = PBarSafe
hclivess/Stallion
refs/heads/master
nuitka/Cryptodome/SelfTest/Hash/__init__.py
5
# -*- coding: utf-8 -*- # # SelfTest/Hash/__init__.py: Self-test for hash modules # # Written in 2008 by Dwayne C. Litzenberger <[email protected]> # # =================================================================== # The contents of this file are dedicated to the public domain. To # the extent that dedication to the public domain is not available, # everyone is granted a worldwide, perpetual, royalty-free, # non-exclusive license to exercise all rights associated with the # contents of this file for any purpose whatsoever. # No rights are reserved. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # =================================================================== """Self-test for hash modules""" __revision__ = "$Id$" def get_tests(config={}): tests = [] from Cryptodome.SelfTest.Hash import test_HMAC; tests += test_HMAC.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_CMAC; tests += test_CMAC.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_MD2; tests += test_MD2.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_MD4; tests += test_MD4.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_MD5; tests += test_MD5.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_RIPEMD160; tests += test_RIPEMD160.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHA1; tests += test_SHA1.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHA256; tests += test_SHA256.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHA3_224; tests += test_SHA3_224.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHA3_256; tests += test_SHA3_256.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHA3_384; tests += test_SHA3_384.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHA3_512; tests += test_SHA3_512.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_keccak; tests += test_keccak.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHAKE; tests += test_SHAKE.get_tests(config=config) try: from Cryptodome.SelfTest.Hash import test_SHA224; tests += test_SHA224.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHA384; tests += test_SHA384.get_tests(config=config) from Cryptodome.SelfTest.Hash import test_SHA512; tests += test_SHA512.get_tests(config=config) except ImportError: import sys sys.stderr.write("SelfTest: warning: not testing SHA224/SHA384/SHA512 modules (not available)\n") from Cryptodome.SelfTest.Hash import test_BLAKE2; tests += test_BLAKE2.get_tests(config=config) return tests if __name__ == '__main__': import unittest suite = lambda: unittest.TestSuite(get_tests()) unittest.main(defaultTest='suite') # vim:set ts=4 sw=4 sts=4 expandtab:
vbannai/neutron
refs/heads/master
neutron/plugins/vmware/nsxlib/versioning.py
36
# Copyright 2014 VMware, Inc. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import inspect from neutron.plugins.vmware.api_client import exception DEFAULT_VERSION = -1 def versioned(func_table): def versioned_function(wrapped_func): func_name = wrapped_func.__name__ def dispatch_versioned_function(cluster, *args, **kwargs): # Call the wrapper function, in case we need to # run validation checks regarding versions. It # should return the NSX version v = (wrapped_func(cluster, *args, **kwargs) or cluster.api_client.get_version()) func = get_function_by_version(func_table, func_name, v) func_kwargs = kwargs arg_spec = inspect.getargspec(func) if not arg_spec.keywords and not arg_spec.varargs: # drop args unknown to function from func_args arg_set = set(func_kwargs.keys()) for arg in arg_set - set(arg_spec.args): del func_kwargs[arg] # NOTE(salvatore-orlando): shall we fail here if a required # argument is not passed, or let the called function raise? return func(cluster, *args, **func_kwargs) return dispatch_versioned_function return versioned_function def get_function_by_version(func_table, func_name, ver): if ver: if ver.major not in func_table[func_name]: major = max(func_table[func_name].keys()) minor = max(func_table[func_name][major].keys()) if major > ver.major: raise NotImplementedError(_("Operation may not be supported")) else: major = ver.major minor = ver.minor if ver.minor not in func_table[func_name][major]: minor = DEFAULT_VERSION return func_table[func_name][major][minor] else: msg = _('NSX version is not set. Unable to complete request ' 'correctly. Check log for NSX communication errors.') raise exception.ServiceUnavailable(message=msg)
fnouama/intellij-community
refs/heads/master
python/testData/quickFixes/PyAugmentAssignmentQuickFixTest/simple.py
83
<weak_warning descr="Assignment can be replaced with augmented assignment">var = <caret>var + 3</weak_warning>
animekita/selvbetjening
refs/heads/master
selvbetjening/sadmin2/templatetags/sadmin2_menu.py
1
from django import template from django.core.urlresolvers import reverse register = template.Library() @register.simple_tag(takes_context=True) def sadmin2_emit_name(context, menu_item): if 'name' in menu_item: return menu_item['name'] return menu_item['name_callback'](context) @register.simple_tag(takes_context=True) def sadmin2_emit_url(context, menu_item): if 'url' in menu_item: return reverse(menu_item['url']) if 'url_callback' in menu_item: return menu_item['url_callback'](context) return '#' @register.assignment_tag(takes_context=True) def sadmin2_hide_item(context, menu_item): return 'hide' in menu_item and menu_item['hide'](context)
jackrzhang/zulip
refs/heads/master
tools/setup/__init__.py
12133432
52-41-4d/fs-generic
refs/heads/master
flowexport/__init__.py
12133432
GheRivero/ansible
refs/heads/devel
test/units/modules/network/iosxr/__init__.py
12133432
rouault/Quantum-GIS
refs/heads/master
python/plugins/processing/algs/saga/ext/__init__.py
12133432
firmanm/ansible
refs/heads/devel
lib/ansible/plugins/shell/powershell.py
35
# (c) 2014, Chris Church <[email protected]> # # This file is part of Ansible. # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type import base64 import os import re import random import shlex import time from ansible.utils.unicode import to_bytes, to_unicode _common_args = ['PowerShell', '-NoProfile', '-NonInteractive', '-ExecutionPolicy', 'Unrestricted'] # Primarily for testing, allow explicitly specifying PowerShell version via # an environment variable. _powershell_version = os.environ.get('POWERSHELL_VERSION', None) if _powershell_version: _common_args = ['PowerShell', '-Version', _powershell_version] + _common_args[1:] class ShellModule(object): def env_prefix(self, **kwargs): return '' def join_path(self, *args): parts = [] for arg in args: arg = self._unquote(arg).replace('/', '\\') parts.extend([a for a in arg.split('\\') if a]) path = '\\'.join(parts) if path.startswith('~'): return path return '"%s"' % path def path_has_trailing_slash(self, path): # Allow Windows paths to be specified using either slash. path = self._unquote(path) return path.endswith('/') or path.endswith('\\') def chmod(self, mode, path): return '' def remove(self, path, recurse=False): path = self._escape(self._unquote(path)) if recurse: return self._encode_script('''Remove-Item "%s" -Force -Recurse;''' % path) else: return self._encode_script('''Remove-Item "%s" -Force;''' % path) def mkdtemp(self, basefile, system=False, mode=None): basefile = self._escape(self._unquote(basefile)) # FIXME: Support system temp path! return self._encode_script('''(New-Item -Type Directory -Path $env:temp -Name "%s").FullName | Write-Host -Separator '';''' % basefile) def expand_user(self, user_home_path): # PowerShell only supports "~" (not "~username"). Resolve-Path ~ does # not seem to work remotely, though by default we are always starting # in the user's home directory. user_home_path = self._unquote(user_home_path) if user_home_path == '~': script = 'Write-Host (Get-Location).Path' elif user_home_path.startswith('~\\'): script = 'Write-Host ((Get-Location).Path + "%s")' % self._escape(user_home_path[1:]) else: script = 'Write-Host "%s"' % self._escape(user_home_path) return self._encode_script(script) def checksum(self, path, *args, **kwargs): path = self._escape(self._unquote(path)) script = ''' If (Test-Path -PathType Leaf "%(path)s") { $sp = new-object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider; $fp = [System.IO.File]::Open("%(path)s", [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read); [System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower(); $fp.Dispose(); } ElseIf (Test-Path -PathType Container "%(path)s") { Write-Host "3"; } Else { Write-Host "1"; } ''' % dict(path=path) return self._encode_script(script) def build_module_command(self, env_string, shebang, cmd, rm_tmp=None): cmd_parts = shlex.split(to_bytes(cmd), posix=False) cmd_parts = map(to_unicode, cmd_parts) if shebang and shebang.lower() == '#!powershell': if not self._unquote(cmd_parts[0]).lower().endswith('.ps1'): cmd_parts[0] = '"%s.ps1"' % self._unquote(cmd_parts[0]) cmd_parts.insert(0, '&') elif shebang and shebang.startswith('#!'): cmd_parts.insert(0, shebang[2:]) script = ''' Try { %s } Catch { $_obj = @{ failed = $true } If ($_.Exception.GetType) { $_obj.Add('msg', $_.Exception.Message) } Else { $_obj.Add('msg', $_.ToString()) } If ($_.InvocationInfo.PositionMessage) { $_obj.Add('exception', $_.InvocationInfo.PositionMessage) } ElseIf ($_.ScriptStackTrace) { $_obj.Add('exception', $_.ScriptStackTrace) } Try { $_obj.Add('error_record', ($_ | ConvertTo-Json | ConvertFrom-Json)) } Catch { } Echo $_obj | ConvertTo-Json -Compress -Depth 99 Exit 1 } ''' % (' '.join(cmd_parts)) if rm_tmp: rm_tmp = self._escape(self._unquote(rm_tmp)) rm_cmd = 'Remove-Item "%s" -Force -Recurse -ErrorAction SilentlyContinue' % rm_tmp script = '%s\nFinally { %s }' % (script, rm_cmd) return self._encode_script(script) def _unquote(self, value): '''Remove any matching quotes that wrap the given value.''' value = to_unicode(value or '') m = re.match(r'^\s*?\'(.*?)\'\s*?$', value) if m: return m.group(1) m = re.match(r'^\s*?"(.*?)"\s*?$', value) if m: return m.group(1) return value def _escape(self, value, include_vars=False): '''Return value escaped for use in PowerShell command.''' # http://www.techotopia.com/index.php/Windows_PowerShell_1.0_String_Quoting_and_Escape_Sequences # http://stackoverflow.com/questions/764360/a-list-of-string-replacements-in-python subs = [('\n', '`n'), ('\r', '`r'), ('\t', '`t'), ('\a', '`a'), ('\b', '`b'), ('\f', '`f'), ('\v', '`v'), ('"', '`"'), ('\'', '`\''), ('`', '``'), ('\x00', '`0')] if include_vars: subs.append(('$', '`$')) pattern = '|'.join('(%s)' % re.escape(p) for p, s in subs) substs = [s for p, s in subs] replace = lambda m: substs[m.lastindex - 1] return re.sub(pattern, replace, value) def _encode_script(self, script, as_list=False, strict_mode=True): '''Convert a PowerShell script to a single base64-encoded command.''' script = to_unicode(script) if strict_mode: script = u'Set-StrictMode -Version Latest\r\n%s' % script script = '\n'.join([x.strip() for x in script.splitlines() if x.strip()]) encoded_script = base64.b64encode(script.encode('utf-16-le')) cmd_parts = _common_args + ['-EncodedCommand', encoded_script] if as_list: return cmd_parts return ' '.join(cmd_parts)
sumit4iit/django-guardian
refs/heads/master
guardian/backends.py
12
from django.db import models from guardian.conf import settings from guardian.exceptions import WrongAppError from guardian.core import ObjectPermissionChecker from guardian.models import User class ObjectPermissionBackend(object): supports_object_permissions = True supports_anonymous_user = True supports_inactive_user = True def authenticate(self, username, password): return None def has_perm(self, user_obj, perm, obj=None): """ Returns ``True`` if given ``user_obj`` has ``perm`` for ``obj``. If no ``obj`` is given, ``False`` is returned. .. note:: Remember, that if user is not *active*, all checks would return ``False``. Main difference between Django's ``ModelBackend`` is that we can pass ``obj`` instance here and ``perm`` doesn't have to contain ``app_label`` as it can be retrieved from given ``obj``. **Inactive user support** If user is authenticated but inactive at the same time, all checks always returns ``False``. """ # Backend checks only object permissions if obj is None: return False # Backend checks only permissions for Django models if not isinstance(obj, models.Model): return False # This is how we support anonymous users - simply try to retrieve User # instance and perform checks for that predefined user if not user_obj.is_authenticated(): user_obj = User.objects.get(pk=settings.ANONYMOUS_USER_ID) # Do not check any further if user is not active if not user_obj.is_active: return False if len(perm.split('.')) > 1: app_label, perm = perm.split('.') if app_label != obj._meta.app_label: raise WrongAppError("Passed perm has app label of '%s' and " "given obj has '%s'" % (app_label, obj._meta.app_label)) check = ObjectPermissionChecker(user_obj) return check.has_perm(perm, obj)
mickburgs/taxi-sam
refs/heads/master
node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
1824
# Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """gypd output module This module produces gyp input as its output. Output files are given the .gypd extension to avoid overwriting the .gyp files that they are generated from. Internal references to .gyp files (such as those found in "dependencies" sections) are not adjusted to point to .gypd files instead; unlike other paths, which are relative to the .gyp or .gypd file, such paths are relative to the directory from which gyp was run to create the .gypd file. This generator module is intended to be a sample and a debugging aid, hence the "d" for "debug" in .gypd. It is useful to inspect the results of the various merges, expansions, and conditional evaluations performed by gyp and to see a representation of what would be fed to a generator module. It's not advisable to rename .gypd files produced by this module to .gyp, because they will have all merges, expansions, and evaluations already performed and the relevant constructs not present in the output; paths to dependencies may be wrong; and various sections that do not belong in .gyp files such as such as "included_files" and "*_excluded" will be present. Output will also be stripped of comments. This is not intended to be a general-purpose gyp pretty-printer; for that, you probably just want to run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip comments but won't do all of the other things done to this module's output. The specific formatting of the output generated by this module is subject to change. """ import gyp.common import errno import os import pprint # These variables should just be spit back out as variable references. _generator_identity_variables = [ 'CONFIGURATION_NAME', 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 'INTERMEDIATE_DIR', 'LIB_DIR', 'PRODUCT_DIR', 'RULE_INPUT_ROOT', 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 'RULE_INPUT_NAME', 'RULE_INPUT_PATH', 'SHARED_INTERMEDIATE_DIR', 'SHARED_LIB_DIR', 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', ] # gypd doesn't define a default value for OS like many other generator # modules. Specify "-D OS=whatever" on the command line to provide a value. generator_default_variables = { } # gypd supports multiple toolsets generator_supports_multiple_toolsets = True # TODO(mark): This always uses <, which isn't right. The input module should # notify the generator to tell it which phase it is operating in, and this # module should use < for the early phase and then switch to > for the late # phase. Bonus points for carrying @ back into the output too. for v in _generator_identity_variables: generator_default_variables[v] = '<(%s)' % v def GenerateOutput(target_list, target_dicts, data, params): output_files = {} for qualified_target in target_list: [input_file, target] = \ gyp.common.ParseQualifiedTarget(qualified_target)[0:2] if input_file[-4:] != '.gyp': continue input_file_stem = input_file[:-4] output_file = input_file_stem + params['options'].suffix + '.gypd' if not output_file in output_files: output_files[output_file] = input_file for output_file, input_file in output_files.iteritems(): output = open(output_file, 'w') pprint.pprint(data[input_file], output) output.close()
Distrotech/reportlab
refs/heads/master
tests/test_pdfgen_callback.py
15
#!/bin/env python #Copyright ReportLab Europe Ltd. 2000-2012 #see license.txt for license details __doc__='checks callbacks work' __version__=''' $Id$ ''' from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, printLocation setOutDir(__name__) import unittest from reportlab.pdfgen.canvas import Canvas from tests.test_pdfgen_general import makeDocument _PAGE_COUNT = 0 class CallBackTestCase(unittest.TestCase): "checks it gets called" def callMe(self, pageNo): self.pageCount = pageNo def test0(self): "Make a PDFgen document with most graphics features" self.pageCount = 0 makeDocument(outputfile('test_pdfgen_callback.pdf'), pageCallBack=self.callMe) #no point saving it! assert self.pageCount >= 7, 'page count not called!' def makeSuite(): return makeSuiteForClasses(CallBackTestCase) #noruntests if __name__ == "__main__": unittest.TextTestRunner().run(makeSuite()) printLocation()
alaski/nova
refs/heads/master
nova/virt/block_device.py
2
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import itertools import operator from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_utils import excutils import six from nova import block_device import nova.conf from nova import exception from nova.i18n import _LE from nova.i18n import _LI from nova.i18n import _LW from nova.volume import encryptors CONF = nova.conf.CONF LOG = logging.getLogger(__name__) class _NotTransformable(Exception): pass class _InvalidType(_NotTransformable): pass def update_db(method): @functools.wraps(method) def wrapped(obj, context, *args, **kwargs): try: ret_val = method(obj, context, *args, **kwargs) finally: obj.save() return ret_val return wrapped def _get_volume_create_az_value(instance): """Determine az to use when creating a volume Uses the cinder.cross_az_attach config option to determine the availability zone value to use when creating a volume. :param nova.objects.Instance instance: The instance for which the volume will be created and attached. :returns: The availability_zone value to pass to volume_api.create """ # If we're allowed to attach a volume in any AZ to an instance in any AZ, # then we don't care what AZ the volume is in so don't specify anything. if CONF.cinder.cross_az_attach: return None # Else the volume has to be in the same AZ as the instance otherwise we # fail. If the AZ is not in Cinder the volume create will fail. But on the # other hand if the volume AZ and instance AZ don't match and # cross_az_attach is False, then volume_api.check_attach will fail too, so # we can't really win. :) # TODO(mriedem): It would be better from a UX perspective if we could do # some validation in the API layer such that if we know we're going to # specify the AZ when creating the volume and that AZ is not in Cinder, we # could fail the boot from volume request early with a 400 rather than # fail to build the instance on the compute node which results in a # NoValidHost error. return instance.availability_zone class DriverBlockDevice(dict): """A dict subclass that represents block devices used by the virt layer. Uses block device objects internally to do the database access. _fields and _legacy_fields class attributes present a set of fields that are expected on a certain DriverBlockDevice type. We may have more legacy versions in the future. If an attribute access is attempted for a name that is found in the _proxy_as_attr set, it will be proxied to the underlying object. This allows us to access stuff that is not part of the data model that all drivers understand. The save() method allows us to update the database using the underlying object. _update_on_save class attribute dictionary keeps the following mapping: {'object field name': 'driver dict field name (or None if same)'} These fields will be updated on the internal object, from the values in the dict, before the actual database update is done. """ _fields = set() _legacy_fields = set() _proxy_as_attr = set() _update_on_save = {'disk_bus': None, 'device_name': None, 'device_type': None} def __init__(self, bdm): self.__dict__['_bdm_obj'] = bdm if self._bdm_obj.no_device: raise _NotTransformable() self.update({field: None for field in self._fields}) self._transform() def __getattr__(self, name): if name in self._proxy_as_attr: return getattr(self._bdm_obj, name) else: super(DriverBlockDevice, self).__getattr__(name) def __setattr__(self, name, value): if name in self._proxy_as_attr: return setattr(self._bdm_obj, name, value) else: super(DriverBlockDevice, self).__setattr__(name, value) def _transform(self): """Transform bdm to the format that is passed to drivers.""" raise NotImplementedError() def legacy(self): """Basic legacy transformation. Basic method will just drop the fields that are not in _legacy_fields set. Override this in subclass if needed. """ return {key: self.get(key) for key in self._legacy_fields} def attach(self, **kwargs): """Make the device available to be used by VMs. To be overridden in subclasses with the connecting logic for the type of device the subclass represents. """ raise NotImplementedError() def save(self): for attr_name, key_name in six.iteritems(self._update_on_save): lookup_name = key_name or attr_name if self[lookup_name] != getattr(self._bdm_obj, attr_name): setattr(self._bdm_obj, attr_name, self[lookup_name]) self._bdm_obj.save() class DriverSwapBlockDevice(DriverBlockDevice): _fields = set(['device_name', 'swap_size', 'disk_bus']) _legacy_fields = _fields - set(['disk_bus']) _update_on_save = {'disk_bus': None, 'device_name': None} def _transform(self): if not block_device.new_format_is_swap(self._bdm_obj): raise _InvalidType self.update({ 'device_name': self._bdm_obj.device_name, 'swap_size': self._bdm_obj.volume_size or 0, 'disk_bus': self._bdm_obj.disk_bus }) class DriverEphemeralBlockDevice(DriverBlockDevice): _new_only_fields = set(['disk_bus', 'device_type', 'guest_format']) _fields = set(['device_name', 'size']) | _new_only_fields _legacy_fields = (_fields - _new_only_fields | set(['num', 'virtual_name'])) def _transform(self): if not block_device.new_format_is_ephemeral(self._bdm_obj): raise _InvalidType self.update({ 'device_name': self._bdm_obj.device_name, 'size': self._bdm_obj.volume_size or 0, 'disk_bus': self._bdm_obj.disk_bus, 'device_type': self._bdm_obj.device_type, 'guest_format': self._bdm_obj.guest_format }) def legacy(self, num=0): legacy_bdm = super(DriverEphemeralBlockDevice, self).legacy() legacy_bdm['num'] = num legacy_bdm['virtual_name'] = 'ephemeral' + str(num) return legacy_bdm class DriverVolumeBlockDevice(DriverBlockDevice): _legacy_fields = set(['connection_info', 'mount_device', 'delete_on_termination']) _new_fields = set(['guest_format', 'device_type', 'disk_bus', 'boot_index']) _fields = _legacy_fields | _new_fields _valid_source = 'volume' _valid_destination = 'volume' _proxy_as_attr = set(['volume_size', 'volume_id']) _update_on_save = {'disk_bus': None, 'device_name': 'mount_device', 'device_type': None} def _transform(self): if (not self._bdm_obj.source_type == self._valid_source or not self._bdm_obj.destination_type == self._valid_destination): raise _InvalidType self.update( {k: v for k, v in six.iteritems(self._bdm_obj) if k in self._new_fields | set(['delete_on_termination'])} ) self['mount_device'] = self._bdm_obj.device_name try: self['connection_info'] = jsonutils.loads( self._bdm_obj.connection_info) except TypeError: self['connection_info'] = None def _preserve_multipath_id(self, connection_info): if self['connection_info'] and 'data' in self['connection_info']: if 'multipath_id' in self['connection_info']['data']: connection_info['data']['multipath_id'] =\ self['connection_info']['data']['multipath_id'] LOG.info(_LI('preserve multipath_id %s'), connection_info['data']['multipath_id']) @update_db def attach(self, context, instance, volume_api, virt_driver, do_check_attach=True, do_driver_attach=False, **kwargs): volume = volume_api.get(context, self.volume_id) if do_check_attach: volume_api.check_attach(context, volume, instance=instance) volume_id = volume['id'] context = context.elevated() connector = virt_driver.get_volume_connector(instance) connection_info = volume_api.initialize_connection(context, volume_id, connector) if 'serial' not in connection_info: connection_info['serial'] = self.volume_id self._preserve_multipath_id(connection_info) # If do_driver_attach is False, we will attach a volume to an instance # at boot time. So actual attach is done by instance creation code. if do_driver_attach: encryption = encryptors.get_encryption_metadata( context, volume_api, volume_id, connection_info) try: virt_driver.attach_volume( context, connection_info, instance, self['mount_device'], disk_bus=self['disk_bus'], device_type=self['device_type'], encryption=encryption) except Exception: with excutils.save_and_reraise_exception(): LOG.exception(_LE("Driver failed to attach volume " "%(volume_id)s at %(mountpoint)s"), {'volume_id': volume_id, 'mountpoint': self['mount_device']}, instance=instance) volume_api.terminate_connection(context, volume_id, connector) self['connection_info'] = connection_info if self.volume_size is None: self.volume_size = volume.get('size') mode = 'rw' if 'data' in connection_info: mode = connection_info['data'].get('access_mode', 'rw') if volume['attach_status'] == "detached": # NOTE(mriedem): save our current state so connection_info is in # the database before the volume status goes to 'in-use' because # after that we can detach and connection_info is required for # detach. self.save() try: volume_api.attach(context, volume_id, instance.uuid, self['mount_device'], mode=mode) except Exception: with excutils.save_and_reraise_exception(): if do_driver_attach: try: virt_driver.detach_volume(connection_info, instance, self['mount_device'], encryption=encryption) except Exception: LOG.warning(_LW("Driver failed to detach volume " "%(volume_id)s at %(mount_point)s."), {'volume_id': volume_id, 'mount_point': self['mount_device']}, exc_info=True, instance=instance) volume_api.terminate_connection(context, volume_id, connector) # Cinder-volume might have completed volume attach. So # we should detach the volume. If the attach did not # happen, the detach request will be ignored. volume_api.detach(context, volume_id) @update_db def refresh_connection_info(self, context, instance, volume_api, virt_driver): # NOTE (ndipanov): A no-op if there is no connection info already if not self['connection_info']: return connector = virt_driver.get_volume_connector(instance) connection_info = volume_api.initialize_connection(context, self.volume_id, connector) if 'serial' not in connection_info: connection_info['serial'] = self.volume_id self._preserve_multipath_id(connection_info) self['connection_info'] = connection_info def save(self): # NOTE(ndipanov): we might want to generalize this by adding it to the # _update_on_save and adding a transformation function. try: connection_info_string = jsonutils.dumps( self.get('connection_info')) if connection_info_string != self._bdm_obj.connection_info: self._bdm_obj.connection_info = connection_info_string except TypeError: pass super(DriverVolumeBlockDevice, self).save() def _call_wait_func(self, context, wait_func, volume_api, volume_id): try: wait_func(context, volume_id) except exception.VolumeNotCreated: with excutils.save_and_reraise_exception(): if self['delete_on_termination']: try: volume_api.delete(context, volume_id) except Exception as exc: LOG.warning( _LW('Failed to delete volume: %(volume_id)s ' 'due to %(exc)s'), {'volume_id': volume_id, 'exc': exc}) class DriverSnapshotBlockDevice(DriverVolumeBlockDevice): _valid_source = 'snapshot' _proxy_as_attr = set(['volume_size', 'volume_id', 'snapshot_id']) def attach(self, context, instance, volume_api, virt_driver, wait_func=None, do_check_attach=True): if not self.volume_id: av_zone = _get_volume_create_az_value(instance) snapshot = volume_api.get_snapshot(context, self.snapshot_id) vol = volume_api.create(context, self.volume_size, '', '', snapshot, availability_zone=av_zone) if wait_func: self._call_wait_func(context, wait_func, volume_api, vol['id']) self.volume_id = vol['id'] # Call the volume attach now super(DriverSnapshotBlockDevice, self).attach( context, instance, volume_api, virt_driver, do_check_attach=do_check_attach) class DriverImageBlockDevice(DriverVolumeBlockDevice): _valid_source = 'image' _proxy_as_attr = set(['volume_size', 'volume_id', 'image_id']) def attach(self, context, instance, volume_api, virt_driver, wait_func=None, do_check_attach=True): if not self.volume_id: av_zone = _get_volume_create_az_value(instance) vol = volume_api.create(context, self.volume_size, '', '', image_id=self.image_id, availability_zone=av_zone) if wait_func: self._call_wait_func(context, wait_func, volume_api, vol['id']) self.volume_id = vol['id'] super(DriverImageBlockDevice, self).attach( context, instance, volume_api, virt_driver, do_check_attach=do_check_attach) class DriverBlankBlockDevice(DriverVolumeBlockDevice): _valid_source = 'blank' _proxy_as_attr = set(['volume_size', 'volume_id', 'image_id']) def attach(self, context, instance, volume_api, virt_driver, wait_func=None, do_check_attach=True): if not self.volume_id: vol_name = instance.uuid + '-blank-vol' av_zone = _get_volume_create_az_value(instance) vol = volume_api.create(context, self.volume_size, vol_name, '', availability_zone=av_zone) if wait_func: self._call_wait_func(context, wait_func, volume_api, vol['id']) self.volume_id = vol['id'] super(DriverBlankBlockDevice, self).attach( context, instance, volume_api, virt_driver, do_check_attach=do_check_attach) def _convert_block_devices(device_type, block_device_mapping): devices = [] for bdm in block_device_mapping: try: devices.append(device_type(bdm)) except _NotTransformable: pass return devices convert_swap = functools.partial(_convert_block_devices, DriverSwapBlockDevice) convert_ephemerals = functools.partial(_convert_block_devices, DriverEphemeralBlockDevice) convert_volumes = functools.partial(_convert_block_devices, DriverVolumeBlockDevice) convert_snapshots = functools.partial(_convert_block_devices, DriverSnapshotBlockDevice) convert_images = functools.partial(_convert_block_devices, DriverImageBlockDevice) convert_blanks = functools.partial(_convert_block_devices, DriverBlankBlockDevice) def convert_all_volumes(*volume_bdms): source_volume = convert_volumes(volume_bdms) source_snapshot = convert_snapshots(volume_bdms) source_image = convert_images(volume_bdms) source_blank = convert_blanks(volume_bdms) return [vol for vol in itertools.chain(source_volume, source_snapshot, source_image, source_blank)] def convert_volume(volume_bdm): try: return convert_all_volumes(volume_bdm)[0] except IndexError: pass def attach_block_devices(block_device_mapping, *attach_args, **attach_kwargs): def _log_and_attach(bdm): instance = attach_args[1] if bdm.get('volume_id'): LOG.info(_LI('Booting with volume %(volume_id)s at ' '%(mountpoint)s'), {'volume_id': bdm.volume_id, 'mountpoint': bdm['mount_device']}, instance=instance) elif bdm.get('snapshot_id'): LOG.info(_LI('Booting with volume snapshot %(snapshot_id)s at ' '%(mountpoint)s'), {'snapshot_id': bdm.snapshot_id, 'mountpoint': bdm['mount_device']}, instance=instance) elif bdm.get('image_id'): LOG.info(_LI('Booting with volume-backed-image %(image_id)s at ' '%(mountpoint)s'), {'image_id': bdm.image_id, 'mountpoint': bdm['mount_device']}, instance=instance) else: LOG.info(_LI('Booting with blank volume at %(mountpoint)s'), {'mountpoint': bdm['mount_device']}, instance=instance) bdm.attach(*attach_args, **attach_kwargs) map(_log_and_attach, block_device_mapping) return block_device_mapping def refresh_conn_infos(block_device_mapping, *refresh_args, **refresh_kwargs): map(operator.methodcaller('refresh_connection_info', *refresh_args, **refresh_kwargs), block_device_mapping) return block_device_mapping def legacy_block_devices(block_device_mapping): bdms = [bdm.legacy() for bdm in block_device_mapping] # Re-enumerate ephemeral devices if all(isinstance(bdm, DriverEphemeralBlockDevice) for bdm in block_device_mapping): for i, dev in enumerate(bdms): dev['virtual_name'] = dev['virtual_name'][:-1] + str(i) dev['num'] = i return bdms def get_swap(transformed_list): """Get the swap device out of the list context. The block_device_info needs swap to be a single device, not a list - otherwise this is a no-op. """ if not all(isinstance(device, DriverSwapBlockDevice) or 'swap_size' in device for device in transformed_list): return None try: return transformed_list.pop() except IndexError: return None _IMPLEMENTED_CLASSES = (DriverSwapBlockDevice, DriverEphemeralBlockDevice, DriverVolumeBlockDevice, DriverSnapshotBlockDevice, DriverImageBlockDevice, DriverBlankBlockDevice) def is_implemented(bdm): for cls in _IMPLEMENTED_CLASSES: try: cls(bdm) return True except _NotTransformable: pass return False def is_block_device_mapping(bdm): return (bdm.source_type in ('image', 'volume', 'snapshot', 'blank') and bdm.destination_type == 'volume' and is_implemented(bdm))
olemis/sqlalchemy
refs/heads/master
examples/sharding/__init__.py
30
"""A basic example of using the SQLAlchemy Sharding API. Sharding refers to horizontally scaling data across multiple databases. The basic components of a "sharded" mapping are: * multiple databases, each assigned a 'shard id' * a function which can return a single shard id, given an instance to be saved; this is called "shard_chooser" * a function which can return a list of shard ids which apply to a particular instance identifier; this is called "id_chooser". If it returns all shard ids, all shards will be searched. * a function which can return a list of shard ids to try, given a particular Query ("query_chooser"). If it returns all shard ids, all shards will be queried and the results joined together. In this example, four sqlite databases will store information about weather data on a database-per-continent basis. We provide example shard_chooser, id_chooser and query_chooser functions. The query_chooser illustrates inspection of the SQL expression element in order to attempt to determine a single shard being requested. The construction of generic sharding routines is an ambitious approach to the issue of organizing instances among multiple databases. For a more plain-spoken alternative, the "distinct entity" approach is a simple method of assigning objects to different tables (and potentially database nodes) in an explicit way - described on the wiki at `EntityName <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/EntityName>`_. .. autosource:: """
lamby/live-studio
refs/heads/master
contrib/django/conf/project_template/__init__.py
12133432
bumper-app/bumper-bianca
refs/heads/master
bianca/orm/__init__.py
12133432
lwiecek/django
refs/heads/master
tests/admin_scripts/management/commands/__init__.py
12133432
DamienIrving/ocean-analysis
refs/heads/master
data_processing/calc_annual.py
1
""" Filename: calc_annual.py Author: Damien Irving, [email protected] Description: Apply annual timescale smoothing """ # Import general Python modules import sys, os, pdb import argparse import iris import cmdline_provenance as cmdprov # Import my modules cwd = os.getcwd() repo_dir = '/' for directory in cwd.split('/')[1:]: repo_dir = os.path.join(repo_dir, directory) if directory == 'ocean-analysis': break modules_dir = os.path.join(repo_dir, 'modules') sys.path.append(modules_dir) try: import timeseries except ImportError: raise ImportError('Must run this script from anywhere within the ocean-analysis git repo') # Define functions def main(inargs): """Run the program.""" cube = iris.load_cube(inargs.infile, inargs.var) cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk) log = cmdprov.new_log(infile_history={inargs.infile: cube.attributes['history']}, git_repo=repo_dir) cube.attributes['history'] = log #assert cube.data.dtype == numpy.float32 #iris.save(cube, outfile, netcdf_format='NETCDF3_CLASSIC') iris.save(cube, inargs.outfile) if __name__ == '__main__': extra_info =""" example: author: Damien Irving, [email protected] """ description='Apply annual timescale smoothing' parser = argparse.ArgumentParser(description=description, epilog=extra_info, argument_default=argparse.SUPPRESS, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument("infile", type=str, help="Input file") parser.add_argument("var", type=str, help="Variable name") parser.add_argument("outfile", type=str, help="Output file") parser.add_argument("--chunk", type=int, default=None, help="Integer number of time steps used in chunking (for monthly data this would be a multiple of 12)") args = parser.parse_args() main(args)
40223247/2015cdb_0622
refs/heads/master
static/Brython3.1.1-20150328-091302/Lib/browser/websocket.py
618
from browser import window import javascript WebSocket = javascript.JSConstructor(window.WebSocket)
jeffchao/xen-3.3-tcg
refs/heads/master
tools/python/build/lib.linux-i686-2.6/xen/xend/XendBase.py
8
#!/usr/bin/python #============================================================================ # This library is free software; you can redistribute it and/or # modify it under the terms of version 2.1 of the GNU Lesser General Public # License as published by the Free Software Foundation. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #============================================================================ # Copyright (C) 2007 Tom Wilkie <[email protected]> #============================================================================ """ Base class for all XenAPI classes """ from xen.xend.XendError import * from xen.xend import XendAPIStore class XendBase: # # These functions describe the object, and what is exposed via the API # def getClass(self): return "Base" def getAttrRO(self): return ['uuid'] def getAttrRW(self): return [] def getAttrInst(self): return [] def getMethods(self): return ["get_record"] def getFuncs(self): return ["get_all", "get_by_uuid", "get_all_records"] getClass = classmethod(getClass) getAttrRO = classmethod(getAttrRO) getAttrRW = classmethod(getAttrRW) getAttrInst = classmethod(getAttrInst) getMethods = classmethod(getMethods) getFuncs = classmethod(getFuncs) def __init__(self, uuid, record): self.__uuid = uuid # First check this class implements all the correct methods: for attr_ro in self.getAttrRO() + self.getAttrRW(): if not hasattr(self, "get_%s" % attr_ro): raise ImplementationError(self.getClass(), "get_%s" % attr_ro) for attr_rw in self.getAttrRW(): if not hasattr(self, "set_%s" % attr_rw): raise ImplementationError(self.getClass(), "set_%s" % attr_rw) for method in self.getMethods(): if not hasattr(self, method): raise ImplementationError(self.getClass(), method) for func in self.getFuncs(): if not hasattr(self.__class__, func): raise ImplementationError(self.getClass(), func) # Next check that the class is being created with the correct # parameters if not isinstance(record, dict): raise CreateUnspecifiedAttributeError( "record" , self.getClass()) for attr_inst in self.getAttrInst(): if attr_inst not in record: raise CreateUnspecifiedAttributeError( attr_inst, self.getClass()) setattr(self, attr_inst, record[attr_inst]) # Finally register it XendAPIStore.register(uuid, self.getClass(), self) def destroy(self): XendAPIStore.deregister(self.get_uuid(), self.getClass()) def get_uuid(self): return self.__uuid def get_record(self): keys = self.getAttrRO() + self.getAttrRW() return dict([(key, getattr(self, "get_%s" % key)()) for key in keys]) # # Class methods # def get_all(cls): return XendAPIStore.get_all_uuid(cls.getClass()) def get_by_uuid(cls, uuid): # Sanity check the uuid is one of us me = XendAPIStore.get(uuid, cls.getClass()) if me is not None and me.getClass() == cls.getClass(): # In OSS, ref == uuid return uuid else: raise "Big Error.. TODO!" def get_all_records(cls): return dict([(inst.get_uuid(), inst.get_record()) for inst in XendAPIStore.get_all(cls.getClass())]) get_all = classmethod(get_all) get_by_uuid = classmethod(get_by_uuid) get_all_records = classmethod(get_all_records)
Adel-Magebinary/odoo
refs/heads/8.0
addons/website_project/models/__init__.py
391
import website_project
tchellomello/home-assistant
refs/heads/dev
homeassistant/components/totalconnect/binary_sensor.py
14
"""Interfaces with TotalConnect sensors.""" import logging from homeassistant.components.binary_sensor import ( DEVICE_CLASS_DOOR, DEVICE_CLASS_GAS, DEVICE_CLASS_SMOKE, BinarySensorEntity, ) from .const import DOMAIN _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass, entry, async_add_entities) -> None: """Set up TotalConnect device sensors based on a config entry.""" sensors = [] client_locations = hass.data[DOMAIN][entry.entry_id].locations for location_id, location in client_locations.items(): for zone_id, zone in location.zones.items(): sensors.append(TotalConnectBinarySensor(zone_id, location_id, zone)) async_add_entities(sensors, True) class TotalConnectBinarySensor(BinarySensorEntity): """Represent an TotalConnect zone.""" def __init__(self, zone_id, location_id, zone): """Initialize the TotalConnect status.""" self._zone_id = zone_id self._location_id = location_id self._zone = zone self._name = self._zone.description self._unique_id = f"{location_id} {zone_id}" self._is_on = None self._is_tampered = None self._is_low_battery = None @property def unique_id(self): """Return the unique id.""" return self._unique_id @property def name(self): """Return the name of the device.""" return self._name def update(self): """Return the state of the device.""" self._is_tampered = self._zone.is_tampered() self._is_low_battery = self._zone.is_low_battery() if self._zone.is_faulted() or self._zone.is_triggered(): self._is_on = True else: self._is_on = False @property def is_on(self): """Return true if the binary sensor is on.""" return self._is_on @property def device_class(self): """Return the class of this device, from component DEVICE_CLASSES.""" if self._zone.is_type_security(): return DEVICE_CLASS_DOOR if self._zone.is_type_fire(): return DEVICE_CLASS_SMOKE if self._zone.is_type_carbon_monoxide(): return DEVICE_CLASS_GAS return None @property def device_state_attributes(self): """Return the state attributes.""" attributes = { "zone_id": self._zone_id, "location_id": self._location_id, "low_battery": self._is_low_battery, "tampered": self._is_tampered, } return attributes
tis-intern-apparel/ApparelStrategy
refs/heads/master
server/dialogue_system/backend/apis/__init__.py
1349
# -*- coding: utf-8 -*-
PaddlePaddle/models
refs/heads/develop
PaddleCV/video/application/video_tag/tsn_extractor.py
1
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve. # #Licensed under the Apache License, Version 2.0 (the "License"); #you may not use this file except in compliance with the License. #You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #Unless required by applicable law or agreed to in writing, software #distributed under the License is distributed on an "AS IS" BASIS, #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #See the License for the specific language governing permissions and #limitations under the License. import os import sys import time import logging import argparse import ast import numpy as np try: import cPickle as pickle except: import pickle import paddle.fluid as fluid from utils.config_utils import * import models from reader import get_reader from metrics import get_metrics from utils.utility import check_cuda from utils.utility import check_version logging.root.handlers = [] FORMAT = '[%(levelname)s: %(filename)s: %(lineno)4d]: %(message)s' logging.basicConfig(level=logging.DEBUG, format=FORMAT, stream=sys.stdout) logger = logging.getLogger(__name__) def parse_args(): parser = argparse.ArgumentParser() parser.add_argument( '--model_name', type=str, default='AttentionCluster', help='name of model to train.') parser.add_argument( '--config', type=str, default='configs/attention_cluster.txt', help='path to config file of model') parser.add_argument( '--use_gpu', type=ast.literal_eval, default=True, help='default use gpu.') parser.add_argument( '--weights', type=str, default=None, help='weight path, None to automatically download weights provided by Paddle.' ) parser.add_argument( '--batch_size', type=int, default=1, help='sample number in a batch for inference.') parser.add_argument( '--filelist', type=str, default='./data/TsnExtractor.list', help='path to inferenece data file lists file.') parser.add_argument( '--log_interval', type=int, default=1, help='mini-batch interval to log.') parser.add_argument( '--infer_topk', type=int, default=20, help='topk predictions to restore.') parser.add_argument( '--save_dir', type=str, default=os.path.join('data', 'tsn_features'), help='directory to store tsn feature results') parser.add_argument( '--video_path', type=str, default=None, help='directory to store results') args = parser.parse_args() return args def infer(args): # parse config config = parse_config(args.config) infer_config = merge_configs(config, 'infer', vars(args)) print_configs(infer_config, "Infer") infer_model = models.get_model( args.model_name, infer_config, mode='infer', is_videotag=True) infer_model.build_input(use_dataloader=False) infer_model.build_model() infer_feeds = infer_model.feeds() infer_outputs = infer_model.outputs() place = fluid.CUDAPlace(0) if args.use_gpu else fluid.CPUPlace() exe = fluid.Executor(place) exe.run(fluid.default_startup_program()) filelist = args.filelist or infer_config.INFER.filelist filepath = args.video_path or infer_config.INFER.get('filepath', '') if filepath != '': assert os.path.exists(filepath), "{} not exist.".format(filepath) else: assert os.path.exists(filelist), "{} not exist.".format(filelist) # get infer reader infer_reader = get_reader(args.model_name.upper(), 'infer', infer_config) if args.weights: assert os.path.exists( args.weights), "Given weight dir {} not exist.".format(args.weights) # if no weight files specified, download weights from paddle weights = args.weights or infer_model.get_weights() infer_model.load_test_weights(exe, weights, fluid.default_main_program()) infer_feeder = fluid.DataFeeder(place=place, feed_list=infer_feeds) fetch_list = infer_model.fetches() infer_metrics = get_metrics(args.model_name.upper(), 'infer', infer_config) infer_metrics.reset() if not os.path.isdir(args.save_dir): os.makedirs(args.save_dir) for infer_iter, data in enumerate(infer_reader()): data_feed_in = [items[:-1] for items in data] video_id = [items[-1] for items in data] bs = len(video_id) feature_outs = exe.run(fetch_list=fetch_list, feed=infer_feeder.feed(data_feed_in)) for i in range(bs): filename = video_id[i].split('/')[-1][:-4] np.save( os.path.join(args.save_dir, filename + '.npy'), feature_outs[0][i]) #shape: seg_num*feature_dim logger.info("Feature extraction End~") if __name__ == "__main__": args = parse_args() # check whether the installed paddle is compiled with GPU check_cuda(args.use_gpu) check_version() logger.info(args) infer(args)
sxjscience/tvm
refs/heads/master
tests/python/contrib/test_arm_compute_lib/test_reshape.py
2
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Arm Compute Library integration reshape tests.""" import numpy as np import tvm from tvm import relay from .infrastructure import ( skip_runtime_test, skip_codegen_test, build_and_run, verify, verify_codegen, ) from .infrastructure import Device def _get_model(input_shape, output_shape, dtype, var_names): """Return a model and any parameters it may have.""" a = relay.var(next(var_names), shape=input_shape, dtype=dtype) reshape = relay.reshape(a, output_shape) return reshape def _get_expected_codegen(input_shape, output_shape, dtype): node = { "op": "kernel", "name": "reshape", "inputs": [[0, 0, 0]], "attrs": { "num_inputs": "1", "num_outputs": "1", "newshape": [[str(s) for s in output_shape]], "shape": [[list(output_shape)]], "dtype": [[dtype]], "reverse": [["0"]], }, } input = { "op": "input", "name": "", "attrs": {"shape": [[list(input_shape)]], "dtype": [[dtype]]}, } return [input, node] def test_reshape(): Device.load("test_config.json") if skip_runtime_test(): return device = Device() np.random.seed(0) for dtype, low, high, atol, rtol in [ ("float32", -127, 128, 0.001, 0.001), ("uint8", 0, 255, 0, 0), ]: inputs = {"a": tvm.nd.array(np.random.uniform(low, high, (1, 1, 1, 1000)).astype(dtype))} for new_shape in [(1, 1000), (10, 10, 10)]: outputs = [] func = _get_model(inputs["a"].shape, new_shape, dtype, iter(inputs)) for acl in [False, True]: outputs.append(build_and_run(func, inputs, 1, None, device, enable_acl=acl)[0]) config = { "new shape": inputs["a"].shape, "shape": new_shape, "dtype": dtype, } verify(outputs, atol=1e-7, rtol=1e-7, config=config) def test_codegen_reshape(): if skip_codegen_test(): return shape = (1, 1, 1, 1000) inputs = {"a"} for dtype in ["float32", "uint8"]: for new_shape in [(1, 1000), (10, 10, 10)]: args = (shape, new_shape, dtype) func = _get_model(*args, iter(inputs)) exp_codegen = _get_expected_codegen(*args) verify_codegen(func, exp_codegen, 1) if __name__ == "__main__": test_reshape() test_codegen_reshape()
vaygr/ansible
refs/heads/devel
lib/ansible/plugins/action/fail.py
31
# (c) 2012-2014, Michael DeHaan <[email protected]> # (c) 2012, Dag Wieers <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.plugins.action import ActionBase class ActionModule(ActionBase): ''' Fail with custom message ''' TRANSFERS_FILES = False def run(self, tmp=None, task_vars=None): if task_vars is None: task_vars = dict() result = super(ActionModule, self).run(tmp, task_vars) del tmp # tmp no longer has any effect msg = 'Failed as requested from task' if self._task.args and 'msg' in self._task.args: msg = self._task.args.get('msg') result['failed'] = True result['msg'] = msg return result
40223236/2015cd_midterm_1
refs/heads/master
static/Brython3.1.1-20150328-091302/Lib/importlib/machinery.py
635
"""The machinery of importlib: finders, loaders, hooks, etc.""" import _imp from ._bootstrap import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES, OPTIMIZED_BYTECODE_SUFFIXES, #BYTECODE_SUFFIXES, EXTENSION_SUFFIXES) from ._bootstrap import BuiltinImporter from ._bootstrap import FrozenImporter from ._bootstrap import WindowsRegistryFinder from ._bootstrap import PathFinder from ._bootstrap import FileFinder from ._bootstrap import SourceFileLoader from ._bootstrap import SourcelessFileLoader from ._bootstrap import ExtensionFileLoader #def all_suffixes(): # """Returns a list of all recognized module suffixes for this process""" # return SOURCE_SUFFIXES + BYTECODE_SUFFIXES + EXTENSION_SUFFIXES
remybaranx/qtaste
refs/heads/master
tools/jython/lib/Lib/encodings/koi8_r.py
9
""" Python Character Mapping Codec generated from 'KOI8-R.TXT' with gencodec.py. Written by Marc-Andre Lemburg ([email protected]). (c) Copyright CNRI, All Rights Reserved. NO WARRANTY. (c) Copyright 2000 Guido van Rossum. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_map) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_map) class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return (Codec().encode,Codec().decode,StreamReader,StreamWriter) ### Decoding Map decoding_map = codecs.make_identity_dict(range(256)) decoding_map.update({ 0x0080: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL 0x0081: 0x2502, # BOX DRAWINGS LIGHT VERTICAL 0x0082: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x0083: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT 0x0084: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT 0x0085: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT 0x0086: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x0087: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x0088: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL 0x0089: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x008a: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x008b: 0x2580, # UPPER HALF BLOCK 0x008c: 0x2584, # LOWER HALF BLOCK 0x008d: 0x2588, # FULL BLOCK 0x008e: 0x258c, # LEFT HALF BLOCK 0x008f: 0x2590, # RIGHT HALF BLOCK 0x0090: 0x2591, # LIGHT SHADE 0x0091: 0x2592, # MEDIUM SHADE 0x0092: 0x2593, # DARK SHADE 0x0093: 0x2320, # TOP HALF INTEGRAL 0x0094: 0x25a0, # BLACK SQUARE 0x0095: 0x2219, # BULLET OPERATOR 0x0096: 0x221a, # SQUARE ROOT 0x0097: 0x2248, # ALMOST EQUAL TO 0x0098: 0x2264, # LESS-THAN OR EQUAL TO 0x0099: 0x2265, # GREATER-THAN OR EQUAL TO 0x009a: 0x00a0, # NO-BREAK SPACE 0x009b: 0x2321, # BOTTOM HALF INTEGRAL 0x009c: 0x00b0, # DEGREE SIGN 0x009d: 0x00b2, # SUPERSCRIPT TWO 0x009e: 0x00b7, # MIDDLE DOT 0x009f: 0x00f7, # DIVISION SIGN 0x00a0: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL 0x00a1: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL 0x00a2: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x00a3: 0x0451, # CYRILLIC SMALL LETTER IO 0x00a4: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x00a5: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 0x00a6: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x00a7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE 0x00a8: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x00a9: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x00aa: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE 0x00ab: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x00ac: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x00ad: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x00ae: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT 0x00af: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x00b0: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x00b1: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x00b2: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x00b3: 0x0401, # CYRILLIC CAPITAL LETTER IO 0x00b4: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x00b5: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 0x00b6: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x00b7: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x00b8: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x00b9: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x00ba: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x00bb: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x00bc: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x00bd: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x00be: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x00bf: 0x00a9, # COPYRIGHT SIGN 0x00c0: 0x044e, # CYRILLIC SMALL LETTER YU 0x00c1: 0x0430, # CYRILLIC SMALL LETTER A 0x00c2: 0x0431, # CYRILLIC SMALL LETTER BE 0x00c3: 0x0446, # CYRILLIC SMALL LETTER TSE 0x00c4: 0x0434, # CYRILLIC SMALL LETTER DE 0x00c5: 0x0435, # CYRILLIC SMALL LETTER IE 0x00c6: 0x0444, # CYRILLIC SMALL LETTER EF 0x00c7: 0x0433, # CYRILLIC SMALL LETTER GHE 0x00c8: 0x0445, # CYRILLIC SMALL LETTER HA 0x00c9: 0x0438, # CYRILLIC SMALL LETTER I 0x00ca: 0x0439, # CYRILLIC SMALL LETTER SHORT I 0x00cb: 0x043a, # CYRILLIC SMALL LETTER KA 0x00cc: 0x043b, # CYRILLIC SMALL LETTER EL 0x00cd: 0x043c, # CYRILLIC SMALL LETTER EM 0x00ce: 0x043d, # CYRILLIC SMALL LETTER EN 0x00cf: 0x043e, # CYRILLIC SMALL LETTER O 0x00d0: 0x043f, # CYRILLIC SMALL LETTER PE 0x00d1: 0x044f, # CYRILLIC SMALL LETTER YA 0x00d2: 0x0440, # CYRILLIC SMALL LETTER ER 0x00d3: 0x0441, # CYRILLIC SMALL LETTER ES 0x00d4: 0x0442, # CYRILLIC SMALL LETTER TE 0x00d5: 0x0443, # CYRILLIC SMALL LETTER U 0x00d6: 0x0436, # CYRILLIC SMALL LETTER ZHE 0x00d7: 0x0432, # CYRILLIC SMALL LETTER VE 0x00d8: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN 0x00d9: 0x044b, # CYRILLIC SMALL LETTER YERU 0x00da: 0x0437, # CYRILLIC SMALL LETTER ZE 0x00db: 0x0448, # CYRILLIC SMALL LETTER SHA 0x00dc: 0x044d, # CYRILLIC SMALL LETTER E 0x00dd: 0x0449, # CYRILLIC SMALL LETTER SHCHA 0x00de: 0x0447, # CYRILLIC SMALL LETTER CHE 0x00df: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN 0x00e0: 0x042e, # CYRILLIC CAPITAL LETTER YU 0x00e1: 0x0410, # CYRILLIC CAPITAL LETTER A 0x00e2: 0x0411, # CYRILLIC CAPITAL LETTER BE 0x00e3: 0x0426, # CYRILLIC CAPITAL LETTER TSE 0x00e4: 0x0414, # CYRILLIC CAPITAL LETTER DE 0x00e5: 0x0415, # CYRILLIC CAPITAL LETTER IE 0x00e6: 0x0424, # CYRILLIC CAPITAL LETTER EF 0x00e7: 0x0413, # CYRILLIC CAPITAL LETTER GHE 0x00e8: 0x0425, # CYRILLIC CAPITAL LETTER HA 0x00e9: 0x0418, # CYRILLIC CAPITAL LETTER I 0x00ea: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I 0x00eb: 0x041a, # CYRILLIC CAPITAL LETTER KA 0x00ec: 0x041b, # CYRILLIC CAPITAL LETTER EL 0x00ed: 0x041c, # CYRILLIC CAPITAL LETTER EM 0x00ee: 0x041d, # CYRILLIC CAPITAL LETTER EN 0x00ef: 0x041e, # CYRILLIC CAPITAL LETTER O 0x00f0: 0x041f, # CYRILLIC CAPITAL LETTER PE 0x00f1: 0x042f, # CYRILLIC CAPITAL LETTER YA 0x00f2: 0x0420, # CYRILLIC CAPITAL LETTER ER 0x00f3: 0x0421, # CYRILLIC CAPITAL LETTER ES 0x00f4: 0x0422, # CYRILLIC CAPITAL LETTER TE 0x00f5: 0x0423, # CYRILLIC CAPITAL LETTER U 0x00f6: 0x0416, # CYRILLIC CAPITAL LETTER ZHE 0x00f7: 0x0412, # CYRILLIC CAPITAL LETTER VE 0x00f8: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN 0x00f9: 0x042b, # CYRILLIC CAPITAL LETTER YERU 0x00fa: 0x0417, # CYRILLIC CAPITAL LETTER ZE 0x00fb: 0x0428, # CYRILLIC CAPITAL LETTER SHA 0x00fc: 0x042d, # CYRILLIC CAPITAL LETTER E 0x00fd: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA 0x00fe: 0x0427, # CYRILLIC CAPITAL LETTER CHE 0x00ff: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN }) ### Encoding Map encoding_map = codecs.make_encoding_map(decoding_map)
barbour-em/osf.io
refs/heads/develop
website/util/rubeus.py
1
# -*- coding: utf-8 -*- """Contains helper functions for generating correctly formatted hgrid list/folders. """ import datetime import hurry.filesize from modularodm import Q from framework.auth.decorators import Auth from website.util import paths from website.util import sanitize from website.settings import ( ALL_MY_PROJECTS_ID, ALL_MY_REGISTRATIONS_ID, ALL_MY_PROJECTS_NAME, ALL_MY_REGISTRATIONS_NAME, DISK_SAVING_MODE ) FOLDER = 'folder' FILE = 'file' KIND = 'kind' # TODO: Validate the JSON schema, esp. for addons DEFAULT_PERMISSIONS = { 'view': True, 'edit': False, } def format_filesize(size): return hurry.filesize.size(size, system=hurry.filesize.alternative) def default_urls(node_api, short_name): return { 'fetch': u'{node_api}{addonshort}/hgrid/'.format(node_api=node_api, addonshort=short_name), 'upload': u'{node_api}{addonshort}/'.format(node_api=node_api, addonshort=short_name), } def to_hgrid(node, auth, **data): """Converts a node into a rubeus grid format :param Node node: the node to be parsed :param Auth auth: the user authorization object :returns: rubeus-formatted dict """ return NodeFileCollector(node, auth, **data).to_hgrid() def to_project_hgrid(node, auth, **data): """Converts a node into a rubeus grid format :param node Node: the node to be parsed :param auth Auth: the user authorization object :returns: rubeus-formatted dict """ return NodeProjectCollector(node, auth, **data).to_hgrid() def to_project_root(node, auth, **data): return NodeProjectCollector(node, auth, **data).get_root() def build_addon_root(node_settings, name, permissions=None, urls=None, extra=None, buttons=None, user=None, **kwargs): """Builds the root or "dummy" folder for an addon. :param addonNodeSettingsBase node_settings: Addon settings :param String name: Additional information for the folder title eg. Repo name for Github or bucket name for S3 :param dict or Auth permissions: Dictionary of permissions for the addon's content or Auth for use in node.can_X methods :param dict urls: Hgrid related urls :param String extra: Html to be appened to the addon folder name eg. Branch switcher for github :param list of dicts buttons: List of buttons to appear in HGrid row. Each dict must have 'text', a string that will appear on the button, and 'action', the name of a function in :param dict kwargs: Any additional information to add to the root folder :return dict: Hgrid formatted dictionary for the addon root folder """ permissions = permissions or DEFAULT_PERMISSIONS if name: name = u'{0}: {1}'.format(node_settings.config.full_name, name) else: name = node_settings.config.full_name if hasattr(node_settings.config, 'urls') and node_settings.config.urls: urls = node_settings.config.urls if urls is None: urls = default_urls(node_settings.owner.api_url, node_settings.config.short_name) forbid_edit = DISK_SAVING_MODE if node_settings.config.short_name == 'osfstorage' else False if isinstance(permissions, Auth): auth = permissions permissions = { 'view': node_settings.owner.can_view(auth), 'edit': (node_settings.owner.can_edit(auth) and not node_settings.owner.is_registration and not forbid_edit), } max_size = node_settings.config.max_file_size if user and 'high_upload_limit' in user.system_tags: max_size = node_settings.config.high_max_file_size ret = { 'provider': node_settings.config.short_name, 'addonFullname': node_settings.config.full_name, 'name': name, 'iconUrl': node_settings.config.icon_url, KIND: FOLDER, 'extra': extra, 'buttons': buttons, 'isAddonRoot': True, 'permissions': permissions, 'accept': { 'maxSize': max_size, 'acceptedFiles': node_settings.config.accept_extensions, }, 'urls': urls, 'isPointer': False, 'nodeId': node_settings.owner._id, 'nodeUrl': node_settings.owner.url, 'nodeApiUrl': node_settings.owner.api_url, } ret.update(kwargs) return ret def build_addon_button(text, action, title=""): """Builds am action button to be rendered in HGrid :param str text: A string or html to appear on the button itself :param str action: The name of the HGrid action for the button to call. The callback for the HGrid action must be defined as a member of HGrid.Actions :return dict: Hgrid formatted dictionary for custom buttons """ button = { 'text': text, 'action': action, } if title: button['attributes'] = 'title="{title}" data-toggle="tooltip" data-placement="right" '.format(title=title) return button def sort_by_name(hgrid_data): return_value = hgrid_data if hgrid_data is not None: return_value = sorted(hgrid_data, key=lambda item: item['name'].lower()) return return_value class NodeProjectCollector(object): """A utility class for creating rubeus formatted node data for project organization""" def __init__(self, node, auth, just_one_level=False, **kwargs): self.node = node self.auth = auth self.extra = kwargs self.can_view = node.can_view(auth) self.can_edit = node.can_edit(auth) and not node.is_registration self.just_one_level = just_one_level def _collect_components(self, node, visited): rv = [] for child in reversed(node.nodes): # (child.resolve()._id not in visited or node.is_folder) and if child is not None and not child.is_deleted and child.resolve().can_view(auth=self.auth) and node.can_view(self.auth): # visited.append(child.resolve()._id) rv.append(self._serialize_node(child, visited=None, parent_is_folder=node.is_folder)) return rv def collect_all_projects_smart_folder(self): contributed = self.auth.user.node__contributed all_my_projects = contributed.find( Q('category', 'eq', 'project') & Q('is_deleted', 'eq', False) & Q('is_registration', 'eq', False) & Q('is_folder', 'eq', False) & # parent is not in the nodes list Q('__backrefs.parent.node.nodes', 'eq', None) ) comps = contributed.find( # components only Q('category', 'ne', 'project') & # parent is not in the nodes list Q('__backrefs.parent.node.nodes', 'nin', all_my_projects.get_keys()) & # exclude deleted nodes Q('is_deleted', 'eq', False) & # exclude registrations Q('is_registration', 'eq', False) ) children_count = all_my_projects.count() + comps.count() return self.make_smart_folder(ALL_MY_PROJECTS_NAME, ALL_MY_PROJECTS_ID, children_count) def collect_all_registrations_smart_folder(self): contributed = self.auth.user.node__contributed all_my_registrations = contributed.find( Q('category', 'eq', 'project') & Q('is_deleted', 'eq', False) & Q('is_registration', 'eq', True) & Q('is_folder', 'eq', False) & # parent is not in the nodes list Q('__backrefs.parent.node.nodes', 'eq', None) ) comps = contributed.find( # components only Q('category', 'ne', 'project') & # parent is not in the nodes list Q('__backrefs.parent.node.nodes', 'nin', all_my_registrations.get_keys()) & # exclude deleted nodes Q('is_deleted', 'eq', False) & # exclude registrations Q('is_registration', 'eq', True) ) children_count = all_my_registrations.count() + comps.count() return self.make_smart_folder(ALL_MY_REGISTRATIONS_NAME, ALL_MY_REGISTRATIONS_ID, children_count) def make_smart_folder(self, title, node_id, children_count=0): return_value = { 'name': title, 'kind': FOLDER, 'permissions': { 'edit': False, 'view': True, 'copyable': False, 'movable': False, 'acceptsDrops': False, }, 'urls': { 'upload': None, 'fetch': None, }, 'children': [], 'type': 'smart-folder', 'expand': False, 'isPointer': False, 'isFolder': True, 'isSmartFolder': True, 'dateModified': None, 'modifiedDelta': 0, 'modifiedBy': None, 'parentIsFolder': True, 'isDashboard': False, 'contributors': [], 'node_id': node_id, 'childrenCount': children_count, } return return_value def get_root(self): root = self._serialize_node(self.node, visited=None, parent_is_folder=False) return root def to_hgrid(self): """Return the Rubeus.JS representation of the node's children, not including addons """ root = self._collect_components(self.node, visited=None) # This will be important when we mix files and projects together: self._collect_addons(self.node) + if self.node.is_dashboard: root.insert(0, self.collect_all_projects_smart_folder()) root.insert(0, self.collect_all_registrations_smart_folder()) return root def _serialize_node(self, node, visited=None, parent_is_folder=False): """Returns the rubeus representation of a node folder for the project organizer. """ visited = visited or [] visited.append(node.resolve()._id) can_edit = node.can_edit(auth=self.auth) and not node.is_registration expanded = node.is_expanded(user=self.auth.user) can_view = node.can_view(auth=self.auth) children = [] modified_delta = delta_date(node.date_modified) date_modified = node.date_modified.isoformat() contributors = [] for contributor in node.contributors: if contributor._id in node.visible_contributor_ids: contributor_name = [ contributor.family_name, contributor.given_name, contributor.fullname, ] contributors.append({ 'name': next(name for name in contributor_name if name), 'url': contributor.url, }) try: user = node.logs[-1].user modified_by = user.family_name or user.given_name except AttributeError: modified_by = '' child_nodes = node.nodes readable_children = [] for child in child_nodes: if child is not None: resolved = child.resolve() if resolved.can_view(auth=self.auth) and not resolved.is_deleted: readable_children.append(child) children_count = len(readable_children) is_pointer = not node.primary is_component = node.category != 'project' is_project = node.category == 'project' is_file = False type_ = 'project' if is_file: type_ = 'file' if is_pointer and not parent_is_folder: type_ = 'pointer' if node.is_folder: type_ = 'folder' if is_component: type_ = 'component' if node.is_dashboard: to_expand = True elif not is_pointer or parent_is_folder: to_expand = expanded else: to_expand = False return { # TODO: Remove safe_unescape_html when mako html safe comes in 'name': sanitize.safe_unescape_html(node.title) if can_view else u'Private Component', 'kind': FOLDER, 'category': node.category, # Once we get files into the project organizer, files would be kind of FILE 'permissions': { 'edit': can_edit, 'view': can_view, 'copyable': not node.is_folder, 'movable': parent_is_folder, 'acceptsFolders': node.is_folder, 'acceptsMoves': node.is_folder, 'acceptsCopies': node.is_folder or is_project, 'acceptsComponents': node.is_folder, }, 'urls': { 'upload': None, 'fetch': node.url if not node.is_folder else None, }, 'type': type_, 'children': children, 'expand': to_expand, # TODO: (bgeiger) replace these flags with a Kind property or something 'isProject': is_project, 'isPointer': is_pointer, 'isComponent': is_component, 'isFolder': node.is_folder, 'isDashboard': node.is_dashboard, 'isFile': is_file, 'dateModified': date_modified, 'modifiedDelta': max(1, modified_delta), 'modifiedBy': modified_by, 'parentIsFolder': parent_is_folder, 'contributors': contributors, 'node_id': node.resolve()._id, 'isSmartFolder': False, 'apiURL': node.api_url, 'isRegistration': node.is_registration, 'description': node.description, 'registeredMeta': node.registered_meta, 'childrenCount': children_count, 'nodeType': node.project_or_component, } def _collect_addons(self, node): return_value = [] for addon in node.get_addons(): if addon.config.has_hgrid_files: temp = self._upgrade_addon_meta(addon.config.get_hgrid_data(addon, self.auth, **self.extra)) for item in temp: item["node_id"] = node._id item["apiURL"] = node.api_url return_value.extend(temp or []) return return_value def _upgrade_addon_meta(self, data): for datum in data: datum["expand"] = False datum["isProject"] = False datum["isPointer"] = False datum["isComponent"] = False datum["isFolder"] = False datum["isDashboard"] = False datum["isFile"] = True datum["dateModified"] = None datum["modifiedDelta"] = 0 datum["modifiedBy"] = "" datum["parentIsFolder"] = False datum["contributors"] = [] datum["isSmartFolder"] = False datum["isRegistration"] = False datum["description"] = "" datum["registeredMeta"] = {} datum["permissions"]["copyable"] = False datum["permissions"]["movable"] = False datum["permissions"]["acceptsFolders"] = False datum["permissions"]["acceptsMoves"] = False datum["permissions"]["acceptsCopies"] = False datum["permissions"]["acceptsComponents"] = False return data class NodeFileCollector(object): """A utility class for creating rubeus formatted node data""" def __init__(self, node, auth, **kwargs): self.node = node self.auth = auth self.extra = kwargs self.can_view = node.can_view(auth) self.can_edit = node.can_edit(auth) and not node.is_registration def to_hgrid(self): """Return the Rubeus.JS representation of the node's file data, including addons and components """ root = self._serialize_node(self.node) return [root] def _collect_components(self, node, visited): rv = [] for child in node.nodes: if child.resolve()._id not in visited and not child.is_deleted and node.can_view(self.auth): visited.append(child.resolve()._id) rv.append(self._serialize_node(child, visited=visited)) return rv def _serialize_node(self, node, visited=None): """Returns the rubeus representation of a node folder. """ visited = visited or [] visited.append(node.resolve()._id) can_view = node.can_view(auth=self.auth) if can_view: children = self._collect_addons(node) + self._collect_components(node, visited) else: children = [] return { # TODO: Remove safe_unescape_html when mako html safe comes in 'name': u'{0}: {1}'.format(node.project_or_component.capitalize(), sanitize.safe_unescape_html(node.title)) if can_view else u'Private Component', 'category': node.category, 'kind': FOLDER, 'permissions': { 'edit': node.can_edit(self.auth) and not node.is_registration, 'view': can_view, }, 'urls': { 'upload': None, 'fetch': None, }, 'children': children, 'isPointer': not node.primary, 'isSmartFolder': False, 'nodeType': node.project_or_component, 'nodeID': node.resolve()._id, } def _collect_addons(self, node): rv = [] for addon in node.get_addons(): if addon.config.has_hgrid_files: # WARNING: get_hgrid_data can return None if the addon is added but has no credentials. temp = addon.config.get_hgrid_data(addon, self.auth, **self.extra) rv.extend(sort_by_name(temp) or []) return rv # TODO: these might belong in addons module def collect_addon_assets(node): """Return a dictionary containing lists of JS and CSS assets for a node's addons. :rtype: {'tree_js': <list of JS scripts>, 'tree_css': <list of CSS files>} """ return { 'tree_js': list(collect_addon_js(node)), 'tree_css': list(collect_addon_css(node)), } # TODO: Abstract static collectors def collect_addon_js(node, visited=None, filename='files.js', config_entry='files'): """Collect JavaScript includes for all add-ons implementing HGrid views. :return list: List of JavaScript include paths """ # NOTE: must coerce to list so it is JSON-serializable visited = visited or [] visited.append(node._id) js = set() for addon in node.get_addons(): # JS modules configured in each addon's __init__ file js = js.union(addon.config.include_js.get(config_entry, [])) # Webpack bundle js_path = paths.resolve_addon_path(addon.config, filename) if js_path: js.add(js_path) for each in node.nodes: if each._id not in visited: visited.append(each._id) js = js.union(collect_addon_js(each, visited=visited)) return js def collect_addon_css(node, visited=None): """Collect CSS includes for all addons-ons implementing Hgrid views. :return: List of CSS include paths :rtype: list """ visited = visited or [] visited.append(node._id) css = set() for addon in node.get_addons(): css = css.union(addon.config.include_css.get('files', [])) for each in node.nodes: if each._id not in visited: visited.append(each._id) css = css.union(collect_addon_css(each, visited=visited)) return css def delta_date(d): diff = d - datetime.datetime.utcnow() s = diff.total_seconds() return s
TeachAtTUM/edx-platform
refs/heads/master
common/djangoapps/third_party_auth/models.py
3
# -*- coding: utf-8 -*- """ Models used to implement SAML SSO support in third_party_auth (inlcuding Shibboleth support) """ from __future__ import absolute_import import json import logging import re from config_models.models import ConfigurationModel, cache from django.conf import settings from django.contrib.sites.models import Site from django.core.exceptions import ValidationError from django.db import models from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from provider.oauth2.models import Client from provider.utils import long_token from six import text_type from social_core.backends.base import BaseAuth from social_core.backends.oauth import OAuthAuth from social_core.backends.saml import SAMLAuth from social_core.exceptions import SocialAuthBaseException from social_core.utils import module_member from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers from openedx.core.djangoapps.theming.helpers import get_current_request from .lti import LTI_PARAMS_KEY, LTIAuthBackend from .saml import STANDARD_SAML_PROVIDER_KEY, get_saml_idp_choices, get_saml_idp_class log = logging.getLogger(__name__) REGISTRATION_FORM_FIELD_BLACKLIST = [ 'name', 'username' ] # A dictionary of {name: class} entries for each python-social-auth backend available. # Because this setting can specify arbitrary code to load and execute, it is set via # normal Django settings only and cannot be changed at runtime: def _load_backend_classes(base_class=BaseAuth): """ Load the list of python-social-auth backend classes from Django settings """ for class_path in settings.AUTHENTICATION_BACKENDS: auth_class = module_member(class_path) if issubclass(auth_class, base_class): yield auth_class _PSA_BACKENDS = {backend_class.name: backend_class for backend_class in _load_backend_classes()} _PSA_OAUTH2_BACKENDS = [backend_class.name for backend_class in _load_backend_classes(OAuthAuth)] _PSA_SAML_BACKENDS = [backend_class.name for backend_class in _load_backend_classes(SAMLAuth)] _LTI_BACKENDS = [backend_class.name for backend_class in _load_backend_classes(LTIAuthBackend)] def clean_json(value, of_type): """ Simple helper method to parse and clean JSON """ if not value.strip(): return json.dumps(of_type()) try: value_python = json.loads(value) except ValueError as err: raise ValidationError("Invalid JSON: {}".format(text_type(err))) if not isinstance(value_python, of_type): raise ValidationError("Expected a JSON {}".format(of_type)) return json.dumps(value_python, indent=4) def clean_username(username=''): """ Simple helper method to ensure a username is compatible with our system requirements. """ return re.sub(r'[^-\w]+', '_', username)[:30] class AuthNotConfigured(SocialAuthBaseException): """ Exception when SAMLProviderData or other required info is missing """ def __init__(self, provider_name): super(AuthNotConfigured, self).__init__() self.provider_name = provider_name def __str__(self): return _('Authentication with {} is currently unavailable.').format( self.provider_name ) class ProviderConfig(ConfigurationModel): """ Abstract Base Class for configuring a third_party_auth provider """ icon_class = models.CharField( max_length=50, blank=True, default='fa-sign-in', help_text=( 'The Font Awesome (or custom) icon class to use on the login button for this provider. ' 'Examples: fa-google-plus, fa-facebook, fa-linkedin, fa-sign-in, fa-university' ), ) # We use a FileField instead of an ImageField here because ImageField # doesn't support SVG. This means we don't get any image validation, but # that should be fine because only trusted users should be uploading these # anyway. icon_image = models.FileField( blank=True, help_text=( 'If there is no Font Awesome icon available for this provider, upload a custom image. ' 'SVG images are recommended as they can scale to any size.' ), ) name = models.CharField(max_length=50, blank=False, help_text="Name of this provider (shown to users)") secondary = models.BooleanField( default=False, help_text=_( 'Secondary providers are displayed less prominently, ' 'in a separate list of "Institution" login providers.' ), ) site = models.ForeignKey( Site, default=settings.SITE_ID, related_name='%(class)ss', help_text=_( 'The Site that this provider configuration belongs to.' ), ) skip_hinted_login_dialog = models.BooleanField( default=False, help_text=_( "If this option is enabled, users that visit a \"TPA hinted\" URL for this provider " "(e.g. a URL ending with `?tpa_hint=[provider_name]`) will be forwarded directly to " "the login URL of the provider instead of being first prompted with a login dialog." ), ) skip_registration_form = models.BooleanField( default=False, help_text=_( "If this option is enabled, users will not be asked to confirm their details " "(name, email, etc.) during the registration process. Only select this option " "for trusted providers that are known to provide accurate user information." ), ) skip_email_verification = models.BooleanField( default=False, help_text=_( "If this option is selected, users will not be required to confirm their " "email, and their account will be activated immediately upon registration." ), ) visible = models.BooleanField( default=False, help_text=_( "If this option is not selected, users will not be presented with the provider " "as an option to authenticate with on the login screen, but manual " "authentication using the correct link is still possible." ), ) max_session_length = models.PositiveIntegerField( null=True, blank=True, default=None, verbose_name='Max session length (seconds)', help_text=_( "If this option is set, then users logging in using this SSO provider will have " "their session length limited to no longer than this value. If set to 0 (zero), " "the session will expire upon the user closing their browser. If left blank, the " "Django platform session default length will be used." ) ) send_to_registration_first = models.BooleanField( default=False, help_text=_( "If this option is selected, users will be directed to the registration page " "immediately after authenticating with the third party instead of the login page." ), ) sync_learner_profile_data = models.BooleanField( default=False, help_text=_( "Synchronize user profile data received from the identity provider with the edX user " "account on each SSO login. The user will be notified if the email address associated " "with their account is changed as a part of this synchronization." ) ) prefix = None # used for provider_id. Set to a string value in subclass backend_name = None # Set to a field or fixed value in subclass accepts_logins = True # Whether to display a sign-in button when the provider is enabled # "enabled" field is inherited from ConfigurationModel class Meta(object): app_label = "third_party_auth" abstract = True def clean(self): """ Ensure that either `icon_class` or `icon_image` is set """ super(ProviderConfig, self).clean() if bool(self.icon_class) == bool(self.icon_image): raise ValidationError('Either an icon class or an icon image must be given (but not both)') @property def provider_id(self): """ Unique string key identifying this provider. Must be URL and css class friendly. """ assert self.prefix is not None return "-".join((self.prefix, ) + tuple(getattr(self, field) for field in self.KEY_FIELDS)) @property def backend_class(self): """ Get the python-social-auth backend class used for this provider """ return _PSA_BACKENDS[self.backend_name] def get_url_params(self): """ Get a dict of GET parameters to append to login links for this provider """ return {} def is_active_for_pipeline(self, pipeline): """ Is this provider being used for the specified pipeline? """ return self.backend_name == pipeline['backend'] def match_social_auth(self, social_auth): """ Is this provider being used for this UserSocialAuth entry? """ return self.backend_name == social_auth.provider def get_remote_id_from_social_auth(self, social_auth): """ Given a UserSocialAuth object, return the remote ID used by this provider. """ # This is generally the same thing as the UID, expect when one backend is used for multiple providers assert self.match_social_auth(social_auth) return social_auth.uid def get_social_auth_uid(self, remote_id): """ Return the uid in social auth. This is default implementation. Subclass may override with a different one. """ return remote_id @classmethod def get_register_form_data(cls, pipeline_kwargs): """Gets dict of data to display on the register form. common.djangoapps.student.views.register_user uses this to populate the new account creation form with values supplied by the user's chosen provider, preventing duplicate data entry. Args: pipeline_kwargs: dict of string -> object. Keyword arguments accumulated by the pipeline thus far. Returns: Dict of string -> string. Keys are names of form fields; values are values for that field. Where there is no value, the empty string must be used. """ registration_form_data = {} # Details about the user sent back from the provider. details = pipeline_kwargs.get('details').copy() # Set the registration form to use the `fullname` detail for the `name` field. registration_form_data['name'] = details.get('fullname', '') # Get the username separately to take advantage of the de-duping logic # built into the pipeline. The provider cannot de-dupe because it can't # check the state of taken usernames in our system. Note that there is # technically a data race between the creation of this value and the # creation of the user object, so it is still possible for users to get # an error on submit. registration_form_data['username'] = clean_username(pipeline_kwargs.get('username') or '') # Any other values that are present in the details dict should be copied # into the registration form details. This may include details that do # not map to a value that exists in the registration form. However, # because the fields that are actually rendered are not based on this # list, only those values that map to a valid registration form field # will actually be sent to the form as default values. for blacklisted_field in REGISTRATION_FORM_FIELD_BLACKLIST: details.pop(blacklisted_field, None) registration_form_data.update(details) return registration_form_data def get_authentication_backend(self): """Gets associated Django settings.AUTHENTICATION_BACKEND string.""" return '{}.{}'.format(self.backend_class.__module__, self.backend_class.__name__) @property def display_for_login(self): """ Determines whether the provider ought to be shown as an option with which to authenticate on the login screen, registration screen, and elsewhere. """ return bool(self.enabled_for_current_site and self.accepts_logins and self.visible) @property def enabled_for_current_site(self): """ Determines if the provider is able to be used with the current site. """ return self.enabled and self.site == Site.objects.get_current(get_current_request()) class OAuth2ProviderConfig(ProviderConfig): """ Configuration Entry for an OAuth2 based provider. Also works for OAuth1 providers. """ prefix = 'oa2' KEY_FIELDS = ('provider_slug', ) # Backend name is unique backend_name = models.CharField( max_length=50, blank=False, db_index=True, help_text=( "Which python-social-auth OAuth2 provider backend to use. " "The list of backend choices is determined by the THIRD_PARTY_AUTH_BACKENDS setting." # To be precise, it's set by AUTHENTICATION_BACKENDS - which aws.py sets from THIRD_PARTY_AUTH_BACKENDS ) ) provider_slug = models.SlugField( max_length=30, db_index=True, help_text=( 'A short string uniquely identifying this provider. ' 'Cannot contain spaces and should be a usable as a CSS class. Examples: "ubc", "mit-staging"' )) key = models.TextField(blank=True, verbose_name="Client ID") secret = models.TextField( blank=True, verbose_name="Client Secret", help_text=( 'For increased security, you can avoid storing this in your database by leaving ' ' this field blank and setting ' 'SOCIAL_AUTH_OAUTH_SECRETS = {"(backend name)": "secret", ...} ' 'in your instance\'s Django settings (or lms.auth.json)' ) ) other_settings = models.TextField(blank=True, help_text="Optional JSON object with advanced settings, if any.") class Meta(object): app_label = "third_party_auth" verbose_name = "Provider Configuration (OAuth)" verbose_name_plural = verbose_name def clean(self): """ Standardize and validate fields """ super(OAuth2ProviderConfig, self).clean() self.other_settings = clean_json(self.other_settings, dict) def get_setting(self, name): """ Get the value of a setting, or raise KeyError """ if name == "KEY": return self.key if name == "SECRET": if self.secret: return self.secret # To allow instances to avoid storing secrets in the DB, the secret can also be set via Django: return getattr(settings, 'SOCIAL_AUTH_OAUTH_SECRETS', {}).get(self.backend_name, '') if self.other_settings: other_settings = json.loads(self.other_settings) assert isinstance(other_settings, dict), "other_settings should be a JSON object (dictionary)" return other_settings[name] raise KeyError class SAMLConfiguration(ConfigurationModel): """ General configuration required for this edX instance to act as a SAML Service Provider and allow users to authenticate via third party SAML Identity Providers (IdPs) """ KEY_FIELDS = ('site_id', 'slug') site = models.ForeignKey( Site, default=settings.SITE_ID, related_name='%(class)ss', help_text=_( 'The Site that this SAML configuration belongs to.' ), ) slug = models.SlugField( max_length=30, default='default', help_text=( 'A short string uniquely identifying this configuration. ' 'Cannot contain spaces. Examples: "ubc", "mit-staging"' ), ) private_key = models.TextField( help_text=( 'To generate a key pair as two files, run ' '"openssl req -new -x509 -days 3652 -nodes -out saml.crt -keyout saml.key". ' 'Paste the contents of saml.key here. ' 'For increased security, you can avoid storing this in your database by leaving ' 'this field blank and setting it via the SOCIAL_AUTH_SAML_SP_PRIVATE_KEY setting ' 'in your instance\'s Django settings (or lms.auth.json).' ), blank=True, ) public_key = models.TextField( help_text=( 'Public key certificate. ' 'For increased security, you can avoid storing this in your database by leaving ' 'this field blank and setting it via the SOCIAL_AUTH_SAML_SP_PUBLIC_CERT setting ' 'in your instance\'s Django settings (or lms.auth.json).' ), blank=True, ) entity_id = models.CharField(max_length=255, default="http://saml.example.com", verbose_name="Entity ID") org_info_str = models.TextField( verbose_name="Organization Info", default='{"en-US": {"url": "http://www.example.com", "displayname": "Example Inc.", "name": "example"}}', help_text="JSON dictionary of 'url', 'displayname', and 'name' for each language", ) other_config_str = models.TextField( default='{\n"SECURITY_CONFIG": {"metadataCacheDuration": 604800, "signMetadata": false}\n}', help_text=( "JSON object defining advanced settings that are passed on to python-saml. " "Valid keys that can be set here include: SECURITY_CONFIG and SP_EXTRA" ), ) class Meta(object): app_label = "third_party_auth" verbose_name = "SAML Configuration" verbose_name_plural = verbose_name def __str__(self): """ Return human-readable string representation. """ return "SAMLConfiguration {site}: {slug} on {date:%Y-%m-%d %H:%M:%S}".format( site=self.site.name, slug=self.slug, date=self.change_date, ) def clean(self): """ Standardize and validate fields """ super(SAMLConfiguration, self).clean() self.org_info_str = clean_json(self.org_info_str, dict) self.other_config_str = clean_json(self.other_config_str, dict) self.private_key = ( self.private_key .replace("-----BEGIN RSA PRIVATE KEY-----", "") .replace("-----BEGIN PRIVATE KEY-----", "") .replace("-----END RSA PRIVATE KEY-----", "") .replace("-----END PRIVATE KEY-----", "") .strip() ) self.public_key = ( self.public_key .replace("-----BEGIN CERTIFICATE-----", "") .replace("-----END CERTIFICATE-----", "") .strip() ) def get_setting(self, name): """ Get the value of a setting, or raise KeyError """ default_saml_contact = { # Default contact information to put into the SAML metadata that gets generated by python-saml. "givenName": _("{platform_name} Support").format( platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME) ), "emailAddress": configuration_helpers.get_value('TECH_SUPPORT_EMAIL', settings.TECH_SUPPORT_EMAIL), } if name == "ORG_INFO": return json.loads(self.org_info_str) if name == "SP_ENTITY_ID": return self.entity_id if name == "SP_PUBLIC_CERT": if self.public_key: return self.public_key # To allow instances to avoid storing keys in the DB, the key pair can also be set via Django: if self.slug == 'default': return getattr(settings, 'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT', '') else: public_certs = getattr(settings, 'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT_DICT', {}) return public_certs.get(self.slug, '') if name == "SP_PRIVATE_KEY": if self.private_key: return self.private_key # To allow instances to avoid storing keys in the DB, the private key can also be set via Django: if self.slug == 'default': return getattr(settings, 'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY', '') else: private_keys = getattr(settings, 'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY_DICT', {}) return private_keys.get(self.slug, '') other_config = { # These defaults can be overriden by self.other_config_str "GET_ALL_EXTRA_DATA": True, # Save all attribute values the IdP sends into the UserSocialAuth table "TECHNICAL_CONTACT": default_saml_contact, "SUPPORT_CONTACT": default_saml_contact, } other_config.update(json.loads(self.other_config_str)) return other_config[name] # SECURITY_CONFIG, SP_EXTRA, or similar extra settings def active_saml_configurations_filter(): """ Returns a mapping to be used for the SAMLProviderConfig to limit the SAMLConfiguration choices to the current set. """ query_set = SAMLConfiguration.objects.current_set() return {'id__in': query_set.values_list('id', flat=True)} class SAMLProviderConfig(ProviderConfig): """ Configuration Entry for a SAML/Shibboleth provider. """ prefix = 'saml' KEY_FIELDS = ('idp_slug', ) backend_name = models.CharField( max_length=50, default='tpa-saml', blank=False, help_text="Which python-social-auth provider backend to use. 'tpa-saml' is the standard edX SAML backend.") idp_slug = models.SlugField( max_length=30, db_index=True, help_text=( 'A short string uniquely identifying this provider. ' 'Cannot contain spaces and should be a usable as a CSS class. Examples: "ubc", "mit-staging"' )) entity_id = models.CharField( max_length=255, verbose_name="Entity ID", help_text="Example: https://idp.testshib.org/idp/shibboleth") metadata_source = models.CharField( max_length=255, help_text=( "URL to this provider's XML metadata. Should be an HTTPS URL. " "Example: https://www.testshib.org/metadata/testshib-providers.xml" )) attr_user_permanent_id = models.CharField( max_length=128, blank=True, verbose_name="User ID Attribute", help_text="URN of the SAML attribute that we can use as a unique, persistent user ID. Leave blank for default.") attr_full_name = models.CharField( max_length=128, blank=True, verbose_name="Full Name Attribute", help_text="URN of SAML attribute containing the user's full name. Leave blank for default.") attr_first_name = models.CharField( max_length=128, blank=True, verbose_name="First Name Attribute", help_text="URN of SAML attribute containing the user's first name. Leave blank for default.") attr_last_name = models.CharField( max_length=128, blank=True, verbose_name="Last Name Attribute", help_text="URN of SAML attribute containing the user's last name. Leave blank for default.") attr_username = models.CharField( max_length=128, blank=True, verbose_name="Username Hint Attribute", help_text="URN of SAML attribute to use as a suggested username for this user. Leave blank for default.") attr_email = models.CharField( max_length=128, blank=True, verbose_name="Email Attribute", help_text="URN of SAML attribute containing the user's email address[es]. Leave blank for default.") automatic_refresh_enabled = models.BooleanField( default=True, verbose_name="Enable automatic metadata refresh", help_text="When checked, the SAML provider's metadata will be included " "in the automatic refresh job, if configured." ) identity_provider_type = models.CharField( max_length=128, blank=False, verbose_name="Identity Provider Type", default=STANDARD_SAML_PROVIDER_KEY, choices=get_saml_idp_choices(), help_text=( "Some SAML providers require special behavior. For example, SAP SuccessFactors SAML providers require an " "additional API call to retrieve user metadata not provided in the SAML response. Select the provider type " "which best matches your use case. If in doubt, choose the Standard SAML Provider type." ) ) debug_mode = models.BooleanField( default=False, verbose_name="Debug Mode", help_text=( "In debug mode, all SAML XML requests and responses will be logged. " "This is helpful for testing/setup but should always be disabled before users start using this provider." ), ) other_settings = models.TextField( verbose_name="Advanced settings", blank=True, help_text=( 'For advanced use cases, enter a JSON object with addtional configuration. ' 'The tpa-saml backend supports {"requiredEntitlements": ["urn:..."]}, ' 'which can be used to require the presence of a specific eduPersonEntitlement, ' 'and {"extra_field_definitions": [{"name": "...", "urn": "..."},...]}, which can be ' 'used to define registration form fields and the URNs that can be used to retrieve ' 'the relevant values from the SAML response. Custom provider types, as selected ' 'in the "Identity Provider Type" field, may make use of the information stored ' 'in this field for additional configuration.' )) archived = models.BooleanField(default=False) saml_configuration = models.ForeignKey( SAMLConfiguration, on_delete=models.SET_NULL, limit_choices_to=active_saml_configurations_filter, null=True, blank=True, ) def clean(self): """ Standardize and validate fields """ super(SAMLProviderConfig, self).clean() self.other_settings = clean_json(self.other_settings, dict) class Meta(object): app_label = "third_party_auth" verbose_name = "Provider Configuration (SAML IdP)" verbose_name_plural = "Provider Configuration (SAML IdPs)" def get_url_params(self): """ Get a dict of GET parameters to append to login links for this provider """ return {'idp': self.idp_slug} def is_active_for_pipeline(self, pipeline): """ Is this provider being used for the specified pipeline? """ return self.backend_name == pipeline['backend'] and self.idp_slug == pipeline['kwargs']['response']['idp_name'] def match_social_auth(self, social_auth): """ Is this provider being used for this UserSocialAuth entry? """ prefix = self.idp_slug + ":" return self.backend_name == social_auth.provider and social_auth.uid.startswith(prefix) def get_remote_id_from_social_auth(self, social_auth): """ Given a UserSocialAuth object, return the remote ID used by this provider. """ assert self.match_social_auth(social_auth) # Remove the prefix from the UID return social_auth.uid[len(self.idp_slug) + 1:] def get_social_auth_uid(self, remote_id): """ Get social auth uid from remote id by prepending idp_slug to the remote id """ return '{}:{}'.format(self.idp_slug, remote_id) def get_config(self): """ Return a SAMLIdentityProvider instance for use by SAMLAuthBackend. Essentially this just returns the values of this object and its associated 'SAMLProviderData' entry. """ if self.other_settings: conf = json.loads(self.other_settings) else: conf = {} attrs = ( 'attr_user_permanent_id', 'attr_full_name', 'attr_first_name', 'attr_last_name', 'attr_username', 'attr_email', 'entity_id') for field in attrs: val = getattr(self, field) if val: conf[field] = val # Now get the data fetched automatically from the metadata.xml: data = SAMLProviderData.current(self.entity_id) if not data or not data.is_valid(): log.error( 'No SAMLProviderData found for provider "%s" with entity id "%s" and IdP slug "%s". ' 'Run "manage.py saml pull" to fix or debug.', self.name, self.entity_id, self.idp_slug ) raise AuthNotConfigured(provider_name=self.name) conf['x509cert'] = data.public_key conf['url'] = data.sso_url # Add SAMLConfiguration appropriate for this IdP conf['saml_sp_configuration'] = ( self.saml_configuration or SAMLConfiguration.current(self.site.id, 'default') ) idp_class = get_saml_idp_class(self.identity_provider_type) return idp_class(self.idp_slug, **conf) class SAMLProviderData(models.Model): """ Data about a SAML IdP that is fetched automatically by 'manage.py saml pull' This data is only required during the actual authentication process. """ cache_timeout = 600 fetched_at = models.DateTimeField(db_index=True, null=False) expires_at = models.DateTimeField(db_index=True, null=True) entity_id = models.CharField(max_length=255, db_index=True) # This is the key for lookups in this table sso_url = models.URLField(verbose_name="SSO URL") public_key = models.TextField() class Meta(object): app_label = "third_party_auth" verbose_name = "SAML Provider Data" verbose_name_plural = verbose_name ordering = ('-fetched_at', ) def is_valid(self): """ Is this data valid? """ if self.expires_at and timezone.now() > self.expires_at: return False return bool(self.entity_id and self.sso_url and self.public_key) is_valid.boolean = True @classmethod def cache_key_name(cls, entity_id): """ Return the name of the key to use to cache the current data """ return 'configuration/{}/current/{}'.format(cls.__name__, entity_id) @classmethod def current(cls, entity_id): """ Return the active data entry, if any, otherwise None """ cached = cache.get(cls.cache_key_name(entity_id)) if cached is not None: return cached try: current = cls.objects.filter(entity_id=entity_id).order_by('-fetched_at')[0] except IndexError: current = None cache.set(cls.cache_key_name(entity_id), current, cls.cache_timeout) return current class LTIProviderConfig(ProviderConfig): """ Configuration required for this edX instance to act as a LTI Tool Provider and allow users to authenticate and be enrolled in a course via third party LTI Tool Consumers. """ prefix = 'lti' backend_name = 'lti' # This provider is not visible to users icon_class = None icon_image = None secondary = False # LTI login cannot be initiated by the tool provider accepts_logins = False KEY_FIELDS = ('lti_consumer_key', ) lti_consumer_key = models.CharField( max_length=255, help_text=( 'The name that the LTI Tool Consumer will use to identify itself' ) ) lti_hostname = models.CharField( default='localhost', max_length=255, help_text=( 'The domain that will be acting as the LTI consumer.' ), db_index=True ) lti_consumer_secret = models.CharField( default=long_token, max_length=255, help_text=( 'The shared secret that the LTI Tool Consumer will use to ' 'authenticate requests. Only this edX instance and this ' 'tool consumer instance should know this value. ' 'For increased security, you can avoid storing this in ' 'your database by leaving this field blank and setting ' 'SOCIAL_AUTH_LTI_CONSUMER_SECRETS = {"consumer key": "secret", ...} ' 'in your instance\'s Django setttigs (or lms.auth.json)' ), blank=True, ) lti_max_timestamp_age = models.IntegerField( default=10, help_text=( 'The maximum age of oauth_timestamp values, in seconds.' ) ) def match_social_auth(self, social_auth): """ Is this provider being used for this UserSocialAuth entry? """ prefix = self.lti_consumer_key + ":" return self.backend_name == social_auth.provider and social_auth.uid.startswith(prefix) def get_remote_id_from_social_auth(self, social_auth): """ Given a UserSocialAuth object, return the remote ID used by this provider. """ assert self.match_social_auth(social_auth) # Remove the prefix from the UID return social_auth.uid[len(self.lti_consumer_key) + 1:] def is_active_for_pipeline(self, pipeline): """ Is this provider being used for the specified pipeline? """ try: return ( self.backend_name == pipeline['backend'] and self.lti_consumer_key == pipeline['kwargs']['response'][LTI_PARAMS_KEY]['oauth_consumer_key'] ) except KeyError: return False def get_lti_consumer_secret(self): """ If the LTI consumer secret is not stored in the database, check Django settings instead """ if self.lti_consumer_secret: return self.lti_consumer_secret return getattr(settings, 'SOCIAL_AUTH_LTI_CONSUMER_SECRETS', {}).get(self.lti_consumer_key, '') class Meta(object): app_label = "third_party_auth" verbose_name = "Provider Configuration (LTI)" verbose_name_plural = verbose_name class ProviderApiPermissions(models.Model): """ This model links OAuth2 client with provider Id. It gives permission for a OAuth2 client to access the information under certain IdPs. """ client = models.ForeignKey(Client) provider_id = models.CharField( max_length=255, help_text=( 'Uniquely identify a provider. This is different from backend_name.' ) ) class Meta(object): app_label = "third_party_auth" verbose_name = "Provider API Permission" verbose_name_plural = verbose_name + 's'
rvlad1987/repository.rvlad1987.xbmc-addons
refs/heads/master
helix/source/plugin.video.diafilms/resources/__init__.py
12133432
xaviercobain88/framework-python
refs/heads/master
build/lib.linux-i686-2.7/openerp/addons/point_of_sale/wizard/pos_discount.py
55
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import osv, fields class pos_discount(osv.osv_memory): _name = 'pos.discount' _description = 'Add a Global Discount' _columns = { 'discount': fields.float('Discount (%)', required=True, digits=(16,2)), } _defaults = { 'discount': 5, } # def view_init(self, cr, uid, fields_list, context=None): # """ # Creates view dynamically and adding fields at runtime. # @param self: The object pointer. # @param cr: A database cursor # @param uid: ID of the user currently logged in # @param context: A standard dictionary # @return: New arch of view with new columns. # """ # if context is None: # context = {} # super(pos_discount, self).view_init(cr, uid, fields_list, context=context) # record_id = context and context.get('active_id', False) or False # True def apply_discount(self, cr, uid, ids, context=None): """ To give the discount of product and check the. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return : nothing """ order_ref = self.pool.get('pos.order') order_line_ref = self.pool.get('pos.order.line') if context is None: context = {} this = self.browse(cr, uid, ids[0], context=context) record_id = context and context.get('active_id', False) if isinstance(record_id, (int, long)): record_id = [record_id] for order in order_ref.browse(cr, uid, record_id, context=context): order_line_ref.write(cr, uid, [x.id for x in order.lines], {'discount':this.discount}, context=context) return {} pos_discount() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
CGATOxford/Optic
refs/heads/master
scripts/tree2svg.py
1
''' tree2svg.py - plot a tree in svg format ======================================== :Author: Andreas Heger :Release: $Id$ :Date: |today| :Tags: Python Purpose ------- plot a tree in New-Hampshire format. The output is svg. Usage ----- Type:: python <script_name>.py --help for command line help. Examples -------- The following plots a tree with branch length from :file:`ks.tree` adding branch length information from :file:`kaks.tree`, `ks2.tree` and `kaks2.tree`:: cat ks.tree kaks.tree ks2.tree kaks2.tree |\ python plot_tree.py --verbose=0 --show-branchlengths --annotation=kaks --annotation=master --annotation=kaks --font-style-tips=italic > tree.svg Adding tables to trees ---------------------- Branches can be decorated with tables, for example:: cat test.tree tables.tree | python plot_tree.py -v 0 --filename-tables=test.table --annotation=tables Here, the file :file:`test.tree` is:: (((A:1,B:1):1,C:2),D:3); The file :file:`tables.tree` links branches to tables. The table number is given as the branch length:: (((A:5,B:1):2,C:3),D:4); Note that table ids need to start with 1, but need not be consecutive numbers. Finally, the tables are in the file :file:`test.table` given as argument to the options ``--filename-tables``. table=3 header1 header2 header3 0row1 col1 col2 0row2 col1 col2 table=1 header1 header2 header3 1row1 col1 col2 1row2 col1 col2 table=2 header1 header2 header3 2row1 col1 col2 2row2 col1 col2 A table is started by the line ``table=#`` where ``#`` is an integer number. The actual table follows as a tab-separated table with the first line being interpreted as a header. Lines starting with ``#`` are ignored. Command line options -------------------- ''' import os import sys import string import re import getopt import time import optparse import math import tempfile import subprocess from types import * import CGAT.Experiment as E import CGAT.TreeTools as TreeTools import CGAT.IOTools as IOTools import CGAT.SVGTree as SVGTree import CGAT.SVGdraw as SVGdraw ################################################################### ################################################################### ################################################################### ## ################################################################### class NodeDecoratorSupportPieChart(SVGTree.NodeDecorator): """class for decorating internal nodes using a pie-chart. Uses the support information in the tree. """ mRadius = 40 mFontSize = 12 mFont = "Verdana" mSeparator = 5 mStrokeWidth = 1 def __init__(self, tree, *args, **kwargs): SVGTree.NodeDecorator.__init__(self, tree, *args, **kwargs) def getElements(self, node_id, x, y): node = self.mTree.node(node_id) e = [] p = node.data.support if p == 1.0: e.append(SVGdraw.circle(x, y, self.mRadius, stroke="rgb(%i,%i,%i)" % SVGTree.BLACK, fill="rgb(%i,%i,%i)" % SVGTree.RED)) elif p > 0.0: e.append(SVGdraw.circle(x, y, self.mRadius, stroke="rgb(%i,%i,%i)" % SVGTree.BLACK, fill="rgb(%i,%i,%i)" % SVGTree.WHITE)) d = SVGdraw.pathdata(x, y) d.line(x + self.mRadius, y) angle = 360.0 * p dx = self.mRadius * math.cos(math.radians(angle)) + x dy = self.mRadius * math.sin(math.radians(angle)) + y if p <= 0.5: d.ellarc(self.mRadius, self.mRadius, 0, 0, 1, dx, dy) else: d.ellarc(self.mRadius, self.mRadius, 0, 1, 1, dx, dy) e.append(SVGdraw.path(d, stroke="rgb(%i,%i,%i)" % SVGTree.RED, fill="rgb(%i,%i,%i)" % SVGTree.RED, stroke_width=self.mStrokeWidth)) else: pass return e def getHeight(self, node_id): return 0 def getWidth(self, node_id): return 0 ################################################################### ################################################################### ################################################################### ## ################################################################### class BranchDecoratorTable(SVGTree.BranchDecoratorHorizontal): """branch decorator - add a table onto a branch length. The table will be plotted below each branch. This decorator requires labeled branches within a tree. """ mBranchLengthFormat = "%5.2f" mFontSize = 10 def __init__(self, tree, filename, *args, **kwargs): SVGTree.BranchDecoratorHorizontal.__init__(self, tree, *args, **kwargs) self.mWritten = 0 infile = open(filename, "r") self.mTables = {} table_id = None for line in infile: if line.startswith("#"): continue if line.startswith("table="): if table_id: self.mTables[table_id] = table table_id = re.search("table=(\d+)", line).groups()[0] if int(table_id) == 0: raise ValueError("table id 0 is invalid") table = [] continue table.append(line[:-1].split("\t")) if table_id: self.mTables[table_id] = table self.mColumnWidths = {} for id, table in self.mTables.iteritems(): if len(table) == 0: raise ValueError("empty table %s" % id) column_widths = [0] * len(table[0]) for row in table: if len(column_widths) != len(row): raise ValueError("table %s has unequal row lengths" % id) for x, col in enumerate(row): column_widths[x] = max(column_widths[x], len(col)) self.mColumnWidths[id] = column_widths def getElements(self, node_id, x1, x2, y): e = SVGTree.BranchDecoratorHorizontal.getElements( self, node_id, x1, x2, y) table_id = str(int(self.mTree.node(node_id).data.branchlength)) if table_id not in self.mTables: return e startx = x1 + self.mFontSize y = y + self.mFontSize table, column_widths = self.mTables[ table_id], self.mColumnWidths[table_id] font_weight = "bold" for r, row in enumerate(table): x = startx for c, col in enumerate(row): e.append(SVGdraw.text(x, y, col, 20, self.mFont, stroke="rgb(%i,%i,%i)" % self.mFontColour, font_weight=font_weight, text_anchor="left")) x += column_widths[c] * self.mFontSize // 2 y += self.mFontSize font_weight = "normal" return e def getHeight(self, node_id): table_id = str(int(self.mTree.node(node_id).data.branchlength)) if table_id in self.mTables: table_height = len(self.mTables[table_id]) * self.mFontSize else: table_height = 0 return 5 + table_height def getWidth(self, node_id): table_id = str(int(self.mTree.node(node_id).data.branchlength)) if table_id in self.mTables: return 5 + sum(self.mColumnWidths[table_id]) return 5 # ------------------------------------------------------------------------ def main(argv=None): parser = E.OptionParser( version="%prog version: $Id: plot_tree.py 2782 2009-09-10 11:40:29Z andreas $") parser.add_option("-i", "--title", dest="title", type="string", help="page title.") parser.add_option("-f", "--footer", dest="footer", type="string", help="page footer.") parser.add_option("-s", "--tree-nh-file", dest="filename_tree", type="string", help="filename with tree.") parser.add_option("-t", "--tree", dest="tree", type="string", help="tree.") parser.add_option("-r", "--species-regex", dest="species_regex", type="string", help="regular expression to extract species from identifier.") parser.add_option("--colour-by-species", dest="colour_by_species", action="store_true", help="colour by species.") parser.add_option("--support-style", dest="support_style", type="choice", choices=("pie", "number"), help="style for support information.") parser.add_option("--error-style", dest="error_style", type="choice", choices=("pie", "number"), help="style for error information.") parser.add_option("--branch-scale", dest="branch_scale", type="float", help="branch length scale factor.") parser.add_option("--height-scale", dest="height_scale", type="float", help="height scale factor.") parser.add_option("-a", "--annotations", dest="annotations", type="choice", action="append", choices=( "support", "error", "kaks", "master", "value", "tables"), help="annotations given by further trees.") parser.add_option("--filename-tables", dest="filename_tables", type="string", help="add tables from file (need also set options -a tables) [%default]") parser.add_option("--show-branchlengths", dest="show_branchlengths", action="store_true", help="show branch lengths.") parser.add_option("--leaf-symbol", dest="plot_leaf_symbol", type="choice", choices=("square", "circle"), help="Symbol for leaves.") parser.add_option("--font-size-branches", dest="font_size_branches", type="int", help="set font size for branches.") parser.add_option("--font-size-tips", dest="font_size_tips", type="int", help="set font size for tips.") parser.add_option("--font-style-tips", dest="font_style_tips", type="choice", choices=("normal", "italic",), help="set font style for tips.") parser.add_option("--map-tsv-file", dest="filename_map", type="string", help="filename with a name translation table.") parser.add_option("--filename-map-species2colour", dest="filename_colour_map", type="string", help="filename with a map of species to colour.") parser.add_option("--no-leaf-labels", dest="plot_leaf_labels", action="store_false", help="do not show labels at leafs.") parser.add_option("--no-ruler", dest="plot_ruler", action="store_false", help="do not plot ruler.") parser.set_defaults( titles="", title="", footer="", filename_tree=None, species_regex="^([^|]+)\|", colour_by_species=None, tree=None, branch_scale=0, height_scale=0, support_style=None, error_style="number", kaks_style="number", annotations=None, show_branchlengths=False, branch_length_format="%5.2f", font_size_tips=None, font_size_branches=None, font_style_tips=None, filename_map=None, filename_colour_map=None, plot_leaf_labels=True, plot_leaf_symbol=None, plot_ruler=True, filename_tables=None, ) (options, args) = E.Start(parser, add_pipe_options=True) if options.filename_tree: tree_lines = open(options.filename_tree, "r").readlines() elif options.tree: tree_lines = options.tree else: tree_lines = sys.stdin.readlines() nexus = TreeTools.Newick2Nexus(tree_lines) master_tree = nexus.trees[0] if options.filename_map: map_names = IOTools.ReadMap(open(options.filename_map, "r")) for id, node in master_tree.chain.items(): if node.data.taxon in map_names: node.data.taxon = map_names[node.data.taxon] if options.loglevel >= 2: master_tree.display() plot = SVGTree.SVGTree(master_tree) if options.branch_scale: plot.setBranchScale(options.branch_scale) if options.height_scale is not None: plot.setHeightScale(options.height_scale) if options.font_size_tips is not None: plot.setFontSize(options.font_size_tips) if options.plot_ruler is False: plot.setRulerElements([]) if options.show_branchlengths: b = SVGTree.BranchDecoratorHorizontalBranchLength(master_tree) if options.font_size_branches: b.setFontSize(options.font_size_branches) plot.setDecoratorHorizontalBranches(b) if options.colour_by_species: if options.filename_colour_map: map_species2colour = IOTools.ReadMap( open(options.filename_colour_map, "r")) else: map_species2colour = None rx = re.compile(options.species_regex) extract_species = lambda x: rx.search(x).groups()[0] plot.setDecoratorExternalNodes(SVGTree.NodeDecoratorBySpecies(master_tree, plot_symbol=options.plot_leaf_symbol, plot_label=options.plot_leaf_labels, map_species2colour=map_species2colour, extract_species=extract_species)) if options.font_style_tips: plot.getDecoratorExternalNodes().setFontStyle(options.font_style_tips) plot.getDecoratorExternalNodes().setPlotLabel(options.plot_leaf_labels) current_tree = 1 # add annotations by further trees given on the command line branch_length_annotations = [] current_reference_tree = master_tree if options.annotations: for annotation in options.annotations: tree = nexus.trees[current_tree] if annotation == "support": tree.branchlength2support() for id, node in tree.chain.items(): node.data.branchlength = 1.0 if options.support_style == "pie": plot.setDecoratorInternalNodes( NodeDecoratorSupportPieChart(nexus.trees[current_tree])) elif annotation == "error": if options.error_style == "number": b = SVGTree.BranchDecoratorHorizontalBranchLengthError( current_reference_tree, tree) if options.font_size_branches: b.setFontSize(options.font_size_branches) branch_length_annotations.append(b) elif annotation == "kaks": if options.kaks_style == "number": b = SVGTree.BranchDecoratorHorizontalBranchLengthWithKaks( current_reference_tree, tree) if options.font_size_branches: b.setFontSize(options.font_size_branches) branch_length_annotations.append(b) elif annotation == "value": b = SVGTree.BranchDecoratorHorizontalBranchLength(tree) if options.font_size_branches: b.setFontSize(options.font_size_branches) branch_length_annotations.append(b) elif annotation == "master": current_reference_tree = tree elif annotation == "tables": b = BranchDecoratorTable( tree, filename=options.filename_tables) plot.setDecoratorHorizontalBranches(b) current_tree += 1 if len(branch_length_annotations) == 1: b = branch_length_annotations[0] elif len(branch_length_annotations) == 2: b1, b2 = branch_length_annotations b1.setFontColour(SVGTree.BLUE) b2.setFontColour(SVGTree.RED) b = SVGTree.BranchDecoratorHorizontalAboveBelow( master_tree, b1, b2) elif len(branch_length_annotations) > 2: raise "obtained more than three branch length annotations. Layout not implemented" plot.setDecoratorHorizontalBranches(b) plot.initializePlot() plot.writeToFile(sys.stdout) E.Stop() if __name__ == "__main__": sys.exit(main())
javierj/kobudo-katas
refs/heads/master
Kata-RestConsumer/main.py
1
__author__ = 'Javier' from gindex_presenter import GIndexPresenter class ConsoleView(object): def show_gindex(self, gindex): print("Gindex: ", gindex) # Runner presenter = GIndexPresenter(ConsoleView()) presenter.request_gindex_for("Pybonacci", "scikit-aero")
skrueger111/zazzie
refs/heads/dev
src/sassie/tools/contrast_calculator/gui_mimic_contrast_calculator.py
2
''' Driver method to run the contrast calculator module ''' import sys import os import shutil import time import sassie.tools.contrast_calculator.contrast_calculator as contrast_calculator #import contrast_calculator as contrast_calculator import sassie.interface.input_filter as input_filter import sassie.interface.contrast_calculator.contrast_calculator_filter as contrast_calculator_filter #import contrast_calculator_filter as contrast_calculator_filter import multiprocessing def user_variables(self, **kwargs): #### user input #### #### user input #### #### user input #### self.runname = 'run_0' self.inpath = './' self.outfile = 'test' # self.numfiles = '2' self.numfiles = '1' # self.numfiles = '0' self.solute_conc = '1.0' self.d2ostep = '5' self.fexchp = '0.95' self.fexchn = '1.0' # self.seqfiles = ['protein_sequence.txt', 'dna_sequence.txt'] self.seqfiles = ['pai_seq.txt'] # self.seqfiles = ['protein_sequence.txt'] # self.seqfiles = ['dna_sequence.txt'] # self.seqfiles = ['rna_sequence.txt'] # self.seqfiles = ['trunc2a_min.pdb'] # self.seqfiles = ['hiv1_gag.pdb'] # self.seqfiles = ['c36_dsDNA60_min.pdb'] # self.seqfiles = ['pai_seq.txt','vn_seq.txt'] # self.seqfiles = ['skp_trimer.pdb','ompA.pdb'] # self.seqfiles = ['pai_seq.txt','c36_dsDNA60_min.pdb'] # self.numunits = ['1', '1'] self.numunits = ['1'] # self.numunits = ['2'] # self.fracdeut = ['0', '0'] self.fracdeut = ['0'] # self.fracdeut = ['0.0','0.6'] # self.moltype = ['protein', 'dna'] # self.moltype = ['dna'] self.moltype = ['protein'] # self.moltype = ['rna'] # self.moltype = ['protein','protein'] # self.isFasta = ['1', '1'] self.isFasta = ['1'] # self.isFasta = ['0'] # self.isFasta = ['1', '1'] # self.isFasta = ['0', '0'] # self.isFasta = ['1', '0'] self.plotflag = '1' # self.numsolv = '0' self.numsolv = '2' self.solv_comp = ['NaCl','KCl'] self.solv_conc = ['0.15','0.05'] self.number_of_chemicals = '0' # self.number_of_chemicals = '1' # self.number_of_chemicals = '2' self.formula_array = ['(C3H4O3)12', '(C3H4O3)12'] # self.formula_array = ['(C42H82NO8P)130'] self.number_exchangeable_hydrogens = ['12', '5'] self.fraction_exchangeable_hydrogens = ['0.95', '0.45'] self.mass_density = ['1.1', '1.3'] # self.number_exchangeable_hydrogens = ['0'] # self.fraction_exchangeable_hydrogens = ['0.0'] # self.mass_density = ['1.0'] self.testflag = False #### end user input #### #### end user input #### #### end user input #### def test_variables(self,paths): ''' users of gui_mimic as a driver script to run this module should not edit the values below as they are used for development tests this module defines variables that will be used to test the module as well as its input filter variables are defined outside the gui_mimic_align class so that they can be used by these other programs ''' pdb_data_path = paths['pdb_data_path'] dcd_data_path = paths['dcd_data_path'] module_data_path = paths['module_data_path'] other_data_path = paths['other_data_path'] self.runname = 'run_0' self.inpath = other_data_path self.outfile = 'test' self.numfiles = '1' self.solute_conc = '1.0' self.d2ostep = '5' self.fexchp = '0.95' self.fexchn = '1.0' self.seqfiles = ['pai_seq.txt'] self.numunits = ['1'] self.fracdeut = ['0'] self.moltype = ['protein'] self.isFasta = ['1'] self.plotflag = '0' self.numsolv = '0' self.solv_comp = [] self.solv_conc = [] self.number_of_chemicals = '0' self.formula_array = [] self.number_exchangeable_hydrogens = [] self.fraction_exchangeable_hydrogens = [] self.mass_density = [] self.testflag = True self.precision = 3 def run_module(self, **kwargs): ''' method to run the module and/or its input filter only the module input filter is run if kwargs is: test_filter=True method is defined outside the class so that it can be used by other programs such as test_module and test_module_filter ''' svariables={} svariables['runname'] = (self.runname,'string') svariables['inpath'] = (self.inpath,'string') svariables['outfile'] = (self.outfile,'string') svariables['numfiles'] = (self.numfiles,'int') svariables['solute_conc'] = (self.solute_conc,'float') svariables['d2ostep'] = (self.d2ostep,'int') svariables['numsolv'] = (self.numsolv,'int') svariables['fexchp'] = (self.fexchp,'float') svariables['fexchn'] = (self.fexchn,'float') svariables['number_of_chemicals'] = (self.number_of_chemicals,'int') svariables['plotflag'] = (self.plotflag,'int') error, self.variables = input_filter.type_check_and_convert(svariables) # print 'error, length: ', error, len(error) # print 'variables in run module: ', self.variables if len(error) > 0: print 'error = ', error if not(self.testflag): sys.exit() return error error1 = contrast_calculator_filter.check_numfiles(self.variables['numfiles'][0]) error2 = contrast_calculator_filter.check_numsolvcomp(self.variables['numsolv'][0]) error3 = contrast_calculator_filter.check_numchemcomp(self.variables['number_of_chemicals'][0]) if len(error1) > 0 or len(error2) > 0 or len(error3) > 0: error = error1 + error2 + error3 print 'error = ', error if not(self.testflag): sys.exit() return error else: error = contrast_calculator_filter.check_contrast(self.variables) if len(error) > 0: print 'error = ', error if not(self.testflag): sys.exit() return error self.ivariables = [] if(int(self.numfiles) > 0): for i in xrange(int(self.numfiles)): self.ivariables.append([self.seqfiles[i], self.numunits[i], self.fracdeut[i], self.moltype[i], self.isFasta[i]]) error = contrast_calculator_filter.check_ivariables(self.inpath,self.ivariables) if len(error) > 0: print 'error = ', error if not(self.testflag): sys.exit() return error self.solvvariables = [] if(int(self.numsolv) > 0): error, self.solv_formula = input_filter.check_and_convert_formula(self.solv_comp) if(len(error) > 0): print 'error = ', str(error) if not(self.testflag): sys.exit() return error else: for i in xrange(int(self.numsolv)): # self.solvvariables.append([self.solv_comp[i],self.solv_conc[i]]) self.solvvariables.append([self.solv_formula[i], self.solv_conc[i]]) error = contrast_calculator_filter.check_solvvariables(self.solvvariables) if len(error) > 0: print 'error = ', error if not(self.testflag): sys.exit() return error self.chemvariables = [] if(int(self.number_of_chemicals) > 0): error, self.formulas = input_filter.check_and_convert_formula(self.formula_array) if(len(error) > 0): print 'error = ', str(error) if not(self.testflag): sys.exit() return error else: for i in xrange(int(self.number_of_chemicals)): this_chemical_formula = self.formulas[i] this_number_exchangeable_hydrogens = self.number_exchangeable_hydrogens[i] this_fraction_exchangeable_hydrogens = self.fraction_exchangeable_hydrogens[i] this_mass_density = self.mass_density[i] self.chemvariables.append([this_chemical_formula, this_number_exchangeable_hydrogens, this_fraction_exchangeable_hydrogens, this_mass_density]) error = contrast_calculator_filter.check_chemvariables(self.chemvariables) if(len(error) > 0): print 'error = ', str(error) if not(self.testflag): sys.exit() return error try: if kwargs['test_filter']: return error except: pass runname = self.variables['runname'][0] if os.path.exists(os.path.join(runname, self.module)): shutil.rmtree(os.path.join(runname, self.module)) txtQueue = multiprocessing.JoinableQueue() this_contrast_calculator = contrast_calculator.contrast_calculator() this_contrast_calculator.main(self.variables,self.ivariables,self.solvvariables,self.chemvariables,txtQueue) class gui_mimic_contrast_calculator(): ''' gui_mimic class contains the name of the module ''' module = 'contrast_calculator' def __init__(self, test, paths): if not test: user_variables(self) else: test_variables(self, paths) run_module(self) if __name__ == '__main__': test = False # option to run with test variables not implemented in 1.0. paths = None # We are thinking of defining the install path so the gui mimic can be run from anywhere as long as it is called from that particular python # That way, the test files will always be available to the user. if test: pdb_data_path = os.path.join(os.path.dirname(os.path.realpath( __file__)), '..', '..', 'data', 'pdb_common') + os.path.sep dcd_data_path = os.path.join(os.path.dirname(os.path.realpath( __file__)), '..', '..', 'data', 'dcd_common') + os.path.sep module_data_path = os.path.join(os.path.dirname(os.path.realpath( __file__)), '..', '..', 'data', 'interface', 'align') + os.path.sep paths = {'pdb_data_path': pdb_data_path, 'dcd_data_path': dcd_data_path, 'module_data_path': module_data_path} start = time.time() run_gui = gui_mimic_contrast_calculator(test, paths) print 'time used: ', time.time() - start
VigneshMohan1/spark-branch-2.3
refs/heads/master
python/pyspark/sql/streaming.py
12
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import sys import json if sys.version >= '3': intlike = int basestring = unicode = str else: intlike = (int, long) from abc import ABCMeta, abstractmethod from pyspark import since, keyword_only from pyspark.rdd import ignore_unicode_prefix from pyspark.sql.column import _to_seq from pyspark.sql.readwriter import OptionUtils, to_str from pyspark.sql.types import * from pyspark.sql.utils import StreamingQueryException __all__ = ["StreamingQuery", "StreamingQueryManager", "DataStreamReader", "DataStreamWriter"] class StreamingQuery(object): """ A handle to a query that is executing continuously in the background as new data arrives. All these methods are thread-safe. .. note:: Evolving .. versionadded:: 2.0 """ def __init__(self, jsq): self._jsq = jsq @property @since(2.0) def id(self): """Returns the unique id of this query that persists across restarts from checkpoint data. That is, this id is generated when a query is started for the first time, and will be the same every time it is restarted from checkpoint data. There can only be one query with the same id active in a Spark cluster. Also see, `runId`. """ return self._jsq.id().toString() @property @since(2.1) def runId(self): """Returns the unique id of this query that does not persist across restarts. That is, every query that is started (or restarted from checkpoint) will have a different runId. """ return self._jsq.runId().toString() @property @since(2.0) def name(self): """Returns the user-specified name of the query, or null if not specified. This name can be specified in the `org.apache.spark.sql.streaming.DataStreamWriter` as `dataframe.writeStream.queryName("query").start()`. This name, if set, must be unique across all active queries. """ return self._jsq.name() @property @since(2.0) def isActive(self): """Whether this streaming query is currently active or not. """ return self._jsq.isActive() @since(2.0) def awaitTermination(self, timeout=None): """Waits for the termination of `this` query, either by :func:`query.stop()` or by an exception. If the query has terminated with an exception, then the exception will be thrown. If `timeout` is set, it returns whether the query has terminated or not within the `timeout` seconds. If the query has terminated, then all subsequent calls to this method will either return immediately (if the query was terminated by :func:`stop()`), or throw the exception immediately (if the query has terminated with exception). throws :class:`StreamingQueryException`, if `this` query has terminated with an exception """ if timeout is not None: if not isinstance(timeout, (int, float)) or timeout < 0: raise ValueError("timeout must be a positive integer or float. Got %s" % timeout) return self._jsq.awaitTermination(int(timeout * 1000)) else: return self._jsq.awaitTermination() @property @since(2.1) def status(self): """ Returns the current status of the query. """ return json.loads(self._jsq.status().json()) @property @since(2.1) def recentProgress(self): """Returns an array of the most recent [[StreamingQueryProgress]] updates for this query. The number of progress updates retained for each stream is configured by Spark session configuration `spark.sql.streaming.numRecentProgressUpdates`. """ return [json.loads(p.json()) for p in self._jsq.recentProgress()] @property @since(2.1) def lastProgress(self): """ Returns the most recent :class:`StreamingQueryProgress` update of this streaming query or None if there were no progress updates :return: a map """ lastProgress = self._jsq.lastProgress() if lastProgress: return json.loads(lastProgress.json()) else: return None @since(2.0) def processAllAvailable(self): """Blocks until all available data in the source has been processed and committed to the sink. This method is intended for testing. .. note:: In the case of continually arriving data, this method may block forever. Additionally, this method is only guaranteed to block until data that has been synchronously appended data to a stream source prior to invocation. (i.e. `getOffset` must immediately reflect the addition). """ return self._jsq.processAllAvailable() @since(2.0) def stop(self): """Stop this streaming query. """ self._jsq.stop() @since(2.1) def explain(self, extended=False): """Prints the (logical and physical) plans to the console for debugging purpose. :param extended: boolean, default ``False``. If ``False``, prints only the physical plan. >>> sq = sdf.writeStream.format('memory').queryName('query_explain').start() >>> sq.processAllAvailable() # Wait a bit to generate the runtime plans. >>> sq.explain() == Physical Plan == ... >>> sq.explain(True) == Parsed Logical Plan == ... == Analyzed Logical Plan == ... == Optimized Logical Plan == ... == Physical Plan == ... >>> sq.stop() """ # Cannot call `_jsq.explain(...)` because it will print in the JVM process. # We should print it in the Python process. print(self._jsq.explainInternal(extended)) @since(2.1) def exception(self): """ :return: the StreamingQueryException if the query was terminated by an exception, or None. """ if self._jsq.exception().isDefined(): je = self._jsq.exception().get() msg = je.toString().split(': ', 1)[1] # Drop the Java StreamingQueryException type info stackTrace = '\n\t at '.join(map(lambda x: x.toString(), je.getStackTrace())) return StreamingQueryException(msg, stackTrace) else: return None class StreamingQueryManager(object): """A class to manage all the :class:`StreamingQuery` StreamingQueries active. .. note:: Evolving .. versionadded:: 2.0 """ def __init__(self, jsqm): self._jsqm = jsqm @property @ignore_unicode_prefix @since(2.0) def active(self): """Returns a list of active queries associated with this SQLContext >>> sq = sdf.writeStream.format('memory').queryName('this_query').start() >>> sqm = spark.streams >>> # get the list of active streaming queries >>> [q.name for q in sqm.active] [u'this_query'] >>> sq.stop() """ return [StreamingQuery(jsq) for jsq in self._jsqm.active()] @ignore_unicode_prefix @since(2.0) def get(self, id): """Returns an active query from this SQLContext or throws exception if an active query with this name doesn't exist. >>> sq = sdf.writeStream.format('memory').queryName('this_query').start() >>> sq.name u'this_query' >>> sq = spark.streams.get(sq.id) >>> sq.isActive True >>> sq = sqlContext.streams.get(sq.id) >>> sq.isActive True >>> sq.stop() """ return StreamingQuery(self._jsqm.get(id)) @since(2.0) def awaitAnyTermination(self, timeout=None): """Wait until any of the queries on the associated SQLContext has terminated since the creation of the context, or since :func:`resetTerminated()` was called. If any query was terminated with an exception, then the exception will be thrown. If `timeout` is set, it returns whether the query has terminated or not within the `timeout` seconds. If a query has terminated, then subsequent calls to :func:`awaitAnyTermination()` will either return immediately (if the query was terminated by :func:`query.stop()`), or throw the exception immediately (if the query was terminated with exception). Use :func:`resetTerminated()` to clear past terminations and wait for new terminations. In the case where multiple queries have terminated since :func:`resetTermination()` was called, if any query has terminated with exception, then :func:`awaitAnyTermination()` will throw any of the exception. For correctly documenting exceptions across multiple queries, users need to stop all of them after any of them terminates with exception, and then check the `query.exception()` for each query. throws :class:`StreamingQueryException`, if `this` query has terminated with an exception """ if timeout is not None: if not isinstance(timeout, (int, float)) or timeout < 0: raise ValueError("timeout must be a positive integer or float. Got %s" % timeout) return self._jsqm.awaitAnyTermination(int(timeout * 1000)) else: return self._jsqm.awaitAnyTermination() @since(2.0) def resetTerminated(self): """Forget about past terminated queries so that :func:`awaitAnyTermination()` can be used again to wait for new terminations. >>> spark.streams.resetTerminated() """ self._jsqm.resetTerminated() class DataStreamReader(OptionUtils): """ Interface used to load a streaming :class:`DataFrame` from external storage systems (e.g. file systems, key-value stores, etc). Use :func:`spark.readStream` to access this. .. note:: Evolving. .. versionadded:: 2.0 """ def __init__(self, spark): self._jreader = spark._ssql_ctx.readStream() self._spark = spark def _df(self, jdf): from pyspark.sql.dataframe import DataFrame return DataFrame(jdf, self._spark) @since(2.0) def format(self, source): """Specifies the input data source format. .. note:: Evolving. :param source: string, name of the data source, e.g. 'json', 'parquet'. >>> s = spark.readStream.format("text") """ self._jreader = self._jreader.format(source) return self @since(2.0) def schema(self, schema): """Specifies the input schema. Some data sources (e.g. JSON) can infer the input schema automatically from data. By specifying the schema here, the underlying data source can skip the schema inference step, and thus speed up data loading. .. note:: Evolving. :param schema: a :class:`pyspark.sql.types.StructType` object or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``). >>> s = spark.readStream.schema(sdf_schema) >>> s = spark.readStream.schema("col0 INT, col1 DOUBLE") """ from pyspark.sql import SparkSession spark = SparkSession.builder.getOrCreate() if isinstance(schema, StructType): jschema = spark._jsparkSession.parseDataType(schema.json()) self._jreader = self._jreader.schema(jschema) elif isinstance(schema, basestring): self._jreader = self._jreader.schema(schema) else: raise TypeError("schema should be StructType or string") return self @since(2.0) def option(self, key, value): """Adds an input option for the underlying data source. You can set the following option(s) for reading files: * ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps in the JSON/CSV datasources or partition values. If it isn't set, it uses the default value, session local timezone. .. note:: Evolving. >>> s = spark.readStream.option("x", 1) """ self._jreader = self._jreader.option(key, to_str(value)) return self @since(2.0) def options(self, **options): """Adds input options for the underlying data source. You can set the following option(s) for reading files: * ``timeZone``: sets the string that indicates a timezone to be used to parse timestamps in the JSON/CSV datasources or partition values. If it isn't set, it uses the default value, session local timezone. .. note:: Evolving. >>> s = spark.readStream.options(x="1", y=2) """ for k in options: self._jreader = self._jreader.option(k, to_str(options[k])) return self @since(2.0) def load(self, path=None, format=None, schema=None, **options): """Loads a data stream from a data source and returns it as a :class`DataFrame`. .. note:: Evolving. :param path: optional string for file-system backed data sources. :param format: optional string for format of the data source. Default to 'parquet'. :param schema: optional :class:`pyspark.sql.types.StructType` for the input schema or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``). :param options: all other string options >>> json_sdf = spark.readStream.format("json") \\ ... .schema(sdf_schema) \\ ... .load(tempfile.mkdtemp()) >>> json_sdf.isStreaming True >>> json_sdf.schema == sdf_schema True """ if format is not None: self.format(format) if schema is not None: self.schema(schema) self.options(**options) if path is not None: if type(path) != str or len(path.strip()) == 0: raise ValueError("If the path is provided for stream, it needs to be a " + "non-empty string. List of paths are not supported.") return self._df(self._jreader.load(path)) else: return self._df(self._jreader.load()) @since(2.0) def json(self, path, schema=None, primitivesAsString=None, prefersDecimal=None, allowComments=None, allowUnquotedFieldNames=None, allowSingleQuotes=None, allowNumericLeadingZero=None, allowBackslashEscapingAnyCharacter=None, mode=None, columnNameOfCorruptRecord=None, dateFormat=None, timestampFormat=None, multiLine=None): """ Loads a JSON file stream and returns the results as a :class:`DataFrame`. `JSON Lines <http://jsonlines.org/>`_ (newline-delimited JSON) is supported by default. For JSON (one record per file), set the ``multiLine`` parameter to ``true``. If the ``schema`` parameter is not specified, this function goes through the input once to determine the input schema. .. note:: Evolving. :param path: string represents path to the JSON dataset, or RDD of Strings storing JSON objects. :param schema: an optional :class:`pyspark.sql.types.StructType` for the input schema or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``). :param primitivesAsString: infers all primitive values as a string type. If None is set, it uses the default value, ``false``. :param prefersDecimal: infers all floating-point values as a decimal type. If the values do not fit in decimal, then it infers them as doubles. If None is set, it uses the default value, ``false``. :param allowComments: ignores Java/C++ style comment in JSON records. If None is set, it uses the default value, ``false``. :param allowUnquotedFieldNames: allows unquoted JSON field names. If None is set, it uses the default value, ``false``. :param allowSingleQuotes: allows single quotes in addition to double quotes. If None is set, it uses the default value, ``true``. :param allowNumericLeadingZero: allows leading zeros in numbers (e.g. 00012). If None is set, it uses the default value, ``false``. :param allowBackslashEscapingAnyCharacter: allows accepting quoting of all character using backslash quoting mechanism. If None is set, it uses the default value, ``false``. :param mode: allows a mode for dealing with corrupt records during parsing. If None is set, it uses the default value, ``PERMISSIVE``. * ``PERMISSIVE`` : sets other fields to ``null`` when it meets a corrupted \ record, and puts the malformed string into a field configured by \ ``columnNameOfCorruptRecord``. To keep corrupt records, an user can set \ a string type field named ``columnNameOfCorruptRecord`` in an user-defined \ schema. If a schema does not have the field, it drops corrupt records during \ parsing. When inferring a schema, it implicitly adds a \ ``columnNameOfCorruptRecord`` field in an output schema. * ``DROPMALFORMED`` : ignores the whole corrupted records. * ``FAILFAST`` : throws an exception when it meets corrupted records. :param columnNameOfCorruptRecord: allows renaming the new field having malformed string created by ``PERMISSIVE`` mode. This overrides ``spark.sql.columnNameOfCorruptRecord``. If None is set, it uses the value specified in ``spark.sql.columnNameOfCorruptRecord``. :param dateFormat: sets the string that indicates a date format. Custom date formats follow the formats at ``java.text.SimpleDateFormat``. This applies to date type. If None is set, it uses the default value, ``yyyy-MM-dd``. :param timestampFormat: sets the string that indicates a timestamp format. Custom date formats follow the formats at ``java.text.SimpleDateFormat``. This applies to timestamp type. If None is set, it uses the default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``. :param multiLine: parse one record, which may span multiple lines, per file. If None is set, it uses the default value, ``false``. >>> json_sdf = spark.readStream.json(tempfile.mkdtemp(), schema = sdf_schema) >>> json_sdf.isStreaming True >>> json_sdf.schema == sdf_schema True """ self._set_opts( schema=schema, primitivesAsString=primitivesAsString, prefersDecimal=prefersDecimal, allowComments=allowComments, allowUnquotedFieldNames=allowUnquotedFieldNames, allowSingleQuotes=allowSingleQuotes, allowNumericLeadingZero=allowNumericLeadingZero, allowBackslashEscapingAnyCharacter=allowBackslashEscapingAnyCharacter, mode=mode, columnNameOfCorruptRecord=columnNameOfCorruptRecord, dateFormat=dateFormat, timestampFormat=timestampFormat, multiLine=multiLine) if isinstance(path, basestring): return self._df(self._jreader.json(path)) else: raise TypeError("path can be only a single string") @since(2.0) def parquet(self, path): """Loads a Parquet file stream, returning the result as a :class:`DataFrame`. You can set the following Parquet-specific option(s) for reading Parquet files: * ``mergeSchema``: sets whether we should merge schemas collected from all \ Parquet part-files. This will override ``spark.sql.parquet.mergeSchema``. \ The default value is specified in ``spark.sql.parquet.mergeSchema``. .. note:: Evolving. >>> parquet_sdf = spark.readStream.schema(sdf_schema).parquet(tempfile.mkdtemp()) >>> parquet_sdf.isStreaming True >>> parquet_sdf.schema == sdf_schema True """ if isinstance(path, basestring): return self._df(self._jreader.parquet(path)) else: raise TypeError("path can be only a single string") @ignore_unicode_prefix @since(2.0) def text(self, path): """ Loads a text file stream and returns a :class:`DataFrame` whose schema starts with a string column named "value", and followed by partitioned columns if there are any. Each line in the text file is a new row in the resulting DataFrame. .. note:: Evolving. :param paths: string, or list of strings, for input path(s). >>> text_sdf = spark.readStream.text(tempfile.mkdtemp()) >>> text_sdf.isStreaming True >>> "value" in str(text_sdf.schema) True """ if isinstance(path, basestring): return self._df(self._jreader.text(path)) else: raise TypeError("path can be only a single string") @since(2.0) def csv(self, path, schema=None, sep=None, encoding=None, quote=None, escape=None, comment=None, header=None, inferSchema=None, ignoreLeadingWhiteSpace=None, ignoreTrailingWhiteSpace=None, nullValue=None, nanValue=None, positiveInf=None, negativeInf=None, dateFormat=None, timestampFormat=None, maxColumns=None, maxCharsPerColumn=None, maxMalformedLogPerPartition=None, mode=None, columnNameOfCorruptRecord=None, multiLine=None): """Loads a CSV file stream and returns the result as a :class:`DataFrame`. This function will go through the input once to determine the input schema if ``inferSchema`` is enabled. To avoid going through the entire data once, disable ``inferSchema`` option or specify the schema explicitly using ``schema``. .. note:: Evolving. :param path: string, or list of strings, for input path(s). :param schema: an optional :class:`pyspark.sql.types.StructType` for the input schema or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``). :param sep: sets the single character as a separator for each field and value. If None is set, it uses the default value, ``,``. :param encoding: decodes the CSV files by the given encoding type. If None is set, it uses the default value, ``UTF-8``. :param quote: sets the single character used for escaping quoted values where the separator can be part of the value. If None is set, it uses the default value, ``"``. If you would like to turn off quotations, you need to set an empty string. :param escape: sets the single character used for escaping quotes inside an already quoted value. If None is set, it uses the default value, ``\``. :param comment: sets the single character used for skipping lines beginning with this character. By default (None), it is disabled. :param header: uses the first line as names of columns. If None is set, it uses the default value, ``false``. :param inferSchema: infers the input schema automatically from data. It requires one extra pass over the data. If None is set, it uses the default value, ``false``. :param ignoreLeadingWhiteSpace: a flag indicating whether or not leading whitespaces from values being read should be skipped. If None is set, it uses the default value, ``false``. :param ignoreTrailingWhiteSpace: a flag indicating whether or not trailing whitespaces from values being read should be skipped. If None is set, it uses the default value, ``false``. :param nullValue: sets the string representation of a null value. If None is set, it uses the default value, empty string. Since 2.0.1, this ``nullValue`` param applies to all supported types including the string type. :param nanValue: sets the string representation of a non-number value. If None is set, it uses the default value, ``NaN``. :param positiveInf: sets the string representation of a positive infinity value. If None is set, it uses the default value, ``Inf``. :param negativeInf: sets the string representation of a negative infinity value. If None is set, it uses the default value, ``Inf``. :param dateFormat: sets the string that indicates a date format. Custom date formats follow the formats at ``java.text.SimpleDateFormat``. This applies to date type. If None is set, it uses the default value, ``yyyy-MM-dd``. :param timestampFormat: sets the string that indicates a timestamp format. Custom date formats follow the formats at ``java.text.SimpleDateFormat``. This applies to timestamp type. If None is set, it uses the default value, ``yyyy-MM-dd'T'HH:mm:ss.SSSXXX``. :param maxColumns: defines a hard limit of how many columns a record can have. If None is set, it uses the default value, ``20480``. :param maxCharsPerColumn: defines the maximum number of characters allowed for any given value being read. If None is set, it uses the default value, ``-1`` meaning unlimited length. :param maxMalformedLogPerPartition: this parameter is no longer used since Spark 2.2.0. If specified, it is ignored. :param mode: allows a mode for dealing with corrupt records during parsing. If None is set, it uses the default value, ``PERMISSIVE``. * ``PERMISSIVE`` : sets other fields to ``null`` when it meets a corrupted \ record, and puts the malformed string into a field configured by \ ``columnNameOfCorruptRecord``. To keep corrupt records, an user can set \ a string type field named ``columnNameOfCorruptRecord`` in an \ user-defined schema. If a schema does not have the field, it drops corrupt \ records during parsing. When a length of parsed CSV tokens is shorter than \ an expected length of a schema, it sets `null` for extra fields. * ``DROPMALFORMED`` : ignores the whole corrupted records. * ``FAILFAST`` : throws an exception when it meets corrupted records. :param columnNameOfCorruptRecord: allows renaming the new field having malformed string created by ``PERMISSIVE`` mode. This overrides ``spark.sql.columnNameOfCorruptRecord``. If None is set, it uses the value specified in ``spark.sql.columnNameOfCorruptRecord``. :param multiLine: parse one record, which may span multiple lines. If None is set, it uses the default value, ``false``. >>> csv_sdf = spark.readStream.csv(tempfile.mkdtemp(), schema = sdf_schema) >>> csv_sdf.isStreaming True >>> csv_sdf.schema == sdf_schema True """ self._set_opts( schema=schema, sep=sep, encoding=encoding, quote=quote, escape=escape, comment=comment, header=header, inferSchema=inferSchema, ignoreLeadingWhiteSpace=ignoreLeadingWhiteSpace, ignoreTrailingWhiteSpace=ignoreTrailingWhiteSpace, nullValue=nullValue, nanValue=nanValue, positiveInf=positiveInf, negativeInf=negativeInf, dateFormat=dateFormat, timestampFormat=timestampFormat, maxColumns=maxColumns, maxCharsPerColumn=maxCharsPerColumn, maxMalformedLogPerPartition=maxMalformedLogPerPartition, mode=mode, columnNameOfCorruptRecord=columnNameOfCorruptRecord, multiLine=multiLine) if isinstance(path, basestring): return self._df(self._jreader.csv(path)) else: raise TypeError("path can be only a single string") class DataStreamWriter(object): """ Interface used to write a streaming :class:`DataFrame` to external storage systems (e.g. file systems, key-value stores, etc). Use :func:`DataFrame.writeStream` to access this. .. note:: Evolving. .. versionadded:: 2.0 """ def __init__(self, df): self._df = df self._spark = df.sql_ctx self._jwrite = df._jdf.writeStream() def _sq(self, jsq): from pyspark.sql.streaming import StreamingQuery return StreamingQuery(jsq) @since(2.0) def outputMode(self, outputMode): """Specifies how data of a streaming DataFrame/Dataset is written to a streaming sink. Options include: * `append`:Only the new rows in the streaming DataFrame/Dataset will be written to the sink * `complete`:All the rows in the streaming DataFrame/Dataset will be written to the sink every time these is some updates * `update`:only the rows that were updated in the streaming DataFrame/Dataset will be written to the sink every time there are some updates. If the query doesn't contain aggregations, it will be equivalent to `append` mode. .. note:: Evolving. >>> writer = sdf.writeStream.outputMode('append') """ if not outputMode or type(outputMode) != str or len(outputMode.strip()) == 0: raise ValueError('The output mode must be a non-empty string. Got: %s' % outputMode) self._jwrite = self._jwrite.outputMode(outputMode) return self @since(2.0) def format(self, source): """Specifies the underlying output data source. .. note:: Evolving. :param source: string, name of the data source, which for now can be 'parquet'. >>> writer = sdf.writeStream.format('json') """ self._jwrite = self._jwrite.format(source) return self @since(2.0) def option(self, key, value): """Adds an output option for the underlying data source. You can set the following option(s) for writing files: * ``timeZone``: sets the string that indicates a timezone to be used to format timestamps in the JSON/CSV datasources or partition values. If it isn't set, it uses the default value, session local timezone. .. note:: Evolving. """ self._jwrite = self._jwrite.option(key, to_str(value)) return self @since(2.0) def options(self, **options): """Adds output options for the underlying data source. You can set the following option(s) for writing files: * ``timeZone``: sets the string that indicates a timezone to be used to format timestamps in the JSON/CSV datasources or partition values. If it isn't set, it uses the default value, session local timezone. .. note:: Evolving. """ for k in options: self._jwrite = self._jwrite.option(k, to_str(options[k])) return self @since(2.0) def partitionBy(self, *cols): """Partitions the output by the given columns on the file system. If specified, the output is laid out on the file system similar to Hive's partitioning scheme. .. note:: Evolving. :param cols: name of columns """ if len(cols) == 1 and isinstance(cols[0], (list, tuple)): cols = cols[0] self._jwrite = self._jwrite.partitionBy(_to_seq(self._spark._sc, cols)) return self @since(2.0) def queryName(self, queryName): """Specifies the name of the :class:`StreamingQuery` that can be started with :func:`start`. This name must be unique among all the currently active queries in the associated SparkSession. .. note:: Evolving. :param queryName: unique name for the query >>> writer = sdf.writeStream.queryName('streaming_query') """ if not queryName or type(queryName) != str or len(queryName.strip()) == 0: raise ValueError('The queryName must be a non-empty string. Got: %s' % queryName) self._jwrite = self._jwrite.queryName(queryName) return self @keyword_only @since(2.0) def trigger(self, processingTime=None, once=None): """Set the trigger for the stream query. If this is not set it will run the query as fast as possible, which is equivalent to setting the trigger to ``processingTime='0 seconds'``. .. note:: Evolving. :param processingTime: a processing time interval as a string, e.g. '5 seconds', '1 minute'. >>> # trigger the query for execution every 5 seconds >>> writer = sdf.writeStream.trigger(processingTime='5 seconds') >>> # trigger the query for just once batch of data >>> writer = sdf.writeStream.trigger(once=True) """ jTrigger = None if processingTime is not None: if once is not None: raise ValueError('Multiple triggers not allowed.') if type(processingTime) != str or len(processingTime.strip()) == 0: raise ValueError('Value for processingTime must be a non empty string. Got: %s' % processingTime) interval = processingTime.strip() jTrigger = self._spark._sc._jvm.org.apache.spark.sql.streaming.Trigger.ProcessingTime( interval) elif once is not None: if once is not True: raise ValueError('Value for once must be True. Got: %s' % once) jTrigger = self._spark._sc._jvm.org.apache.spark.sql.streaming.Trigger.Once() else: raise ValueError('No trigger provided') self._jwrite = self._jwrite.trigger(jTrigger) return self @ignore_unicode_prefix @since(2.0) def start(self, path=None, format=None, outputMode=None, partitionBy=None, queryName=None, **options): """Streams the contents of the :class:`DataFrame` to a data source. The data source is specified by the ``format`` and a set of ``options``. If ``format`` is not specified, the default data source configured by ``spark.sql.sources.default`` will be used. .. note:: Evolving. :param path: the path in a Hadoop supported file system :param format: the format used to save :param outputMode: specifies how data of a streaming DataFrame/Dataset is written to a streaming sink. * `append`:Only the new rows in the streaming DataFrame/Dataset will be written to the sink * `complete`:All the rows in the streaming DataFrame/Dataset will be written to the sink every time these is some updates * `update`:only the rows that were updated in the streaming DataFrame/Dataset will be written to the sink every time there are some updates. If the query doesn't contain aggregations, it will be equivalent to `append` mode. :param partitionBy: names of partitioning columns :param queryName: unique name for the query :param options: All other string options. You may want to provide a `checkpointLocation` for most streams, however it is not required for a `memory` stream. >>> sq = sdf.writeStream.format('memory').queryName('this_query').start() >>> sq.isActive True >>> sq.name u'this_query' >>> sq.stop() >>> sq.isActive False >>> sq = sdf.writeStream.trigger(processingTime='5 seconds').start( ... queryName='that_query', outputMode="append", format='memory') >>> sq.name u'that_query' >>> sq.isActive True >>> sq.stop() """ self.options(**options) if outputMode is not None: self.outputMode(outputMode) if partitionBy is not None: self.partitionBy(partitionBy) if format is not None: self.format(format) if queryName is not None: self.queryName(queryName) if path is None: return self._sq(self._jwrite.start()) else: return self._sq(self._jwrite.start(path)) def _test(): import doctest import os import tempfile from pyspark.sql import Row, SparkSession, SQLContext import pyspark.sql.streaming os.chdir(os.environ["SPARK_HOME"]) globs = pyspark.sql.streaming.__dict__.copy() try: spark = SparkSession.builder.getOrCreate() except py4j.protocol.Py4JError: spark = SparkSession(sc) globs['tempfile'] = tempfile globs['os'] = os globs['spark'] = spark globs['sqlContext'] = SQLContext.getOrCreate(spark.sparkContext) globs['sdf'] = \ spark.readStream.format('text').load('python/test_support/sql/streaming') globs['sdf_schema'] = StructType([StructField("data", StringType(), False)]) globs['df'] = \ globs['spark'].readStream.format('text').load('python/test_support/sql/streaming') (failure_count, test_count) = doctest.testmod( pyspark.sql.streaming, globs=globs, optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE | doctest.REPORT_NDIFF) globs['spark'].stop() if failure_count: exit(-1) if __name__ == "__main__": _test()
cmorgan/zipline
refs/heads/master
zipline/utils/tradingcalendar_lse.py
35
# # Copyright 2013 Quantopian, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # References: # http://www.londonstockexchange.com # /about-the-exchange/company-overview/business-days/business-days.htm # http://en.wikipedia.org/wiki/Bank_holiday # http://www.adviceguide.org.uk/england/work_e/work_time_off_work_e/ # bank_and_public_holidays.htm import pytz import pandas as pd from datetime import datetime from dateutil import rrule from zipline.utils.tradingcalendar import end start = datetime(2002, 1, 1, tzinfo=pytz.utc) non_trading_rules = [] # Weekends weekends = rrule.rrule( rrule.YEARLY, byweekday=(rrule.SA, rrule.SU), cache=True, dtstart=start, until=end ) non_trading_rules.append(weekends) # New Year's Day new_year = rrule.rrule( rrule.MONTHLY, byyearday=1, cache=True, dtstart=start, until=end ) # If new years day is on Saturday then Monday 3rd is a holiday # If new years day is on Sunday then Monday 2nd is a holiday weekend_new_year = rrule.rrule( rrule.MONTHLY, bymonth=1, bymonthday=[2, 3], byweekday=(rrule.MO), cache=True, dtstart=start, until=end ) non_trading_rules.append(new_year) non_trading_rules.append(weekend_new_year) # Good Friday good_friday = rrule.rrule( rrule.DAILY, byeaster=-2, cache=True, dtstart=start, until=end ) non_trading_rules.append(good_friday) # Easter Monday easter_monday = rrule.rrule( rrule.DAILY, byeaster=1, cache=True, dtstart=start, until=end ) non_trading_rules.append(easter_monday) # Early May Bank Holiday (1st Monday in May) may_bank = rrule.rrule( rrule.MONTHLY, bymonth=5, byweekday=(rrule.MO(1)), cache=True, dtstart=start, until=end ) non_trading_rules.append(may_bank) # Spring Bank Holiday (Last Monday in May) spring_bank = rrule.rrule( rrule.MONTHLY, bymonth=5, byweekday=(rrule.MO(-1)), cache=True, dtstart=datetime(2003, 1, 1, tzinfo=pytz.utc), until=end ) non_trading_rules.append(spring_bank) # Summer Bank Holiday (Last Monday in August) summer_bank = rrule.rrule( rrule.MONTHLY, bymonth=8, byweekday=(rrule.MO(-1)), cache=True, dtstart=start, until=end ) non_trading_rules.append(summer_bank) # Christmas Day christmas = rrule.rrule( rrule.MONTHLY, bymonth=12, bymonthday=25, cache=True, dtstart=start, until=end ) # If christmas day is Saturday Monday 27th is a holiday # If christmas day is sunday the Tuesday 27th is a holiday weekend_christmas = rrule.rrule( rrule.MONTHLY, bymonth=12, bymonthday=27, byweekday=(rrule.MO, rrule.TU), cache=True, dtstart=start, until=end ) non_trading_rules.append(christmas) non_trading_rules.append(weekend_christmas) # Boxing Day boxing_day = rrule.rrule( rrule.MONTHLY, bymonth=12, bymonthday=26, cache=True, dtstart=start, until=end ) # If boxing day is saturday then Monday 28th is a holiday # If boxing day is sunday then Tuesday 28th is a holiday weekend_boxing_day = rrule.rrule( rrule.MONTHLY, bymonth=12, bymonthday=28, byweekday=(rrule.MO, rrule.TU), cache=True, dtstart=start, until=end ) non_trading_rules.append(boxing_day) non_trading_rules.append(weekend_boxing_day) non_trading_ruleset = rrule.rruleset() # In 2002 May bank holiday was moved to 4th June to follow the Queens # Golden Jubilee non_trading_ruleset.exdate(datetime(2002, 9, 27, tzinfo=pytz.utc)) non_trading_ruleset.rdate(datetime(2002, 6, 3, tzinfo=pytz.utc)) non_trading_ruleset.rdate(datetime(2002, 6, 4, tzinfo=pytz.utc)) # TODO: not sure why Feb 18 2008 is not available in the yahoo data non_trading_ruleset.rdate(datetime(2008, 2, 18, tzinfo=pytz.utc)) # In 2011 The Friday before Mayday was the Royal Wedding non_trading_ruleset.rdate(datetime(2011, 4, 29, tzinfo=pytz.utc)) # In 2012 May bank holiday was moved to 4th June to preceed the Queens # Diamond Jubilee non_trading_ruleset.exdate(datetime(2012, 5, 28, tzinfo=pytz.utc)) non_trading_ruleset.rdate(datetime(2012, 6, 4, tzinfo=pytz.utc)) non_trading_ruleset.rdate(datetime(2012, 6, 5, tzinfo=pytz.utc)) for rule in non_trading_rules: non_trading_ruleset.rrule(rule) non_trading_days = non_trading_ruleset.between(start, end, inc=True) non_trading_day_index = pd.DatetimeIndex(sorted(non_trading_days)) business_days = pd.DatetimeIndex(start=start, end=end, freq=pd.datetools.BDay()) trading_days = business_days.difference(non_trading_day_index)
ejconlon/bottle-bootstrap
refs/heads/master
bottle.py
6
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Bottle is a fast and simple micro-framework for small web applications. It offers request dispatching (Routes) with url parameter support, templates, a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and template engines - all in a single file and with no dependencies other than the Python Standard Library. Homepage and documentation: http://bottlepy.org/ Copyright (c) 2011, Marcel Hellkamp. License: MIT (see LICENSE.txt for details) """ from __future__ import with_statement __author__ = 'Marcel Hellkamp' __version__ = '0.10.9' __license__ = 'MIT' # The gevent server adapter needs to patch some modules before they are imported # This is why we parse the commandline parameters here but handle them later if __name__ == '__main__': from optparse import OptionParser _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app") _opt = _cmd_parser.add_option _opt("--version", action="store_true", help="show version number.") _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.") _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.") _opt("-p", "--plugin", action="append", help="install additional plugin/s.") _opt("--debug", action="store_true", help="start server in debug mode.") _opt("--reload", action="store_true", help="auto-reload on file changes.") _cmd_options, _cmd_args = _cmd_parser.parse_args() if _cmd_options.server and _cmd_options.server.startswith('gevent'): import gevent.monkey; gevent.monkey.patch_all() import sys import base64 import cgi import email.utils import functools import hmac import httplib import imp import itertools import mimetypes import os import re import subprocess import tempfile import thread import threading import time import warnings from Cookie import SimpleCookie from datetime import date as datedate, datetime, timedelta from tempfile import TemporaryFile from traceback import format_exc, print_exc from urlparse import urljoin, SplitResult as UrlSplitResult # Workaround for a bug in some versions of lib2to3 (fixed on CPython 2.7 and 3.2) import urllib urlencode = urllib.urlencode urlquote = urllib.quote urlunquote = urllib.unquote try: from collections import MutableMapping as DictMixin except ImportError: # pragma: no cover from UserDict import DictMixin try: from urlparse import parse_qsl except ImportError: # pragma: no cover from cgi import parse_qsl try: import cPickle as pickle except ImportError: # pragma: no cover import pickle try: from json import dumps as json_dumps, loads as json_lds except ImportError: # pragma: no cover try: from simplejson import dumps as json_dumps, loads as json_lds except ImportError: # pragma: no cover try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds except ImportError: # pragma: no cover def json_dumps(data): raise ImportError("JSON support requires Python 2.6 or simplejson.") json_lds = json_dumps py3k = sys.version_info >= (3,0,0) NCTextIOWrapper = None if sys.version_info < (2,6,0): msg = "Python 2.5 support may be dropped in future versions of Bottle." warnings.warn(msg, DeprecationWarning) if py3k: # pragma: no cover json_loads = lambda s: json_lds(touni(s)) # See Request.POST from io import BytesIO def touni(x, enc='utf8', err='strict'): """ Convert anything to unicode """ return str(x, enc, err) if isinstance(x, bytes) else str(x) if sys.version_info < (3,2,0): from io import TextIOWrapper class NCTextIOWrapper(TextIOWrapper): ''' Garbage collecting an io.TextIOWrapper(buffer) instance closes the wrapped buffer. This subclass keeps it open. ''' def close(self): pass else: json_loads = json_lds from StringIO import StringIO as BytesIO bytes = str def touni(x, enc='utf8', err='strict'): """ Convert anything to unicode """ return x if isinstance(x, unicode) else unicode(str(x), enc, err) def tob(data, enc='utf8'): """ Convert anything to bytes """ return data.encode(enc) if isinstance(data, unicode) else bytes(data) tonat = touni if py3k else tob tonat.__doc__ = """ Convert anything to native strings """ def try_update_wrapper(wrapper, wrapped, *a, **ka): try: # Bug: functools breaks if wrapper is an instane method functools.update_wrapper(wrapper, wrapped, *a, **ka) except AttributeError: pass # Backward compatibility def depr(message): warnings.warn(message, DeprecationWarning, stacklevel=3) # Small helpers def makelist(data): if isinstance(data, (tuple, list, set, dict)): return list(data) elif data: return [data] else: return [] class DictProperty(object): ''' Property that maps to a key in a local dict-like attribute. ''' def __init__(self, attr, key=None, read_only=False): self.attr, self.key, self.read_only = attr, key, read_only def __call__(self, func): functools.update_wrapper(self, func, updated=[]) self.getter, self.key = func, self.key or func.__name__ return self def __get__(self, obj, cls): if obj is None: return self key, storage = self.key, getattr(obj, self.attr) if key not in storage: storage[key] = self.getter(obj) return storage[key] def __set__(self, obj, value): if self.read_only: raise AttributeError("Read-Only property.") getattr(obj, self.attr)[self.key] = value def __delete__(self, obj): if self.read_only: raise AttributeError("Read-Only property.") del getattr(obj, self.attr)[self.key] class CachedProperty(object): ''' A property that is only computed once per instance and then replaces itself with an ordinary attribute. Deleting the attribute resets the property. ''' def __init__(self, func): self.func = func def __get__(self, obj, cls): if obj is None: return self value = obj.__dict__[self.func.__name__] = self.func(obj) return value cached_property = CachedProperty class lazy_attribute(object): # Does not need configuration -> lower-case name ''' A property that caches itself to the class object. ''' def __init__(self, func): functools.update_wrapper(self, func, updated=[]) self.getter = func def __get__(self, obj, cls): value = self.getter(cls) setattr(cls, self.__name__, value) return value ############################################################################### # Exceptions and Events ######################################################## ############################################################################### class BottleException(Exception): """ A base class for exceptions used by bottle. """ pass #TODO: These should subclass BaseRequest class HTTPResponse(BottleException): """ Used to break execution and immediately finish the response """ def __init__(self, output='', status=200, header=None): super(BottleException, self).__init__("HTTP Response %d" % status) self.status = int(status) self.output = output self.headers = HeaderDict(header) if header else None def apply(self, response): if self.headers: for key, value in self.headers.iterallitems(): response.headers[key] = value response.status = self.status class HTTPError(HTTPResponse): """ Used to generate an error page """ def __init__(self, code=500, output='Unknown Error', exception=None, traceback=None, header=None): super(HTTPError, self).__init__(output, code, header) self.exception = exception self.traceback = traceback def __repr__(self): return tonat(template(ERROR_PAGE_TEMPLATE, e=self)) ############################################################################### # Routing ###################################################################### ############################################################################### class RouteError(BottleException): """ This is a base class for all routing related exceptions """ class RouteReset(BottleException): """ If raised by a plugin or request handler, the route is reset and all plugins are re-applied. """ class RouterUnknownModeError(RouteError): pass class RouteSyntaxError(RouteError): """ The route parser found something not supported by this router """ class RouteBuildError(RouteError): """ The route could not been built """ class Router(object): ''' A Router is an ordered collection of route->target pairs. It is used to efficiently match WSGI requests against a number of routes and return the first target that satisfies the request. The target may be anything, usually a string, ID or callable object. A route consists of a path-rule and a HTTP method. The path-rule is either a static path (e.g. `/contact`) or a dynamic path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax and details on the matching order are described in docs:`routing`. ''' default_pattern = '[^/]+' default_filter = 're' #: Sorry for the mess. It works. Trust me. rule_syntax = re.compile('(\\\\*)'\ '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\ '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\ '(?::((?:\\\\.|[^\\\\>]+)+)?)?)?>))') def __init__(self, strict=False): self.rules = {} # A {rule: Rule} mapping self.builder = {} # A rule/name->build_info mapping self.static = {} # Cache for static routes: {path: {method: target}} self.dynamic = [] # Cache for dynamic routes. See _compile() #: If true, static routes are no longer checked first. self.strict_order = strict self.filters = {'re': self.re_filter, 'int': self.int_filter, 'float': self.float_filter, 'path': self.path_filter} def re_filter(self, conf): return conf or self.default_pattern, None, None def int_filter(self, conf): return r'-?\d+', int, lambda x: str(int(x)) def float_filter(self, conf): return r'-?[\d.]+', float, lambda x: str(float(x)) def path_filter(self, conf): return r'.*?', None, None def add_filter(self, name, func): ''' Add a filter. The provided function is called with the configuration string as parameter and must return a (regexp, to_python, to_url) tuple. The first element is a string, the last two are callables or None. ''' self.filters[name] = func def parse_rule(self, rule): ''' Parses a rule into a (name, filter, conf) token stream. If mode is None, name contains a static rule part. ''' offset, prefix = 0, '' for match in self.rule_syntax.finditer(rule): prefix += rule[offset:match.start()] g = match.groups() if len(g[0])%2: # Escaped wildcard prefix += match.group(0)[len(g[0]):] offset = match.end() continue if prefix: yield prefix, None, None name, filtr, conf = g[1:4] if not g[2] is None else g[4:7] if not filtr: filtr = self.default_filter yield name, filtr, conf or None offset, prefix = match.end(), '' if offset <= len(rule) or prefix: yield prefix+rule[offset:], None, None def add(self, rule, method, target, name=None): ''' Add a new route or replace the target for an existing route. ''' if rule in self.rules: self.rules[rule][method] = target if name: self.builder[name] = self.builder[rule] return target = self.rules[rule] = {method: target} # Build pattern and other structures for dynamic routes anons = 0 # Number of anonymous wildcards pattern = '' # Regular expression pattern filters = [] # Lists of wildcard input filters builder = [] # Data structure for the URL builder is_static = True for key, mode, conf in self.parse_rule(rule): if mode: is_static = False mask, in_filter, out_filter = self.filters[mode](conf) if key: pattern += '(?P<%s>%s)' % (key, mask) else: pattern += '(?:%s)' % mask key = 'anon%d' % anons; anons += 1 if in_filter: filters.append((key, in_filter)) builder.append((key, out_filter or str)) elif key: pattern += re.escape(key) builder.append((None, key)) self.builder[rule] = builder if name: self.builder[name] = builder if is_static and not self.strict_order: self.static[self.build(rule)] = target return def fpat_sub(m): return m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:' flat_pattern = re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', fpat_sub, pattern) try: re_match = re.compile('^(%s)$' % pattern).match except re.error, e: raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, e)) def match(path): """ Return an url-argument dictionary. """ url_args = re_match(path).groupdict() for name, wildcard_filter in filters: try: url_args[name] = wildcard_filter(url_args[name]) except ValueError: raise HTTPError(400, 'Path has wrong format.') return url_args try: combined = '%s|(^%s$)' % (self.dynamic[-1][0].pattern, flat_pattern) self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1]) self.dynamic[-1][1].append((match, target)) except (AssertionError, IndexError), e: # AssertionError: Too many groups self.dynamic.append((re.compile('(^%s$)' % flat_pattern), [(match, target)])) return match def build(self, _name, *anons, **query): ''' Build an URL by filling the wildcards in a rule. ''' builder = self.builder.get(_name) if not builder: raise RouteBuildError("No route with that name.", _name) try: for i, value in enumerate(anons): query['anon%d'%i] = value url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder]) return url if not query else url+'?'+urlencode(query) except KeyError, e: raise RouteBuildError('Missing URL argument: %r' % e.args[0]) def match(self, environ): ''' Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). ''' path, targets, urlargs = environ['PATH_INFO'] or '/', None, {} if path in self.static: targets = self.static[path] else: for combined, rules in self.dynamic: match = combined.match(path) if not match: continue getargs, targets = rules[match.lastindex - 1] urlargs = getargs(path) if getargs else {} break if not targets: raise HTTPError(404, "Not found: " + repr(environ['PATH_INFO'])) method = environ['REQUEST_METHOD'].upper() if method in targets: return targets[method], urlargs if method == 'HEAD' and 'GET' in targets: return targets['GET'], urlargs if 'ANY' in targets: return targets['ANY'], urlargs allowed = [verb for verb in targets if verb != 'ANY'] if 'GET' in allowed and 'HEAD' not in allowed: allowed.append('HEAD') raise HTTPError(405, "Method not allowed.", header=[('Allow',",".join(allowed))]) class Route(object): ''' This class wraps a route callback along with route specific metadata and configuration and applies Plugins on demand. It is also responsible for turing an URL path rule into a regular expression usable by the Router. ''' def __init__(self, app, rule, method, callback, name=None, plugins=None, skiplist=None, **config): #: The application this route is installed to. self.app = app #: The path-rule string (e.g. ``/wiki/:page``). self.rule = rule #: The HTTP method as a string (e.g. ``GET``). self.method = method #: The original callback with no plugins applied. Useful for introspection. self.callback = callback #: The name of the route (if specified) or ``None``. self.name = name or None #: A list of route-specific plugins (see :meth:`Bottle.route`). self.plugins = plugins or [] #: A list of plugins to not apply to this route (see :meth:`Bottle.route`). self.skiplist = skiplist or [] #: Additional keyword arguments passed to the :meth:`Bottle.route` #: decorator are stored in this dictionary. Used for route-specific #: plugin configuration and meta-data. self.config = ConfigDict(config) def __call__(self, *a, **ka): depr("Some APIs changed to return Route() instances instead of"\ " callables. Make sure to use the Route.call method and not to"\ " call Route instances directly.") return self.call(*a, **ka) @cached_property def call(self): ''' The route callback with all plugins applied. This property is created on demand and then cached to speed up subsequent requests.''' return self._make_callback() def reset(self): ''' Forget any cached values. The next time :attr:`call` is accessed, all plugins are re-applied. ''' self.__dict__.pop('call', None) def prepare(self): ''' Do all on-demand work immediately (useful for debugging).''' self.call @property def _context(self): depr('Switch to Plugin API v2 and access the Route object directly.') return dict(rule=self.rule, method=self.method, callback=self.callback, name=self.name, app=self.app, config=self.config, apply=self.plugins, skip=self.skiplist) def all_plugins(self): ''' Yield all Plugins affecting this route. ''' unique = set() for p in reversed(self.app.plugins + self.plugins): if True in self.skiplist: break name = getattr(p, 'name', False) if name and (name in self.skiplist or name in unique): continue if p in self.skiplist or type(p) in self.skiplist: continue if name: unique.add(name) yield p def _make_callback(self): callback = self.callback for plugin in self.all_plugins(): try: if hasattr(plugin, 'apply'): api = getattr(plugin, 'api', 1) context = self if api > 1 else self._context callback = plugin.apply(callback, context) else: callback = plugin(callback) except RouteReset: # Try again with changed configuration. return self._make_callback() if not callback is self.callback: try_update_wrapper(callback, self.callback) return callback ############################################################################### # Application Object ########################################################### ############################################################################### class Bottle(object): """ WSGI application """ def __init__(self, catchall=True, autojson=True, config=None): """ Create a new bottle instance. You usually don't do that. Use `bottle.app.push()` instead. """ self.routes = [] # List of installed :class:`Route` instances. self.router = Router() # Maps requests to :class:`Route` instances. self.plugins = [] # List of installed plugins. self.error_handler = {} #: If true, most exceptions are catched and returned as :exc:`HTTPError` self.config = ConfigDict(config or {}) self.catchall = catchall #: An instance of :class:`HooksPlugin`. Empty by default. self.hooks = HooksPlugin() self.install(self.hooks) if autojson: self.install(JSONPlugin()) self.install(TemplatePlugin()) def mount(self, prefix, app, **options): ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific URL prefix. Example:: root_app.mount('/admin/', admin_app) :param prefix: path prefix or `mount-point`. If it ends in a slash, that slash is mandatory. :param app: an instance of :class:`Bottle` or a WSGI application. All other parameters are passed to the underlying :meth:`route` call. ''' if isinstance(app, basestring): prefix, app = app, prefix depr('Parameter order of Bottle.mount() changed.') # 0.10 parts = filter(None, prefix.split('/')) if not parts: raise ValueError('Empty path prefix.') path_depth = len(parts) options.setdefault('skip', True) options.setdefault('method', 'ANY') @self.route('/%s/:#.*#' % '/'.join(parts), **options) def mountpoint(): try: request.path_shift(path_depth) rs = BaseResponse([], 200) def start_response(status, header): rs.status = status for name, value in header: rs.add_header(name, value) return rs.body.append rs.body = itertools.chain(rs.body, app(request.environ, start_response)) return HTTPResponse(rs.body, rs.status_code, rs.headers) finally: request.path_shift(-path_depth) if not prefix.endswith('/'): self.route('/' + '/'.join(parts), callback=mountpoint, **options) def install(self, plugin): ''' Add a plugin to the list of plugins and prepare it for being applied to all routes of this application. A plugin may be a simple decorator or an object that implements the :class:`Plugin` API. ''' if hasattr(plugin, 'setup'): plugin.setup(self) if not callable(plugin) and not hasattr(plugin, 'apply'): raise TypeError("Plugins must be callable or implement .apply()") self.plugins.append(plugin) self.reset() return plugin def uninstall(self, plugin): ''' Uninstall plugins. Pass an instance to remove a specific plugin, a type object to remove all plugins that match that type, a string to remove all plugins with a matching ``name`` attribute or ``True`` to remove all plugins. Return the list of removed plugins. ''' removed, remove = [], plugin for i, plugin in list(enumerate(self.plugins))[::-1]: if remove is True or remove is plugin or remove is type(plugin) \ or getattr(plugin, 'name', True) == remove: removed.append(plugin) del self.plugins[i] if hasattr(plugin, 'close'): plugin.close() if removed: self.reset() return removed def reset(self, route=None): ''' Reset all routes (force plugins to be re-applied) and clear all caches. If an ID or route object is given, only that specific route is affected. ''' if route is None: routes = self.routes elif isinstance(route, Route): routes = [route] else: routes = [self.routes[route]] for route in routes: route.reset() if DEBUG: for route in routes: route.prepare() self.hooks.trigger('app_reset') def close(self): ''' Close the application and all installed plugins. ''' for plugin in self.plugins: if hasattr(plugin, 'close'): plugin.close() self.stopped = True def match(self, environ): """ Search for a matching route and return a (:class:`Route` , urlargs) tuple. The second value is a dictionary with parameters extracted from the URL. Raise :exc:`HTTPError` (404/405) on a non-match.""" return self.router.match(environ) def get_url(self, routename, **kargs): """ Return a string that matches a named route """ scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/' location = self.router.build(routename, **kargs).lstrip('/') return urljoin(urljoin('/', scriptname), location) def route(self, path=None, method='GET', callback=None, name=None, apply=None, skip=None, **config): """ A decorator to bind a function to a request URL. Example:: @app.route('/hello/:name') def hello(name): return 'Hello %s' % name The ``:name`` part is a wildcard. See :class:`Router` for syntax details. :param path: Request path or a list of paths to listen to. If no path is specified, it is automatically generated from the signature of the function. :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of methods to listen to. (default: `GET`) :param callback: An optional shortcut to avoid the decorator syntax. ``route(..., callback=func)`` equals ``route(...)(func)`` :param name: The name for this route. (default: None) :param apply: A decorator or plugin or a list of plugins. These are applied to the route callback in addition to installed plugins. :param skip: A list of plugins, plugin classes or names. Matching plugins are not installed to this route. ``True`` skips all. Any additional keyword arguments are stored as route-specific configuration and passed to plugins (see :meth:`Plugin.apply`). """ if callable(path): path, callback = None, path plugins = makelist(apply) skiplist = makelist(skip) def decorator(callback): # TODO: Documentation and tests if isinstance(callback, basestring): callback = load(callback) for rule in makelist(path) or yieldroutes(callback): for verb in makelist(method): verb = verb.upper() route = Route(self, rule, verb, callback, name=name, plugins=plugins, skiplist=skiplist, **config) self.routes.append(route) self.router.add(rule, verb, route, name=name) if DEBUG: route.prepare() return callback return decorator(callback) if callback else decorator def get(self, path=None, method='GET', **options): """ Equals :meth:`route`. """ return self.route(path, method, **options) def post(self, path=None, method='POST', **options): """ Equals :meth:`route` with a ``POST`` method parameter. """ return self.route(path, method, **options) def put(self, path=None, method='PUT', **options): """ Equals :meth:`route` with a ``PUT`` method parameter. """ return self.route(path, method, **options) def delete(self, path=None, method='DELETE', **options): """ Equals :meth:`route` with a ``DELETE`` method parameter. """ return self.route(path, method, **options) def error(self, code=500): """ Decorator: Register an output handler for a HTTP error code""" def wrapper(handler): self.error_handler[int(code)] = handler return handler return wrapper def hook(self, name): """ Return a decorator that attaches a callback to a hook. """ def wrapper(func): self.hooks.add(name, func) return func return wrapper def handle(self, path, method='GET'): """ (deprecated) Execute the first matching route callback and return the result. :exc:`HTTPResponse` exceptions are catched and returned. If :attr:`Bottle.catchall` is true, other exceptions are catched as well and returned as :exc:`HTTPError` instances (500). """ depr("This method will change semantics in 0.10. Try to avoid it.") if isinstance(path, dict): return self._handle(path) return self._handle({'PATH_INFO': path, 'REQUEST_METHOD': method.upper()}) def _handle(self, environ): try: route, args = self.router.match(environ) environ['route.handle'] = environ['bottle.route'] = route environ['route.url_args'] = args return route.call(**args) except HTTPResponse, r: return r except RouteReset: route.reset() return self._handle(environ) except (KeyboardInterrupt, SystemExit, MemoryError): raise except Exception, e: if not self.catchall: raise stacktrace = format_exc(10) environ['wsgi.errors'].write(stacktrace) return HTTPError(500, "Internal Server Error", e, stacktrace) def _cast(self, out, request, response, peek=None): """ Try to convert the parameter into something WSGI compatible and set correct HTTP headers when possible. Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like, iterable of strings and iterable of unicodes """ # Empty output is done here if not out: response['Content-Length'] = 0 return [] # Join lists of byte or unicode strings. Mixed lists are NOT supported if isinstance(out, (tuple, list))\ and isinstance(out[0], (bytes, unicode)): out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' # Encode unicode strings if isinstance(out, unicode): out = out.encode(response.charset) # Byte Strings are just returned if isinstance(out, bytes): response['Content-Length'] = len(out) return [out] # HTTPError or HTTPException (recursive, because they may wrap anything) # TODO: Handle these explicitly in handle() or make them iterable. if isinstance(out, HTTPError): out.apply(response) out = self.error_handler.get(out.status, repr)(out) if isinstance(out, HTTPResponse): depr('Error handlers must not return :exc:`HTTPResponse`.') #0.9 return self._cast(out, request, response) if isinstance(out, HTTPResponse): out.apply(response) return self._cast(out.output, request, response) # File-like objects. if hasattr(out, 'read'): if 'wsgi.file_wrapper' in request.environ: return request.environ['wsgi.file_wrapper'](out) elif hasattr(out, 'close') or not hasattr(out, '__iter__'): return WSGIFileWrapper(out) # Handle Iterables. We peek into them to detect their inner type. try: out = iter(out) first = out.next() while not first: first = out.next() except StopIteration: return self._cast('', request, response) except HTTPResponse, e: first = e except Exception, e: first = HTTPError(500, 'Unhandled exception', e, format_exc(10)) if isinstance(e, (KeyboardInterrupt, SystemExit, MemoryError))\ or not self.catchall: raise # These are the inner types allowed in iterator or generator objects. if isinstance(first, HTTPResponse): return self._cast(first, request, response) if isinstance(first, bytes): return itertools.chain([first], out) if isinstance(first, unicode): return itertools.imap(lambda x: x.encode(response.charset), itertools.chain([first], out)) return self._cast(HTTPError(500, 'Unsupported response type: %s'\ % type(first)), request, response) def wsgi(self, environ, start_response): """ The bottle WSGI-interface. """ try: environ['bottle.app'] = self request.bind(environ) response.bind() out = self._cast(self._handle(environ), request, response) # rfc2616 section 4.3 if response._status_code in (100, 101, 204, 304)\ or request.method == 'HEAD': if hasattr(out, 'close'): out.close() out = [] start_response(response._status_line, list(response.iter_headers())) return out except (KeyboardInterrupt, SystemExit, MemoryError): raise except Exception, e: if not self.catchall: raise err = '<h1>Critical error while processing request: %s</h1>' \ % html_escape(environ.get('PATH_INFO', '/')) if DEBUG: err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \ '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \ % (html_escape(repr(e)), html_escape(format_exc(10))) environ['wsgi.errors'].write(err) headers = [('Content-Type', 'text/html; charset=UTF-8')] start_response('500 INTERNAL SERVER ERROR', headers) return [tob(err)] def __call__(self, environ, start_response): ''' Each instance of :class:'Bottle' is a WSGI application. ''' return self.wsgi(environ, start_response) ############################################################################### # HTTP and WSGI Tools ########################################################## ############################################################################### class BaseRequest(DictMixin): """ A wrapper for WSGI environment dictionaries that adds a lot of convenient access methods and properties. Most of them are read-only.""" #: Maximum size of memory buffer for :attr:`body` in bytes. MEMFILE_MAX = 102400 #: Maximum number pr GET or POST parameters per request MAX_PARAMS = 100 def __init__(self, environ): """ Wrap a WSGI environ dictionary. """ #: The wrapped WSGI environ dictionary. This is the only real attribute. #: All other attributes actually are read-only properties. self.environ = environ environ['bottle.request'] = self @property def path(self): ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix broken clients and avoid the "empty path" edge case). ''' return '/' + self.environ.get('PATH_INFO','').lstrip('/') @property def method(self): ''' The ``REQUEST_METHOD`` value as an uppercase string. ''' return self.environ.get('REQUEST_METHOD', 'GET').upper() @DictProperty('environ', 'bottle.request.headers', read_only=True) def headers(self): ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to HTTP request headers. ''' return WSGIHeaderDict(self.environ) def get_header(self, name, default=None): ''' Return the value of a request header, or a given default value. ''' return self.headers.get(name, default) @DictProperty('environ', 'bottle.request.cookies', read_only=True) def cookies(self): """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT decoded. Use :meth:`get_cookie` if you expect signed cookies. """ cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')) cookies = list(cookies.values())[:self.MAX_PARAMS] return FormsDict((c.key, c.value) for c in cookies) def get_cookie(self, key, default=None, secret=None): """ Return the content of a cookie. To read a `Signed Cookie`, the `secret` must match the one used to create the cookie (see :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing cookie or wrong signature), return a default value. """ value = self.cookies.get(key) if secret and value: dec = cookie_decode(value, secret) # (key, value) tuple or None return dec[1] if dec and dec[0] == key else default return value or default @DictProperty('environ', 'bottle.request.query', read_only=True) def query(self): ''' The :attr:`query_string` parsed into a :class:`FormsDict`. These values are sometimes called "URL arguments" or "GET parameters", but not to be confused with "URL wildcards" as they are provided by the :class:`Router`. ''' pairs = parse_qsl(self.query_string, keep_blank_values=True) get = self.environ['bottle.get'] = FormsDict() for key, value in pairs[:self.MAX_PARAMS]: get[key] = value return get @DictProperty('environ', 'bottle.request.forms', read_only=True) def forms(self): """ Form values parsed from an `url-encoded` or `multipart/form-data` encoded POST or PUT request body. The result is retuned as a :class:`FormsDict`. All keys and values are strings. File uploads are stored separately in :attr:`files`. """ forms = FormsDict() for name, item in self.POST.iterallitems(): if not hasattr(item, 'filename'): forms[name] = item return forms @DictProperty('environ', 'bottle.request.params', read_only=True) def params(self): """ A :class:`FormsDict` with the combined values of :attr:`query` and :attr:`forms`. File uploads are stored in :attr:`files`. """ params = FormsDict() for key, value in self.query.iterallitems(): params[key] = value for key, value in self.forms.iterallitems(): params[key] = value return params @DictProperty('environ', 'bottle.request.files', read_only=True) def files(self): """ File uploads parsed from an `url-encoded` or `multipart/form-data` encoded POST or PUT request body. The values are instances of :class:`cgi.FieldStorage`. The most important attributes are: filename The filename, if specified; otherwise None; this is the client side filename, *not* the file name on which it is stored (that's a temporary file you don't deal with) file The file(-like) object from which you can read the data. value The value as a *string*; for file uploads, this transparently reads the file every time you request the value. Do not do this on big files. """ files = FormsDict() for name, item in self.POST.iterallitems(): if hasattr(item, 'filename'): files[name] = item return files @DictProperty('environ', 'bottle.request.json', read_only=True) def json(self): ''' If the ``Content-Type`` header is ``application/json``, this property holds the parsed content of the request body. Only requests smaller than :attr:`MEMFILE_MAX` are processed to avoid memory exhaustion. ''' if 'application/json' in self.environ.get('CONTENT_TYPE', '') \ and 0 < self.content_length < self.MEMFILE_MAX: return json_loads(self.body.read(self.MEMFILE_MAX)) return None @DictProperty('environ', 'bottle.request.body', read_only=True) def _body(self): maxread = max(0, self.content_length) stream = self.environ['wsgi.input'] body = BytesIO() if maxread < self.MEMFILE_MAX else TemporaryFile(mode='w+b') while maxread > 0: part = stream.read(min(maxread, self.MEMFILE_MAX)) if not part: break body.write(part) maxread -= len(part) self.environ['wsgi.input'] = body body.seek(0) return body @property def body(self): """ The HTTP request body as a seek-able file-like object. Depending on :attr:`MEMFILE_MAX`, this is either a temporary file or a :class:`io.BytesIO` instance. Accessing this property for the first time reads and replaces the ``wsgi.input`` environ variable. Subsequent accesses just do a `seek(0)` on the file object. """ self._body.seek(0) return self._body #: An alias for :attr:`query`. GET = query @DictProperty('environ', 'bottle.request.post', read_only=True) def POST(self): """ The values of :attr:`forms` and :attr:`files` combined into a single :class:`FormsDict`. Values are either strings (form values) or instances of :class:`cgi.FieldStorage` (file uploads). """ post = FormsDict() safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'): if key in self.environ: safe_env[key] = self.environ[key] if NCTextIOWrapper: fb = NCTextIOWrapper(self.body, encoding='ISO-8859-1', newline='\n') else: fb = self.body data = cgi.FieldStorage(fp=fb, environ=safe_env, keep_blank_values=True) for item in (data.list or [])[:self.MAX_PARAMS]: post[item.name] = item if item.filename else item.value return post @property def COOKIES(self): ''' Alias for :attr:`cookies` (deprecated). ''' depr('BaseRequest.COOKIES was renamed to BaseRequest.cookies (lowercase).') return self.cookies @property def url(self): """ The full request URI including hostname and scheme. If your app lives behind a reverse proxy or load balancer and you get confusing results, make sure that the ``X-Forwarded-Host`` header is set correctly. """ return self.urlparts.geturl() @DictProperty('environ', 'bottle.request.urlparts', read_only=True) def urlparts(self): ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. The tuple contains (scheme, host, path, query_string and fragment), but the fragment is always empty because it is not visible to the server. ''' env = self.environ http = env.get('wsgi.url_scheme', 'http') host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST') if not host: # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. host = env.get('SERVER_NAME', '127.0.0.1') port = env.get('SERVER_PORT') if port and port != ('80' if http == 'http' else '443'): host += ':' + port path = urlquote(self.fullpath) return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '') @property def fullpath(self): """ Request path including :attr:`script_name` (if present). """ return urljoin(self.script_name, self.path.lstrip('/')) @property def query_string(self): """ The raw :attr:`query` part of the URL (everything in between ``?`` and ``#``) as a string. """ return self.environ.get('QUERY_STRING', '') @property def script_name(self): ''' The initial portion of the URL's `path` that was removed by a higher level (server or routing middleware) before the application was called. This script path is returned with leading and tailing slashes. ''' script_name = self.environ.get('SCRIPT_NAME', '').strip('/') return '/' + script_name + '/' if script_name else '/' def path_shift(self, shift=1): ''' Shift path segments from :attr:`path` to :attr:`script_name` and vice versa. :param shift: The number of path segments to shift. May be negative to change the shift direction. (default: 1) ''' script = self.environ.get('SCRIPT_NAME','/') self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift) @property def content_length(self): ''' The request body length as an integer. The client is responsible to set this header. Otherwise, the real length of the body is unknown and -1 is returned. In this case, :attr:`body` will be empty. ''' return int(self.environ.get('CONTENT_LENGTH') or -1) @property def is_xhr(self): ''' True if the request was triggered by a XMLHttpRequest. This only works with JavaScript libraries that support the `X-Requested-With` header (most of the popular libraries do). ''' requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','') return requested_with.lower() == 'xmlhttprequest' @property def is_ajax(self): ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. ''' return self.is_xhr @property def auth(self): """ HTTP authentication data as a (user, password) tuple. This implementation currently supports basic (not digest) authentication only. If the authentication happened at a higher level (e.g. in the front web-server or a middleware), the password field is None, but the user field is looked up from the ``REMOTE_USER`` environ variable. On any errors, None is returned. """ basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION','')) if basic: return basic ruser = self.environ.get('REMOTE_USER') if ruser: return (ruser, None) return None @property def remote_route(self): """ A list of all IPs that were involved in this request, starting with the client IP and followed by zero or more proxies. This does only work if all proxies support the ```X-Forwarded-For`` header. Note that this information can be forged by malicious clients. """ proxy = self.environ.get('HTTP_X_FORWARDED_FOR') if proxy: return [ip.strip() for ip in proxy.split(',')] remote = self.environ.get('REMOTE_ADDR') return [remote] if remote else [] @property def remote_addr(self): """ The client IP as a string. Note that this information can be forged by malicious clients. """ route = self.remote_route return route[0] if route else None def copy(self): """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """ return Request(self.environ.copy()) def __getitem__(self, key): return self.environ[key] def __delitem__(self, key): self[key] = ""; del(self.environ[key]) def __iter__(self): return iter(self.environ) def __len__(self): return len(self.environ) def keys(self): return self.environ.keys() def __setitem__(self, key, value): """ Change an environ value and clear all caches that depend on it. """ if self.environ.get('bottle.request.readonly'): raise KeyError('The environ dictionary is read-only.') self.environ[key] = value todelete = () if key == 'wsgi.input': todelete = ('body', 'forms', 'files', 'params', 'post', 'json') elif key == 'QUERY_STRING': todelete = ('query', 'params') elif key.startswith('HTTP_'): todelete = ('headers', 'cookies') for key in todelete: self.environ.pop('bottle.request.'+key, None) def __repr__(self): return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url) def _hkey(s): return s.title().replace('_','-') class HeaderProperty(object): def __init__(self, name, reader=None, writer=str, default=''): self.name, self.reader, self.writer, self.default = name, reader, writer, default self.__doc__ = 'Current value of the %r header.' % name.title() def __get__(self, obj, cls): if obj is None: return self value = obj.headers.get(self.name) return self.reader(value) if (value and self.reader) else (value or self.default) def __set__(self, obj, value): if self.writer: value = self.writer(value) obj.headers[self.name] = value def __delete__(self, obj): if self.name in obj.headers: del obj.headers[self.name] class BaseResponse(object): """ Storage class for a response body as well as headers and cookies. This class does support dict-like case-insensitive item-access to headers, but is NOT a dict. Most notably, iterating over a response yields parts of the body and not the headers. """ default_status = 200 default_content_type = 'text/html; charset=UTF-8' # Header blacklist for specific response codes # (rfc2616 section 10.2.3 and 10.3.5) bad_headers = { 204: set(('Content-Type',)), 304: set(('Allow', 'Content-Encoding', 'Content-Language', 'Content-Length', 'Content-Range', 'Content-Type', 'Content-Md5', 'Last-Modified'))} def __init__(self, body='', status=None, **headers): self._status_line = None self._status_code = None self.body = body self._cookies = None self._headers = {'Content-Type': [self.default_content_type]} self.status = status or self.default_status if headers: for name, value in headers.items(): self[name] = value def copy(self): ''' Returns a copy of self. ''' copy = Response() copy.status = self.status copy._headers = dict((k, v[:]) for (k, v) in self._headers.items()) return copy def __iter__(self): return iter(self.body) def close(self): if hasattr(self.body, 'close'): self.body.close() @property def status_line(self): ''' The HTTP status line as a string (e.g. ``404 Not Found``).''' return self._status_line @property def status_code(self): ''' The HTTP status code as an integer (e.g. 404).''' return self._status_code def _set_status(self, status): if isinstance(status, int): code, status = status, _HTTP_STATUS_LINES.get(status) elif ' ' in status: status = status.strip() code = int(status.split()[0]) else: raise ValueError('String status line without a reason phrase.') if not 100 <= code <= 999: raise ValueError('Status code out of range.') self._status_code = code self._status_line = status or ('%d Unknown' % code) def _get_status(self): depr('BaseRequest.status will change to return a string in 0.11. Use'\ ' status_line and status_code to make sure.') #0.10 return self._status_code status = property(_get_status, _set_status, None, ''' A writeable property to change the HTTP response status. It accepts either a numeric code (100-999) or a string with a custom reason phrase (e.g. "404 Brain not found"). Both :data:`status_line` and :data:`status_code` are updates accordingly. The return value is always a numeric code. ''') del _get_status, _set_status @property def headers(self): ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like view on the response headers. ''' self.__dict__['headers'] = hdict = HeaderDict() hdict.dict = self._headers return hdict def __contains__(self, name): return _hkey(name) in self._headers def __delitem__(self, name): del self._headers[_hkey(name)] def __getitem__(self, name): return self._headers[_hkey(name)][-1] def __setitem__(self, name, value): self._headers[_hkey(name)] = [str(value)] def get_header(self, name, default=None): ''' Return the value of a previously defined header. If there is no header with that name, return a default value. ''' return self._headers.get(_hkey(name), [default])[-1] def set_header(self, name, value, append=False): ''' Create a new response header, replacing any previously defined headers with the same name. ''' if append: self.add_header(name, value) else: self._headers[_hkey(name)] = [str(value)] def add_header(self, name, value): ''' Add an additional response header, not removing duplicates. ''' self._headers.setdefault(_hkey(name), []).append(str(value)) def iter_headers(self): ''' Yield (header, value) tuples, skipping headers that are not allowed with the current response status code. ''' headers = self._headers.iteritems() bad_headers = self.bad_headers.get(self.status_code) if bad_headers: headers = [h for h in headers if h[0] not in bad_headers] for name, values in headers: for value in values: yield name, value if self._cookies: for c in self._cookies.values(): yield 'Set-Cookie', c.OutputString() def wsgiheader(self): depr('The wsgiheader method is deprecated. See headerlist.') #0.10 return self.headerlist @property def headerlist(self): ''' WSGI conform list of (header, value) tuples. ''' return list(self.iter_headers()) content_type = HeaderProperty('Content-Type') content_length = HeaderProperty('Content-Length', reader=int) @property def charset(self): """ Return the charset specified in the content-type header (default: utf8). """ if 'charset=' in self.content_type: return self.content_type.split('charset=')[-1].split(';')[0].strip() return 'UTF-8' @property def COOKIES(self): """ A dict-like SimpleCookie instance. This should not be used directly. See :meth:`set_cookie`. """ depr('The COOKIES dict is deprecated. Use `set_cookie()` instead.') # 0.10 if not self._cookies: self._cookies = SimpleCookie() return self._cookies def set_cookie(self, name, value, secret=None, **options): ''' Create a new cookie or replace an old one. If the `secret` parameter is set, create a `Signed Cookie` (described below). :param name: the name of the cookie. :param value: the value of the cookie. :param secret: a signature key required for signed cookies. Additionally, this method accepts all RFC 2109 attributes that are supported by :class:`cookie.Morsel`, including: :param max_age: maximum age in seconds. (default: None) :param expires: a datetime object or UNIX timestamp. (default: None) :param domain: the domain that is allowed to read the cookie. (default: current domain) :param path: limits the cookie to a given path (default: current path) :param secure: limit the cookie to HTTPS connections (default: off). :param httponly: prevents client-side javascript to read this cookie (default: off, requires Python 2.6 or newer). If neither `expires` nor `max_age` is set (default), the cookie will expire at the end of the browser session (as soon as the browser window is closed). Signed cookies may store any pickle-able object and are cryptographically signed to prevent manipulation. Keep in mind that cookies are limited to 4kb in most browsers. Warning: Signed cookies are not encrypted (the client can still see the content) and not copy-protected (the client can restore an old cookie). The main intention is to make pickling and unpickling save, not to store secret information at client side. ''' if not self._cookies: self._cookies = SimpleCookie() if secret: value = touni(cookie_encode((name, value), secret)) elif not isinstance(value, basestring): raise TypeError('Secret key missing for non-string Cookie.') if len(value) > 4096: raise ValueError('Cookie value to long.') self._cookies[name] = value for key, value in options.iteritems(): if key == 'max_age': if isinstance(value, timedelta): value = value.seconds + value.days * 24 * 3600 if key == 'expires': if isinstance(value, (datedate, datetime)): value = value.timetuple() elif isinstance(value, (int, float)): value = time.gmtime(value) value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) self._cookies[name][key.replace('_', '-')] = value def delete_cookie(self, key, **kwargs): ''' Delete a cookie. Be sure to use the same `domain` and `path` settings as used to create the cookie. ''' kwargs['max_age'] = -1 kwargs['expires'] = 0 self.set_cookie(key, '', **kwargs) def __repr__(self): out = '' for name, value in self.headerlist: out += '%s: %s\n' % (name.title(), value.strip()) return out class LocalRequest(BaseRequest, threading.local): ''' A thread-local subclass of :class:`BaseRequest`. ''' def __init__(self): pass bind = BaseRequest.__init__ class LocalResponse(BaseResponse, threading.local): ''' A thread-local subclass of :class:`BaseResponse`. ''' bind = BaseResponse.__init__ Response = LocalResponse # BC 0.9 Request = LocalRequest # BC 0.9 ############################################################################### # Plugins ###################################################################### ############################################################################### class PluginError(BottleException): pass class JSONPlugin(object): name = 'json' api = 2 def __init__(self, json_dumps=json_dumps): self.json_dumps = json_dumps def apply(self, callback, context): dumps = self.json_dumps if not dumps: return callback def wrapper(*a, **ka): rv = callback(*a, **ka) if isinstance(rv, dict): #Attempt to serialize, raises exception on failure json_response = dumps(rv) #Set content type only if serialization succesful response.content_type = 'application/json' return json_response return rv return wrapper class HooksPlugin(object): name = 'hooks' api = 2 _names = 'before_request', 'after_request', 'app_reset' def __init__(self): self.hooks = dict((name, []) for name in self._names) self.app = None def _empty(self): return not (self.hooks['before_request'] or self.hooks['after_request']) def setup(self, app): self.app = app def add(self, name, func): ''' Attach a callback to a hook. ''' was_empty = self._empty() self.hooks.setdefault(name, []).append(func) if self.app and was_empty and not self._empty(): self.app.reset() def remove(self, name, func): ''' Remove a callback from a hook. ''' was_empty = self._empty() if name in self.hooks and func in self.hooks[name]: self.hooks[name].remove(func) if self.app and not was_empty and self._empty(): self.app.reset() def trigger(self, name, *a, **ka): ''' Trigger a hook and return a list of results. ''' hooks = self.hooks[name] if ka.pop('reversed', False): hooks = hooks[::-1] return [hook(*a, **ka) for hook in hooks] def apply(self, callback, context): if self._empty(): return callback def wrapper(*a, **ka): self.trigger('before_request') rv = callback(*a, **ka) self.trigger('after_request', reversed=True) return rv return wrapper class TemplatePlugin(object): ''' This plugin applies the :func:`view` decorator to all routes with a `template` config parameter. If the parameter is a tuple, the second element must be a dict with additional options (e.g. `template_engine`) or default variables for the template. ''' name = 'template' api = 2 def apply(self, callback, route): conf = route.config.get('template') if isinstance(conf, (tuple, list)) and len(conf) == 2: return view(conf[0], **conf[1])(callback) elif isinstance(conf, str) and 'template_opts' in route.config: depr('The `template_opts` parameter is deprecated.') #0.9 return view(conf, **route.config['template_opts'])(callback) elif isinstance(conf, str): return view(conf)(callback) else: return callback #: Not a plugin, but part of the plugin API. TODO: Find a better place. class _ImportRedirect(object): def __init__(self, name, impmask): ''' Create a virtual package that redirects imports (see PEP 302). ''' self.name = name self.impmask = impmask self.module = sys.modules.setdefault(name, imp.new_module(name)) self.module.__dict__.update({'__file__': __file__, '__path__': [], '__all__': [], '__loader__': self}) sys.meta_path.append(self) def find_module(self, fullname, path=None): if '.' not in fullname: return packname, modname = fullname.rsplit('.', 1) if packname != self.name: return return self def load_module(self, fullname): if fullname in sys.modules: return sys.modules[fullname] packname, modname = fullname.rsplit('.', 1) realname = self.impmask % modname __import__(realname) module = sys.modules[fullname] = sys.modules[realname] setattr(self.module, modname, module) module.__loader__ = self return module ############################################################################### # Common Utilities ############################################################# ############################################################################### class MultiDict(DictMixin): """ This dict stores multiple values per key, but behaves exactly like a normal dict in that it returns only the newest value for any given key. There are special methods available to access the full list of values. """ def __init__(self, *a, **k): self.dict = dict((k, [v]) for k, v in dict(*a, **k).iteritems()) def __len__(self): return len(self.dict) def __iter__(self): return iter(self.dict) def __contains__(self, key): return key in self.dict def __delitem__(self, key): del self.dict[key] def __getitem__(self, key): return self.dict[key][-1] def __setitem__(self, key, value): self.append(key, value) def iterkeys(self): return self.dict.iterkeys() def itervalues(self): return (v[-1] for v in self.dict.itervalues()) def iteritems(self): return ((k, v[-1]) for (k, v) in self.dict.iteritems()) def iterallitems(self): for key, values in self.dict.iteritems(): for value in values: yield key, value # 2to3 is not able to fix these automatically. keys = iterkeys if py3k else lambda self: list(self.iterkeys()) values = itervalues if py3k else lambda self: list(self.itervalues()) items = iteritems if py3k else lambda self: list(self.iteritems()) allitems = iterallitems if py3k else lambda self: list(self.iterallitems()) def get(self, key, default=None, index=-1, type=None): ''' Return the most recent value for a key. :param default: The default value to be returned if the key is not present or the type conversion fails. :param index: An index for the list of available values. :param type: If defined, this callable is used to cast the value into a specific type. Exception are suppressed and result in the default value to be returned. ''' try: val = self.dict[key][index] return type(val) if type else val except Exception, e: pass return default def append(self, key, value): ''' Add a new value to the list of values for this key. ''' self.dict.setdefault(key, []).append(value) def replace(self, key, value): ''' Replace the list of values with a single value. ''' self.dict[key] = [value] def getall(self, key): ''' Return a (possibly empty) list of values for a key. ''' return self.dict.get(key) or [] #: Aliases for WTForms to mimic other multi-dict APIs (Django) getone = get getlist = getall class FormsDict(MultiDict): ''' This :class:`MultiDict` subclass is used to store request form data. Additionally to the normal dict-like item access methods (which return unmodified data as native strings), this container also supports attribute-like access to its values. Attribues are automatiically de- or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing attributes default to an empty string. ''' #: Encoding used for attribute values. input_encoding = 'utf8' def getunicode(self, name, default=None, encoding=None): value, enc = self.get(name, default), encoding or self.input_encoding try: if isinstance(value, bytes): # Python 2 WSGI return value.decode(enc) elif isinstance(value, unicode): # Python 3 WSGI return value.encode('latin1').decode(enc) return value except UnicodeError, e: return default def __getattr__(self, name): return self.getunicode(name, default=u'') class HeaderDict(MultiDict): """ A case-insensitive version of :class:`MultiDict` that defaults to replace the old value instead of appending it. """ def __init__(self, *a, **ka): self.dict = {} if a or ka: self.update(*a, **ka) def __contains__(self, key): return _hkey(key) in self.dict def __delitem__(self, key): del self.dict[_hkey(key)] def __getitem__(self, key): return self.dict[_hkey(key)][-1] def __setitem__(self, key, value): self.dict[_hkey(key)] = [str(value)] def append(self, key, value): self.dict.setdefault(_hkey(key), []).append(str(value)) def replace(self, key, value): self.dict[_hkey(key)] = [str(value)] def getall(self, key): return self.dict.get(_hkey(key)) or [] def get(self, key, default=None, index=-1): return MultiDict.get(self, _hkey(key), default, index) def filter(self, names): for name in map(_hkey, names): if name in self.dict: del self.dict[name] class WSGIHeaderDict(DictMixin): ''' This dict-like class wraps a WSGI environ dict and provides convenient access to HTTP_* fields. Keys and values are native strings (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI environment contains non-native string values, these are de- or encoded using a lossless 'latin1' character set. The API will remain stable even on changes to the relevant PEPs. Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one that uses non-native strings.) ''' #: List of keys that do not have a 'HTTP_' prefix. cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH') def __init__(self, environ): self.environ = environ def _ekey(self, key): ''' Translate header field name to CGI/WSGI environ key. ''' key = key.replace('-','_').upper() if key in self.cgikeys: return key return 'HTTP_' + key def raw(self, key, default=None): ''' Return the header value as is (may be bytes or unicode). ''' return self.environ.get(self._ekey(key), default) def __getitem__(self, key): return tonat(self.environ[self._ekey(key)], 'latin1') def __setitem__(self, key, value): raise TypeError("%s is read-only." % self.__class__) def __delitem__(self, key): raise TypeError("%s is read-only." % self.__class__) def __iter__(self): for key in self.environ: if key[:5] == 'HTTP_': yield key[5:].replace('_', '-').title() elif key in self.cgikeys: yield key.replace('_', '-').title() def keys(self): return [x for x in self] def __len__(self): return len(self.keys()) def __contains__(self, key): return self._ekey(key) in self.environ class ConfigDict(dict): ''' A dict-subclass with some extras: You can access keys like attributes. Uppercase attributes create new ConfigDicts and act as name-spaces. Other missing attributes return None. Calling a ConfigDict updates its values and returns itself. >>> cfg = ConfigDict() >>> cfg.Namespace.value = 5 >>> cfg.OtherNamespace(a=1, b=2) >>> cfg {'Namespace': {'value': 5}, 'OtherNamespace': {'a': 1, 'b': 2}} ''' def __getattr__(self, key): if key not in self and key[0].isupper(): self[key] = ConfigDict() return self.get(key) def __setattr__(self, key, value): if hasattr(dict, key): raise AttributeError('Read-only attribute.') if key in self and self[key] and isinstance(self[key], ConfigDict): raise AttributeError('Non-empty namespace attribute.') self[key] = value def __delattr__(self, key): if key in self: del self[key] def __call__(self, *a, **ka): for key, value in dict(*a, **ka).iteritems(): setattr(self, key, value) return self class AppStack(list): """ A stack-like list. Calling it returns the head of the stack. """ def __call__(self): """ Return the current default application. """ return self[-1] def push(self, value=None): """ Add a new :class:`Bottle` instance to the stack """ if not isinstance(value, Bottle): value = Bottle() self.append(value) return value class WSGIFileWrapper(object): def __init__(self, fp, buffer_size=1024*64): self.fp, self.buffer_size = fp, buffer_size for attr in ('fileno', 'close', 'read', 'readlines'): if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr)) def __iter__(self): read, buff = self.fp.read, self.buffer_size while True: part = read(buff) if not part: break yield part ############################################################################### # Application Helper ########################################################### ############################################################################### def abort(code=500, text='Unknown Error: Application stopped.'): """ Aborts execution and causes a HTTP error. """ raise HTTPError(code, text) def redirect(url, code=None): """ Aborts execution and causes a 303 or 302 redirect, depending on the HTTP protocol version. """ if code is None: code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302 location = urljoin(request.url, url) raise HTTPResponse("", status=code, header=dict(Location=location)) def static_file(filename, root, mimetype='auto', download=False): """ Open a file in a safe way and return :exc:`HTTPResponse` with status code 200, 305, 401 or 404. Set Content-Type, Content-Encoding, Content-Length and Last-Modified header. Obey If-Modified-Since header and HEAD requests. """ root = os.path.abspath(root) + os.sep filename = os.path.abspath(os.path.join(root, filename.strip('/\\'))) header = dict() if not filename.startswith(root): return HTTPError(403, "Access denied.") if not os.path.exists(filename) or not os.path.isfile(filename): return HTTPError(404, "File does not exist.") if not os.access(filename, os.R_OK): return HTTPError(403, "You do not have permission to access this file.") if mimetype == 'auto': mimetype, encoding = mimetypes.guess_type(filename) if mimetype: header['Content-Type'] = mimetype if encoding: header['Content-Encoding'] = encoding elif mimetype: header['Content-Type'] = mimetype if download: download = os.path.basename(filename if download == True else download) header['Content-Disposition'] = 'attachment; filename="%s"' % download stats = os.stat(filename) header['Content-Length'] = stats.st_size lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)) header['Last-Modified'] = lm ims = request.environ.get('HTTP_IF_MODIFIED_SINCE') if ims: ims = parse_date(ims.split(";")[0].strip()) if ims is not None and ims >= int(stats.st_mtime): header['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()) return HTTPResponse(status=304, header=header) body = '' if request.method == 'HEAD' else open(filename, 'rb') return HTTPResponse(body, header=header) ############################################################################### # HTTP Utilities and MISC (TODO) ############################################### ############################################################################### def debug(mode=True): """ Change the debug level. There is only one debug level supported at the moment.""" global DEBUG DEBUG = bool(mode) def parse_date(ims): """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """ try: ts = email.utils.parsedate_tz(ims) return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone except (TypeError, ValueError, IndexError, OverflowError): return None def parse_auth(header): """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" try: method, data = header.split(None, 1) if method.lower() == 'basic': #TODO: Add 2to3 save base64[encode/decode] functions. user, pwd = touni(base64.b64decode(tob(data))).split(':',1) return user, pwd except (KeyError, ValueError): return None def _lscmp(a, b): ''' Compares two strings in a cryptographically save way: Runtime is not affected by length of common prefix. ''' return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b) def cookie_encode(data, key): ''' Encode and sign a pickle-able object. Return a (byte) string ''' msg = base64.b64encode(pickle.dumps(data, -1)) sig = base64.b64encode(hmac.new(tob(key), msg).digest()) return tob('!') + sig + tob('?') + msg def cookie_decode(data, key): ''' Verify and decode an encoded string. Return an object or None.''' data = tob(data) if cookie_is_encoded(data): sig, msg = data.split(tob('?'), 1) if _lscmp(sig[1:], base64.b64encode(hmac.new(tob(key), msg).digest())): return pickle.loads(base64.b64decode(msg)) return None def cookie_is_encoded(data): ''' Return True if the argument looks like a encoded cookie.''' return bool(data.startswith(tob('!')) and tob('?') in data) def html_escape(string): ''' Escape HTML special characters ``&<>`` and quotes ``'"``. ''' return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\ .replace('"','&quot;').replace("'",'&#039;') def html_quote(string): ''' Escape and quote a string to be used as an HTTP attribute.''' return '"%s"' % html_escape(string).replace('\n','%#10;')\ .replace('\r','&#13;').replace('\t','&#9;') def yieldroutes(func): """ Return a generator for routes that match the signature (name, args) of the func parameter. This may yield more than one route if the function takes optional keyword arguments. The output is best described by example:: a() -> '/a' b(x, y) -> '/b/:x/:y' c(x, y=5) -> '/c/:x' and '/c/:x/:y' d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y' """ import inspect # Expensive module. Only import if necessary. path = '/' + func.__name__.replace('__','/').lstrip('/') spec = inspect.getargspec(func) argc = len(spec[0]) - len(spec[3] or []) path += ('/:%s' * argc) % tuple(spec[0][:argc]) yield path for arg in spec[0][argc:]: path += '/:%s' % arg yield path def path_shift(script_name, path_info, shift=1): ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. :return: The modified paths. :param script_name: The SCRIPT_NAME path. :param script_name: The PATH_INFO path. :param shift: The number of path fragments to shift. May be negative to change the shift direction. (default: 1) ''' if shift == 0: return script_name, path_info pathlist = path_info.strip('/').split('/') scriptlist = script_name.strip('/').split('/') if pathlist and pathlist[0] == '': pathlist = [] if scriptlist and scriptlist[0] == '': scriptlist = [] if shift > 0 and shift <= len(pathlist): moved = pathlist[:shift] scriptlist = scriptlist + moved pathlist = pathlist[shift:] elif shift < 0 and shift >= -len(scriptlist): moved = scriptlist[shift:] pathlist = moved + pathlist scriptlist = scriptlist[:shift] else: empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO' raise AssertionError("Cannot shift. Nothing left from %s" % empty) new_script_name = '/' + '/'.join(scriptlist) new_path_info = '/' + '/'.join(pathlist) if path_info.endswith('/') and pathlist: new_path_info += '/' return new_script_name, new_path_info def validate(**vkargs): """ Validates and manipulates keyword arguments by user defined callables. Handles ValueError and missing arguments by raising HTTPError(403). """ depr('Use route wildcard filters instead.') def decorator(func): @functools.wraps(func) def wrapper(*args, **kargs): for key, value in vkargs.iteritems(): if key not in kargs: abort(403, 'Missing parameter: %s' % key) try: kargs[key] = value(kargs[key]) except ValueError: abort(403, 'Wrong parameter format for: %s' % key) return func(*args, **kargs) return wrapper return decorator def auth_basic(check, realm="private", text="Access denied"): ''' Callback decorator to require HTTP auth (basic). TODO: Add route(check_auth=...) parameter. ''' def decorator(func): def wrapper(*a, **ka): user, password = request.auth or (None, None) if user is None or not check(user, password): response.headers['WWW-Authenticate'] = 'Basic realm="%s"' % realm return HTTPError(401, text) return func(*a, **ka) return wrapper return decorator def make_default_app_wrapper(name): ''' Return a callable that relays calls to the current default app. ''' @functools.wraps(getattr(Bottle, name)) def wrapper(*a, **ka): return getattr(app(), name)(*a, **ka) return wrapper for name in '''route get post put delete error mount hook install uninstall'''.split(): globals()[name] = make_default_app_wrapper(name) url = make_default_app_wrapper('get_url') del name ############################################################################### # Server Adapter ############################################################### ############################################################################### class ServerAdapter(object): quiet = False def __init__(self, host='127.0.0.1', port=8080, **config): self.options = config self.host = host self.port = int(port) def run(self, handler): # pragma: no cover pass def __repr__(self): args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()]) return "%s(%s)" % (self.__class__.__name__, args) class CGIServer(ServerAdapter): quiet = True def run(self, handler): # pragma: no cover from wsgiref.handlers import CGIHandler def fixed_environ(environ, start_response): environ.setdefault('PATH_INFO', '') return handler(environ, start_response) CGIHandler().run(fixed_environ) class FlupFCGIServer(ServerAdapter): def run(self, handler): # pragma: no cover import flup.server.fcgi self.options.setdefault('bindAddress', (self.host, self.port)) flup.server.fcgi.WSGIServer(handler, **self.options).run() class WSGIRefServer(ServerAdapter): def run(self, handler): # pragma: no cover from wsgiref.simple_server import make_server, WSGIRequestHandler if self.quiet: class QuietHandler(WSGIRequestHandler): def log_request(*args, **kw): pass self.options['handler_class'] = QuietHandler srv = make_server(self.host, self.port, handler, **self.options) srv.serve_forever() class CherryPyServer(ServerAdapter): def run(self, handler): # pragma: no cover from cherrypy import wsgiserver server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler) try: server.start() finally: server.stop() class PasteServer(ServerAdapter): def run(self, handler): # pragma: no cover from paste import httpserver if not self.quiet: from paste.translogger import TransLogger handler = TransLogger(handler) httpserver.serve(handler, host=self.host, port=str(self.port), **self.options) class MeinheldServer(ServerAdapter): def run(self, handler): from meinheld import server server.listen((self.host, self.port)) server.run(handler) class FapwsServer(ServerAdapter): """ Extremely fast webserver using libev. See http://www.fapws.org/ """ def run(self, handler): # pragma: no cover import fapws._evwsgi as evwsgi from fapws import base, config port = self.port if float(config.SERVER_IDENT[-2:]) > 0.4: # fapws3 silently changed its API in 0.5 port = str(port) evwsgi.start(self.host, port) # fapws3 never releases the GIL. Complain upstream. I tried. No luck. if 'BOTTLE_CHILD' in os.environ and not self.quiet: print "WARNING: Auto-reloading does not work with Fapws3." print " (Fapws3 breaks python thread support)" evwsgi.set_base_module(base) def app(environ, start_response): environ['wsgi.multiprocess'] = False return handler(environ, start_response) evwsgi.wsgi_cb(('', app)) evwsgi.run() class TornadoServer(ServerAdapter): """ The super hyped asynchronous server by facebook. Untested. """ def run(self, handler): # pragma: no cover import tornado.wsgi, tornado.httpserver, tornado.ioloop container = tornado.wsgi.WSGIContainer(handler) server = tornado.httpserver.HTTPServer(container) server.listen(port=self.port) tornado.ioloop.IOLoop.instance().start() class AppEngineServer(ServerAdapter): """ Adapter for Google App Engine. """ quiet = True def run(self, handler): from google.appengine.ext.webapp import util # A main() function in the handler script enables 'App Caching'. # Lets makes sure it is there. This _really_ improves performance. module = sys.modules.get('__main__') if module and not hasattr(module, 'main'): module.main = lambda: util.run_wsgi_app(handler) util.run_wsgi_app(handler) class TwistedServer(ServerAdapter): """ Untested. """ def run(self, handler): from twisted.web import server, wsgi from twisted.python.threadpool import ThreadPool from twisted.internet import reactor thread_pool = ThreadPool() thread_pool.start() reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop) factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler)) reactor.listenTCP(self.port, factory, interface=self.host) reactor.run() class DieselServer(ServerAdapter): """ Untested. """ def run(self, handler): from diesel.protocols.wsgi import WSGIApplication app = WSGIApplication(handler, port=self.port) app.run() class GeventServer(ServerAdapter): """ Untested. Options: * `monkey` (default: True) fixes the stdlib to use greenthreads. * `fast` (default: False) uses libevent's http server, but has some issues: No streaming, no pipelining, no SSL. """ def run(self, handler): from gevent import wsgi as wsgi_fast, pywsgi, monkey, local if self.options.get('monkey', True): if not threading.local is local.local: monkey.patch_all() wsgi = wsgi_fast if self.options.get('fast') else pywsgi wsgi.WSGIServer((self.host, self.port), handler).serve_forever() class GunicornServer(ServerAdapter): """ Untested. See http://gunicorn.org/configure.html for options. """ def run(self, handler): from gunicorn.app.base import Application config = {'bind': "%s:%d" % (self.host, int(self.port))} config.update(self.options) class GunicornApplication(Application): def init(self, parser, opts, args): return config def load(self): return handler GunicornApplication().run() class EventletServer(ServerAdapter): """ Untested """ def run(self, handler): from eventlet import wsgi, listen wsgi.server(listen((self.host, self.port)), handler) class RocketServer(ServerAdapter): """ Untested. """ def run(self, handler): from rocket import Rocket server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler }) server.start() class BjoernServer(ServerAdapter): """ Fast server written in C: https://github.com/jonashaag/bjoern """ def run(self, handler): from bjoern import run run(handler, self.host, self.port) class AutoServer(ServerAdapter): """ Untested. """ adapters = [PasteServer, CherryPyServer, TwistedServer, WSGIRefServer] def run(self, handler): for sa in self.adapters: try: return sa(self.host, self.port, **self.options).run(handler) except ImportError: pass server_names = { 'cgi': CGIServer, 'flup': FlupFCGIServer, 'wsgiref': WSGIRefServer, 'cherrypy': CherryPyServer, 'paste': PasteServer, 'fapws3': FapwsServer, 'tornado': TornadoServer, 'gae': AppEngineServer, 'twisted': TwistedServer, 'diesel': DieselServer, 'meinheld': MeinheldServer, 'gunicorn': GunicornServer, 'eventlet': EventletServer, 'gevent': GeventServer, 'rocket': RocketServer, 'bjoern' : BjoernServer, 'auto': AutoServer, } ############################################################################### # Application Control ########################################################## ############################################################################### def load(target, **namespace): """ Import a module or fetch an object from a module. * ``package.module`` returns `module` as a module object. * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. The last form accepts not only function calls, but any type of expression. Keyword arguments passed to this function are available as local variables. Example: ``import_string('re:compile(x)', x='[a-z]')`` """ module, target = target.split(":", 1) if ':' in target else (target, None) if module not in sys.modules: __import__(module) if not target: return sys.modules[module] if target.isalnum(): return getattr(sys.modules[module], target) package_name = module.split('.')[0] namespace[package_name] = sys.modules[package_name] return eval('%s.%s' % (module, target), namespace) def load_app(target): """ Load a bottle application from a module and make sure that the import does not affect the current default application, but returns a separate application object. See :func:`load` for the target parameter. """ global NORUN; NORUN, nr_old = True, NORUN try: tmp = default_app.push() # Create a new "default application" rv = load(target) # Import the target module return rv if callable(rv) else tmp finally: default_app.remove(tmp) # Remove the temporary added default application NORUN = nr_old def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, interval=1, reloader=False, quiet=False, plugins=None, **kargs): """ Start a server instance. This method blocks until the server terminates. :param app: WSGI application or target string supported by :func:`load_app`. (default: :func:`default_app`) :param server: Server adapter to use. See :data:`server_names` keys for valid names or pass a :class:`ServerAdapter` subclass. (default: `wsgiref`) :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on all interfaces including the external one. (default: 127.0.0.1) :param port: Server port to bind to. Values below 1024 require root privileges. (default: 8080) :param reloader: Start auto-reloading server? (default: False) :param interval: Auto-reloader interval in seconds (default: 1) :param quiet: Suppress output to stdout and stderr? (default: False) :param options: Options passed to the server adapter. """ if NORUN: return if reloader and not os.environ.get('BOTTLE_CHILD'): try: fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock') os.close(fd) # We only need this file to exist. We never write to it while os.path.exists(lockfile): args = [sys.executable] + sys.argv environ = os.environ.copy() environ['BOTTLE_CHILD'] = 'true' environ['BOTTLE_LOCKFILE'] = lockfile p = subprocess.Popen(args, env=environ) while p.poll() is None: # Busy wait... os.utime(lockfile, None) # I am alive! time.sleep(interval) if p.poll() != 3: if os.path.exists(lockfile): os.unlink(lockfile) sys.exit(p.poll()) except KeyboardInterrupt: pass finally: if os.path.exists(lockfile): os.unlink(lockfile) return stderr = sys.stderr.write try: app = app or default_app() if isinstance(app, basestring): app = load_app(app) if not callable(app): raise ValueError("Application is not callable: %r" % app) for plugin in plugins or []: app.install(plugin) if server in server_names: server = server_names.get(server) if isinstance(server, basestring): server = load(server) if isinstance(server, type): server = server(host=host, port=port, **kargs) if not isinstance(server, ServerAdapter): raise ValueError("Unknown or unsupported server: %r" % server) server.quiet = server.quiet or quiet if not server.quiet: stderr("Bottle server starting up (using %s)...\n" % repr(server)) stderr("Listening on http://%s:%d/\n" % (server.host, server.port)) stderr("Hit Ctrl-C to quit.\n\n") if reloader: lockfile = os.environ.get('BOTTLE_LOCKFILE') bgcheck = FileCheckerThread(lockfile, interval) with bgcheck: server.run(app) if bgcheck.status == 'reload': sys.exit(3) else: server.run(app) except KeyboardInterrupt: pass except (SyntaxError, ImportError): if not reloader: raise if not getattr(server, 'quiet', False): print_exc() sys.exit(3) finally: if not getattr(server, 'quiet', False): stderr('Shutdown...\n') class FileCheckerThread(threading.Thread): ''' Interrupt main-thread as soon as a changed module file is detected, the lockfile gets deleted or gets to old. ''' def __init__(self, lockfile, interval): threading.Thread.__init__(self) self.lockfile, self.interval = lockfile, interval #: Is one of 'reload', 'error' or 'exit' self.status = None def run(self): exists = os.path.exists mtime = lambda path: os.stat(path).st_mtime files = dict() for module in sys.modules.values(): path = getattr(module, '__file__', '') if path[-4:] in ('.pyo', '.pyc'): path = path[:-1] if path and exists(path): files[path] = mtime(path) while not self.status: if not exists(self.lockfile)\ or mtime(self.lockfile) < time.time() - self.interval - 5: self.status = 'error' thread.interrupt_main() for path, lmtime in files.iteritems(): if not exists(path) or mtime(path) > lmtime: self.status = 'reload' thread.interrupt_main() break time.sleep(self.interval) def __enter__(self): self.start() def __exit__(self, exc_type, exc_val, exc_tb): if not self.status: self.status = 'exit' # silent exit self.join() return issubclass(exc_type, KeyboardInterrupt) ############################################################################### # Template Adapters ############################################################ ############################################################################### class TemplateError(HTTPError): def __init__(self, message): HTTPError.__init__(self, 500, message) class BaseTemplate(object): """ Base class and minimal API for template adapters """ extensions = ['tpl','html','thtml','stpl'] settings = {} #used in prepare() defaults = {} #used in render() def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings): """ Create a new template. If the source parameter (str or buffer) is missing, the name argument is used to guess a template filename. Subclasses can assume that self.source and/or self.filename are set. Both are strings. The lookup, encoding and settings parameters are stored as instance variables. The lookup parameter stores a list containing directory paths. The encoding parameter should be used to decode byte strings or files. The settings parameter contains a dict for engine-specific settings. """ self.name = name self.source = source.read() if hasattr(source, 'read') else source self.filename = source.filename if hasattr(source, 'filename') else None self.lookup = map(os.path.abspath, lookup) self.encoding = encoding self.settings = self.settings.copy() # Copy from class variable self.settings.update(settings) # Apply if not self.source and self.name: self.filename = self.search(self.name, self.lookup) if not self.filename: raise TemplateError('Template %s not found.' % repr(name)) if not self.source and not self.filename: raise TemplateError('No template specified.') self.prepare(**self.settings) @classmethod def search(cls, name, lookup=[]): """ Search name in all directories specified in lookup. First without, then with common extensions. Return first hit. """ if os.path.isfile(name): return name for spath in lookup: fname = os.path.join(spath, name) if os.path.isfile(fname): return fname for ext in cls.extensions: if os.path.isfile('%s.%s' % (fname, ext)): return '%s.%s' % (fname, ext) @classmethod def global_config(cls, key, *args): ''' This reads or sets the global settings stored in class.settings. ''' if args: cls.settings = cls.settings.copy() # Make settings local to class cls.settings[key] = args[0] else: return cls.settings[key] def prepare(self, **options): """ Run preparations (parsing, caching, ...). It should be possible to call this again to refresh a template or to update settings. """ raise NotImplementedError def render(self, *args, **kwargs): """ Render the template with the specified local variables and return a single byte or unicode string. If it is a byte string, the encoding must match self.encoding. This method must be thread-safe! Local variables may be provided in dictionaries (*args) or directly, as keywords (**kwargs). """ raise NotImplementedError class MakoTemplate(BaseTemplate): def prepare(self, **options): from mako.template import Template from mako.lookup import TemplateLookup options.update({'input_encoding':self.encoding}) options.setdefault('format_exceptions', bool(DEBUG)) lookup = TemplateLookup(directories=self.lookup, **options) if self.source: self.tpl = Template(self.source, lookup=lookup, **options) else: self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) def render(self, *args, **kwargs): for dictarg in args: kwargs.update(dictarg) _defaults = self.defaults.copy() _defaults.update(kwargs) return self.tpl.render(**_defaults) class CheetahTemplate(BaseTemplate): def prepare(self, **options): from Cheetah.Template import Template self.context = threading.local() self.context.vars = {} options['searchList'] = [self.context.vars] if self.source: self.tpl = Template(source=self.source, **options) else: self.tpl = Template(file=self.filename, **options) def render(self, *args, **kwargs): for dictarg in args: kwargs.update(dictarg) self.context.vars.update(self.defaults) self.context.vars.update(kwargs) out = str(self.tpl) self.context.vars.clear() return out class Jinja2Template(BaseTemplate): def prepare(self, filters=None, tests=None, **kwargs): from jinja2 import Environment, FunctionLoader if 'prefix' in kwargs: # TODO: to be removed after a while raise RuntimeError('The keyword argument `prefix` has been removed. ' 'Use the full jinja2 environment name line_statement_prefix instead.') self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) if filters: self.env.filters.update(filters) if tests: self.env.tests.update(tests) if self.source: self.tpl = self.env.from_string(self.source) else: self.tpl = self.env.get_template(self.filename) def render(self, *args, **kwargs): for dictarg in args: kwargs.update(dictarg) _defaults = self.defaults.copy() _defaults.update(kwargs) return self.tpl.render(**_defaults) def loader(self, name): fname = self.search(name, self.lookup) if fname: with open(fname, "rb") as f: return f.read().decode(self.encoding) class SimpleTALTemplate(BaseTemplate): ''' Untested! ''' def prepare(self, **options): from simpletal import simpleTAL # TODO: add option to load METAL files during render if self.source: self.tpl = simpleTAL.compileHTMLTemplate(self.source) else: with open(self.filename, 'rb') as fp: self.tpl = simpleTAL.compileHTMLTemplate(tonat(fp.read())) def render(self, *args, **kwargs): from simpletal import simpleTALES for dictarg in args: kwargs.update(dictarg) # TODO: maybe reuse a context instead of always creating one context = simpleTALES.Context() for k,v in self.defaults.items(): context.addGlobal(k, v) for k,v in kwargs.items(): context.addGlobal(k, v) output = StringIO() self.tpl.expand(context, output) return output.getvalue() class SimpleTemplate(BaseTemplate): blocks = ('if', 'elif', 'else', 'try', 'except', 'finally', 'for', 'while', 'with', 'def', 'class') dedent_blocks = ('elif', 'else', 'except', 'finally') @lazy_attribute def re_pytokens(cls): ''' This matches comments and all kinds of quoted strings but does NOT match comments (#...) within quoted strings. (trust me) ''' return re.compile(r''' (''(?!')|""(?!")|'{6}|"{6} # Empty strings (all 4 types) |'(?:[^\\']|\\.)+?' # Single quotes (') |"(?:[^\\"]|\\.)+?" # Double quotes (") |'{3}(?:[^\\]|\\.|\n)+?'{3} # Triple-quoted strings (') |"{3}(?:[^\\]|\\.|\n)+?"{3} # Triple-quoted strings (") |\#.* # Comments )''', re.VERBOSE) def prepare(self, escape_func=html_escape, noescape=False, **kwargs): self.cache = {} enc = self.encoding self._str = lambda x: touni(x, enc) self._escape = lambda x: escape_func(touni(x, enc)) if noescape: self._str, self._escape = self._escape, self._str @classmethod def split_comment(cls, code): """ Removes comments (#...) from python code. """ if '#' not in code: return code #: Remove comments only (leave quoted strings as they are) subf = lambda m: '' if m.group(0)[0]=='#' else m.group(0) return re.sub(cls.re_pytokens, subf, code) @cached_property def co(self): return compile(self.code, self.filename or '<string>', 'exec') @cached_property def code(self): stack = [] # Current Code indentation lineno = 0 # Current line of code ptrbuffer = [] # Buffer for printable strings and token tuple instances codebuffer = [] # Buffer for generated python code multiline = dedent = oneline = False template = self.source or open(self.filename, 'rb').read() def yield_tokens(line): for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)): if i % 2: if part.startswith('!'): yield 'RAW', part[1:] else: yield 'CMD', part else: yield 'TXT', part def flush(): # Flush the ptrbuffer if not ptrbuffer: return cline = '' for line in ptrbuffer: for token, value in line: if token == 'TXT': cline += repr(value) elif token == 'RAW': cline += '_str(%s)' % value elif token == 'CMD': cline += '_escape(%s)' % value cline += ', ' cline = cline[:-2] + '\\\n' cline = cline[:-2] if cline[:-1].endswith('\\\\\\\\\\n'): cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr' cline = '_printlist([' + cline + '])' del ptrbuffer[:] # Do this before calling code() again code(cline) def code(stmt): for line in stmt.splitlines(): codebuffer.append(' ' * len(stack) + line.strip()) for line in template.splitlines(True): lineno += 1 line = line if isinstance(line, unicode)\ else unicode(line, encoding=self.encoding) sline = line.lstrip() if lineno <= 2: m = re.search(r"%.*coding[:=]\s*([-\w\.]+)", line) if m: self.encoding = m.group(1) if m: line = line.replace('coding','coding (removed)') if sline and sline[0] == '%' and sline[:2] != '%%': line = line.split('%',1)[1].lstrip() # Full line following the % cline = self.split_comment(line).strip() cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0] flush() # You are actually reading this? Good luck, it's a mess :) if cmd in self.blocks or multiline: cmd = multiline or cmd dedent = cmd in self.dedent_blocks # "else:" if dedent and not oneline and not multiline: cmd = stack.pop() code(line) oneline = not cline.endswith(':') # "if 1: pass" multiline = cmd if cline.endswith('\\') else False if not oneline and not multiline: stack.append(cmd) elif cmd == 'end' and stack: code('#end(%s) %s' % (stack.pop(), line.strip()[3:])) elif cmd == 'include': p = cline.split(None, 2)[1:] if len(p) == 2: code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1])) elif p: code("_=_include(%s, _stdout)" % repr(p[0])) else: # Empty %include -> reverse of %rebase code("_printlist(_base)") elif cmd == 'rebase': p = cline.split(None, 2)[1:] if len(p) == 2: code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1])) elif p: code("globals()['_rebase']=(%s, {})" % repr(p[0])) else: code(line) else: # Line starting with text (not '%') or '%%' (escaped) if line.strip().startswith('%%'): line = line.replace('%%', '%', 1) ptrbuffer.append(yield_tokens(line)) flush() return '\n'.join(codebuffer) + '\n' def subtemplate(self, _name, _stdout, *args, **kwargs): for dictarg in args: kwargs.update(dictarg) if _name not in self.cache: self.cache[_name] = self.__class__(name=_name, lookup=self.lookup) return self.cache[_name].execute(_stdout, kwargs) def execute(self, _stdout, *args, **kwargs): for dictarg in args: kwargs.update(dictarg) env = self.defaults.copy() env.update({'_stdout': _stdout, '_printlist': _stdout.extend, '_include': self.subtemplate, '_str': self._str, '_escape': self._escape, 'get': env.get, 'setdefault': env.setdefault, 'defined': env.__contains__}) env.update(kwargs) eval(self.co, env) if '_rebase' in env: subtpl, rargs = env['_rebase'] rargs['_base'] = _stdout[:] #copy stdout del _stdout[:] # clear stdout return self.subtemplate(subtpl,_stdout,rargs) return env def render(self, *args, **kwargs): """ Render the template using keyword arguments as local variables. """ for dictarg in args: kwargs.update(dictarg) stdout = [] self.execute(stdout, kwargs) return ''.join(stdout) def template(*args, **kwargs): ''' Get a rendered template as a string iterator. You can use a name, a filename or a template string as first parameter. Template rendering arguments can be passed as dictionaries or directly (as keyword arguments). ''' tpl = args[0] if args else None template_adapter = kwargs.pop('template_adapter', SimpleTemplate) if tpl not in TEMPLATES or DEBUG: settings = kwargs.pop('template_settings', {}) lookup = kwargs.pop('template_lookup', TEMPLATE_PATH) if isinstance(tpl, template_adapter): TEMPLATES[tpl] = tpl if settings: TEMPLATES[tpl].prepare(**settings) elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl: TEMPLATES[tpl] = template_adapter(source=tpl, lookup=lookup, **settings) else: TEMPLATES[tpl] = template_adapter(name=tpl, lookup=lookup, **settings) if not TEMPLATES[tpl]: abort(500, 'Template (%s) not found' % tpl) for dictarg in args[1:]: kwargs.update(dictarg) return TEMPLATES[tpl].render(kwargs) mako_template = functools.partial(template, template_adapter=MakoTemplate) cheetah_template = functools.partial(template, template_adapter=CheetahTemplate) jinja2_template = functools.partial(template, template_adapter=Jinja2Template) simpletal_template = functools.partial(template, template_adapter=SimpleTALTemplate) def view(tpl_name, **defaults): ''' Decorator: renders a template for a handler. The handler can control its behavior like that: - return a dict of template vars to fill out the template - return something other than a dict and the view decorator will not process the template, but return the handler result as is. This includes returning a HTTPResponse(dict) to get, for instance, JSON with autojson or other castfilters. ''' def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): result = func(*args, **kwargs) if isinstance(result, (dict, DictMixin)): tplvars = defaults.copy() tplvars.update(result) return template(tpl_name, **tplvars) return result return wrapper return decorator mako_view = functools.partial(view, template_adapter=MakoTemplate) cheetah_view = functools.partial(view, template_adapter=CheetahTemplate) jinja2_view = functools.partial(view, template_adapter=Jinja2Template) simpletal_view = functools.partial(view, template_adapter=SimpleTALTemplate) ############################################################################### # Constants and Globals ######################################################## ############################################################################### TEMPLATE_PATH = ['./', './views/'] TEMPLATES = {} DEBUG = False NORUN = False # If set, run() does nothing. Used by load_app() #: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') HTTP_CODES = httplib.responses HTTP_CODES[418] = "I'm a teapot" # RFC 2324 HTTP_CODES[428] = "Precondition Required" HTTP_CODES[429] = "Too Many Requests" HTTP_CODES[431] = "Request Header Fields Too Large" HTTP_CODES[511] = "Network Authentication Required" _HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.iteritems()) #: The default template used for error pages. Override with @error() ERROR_PAGE_TEMPLATE = """ %try: %from bottle import DEBUG, HTTP_CODES, request, touni %status_name = HTTP_CODES.get(e.status, 'Unknown').title() <!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN"> <html> <head> <title>Error {{e.status}}: {{status_name}}</title> <style type="text/css"> html {background-color: #eee; font-family: sans;} body {background-color: #fff; border: 1px solid #ddd; padding: 15px; margin: 15px;} pre {background-color: #eee; border: 1px solid #ddd; padding: 5px;} </style> </head> <body> <h1>Error {{e.status}}: {{status_name}}</h1> <p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt> caused an error:</p> <pre>{{e.output}}</pre> %if DEBUG and e.exception: <h2>Exception:</h2> <pre>{{repr(e.exception)}}</pre> %end %if DEBUG and e.traceback: <h2>Traceback:</h2> <pre>{{e.traceback}}</pre> %end </body> </html> %except ImportError: <b>ImportError:</b> Could not generate the error page. Please add bottle to the import path. %end """ #: A thread-safe instance of :class:`Request` representing the `current` request. request = Request() #: A thread-safe instance of :class:`Response` used to build the HTTP response. response = Response() #: A thread-safe namespace. Not used by Bottle. local = threading.local() # Initialize app stack (create first empty Bottle app) # BC: 0.6.4 and needed for run() app = default_app = AppStack() app.push() #: A virtual package that redirects import statements. #: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`. ext = _ImportRedirect(__name__+'.ext', 'bottle_%s').module if __name__ == '__main__': opt, args, parser = _cmd_options, _cmd_args, _cmd_parser if opt.version: print 'Bottle', __version__; sys.exit(0) if not args: parser.print_help() print '\nError: No application specified.\n' sys.exit(1) try: sys.path.insert(0, '.') sys.modules.setdefault('bottle', sys.modules['__main__']) except (AttributeError, ImportError), e: parser.error(e.args[0]) if opt.bind and ':' in opt.bind: host, port = opt.bind.rsplit(':', 1) else: host, port = (opt.bind or 'localhost'), 8080 debug(opt.debug) run(args[0], host=host, port=port, server=opt.server, reloader=opt.reload, plugins=opt.plugin) # THE END