# Code-dataset excerpt: one Python source file per section, each labelled
# "repo :: path (license: ...)".
# ===== alphagov/notifications-admin :: tests/app/main/views/test_find_services.py (license: mit) =====
from flask import url_for
from tests import service_json
def test_find_services_by_name_page_loads_correctly(client_request, platform_admin_user):
client_request.login(platform_admin_user)
document = client_request.get('main.find_services_by_name')
assert document.h1.text.strip() == 'Find services by name'
assert len(document.find_all('input', {'type': 'search'})) > 0
def test_find_services_by_name_displays_services_found(
client_request,
platform_admin_user,
mocker
):
client_request.login(platform_admin_user)
get_services = mocker.patch(
'app.service_api_client.find_services_by_name',
return_value={"data": [service_json()]}
)
document = client_request.post(
'main.find_services_by_name',
_data={"search": "Test Service"},
_expected_status=200
)
get_services.assert_called_once_with(service_name="Test Service")
result = document.select_one('.browse-list-item a')
assert result.text.strip() == 'Test Service'
assert result.attrs["href"] == "/services/1234"
def test_find_services_by_name_displays_multiple_services(
client_request,
platform_admin_user,
mocker
):
client_request.login(platform_admin_user)
mocker.patch(
'app.service_api_client.find_services_by_name',
return_value={"data": [service_json(name="Tadfield Police"), service_json(name="Tadfield Air Base")]}
)
document = client_request.post('main.find_services_by_name', _data={"search": "Tadfield"}, _expected_status=200)
results = document.find_all('li', {'class': 'browse-list-item'})
assert len(results) == 2
assert sorted([result.text.strip() for result in results]) == ["Tadfield Air Base", "Tadfield Police"]
def test_find_services_by_name_displays_message_if_no_services_found(
client_request,
platform_admin_user,
mocker
):
client_request.login(platform_admin_user)
mocker.patch('app.service_api_client.find_services_by_name', return_value={"data": []})
document = client_request.post(
'main.find_services_by_name', _data={"search": "Nabuchodonosorian Empire"}, _expected_status=200
)
assert document.find('p', {'class': 'browse-list-hint'}).text.strip() == 'No services found.'
def test_find_services_by_name_validates_against_empty_search_submission(
client_request,
platform_admin_user,
mocker
):
client_request.login(platform_admin_user)
document = client_request.post('main.find_services_by_name', _data={"search": ""}, _expected_status=200)
expected_message = "Error: You need to enter full or partial name to search by."
assert document.find('span', {'class': 'govuk-error-message'}).text.strip() == expected_message
def test_find_services_by_name_redirects_for_uuid(
client_request,
platform_admin_user,
mocker,
fake_uuid
):
client_request.login(platform_admin_user)
client_request.post(
'main.find_services_by_name',
_data={"search": fake_uuid},
_expected_redirect=url_for(
'main.service_dashboard',
service_id=fake_uuid,
_external=True,
),
)
# ===== rwl/PyCIM :: CIM14/IEC61968/Metering/DynamicDemand.py (license: mit) =====
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.Element import Element
class DynamicDemand(Element):
"""Dynamic demand description. The formula by which demand is measured is an important underlying definition to the measurement. Generally speaking, all of the meters in a given utility will be configured to measure demand the same way. Nevertheless, it must be defined. An 'interval' of 60, 30, 15, 10, or 5 minutes must be defined to describe the interval of time over which usage is measured. When demand is defined to be DemandKind.rollingBlock, both an 'interval' and a 'subinterval' must be defined, where the 'subinterval' must be a multiple of the 'interval' which contains it. A common setting is '15-minute rolling block with 5-minute subintervals.'
"""
def __init__(self, kind="logarithmic", interval=0.0, subInterval=0.0, *args, **kw_args):
"""Initialises a new 'DynamicDemand' instance.
@param kind: Kind of demand. Values are: "logarithmic", "fixedBlock", "rollingBlock"
@param interval: Demand interval.
        @param subInterval: (if 'kind'=rollingBlock) Subinterval; must be a divisor of the 'interval' that contains it.
"""
#: Kind of demand. Values are: "logarithmic", "fixedBlock", "rollingBlock"
self.kind = kind
#: Demand interval.
self.interval = interval
        #: (if 'kind'=rollingBlock) Subinterval; must be a divisor of the 'interval' that contains it.
self.subInterval = subInterval
super(DynamicDemand, self).__init__(*args, **kw_args)
_attrs = ["kind", "interval", "subInterval"]
_attr_types = {"kind": str, "interval": float, "subInterval": float}
_defaults = {"kind": "logarithmic", "interval": 0.0, "subInterval": 0.0}
_enums = {"kind": "DemandKind"}
_refs = []
_many_refs = []
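# --- Editor's illustration (hypothetical helper, not part of PyCIM) ---
# The class docstring above describes rolling-block demand: when each
# subinterval completes, demand is the average over the most recent
# 'interval' worth of subinterval readings. A minimal standalone sketch,
# assuming kW readings and minutes for both durations:
def _rolling_block_demand_sketch(readings, interval=15.0, subInterval=5.0):
    n = int(interval // subInterval)  # subintervals per block, e.g. 3
    if len(readings) < n:
        return None  # not enough history to fill one block yet
    return sum(readings[-n:]) / float(n)
# e.g. _rolling_block_demand_sketch([10.0, 12.0, 14.0]) returns 12.0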
# ===== cmrust/VineyardMonitor-ATT :: tem.py (license: mit) =====
import serial
import threading
from datetime import datetime
from m2x.client import M2XClient
# instantiate our M2X API client
client = M2XClient(key='#REMOVED#')
# instantiate our serial connection to the Arduino
arduino = serial.Serial('/dev/ttyUSB0', 9600)
# instantiate our global variables
temp = 0
light = 0
now = datetime.utcnow()
def pollArduino():
# update these globally
global temp
global light
global now
# poll time (m2x is only UTC currently)
now = datetime.utcnow()
# data from the serial port comes in comma-separated format:
# $temp,$light
# poll temp/light values (rstrip() removes the \n's)
values = arduino.readline().rstrip().split(',')
# if our array is not empty
if len(values) > 1:
temp = values[0]
light = values[1]
# print values to the console
print "tempF: " + temp
print "light: " + light
print
# clear the serial input buffer
# this keeps it from building up a backlog and causing delays
arduino.flushInput()
def pushM2X():
# iterate through any feeds from blueprints
for feed in client.feeds.search(type='blueprint'):
# iterate through steams in feeds
for stream in feed.streams:
# upload the current values for each stream
if stream.name == 'temperature':
stream.values.add_value(temp, now)
if stream.name == 'light':
stream.values.add_value(light, now)
m2x_thread = None
while True:
    pollArduino()
    # m2x calls were proving slow, so we've threaded the upload here so
    # that the arduino doesn't get backed up while we're waiting.
    # Only start a new m2x thread if the previous one has finished;
    # checking a freshly created (never started) thread would always
    # report not-alive and spawn a new thread on every loop iteration.
    if m2x_thread is None or not m2x_thread.is_alive():
        m2x_thread = threading.Thread(target=pushM2X)
        m2x_thread.start()
# ===== nwokeo/supysonic :: supysonic/web.py (license: agpl-3.0) =====
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os.path
from flask import Flask, g
from werkzeug.local import LocalProxy
from supysonic import config
from supysonic.db import get_store
def get_db_store():
store = getattr(g, 'store', None)
if store:
return store
g.store = get_store(config.get('base', 'database_uri'))
return g.store
store = LocalProxy(get_db_store)
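# Editor's note (illustrative): because `store` is a werkzeug LocalProxy,
# request-handling code can use it like a plain Storm store while each
# request lazily opens its own connection through get_db_store(), e.g.
#     folder = store.get(Folder, folder_id)   # hypothetical model/lookup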
def teardown_db(exception):
store = getattr(g, 'store', None)
if store:
store.close()
def create_application():
global app
if not config.check():
return None
if not os.path.exists(config.get('webapp', 'cache_dir')):
os.makedirs(config.get('webapp', 'cache_dir'))
app = Flask(__name__)
app.secret_key = '?9huDM\\H'
app.teardown_appcontext(teardown_db)
if config.get('webapp', 'log_file'):
import logging
from logging.handlers import TimedRotatingFileHandler
handler = TimedRotatingFileHandler(config.get('webapp', 'log_file'), when = 'midnight')
if config.get('webapp', 'log_level'):
mapping = {
'DEBUG': logging.DEBUG,
'INFO': logging.INFO,
'WARNING': logging.WARNING,
'ERROR': logging.ERROR,
                'CRITICAL': logging.CRITICAL
}
handler.setLevel(mapping.get(config.get('webapp', 'log_level').upper(), logging.NOTSET))
app.logger.addHandler(handler)
from supysonic import frontend
from supysonic import api
return app
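# Editor's usage sketch (assumes a valid supysonic config is present):
#     from supysonic.web import create_application
#     application = create_application()  # WSGI entry point, e.g. for gunicorn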
# ===== mscuthbert/abjad :: abjad/tools/abjadbooktools/test/test_LaTeXDocumentHandler_hide.py (license: gpl-3.0) =====
# -*- encoding: utf-8 -*-
import platform
import unittest
from abjad.tools import abjadbooktools
from abjad.tools import systemtools
@unittest.skipIf(
platform.python_implementation() != 'CPython',
'Only for CPython',
)
class TestLaTeXDocumentHandler(unittest.TestCase):
def test_hide_1(self):
input_file_contents = [
'\\begin{comment}',
'<abjad>[hide=true]',
'def do_something(expr):',
" print('before')",
' print(expr + 1)',
" print('after')",
'',
'</abjad>',
'\\end{comment}',
'',
'\\begin{comment}',
'<abjad>',
'do_something(23)',
'</abjad>',
'\\end{comment}',
]
document_handler = abjadbooktools.LaTeXDocumentHandler()
input_blocks = document_handler.collect_input_blocks(input_file_contents)
input_blocks = tuple(input_blocks.values())
assert input_blocks[0].code_block_specifier is not None
assert input_blocks[0].code_block_specifier.hide
assert input_blocks[1].code_block_specifier is None
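        # Editor's note: [hide=true] marks the first block to be executed (so
        # that do_something exists for the second block) but omitted from the
        # rendered output, as the rebuilt source in test_hide_2 below shows.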
def test_hide_2(self):
input_file_contents = [
'\\begin{comment}',
'<abjad>[hide=true]',
'def do_something(expr):',
" print('before')",
' print(expr + 1)',
" print('after')",
'',
'</abjad>',
'\\end{comment}',
'',
'\\begin{comment}',
'<abjad>',
'do_something(23)',
'</abjad>',
'\\end{comment}',
]
document_handler = abjadbooktools.LaTeXDocumentHandler(
input_file_contents=input_file_contents,
)
rebuilt_source = document_handler(return_source=True)
assert rebuilt_source == systemtools.TestManager.clean_string(
"""
\\begin{comment}
<abjad>[hide=true]
def do_something(expr):
print('before')
print(expr + 1)
print('after')
</abjad>
\\end{comment}
\\begin{comment}
<abjad>
do_something(23)
</abjad>
\\end{comment}
%%% ABJADBOOK START %%%
\\begin{lstlisting}
>>> do_something(23)
before
24
after
\\end{lstlisting}
%%% ABJADBOOK END %%%
""",
        )
# ===== dcherian/pyroms :: pyroms/pyroms/remapping/remap.py (license: bsd-3-clause) =====
import numpy as np
try:
import netCDF4 as netCDF
except:
import netCDF3 as netCDF
import pyroms
def remap(src_array, remap_file, src_grad1=None, src_grad2=None, \
src_grad3=None, spval=1e37, verbose=False):
'''
remap based on addresses and weights computed in a setup phase
'''
# get info from remap_file
data = netCDF.Dataset(remap_file, 'r')
title = data.title
map_method = data.map_method
normalization = data.normalization
src_grid_name = data.source_grid
dst_grid_name = data.dest_grid
src_grid_size = len(data.dimensions['src_grid_size'])
dst_grid_size = len(data.dimensions['dst_grid_size'])
num_links = len(data.dimensions['num_links'])
src_grid_dims = data.variables['src_grid_dims']
dst_grid_dims = data.variables['dst_grid_dims']
# get weights and addresses from remap_file
map_wts = data.variables['remap_matrix'][:]
dst_add = data.variables['dst_address'][:]
src_add = data.variables['src_address'][:]
# get destination mask
dst_mask = data.variables['dst_grid_imask'][:]
# remap from src grid to dst grid
if src_grad1 is not None:
iorder = 2
else:
iorder = 1
if verbose is True:
print 'Reading remapping: ', title
print 'From file: ', remap_file
print ' '
print 'Remapping between:'
print src_grid_name
print 'and'
print dst_grid_name
print 'Remapping method: ', map_method
ndim = len(src_array.squeeze().shape)
if (ndim == 2):
tmp_dst_array = np.zeros((dst_grid_size))
tmp_src_array = src_array.flatten()
if iorder == 1:
# first order remapping
            # ensure that map_wts is a (num_links,4) array
tmp_map_wts = np.zeros((num_links,4))
tmp_map_wts[:,0] = map_wts[:,0].copy()
map_wts = tmp_map_wts
pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \
dst_add, src_add, tmp_src_array)
if iorder == 2:
# second order remapping
if map_method == 'conservative':
                # ensure that map_wts is a (num_links,4) array
tmp_map_wts = np.zeros((num_links,4))
tmp_map_wts[:,0:2] = map_wts[:,0:2].copy()
map_wts = tmp_map_wts
tmp_src_grad1 = src_grad1.flatten()
tmp_src_grad2 = src_grad2.flatten()
pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \
dst_add, src_add, tmp_src_array, \
tmp_src_grad1, tmp_src_grad2)
elif map_method == 'bicubic':
tmp_src_grad1 = src_grad1.flatten()
tmp_src_grad2 = src_grad2.flatten()
tmp_src_grad3 = src_grad3.flatten()
pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \
dst_add, src_add, tmp_src_array, \
tmp_src_grad1, tmp_src_grad2, \
tmp_src_grad3)
else:
                raise ValueError, 'Unknown method'
# mask dst_array
idx = np.where(dst_mask == 0)
tmp_dst_array[idx] = spval
tmp_dst_array = np.ma.masked_values(tmp_dst_array, spval)
# reshape
dst_array = np.reshape(tmp_dst_array, (dst_grid_dims[1], \
dst_grid_dims[0]))
elif (ndim == 3):
nlev = src_array.shape[0]
dst_array = np.zeros((nlev, dst_grid_dims[1], dst_grid_dims[0]))
# loop over vertical level
for k in range(nlev):
tmp_src_array = src_array[k,:,:].flatten()
tmp_dst_array = np.zeros((dst_grid_size))
if iorder == 1:
# first order remapping
                # ensure that map_wts is a (num_links,4) array
tmp_map_wts = np.zeros((num_links,4))
tmp_map_wts[:,0] = map_wts[:,0].copy()
map_wts = tmp_map_wts
pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \
dst_add, src_add, tmp_src_array)
if iorder == 2:
# second order remapping
if map_method == 'conservative':
tmp_src_grad1 = src_grad1.flatten()
tmp_src_grad2 = src_grad2.flatten()
pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \
dst_add, src_add, tmp_src_array, \
tmp_src_grad1, tmp_src_grad2)
elif map_method == 'bicubic':
tmp_src_grad1 = src_grad1.flatten()
tmp_src_grad2 = src_grad2.flatten()
tmp_src_grad3 = src_grad3.flatten()
pyroms.remapping.scrip.remap(tmp_dst_array, map_wts, \
dst_add, src_add, tmp_src_array, \
tmp_src_grad1, tmp_src_grad2, \
tmp_src_grad3)
else:
                    raise ValueError, 'Unknown method'
# mask dst_array
idx = np.where(dst_mask == 0)
tmp_dst_array[idx] = spval
tmp_dst_array = np.ma.masked_values(tmp_dst_array, spval)
# reshape
dst_array[k,:,:] = np.reshape(tmp_dst_array, (dst_grid_dims[1], \
dst_grid_dims[0]))
else:
raise ValueError, 'src_array must have two or three dimensions'
# close data file
data.close()
return dst_array
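# --- Editor's illustration (hypothetical helper, not called by remap()) ---
# A pure-numpy sketch of what the compiled scrip.remap kernel does for
# first-order weights, assuming the SCRIP convention of 1-based addresses:
def _first_order_remap_sketch(src, map_wts, dst_add, src_add, dst_size):
    dst = np.zeros(dst_size)
    # accumulate weighted source contributions onto their destination cells
    np.add.at(dst, np.asarray(dst_add, dtype=int) - 1,
              map_wts[:, 0] * np.asarray(src)[np.asarray(src_add, dtype=int) - 1])
    return dst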
# ===== joshzarrabi/e-mission-server :: emission/net/api/cfc_webapp.py (license: bsd-3-clause) =====
# Standard imports
import json
from random import randrange
from bottle import route, post, get, run, template, static_file, request, app, HTTPError, abort, BaseRequest, JSONPlugin
import bottle as bt
# To support dynamic loading of client-specific libraries
import sys
import os
import logging
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(thread)d:%(message)s',
filename='webserver_debug.log', level=logging.DEBUG)
logging.debug("This should go to the log file")
from datetime import datetime
import time
# So that we can set the socket timeout
import socket
# For decoding JWTs using the google decode URL
import urllib
import requests
# For decoding JWTs on the client side
import oauth2client.client
from oauth2client.crypt import AppIdentityError
import traceback
import xmltodict
import urllib2
import bson.json_util
# Our imports
import modeshare, zipcode, distance, tripManager, \
Berkeley, visualize, stats, usercache, timeline
import emission.net.ext_service.moves.register as auth
import emission.analysis.result.carbon as carbon
import emission.analysis.classification.inference.commute as commute
import emission.analysis.modelling.work_time as work_time
import emission.analysis.result.userclient as userclient
import emission.core.common as common
from emission.core.wrapper.client import Client
from emission.core.wrapper.user import User
from emission.core.get_database import get_uuid_db, get_mode_db
import emission.core.wrapper.motionactivity as ecwm
config_file = open('conf/net/api/webserver.conf')
config_data = json.load(config_file)
static_path = config_data["paths"]["static_path"]
python_path = config_data["paths"]["python_path"]
server_host = config_data["server"]["host"]
server_port = config_data["server"]["port"]
socket_timeout = config_data["server"]["timeout"]
log_base_dir = config_data["paths"]["log_base_dir"]
key_file = open('conf/net/keys.json')
key_data = json.load(key_file)
ssl_cert = key_data["ssl_certificate"]
private_key = key_data["private_key"]
client_key = key_data["client_key"]
client_key_old = key_data["client_key_old"]
ios_client_key = key_data["ios_client_key"]
BaseRequest.MEMFILE_MAX = 1024 * 1024 * 1024 # Allow the request size to be 1G
# to accommodate large section sizes
skipAuth = False
print "Finished configuring logging for %s" % logging.getLogger()
app = app()
# On MacOS, the current working directory is always in the python path However,
# on ubuntu, it looks like the script directory (api in our case) is in the
# python path, but the pwd is not. This means that "main" is not seen even if
# we run from the CFC_WebApp directory. Let's make sure to manually add it to
# the python path so that we can keep our separation between the main code and
# the webapp layer
#Simple path that serves up a static landing page with javascript in it
@route('/')
def index():
return static_file("server/index.html", static_path)
# Bunch of static pages that constitute our website
# Should we have gone for something like django instead after all?
# If this gets to be too much, we should definitely consider that
@route("/<filename>")
def doc(filename):
if filename != "privacy" and filename != "support" and filename != "about" and filename != "consent":
return HTTPError(404, "Don't try to hack me, you evil spammer")
else:
return static_file("%s.html" % filename, "%s/docs/" % static_path)
# Serve up javascript and css files properly
@route('/front/<filename:path>')
def server_static(filename):
logging.debug("static filename = %s" % filename)
return static_file(filename, static_path)
@route('/clients/<clientname>/front/<filename>')
def server_static(clientname, filename):
logging.debug("returning file %s from client %s " % (filename, clientname))
return static_file(filename, "clients/%s/%s" % (clientname, static_path))
# Returns the proportion of survey takers who use each mode
@route('/result/commute.modeshare.distance')
def getCommuteModeShare():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
return modeshare.get_Alluser_mode_share_by_distance("commute",
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# return modeshare.getModeShare()
@route('/result/internal.modeshare.distance')
def getBerkeleyModeShare():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
return Berkeley.get_berkeley_mode_share_by_distance(
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# return modeshare.getModeShare()
# Returns the modeshare by zipcode
@route('/result/commute.modeshare/zipcode/<zc>')
def getCommuteModeShare(zc):
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
return zipcode.get_mode_share_by_Zipcode(zc, "commute",
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# Returns the proportion of survey takers from different zip codes
@route('/result/home.zipcode')
def getZipcode():
return zipcode.getZipcode()
# Returns the proportion of commute distances
@route('/result/commute.distance.to')
def getDistance():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
distances = distance.get_morning_commute_distance_pie(
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# logging.debug("Returning distances = %s" % distances)
return distances
@route('/result/commute.distance.from')
def getDistance():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
distances = distance.get_evening_commute_distance_pie(
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# logging.debug("Returning distances = %s" % distances)
return distances
# Returns the distribution of commute arrival and departure times
@route('/result/commute.arrivalTime')
def getArrivalTime():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
retVal = work_time.get_Alluser_work_start_time_pie(
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# retVal = common.generateRandomResult(['00-04', '04-08', '08-10'])
# logging.debug("In getArrivalTime, retVal is %s" % retVal)
return retVal
@route('/result/commute.departureTime')
def getDepartureTime():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
retVal = work_time.get_Alluser_work_end_time_pie(
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# retVal = common.generateRandomResult(['00-04', '04-08', '08-10'])
# logging.debug("In getDepartureTime, retVal is %s" % retVal)
return retVal
@route("/result/heatmap/carbon")
def getCarbonHeatmap():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
retVal = visualize.carbon_by_zip(
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# retVal = common.generateRandomResult(['00-04', '04-08', '08-10'])
# logging.debug("In getCarbonHeatmap, retVal is %s" % retVal)
return retVal
@route("/result/heatmap/pop.route/cal")
def getCalPopRoute():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
retVal = visualize.Berkeley_pop_route(
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# retVal = common.generateRandomResult(['00-04', '04-08', '08-10'])
# logging.debug("In getCalPopRoute, retVal is %s" % retVal)
return retVal
@route("/result/heatmap/pop.route/commute/<selMode>")
def getCommutePopRoute(selMode):
map_mode = {"motorized" : "MotionTypes.IN_VEHICLE", "walking" : "MotionTypes.ON_FOOT", "cycling" : "MotionTypes.BICYCLING"}
fromTs = request.query.from_ts
toTs = request.query.to_ts
mode = map_mode[selMode]
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
retVal = visualize.Commute_pop_route(mode,
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
# retVal = common.generateRandomResult(['00-04', '04-08', '08-10'])
# logging.debug("In getCalPopRoute, retVal is %s" % retVal)
return retVal
@get('/result/carbon/all/summary')
def carbonSummaryAllTrips():
fromTs = request.query.from_ts
toTs = request.query.to_ts
logging.debug("Filtering values for range %s -> %s" % (fromTs, toTs))
return carbon.getSummaryAllTrips(
datetime.fromtimestamp(float(fromTs)/1000), datetime.fromtimestamp(float(toTs)/1000))
@get('/tripManager/getModeOptions')
def getModeOptions():
return tripManager.getModeOptions()
@post('/tripManager/getUnclassifiedSections')
def getUnclassifiedSections():
user_uuid=getUUID(request)
return tripManager.getUnclassifiedSections(user_uuid)
@post('/tripManager/setSectionClassification')
def setSectionClassification():
user_uuid=getUUID(request)
updates = request.json['updates']
return tripManager.setSectionClassification(user_uuid, updates)
@post('/tripManager/storeSensedTrips')
def storeSensedTrips():
logging.debug("Called storeSensedTrips")
user_uuid=getUUID(request)
print "user_uuid %s" % user_uuid
logging.debug("user_uuid %s" % user_uuid)
sections = request.json['sections']
return tripManager.storeSensedTrips(user_uuid, sections)
@post('/usercache/get')
def getFromCache():
logging.debug("Called userCache.get")
user_uuid=getUUID(request)
logging.debug("user_uuid %s" % user_uuid)
to_phone = usercache.sync_server_to_phone(user_uuid)
return {'server_to_phone': to_phone}
@post('/usercache/put')
def putIntoCache():
logging.debug("Called userCache.put")
user_uuid=getUUID(request)
logging.debug("user_uuid %s" % user_uuid)
from_phone = request.json['phone_to_server']
return usercache.sync_phone_to_server(user_uuid, from_phone)
@post('/timeline/getTrips/<day>')
def getTrips(day):
logging.debug("Called timeline.getTrips/%s" % day)
user_uuid=getUUID(request)
force_refresh = request.query.get('refresh', False)
logging.debug("user_uuid %s" % user_uuid)
ret_geojson = timeline.get_trips_for_day(user_uuid, day, force_refresh)
logging.debug("type(ret_geojson) = %s" % type(ret_geojson))
ret_dict = {"timeline": ret_geojson}
logging.debug("type(ret_dict) = %s" % type(ret_dict))
return ret_dict
@post('/profile/create')
def createUserProfile():
logging.debug("Called createUserProfile")
userToken = request.json['user']
# This is the only place we should use the email, since we may not have a
# UUID yet. All others should only use the UUID.
if skipAuth:
userEmail = userToken
else:
userEmail = verifyUserToken(userToken)
logging.debug("userEmail = %s" % userEmail)
user = User.register(userEmail)
logging.debug("Looked up user = %s" % user)
logging.debug("Returning result %s" % {'uuid': str(user.uuid)})
return {'uuid': str(user.uuid)}
@post('/profile/update')
def updateUserProfile():
logging.debug("Called updateUserProfile")
user_uuid = getUUID(request)
user = User.fromUUID(user_uuid)
mpg_array = request.json['mpg_array']
return user.setMpgArray(mpg_array)
@post('/profile/consent')
def setConsentInProfile():
user_uuid = getUUID(request)
version = request.json['version']
print "Setting accepted version to %s for user %s" % (version, user_uuid)
logging.debug("Setting accepted version to %s for user %s" % (version, user_uuid))
return None
@post('/profile/settings')
def getCustomizationForProfile():
user_uuid = getUUID(request)
user = User.fromUUID(user_uuid)
logging.debug("Returning settings for user %s" % user_uuid)
return user.getSettings()
@post('/stats/set')
def setStats():
user_uuid=getUUID(request)
inStats = request.json['stats']
stats.setClientMeasurements(user_uuid, inStats)
@post('/compare')
def postCarbonCompare():
from clients.data import data
from clients.choice import choice
if not skipAuth:
if request.json == None:
return "Waiting for user data to become available..."
if 'user' not in request.json:
return "Waiting for user data to be become available.."
user_uuid = getUUID(request)
clientResult = userclient.getClientSpecificResult(user_uuid)
if clientResult != None:
logging.debug("Found overriding client result for user %s, returning it" % user_uuid)
return clientResult
else:
logging.debug("No overriding client result for user %s, returning choice " % user_uuid)
return choice.getResult(user_uuid)
@get('/compare')
def getCarbonCompare():
for key, val in request.headers.items():
print(" %s: %s" % (key, val))
from clients.data import data
if not skipAuth:
if 'User' not in request.headers or request.headers.get('User') == '':
return "Waiting for user data to become available..."
from clients.choice import choice
user_uuid = getUUID(request, inHeader=True)
print ('UUID', user_uuid)
clientResult = userclient.getClientSpecificResult(user_uuid)
if clientResult != None:
logging.debug("Found overriding client result for user %s, returning it" % user_uuid)
return clientResult
else:
logging.debug("No overriding client result for user %s, returning choice" % user_uuid)
return choice.getResult(user_uuid)
# Client related code START
@post("/client/<clientname>/<method>")
def callStudy(clientname, method):
user_uuid = getUUID(request)
request['user'] = user_uuid
return Client(clientname).callMethod(method, request)
@get('/client/pre-register')
def registeredForStudy():
userEmail = request.query.email
client = request.query.client
client_key = request.query.client_key
logging.debug("request = %s" % (request))
logging.debug("userEmail = %s, client = %s, client_key = %s" % (userEmail, client, client_key))
# try:
newSignupCount = Client(client).preRegister(client_key, userEmail)
# except Exception as e:
# abort(e.code, e.msg)
return {'email': userEmail, 'client': client, 'signup_count': newSignupCount }
@get('/client/<clientName>/<method>')
def javascriptCallback(clientName, method):
from clients.choice import choice
client = Client(clientName)
client_key = request.query.client_key
client.callJavascriptCallback(client_key, method, request.params)
return {'status': 'ok'}
# proxy used to request and process XML from an external API, then convert it to JSON
# original URL should be encoded in UTF-8
@get("/asJSON/<originalXMLWebserviceURL>")
def xmlProxy(originalXMLWebserviceURL):
decodedURL = urllib2.unquote(originalXMLWebserviceURL)
f = urllib2.urlopen(decodedURL)
xml = f.read()
parsedXML = xmltodict.parse(xml)
return json.dumps(parsedXML)
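# Editor's usage sketch (hypothetical URL): a client would request, e.g.,
#   GET /asJSON/http%3A%2F%2Fexample.com%2Ffeed.xml
# and receive the fetched XML document re-serialized as JSON by xmltodict.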
# Client related code END
# Data source integration START
@post('/movesCallback')
def movesCallback():
logging.debug("Request from user = %s" % request)
logging.debug("Request.json from user = %s" % request.json)
user_uuid = getUUID(request)
if user_uuid is None:
# Hack to support older clients that don't call register before calling movesCallback
# Remove by Dec 31, 2014
createUserProfile()
user_uuid = getUUID(request)
assert(user_uuid is not None)
code = request.json['code']
state = request.json['state']
return auth.movesCallback(code, state, user_uuid)
# Data source integration END
@app.hook('before_request')
def before_request():
print("START %s %s %s" % (datetime.now(), request.method, request.path))
request.params.start_ts = time.time()
logging.debug("START %s %s" % (request.method, request.path))
@app.hook('after_request')
def after_request():
msTimeNow = time.time()
duration = msTimeNow - request.params.start_ts
print("END %s %s %s %s %s " % (datetime.now(), request.method, request.path, request.params.user_uuid, duration))
logging.debug("END %s %s %s %s " % (request.method, request.path, request.params.user_uuid, duration))
# Keep track of the time and duration for each call
stats.storeServerEntry(request.params.user_uuid, "%s %s" % (request.method, request.path),
msTimeNow, duration)
# Auth helpers BEGIN
# This should only be used by createUserProfile since we may not have a UUID
# yet. All others should use the UUID.
def verifyUserToken(token):
try:
# attempt to validate token on the client-side
logging.debug("Using OAuth2Client to verify id token of length %d from android phones" % len(token))
tokenFields = oauth2client.client.verify_id_token(token,client_key)
logging.debug(tokenFields)
except AppIdentityError as androidExp:
try:
logging.debug("Using OAuth2Client to verify id token of length %d from android phones using old token" % len(token))
tokenFields = oauth2client.client.verify_id_token(token,client_key_old)
logging.debug(tokenFields)
except AppIdentityError as androidExpOld:
try:
logging.debug("Using OAuth2Client to verify id token from iOS phones")
tokenFields = oauth2client.client.verify_id_token(token, ios_client_key)
logging.debug(tokenFields)
except AppIdentityError as iOSExp:
traceback.print_exc()
logging.debug("OAuth failed to verify id token, falling back to constructedURL")
#fallback to verifying using Google API
constructedURL = ("https://www.googleapis.com/oauth2/v1/tokeninfo?id_token=%s" % token)
r = requests.get(constructedURL)
tokenFields = json.loads(r.content)
in_client_key = tokenFields['audience']
if (in_client_key != client_key):
if (in_client_key != ios_client_key):
abort(401, "Invalid client key %s" % in_client_key)
logging.debug("Found user email %s" % tokenFields['email'])
return tokenFields['email']
def getUUIDFromToken(token):
userEmail = verifyUserToken(token)
return __getUUIDFromEmail__(userEmail)
# This should not be used for general API calls
def __getUUIDFromEmail__(userEmail):
user=User.fromEmail(userEmail)
if user is None:
return None
user_uuid=user.uuid
return user_uuid
def __getToken__(request, inHeader):
if inHeader:
userHeaderSplitList = request.headers.get('User').split()
if len(userHeaderSplitList) == 1:
userToken = userHeaderSplitList[0]
else:
userToken = userHeaderSplitList[1]
else:
userToken = request.json['user']
return userToken
def getUUID(request, inHeader=False):
retUUID = None
if skipAuth:
if 'User' in request.headers or 'user' in request.json:
# skipAuth = true, so the email will be sent in plaintext
userEmail = __getToken__(request, inHeader)
retUUID = __getUUIDFromEmail__(userEmail)
logging.debug("skipAuth = %s, returning UUID directly from email %s" % (skipAuth, retUUID))
else:
# Return a random user to make it easy to experiment without having to specify a user
# TODO: Remove this if it is not actually used
from get_database import get_uuid_db
user_uuid = get_uuid_db().find_one()['uuid']
retUUID = user_uuid
logging.debug("skipAuth = %s, returning arbitrary UUID %s" % (skipAuth, retUUID))
if Client("choice").getClientKey() is None:
Client("choice").update(createKey = True)
else:
userToken = __getToken__(request, inHeader)
retUUID = getUUIDFromToken(userToken)
if retUUID is None:
raise HTTPError(403, "token is valid, but no account found for user")
request.params.user_uuid = retUUID
return retUUID
# Auth helpers END
# We have seen the sockets hang in practice. Let's set the socket timeout = 1
# hour to be on the safe side, and see if it is hit.
socket.setdefaulttimeout(float(socket_timeout))
for plugin in app.plugins:
if isinstance(plugin, JSONPlugin):
print("Replaced json_dumps in plugin with the one from bson")
plugin.json_dumps = bson.json_util.dumps
print("Changing bt.json_loads from %s to %s" % (bt.json_loads, bson.json_util.loads))
bt.json_loads = bson.json_util.loads
# The selection of SSL versus non-SSL should really be done through a config
# option and not through editing source code, so let's make this keyed off the
# port number
if server_port == "443":
# We support SSL and want to use it
run(host=server_host, port=server_port, server='cherrypy', debug=True,
certfile=ssl_cert, keyfile=private_key, ssl_module='builtin')
else:
# Non SSL option for testing on localhost
# We can theoretically use a separate skipAuth flag specified in the config file,
# but then we have to define the behavior if SSL is true and we are not
# running on localhost but still want to run without authentication. That is
# not really an important use case now, and it makes people have to change
# two values and increases the chance of bugs. So let's key the auth skipping from this as well.
skipAuth = True
print "Running with HTTPS turned OFF, skipAuth = True"
run(host=server_host, port=server_port, server='cherrypy', debug=True)
# run(host="0.0.0.0", port=server_port, server='cherrypy', debug=True)
# ===== igmhub/pyLyA :: py/picca/data.py =====
"""This module defines data structures to deal with line of sight data.
This module provides three classes (QSO, Forest, Delta)
to manage the line-of-sight data.
See the respective docstrings for more details
"""
import numpy as np
import iminuit
import fitsio
from picca import constants
from picca.utils import userprint, unred
from picca.dla import DLA
class QSO(object):
"""Class to represent quasar objects.
Attributes:
ra: float
Right-ascension of the quasar (in radians).
dec: float
Declination of the quasar (in radians).
z_qso: float
Redshift of the quasar.
plate: integer
Plate number of the observation.
fiberid: integer
Fiberid of the observation.
mjd: integer
Modified Julian Date of the observation.
thingid: integer
Thingid of the observation.
x_cart: float
The x coordinate when representing ra, dec in a cartesian
coordinate system.
y_cart: float
The y coordinate when representing ra, dec in a cartesian
coordinate system.
z_cart: float
The z coordinate when representing ra, dec in a cartesian
coordinate system.
cos_dec: float
Cosine of the declination angle.
        weight: float
Weight assigned to object
r_comov: float or None
Comoving distance to the object
dist_m: float or None
Angular diameter distance to object
log_lambda: float or None
Wavelength associated with the quasar redshift
Note that plate-fiberid-mjd is a unique identifier
for the quasar.
Methods:
__init__: Initialize class instance.
get_angle_between: Computes the angular separation between two quasars.
"""
def __init__(self, thingid, ra, dec, z_qso, plate, mjd, fiberid):
"""Initializes class instance.
Args:
thingid: integer
Thingid of the observation.
ra: float
Right-ascension of the quasar (in radians).
dec: float
Declination of the quasar (in radians).
z_qso: float
Redshift of the quasar.
plate: integer
Plate number of the observation.
mjd: integer
Modified Julian Date of the observation.
fiberid: integer
Fiberid of the observation.
"""
self.ra = ra
self.dec = dec
self.plate = plate
self.mjd = mjd
self.fiberid = fiberid
## cartesian coordinates
self.x_cart = np.cos(ra) * np.cos(dec)
self.y_cart = np.sin(ra) * np.cos(dec)
self.z_cart = np.sin(dec)
self.cos_dec = np.cos(dec)
self.z_qso = z_qso
self.thingid = thingid
# variables computed in function io.read_objects
self.weight = None
self.r_comov = None
self.dist_m = None
# variables computed in modules bin.picca_xcf_angl and bin.picca_xcf1d
self.log_lambda = None
def get_angle_between(self, data):
"""Computes the angular separation between two quasars.
Args:
data: QSO or list of QSO
Objects with which the angular separation will
be computed.
        Returns:
A float or an array (depending on input data) with the angular
separation between this quasar and the object(s) in data.
"""
# case 1: data is list-like
try:
x_cart = np.array([d.x_cart for d in data])
y_cart = np.array([d.y_cart for d in data])
z_cart = np.array([d.z_cart for d in data])
ra = np.array([d.ra for d in data])
dec = np.array([d.dec for d in data])
cos = x_cart * self.x_cart + y_cart * self.y_cart + z_cart * self.z_cart
w = cos >= 1.
if w.sum() != 0:
userprint('WARNING: {} pairs have cos>=1.'.format(w.sum()))
cos[w] = 1.
w = cos <= -1.
if w.sum() != 0:
userprint('WARNING: {} pairs have cos<=-1.'.format(w.sum()))
cos[w] = -1.
angl = np.arccos(cos)
w = ((np.absolute(ra - self.ra) < constants.SMALL_ANGLE_CUT_OFF) &
(np.absolute(dec - self.dec) < constants.SMALL_ANGLE_CUT_OFF))
if w.sum() != 0:
angl[w] = np.sqrt((dec[w] - self.dec)**2 +
(self.cos_dec * (ra[w] - self.ra))**2)
# case 2: data is a QSO
except TypeError:
x_cart = data.x_cart
y_cart = data.y_cart
z_cart = data.z_cart
ra = data.ra
dec = data.dec
cos = x_cart * self.x_cart + y_cart * self.y_cart + z_cart * self.z_cart
if cos >= 1.:
                userprint('WARNING: 1 pair has cosine >= 1.')
cos = 1.
elif cos <= -1.:
                userprint('WARNING: 1 pair has cosine <= -1.')
cos = -1.
angl = np.arccos(cos)
if ((np.absolute(ra - self.ra) < constants.SMALL_ANGLE_CUT_OFF) &
(np.absolute(dec - self.dec) < constants.SMALL_ANGLE_CUT_OFF)):
angl = np.sqrt((dec - self.dec)**2 + (self.cos_dec *
(ra - self.ra))**2)
return angl
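    # Editor's note (illustrative): below constants.SMALL_ANGLE_CUT_OFF the
    # arccos formula loses numerical precision, so get_angle_between falls
    # back to the flat-sky approximation
    #     angl ~ sqrt(ddec**2 + (cos(dec) * dra)**2)
    # e.g. two sightlines 1 arcsec apart in dec give angl ~ 4.85e-6 rad.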
class Forest(QSO):
"""Class to represent a Lyman alpha (or other absorption) forest
This class stores the information of an absorption forest.
This includes the information required to extract the delta
field from it: flux correction, inverse variance corrections,
dlas, absorbers, ...
Attributes:
## Inherits from QSO ##
log_lambda : array of floats
Array containing the logarithm of the wavelengths (in Angs)
flux : array of floats
Array containing the flux associated to each wavelength
ivar: array of floats
Array containing the inverse variance associated to each flux
mean_optical_depth: array of floats or None
Mean optical depth at the redshift of each pixel in the forest
dla_transmission: array of floats or None
            Decrease of the transmitted flux due to the presence of Damped
Lyman alpha absorbers
mean_expected_flux_frac: array of floats or None
Mean expected flux fraction using the mock continuum
order: 0 or 1
Order of the log10(lambda) polynomial for the continuum fit
exposures_diff: array of floats or None
Difference between exposures
reso: array of floats or None
Resolution of the forest
mean_snr: float or None
Mean signal-to-noise ratio in the forest
mean_reso: float or None
Mean resolution of the forest
mean_z: float or None
Mean redshift of the forest
cont: array of floats or None
Quasar continuum
p0: float or None
Zero point of the linear function (flux mean)
p1: float or None
Slope of the linear function (evolution of the flux)
bad_cont: string or None
Reason as to why the continuum fit is not acceptable
abs_igm: string
Name of the absorption line in picca.constants defining the
redshift of the forest pixels
Class attributes:
log_lambda_max: float
Logarithm of the maximum wavelength (in Angs) to be considered in a
forest.
log_lambda_min: float
Logarithm of the minimum wavelength (in Angs) to be considered in a
forest.
log_lambda_max_rest_frame: float
As log_lambda_max but for rest-frame wavelength.
log_lambda_min_rest_frame: float
As log_lambda_min but for rest-frame wavelength.
rebin: integer
Rebin wavelength grid by combining this number of adjacent pixels
(inverse variance weighting).
delta_log_lambda: float
Variation of the logarithm of the wavelength (in Angs) between two
pixels.
extinction_bv_map: dict
B-V extinction due to dust. Maps thingids (integers) to the dust
correction (array).
absorber_mask_width: float
Mask width on each side of the absorber central observed wavelength
in units of 1e4*dlog10(lambda/Angs).
dla_mask_limit: float
Lower limit on the DLA transmission. Transmissions below this
number are masked.
Methods:
__init__: Initializes class instances.
__add__: Adds the information of another forest.
correct_flux: Corrects for multiplicative errors in pipeline flux
calibration.
correct_ivar: Corrects for multiplicative errors in pipeline inverse
variance calibration.
get_var_lss: Interpolates the pixel variance due to the Large Scale
            Structure on the wavelength array.
get_eta: Interpolates the correction factor to the contribution of the
pipeline estimate of the instrumental noise to the variance on the
wavelength array.
get_fudge: Interpolates the fudge contribution to the variance on the
wavelength array.
get_mean_cont: Interpolates the mean quasar continuum over the whole
sample on the wavelength array.
mask: Applies wavelength masking.
add_optical_depth: Adds the contribution of a given species to the mean
optical depth.
        add_dla: Adds DLA to forest. Masks it by removing the affected pixels.
        add_absorber: Adds absorber to forest. Masks it by removing the
            affected pixels.
cont_fit: Computes the forest continuum.
"""
log_lambda_min = None
log_lambda_max = None
log_lambda_min_rest_frame = None
log_lambda_max_rest_frame = None
rebin = None
delta_log_lambda = None
@classmethod
def correct_flux(cls, log_lambda):
"""Corrects for multiplicative errors in pipeline flux calibration.
Empty function to be loaded at run-time.
Args:
log_lambda: array of float
Array containing the logarithm of the wavelengths (in Angs)
Returns:
An array with the correction
Raises:
NotImplementedError: Function was not specified
"""
raise NotImplementedError("Function should be specified at run-time")
@classmethod
def correct_ivar(cls, log_lambda):
"""Corrects for multiplicative errors in pipeline inverse variance
calibration.
Empty function to be loaded at run-time.
Args:
log_lambda: array of float
Array containing the logarithm of the wavelengths (in Angs)
Returns:
An array with the correction
Raises:
NotImplementedError: Function was not specified
"""
raise NotImplementedError("Function should be specified at run-time")
# map of g-band extinction to thingids for dust correction
extinction_bv_map = None
# absorber pixel mask limit
absorber_mask_width = None
    ## minimum dla transmission
dla_mask_limit = None
@classmethod
def get_var_lss(cls, log_lambda):
"""Interpolates the pixel variance due to the Large Scale Strucure on
the wavelength array.
Empty function to be loaded at run-time.
Args:
log_lambda: array of float
Array containing the logarithm of the wavelengths (in Angs)
Returns:
An array with the correction
Raises:
NotImplementedError: Function was not specified
"""
raise NotImplementedError("Function should be specified at run-time")
@classmethod
def get_eta(cls, log_lambda):
"""Interpolates the correction factor to the contribution of the
pipeline estimate of the instrumental noise to the variance on the
wavelength array.
See equation 4 of du Mas des Bourboux et al. 2020 for details.
Empty function to be loaded at run-time.
Args:
log_lambda: array of float
Array containing the logarithm of the wavelengths (in Angs)
Returns:
An array with the correction
Raises:
NotImplementedError: Function was not specified
"""
raise NotImplementedError("Function should be specified at run-time")
@classmethod
def get_mean_cont(cls, log_lambda):
"""Interpolates the mean quasar continuum over the whole
sample on the wavelength array.
See equation 2 of du Mas des Bourboux et al. 2020 for details.
Empty function to be loaded at run-time.
Args:
log_lambda: array of float
Array containing the logarithm of the wavelengths (in Angs)
Returns:
An array with the correction
Raises:
NotImplementedError: Function was not specified
"""
raise NotImplementedError("Function should be specified at run-time")
@classmethod
def get_fudge(cls, log_lambda):
"""Interpolates the fudge contribution to the variance on the
wavelength array.
See function epsilon in equation 4 of du Mas des Bourboux et al.
2020 for details.
Args:
log_lambda: array of float
Array containing the logarithm of the wavelengths (in Angs)
Returns:
An array with the correction
Raises:
NotImplementedError: Function was not specified
"""
raise NotImplementedError("Function should be specified at run-time")
def __init__(self,
log_lambda,
flux,
ivar,
thingid,
ra,
dec,
z_qso,
plate,
mjd,
fiberid,
exposures_diff=None,
reso=None,
mean_expected_flux_frac=None,
abs_igm="LYA"):
"""Initializes class instances.
Args:
log_lambda : array of floats
Array containing the logarithm of the wavelengths (in Angs).
flux : array of floats
Array containing the flux associated to each wavelength.
ivar : array of floats
Array containing the inverse variance associated to each flux.
            thingid : integer
ThingID of the observation.
ra: float
Right-ascension of the quasar (in radians).
dec: float
Declination of the quasar (in radians).
z_qso: float
Redshift of the quasar.
plate: integer
Plate number of the observation.
mjd: integer
Modified Julian Date of the observation.
fiberid: integer
Fiberid of the observation.
exposures_diff: array of floats or None - default: None
Difference between exposures.
reso: array of floats or None - default: None
Resolution of the forest.
mean_expected_flux_frac: array of floats or None - default: None
Mean expected flux fraction using the mock continuum
abs_igm: string - default: "LYA"
Name of the absorption in picca.constants defining the
redshift of the forest pixels
"""
QSO.__init__(self, thingid, ra, dec, z_qso, plate, mjd, fiberid)
# apply dust extinction correction
if Forest.extinction_bv_map is not None:
corr = unred(10**log_lambda, Forest.extinction_bv_map[thingid])
flux /= corr
ivar *= corr**2
if not exposures_diff is None:
exposures_diff /= corr
## cut to specified range
bins = (np.floor((log_lambda - Forest.log_lambda_min) /
Forest.delta_log_lambda + 0.5).astype(int))
log_lambda = Forest.log_lambda_min + bins * Forest.delta_log_lambda
w = (log_lambda >= Forest.log_lambda_min)
w = w & (log_lambda < Forest.log_lambda_max)
w = w & (log_lambda - np.log10(1. + self.z_qso) >
Forest.log_lambda_min_rest_frame)
w = w & (log_lambda - np.log10(1. + self.z_qso) <
Forest.log_lambda_max_rest_frame)
w = w & (ivar > 0.)
if w.sum() == 0:
return
bins = bins[w]
log_lambda = log_lambda[w]
flux = flux[w]
ivar = ivar[w]
if mean_expected_flux_frac is not None:
mean_expected_flux_frac = mean_expected_flux_frac[w]
if exposures_diff is not None:
exposures_diff = exposures_diff[w]
if reso is not None:
reso = reso[w]
# rebin arrays
rebin_log_lambda = (Forest.log_lambda_min +
np.arange(bins.max() + 1) * Forest.delta_log_lambda)
rebin_flux = np.zeros(bins.max() + 1)
rebin_ivar = np.zeros(bins.max() + 1)
if mean_expected_flux_frac is not None:
rebin_mean_expected_flux_frac = np.zeros(bins.max() + 1)
rebin_flux_aux = np.bincount(bins, weights=ivar * flux)
rebin_ivar_aux = np.bincount(bins, weights=ivar)
if mean_expected_flux_frac is not None:
rebin_mean_expected_flux_frac_aux = np.bincount(
bins, weights=ivar * mean_expected_flux_frac)
if exposures_diff is not None:
rebin_exposures_diff = np.bincount(bins,
weights=ivar * exposures_diff)
if reso is not None:
rebin_reso = np.bincount(bins, weights=ivar * reso)
rebin_flux[:len(rebin_flux_aux)] += rebin_flux_aux
rebin_ivar[:len(rebin_ivar_aux)] += rebin_ivar_aux
if mean_expected_flux_frac is not None:
rebin_mean_expected_flux_frac[:len(
rebin_mean_expected_flux_frac_aux
)] += rebin_mean_expected_flux_frac_aux
w = (rebin_ivar > 0.)
if w.sum() == 0:
return
log_lambda = rebin_log_lambda[w]
flux = rebin_flux[w] / rebin_ivar[w]
ivar = rebin_ivar[w]
if mean_expected_flux_frac is not None:
mean_expected_flux_frac = (rebin_mean_expected_flux_frac[w] /
rebin_ivar[w])
if exposures_diff is not None:
exposures_diff = rebin_exposures_diff[w] / rebin_ivar[w]
if reso is not None:
reso = rebin_reso[w] / rebin_ivar[w]
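        # Editor's note (illustrative): the np.bincount calls above implement
        # the inverse-variance-weighted rebinning; e.g. with bins=[0, 0, 1],
        # ivar=[1., 3., 2.] and flux=[2., 4., 1.]:
        #     np.bincount(bins, weights=ivar * flux) -> [14., 2.]
        #     np.bincount(bins, weights=ivar)        -> [ 4., 2.]
        # so the rebinned flux is [3.5, 1.].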
# Flux calibration correction
try:
correction = Forest.correct_flux(log_lambda)
flux /= correction
ivar *= correction**2
except NotImplementedError:
pass
# Inverse variance correction
try:
correction = Forest.correct_ivar(log_lambda)
ivar /= correction
except NotImplementedError:
pass
# keep the results so far in this instance
self.mean_optical_depth = None
self.dla_transmission = None
self.log_lambda = log_lambda
self.flux = flux
self.ivar = ivar
self.mean_expected_flux_frac = mean_expected_flux_frac
self.exposures_diff = exposures_diff
self.reso = reso
self.abs_igm = abs_igm
# compute mean quality variables
if reso is not None:
self.mean_reso = reso.mean()
else:
self.mean_reso = None
error = 1.0 / np.sqrt(ivar)
snr = flux / error
self.mean_snr = sum(snr) / float(len(snr))
lambda_abs_igm = constants.ABSORBER_IGM[self.abs_igm]
self.mean_z = ((np.power(10., log_lambda[len(log_lambda) - 1]) +
np.power(10., log_lambda[0])) / 2. / lambda_abs_igm -
1.0)
# continuum-related variables
self.cont = None
self.p0 = None
self.p1 = None
self.bad_cont = None
self.order = None
def coadd(self, other):
"""Coadds the information of another forest.
Forests are coadded by using inverse variance weighting.
Args:
other: Forest
The forest instance to be coadded. If other does not have the
attribute log_lambda, then the method returns without doing
anything.
Returns:
The coadded forest.
"""
if self.log_lambda is None or other.log_lambda is None:
return self
# this should contain all quantities that are to be coadded using
# ivar weighting
ivar_coadd_data = {}
log_lambda = np.append(self.log_lambda, other.log_lambda)
ivar_coadd_data['flux'] = np.append(self.flux, other.flux)
ivar = np.append(self.ivar, other.ivar)
if self.mean_expected_flux_frac is not None:
mean_expected_flux_frac = np.append(self.mean_expected_flux_frac,
other.mean_expected_flux_frac)
ivar_coadd_data['mean_expected_flux_frac'] = mean_expected_flux_frac
if self.exposures_diff is not None:
ivar_coadd_data['exposures_diff'] = np.append(
self.exposures_diff, other.exposures_diff)
if self.reso is not None:
ivar_coadd_data['reso'] = np.append(self.reso, other.reso)
# coadd the deltas by rebinning
bins = np.floor((log_lambda - Forest.log_lambda_min) /
Forest.delta_log_lambda + 0.5).astype(int)
rebin_log_lambda = Forest.log_lambda_min + (np.arange(bins.max() + 1) *
Forest.delta_log_lambda)
rebin_ivar = np.zeros(bins.max() + 1)
rebin_ivar_aux = np.bincount(bins, weights=ivar)
rebin_ivar[:len(rebin_ivar_aux)] += rebin_ivar_aux
w = (rebin_ivar > 0.)
self.log_lambda = rebin_log_lambda[w]
self.ivar = rebin_ivar[w]
# rebin using inverse variance weighting
for key, value in ivar_coadd_data.items():
rebin_value = np.zeros(bins.max() + 1)
rebin_value_aux = np.bincount(bins, weights=ivar * value)
rebin_value[:len(rebin_value_aux)] += rebin_value_aux
setattr(self, key, rebin_value[w] / rebin_ivar[w])
# recompute means of quality variables
if self.reso is not None:
self.mean_reso = self.reso.mean()
error = 1. / np.sqrt(self.ivar)
snr = self.flux / error
self.mean_snr = snr.mean()
lambda_abs_igm = constants.ABSORBER_IGM[self.abs_igm]
self.mean_z = ((np.power(10., log_lambda[len(log_lambda) - 1]) +
np.power(10., log_lambda[0])) / 2. / lambda_abs_igm -
1.0)
return self
def mask(self, mask_table):
"""Applies wavelength masking.
Pixels are masked according to a set of lines both in observed frame
and in the rest-frame. Masking is done by simply removing the pixels
from the arrays. Does nothing if the forest doesn't have the attribute
log_lambda set.
Args:
mask_table: astropy table
Table containing minimum and maximum wavelenths of absorption
lines to mask (in both rest frame and observed frame)
"""
if len(mask_table)==0:
return
select_rest_frame_mask = mask_table['frame'] == 'RF'
select_obs_mask = mask_table['frame'] == 'OBS'
mask_rest_frame = mask_table[select_rest_frame_mask]
mask_obs_frame = mask_table[select_obs_mask]
if len(mask_rest_frame)+len(mask_obs_frame)==0:
return
if self.log_lambda is None:
return
w = np.ones(self.log_lambda.size, dtype=bool)
for mask_range in mask_obs_frame:
w &= ((self.log_lambda < mask_range['log_wave_min']) |
(self.log_lambda > mask_range['log_wave_max']))
for mask_range in mask_rest_frame:
rest_frame_log_lambda = self.log_lambda - np.log10(1. + self.z_qso)
w &= ((rest_frame_log_lambda < mask_range['log_wave_min']) |
(rest_frame_log_lambda > mask_range['log_wave_max']))
parameters = [
'ivar', 'log_lambda', 'flux', 'dla_transmission',
'mean_optical_depth', 'mean_expected_flux_frac', 'exposures_diff',
'reso'
]
for param in parameters:
if hasattr(self, param) and (getattr(self, param) is not None):
setattr(self, param, getattr(self, param)[w])
return
def add_optical_depth(self, tau, gamma, lambda_rest_frame):
"""Adds the contribution of a given species to the mean optical depth.
Flux will be corrected by the mean optical depth. This correction is
governed by the optical depth-flux relation:
        `F = exp(-tau*(1+z)^gamma)`
Args:
tau: float
Mean optical depth
gamma: float
Optical depth redshift evolution. Optical depth evolves as
`(1+z)^gamma`
lambda_rest_frame: float
Restframe wavelength of the element responsible for the absorption.
In Angstroms
"""
if self.log_lambda is None:
return
if self.mean_optical_depth is None:
self.mean_optical_depth = np.ones(self.log_lambda.size)
w = 10.**self.log_lambda / (1. + self.z_qso) <= lambda_rest_frame
z = 10.**self.log_lambda / lambda_rest_frame - 1.
self.mean_optical_depth[w] *= np.exp(-tau * (1. + z[w])**gamma)
return
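    # Sketch of a typical call (hypothetical parameter values): the mean
    # transmitted flux fraction applied above is
    #
    #     F(z) = exp(-tau * (1 + z)**gamma),  z = lambda_obs / lambda_rf - 1
    #
    # restricted to pixels blueward of the absorber rest wavelength, e.g.
    #
    #     forest.add_optical_depth(tau=0.0023, gamma=3.64,
    #                              lambda_rest_frame=1215.67)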
def add_dla(self, z_abs, nhi, mask_table=None):
"""Adds DLA to forest. Masks it by removing the afffected pixels.
Args:
z_abs: float
Redshift of the DLA absorption
nhi : float
DLA column density in log10(cm^-2)
            mask_table : astropy table or None
                Wavelengths to be masked in DLA rest-frame wavelength
"""
if self.log_lambda is None:
return
if self.dla_transmission is None:
self.dla_transmission = np.ones(len(self.log_lambda))
self.dla_transmission *= DLA(self, z_abs, nhi).transmission
w = self.dla_transmission > Forest.dla_mask_limit
        if mask_table is not None and len(mask_table) > 0:
            select_dla_mask = mask_table['frame'] == 'RF_DLA'
            mask = mask_table[select_dla_mask]
            if len(mask) > 0:
                for mask_range in mask:
                    w &= ((self.log_lambda - np.log10(1. + z_abs) < mask_range['log_wave_min']) |
                          (self.log_lambda - np.log10(1. + z_abs) > mask_range['log_wave_max']))
# do the actual masking
parameters = [
'ivar', 'log_lambda', 'flux', 'dla_transmission',
'mean_optical_depth', 'mean_expected_flux_frac', 'exposures_diff',
'reso'
]
for param in parameters:
if hasattr(self, param) and (getattr(self, param) is not None):
setattr(self, param, getattr(self, param)[w])
return
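    # Sketch of a typical call (hypothetical values): mask a DLA at
    # z_abs = 2.3 with column density log10(N_HI / cm^-2) = 20.5; pixels
    # whose Voigt-profile transmission falls below Forest.dla_mask_limit
    # are removed above.
    #
    #     forest.add_dla(z_abs=2.3, nhi=20.5)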
def add_absorber(self, lambda_absorber):
"""Adds absorber to forest. Masks it by removing the afffected pixels.
Args:
lambda_absorber: float
Wavelength of the absorber
"""
if self.log_lambda is None:
return
w = np.ones(self.log_lambda.size, dtype=bool)
w &= (np.fabs(1.e4 * (self.log_lambda - np.log10(lambda_absorber))) >
Forest.absorber_mask_width)
parameters = [
'ivar', 'log_lambda', 'flux', 'dla_transmission',
'mean_optical_depth', 'mean_expected_flux_frac', 'exposures_diff',
'reso'
]
for param in parameters:
if hasattr(self, param) and (getattr(self, param) is not None):
setattr(self, param, getattr(self, param)[w])
return
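    # Sketch (hypothetical value): drop pixels within
    # Forest.absorber_mask_width * 1e-4 dex in log10(lambda) of an absorber
    # observed at 4000 Angstrom:
    #
    #     forest.add_absorber(lambda_absorber=4000.)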
def cont_fit(self):
"""Computes the forest continuum.
Fits a model based on the mean quasar continuum and linear function
(see equation 2 of du Mas des Bourboux et al. 2020)
Flags the forest with bad_cont if the computation fails.
"""
log_lambda_max = (Forest.log_lambda_max_rest_frame +
np.log10(1 + self.z_qso))
log_lambda_min = (Forest.log_lambda_min_rest_frame +
np.log10(1 + self.z_qso))
# get mean continuum
try:
mean_cont = Forest.get_mean_cont(self.log_lambda -
np.log10(1 + self.z_qso))
except ValueError:
raise Exception("Problem found when loading get_mean_cont")
# add the optical depth correction
# (previously computed using method add_optical_depth)
        if self.mean_optical_depth is not None:
            mean_cont *= self.mean_optical_depth
# add the dla transmission correction
# (previously computed using method add_dla)
        if self.dla_transmission is not None:
            mean_cont *= self.dla_transmission
        # pixel variance due to the Large Scale Structure
var_lss = Forest.get_var_lss(self.log_lambda)
# correction factor to the contribution of the pipeline
# estimate of the instrumental noise to the variance.
eta = Forest.get_eta(self.log_lambda)
# fudge contribution to the variance
fudge = Forest.get_fudge(self.log_lambda)
def get_cont_model(p0, p1):
"""Models the flux continuum by multiplying the mean_continuum
by a linear function
Args:
p0: float
Zero point of the linear function (flux mean)
p1: float
Slope of the linear function (evolution of the flux)
Global args (defined only in the scope of function cont_fit)
log_lambda_min: float
Minimum logarithm of the wavelength (in Angs)
log_lambda_max: float
                    Maximum logarithm of the wavelength (in Angs)
mean_cont: array of floats
Mean continuum
"""
line = (p1 * (self.log_lambda - log_lambda_min) /
(log_lambda_max - log_lambda_min) + p0)
return line * mean_cont
def chi2(p0, p1):
"""Computes the chi2 of a given model (see function model above).
Args:
p0: float
Zero point of the linear function (see function model above)
p1: float
Slope of the linear function (see function model above)
Global args (defined only in the scope of function cont_fit)
eta: array of floats
Correction factor to the contribution of the pipeline
estimate of the instrumental noise to the variance.
Returns:
The obtained chi2
"""
cont_model = get_cont_model(p0, p1)
var_pipe = 1. / self.ivar / cont_model**2
## prep_del.variance is the variance of delta
## we want here the weights = ivar(flux)
variance = eta * var_pipe + var_lss + fudge / var_pipe
weights = 1.0 / cont_model**2 / variance
# force weights=1 when use-constant-weight
# TODO: make this condition clearer, maybe pass an option
# use_constant_weights?
if (eta == 0).all():
weights = np.ones(len(weights))
chi2_contribution = (self.flux - cont_model)**2 * weights
return chi2_contribution.sum() - np.log(weights).sum()
p0 = (self.flux * self.ivar).sum() / self.ivar.sum()
p1 = 0.0
minimizer = iminuit.Minuit(chi2,
p0=p0,
p1=p1,
error_p0=p0 / 2.,
error_p1=p0 / 2.,
errordef=1.,
print_level=0,
fix_p1=(self.order == 0))
minimizer_result, _ = minimizer.migrad()
self.cont = get_cont_model(minimizer.values["p0"],
minimizer.values["p1"])
self.p0 = minimizer.values["p0"]
self.p1 = minimizer.values["p1"]
self.bad_cont = None
if not minimizer_result.is_valid:
self.bad_cont = "minuit didn't converge"
if np.any(self.cont <= 0):
self.bad_cont = "negative continuum"
## if the continuum is negative, then set it to a very small number
## so that this forest is ignored
if self.bad_cont is not None:
self.cont = self.cont * 0 + 1e-10
self.p0 = 0.
self.p1 = 0.
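# A minimal sketch of the model fitted by Forest.cont_fit above (not part
# of the class): the continuum is the mean continuum rescaled by a linear
# function of log-wavelength,
#
#     cont = (p0 + p1 * (log_lambda - log_lambda_min)
#                  / (log_lambda_max - log_lambda_min)) * mean_cont
#
# and each pixel is weighted by the inverse of the modelled variance,
#
#     variance = eta * var_pipe + var_lss + fudge / var_pipe
#     weights = 1. / (cont**2 * variance)
#
# With order == 0, p1 is held fixed at 0 and only the amplitude p0 is fitted.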
class Delta(QSO):
"""Class to represent the mean transimission fluctuation field (delta)
This class stores the information for the deltas for a given line of sight
Attributes:
## Inherits from QSO ##
log_lambda : array of floats
Array containing the logarithm of the wavelengths (in Angs)
weights : array of floats
Weights associated to pixel. Overloaded from parent class
cont: array of floats
Quasar continuum
delta: array of floats
Mean transmission fluctuation (delta field)
order: 0 or 1
Order of the log10(lambda) polynomial for the continuum fit
ivar: array of floats
Inverse variance associated to each flux
exposures_diff: array of floats
Difference between exposures
mean_snr: float
Mean signal-to-noise ratio in the forest
mean_reso: float
Mean resolution of the forest
mean_z: float
Mean redshift of the forest
delta_log_lambda: float
Variation of the logarithm of the wavelength between two pixels
z: array of floats or None
            Redshift of the absorption
r_comov: array of floats or None
Comoving distance to the object. Overloaded from parent class
dist_m: array of floats or None
Angular diameter distance to object. Overloaded from parent
class
neighbours: list of Delta or QSO or None
Neighbouring deltas/quasars
fname: string or None
String identifying Delta as part of a group
Methods:
__init__: Initializes class instances.
from_fitsio: Initialize instance from a fits file.
from_ascii: Initialize instance from an ascii file.
        from_image: Initialize instances from a fits image file.
project: Project the delta field.
"""
def __init__(self, thingid, ra, dec, z_qso, plate, mjd, fiberid, log_lambda,
weights, cont, delta, order, ivar, exposures_diff, mean_snr,
mean_reso, mean_z, delta_log_lambda):
"""Initializes class instances.
Args:
thingid: integer
Thingid of the observation.
ra: float
Right-ascension of the quasar (in radians).
dec: float
Declination of the quasar (in radians).
z_qso: float
Redshift of the quasar.
plate: integer
Plate number of the observation.
mjd: integer
Modified Julian Date of the observation.
fiberid: integer
Fiberid of the observation.
log_lambda: array of floats
Logarithm of the wavelengths (in Angs)
weights: array of floats
Pixel weights
cont: array of floats
Quasar continuum
delta: array of floats
Mean transmission fluctuation (delta field)
order: 0 or 1
Order of the log10(lambda) polynomial for the continuum fit
ivar: array of floats
Inverse variance associated to each flux
exposures_diff: array of floats
Difference between exposures
mean_snr: float
Mean signal-to-noise ratio in the forest
mean_reso: float
Mean resolution of the forest
mean_z: float
Mean redshift of the forest
delta_log_lambda: float
Variation of the logarithm of the wavelength between two pixels
"""
QSO.__init__(self, thingid, ra, dec, z_qso, plate, mjd, fiberid)
self.log_lambda = log_lambda
self.weights = weights
self.cont = cont
self.delta = delta
self.order = order
self.ivar = ivar
self.exposures_diff = exposures_diff
self.mean_snr = mean_snr
self.mean_reso = mean_reso
self.mean_z = mean_z
self.delta_log_lambda = delta_log_lambda
# variables computed in function io.read_deltas
self.z = None
self.r_comov = None
self.dist_m = None
# variables computed in function cf.fill_neighs or xcf.fill_neighs
self.neighbours = None
# variables used in function cf.compute_wick_terms and
# main from bin.picca_wick
self.fname = None
@classmethod
def from_fitsio(cls, hdu, pk1d_type=False):
"""Initialize instance from a fits file.
Args:
hdu: fitsio.hdu.table.TableHDU
A Header Data Unit opened with fitsio
pk1d_type: bool - default: False
Specifies if the fits file is formatted for the 1D Power
Spectrum analysis
Returns:
a Delta instance
"""
header = hdu.read_header()
delta = hdu['DELTA'][:].astype(float)
log_lambda = hdu['LOGLAM'][:].astype(float)
if pk1d_type:
ivar = hdu['IVAR'][:].astype(float)
exposures_diff = hdu['DIFF'][:].astype(float)
mean_snr = header['MEANSNR']
mean_reso = header['MEANRESO']
mean_z = header['MEANZ']
delta_log_lambda = header['DLL']
weights = None
cont = None
else:
ivar = None
exposures_diff = None
mean_snr = None
mean_reso = None
delta_log_lambda = None
mean_z = None
weights = hdu['WEIGHT'][:].astype(float)
cont = hdu['CONT'][:].astype(float)
thingid = header['THING_ID']
ra = header['RA']
dec = header['DEC']
z_qso = header['Z']
plate = header['PLATE']
mjd = header['MJD']
fiberid = header['FIBERID']
try:
order = header['ORDER']
except KeyError:
order = 1
return cls(thingid, ra, dec, z_qso, plate, mjd, fiberid, log_lambda,
weights, cont, delta, order, ivar, exposures_diff, mean_snr,
mean_reso, mean_z, delta_log_lambda)
@classmethod
def from_ascii(cls, line):
"""Initialize instance from an ascii file.
Args:
line: string
A line of the ascii file containing information from a line
of sight
Returns:
a Delta instance
"""
cols = line.split()
plate = int(cols[0])
mjd = int(cols[1])
fiberid = int(cols[2])
ra = float(cols[3])
dec = float(cols[4])
z_qso = float(cols[5])
mean_z = float(cols[6])
mean_snr = float(cols[7])
mean_reso = float(cols[8])
delta_log_lambda = float(cols[9])
num_pixels = int(cols[10])
delta = np.array(cols[11:11 + num_pixels]).astype(float)
log_lambda = np.array(cols[11 + num_pixels:11 +
2 * num_pixels]).astype(float)
ivar = np.array(cols[11 + 2 * num_pixels:11 +
3 * num_pixels]).astype(float)
exposures_diff = np.array(cols[11 + 3 * num_pixels:11 +
4 * num_pixels]).astype(float)
thingid = 0
order = 0
weights = None
cont = None
return cls(thingid, ra, dec, z_qso, plate, mjd, fiberid, log_lambda,
weights, cont, delta, order, ivar, exposures_diff, mean_snr,
mean_reso, mean_z, delta_log_lambda)
@staticmethod
def from_image(file):
"""Initialize instance from an ascii file.
Args:
file: string
Name of the fits file containing the image data
Returns:
a list of Delta instances
"""
hdu = fitsio.FITS(file)
deltas_image = hdu[0].read().astype(float)
ivar_image = hdu[1].read().astype(float)
log_lambda_image = hdu[2].read().astype(float)
ra = hdu[3]["RA"][:].astype(np.float64) * np.pi / 180.
dec = hdu[3]["DEC"][:].astype(np.float64) * np.pi / 180.
z = hdu[3]["Z"][:].astype(np.float64)
plate = hdu[3]["PLATE"][:]
mjd = hdu[3]["MJD"][:]
fiberid = hdu[3]["FIBER"]
thingid = hdu[3]["THING_ID"][:]
        nspec = deltas_image.shape[1]
deltas = []
for index in range(nspec):
if index % 100 == 0:
userprint("\rreading deltas {} of {}".format(index, nspec),
end="")
delta = deltas_image[:, index]
ivar = ivar_image[:, index]
w = ivar > 0
delta = delta[w]
aux_ivar = ivar[w]
log_lambda = log_lambda_image[w]
order = 1
exposures_diff = None
mean_snr = None
mean_reso = None
delta_log_lambda = None
mean_z = None
deltas.append(
Delta(thingid[index], ra[index], dec[index], z[index],
plate[index], mjd[index], fiberid[index], log_lambda,
aux_ivar, None, delta, order, ivar, exposures_diff,
mean_snr, mean_reso, mean_z, delta_log_lambda))
hdu.close()
return deltas
def project(self):
"""Project the delta field.
The projection gets rid of the distortion caused by the continuum
        fitting. See equations 5 and 6 of du Mas des Bourboux et al. 2020
"""
# 2nd term in equation 6
mean_delta = np.average(self.delta, weights=self.weights)
# 3rd term in equation 6
res = 0
if (self.order == 1) and self.delta.shape[0] > 1:
mean_log_lambda = np.average(self.log_lambda, weights=self.weights)
meanless_log_lambda = self.log_lambda - mean_log_lambda
mean_delta_log_lambda = (
np.sum(self.weights * self.delta * meanless_log_lambda) /
np.sum(self.weights * meanless_log_lambda**2))
res = mean_delta_log_lambda * meanless_log_lambda
elif self.order == 1:
res = self.delta
self.delta -= mean_delta + res
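# A worked sketch of the projection above (notation only, no new code):
# equation 6 subtracts the weighted mean delta and, for order == 1, the
# weighted linear component in log-wavelength,
#
#     delta -> delta - <delta> - b * (log_lambda - <log_lambda>)
#
# with b = sum(w * delta * (log_lambda - <log_lambda>))
#          / sum(w * (log_lambda - <log_lambda>)**2),
#
# so the projected deltas have zero weighted mean and zero weighted slope
# along each forest.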
| gpl-3.0 | 5,412,465,735,005,068 | 36.097808 | 97 | 0.557275 | false |
FylmTM/edX-code | MITx_6.00.1x/final_exam/problem_7.py | 1 | 1961 | class Frob(object):
def __init__(self, name):
self.name = name
self.before = None
self.after = None
def setBefore(self, before):
# example: a.setBefore(b) sets b before a
self.before = before
def setAfter(self, after):
# example: a.setAfter(b) sets b after a
self.after = after
def getBefore(self):
return self.before
def getAfter(self):
return self.after
def myName(self):
return self.name
def insert(atMe, newFrob):
    """Insert newFrob into the doubly linked list containing atMe,
    keeping Frobs sorted alphabetically by name (equal names go after
    the existing node). Insertion starts from the latest node and
    walks backwards."""
def get_latest(node):
while node.getAfter() is not None:
node = node.getAfter()
return node
def innerInsert(innerAtMe, innerNewFrob):
if innerAtMe.myName() > innerNewFrob.myName():
if innerAtMe.getBefore() is None:
innerAtMe.setBefore(innerNewFrob)
innerNewFrob.setAfter(innerAtMe)
else:
innerInsert(innerAtMe.getBefore(), innerNewFrob)
else:
temp = innerAtMe.getAfter()
if temp is None:
innerAtMe.setAfter(innerNewFrob)
innerNewFrob.setBefore(innerAtMe)
else:
innerAtMe.setAfter(innerNewFrob)
innerNewFrob.setBefore(innerAtMe)
innerNewFrob.setAfter(temp)
temp.setBefore(innerNewFrob)
innerInsert(get_latest(atMe), newFrob)
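# Example trace (hypothetical): with a list a <-> e, insert(a, Frob('c'))
# starts from the latest node 'e'; since 'e' > 'c', innerInsert recurses to
# 'a', and since 'a' < 'c' the new node is spliced in between, giving
# a <-> c <-> e.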
def print_frobs(start):
    """Return the names from `start` to the end of the list, joined by ' - '."""
if start.getAfter() is not None:
return start.myName() + " - " + print_frobs(start.getAfter())
else:
return start.myName()
eric = Frob('eric')
andrew = Frob('andrew')
ruth = Frob('ruth')
fred = Frob('fred')
martha = Frob('martha')
insert(eric, andrew)
print print_frobs(andrew)
print
insert(eric, ruth)
print print_frobs(andrew)
print
insert(eric, fred)
print print_frobs(andrew)
print
insert(ruth, martha)
print print_frobs(andrew)
print
insert(eric, Frob('martha'))
print print_frobs(andrew)
print
| mit | 4,611,521,960,080,144,400 | 26.236111 | 69 | 0.609893 | false |
Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2017_06_01_preview/aio/_policy_client.py | 1 | 4542 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
from ._configuration import PolicyClientConfiguration
from .operations import PolicyAssignmentsOperations
from .operations import PolicySetDefinitionsOperations
from .operations import PolicyDefinitionsOperations
from .. import models
class PolicyClient(object):
"""To manage and control access to your resources, you can define customized policies and assign them at a scope.
:ivar policy_assignments: PolicyAssignmentsOperations operations
:vartype policy_assignments: azure.mgmt.resource.policy.v2017_06_01_preview.aio.operations.PolicyAssignmentsOperations
:ivar policy_set_definitions: PolicySetDefinitionsOperations operations
:vartype policy_set_definitions: azure.mgmt.resource.policy.v2017_06_01_preview.aio.operations.PolicySetDefinitionsOperations
:ivar policy_definitions: PolicyDefinitionsOperations operations
:vartype policy_definitions: azure.mgmt.resource.policy.v2017_06_01_preview.aio.operations.PolicyDefinitionsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
self._config = PolicyClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.policy_assignments = PolicyAssignmentsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.policy_set_definitions = PolicySetDefinitionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.policy_definitions = PolicyDefinitionsOperations(
self._client, self._config, self._serialize, self._deserialize)
async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
"""Runs the network request through the client's chained policies.
:param http_request: The network request you want to make. Required.
:type http_request: ~azure.core.pipeline.transport.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to True.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.pipeline.transport.AsyncHttpResponse
"""
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
stream = kwargs.pop("stream", True)
pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs)
return pipeline_response.http_response
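    # A minimal usage sketch of _send_request (hypothetical request path;
    # not generated code):
    #
    #     request = HttpRequest("GET", "/subscriptions/{subscriptionId}"
    #                                  "/providers/Microsoft.Authorization"
    #                                  "/policyDefinitions"
    #                                  "?api-version=2017-06-01-preview")
    #     response = await client._send_request(request)
    #     print(response.status_code)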
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "PolicyClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
| mit | -8,831,847,569,806,232,000 | 48.912088 | 129 | 0.698591 | false |
rbaumg/trac | trac/wiki/tests/macros.py | 1 | 44372 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2019 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import io
import os
import unittest
from trac.attachment import Attachment
from trac.config import BoolOption, ConfigSection, IntOption, ListOption, \
Option
from trac.test import locale_en, mkdtemp, rmtree
from trac.util.datefmt import datetime_now, format_date, utc
from trac.wiki.model import WikiPage
from trac.wiki.tests import formatter
def add_pages(tc, names):
now = datetime_now(utc)
for name in names:
w = WikiPage(tc.env)
w.name = name
w.text = '--'
w.save('joe', 'the page ' + name, now)
def add_attachment(tc, realm, id, file):
attachment = Attachment(tc.env, realm, id)
attachment.description = "image in %s" % id
attachment.insert(file, io.BytesIO(), 0, 2)
# == [[Image]]
def image_setup(tc):
add_pages(tc, ['page:fr', 'page'])
tc.env.path = mkdtemp()
add_attachment(tc, 'wiki', 'page:fr', 'img.png')
add_attachment(tc, 'wiki', 'page', 'img.png')
add_attachment(tc, 'wiki', 'page', '][img.png')
tc.env.config.set('interwiki', 'shields', 'https://img.shields.io/')
tc.env.config.set('interwiki', 'travis',
'https://travis-ci.org/$1?branch=$2')
htdocs_location = 'http://assets.example.org/common'
tc.context.req.chrome['htdocs_location'] = htdocs_location
tc.env.config.set('trac', 'htdocs_location', htdocs_location)
def image_teardown(tc):
rmtree(os.path.join(tc.env.path, 'files'))
os.rmdir(tc.env.path) # there was only 'files' below tc.env.path
tc.env.reset_db()
# Note: using `« test »` string in the following tests for checking
# unicode robustness and whitespace support (first space is
# normal ASCII SPACE, second is Unicode NO-BREAK SPACE).
IMAGE_MACRO_TEST_CASES = u"""
============================== Image, no arguments
[[Image]]
------------------------------
============================== Image, no arguments
[[Image()]]
------------------------------
============================== Image, multiple no arguments
[[Image(,)]]
------------------------------
============================== Image, whitespace argument
[[Image( )]]
------------------------------
============================== Image, ZWSP argument
[[Image()]]
------------------------------
============================== source: Image, no other arguments
[[Image(source:« test ».png)]]
------------------------------
<p>
<a href="/browser/%C2%AB%20test%C2%A0%C2%BB.png" style="padding:0; border:none"><img alt="source:« test ».png" src="/browser/%C2%AB%20test%C2%A0%C2%BB.png?format=raw" title="source:« test ».png" /></a>
</p>
------------------------------
[[Image(...)]]
============================== source: Image, inline
[[Image(source:« test ».png, inline)]]
------------------------------
<p>
<a href="/browser/%C2%AB%20test%C2%A0%C2%BB.png" style="padding:0; border:none"><img alt="source:« test ».png" src="/browser/%C2%AB%20test%C2%A0%C2%BB.png?format=raw" title="source:« test ».png" /></a>
</p>
------------------------------
<a href="/browser/%C2%AB%20test%C2%A0%C2%BB.png" style="padding:0; border:none"><img alt="source:« test ».png" src="/browser/%C2%AB%20test%C2%A0%C2%BB.png?format=raw" title="source:« test ».png" /></a>
============================== intertrac:source: Image, no other arguments
[[Image(trac:source:/trunk/doc/images/bkgnd_pattern_« test ».png)]]
------------------------------
<p>
<a href="http://trac.edgewall.org/intertrac/source%3A/trunk/doc/images/bkgnd_pattern_%C2%AB%20test%C2%A0%C2%BB.png" style="padding:0; border:none"><img alt="source:/trunk/doc/images/bkgnd_pattern_« test ».png in Trac's Trac" crossorigin="anonymous" src="http://trac.edgewall.org/intertrac/source%3A/trunk/doc/images/bkgnd_pattern_%C2%AB%20test%C2%A0%C2%BB.png%3Fformat%3Draw" title="source:/trunk/doc/images/bkgnd_pattern_« test ».png in Trac's Trac" /></a>
</p>
============================== source: Image, nolink
[[Image(source:« test », nolink)]]
------------------------------
<p>
<img alt="source:« test »" src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" title="source:« test »" />
</p>
============================== source: Image, normal args
[[Image(source:« test », align=left, title=Test)]]
------------------------------
<p>
<a href="/browser/%C2%AB%20test%C2%A0%C2%BB" style="padding:0; border:none"><img alt="source:« test »" src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" style="float:left" title="Test" /></a>
</p>
============================== source: Image, size arg
[[Image(source:« test », 30%)]]
------------------------------
<p>
<a href="/browser/%C2%AB%20test%C2%A0%C2%BB" style="padding:0; border:none"><img alt="source:« test »" src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" title="source:« test »" width="30%" /></a>
</p>
============================== source: Image, keyword alignment
[[Image(source:« test », right)]]
------------------------------
<p>
<a href="/browser/%C2%AB%20test%C2%A0%C2%BB" style="padding:0; border:none"><img alt="source:« test »" src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" style="float:right" title="source:« test »" /></a>
</p>
============================== http: Image, nolink
[[Image(http://www.edgewall.com/gfx/shredder_« test ».png, nolink)]]
------------------------------
<p>
<img alt="http://www.edgewall.com/gfx/shredder_« test ».png" crossorigin="anonymous" src="http://www.edgewall.com/gfx/shredder_« test ».png" title="http://www.edgewall.com/gfx/shredder_« test ».png" />
</p>
============================== http: Image, absolute, many ':'
[[Image(http://chart.apis.google.com:80/chart?cht=p3&chd=s:hW&chs=250x100&chl=Héllo|Wôrld, title=Google & Charting, link=)]]
------------------------------
<p>
<img alt="http://chart.apis.google.com:80/chart" crossorigin="anonymous" src="http://chart.apis.google.com:80/chart?cht=p3&chd=s:hW&chs=250x100&chl=Héllo|Wôrld" title="Google & Charting" />
</p>
============================== // Image, server-relative
[[Image(//browser/« test »?format=raw, link=)]]
------------------------------
<p>
<img alt="/browser/« test »" src="/browser/« test »?format=raw" title="/browser/« test »" />
</p>
============================== / Image, project-relative, link to WikiStart
[[Image(/browser/« test »?format=raw, link=wiki:WikiStart)]]
------------------------------
<p>
<a href="/wiki/WikiStart" style="padding:0; border:none"><img alt="/browser/« test »" src="/browser/%C2%AB%20test%C2%A0%C2%BB?format=raw" title="/browser/« test »" /></a>
</p>
============================== Strip unicode white-spaces and ZWSPs (#10668)
[[Image( source:« test ».png , nolink, 100% )]]
------------------------------
<p>
<img alt="source:« test ».png" src="/browser/%C2%AB%20test%C2%A0%C2%BB.png?format=raw" title="source:« test ».png" width="100%" />
</p>
============================== Attachments on page with ':' characters (#10562)
[[Image("page:fr":img.png,nolink)]]
------------------------------
<p>
<img alt="image in page:fr" src="/raw-attachment/wiki/page%3Afr/img.png" title="image in page:fr" />
</p>
============================== htdocs: Image, nolink
[[Image(htdocs:trac_logo.png, nolink)]]
------------------------------
<p>
<img alt="trac_logo.png" src="/chrome/site/trac_logo.png" title="trac_logo.png" />
</p>
============================== shared: Image, nolink
[[Image(shared:trac_logo.png, nolink)]]
------------------------------
<p>
<img alt="trac_logo.png" src="/chrome/shared/trac_logo.png" title="trac_logo.png" />
</p>
==============================
[[Image("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=")]]
------------------------------
<p>
<a href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=" style="padding:0; border:none"><img alt="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=" title="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=" /></a>
</p>
==============================
[[Image("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=", nolink)]]
------------------------------
<p>
<img alt="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=" src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=" title="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoAQMAAAC2MCouAAAAA1BMVEXLQ0MOAUiXAAAAC0lEQVQIHWMYYQAAAPAAASEIRrcAAAAASUVORK5CYII=" />
</p>
============================== InterWiki
[[Image(shields:travis/edgewall/trac.svg, link=trac:source:/trunk)]]
[[Image(travis:edgewall/trac.svg:1.0-stable, link=trac:source:/branches/1.0-stable)]]
------------------------------
<p>
<a href="http://trac.edgewall.org/intertrac/source%3A/trunk" style="padding:0; border:none"><img alt="travis/edgewall/trac.svg in shields" crossorigin="anonymous" src="https://img.shields.io/travis/edgewall/trac.svg" title="travis/edgewall/trac.svg in shields" /></a>
<a href="http://trac.edgewall.org/intertrac/source%3A/branches/1.0-stable" style="padding:0; border:none"><img alt="edgewall/trac.svg:1.0-stable in travis" crossorigin="anonymous" src="https://travis-ci.org/edgewall/trac.svg?branch=1.0-stable" title="edgewall/trac.svg:1.0-stable in travis" /></a>
</p>
============================== InterWiki, nolink
[[Image(shields:pypi/dm/trac.svg, nolink)]]
------------------------------
<p>
<img alt="pypi/dm/trac.svg in shields" crossorigin="anonymous" src="https://img.shields.io/pypi/dm/trac.svg" title="pypi/dm/trac.svg in shields" />
</p>
============================== No attachment, nolink
[[Image(notfound.png, nolink)]]
------------------------------
<p>
<img alt="No image "notfound.png" attached to WikiStart" crossorigin="anonymous" src="http://assets.example.org/common/attachment.png" title="No image "notfound.png" attached to WikiStart" />
</p>
============================== No attachment, correct CSS
[[Image(img.png, margin-bottom=-1)]]
------------------------------
<p>
<img alt="No image "img.png" attached to WikiStart" crossorigin="anonymous" src="http://assets.example.org/common/attachment.png" style="margin-bottom: 1px" title="No image "img.png" attached to WikiStart" />
</p>
============================== No attachment, invalid arg
[[Image(img.png, margin-bottom=--)]]
------------------------------
<p>
<div class="system-message"><strong>Macro Image(img.png, margin-bottom=--) failed</strong><pre>Invalid macro argument <code>margin-bottom=--</code></pre></div>
</p>
============================== No attachment, invalid arg 2
[[Image(img.png, margin-top=--)]]
------------------------------
<p>
<div class="system-message"><strong>Macro Image(img.png, margin-top=--) failed</strong><pre>Invalid macro argument <code>margin-top=--</code></pre></div>
</p>
============================== No attachment, invalid arg 3
[[Image(img.png, margin=--)]]
------------------------------
<p>
<div class="system-message"><strong>Macro Image(img.png, margin=--) failed</strong><pre>Invalid macro argument <code>margin=--</code></pre></div>
</p>
============================== No attachment, invalid arg 3
[[Image(img.png, margin-left=--)]]
------------------------------
<p>
<div class="system-message"><strong>Macro Image(img.png, margin-left=--) failed</strong><pre>Invalid macro argument <code>margin-left=--</code></pre></div>
</p>
============================== No attachment, invalid arg 4
[[Image(img.png, margin-right=--)]]
------------------------------
<p>
<div class="system-message"><strong>Macro Image(img.png, margin-right=--) failed</strong><pre>Invalid macro argument <code>margin-right=--</code></pre></div>
</p>
============================== No attachment, invalid arg 5
[[Image(img.png, border=--)]]
------------------------------
<p>
<div class="system-message"><strong>Macro Image(img.png, border=--) failed</strong><pre>Invalid macro argument <code>border=--</code></pre></div>
</p>
============================== # Regression test for #12333
= [[Image]]
------------------------------
<h1 class="section" id="Image">[[Image]]</h1>
============================== Invalid use of attachment TracLink
[[Image(attachment:img.png:wiki:page)]]
------------------------------
<p>
</p><div class="system-message"><strong>No filespec given</strong></div><p>
</p>
============================== Non-existent attachment
[[Image(wiki:page:img2.png)]]
------------------------------
<p>
<img alt="No image "img2.png" attached to page" crossorigin="anonymous" src="http://assets.example.org/common/attachment.png" title="No image "img2.png" attached to page" />
</p>
============================== "[" and "]" characters - 1 (#12762)
[[Image(wiki:page:][img.png,nolink)]]
------------------------------
<p>
<img alt="image in page" src="/raw-attachment/wiki/page/%5D%5Bimg.png" title="image in page" />
</p>
============================== "[" and "]" characters - 2 (#12762)
[[Image(][img.png,nolink)]]
------------------------------
<p>
<img alt="No image "][img.png" attached to WikiStart" crossorigin="anonymous" src="http://assets.example.org/common/attachment.png" title="No image "][img.png" attached to WikiStart" />
</p>
"""
# Note: in the <img> src attribute above, the Unicode characters
# within the URI sometimes come out as %-encoded, sometimes raw
# (server-relative case). Both forms are valid (at least
# according to the W3C XHTML validator).
# == [[TitleIndex]]
def titleindex_teardown(tc):
tc.env.reset_db()
TITLEINDEX1_MACRO_TEST_CASES = u"""
============================== TitleIndex, default format
[[TitleIndex()]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
[[TitleIndex]]
============================== TitleIndex, compact format
[[TitleIndex(format=compact)]]
------------------------------
<p>
<a href="/wiki/WikiStart">WikiStart</a>
</p>
------------------------------
[[TitleIndex(...)]]
==============================
[[TitleIndex(min=--)]]
------------------------------
<p>
<div class="system-message"><strong>Macro TitleIndex(min=--) failed</strong><pre>Invalid macro argument <code>min=--</code></pre></div>
</p>
==============================
[[TitleIndex(depth=--)]]
------------------------------
<p>
<div class="system-message"><strong>Macro TitleIndex(depth=--) failed</strong><pre>Invalid macro argument <code>depth=--</code></pre></div>
</p>
------------------------------
"""
TITLEINDEX2_MACRO_TEST_CASES = u"""
============================== TitleIndex, default format
[[TitleIndex()]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiEnd">WikiEnd</a></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
[[TitleIndex]]
============================== TitleIndex, compact format
[[TitleIndex(format=compact)]]
------------------------------
<p>
<a href="/wiki/WikiEnd">WikiEnd</a>, <a href="/wiki/WikiStart">WikiStart</a>
</p>
------------------------------
[[TitleIndex(...)]]
============================== TitleIndex, default format with prefix
[[TitleIndex(Wiki)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiEnd">WikiEnd</a></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
[[TitleIndex(...)]]
============================== TitleIndex, compact format with prefix
[[TitleIndex(Wiki,format=compact)]]
------------------------------
<p>
<a href="/wiki/WikiEnd">WikiEnd</a>, <a href="/wiki/WikiStart">WikiStart</a>
</p>
------------------------------
[[TitleIndex(...)]]
============================== TitleIndex, default format with prefix hidden
[[TitleIndex(Wiki,hideprefix)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiEnd">End</a></li><li><a href="/wiki/WikiStart">Start</a></li></ul></div><p>
</p>
------------------------------
[[TitleIndex(...)]]
============================== TitleIndex, compact format with prefix hidden
[[TitleIndex(Wiki,hideprefix,format=compact)]]
------------------------------
<p>
<a href="/wiki/WikiEnd">End</a>, <a href="/wiki/WikiStart">Start</a>
</p>
------------------------------
[[TitleIndex(...)]]
"""
def titleindex2_setup(tc):
add_pages(tc, ['WikiEnd'])
TITLEINDEX3_MACRO_TEST_CASES = u"""
============================== TitleIndex, group format
[[TitleIndex(Wiki,format=group)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><strong>Wiki</strong><ul><li><strong>End</strong><ul><li><a href="/wiki/WikiEnd/First">WikiEnd/First</a></li><li><a href="/wiki/WikiEnd/Second">WikiEnd/Second</a></li></ul></li><li><strong>Start</strong><ul><li><a href="/wiki/WikiStart">WikiStart</a></li><li><a href="/wiki/WikiStart/First">WikiStart/First</a></li><li><a href="/wiki/WikiStart/Second">WikiStart/Second</a></li><li><a href="/wiki/WikiStart/Third">WikiStart/Third</a></li></ul></li></ul></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, hierarchy format
[[TitleIndex(WikiStart/, format=hierarchy)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li>WikiStart<ul><li><a href="/wiki/WikiStart/First">First</a></li><li><a href="/wiki/WikiStart/Second">Second</a></li><li><a href="/wiki/WikiStart/Third">Third</a></li></ul></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, group format, prefix hidden
[[TitleIndex(Wiki,hideprefix,format=group)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><strong>End</strong><ul><li><a href="/wiki/WikiEnd/First">End/First</a></li><li><a href="/wiki/WikiEnd/Second">End/Second</a></li></ul></li><li><strong>Start</strong><ul><li><a href="/wiki/WikiStart">Start</a></li><li><a href="/wiki/WikiStart/First">Start/First</a></li><li><a href="/wiki/WikiStart/Second">Start/Second</a></li><li><a href="/wiki/WikiStart/Third">Start/Third</a></li></ul></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, hierarchy format, prefix hidden
[[TitleIndex(WikiStart/,hideprefix,format=hierarchy)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart/First">First</a></li><li><a href="/wiki/WikiStart/Second">Second</a></li><li><a href="/wiki/WikiStart/Third">Third</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, relative prefix
[[TitleIndex(../../WikiStart)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart">WikiStart</a></li><li><a href="/wiki/WikiStart/First">WikiStart/First</a></li><li><a href="/wiki/WikiStart/Second">WikiStart/Second</a></li><li><a href="/wiki/WikiStart/Third">WikiStart/Third</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, relative prefix with trailing slash
[[TitleIndex(../../WikiStart/)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart/First">WikiStart/First</a></li><li><a href="/wiki/WikiStart/Second">WikiStart/Second</a></li><li><a href="/wiki/WikiStart/Third">WikiStart/Third</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, relative prefix ..
[[TitleIndex(..)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart">WikiStart</a></li><li><a href="/wiki/WikiStart/First">WikiStart/First</a></li><li><a href="/wiki/WikiStart/Second">WikiStart/Second</a></li><li><a href="/wiki/WikiStart/Third">WikiStart/Third</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, relative prefix ../
[[TitleIndex(../)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart/First">WikiStart/First</a></li><li><a href="/wiki/WikiStart/Second">WikiStart/Second</a></li><li><a href="/wiki/WikiStart/Third">WikiStart/Third</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, relative prefix .
[[TitleIndex(.)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart/Second">WikiStart/Second</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, relative prefix ./
[[TitleIndex(./)]]
------------------------------
<p>
</p><div class="titleindex"><ul></ul></div><p>
</p>
------------------------------
============================== TitleIndex, relative hidden prefix ../
[[TitleIndex(../,hideprefix)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart/First">First</a></li><li><a href="/wiki/WikiStart/Second">Second</a></li><li><a href="/wiki/WikiStart/Third">Third</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, top-level pages only
[[TitleIndex(depth=0)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
"""
def titleindex3_setup(tc):
add_pages(tc, [
'WikiStart/First',
'WikiStart/Second',
'WikiStart/Third',
'WikiEnd/First',
'WikiEnd/Second',
])
TITLEINDEX4_MACRO_TEST_CASES = u"""
============================== TitleIndex group and page with numbers (#7919)
[[TitleIndex(format=group)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><strong>0.11</strong><ul><li><strong>Group</strong><ul><li><a href="/wiki/0.11/GroupOne">0.11/GroupOne</a></li><li><a href="/wiki/0.11/GroupTwo">0.11/GroupTwo</a></li></ul></li><li><a href="/wiki/0.11/Test">0.11/Test</a></li></ul></li><li><strong>Test</strong><ul><li><strong>0.11</strong><ul><li><a href="/wiki/Test0.11/Abc">Test0.11/Abc</a></li><li><a href="/wiki/Test0.11Abc">Test0.11Abc</a></li></ul></li><li><strong>0.12</strong><ul><li><a href="/wiki/Test0.12Def">Test0.12Def</a></li><li><a href="/wiki/Test0.12Ijk">Test0.12Ijk</a></li></ul></li><li><strong>0.13</strong><ul><li><a href="/wiki/Test0.13alpha">Test0.13alpha</a></li><li><a href="/wiki/Test0.13beta">Test0.13beta</a></li></ul></li><li><a href="/wiki/Test0.131">Test0.131</a></li><li><a href="/wiki/Test2">Test2</a></li><li><a href="/wiki/TestTest">TestTest</a></li><li><a href="/wiki/TestThing">TestThing</a></li></ul></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, compact format with prefix hidden, including Test0.13*
[[TitleIndex(Test,format=compact,include=*0.13*)]]
------------------------------
<p>
<a href="/wiki/Test0.131">Test0.131</a>, <a href="/wiki/Test0.13alpha">Test0.13alpha</a>, <a href="/wiki/Test0.13beta">Test0.13beta</a>
</p>
------------------------------
============================== TitleIndex, compact format with prefix hidden, including Test0.13* but excluding Test0.131
[[TitleIndex(Test,format=compact,include=*0.13*,exclude=*1)]]
------------------------------
<p>
<a href="/wiki/Test0.13alpha">Test0.13alpha</a>, <a href="/wiki/Test0.13beta">Test0.13beta</a>
</p>
------------------------------
============================== TitleIndex, compact format, excluding various topics
[[TitleIndex(Test,format=compact,exclude=Test0.13*:*0.11*:Test2:Test*i*)]]
------------------------------
<p>
<a href="/wiki/Test0.12Def">Test0.12Def</a>, <a href="/wiki/Test0.12Ijk">Test0.12Ijk</a>, <a href="/wiki/TestTest">TestTest</a>
</p>
------------------------------
============================== TitleIndex, compact format, including and excluding various topics
[[TitleIndex(format=compact,include=*Group*:test2,exclude=*One)]]
------------------------------
<p>
<a href="/wiki/0.11/GroupTwo">0.11/GroupTwo</a>
</p>
------------------------------
"""
def titleindex4_setup(tc):
add_pages(tc, [
'TestTest',
'TestThing',
'Test2',
'Test0.11Abc',
'Test0.11/Abc',
'Test0.12Def',
'Test0.12Ijk',
'Test0.13alpha',
'Test0.13beta',
'Test0.131',
'0.11/Test',
'0.11/GroupOne',
'0.11/GroupTwo',
])
TITLEINDEX5_MACRO_TEST_CASES = u"""
============================== TitleIndex, hierarchy format with complex hierarchy
[[TitleIndex(format=hierarchy)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/TracDev">TracDev</a><ul><li><a href="/wiki/TracDev/ApiChanges">ApiChanges</a><ul><li><a href="/wiki/TracDev/ApiChanges/0.10">0.10</a></li><li><a href="/wiki/TracDev/ApiChanges/0.11">0.11</a></li><li><a href="/wiki/TracDev/ApiChanges/0.12">0.12</a><ul><li>Missing<ul><li><a href="/wiki/TracDev/ApiChanges/0.12/Missing/Exists">Exists</a></li></ul></li></ul></li></ul></li></ul></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, hierarchy format with complex hierarchy (and min=5)
[[TitleIndex(format=hierarchy,min=5)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><a href="/wiki/TracDev">TracDev</a><ul><li><a href="/wiki/TracDev/ApiChanges">ApiChanges</a></li><li><a href="/wiki/TracDev/ApiChanges/0.10">ApiChanges/0.10</a></li><li><a href="/wiki/TracDev/ApiChanges/0.11">ApiChanges/0.11</a></li><li><a href="/wiki/TracDev/ApiChanges/0.12">ApiChanges/0.12</a></li><li><a href="/wiki/TracDev/ApiChanges/0.12/Missing/Exists">ApiChanges/0.12/Missing/Exists</a></li></ul></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
============================== TitleIndex, group format with complex hierarchy
[[TitleIndex(format=group)]]
------------------------------
<p>
</p><div class="titleindex"><ul><li><strong>TracDev</strong><ul><li><a href="/wiki/TracDev">TracDev</a></li><li><strong>ApiChanges</strong><ul><li><a href="/wiki/TracDev/ApiChanges">TracDev/ApiChanges</a></li><li><a href="/wiki/TracDev/ApiChanges/0.10">TracDev/ApiChanges/0.10</a></li><li><a href="/wiki/TracDev/ApiChanges/0.11">TracDev/ApiChanges/0.11</a></li><li><strong>0.12</strong><ul><li><a href="/wiki/TracDev/ApiChanges/0.12">TracDev/ApiChanges/0.12</a></li><li><a href="/wiki/TracDev/ApiChanges/0.12/Missing/Exists">TracDev/ApiChanges/0.12/Missing/Exists</a></li></ul></li></ul></li></ul></li><li><a href="/wiki/WikiStart">WikiStart</a></li></ul></div><p>
</p>
------------------------------
"""
def titleindex5_setup(tc):
add_pages(tc, [
'TracDev',
'TracDev/ApiChanges',
'TracDev/ApiChanges/0.10',
'TracDev/ApiChanges/0.11',
'TracDev/ApiChanges/0.12',
'TracDev/ApiChanges/0.12/Missing/Exists',
])
RECENTCHANGES_MACRO_TEST_CASES = u""""
============================== RecentChanges, group option
[[RecentChanges()]]
[[RecentChanges(group=date)]]
[[RecentChanges(group=none)]]
[[RecentChanges(,2,group=none)]]
[[RecentChanges(Wiki,group=none)]]
[[RecentChanges(Wiki,1,group=none)]]
------------------------------
<p>
</p><div class="wikipage"><h3 class="section">%(date)s</h3><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li><li><a href="/wiki/WikiStart">WikiStart</a>
</li></ul></div><p>
</p><div class="wikipage"><h3 class="section">%(date)s</h3><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li><li><a href="/wiki/WikiStart">WikiStart</a>
</li></ul></div><p>
</p><div class="wikipage"><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li><li><a href="/wiki/WikiStart">WikiStart</a>
</li></ul></div><p>
</p><div class="wikipage"><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li></ul></div><p>
</p><div class="wikipage"><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li><li><a href="/wiki/WikiMid">WikiMid</a>
</li><li><a href="/wiki/WikiStart">WikiStart</a>
</li></ul></div><p>
</p><div class="wikipage"><ul><li><a href="/wiki/WikiEnd">WikiEnd</a>
</li></ul></div><p>
</p>
==============================
[[RecentChanges(Trac, --)]]
------------------------------
<p>
<div class="system-message"><strong>Macro RecentChanges(Trac, --) failed</strong><pre>Invalid macro argument <code>--</code></pre></div>
</p>
------------------------------
"""
def recentchanges_setup(tc):
def add_pages(tc, names):
for name in names:
now = datetime_now(utc)
w = WikiPage(tc.env)
w.name = name
w.text = '--'
w.save('joe', 'the page ' + name, now)
add_pages(tc, [
'WikiMid',
'WikiEnd',
])
tc.expected = tc.expected % {'date': format_date(tzinfo=utc,
locale=locale_en)}
def recentchanges_teardown(tc):
tc.env.reset_db()
PAGEOUTLINE_MACRO_TEST_CASES = u""""
==============================
[[PageOutline(a)]]
------------------------------
<p>
<div class="system-message"><strong>Macro PageOutline(a) failed</strong><pre>Invalid macro argument <code>a</code></pre></div>
</p>
==============================
[[PageOutline(a-b)]]
------------------------------
<p>
<div class="system-message"><strong>Macro PageOutline(a-b) failed</strong><pre>Invalid macro argument <code>a</code></pre></div>
</p>
==============================
[[PageOutline(0)]]
= Heading Level 1
== Heading Level 2
------------------------------
<p>
</p><div class="wiki-toc">
<ol>
<li>
<a href="#HeadingLevel1">Heading Level 1</a>
</li>
</ol>
</div><p>
</p>
<h1 class="section" id="HeadingLevel1">Heading Level 1</h1>
<h2 class="section" id="HeadingLevel2">Heading Level 2</h2>
==============================
[[PageOutline(7)]]
===== Heading Level 5
====== Heading Level 6
------------------------------
<p>
</p><div class="wiki-toc">
<ol>
<li>
<a href="#HeadingLevel6">Heading Level 6</a>
</li>
</ol>
</div><p>
</p>
<h5 class="section" id="HeadingLevel5">Heading Level 5</h5>
<h6 class="section" id="HeadingLevel6">Heading Level 6</h6>
==============================
[[PageOutline(0-7)]]
= Heading Level 1
== Heading Level 2
=== Heading Level 3
==== Heading Level 4
===== Heading Level 5
====== Heading Level 6
------------------------------
<p>
</p><div class="wiki-toc">
<ol>
<li>
<a href="#HeadingLevel1">Heading Level 1</a>
<ol>
<li>
<a href="#HeadingLevel2">Heading Level 2</a>
<ol>
<li>
<a href="#HeadingLevel3">Heading Level 3</a>
<ol>
<li>
<a href="#HeadingLevel4">Heading Level 4</a>
<ol>
<li>
<a href="#HeadingLevel5">Heading Level 5</a>
<ol>
<li>
<a href="#HeadingLevel6">Heading Level 6</a>
</li>
</ol>
</li>
</ol>
</li>
</ol>
</li>
</ol>
</li>
</ol>
</li>
</ol>
</div><p>
</p>
<h1 class="section" id="HeadingLevel1">Heading Level 1</h1>
<h2 class="section" id="HeadingLevel2">Heading Level 2</h2>
<h3 class="section" id="HeadingLevel3">Heading Level 3</h3>
<h4 class="section" id="HeadingLevel4">Heading Level 4</h4>
<h5 class="section" id="HeadingLevel5">Heading Level 5</h5>
<h6 class="section" id="HeadingLevel6">Heading Level 6</h6>
"""
TRACINI_MACRO_TEST_CASES = u"""\
============================== TracIni, option with empty doc (#10940)
[[TracIni(section-42)]]
------------------------------
<p>
</p><div class="tracini"><h3 id="section-42-section"><code>[section-42]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-42-option1-option">\
<td><a class="tracini-option" href="#section-42-option1-option"><code>option1</code></a></td><td></td>\
<td class="default"><code>value</code></td></tr>\
<tr class="odd" id="section-42-option2-option">\
<td><a class="tracini-option" href="#section-42-option2-option"><code>option2</code></a></td><td><p>
blah
</p>
</td><td class="default"><code>value</code></td></tr>\
<tr class="even" id="section-42-option3-option">\
<td><a class="tracini-option" href="#section-42-option3-option"><code>option3</code></a></td><td><p>
Doc for option3
</p>
</td><td class="default"><code>value</code></td></tr></tbody></table></div><p>
</p>
------------------------------
============================== TracIni, list option with sep=| (#11074)
[[TracIni(section-list)]]
------------------------------
<p>
</p><div class="tracini">\
<h3 id="section-list-section"><code>[section-list]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-list-option1-option">\
<td><a class="tracini-option" href="#section-list-option1-option"><code>option1</code></a></td><td></td>\
<td class="default"><code>4.2|42|42||0|enabled</code></td></tr>\
</tbody></table>\
</div><p>
</p>
------------------------------
============================== TracIni, option with "false" value as default
[[TracIni(section-def)]]
------------------------------
<p>
</p><div class="tracini">\
<h3 id="section-def-section"><code>[section-def]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-def-option1-option">\
<td><a class="tracini-option" href="#section-def-option1-option"><code>option1</code></a></td><td>\
</td><td class="nodefault">(no default)</td></tr>\
<tr class="odd" id="section-def-option2-option">\
<td><a class="tracini-option" href="#section-def-option2-option"><code>option2</code></a></td><td></td>\
<td class="nodefault">(no default)</td></tr>\
<tr class="even" id="section-def-option3-option">\
<td><a class="tracini-option" href="#section-def-option3-option"><code>option3</code></a></td><td></td>\
<td class="default"><code>0</code></td></tr>\
<tr class="odd" id="section-def-option4-option">\
<td><a class="tracini-option" href="#section-def-option4-option"><code>option4</code></a></td><td></td>\
<td class="default"><code>disabled</code></td></tr>\
<tr class="even" id="section-def-option5-option">\
<td><a class="tracini-option" href="#section-def-option5-option"><code>option5</code></a></td><td></td>\
<td class="nodefault">(no default)</td></tr>\
</tbody></table>\
</div><p>
</p>
------------------------------
============================== TracIni, option argument
[[TracIni(,option5)]]
------------------------------
<p>
</p><div class="tracini">\
<h3 id="section-def-section"><code>[section-def]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-def-option5-option">\
<td><a class="tracini-option" href="#section-def-option5-option"><code>option5</code></a></td><td></td>\
<td class="nodefault">(no default)</td></tr>\
</tbody></table>\
</div><p>
</p>
------------------------------
============================== TracIni, option named argument
[[TracIni(option=opt?o*[24])]]
------------------------------
<p>
</p><div class="tracini">\
<h3 id="section-42-section"><code>[section-42]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-42-option2-option">\
<td><a class="tracini-option" href="#section-42-option2-option"><code>option2</code></a></td><td><p>
blah
</p>
</td><td class="default"><code>value</code></td></tr></tbody></table>\
<h3 id="section-def-section"><code>[section-def]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-def-option2-option">\
<td><a class="tracini-option" href="#section-def-option2-option"><code>option2</code></a></td><td></td>\
<td class="nodefault">(no default)</td></tr>\
<tr class="odd" id="section-def-option4-option">\
<td><a class="tracini-option" href="#section-def-option4-option"><code>option4</code></a></td><td></td>\
<td class="default"><code>disabled</code></td></tr>\
</tbody></table>\
</div><p>
</p>
------------------------------
============================== TracIni, section and option named argument
[[TracIni(section=section-*,option=opt*[13])]]
------------------------------
<p>
</p><div class="tracini">\
<h3 id="section-42-section"><code>[section-42]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-42-option1-option">\
<td><a class="tracini-option" href="#section-42-option1-option"><code>option1</code></a></td><td></td>\
<td class="default"><code>value</code></td></tr>\
<tr class="odd" id="section-42-option3-option">\
<td><a class="tracini-option" href="#section-42-option3-option"><code>option3</code></a></td>\
<td><p>
Doc for option3
</p>
</td><td class="default"><code>value</code></td></tr>\
</tbody></table>\
<h3 id="section-def-section"><code>[section-def]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-def-option1-option">\
<td><a class="tracini-option" href="#section-def-option1-option"><code>option1</code></a></td><td></td>\
<td class="nodefault">(no default)</td></tr>\
<tr class="odd" id="section-def-option3-option">\
<td><a class="tracini-option" href="#section-def-option3-option"><code>option3</code></a></td><td></td>\
<td class="default"><code>0</code></td></tr>\
</tbody></table>\
<h3 id="section-list-section"><code>[section-list]</code></h3>\
<table class="wiki"><tbody>\
<tr class="even" id="section-list-option1-option">\
<td><a class="tracini-option" href="#section-list-option1-option"><code>option1</code></a></td><td></td>\
<td class="default"><code>4.2|42|42||0|enabled</code></td></tr>\
</tbody></table>\
</div><p>
</p>
------------------------------
============================== TracIni, section with no options
[[TracIni(section=section-no-options)]]
------------------------------
<p>
</p><div class="tracini">\
<h3 id="section-no-options-section"><code>[section-no-options]</code></h3>\
<p>
No options
</p>
</div><p>
</p>
------------------------------
============================== TracIni, ordered arguments don't glob
[[TracIni(section*,option*)]]
------------------------------
<p>
</p><div class="tracini"></div><p>
</p>
------------------------------
"""
def tracini_setup(tc):
tc._orig_registries = ConfigSection.registry, Option.registry
class Foo(object):
section = (ConfigSection)('section-no-options', doc='No options')
option_a1 = (Option)('section-42', 'option1', 'value', doc='')
option_a2 = (Option)('section-42', 'option2', 'value', doc='blah')
option_a3 = (Option)('section-42', 'option3', 'value',
doc='Doc for %(name)s',
doc_args={'name': 'option3'})
option_l1 = (ListOption)('section-list', 'option1',
[4.2, '42', 42, None, 0, True], sep='|',
keep_empty=True)
option_d1 = (Option)('section-def', 'option1', None)
option_d2 = (Option)('section-def', 'option2', '')
option_d3 = (IntOption)('section-def', 'option3', 0)
option_d4 = (BoolOption)('section-def', 'option4', False)
option_d5 = (ListOption)('section-def', 'option5', [])
def tracini_teardown(tc):
ConfigSection.registry, Option.registry = tc._orig_registries
INTERWIKI_MACRO_TEST_CASES = u"""
==============================
[[InterWiki]]
------------------------------
<p>
</p><table class="wiki interwiki">\
<tr><th><em>Prefix</em></th><th><em>Site</em></th></tr>\
<tr><td><a href="http://wikicreole.org/wiki/RecentChanges">CreoleWiki</a></td>\
<td><a href="http://wikicreole.org/wiki/">http://wikicreole.org/wiki/</a></td></tr>\
<tr><td><a href="https://img.shields.io/RecentChanges">shields</a></td>\
<td><a href="https://img.shields.io/">https://img.shields.io/</a></td></tr>\
<tr><td><a href="http://stackoverflow.com/questions/RecentChanges">SO</a></td>\
<td><a href="http://stackoverflow.com/questions/">Question $1 in StackOverflow</a></td></tr>\
<tr><td><a href="https://travis-ci.org/RecentChanges?branch=">travis</a></td>\
<td><a href="https://travis-ci.org/$1?branch=$2">Travis CI</a></td></tr>\
</table><p>
</p>
------------------------------
"""
def interwiki_setup(tc):
tc.env.config.set('interwiki', 'shields', 'https://img.shields.io/')
tc.env.config.set('interwiki', 'travis',
'https://travis-ci.org/$1?branch=$2 Travis CI')
page = WikiPage(tc.env)
page.name = 'InterMapTxt'
page.text = """
The InterWikiTxt page
----
{{{
SO http://stackoverflow.com/questions/ # Question $1 in StackOverflow
CreoleWiki http://wikicreole.org/wiki/
}}}
"""
page.save('admin', 'created page')
def interwiki_teardown(tc):
tc.env.reset_db()
def test_suite():
suite = unittest.TestSuite()
suite.addTest(formatter.test_suite(IMAGE_MACRO_TEST_CASES,
file=__file__,
setup=image_setup,
teardown=image_teardown))
suite.addTest(formatter.test_suite(TITLEINDEX1_MACRO_TEST_CASES,
file=__file__))
suite.addTest(formatter.test_suite(TITLEINDEX2_MACRO_TEST_CASES,
file=__file__,
setup=titleindex2_setup,
teardown=titleindex_teardown))
suite.addTest(formatter.test_suite(TITLEINDEX3_MACRO_TEST_CASES,
file=__file__,
setup=titleindex3_setup,
teardown=titleindex_teardown,
context=('wiki', 'WikiStart/Second')))
suite.addTest(formatter.test_suite(TITLEINDEX4_MACRO_TEST_CASES,
file=__file__,
setup=titleindex4_setup,
teardown=titleindex_teardown))
suite.addTest(formatter.test_suite(TITLEINDEX5_MACRO_TEST_CASES,
file=__file__,
setup=titleindex5_setup,
teardown=titleindex_teardown))
suite.addTest(formatter.test_suite(RECENTCHANGES_MACRO_TEST_CASES,
file=__file__,
setup=recentchanges_setup,
teardown=recentchanges_teardown))
suite.addTest(formatter.test_suite(PAGEOUTLINE_MACRO_TEST_CASES,
file=__file__))
suite.addTest(formatter.test_suite(TRACINI_MACRO_TEST_CASES,
file=__file__,
setup=tracini_setup,
teardown=tracini_teardown))
suite.addTest(formatter.test_suite(INTERWIKI_MACRO_TEST_CASES,
file=__file__, setup=interwiki_setup))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| bsd-3-clause | 6,956,959,091,301,025,000 | 43.508048 | 991 | 0.549897 | false |
Razbit/razttthon | src/player.py | 1 | 2608 | # Razttthon, a python-implemented Tic-tac-toe game.
# Copyright Eetu 'Razbit' Pesonen, 2014
#
# This file is a part of Razttthon, which is free software: you can redistribute
# it and/or modify it under the terms of the GNU General Public License
# version 3 as published by the Free Software Foundation.
#
# Razttthon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
#This file contains the player class
from mainc import cMain
class cPlayerHandler(cMain):
def __init__(self):
pass
def getPlayers(self): return cMain.nPlayers
def getName(self, id): return cMain.playerlist[id][0]
def getGames(self, id): return cMain.playerlist[id][1]
def getWins(self, id): return cMain.playerlist[id][2]
def getLosses(self, id): return cMain.playerlist[id][3]
def getQuits(self, id): return cMain.playerlist[id][4]
def getData(self, id):
return [self.getName(id), self.getGames(id), self.getWins(id), self.getLosses(id), self.getQuits(id)]
def addPlayer(self, name):
        #Adds a new player with name 'name' to cMain's list
cMain.playerlist.append([name, 0, 0, 0, 0]) #Add player
cMain.nPlayers += 1
return self.getPlayers()-1 #Return PID for the newly created player
def addGame(self, id):
try:
cMain.playerlist[id][1] += 1
return True
except IndexError:
return False
def addWin(self, id):
try:
cMain.playerlist[id][2] += 1
return True
except IndexError:
return False
def addLose(self, id):
try:
cMain.playerlist[id][3] += 1
return True
except IndexError:
return False
def addQuit(self, id):
try:
cMain.playerlist[id][4] += 1
return True
except IndexError:
return False
def getPID(self, name):
#Search the playerlist, return index where player 'name' was found
for index in range(len(cMain.playerlist)):
if cMain.playerlist[index][0].upper() == name.upper():
return index
return -1 #If item isn't found, return -1
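if __name__ == '__main__':
    # Illustrative usage sketch: assumes cMain (from mainc) defines the class
    # attributes 'playerlist' (a list) and 'nPlayers' (an int) exactly as the
    # handler mutates them above; the player name below is made up.
    handler = cPlayerHandler()
    pid = handler.addPlayer('Alice')
    handler.addGame(pid)
    handler.addWin(pid)
    print(handler.getData(pid))  # ['Alice', 1, 1, 0, 0]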
| gpl-3.0 | -5,719,532,197,032,230,000 | 32.435897 | 109 | 0.610813 | false |
borjam/exabgp | src/exabgp/bgp/neighbor.py | 2 | 17189 | # encoding: utf-8
"""
neighbor.py
Created by Thomas Mangin on 2009-11-05.
Copyright (c) 2009-2017 Exa Networks. All rights reserved.
License: 3-clause BSD. (See the COPYRIGHT file)
"""
from collections import deque
from collections import Counter
from exabgp.protocol.family import AFI
from exabgp.util.dns import host, domain
from exabgp.bgp.message import Message
from exabgp.bgp.message.open.capability import AddPath
from exabgp.bgp.message.open.holdtime import HoldTime
from exabgp.rib import RIB
# class Section(dict):
# name = ''
# key = ''
# sub = ['capability']
# def string(self, level=0):
#     prefix = ' ' * level
#     key_name = self.get(self.key, '')
#     returned = f'{prefix}{key_name} {{\n'
#     prefix = ' ' * (level + 1)
#     for k, v in self.items():
#         if k == self.key:
#             continue
#         if k in self.sub:
#             returned += self[k].string(level + 1)
#             continue
#         returned += f'{prefix}{k} {v};\n'
#     return returned
# The definition of a neighbor (from reading the configuration)
class Neighbor(dict):
class Capability(dict):
defaults = {
'asn4': True,
'extended-message': True,
'graceful-restart': False,
'multi-session': False,
'operational': False,
'add-path': 0,
'route-refresh': 0,
'nexthop': None,
'aigp': None,
}
defaults = {
# Those are the field from the configuration
'description': '',
'router-id': None,
'local-address': None,
'peer-address': None,
'local-as': None,
'peer-as': None,
# passive indicate that we do not establish outgoing connections
'passive': False,
# the port to listen on ( zero mean that we do not listen )
'listen': 0,
# the port to connect to
'connect': 0,
'hold-time': HoldTime(180),
'rate-limit': 0,
'host-name': host(),
'domain-name': domain(),
'group-updates': True,
'auto-flush': True,
'adj-rib-in': True,
'adj-rib-out': True,
'manual-eor': False,
# XXX: this should be under an MD5 sub-dict/object ?
'md5-password': None,
'md5-base64': False,
'md5-ip': None,
'outgoing-ttl': None,
'incoming-ttl': None,
}
_GLOBAL = {'uid': 1}
def __init__(self):
# super init
self.update(self.defaults)
# Those are subconf
self.api = None # XXX: not scriptable - is replaced outside the class
# internal or calculated field
self['capability'] = self.Capability.defaults.copy()
# local_address uses auto discovery
self.auto_discovery = False
self.range_size = 1
# was this Neighbor generated from a range
self.generated = False
self._families = []
self._nexthop = []
self._addpath = []
self.rib = None
# The routes we have parsed from the configuration
self.changes = []
# On signal update, the previous routes so we can compare what changed
self.backup_changes = []
self.eor = deque()
self.asm = dict()
self.messages = deque()
self.refresh = deque()
self.counter = Counter()
        # It is possible to:
        # - have multiple exabgp instances toward one peer on the same host (use of pid)
        # - have more than one connection toward a peer
        # - each connection has its own neighbor (hence why the identifier is not in Protocol)
self.uid = '%d' % self._GLOBAL['uid']
self._GLOBAL['uid'] += 1
def missing(self):
        if self['local-address'] is None:
return 'incomplete neighbor, missing local-address'
if self['local-as'] is None:
return 'incomplete neighbor, missing local-as'
if self['peer-as'] is None:
return 'incomplete neighbor, missing peer-as'
return ''
def infer(self):
if self['md5-ip'] is None:
self['md5-ip'] = self['local-address']
if self['capability']['graceful-restart'] == 0:
self['capability']['graceful-restart'] = int(self['hold-time'])
def id(self):
return 'neighbor-%s' % self.uid
    # This set must be unique between peers, not the full draft-ietf-idr-bgp-multisession-07
def index(self):
if self['listen'] != 0:
return 'peer-ip %s listen %d' % (self['peer-address'], self['listen'])
return self.name()
def make_rib(self):
self.rib = RIB(self.name(), self['adj-rib-in'], self['adj-rib-out'], self._families)
# will resend all the routes once we reconnect
def reset_rib(self):
self.rib.reset()
self.messages = deque()
self.refresh = deque()
# back to square one, all the routes are removed
def clear_rib(self):
self.rib.clear()
self.messages = deque()
self.refresh = deque()
def name(self):
if self['capability']['multi-session']:
session = '/'.join("%s-%s" % (afi.name(), safi.name()) for (afi, safi) in self.families())
else:
session = 'in-open'
return "neighbor %s local-ip %s local-as %s peer-as %s router-id %s family-allowed %s" % (
self['peer-address'],
self['local-address'] if self['peer-address'] is not None else 'auto',
self['local-as'] if self['local-as'] is not None else 'auto',
self['peer-as'] if self['peer-as'] is not None else 'auto',
self['router-id'],
session,
)
def families(self):
# this list() is important .. as we use the function to modify self._families
return list(self._families)
def nexthops(self):
# this list() is important .. as we use the function to modify self._nexthop
return list(self._nexthop)
def addpaths(self):
# this list() is important .. as we use the function to modify self._add_path
return list(self._addpath)
def add_family(self, family):
# the families MUST be sorted for neighbor indexing name to be predictable for API users
# this list() is important .. as we use the function to modify self._families
if family not in self.families():
afi, safi = family
d = dict()
d[afi] = [
safi,
]
for afi, safi in self._families:
d.setdefault(afi, []).append(safi)
self._families = [(afi, safi) for afi in sorted(d) for safi in sorted(d[afi])]
def add_nexthop(self, afi, safi, nhafi):
if (afi, safi, nhafi) not in self._nexthop:
self._nexthop.append((afi, safi, nhafi))
def add_addpath(self, family):
# the families MUST be sorted for neighbor indexing name to be predictable for API users
# this list() is important .. as we use the function to modify self._add_path
if family not in self.addpaths():
afi, safi = family
d = dict()
d[afi] = [
safi,
]
for afi, safi in self._addpath:
d.setdefault(afi, []).append(safi)
self._addpath = [(afi, safi) for afi in sorted(d) for safi in sorted(d[afi])]
def remove_family(self, family):
if family in self.families():
self._families.remove(family)
def remove_nexthop(self, afi, safi, nhafi):
if (afi, safi, nhafi) in self.nexthops():
self._nexthop.remove((afi, safi, nhafi))
def remove_addpath(self, family):
if family in self.addpaths():
self._addpath.remove(family)
def missing(self):
if self['local-address'] is None and not self.auto_discovery:
return 'local-address'
if self['listen'] > 0 and self.auto_discovery:
return 'local-address'
if self['peer-address'] is None:
return 'peer-address'
if self.auto_discovery and not self['router-id']:
return 'router-id'
if self['peer-address'].afi == AFI.ipv6 and not self['router-id']:
return 'router-id'
return ''
# This function only compares the neighbor BUT NOT ITS ROUTES
def __eq__(self, other):
# Comparing local_address is skipped in the case where either
# peer is configured to auto discover its local address. In
# this case it can happen that one local_address is None and
        # the other one will be set to the auto-discovered IP address.
auto_discovery = self.auto_discovery or other.auto_discovery
return (
self['router-id'] == other['router-id']
and self['local-as'] == other['local-as']
and self['peer-address'] == other['peer-address']
and self['peer-as'] == other['peer-as']
and self['passive'] == other['passive']
and self['listen'] == other['listen']
and self['connect'] == other['connect']
and self['hold-time'] == other['hold-time']
and self['rate-limit'] == other['rate-limit']
and self['host-name'] == other['host-name']
and self['domain-name'] == other['domain-name']
and self['md5-password'] == other['md5-password']
and self['md5-ip'] == other['md5-ip']
and self['incoming-ttl'] == other['incoming-ttl']
and self['outgoing-ttl'] == other['outgoing-ttl']
and self['group-updates'] == other['group-updates']
and self['auto-flush'] == other['auto-flush']
and self['adj-rib-in'] == other['adj-rib-in']
and self['adj-rib-out'] == other['adj-rib-out']
and (auto_discovery or self['local-address'] == other['local-address'])
and self['capability'] == other['capability']
and self.auto_discovery == other.auto_discovery
and self.families() == other.families()
)
def __ne__(self, other):
return not self.__eq__(other)
def string(self, with_changes=True):
changes = ''
if with_changes:
changes += '\nstatic { '
for change in self.rib.outgoing.queued_changes():
changes += '\n\t\t%s' % change.extensive()
changes += '\n}'
families = ''
for afi, safi in self.families():
families += '\n\t\t%s %s;' % (afi.name(), safi.name())
nexthops = ''
for afi, safi, nexthop in self.nexthops():
nexthops += '\n\t\t%s %s %s;' % (afi.name(), safi.name(), nexthop.name())
addpaths = ''
for afi, safi in self.addpaths():
addpaths += '\n\t\t%s %s;' % (afi.name(), safi.name())
codes = Message.CODE
_extension_global = {
'neighbor-changes': 'neighbor-changes',
'negotiated': 'negotiated',
'fsm': 'fsm',
'signal': 'signal',
}
_extension_receive = {
'receive-packets': 'packets',
'receive-parsed': 'parsed',
'receive-consolidate': 'consolidate',
'receive-%s' % codes.NOTIFICATION.SHORT: 'notification',
'receive-%s' % codes.OPEN.SHORT: 'open',
'receive-%s' % codes.KEEPALIVE.SHORT: 'keepalive',
'receive-%s' % codes.UPDATE.SHORT: 'update',
'receive-%s' % codes.ROUTE_REFRESH.SHORT: 'refresh',
'receive-%s' % codes.OPERATIONAL.SHORT: 'operational',
}
_extension_send = {
'send-packets': 'packets',
'send-parsed': 'parsed',
'send-consolidate': 'consolidate',
'send-%s' % codes.NOTIFICATION.SHORT: 'notification',
'send-%s' % codes.OPEN.SHORT: 'open',
'send-%s' % codes.KEEPALIVE.SHORT: 'keepalive',
'send-%s' % codes.UPDATE.SHORT: 'update',
'send-%s' % codes.ROUTE_REFRESH.SHORT: 'refresh',
'send-%s' % codes.OPERATIONAL.SHORT: 'operational',
}
apis = ''
for process in self.api.get('processes', []):
_global = []
_receive = []
_send = []
for api, name in _extension_global.items():
_global.extend(
[
'\t\t%s;\n' % name,
]
if process in self.api[api]
else []
)
for api, name in _extension_receive.items():
_receive.extend(
[
'\t\t\t%s;\n' % name,
]
if process in self.api[api]
else []
)
for api, name in _extension_send.items():
_send.extend(
[
'\t\t\t%s;\n' % name,
]
if process in self.api[api]
else []
)
_api = '\tapi {\n'
_api += '\t\tprocesses [ %s ];\n' % process
_api += ''.join(_global)
if _receive:
_api += '\t\treceive {\n'
_api += ''.join(_receive)
_api += '\t\t}\n'
if _send:
_api += '\t\tsend {\n'
_api += ''.join(_send)
_api += '\t\t}\n'
_api += '\t}\n'
apis += _api
returned = (
'neighbor %s {\n'
'\tdescription "%s";\n'
'\trouter-id %s;\n'
'\thost-name %s;\n'
'\tdomain-name %s;\n'
'\tlocal-address %s;\n'
'\tlocal-as %s;\n'
'\tpeer-as %s;\n'
'\thold-time %s;\n'
'\trate-limit %s;\n'
'\tmanual-eor %s;\n'
'%s%s%s%s%s%s%s%s%s%s%s\n'
'\tcapability {\n'
'%s%s%s%s%s%s%s%s%s\t}\n'
'\tfamily {%s\n'
'\t}\n'
'\tnexthop {%s\n'
'\t}\n'
'\tadd-path {%s\n'
'\t}\n'
'%s'
'%s'
'}'
% (
self['peer-address'],
self['description'],
self['router-id'],
self['host-name'],
self['domain-name'],
self['local-address'] if not self.auto_discovery else 'auto',
self['local-as'],
self['peer-as'],
self['hold-time'],
'disable' if self['rate-limit'] == 0 else self['rate-limit'],
'true' if self['manual-eor'] else 'false',
'\n\tpassive %s;\n' % ('true' if self['passive'] else 'false'),
'\n\tlisten %d;\n' % self['listen'] if self['listen'] else '',
'\n\tconnect %d;\n' % self['connect'] if self['connect'] else '',
'\tgroup-updates %s;\n' % ('true' if self['group-updates'] else 'false'),
'\tauto-flush %s;\n' % ('true' if self['auto-flush'] else 'false'),
'\tadj-rib-in %s;\n' % ('true' if self['adj-rib-in'] else 'false'),
'\tadj-rib-out %s;\n' % ('true' if self['adj-rib-out'] else 'false'),
'\tmd5-password "%s";\n' % self['md5-password'] if self['md5-password'] else '',
'\tmd5-base64 %s;\n'
% ('true' if self['md5-base64'] is True else 'false' if self['md5-base64'] is False else 'auto'),
'\tmd5-ip "%s";\n' % self['md5-ip'] if not self.auto_discovery else '',
'\toutgoing-ttl %s;\n' % self['outgoing-ttl'] if self['outgoing-ttl'] else '',
'\tincoming-ttl %s;\n' % self['incoming-ttl'] if self['incoming-ttl'] else '',
'\t\tasn4 %s;\n' % ('enable' if self['capability']['asn4'] else 'disable'),
'\t\troute-refresh %s;\n' % ('enable' if self['capability']['route-refresh'] else 'disable'),
'\t\tgraceful-restart %s;\n'
% (self['capability']['graceful-restart'] if self['capability']['graceful-restart'] else 'disable'),
'\t\tnexthop %s;\n' % ('enable' if self['capability']['nexthop'] else 'disable'),
'\t\tadd-path %s;\n'
% (AddPath.string[self['capability']['add-path']] if self['capability']['add-path'] else 'disable'),
'\t\tmulti-session %s;\n' % ('enable' if self['capability']['multi-session'] else 'disable'),
'\t\toperational %s;\n' % ('enable' if self['capability']['operational'] else 'disable'),
'\t\taigp %s;\n' % ('enable' if self['capability']['aigp'] else 'disable'),
families,
nexthops,
addpaths,
apis,
changes,
)
)
# '\t\treceive {\n%s\t\t}\n' % receive if receive else '',
# '\t\tsend {\n%s\t\t}\n' % send if send else '',
return returned.replace('\t', ' ')
def __str__(self):
return self.string(False)
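if __name__ == '__main__':
    # Minimal sketch (not part of exabgp proper): build a bare Neighbor and
    # report the first mandatory field that is still unset. ASNs are made up.
    neighbor = Neighbor()
    neighbor['local-as'] = 65000
    neighbor['peer-as'] = 65001
    print(neighbor.missing())  # 'local-address' until one is configured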
| bsd-3-clause | -2,980,760,986,235,840,500 | 35.965591 | 116 | 0.507127 | false |
malisal/bfdpie | setup.py | 1 | 1563 | import os
import distutils
from setuptools import setup, Extension, Command
from distutils.command import build as build_module
from distutils.command.install import install
BINUTILS_VERSION = "binutils-2.26"
module = Extension(
name = "bfdpie._bfdpie",
sources = ["bfdpie.c"],
# Include dir is our own binutils
include_dirs= ["tmp/install/include/"],
# Link against what?
library_dirs=["tmp/install/lib/"],
libraries=["bfd", "opcodes", "iberty", "z"],
)
class BuildCommand(distutils.command.build.build):
def run(self):
# Download and compile binutils first
os.system("./bfdpie_build.sh %s" % (BINUTILS_VERSION))
build_module.build.run(self)
setup(
name = "bfdpie",
version = "0.1.14",
description = "A tiny interface around a subset of libBFD. Code based on https://github.com/Groundworkstech/pybfd",
author = "Luka Malisa",
author_email = "[email protected]",
url = "https://github.com/malisal/bfdpie",
keywords = ["binary", "libbfd"],
platforms=["any"],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
],
packages=["bfdpie"],
package_dir={"bfdpie": "bfdpie"},
ext_modules = [module],
test_suite = "tests",
install_requires = [
"wheel>=0.29.0",
],
package_data = {
"bfdpie" : ["bin/dummy.elf"],
},
cmdclass={
"build": BuildCommand,
}
)
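# Build-flow sketch (hedged): `python setup.py build` dispatches to
# BuildCommand above, which first runs ./bfdpie_build.sh to fetch and compile
# the pinned binutils into tmp/install/, then builds the C extension against
# the static libbfd/libopcodes found there.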
| mit | -7,680,653,282,204,861,000 | 22.681818 | 118 | 0.633397 | false |
mikr/whatstyle | tools/create_formatstyle_history.py | 1 | 11059 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This script uses a repurposed version of docs/tools/dump_format_style.py
# from http://llvm.org/git/clang.git
# to generate a version history of clang-format options for all commits.
#
# The file include/clang/Format/Format.h contains the ground truth about
# the options for each clang-format version.
#
# This script takes a clang repository as its only argument, e.g. llvm/tools/clang
# and needs 'git' in the PATH.
#
# The output is a diff-like representation that can be copied into whatstyle.py
# under CLANG_FORMAT_EVOLUTION to support new clang-formats options.
# The output looks like this:
# ---------------------------------------------------------------------
# # Clang bac016bd3f67ca2f4db1ddc619e611759352b84d
# + BasedOnStyle string
# LLVM
# Google
# + AccessModifierOffset int
# + ColumnLimit unsigned
# + MaxEmptyLinesToKeep unsigned
# + PointerAndReferenceBindToType bool
# + SplitTemplateClosingGreater bool
# # Clang 15757316d67cb7a854d53a0402d67ad58347600a
# + IndentCaseLabels bool
# # Clang 3.3
# + SpacesBeforeTrailingComments unsigned
# ...
# ---------------------------------------------------------------------
from __future__ import print_function
import sys
if (((sys.version_info[0] == 2) and (sys.version_info[1] < 7)) or (
(sys.version_info[0] == 3) and (sys.version_info[1] < 2))):
    sys.stderr.write('Error: Python 2.7, or at least Python 3.2 when running on Python 3,'
                     ' is required to run whatstyle\n')
sys.exit(1)
import argparse
import codecs
import re
import subprocess
from collections import OrderedDict, namedtuple
from pprint import pprint
import dumpformatoptions
FIRST_SUPPORTED_VERSION = '3.5'
# Uncomment the next line to support ancient clang versions.
# FIRST_SUPPORTED_VERSION = None
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY2:
text_type = unicode
binary_type = str
else:
text_type = str
binary_type = bytes
def unistr(text):
if not isinstance(text, text_type):
text = text.decode('utf-8')
return text
def bytestr(text):
if not isinstance(text, binary_type):
text = text.encode('utf-8')
return text
def outline(s, fp=None):
if fp is None:
fp = sys.stdout
fp.write(unistr(s + '\n'))
reportmessage = outline
def reporterror(s, fp=None):
if fp is None:
fp = sys.stderr
reportmessage(s, fp=fp)
reportwarning = reporterror
ExeResult = namedtuple('ExeResult', ['returncode', 'stdout', 'stderr'])
def run_executable(executable, cmdargs, stdindata=None):
proc = subprocess.Popen([executable] + cmdargs,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(input=stdindata)
return ExeResult(proc.returncode, stdout, stderr)
def gitcmd(cmdargs):
return run_executable('git', cmdargs)
def gitcat(cwd, commit, filename):
"""Returns the file content for a commit.
"""
ret, stdout, stderr = gitcmd(['-C', cwd, 'show', '%s:%s' % (commit, unistr(filename))])
return stdout
def git_commits(cwd, *relpaths):
"""Return a list of commit hashes for relpath sorted from newest to oldest.
"""
args = ['-C', cwd, 'log', '--pretty=oneline']
args.extend(relpaths)
ret, stdout, stderr = gitcmd(args)
lines = unistr(stdout).splitlines()
return list([l.split()[0] for l in lines])
def git_format_commits(cwd):
"""Yields a triple of
(commithash, content of Format.h, content of docs/conf.py)
for each commit of Format.h.
"""
relpaths = 'include/clang/Format/Format.h include/clang/Tooling/Inclusions/IncludeStyle.h'.split()
for commit in reversed(git_commits(cwd, *relpaths)):
format_h = unistr(gitcat(cwd, commit, relpaths[0]))
includestyle_h = unistr(gitcat(cwd, commit, relpaths[1]))
conf_py = unistr(gitcat(cwd, commit, 'docs/conf.py'))
yield commit, format_h, includestyle_h, conf_py
def parse_options(format_h_lines, includestyle_h):
"""Parses the options from the lines of Format.h
by using a modified version of clangs dumpformatoption.py.
Returns the options and a list of unknown option types.
"""
unknown_optiontypes = []
def isknownoptiontype(optiontype):
is_known_type = optiontype in [
'bool', 'unsigned', 'int', 'std::string', 'std::vector<std::string>',
'std::vector<IncludeCategory>', 'std::vector<RawStringFormat>',
'std::vector<std::pair<std::string, unsigned>>'
]
if is_known_type:
return True
elif '::' in optiontype:
# An attempt at future-proofing this code...
unknown_optiontypes.append(optiontype)
return True
return False
options = dumpformatoptions.read_options(format_h_lines, isknownoptiontype)
try:
options += dumpformatoptions.read_options(includestyle_h, isknownoptiontype)
except Exception as exc:
pass
options = sorted(options, key=lambda x: x.name)
return options, unknown_optiontypes
def parse_styles(clangworkdir):
"""Returns a list of style definitions for every commit of Format.h in the
following style, this example is abbreviated and modified for clarity.
[
(u'3.3',
[u'LLVM', u'Google', u'Chromium', u'Mozilla'],
[('AccessModifierOffset', ('int', [])),
('PointerBindsToType', ('bool', []))]),
(u'3.4',
[u'LLVM', u'Google', u'Chromium', u'Mozilla', u'Predefined'],
[('AccessModifierOffset', ('int', [])),
('BreakBeforeBraces', ('BraceBreakingStyle', [u'Linux', u'Allman'])),
('PointerBindsToType', ('bool', []))])
]
"""
unknown_types = set()
style_versions = []
for commit, format_h, includestyle_h, conf_py in git_format_commits(clangworkdir):
base_formats = []
release = commit
# Use the clang version number instead of the commithash
        # if it can be found in clang's conf.py.
for line in conf_py.splitlines():
m = re.match("release = '(.*)'", line)
if m:
release = m.group(1)
format_h_lines = format_h.splitlines()
# Record the format style names
# e.g. 'FormatStyle getChromiumStyle();' => 'Chromium'
for line in format_h_lines:
            m = re.match(r'\s*FormatStyle\s*get(\w+)Style\([^\)]*\);\s*', line)
if m:
formatname = m.group(1)
if formatname != 'No':
# NoStyle is not a style named No.
base_formats.append(formatname)
try:
options, unknown_optiontypes = parse_options(format_h_lines, includestyle_h)
except Exception:
continue
for t in unknown_optiontypes:
unknown_types.add(t)
style_options = []
for opt in options:
configs = []
if opt.enum:
for enumvalue in opt.enum.values:
configs.append(re.sub('.*_', '', enumvalue.name))
elif opt.nested_struct:
for nestedoption in opt.nested_struct.values:
configs.append(nestedoption.name)
style_options.append((opt.name, (opt.type, configs)))
style_versions.append((release, base_formats, style_options))
return style_versions
def generate_style_history(clangworkdir):
"""Prints the style available style configurations
following the commit history, e.g.:
# Clang 3.4
+ BasedOnStyle string
LLVM
WebKit
+ SpaceAfterControlStatementKeyword bool
# Clang 3.4
+ TabWidth unsigned
Never
ForIndentation
Always
# Clang 3.5
- SpaceAfterControlStatementKeyword bool
+ AllowShortFunctionsOnASingleLine bool
"""
prev_styles = OrderedDict([])
changelines = []
unknown_types = set()
style_versions = parse_styles(clangworkdir)
if not style_versions:
return
_, latest_base_formats, _ = style_versions[-1]
supported = FIRST_SUPPORTED_VERSION is None
for release, base_formats, options in style_versions:
if release == FIRST_SUPPORTED_VERSION:
supported = True
# To keep the history small skip ancient clang versions.
if not supported:
continue
# In old clang-format versions only use base format styles that
# are still supported in the most recent version.
base_formats = [b for b in base_formats if b in latest_base_formats]
options = [('BasedOnStyle', ('string', base_formats))] + options
options = OrderedDict(options)
actions = []
# Delete options that no longer exist since this commit
for optionname, optiondef in prev_styles.items():
if optionname not in options:
actions.append("- %s" % option_rep(optionname, optiondef))
# Add new options from this commit
for optionname, optiondef in options.items():
oldstyledef = prev_styles.get(optionname)
if optiondef != oldstyledef:
actions.append("+ %s" % option_rep(optionname, optiondef))
if actions:
changelines.append("# Clang " + release)
changelines.extend(actions)
prev_styles = options
outline("\n".join(changelines))
sys.stdout.flush()
if unknown_types:
reportwarning("\nWarning: Not all of the encountered option types could be"
" recognized"
"\n while building the history of known clang-format"
" options."
"\n This should not be a problem but here they are:")
for u in unknown_types:
reportwarning("Unknown option type: %s" % u)
def option_rep(optionname, optiondef):
"""Returns a textual representation of an option.
option_rep('IndentCaseLabels', ('bool', []))
=> 'IndentCaseLabels bool'
option_rep('PointerAlignment', ('PointerAlignmentStyle',
[u'Left', u'Right', u'Middle']))
=> 'PointerAlignment PointerAlignmentStyle
Left
Right
Middle'
"""
optiontype, configs = optiondef
fragments = [optionname + ' ' + optiontype]
for c in configs:
fragments.append(" " * 8 + c)
rep = "\n".join(fragments)
return rep
def main():
parser = argparse.ArgumentParser(
description='Create clang-format format history from a clang git repo.')
parser.add_argument('clangrepo', nargs=1, help='path of a clang repository')
args = parser.parse_args()
generate_style_history(args.clangrepo[0])
if __name__ == '__main__':
if PY3:
sys.stdout = codecs.getwriter('utf8')(sys.stdout.buffer)
sys.stderr = codecs.getwriter('utf8')(sys.stderr.buffer)
sys.exit(main())
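# Usage sketch: point the script at a clang checkout, e.g.
#   python create_formatstyle_history.py llvm/tools/clang
# The diff-like option history is written to stdout, ready to be pasted into
# whatstyle.py under CLANG_FORMAT_EVOLUTION.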
| mit | -326,049,171,635,495,600 | 32.110778 | 102 | 0.614522 | false |
schreiberx/sweet | scripts/pp_plot_lonlat_csv.py | 1 | 2786 | #! /usr/bin/python3
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import sys
first = True
s = 2e-5
eta_contour_levels = np.append(np.arange(-1e-4, 0, s), np.arange(s, 1e-4, s))
hs = 5
h_contour_levels = np.append(np.arange(900, 1000-hs, hs), np.arange(1000+hs, 1100, hs))
zoom_lat = True
zoom_lat = False
zoom_lat = 'eta' in sys.argv[1]
fontsize=8
figsize=(9, 3)
for filename in sys.argv[1:]:
print(filename)
data = np.loadtxt(filename, skiprows=3)
labelsx = data[0,1:]
labelsy = data[1:,0]
data = data[1:,1:]
if zoom_lat:
while labelsy[1] < 10:
labelsy = labelsy[1:]
data = data[1:]
while labelsy[-2] > 80:
labelsy = labelsy[0:-2]
data = data[0:-2]
# while labelsx[1] < 90:
# tmplabelsx = labelsx[0]
# labelsx[0:-1] = labelsx[1:]
# labelsx[-1] = tmplabelsx
#
# tmpdata = data[:,0]
# data[:,0:-1] = data[:,1:]
# data[:,-1] = tmpdata
if first:
lon_min = labelsx[0]
lon_max = labelsx[-1]
lat_min = labelsy[0]
lat_max = labelsy[-1]
new_labelsx = np.linspace(lon_min, lon_max, 7)
new_labelsy = np.linspace(lat_min, lat_max, 7)
labelsx = np.interp(new_labelsx, labelsx, labelsx)
labelsy = np.interp(new_labelsy, labelsy, labelsy)
if first:
cmin = np.amin(data)
cmax = np.amax(data)
if 'eta' in filename:
cmin = 1e-4
cmax = -1e-4
#cmin *= 1.2
#cmax *= 1.2
extent = (labelsx[0], labelsx[-1], labelsy[0], labelsy[-1])
plt.figure(figsize=figsize)
plt.imshow(data, interpolation='nearest', extent=extent, origin='lower', aspect='auto')
plt.clim(cmin, cmax)
cbar = plt.colorbar()
cbar.ax.tick_params(labelsize=fontsize)
plt.title(filename, fontsize=fontsize)
if 'prog_eta' in filename:
plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, levels=eta_contour_levels, linewidths=0.5)
elif 'prog_h' in filename:
plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, levels=h_contour_levels, linewidths=0.5)
# elif '_u' in filename:
# hs = 0.001
# h_contour_levels = np.append(np.arange(-0.1, 0-hs, hs), np.arange(hs, 0.1, hs))
# plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, levels=h_contour_levels, linewidths=0.5)
else:
if cmin != cmax:
pass
#plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, linewidths=0.5)
ax = plt.gca()
ax.xaxis.set_label_coords(0.5, -0.075)
plt.xticks(labelsx, fontsize=fontsize)
plt.xlabel("Longitude", fontsize=fontsize)
plt.yticks(labelsy, fontsize=fontsize)
plt.ylabel("Latitude", fontsize=fontsize)
#plt.show()
outfilename = filename.replace('.csv', '.png')
print(outfilename)
plt.savefig(outfilename, dpi=200)
plt.close()
first = False
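# Usage sketch: pass one or more lon/lat CSV dumps on the command line, e.g.
#   ./pp_plot_lonlat_csv.py prog_h_t0000.csv prog_h_t0001.csv
# (file names here are hypothetical). Each file is rendered to a .png next to
# it; the color limits are fixed from the first file processed.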
| mit | 2,323,065,064,202,803,000 | 21.836066 | 131 | 0.660804 | false |
skosukhin/spack | var/spack/repos/builtin/packages/aspell/package.py | 1 | 3695 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
from llnl.util.link_tree import LinkTree
import spack.store
from spack.package import ExtensionError, ExtensionConflictError
# See also: AspellDictPackage
class Aspell(AutotoolsPackage):
"""GNU Aspell is a Free and Open Source spell checker designed to
eventually replace Ispell."""
homepage = "http://aspell.net/"
url = "https://ftpmirror.gnu.org/aspell/aspell-0.60.6.1.tar.gz"
extendable = True # support activating dictionaries
version('0.60.6.1', 'e66a9c9af6a60dc46134fdacf6ce97d7')
# The dictionaries install all their bits into their prefix.lib dir,
# we want to link them into aspell's dict-dir.
# These are identical to what's in spack/package.py except
# for using:
# - extension.prefix.lib instead of extension.prefix in LinkTree()
# - dest_dir instead of self.prefix in tree.(find_conflict|merge)()
def activate(self, extension, **kwargs):
extensions_layout = kwargs.get("extensions_layout",
spack.store.extensions)
if extensions_layout is not spack.store.extensions:
raise ExtensionError(
'aspell does not support non-global extensions')
aspell = which(self.prefix.bin.aspell)
dest_dir = aspell('dump', 'config', 'dict-dir', output=str).strip()
tree = LinkTree(extension.prefix.lib)
def ignore(filename):
return (filename in spack.store.layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
conflict = tree.find_conflict(dest_dir, ignore=ignore)
if conflict:
raise ExtensionConflictError(conflict)
tree.merge(dest_dir, ignore=ignore)
def deactivate(self, extension, **kwargs):
extensions_layout = kwargs.get("extensions_layout",
spack.store.extensions)
if extensions_layout is not spack.store.extensions:
raise ExtensionError(
'aspell does not support non-global extensions')
aspell = which(self.prefix.bin.aspell)
dest_dir = aspell('dump', 'config', 'dict-dir', output=str).strip()
def ignore(filename):
return (filename in spack.store.layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix.lib)
tree.unmerge(dest_dir, ignore=ignore)
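# Usage sketch (hedged): dictionaries built as Spack extensions of this
# package (see AspellDictPackage) install under their own prefix.lib; the
# activate()/deactivate() hooks above link/unlink them into the dict-dir
# reported by `aspell dump config dict-dir`.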
| lgpl-2.1 | -2,902,301,946,385,594,400 | 42.470588 | 78 | 0.651421 | false |
demisto/content | Packs/CommonScripts/Scripts/ModifyDateTime/ModifyDateTime_test.py | 1 | 1122 | import pytest
import dateparser
from ModifyDateTime import apply_variation
@pytest.mark.parametrize('original_time, variation, expected', [
# sanity
('2020/01/01', 'in 1 day', '2020-01-02T00:00:00'),
# textual variation 1
('2020/01/01', 'yesterday', '2019-12-31T00:00:00'),
# textual variation 2
('2020/01/01', 'next month', '2020-02-01T00:00:00'),
# negative variation
('2020-01-01T01:30:00', '-15m', '2020-01-01T01:15:00'),
# positive variation (treated the same as negative according to datetime.parse)
('2020-01-01T01:30:00', '15m', '2020-01-01T01:15:00'),
# zulu timezone
('2020-01-01T10:00:00Z', 'in 15m', '2020-01-01T10:15:00Z'),
# GMT
('2020-01-01T01:30:00+00:00', '15m', '2020-01-01T01:15:00+00:00'),
# GMT+
('2020-01-01T01:30:00+02:00', '15m', '2020-01-01T01:15:00+02:00'),
# GMT-
('2020-01-01T01:30:00-04:00', '15m', '2020-01-01T01:15:00-04:00'),
])
def test_apply_variation(original_time, variation, expected):
results = apply_variation(dateparser.parse(original_time), variation)
assert results == (dateparser.parse(expected))
| mit | -3,887,013,172,126,213,000 | 39.071429 | 83 | 0.642602 | false |
rwalk333/pyquadprog | test/test.py | 1 | 2107 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Test routines for the quadprog package. Except where noted, all examples are drawn
from the R quadprog package documentation and test suite.
"""
import numpy as np
import quadprog
import unittest
class TestQuadprog(unittest.TestCase):
def setUp(self):
print(dir(quadprog))
pass
def test_solveQP_basic(self):
'''Solve a simple quadratic program.
Minimize in x: -(0 5 0) %*% x + 1/2 x^T x
        Subject to: A^T x >= b0
with b0 = (-8,2,0)^T
and (-4 2 0)
A = (-3 1 -2)
( 0 0 1)
'''
expected = [0.4761905, 1.0476190, 2.0952381]
Dmat = np.identity(3)
dvec = np.array([0,5,0])
Amat = np.array([[-4, 2, 0],[-3, 1, -2], [0, 0, 1]])
bvec = np.array([-8,2,0])
sol = quadprog.solveQP(Dmat,dvec,Amat,bvec)
print(self.test_solveQP_basic.__doc__ + '\nExpected: ' + expected.__str__())
np.testing.assert_almost_equal(sol.solution, np.array(expected))
def test_solveCompactFormQP_basic(self):
        '''Solve a simple quadratic program using the compact storage format for the constraint data.
        Minimize in x: -(0 5 0) %*% x + 1/2 x^T x
        Subject to: A^T x >= b0
with b0 = (-8,2,0)^T
and (-4 2 0)
A = (-3 1 -2)
( 0 0 1)
using a compact form of A.
'''
expected = [0.4761905, 1.0476190, 2.0952381]
Dmat = np.identity(3)
dvec = np.array([0,5,0])
Aind = np.array([[2,2,2], [1,1,2], [2,2,3]])
Amat = np.array([[-4,2,-2],[-3,1,1]])
bvec = np.array([-8,2,0])
sol = quadprog.solveCompactFormQP(Dmat, dvec, Amat, Aind, bvec)
print(self.test_solveCompactFormQP_basic.__doc__+ '\nExpected: ' + expected.__str__())
np.testing.assert_almost_equal(sol.solution, np.array(expected))
if __name__ == "__main__":
unittest.main()
| lgpl-2.1 | -8,512,066,933,382,506,000 | 30.924242 | 100 | 0.505458 | false |
crustycrab/Risk-Prototype | graphics.py | 1 | 1788 | import pygame
import os
import random
import res
class Camera:
def __init__(self):
self.x = self.y = 0
self.speed = 500
def update(self, dt, key_state):
speed = self.speed * dt
if key_state[0]:
self.x = min(self.x + speed, 0)
if key_state[1]:
self.x = max(self.x - speed, res.WIN_WIDTH - res.MAP_WIDTH)
if key_state[2]:
self.y = min(self.y + speed, 0)
if key_state[3]:
self.y = max(self.y - speed, res.WIN_HEIGHT - res.MAP_HEIGHT)
def convert_pos(self, pos):
return (pos[0] - self.x, pos[1] - self.y)
def get_pos(self):
return (self.x, self.y)
def set_pos(self, pos):
self.x, self.y = pos
class Hud:
def __init__(self):
pass
class Stars:
def __init__(self, num_stars=256):
self.num_stars = num_stars
self.stars = []
self.gen_stars()
def draw(self, surface):
for star in self.stars:
pygame.draw.rect(surface, star['color'], star['rect'], 1)
def update(self, dt):
for i, star in enumerate(self.stars):
speed = star['speed'] * dt
x, y = star['rect'].topleft
x -= speed
if x < 0:
x, y = (res.MAP_WIDTH + x, random.randint(0, res.MAP_HEIGHT))
self.stars[i]['rect'].topleft = (int(x), y)
def gen_stars(self):
for _ in range(self.num_stars):
x, y = self.get_random_cords()
star = {'speed': random.randint(1, 100),
'rect': pygame.Rect((x, y), (random.randint(2, 4),) * 2),
'color': (random.randint(153, 204), random.randint(153, 204), random.randint(178, 229))}
self.stars.append(star)
def get_random_cords(self):
return (random.randint(0, res.MAP_WIDTH - 1), random.randint(0, res.MAP_HEIGHT - 1))
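# Illustrative per-frame usage (names are made up; assumes the pygame display
# is initialized and the res module defines the MAP_/WIN_ sizes used above):
#   camera = Camera(); stars = Stars()
#   stars.update(dt)
#   stars.draw(background)
#   camera.update(dt, key_state)  # key_state: (left, right, up, down) booleans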
| mit | 574,214,615,529,656,260 | 24.913043 | 108 | 0.553691 | false |
tonybeltramelli/Deep-Lyrics | gather.py | 1 | 3064 | #!/usr/bin/env python
__author__ = 'Tony Beltramelli www.tonybeltramelli.com - 09/07/2016'
import argparse
import os
import urllib2
import re
import codecs
from threading import Thread
from HTMLParser import HTMLParser
DOMAIN = "songmeanings.com/"
ARTIST_PATH = 'artist/view/songs/'
def start_new_thread(task, arg):
thread = Thread(target=task, args=(arg,))
thread.start()
def write_to_file(path, data):
output_file = codecs.open(path, 'a', 'utf_8')
output_file.write(data.encode('utf-8'))
output_file.write("\n")
output_file.close()
def get_url(path, arg = ""):
return 'http://' + DOMAIN + path + arg
def get_page_content(url):
response = urllib2.urlopen(url)
return response.read()
class SongPageParser(HTMLParser):
record = False
lyrics = ""
output_path = ""
def handle_starttag(self, tag, attrs):
for attr in attrs:
if attr[0] == "class" and attr[1].find('lyric-box') != -1:
self.record = True
if attr[0] == "id" and attr[1].find('lyrics-edit') != -1:
self.record = False
write_to_file(self.output_path, self.lyrics)
self.lyrics = ""
def handle_data(self, data):
if self.record:
self.lyrics += re.sub(r'[^\x00-\x7F]+', '\'', data.lstrip()) + "\n"
class ArtistPageParser(HTMLParser):
match = 0
url = ""
title = ""
output_path = ""
def handle_starttag(self, tag, attrs):
href = None
for attr in attrs:
if attr[0] == "id" and attr[1].find('lyric-') != -1:
self.match += 1
if attr[0] == "href" and attr[1].find(DOMAIN) != -1:
self.match += 1
href = attr[1]
if self.match > 1 and href is not None:
self.url = href[href.find(DOMAIN) + len(DOMAIN):]
def handle_endtag(self, tag):
self.match = 0
def handle_data(self, data):
if self.match > 1:
self.title = data
html = get_page_content(get_url(self.url))
song_parser = SongPageParser()
song_parser.output_path = self.output_path
start_new_thread(song_parser.feed, html)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--output_file', type=str, required=True)
parser.add_argument('--artists', type=str, required=True)
args = parser.parse_args()
output_file = args.output_file
artists = args.artists.replace(' ', '').split(',')
try:
os.remove(output_file)
except OSError:
print "The output file doesn't exist, creating it"
print "Gathering lyrics..."
for i, artist in enumerate(artists):
html = get_page_content(get_url(ARTIST_PATH, artist))
artist_parser = ArtistPageParser()
artist_parser.output_path = output_file
artist_parser.feed(html)
print "Progress: {}%".format(((i + 1) * 100) / len(artists))
print "Lyrics saved in {}".format(output_file)
if __name__ == "__main__":
main()
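# Usage sketch: artist ids are the numeric songmeanings.com artist ids, e.g.
#   python gather.py --output_file lyrics.txt --artists 200,300
# (the ids above are made up); the lyrics of every song of each artist are
# appended to the output file.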
| mit | 6,959,599,421,735,938,000 | 26.357143 | 79 | 0.579308 | false |
Naeka/vosae-app | www/notification/api/resources/invoicing_notifications/make_invoice.py | 1 | 3176 | # -*- coding:Utf-8 -*-
from tastypie_mongoengine import fields
from notification.api.resources.base import NotificationBaseResource
from notification.api.doc import HELP_TEXT
from notification.models import invoicing_notifications
__all__ = (
'QuotationMakeInvoiceResource',
'QuotationMakeDownPaymentInvoiceResource',
'PurchaseOrderMakeInvoiceResource',
'PurchaseOrderMakeDownPaymentInvoiceResource',
)
class QuotationMakeInvoiceResource(NotificationBaseResource):
quotation = fields.ReferenceField(
to='invoicing.api.resources.DownPaymentInvoiceResource',
attribute='quotation',
help_text=HELP_TEXT['quotation_make_invoice']['quotation']
)
invoice = fields.ReferenceField(
to='invoicing.api.resources.InvoiceResource',
attribute='invoice',
help_text=HELP_TEXT['quotation_make_invoice']['invoice']
)
class Meta(NotificationBaseResource.Meta):
resource_name = 'quotation_make_invoice'
object_class = invoicing_notifications.QuotationMakeInvoice
class QuotationMakeDownPaymentInvoiceResource(NotificationBaseResource):
quotation = fields.ReferenceField(
to='invoicing.api.resources.DownPaymentInvoiceResource',
attribute='quotation',
help_text=HELP_TEXT['quotation_make_invoice']['quotation']
)
down_payment_invoice = fields.ReferenceField(
to='invoicing.api.resources.DownPaymentInvoiceResource',
attribute='down_payment_invoice',
help_text=HELP_TEXT['quotation_make_invoice']['down_payment_invoice']
)
class Meta(NotificationBaseResource.Meta):
resource_name = 'quotation_make_down_payment_invoice'
object_class = invoicing_notifications.QuotationMakeDownPaymentInvoice
class PurchaseOrderMakeInvoiceResource(NotificationBaseResource):
purchase_order = fields.ReferenceField(
to='invoicing.api.resources.DownPaymentInvoiceResource',
attribute='purchase_order',
help_text=HELP_TEXT['purchase_order_make_invoice']['purchase_order']
)
invoice = fields.ReferenceField(
to='invoicing.api.resources.InvoiceResource',
attribute='invoice',
help_text=HELP_TEXT['purchase_order_make_invoice']['invoice']
)
class Meta(NotificationBaseResource.Meta):
resource_name = 'purchase_order_make_invoice'
object_class = invoicing_notifications.PurchaseOrderMakeInvoice
class PurchaseOrderMakeDownPaymentInvoiceResource(NotificationBaseResource):
purchase_order = fields.ReferenceField(
to='invoicing.api.resources.DownPaymentInvoiceResource',
attribute='purchase_order',
help_text=HELP_TEXT['purchase_order_make_invoice']['purchase_order']
)
down_payment_invoice = fields.ReferenceField(
to='invoicing.api.resources.DownPaymentInvoiceResource',
attribute='down_payment_invoice',
help_text=HELP_TEXT['purchase_order_make_invoice']['down_payment_invoice']
)
class Meta(NotificationBaseResource.Meta):
        resource_name = 'purchase_order_make_down_payment_invoice'
object_class = invoicing_notifications.PurchaseOrderMakeDownPaymentInvoice
| agpl-3.0 | 2,691,955,120,545,300,000 | 37.26506 | 82 | 0.732997 | false |
talapus/Ophidian | Flask_fu/shell.py | 1 | 1369 | import sys
from datetime import datetime
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask
from os import popen
app = Flask(__name__)
@app.route('/')
def main_form():
# app.logger.warning('A warning occurred (%d apples)', 42)
# app.logger.error('An error occurred')
# print('main_form', file=sys.stderr)
app.logger.info('main_form')
return '<form action="submit" id="textform" method="post"><textarea name="text">Hi</textarea><input type="submit" value="Submit"></form>'
'''
@app.route('/submit', methods=['POST'])
def submit_textarea():
# print('submit_textarea', file=sys.stderr)
# app.logger.info('submit_textarea')
app.logger.info('{} Submitted: {}'.format(datetime.now(), request.form["text"]))
return '{}'.format(request.form["text"])
def write_notes():
# print('write_notes', file=sys.stderr)
app.logger.info('{} write_notes'.format(datetime.now()))
with open ('notes.txt', 'w') as notes:
notes.write(submit_textarea())
'''
@app.route('/sh/<input>')
def bones(input):
data = popen('{}'.format(input)).read()
return ('<tt>{}</tt>'.format(data.replace('\n', '<br>')))
if __name__ == '__main__':
handler = RotatingFileHandler('foo.log', maxBytes=10000, backupCount=1)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
app.run() | bsd-3-clause | 404,746,768,136,102,460 | 30.136364 | 141 | 0.652301 | false |
PurpleMyst/porcupine | porcupine/_logs.py | 1 | 2465 | import itertools
import logging
import os
import platform
import sys
if platform.system() == 'Windows': # noqa
import msvcrt
else: # noqa
import fcntl
from porcupine import dirs
def _lock(fileno):
"""Try to lock a file. Return True on success."""
# closing the file unlocks it, so we don't need to unlock here
if platform.system() == 'Windows':
try:
msvcrt.locking(fileno, msvcrt.LK_NBLCK, 10)
return True
except PermissionError:
return False
else:
try:
fcntl.lockf(fileno, fcntl.LOCK_EX | fcntl.LOCK_NB)
return True
# the docs recommend catching both of these
except (BlockingIOError, PermissionError):
return False
def _open_log_file():
"""Open a Porcupine log file.
Usually this opens and overwrites log.txt. If another Porcupine
process has it currently opened, this opens log1.txt instead, then
log2.txt and so on.
"""
# create an iterator 'log.txt', 'log2.txt', 'log3.txt', ...
filenames = itertools.chain(
['log.txt'],
map('log{}.txt'.format, itertools.count(start=2)),
)
for filename in filenames:
path = os.path.join(dirs.cachedir, filename)
# unfortunately there's not a mode that would open in write but
# not truncate like 'w' or seek to end like 'a'
fileno = os.open(path, os.O_WRONLY | os.O_CREAT, 0o644)
if _lock(fileno):
# now we can delete the old content, can't use os.truncate
# here because it doesn't exist on windows
file = open(fileno, 'w')
file.truncate(0)
return file
else:
os.close(fileno)
# FileHandler doesn't take already opened files and StreamHandler
# doesn't close the file :(
class _ClosingStreamHandler(logging.StreamHandler):
def close(self):
self.stream.close()
def setup(file=None):
if file is None:
handler = _ClosingStreamHandler(_open_log_file())
elif file in (sys.stdout, sys.stderr):
# somehow closing these files just feels wrong
handler = logging.StreamHandler(file)
else:
handler = _ClosingStreamHandler(file)
handler.setLevel(logging.DEBUG)
handler.setFormatter(logging.Formatter(
"[PID {} %(levelname)s] %(name)s: %(message)s".format(os.getpid())
))
logging.basicConfig(level=logging.DEBUG, handlers=[handler])
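if __name__ == '__main__':
    # Minimal sketch: route logging to stderr instead of a cache-dir file and
    # emit one record to confirm the handler format.
    setup(sys.stderr)
    logging.getLogger(__name__).debug('logging initialized')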
| mit | -8,319,217,412,416,285,000 | 28.698795 | 74 | 0.625963 | false |
carpetri/nyu-lab-travis-ci | models.py | 1 | 8465 | ######################################################################
# Copyright 2016, 2017 John Rofrano. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
######################################################################
"""
Pet Model that uses Redis
You must initialize this class before use by calling init_db().
This class looks for an environment variable called VCAP_SERVICES
to get its database credentials from. If it cannot find one, it
tries to connect to Redis on the localhost. If that fails it looks
for a server name 'redis' to connect to.
"""
import os
import json
import logging
import pickle
from redis import Redis
from redis.exceptions import ConnectionError
class DataValidationError(Exception):
""" Custom Exception with data validation fails """
pass
class Pet(object):
""" Pet interface to database """
logger = logging.getLogger(__name__)
redis = None
def __init__(self, id=0, name=None, category=None, available=True):
""" Constructor """
self.id = int(id)
self.name = name
self.category = category
self.available = available
def save(self):
""" Saves a Pet in the database """
if self.name is None: # name is the only required field
raise DataValidationError('name attribute is not set')
if self.id == 0:
self.id = Pet.__next_index()
Pet.redis.set(self.id, pickle.dumps(self.serialize()))
def delete(self):
""" Deletes a Pet from the database """
Pet.redis.delete(self.id)
def serialize(self):
""" serializes a Pet into a dictionary """
return {
"id": self.id,
"name": self.name,
"category": self.category,
"available": self.available
}
def deserialize(self, data):
""" deserializes a Pet my marshalling the data """
try:
self.name = data['name']
self.category = data['category']
self.available = data['available']
except KeyError as error:
raise DataValidationError('Invalid pet: missing ' + error.args[0])
except TypeError as error:
raise DataValidationError('Invalid pet: body of request contained bad or no data')
return self
######################################################################
    # S T A T I C   D A T A B A S E   M E T H O D S
######################################################################
@staticmethod
def __next_index():
""" Increments the index and returns it """
return Pet.redis.incr('index')
@staticmethod
def remove_all():
""" Removes all Pets from the database """
Pet.redis.flushall()
@staticmethod
def all():
""" Query that returns all Pets """
# results = [Pet.from_dict(redis.hgetall(key)) for key in redis.keys() if key != 'index']
results = []
for key in Pet.redis.keys():
            if key != 'index':  # filter out our id index
data = pickle.loads(Pet.redis.get(key))
pet = Pet(data['id']).deserialize(data)
results.append(pet)
return results
######################################################################
# F I N D E R M E T H O D S
######################################################################
@staticmethod
def find(pet_id):
""" Query that finds Pets by their id """
if Pet.redis.exists(pet_id):
data = pickle.loads(Pet.redis.get(pet_id))
pet = Pet(data['id']).deserialize(data)
return pet
return None
@staticmethod
def __find_by(attribute, value):
""" Generic Query that finds a key with a specific value """
# return [pet for pet in Pet.__data if pet.category == category]
Pet.logger.info('Processing %s query for %s', attribute, value)
if isinstance(value, str):
search_criteria = value.lower() # make case insensitive
else:
search_criteria = value
results = []
for key in Pet.redis.keys():
            if key != 'index':  # filter out our id index
data = pickle.loads(Pet.redis.get(key))
# perform case insensitive search on strings
if isinstance(data[attribute], str):
test_value = data[attribute].lower()
else:
test_value = data[attribute]
if test_value == search_criteria:
results.append(Pet(data['id']).deserialize(data))
return results
@staticmethod
def find_by_name(name):
""" Query that finds Pets by their name """
return Pet.__find_by('name', name)
@staticmethod
def find_by_category(category):
""" Query that finds Pets by their category """
return Pet.__find_by('category', category)
@staticmethod
def find_by_availability(available=True):
""" Query that finds Pets by their availability """
return Pet.__find_by('available', available)
######################################################################
# R E D I S D A T A B A S E C O N N E C T I O N M E T H O D S
######################################################################
@staticmethod
def connect_to_redis(hostname, port, password):
""" Connects to Redis and tests the connection """
Pet.logger.info("Testing Connection to: %s:%s", hostname, port)
Pet.redis = Redis(host=hostname, port=port, password=password)
try:
Pet.redis.ping()
Pet.logger.info("Connection established")
except ConnectionError:
Pet.logger.info("Connection Error from: %s:%s", hostname, port)
Pet.redis = None
return Pet.redis
@staticmethod
def init_db(redis=None):
"""
        Initializes the Redis database connection.
This method will work in the following conditions:
1) In Bluemix with Redis bound through VCAP_SERVICES
2) With Redis running on the local server as with Travis CI
3) With Redis --link in a Docker container called 'redis'
4) Passing in your own Redis connection object
Exception:
----------
redis.ConnectionError - if ping() test fails
"""
if redis:
Pet.logger.info("Using client connection...")
Pet.redis = redis
try:
Pet.redis.ping()
Pet.logger.info("Connection established")
except ConnectionError:
Pet.logger.error("Client Connection Error!")
Pet.redis = None
raise ConnectionError('Could not connect to the Redis Service')
return
# Get the credentials from the Bluemix environment
if 'VCAP_SERVICES' in os.environ:
Pet.logger.info("Using VCAP_SERVICES...")
vcap_services = os.environ['VCAP_SERVICES']
services = json.loads(vcap_services)
creds = services['rediscloud'][0]['credentials']
Pet.logger.info("Conecting to Redis on host %s port %s",
creds['hostname'], creds['port'])
Pet.connect_to_redis(creds['hostname'], creds['port'], creds['password'])
else:
Pet.logger.info("VCAP_SERVICES not found, checking localhost for Redis")
Pet.connect_to_redis('127.0.0.1', 6379, None)
if not Pet.redis:
Pet.logger.info("No Redis on localhost, looking for redis host")
Pet.connect_to_redis('redis', 6379, None)
if not Pet.redis:
# if you end up here, redis instance is down.
Pet.logger.fatal('*** FATAL ERROR: Could not connect to the Redis Service')
raise ConnectionError('Could not connect to the Redis Service')
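
######################################################################
#  U S A G E   S K E T C H
######################################################################
# A minimal, hypothetical example of how this model might be exercised;
# it assumes a reachable Redis server and that Pet.logger was configured
# by the application before init_db() is called.
#
#   Pet.init_db()                        # resolve a Redis connection
#   Pet.remove_all()                     # start from a clean keyspace
#   dogs = Pet.find_by_category('dog')   # case-insensitive attribute match
#   pets = Pet.all()                     # every stored Pet, unpickled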
| apache-2.0 | 8,615,930,819,414,579,000 | 37.303167 | 97 | 0.555936 | false |
malirod/pylua | pylua/validator.py | 1 | 2300 | # -*- coding: utf-8 -*-
import xml.etree.ElementTree as etree
from json import JSONDecoder
class Validator:
_errors = [(0, 'Ok'), (1, "Function not found"), (2, "Validation error")]
_error_index_ok = 0
_error_index_not_found = 1
_error_index_error = 2
def __init__(self):
self._schema = None
def load_schema_from_string(self, xml_string):
assert xml_string is not None
try:
self._schema = etree.fromstring(xml_string)
except etree.ParseError:
return False
return True
@staticmethod
def _validate_param(schema_param_name, schema_param_is_mandatory,
schema_param_type, params):
assert schema_param_name is not None
assert schema_param_is_mandatory is not None
assert schema_param_type is not None
params_obj = JSONDecoder().decode(params)
if params_obj.get(schema_param_name) is None:
return False
return True
def _validate(self, function_item, params):
# This is very simple validation, will work only with test data
schema_params = function_item.findall('param')
is_schema_params_empty = len(schema_params) == 0
if not is_schema_params_empty and params is None:
return self._errors[self._error_index_error]
if is_schema_params_empty and params is None:
return self._errors[self._error_index_ok]
for param in schema_params:
validated = self._validate_param(
param.get('name'),
param.get('mandatory'),
param.get('type'),
params)
if not validated:
return self._errors[self._error_index_error]
return self._errors[self._error_index_ok]
def validate(self, function_id, function_type, params=None):
assert function_id is not None
assert function_type is not None
assert self._schema is not None
for function_item in self._schema.findall('function'):
if (function_id == function_item.get('id')
and function_type == function_item.get('type')):
return self._validate(function_item, params)
return self._errors[self._error_index_not_found]
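
# A minimal usage sketch with a hypothetical schema (the XML layout matches
# what validate()/_validate() read: <function> elements with <param> children):
#
#   schema = ('<functions>'
#             '<function id="send" type="request">'
#             '<param name="to" mandatory="true" type="string"/>'
#             '</function>'
#             '</functions>')
#   validator = Validator()
#   assert validator.load_schema_from_string(schema)
#   code, text = validator.validate('send', 'request', '{"to": "user1"}')
#   # code, text == (0, 'Ok'); omitting "to" gives (2, 'Validation error')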
| mit | -6,456,706,902,619,544,000 | 35.507937 | 77 | 0.600435 | false |
gjover/Lima_subtree | applications/tango/camera/Maxipix.py | 1 | 25664 |
############################################################################
# This file is part of LImA, a Library for Image Acquisition
#
# Copyright (C) : 2009-2011
# European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
############################################################################
#=============================================================================
#
# file : Maxipix.py
#
# description : Python source for the Maxipix and its commands.
# The class is derived from Device. It represents the
# CORBA servant object which will be accessed from the
# network. All commands which can be executed on the
#                Maxipix are implemented in this file.
#
# project : TANGO Device Server
#
# copyleft : European Synchrotron Radiation Facility
# BP 220, Grenoble 38043
# FRANCE
#
#=============================================================================
# (c) - Bliss - ESRF
#=============================================================================
#
import PyTango
import sys, types, os, time
from Lima import Core
from Lima.Maxipix.MpxCommon import MpxError
class Maxipix(PyTango.Device_4Impl):
Core.DEB_CLASS(Core.DebModApplication, 'LimaCCDs')
#------------------------------------------------------------------
# Device constructor
#------------------------------------------------------------------
def __init__(self,*args) :
PyTango.Device_4Impl.__init__(self,*args)
self.init_device()
#------------------------------------------------------------------
# Device destructor
#------------------------------------------------------------------
def delete_device(self):
pass
#------------------------------------------------------------------
# Device initialization
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def init_device(self):
self.set_state(PyTango.DevState.ON)
self.get_device_properties(self.get_device_class())
_PriamAcq = _MaxipixAcq.getPriamAcq()
self.__SignalLevel = {'LOW_FALL': _PriamAcq.LOW_FALL,\
'HIGH_RISE': _PriamAcq.HIGH_RISE}
self.__ReadyMode = {'EXPOSURE': _PriamAcq.EXPOSURE,\
'EXPOSURE_READOUT': _PriamAcq.EXPOSURE_READOUT}
self.__GateMode = {'INACTIVE': _PriamAcq.INACTIVE,\
'ACTIVE': _PriamAcq.ACTIVE}
self.__FillMode = _MaxipixAcq.mpxFillModes
self.__dacname = "thl"
#Init default Path
if self.config_path:
try:
_MaxipixAcq.setPath(self.config_path)
except MpxError as error:
PyTango.Except.throw_exception('DevFailed',\
'MpxError: %s'%(error),\
'Maxipix Class')
#Load default config
if self.config_name:
try:
_MaxipixAcq.loadConfig(self.config_name)
except MpxError as error:
PyTango.Except.throw_exception('DevFailed',\
'MpxError: %s'%(error),\
'Maxipix Class')
#set the priamAcq attributes with properties if any
for attName in ['fill_mode','ready_mode','ready_level','gate_mode','gate_level','shutter_level','trigger_level'] :
self.__setMaxipixAttr(attName,None)
#==================================================================
#
# Some Utils
#
#==================================================================
def __getDictKey(self,dict, value):
try:
ind = dict.values().index(value)
except ValueError:
return None
return dict.keys()[ind]
def __getDictValue(self,dict, key):
try:
value = dict[key.upper()]
except KeyError:
return None
return value
def __getMaxipixAttr(self,attr_name):
_PriamAcq = _MaxipixAcq.getPriamAcq()
name = ''.join([name.capitalize() for name in attr_name.split('_')])
attr = getattr(self,attr_name)
if attr_name.count('level'):
dictInstance = self.__SignalLevel
else:
dictInstance = getattr(self,'_Maxipix__%s' % name)
        if attr_name.count('fill_mode'):
            getMethod = getattr(_MaxipixAcq, 'get%s' % name)
        else:
            getMethod = getattr(_PriamAcq, 'get%s' % name)
setattr(self,attr_name, self.__getDictKey(dictInstance,getMethod()))
return getattr(self,attr_name)
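        # Worked example of the name mangling above: attr_name 'ready_mode'
        # capitalizes to 'ReadyMode', so getMethod resolves to
        # _PriamAcq.getReadyMode() and dictInstance to self.__ReadyMode.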
def __getValueList(self, attr_name):
name = ''.join([name.capitalize() for name in attr_name.split('_')])
if attr_name.count('level'):
valueList = self.__SignalLevel.keys()
elif attr_name.count('mode'):
valueList = getattr(self,'_Maxipix__%s' % name).keys()
elif attr_name.count('config_name'):
valueList = self.__getConfigNameList()
else:
valueList = []
return valueList
def __setMaxipixAttr(self,attr_name, key=None):
_PriamAcq = _MaxipixAcq.getPriamAcq()
name = ''.join([name.capitalize() for name in attr_name.split('_')])
attr = getattr(self,attr_name)
if attr_name.count('level'):
dictInstance = self.__SignalLevel
else:
dictInstance = getattr(self,'_Maxipix__%s' % name)
if attr_name.count('fill_mode'):
getMethod = getattr(_MaxipixAcq,'get%s' % name)
setMethod = getattr(_MaxipixAcq,'set%s' % name)
else:
getMethod = getattr(_PriamAcq,'get%s' % name)
setMethod = getattr(_PriamAcq,'set%s' % name)
if key != None:
# just set a new value for this attribute
attrValue = self.__getDictValue(dictInstance,key)
if attrValue == None:
PyTango.Except.throw_exception('DevFailed',\
'Wrong value %s: %s'%(attr_name,key),\
'Maxipix Class')
else:
setMethod(attrValue)
attrNewKey = key
else:
# here set attribute from the property value
# if the property is missing (=[]) then initialize the attribute by reading the hardware
if attr == []:
attrNewKey = self.__getDictKey(dictInstance,getMethod())
elif type(attr) is not types.StringType:
PyTango.Except.throw_exception('WrongData',\
'Wrong value %s: %s'%(attr_name,attr),\
'Maxipix Class')
else:
attrValue = self.__getDictValue(dictInstance,attr)
if attrValue == None:
PyTango.Except.throw_exception('WrongData',\
'Wrong value %s: %s'%(attr_name,attr),\
'Maxipix Class')
else:
setMethod(attrValue)
attrNewKey = attr
# set the new attribute value as upper string
setattr(self,attr_name, attrNewKey.upper())
def __getConfigNameList(self):
spath= os.path.normpath(self.config_path)
if not os.path.isdir(spath):
PyTango.Except.throw_exception('WrongData',\
'Invalid path: %s'%(self.config_path),\
'Maxipix Class')
else:
dirList = os.listdir(spath)
fileDict={}
fileList=[]
for file in dirList:
if file.endswith('.cfg'):
filePath = spath+'/'+file
fileStat = os.stat(filePath)
modifiedTime = fileStat.st_mtime
fileDict[modifiedTime]= file.strip('.cfg')
if fileDict:
            timeList = sorted(fileDict.keys())
for mTime in timeList:
fileList.append(fileDict[mTime])
#fileList.append(time.ctime(mTime))
return fileList
#==================================================================
#
# Maxipix read/write attribute methods
#
#==================================================================
## @brief Read the current dac name
#
def read_dac_name(self,attr) :
attr.set_value(self.__dacname)
## @brief Write dac name
#
def write_dac_name(self,attr) :
data = attr.get_write_value()
dacs = _MaxipixAcq.mpxDacs
if data not in dacs.getListKeys():
PyTango.Except.throw_exception('WrongData',\
'Wrong value %s: %s'%('dac_name',data),\
'Maxipix Class')
        # get_write_value() returns the string itself, not a sequence
        self.__dacname = data
## @brief Read the possible dac names
#
def read_dac_possible(self,attr) :
dacs = _MaxipixAcq.mpxDacs
data = dacs.getListKeys()
attr.set_value(data)
# Read the chip dac value, named by the dac_name attribute
# For multichips only a unique DAC is valid for all the chips
def read_dac_value(self,attr) :
data = 0
dacs = _MaxipixAcq.mpxDacs
data = dacs.getOneDac(0,self.__dacname)
        # if all the chips don't have the same dac value
        # None is returned; typically this is the case for thl
        if data is None: data = -1
attr.set_value(data)
## @brief Write a DAC value of the named dac_name attribute
#
def write_dac_value(self,attr) :
data = attr.get_write_value()
dacs = _MaxipixAcq.mpxDacs
dacs.setOneDac(0,self.__dacname, data)
dacs.applyChipDacs(0)
    ## @brief Read the threshold noise of the maxipix chips
#
def read_threshold_noise(self,attr) :
dac = _MaxipixAcq.mpxDacs
thlNoises = dac.getThlNoise(0)
attr.set_value(thlNoises,len(thlNoises))
    ## @brief Write the threshold noise of the maxipix chips
#
def write_threshold_noise(self,attr) :
data = attr.get_write_value()
dacs = _MaxipixAcq.mpxDacs
dacs.setThlNoise(0,data)
dacs.applyChipDacs(0)
## @brief Read the global threshold
#
def read_threshold(self,attr) :
dacs = _MaxipixAcq.mpxDacs
thl = dacs.getThl()
if thl is None: thl = -1
attr.set_value(thl)
## @brief Write the global threshold
#
def write_threshold(self,attr) :
data = attr.get_write_value()
dacs = _MaxipixAcq.mpxDacs
dacs.setThl(data)
dacs.applyChipDacs(0)
## @brief Read the energy step
#
    # energy step is the coefficient which links the global threshold to the
    # energy threshold
#
def read_energy_calibration(self,attr) :
dacs = _MaxipixAcq.mpxDacs
        values = dacs.getECalibration()
attr.set_value(values,len(values))
## @brief Write the energy step
#
def write_energy_calibration(self,attr) :
data = attr.get_write_value()
dacs = _MaxipixAcq.mpxDacs
dacs.setECalibration(data)
## @brief Read the energy threshold
#
# energy_threshold = energy_step * threshold (global)
def read_energy_threshold(self,attr) :
        dacs = _MaxipixAcq.mpxDacs
value = dacs.getEThl()
if value is None: value = -1
attr.set_value(value)
## @brief Write the energy threshold
#
def write_energy_threshold(self,attr) :
data = attr.get_write_value()
dacs = _MaxipixAcq.mpxDacs
dacs.setEThl(data)
dacs.applyChipDacs(0)
## @brief read the config name
#
def read_config_name(self,attr) :
cfg_name = ""
if self.config_name:
cfg_name = self.config_name
attr.set_value(cfg_name)
## @brief Write the config name and load it
#
def write_config_name(self,attr) :
data = attr.get_write_value()
_MaxipixAcq.loadConfig(data)
self.config_name = data
## @brief read the config path
#
def read_config_path(self,attr) :
cfg_path = ""
if self.config_path:
cfg_path = self.config_path
attr.set_value(cfg_path)
## @brief Write the config path
#
def write_config_path(self,attr) :
data = attr.get_write_value()
_MaxipixAcq.setPath(data)
self.config_path = data
## @brief read the fill mode
#
def read_fill_mode(self,attr) :
fill_mode = self.__getMaxipixAttr('fill_mode')
attr.set_value(fill_mode)
## @brief Write the gap fill mode
#
def write_fill_mode(self,attr) :
data = attr.get_write_value()
self.__setMaxipixAttr('fill_mode',data)
## @brief read the board id
#
def read_espia_dev_nb(self,attr) :
espia_dev_nb = 0
if self.espia_dev_nb:
espia_dev_nb = self.espia_dev_nb
attr.set_value(espia_dev_nb)
## @brief read the ready_mode
# EXPOSURE-0, EXPOSURE_READOUT-1
def read_ready_mode(self,attr) :
ready_mode = self.__getMaxipixAttr('ready_mode')
attr.set_value(ready_mode)
## @brief Write the ready_mode
# EXPOSURE-0, EXPOSURE_READOUT-1
def write_ready_mode(self,attr) :
data = attr.get_write_value()
self.__setMaxipixAttr('ready_mode',data)
## @brief read the ready_level
# LOW_FALL-0, HIGH_RISE-1
def read_ready_level(self,attr) :
ready_level = self.__getMaxipixAttr('ready_level')
attr.set_value(ready_level)
## @brief Write the ready_level
# LOW_FALL-0, HIGH_RISE-1
def write_ready_level(self,attr) :
data = attr.get_write_value()
self.__setMaxipixAttr('ready_level',data)
## @brief read the shutter_level
# LOW_FALL-0, HIGH_RISE-1
def read_shutter_level(self,attr) :
shutter_level = self.__getMaxipixAttr('shutter_level')
attr.set_value(shutter_level)
## @brief Write the shutter_level
# LOW_FALL-0, HIGH_RISE-1
def write_shutter_level(self,attr) :
data = attr.get_write_value()
self.__setMaxipixAttr('shutter_level',data)
## @brief read the gate_mode
# FRAME-0, SEQUENCE-1
def read_gate_mode(self,attr) :
gate_mode = self.__getMaxipixAttr('gate_mode')
attr.set_value(gate_mode)
## @brief Write the gate_mode
# FRAME-0, SEQUENCE-1
def write_gate_mode(self,attr) :
data = attr.get_write_value()
self.__setMaxipixAttr('gate_mode',data)
## @brief read the gate_level
# LOW_FALL-0, HIGH_RISE-1
def read_gate_level(self,attr) :
gate_level = self.__getMaxipixAttr('gate_level')
attr.set_value(gate_level)
## @brief Write the gate_level
# LOW_FALL-0, HIGH_RISE-1
def write_gate_level(self,attr) :
data = attr.get_write_value()
self.__setMaxipixAttr('gate_level',data)
## @brief read the trigger_level
# LOW_FALL-0, HIGH_RISE-1
def read_trigger_level(self,attr) :
trigger_level = self.__getMaxipixAttr('trigger_level')
attr.set_value(trigger_level)
## @brief Write the trigger_level
# LOW_FALL-0, HIGH_RISE-1
def write_trigger_level(self,attr) :
data = attr.get_write_value()
self.__setMaxipixAttr('trigger_level',data)
#==================================================================
#
# Maxipix command methods
#
#==================================================================
#------------------------------------------------------------------
# getAttrStringValueList command:
#
# Description: return a list of authorized values if any
# argout: DevVarStringArray
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def getAttrStringValueList(self, attr_name):
valueList = self.__getValueList(attr_name)
return valueList
#------------------------------------------------------------------
# setDebugFlags command:
#
    # Description: Set the debug flags from a packed 32-bit value
    # argin:  DevULong
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def setDebugFlags(self, deb_flags):
deb_flags &= 0xffffffff
deb.Param('Setting debug flags: 0x%08x' % deb_flags)
Core.DebParams.setTypeFlags((deb_flags >> 16) & 0xff)
Core.DebParams.setModuleFlags((deb_flags >> 0) & 0xffff)
deb.Trace('FormatFlags: %s' % Core.DebParams.getFormatFlagsNameList())
deb.Trace('TypeFlags: %s' % Core.DebParams.getTypeFlagsNameList())
deb.Trace('ModuleFlags: %s' % Core.DebParams.getModuleFlagsNameList())
#------------------------------------------------------------------
# getDebugFlags command:
#
    # Description: Get the debug flags as a packed 32-bit value
    # argout: DevULong
#------------------------------------------------------------------
@Core.DEB_MEMBER_FUNCT
def getDebugFlags(self):
deb.Trace('FormatFlags: %s' % Core.DebParams.getFormatFlagsNameList())
deb.Trace('TypeFlags: %s' % Core.DebParams.getTypeFlagsNameList())
deb.Trace('ModuleFlags: %s' % Core.DebParams.getModuleFlagsNameList())
deb_flags = (((Core.DebParams.getTypeFlags() & 0xff) << 16) |
((Core.DebParams.getModuleFlags() & 0xffff) << 0))
deb_flags &= 0xffffffff
deb.Return('Getting debug flags: 0x%08x' % deb_flags)
return deb_flags
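
    # Worked example of the flag packing used above (values illustrative):
    # type flags 0x03 and module flags 0x0041 pack to
    # ((0x03 & 0xff) << 16) | (0x0041 & 0xffff) = 0x00030041,
    # and getDebugFlags() unpacks that word back into the two fields.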
class MaxipixClass(PyTango.DeviceClass):
class_property_list = {}
device_property_list = {
'espia_dev_nb':
[PyTango.DevShort,
"Espia board device number",[]],
'config_path':
[PyTango.DevString,
"Path where configuration files are",[]],
'config_name':
[PyTango.DevString,
"The default configuration loaded",[]],
'fill_mode':
[PyTango.DevString,
"The default configuration loaded",[]],
'ready_level':
[PyTango.DevString,
"The ready output signal level",[]],
'gate_level':
[PyTango.DevString,
"The gate output signal level",[]],
'shutter_level':
[PyTango.DevString,
"The shutter output signal level",[]],
'trigger_level':
[PyTango.DevString,
"The trigger output signal level",[]],
'ready_mode':
[PyTango.DevString,
"The ready output signal level",[]],
'gate_mode':
[PyTango.DevString,
"The gate output signal level",[]],
}
cmd_list = {
'getAttrStringValueList':
[[PyTango.DevString, "Attribute name"],
[PyTango.DevVarStringArray, "Authorized String value list"]],
'getDebugFlags':
[[PyTango.DevVoid, ""],
[PyTango.DevULong, "Debug flag in HEX format"]],
'setDebugFlags':
[[PyTango.DevULong, "Debug flag in HEX format"],
[PyTango.DevVoid, ""]],
}
attr_list = {
'threshold_noise':
[[PyTango.DevLong,
PyTango.SPECTRUM,
PyTango.READ_WRITE,5],
{
'label':"Threshold (thlow) noise of chips",
'unit':"N/A",
'format':"%6d",
'description':"Threshold (thlow) noise of the chip(s)",
}],
'threshold':
[[PyTango.DevLong,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"Global Threshold ",
'unit':"N/A",
'format':"%6d",
'description':"The global threshold, apply the same offset on all the chips",
}],
'energy_calibration':
[[PyTango.DevDouble,
PyTango.SPECTRUM,
PyTango.READ_WRITE,2],
{
'label':"Energy calibration",
'unit':"N/A",
'format':"%5.2f",
'description':"[0] = e0thl, [1] = estep: ethl=(thl-e0thl)*estep",
}],
'energy_threshold':
[[PyTango.DevDouble,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"Energy thresholds",
'unit':"keV",
'format':"%5.2f",
'description':"Threshold in energy (keV)",
}],
'config_name':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"Configuration name",
'unit':"N/A",
'format':"",
'description':"root name of the configuration files",
}],
'config_path':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"Configuration directory path",
'unit':"N/A",
'format':"",
'description':"Path of the configuration directory",
}],
'fill_mode':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"Fill mode",
'unit':"enum.",
'format':"",
'description':"Between chip filling mode",
}],
'espia_dev_nb':
[[PyTango.DevShort,
PyTango.SCALAR,
PyTango.READ],
{
'label':"Espia board number",
'unit':"number",
'format':"",
'description':"The Espia board device number",
}],
'ready_mode':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"Ready output mode",
'unit':"enum.",
'format':"",
'description':"Mode of the Ready output",
}],
'ready_level':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"Ready output level",
'unit':"enum.",
'format':"",
'description':"The level logic of the Ready output",
}],
'shutter_level':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"Shutter output level",
'unit':"enum.",
'format':"",
'description':"The level logic of the Shutter output",
}],
'gate_mode':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"The Gate input mode",
'unit':"enum.",
'format':"",
'description':"",
}],
'gate_level':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"",
'unit':"",
'format':"",
'description':"",
}],
'trigger_level':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"",
'unit':"",
'format':"",
'description':"",
}],
'dac_possible':
[[PyTango.DevString,
PyTango.SPECTRUM,
PyTango.READ,17],
{
'label':"",
'unit':"",
'format':"",
'description':"",
}],
'dac_name':
[[PyTango.DevString,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"",
'unit':"",
'format':"",
'description':"",
}],
'dac_value':
[[PyTango.DevLong,
PyTango.SCALAR,
PyTango.READ_WRITE],
{
'label':"",
'unit':"",
'format':"%xd",
'description':"",
}],
}
def __init__(self,name) :
PyTango.DeviceClass.__init__(self,name)
self.set_type(name)
#----------------------------------------------------------------------------
# Plugins
#----------------------------------------------------------------------------
from Lima.Maxipix.MpxAcq import MpxAcq
_MaxipixAcq = None
def get_control(espia_dev_nb = '0',**keys) :
#properties are passed here as string
global _MaxipixAcq
if _MaxipixAcq is None:
_MaxipixAcq = MpxAcq(int(espia_dev_nb))
return _MaxipixAcq.getControl()
def close_interface() :
global _MaxipixAcq
_MaxipixAcq = None
def get_tango_specific_class_n_device():
return MaxipixClass,Maxipix
| gpl-3.0 | -6,498,890,244,258,648,000 | 31.944801 | 122 | 0.501364 | false |
SanketDG/networkx | networkx/generators/tests/test_random_graphs.py | 1 | 5088 | #!/usr/bin/env python
from nose.tools import *
from networkx import *
from networkx.generators.random_graphs import *
class TestGeneratorsRandom():
def smoke_test_random_graph(self):
seed = 42
G=gnp_random_graph(100,0.25,seed)
G=binomial_graph(100,0.25,seed)
G=erdos_renyi_graph(100,0.25,seed)
G=fast_gnp_random_graph(100,0.25,seed)
G=gnm_random_graph(100,20,seed)
G=dense_gnm_random_graph(100,20,seed)
G=watts_strogatz_graph(10,2,0.25,seed)
assert_equal(len(G), 10)
assert_equal(G.number_of_edges(), 10)
G=connected_watts_strogatz_graph(10,2,0.1,seed)
assert_equal(len(G), 10)
assert_equal(G.number_of_edges(), 10)
G=watts_strogatz_graph(10,4,0.25,seed)
assert_equal(len(G), 10)
assert_equal(G.number_of_edges(), 20)
G=newman_watts_strogatz_graph(10,2,0.0,seed)
assert_equal(len(G), 10)
assert_equal(G.number_of_edges(), 10)
G=newman_watts_strogatz_graph(10,4,0.25,seed)
assert_equal(len(G), 10)
assert_true(G.number_of_edges() >= 20)
G=barabasi_albert_graph(100,1,seed)
G=barabasi_albert_graph(100,3,seed)
assert_equal(G.number_of_edges(),(97*3))
G=powerlaw_cluster_graph(100,1,1.0,seed)
G=powerlaw_cluster_graph(100,3,0.0,seed)
assert_equal(G.number_of_edges(),(97*3))
G=duplication_divergence_graph(100,1.0,seed)
assert_equal(len(G), 100)
assert_raises(networkx.exception.NetworkXError,
duplication_divergence_graph, 100, 2)
assert_raises(networkx.exception.NetworkXError,
duplication_divergence_graph, 100, -1)
G=random_regular_graph(10,20,seed)
assert_raises(networkx.exception.NetworkXError,
random_regular_graph, 3, 21)
constructor=[(10,20,0.8),(20,40,0.8)]
G=random_shell_graph(constructor,seed)
G=nx.random_lobster(10,0.1,0.5,seed)
def test_random_zero_regular_graph(self):
"""Tests that a 0-regular graph has the correct number of nodes and
edges.
"""
G = random_regular_graph(0, 10)
assert_equal(len(G), 10)
assert_equal(sum(1 for _ in G.edges()), 0)
def test_gnp(self):
for generator in [gnp_random_graph, binomial_graph, erdos_renyi_graph,
fast_gnp_random_graph]:
G = generator(10, -1.1)
assert_equal(len(G), 10)
assert_equal(sum(1 for _ in G.edges()), 0)
G = generator(10, 0.1)
assert_equal(len(G), 10)
G = generator(10, 0.1, seed=42)
assert_equal(len(G), 10)
G = generator(10, 1.1)
assert_equal(len(G), 10)
assert_equal(sum(1 for _ in G.edges()), 45)
G = generator(10, -1.1, directed=True)
assert_true(G.is_directed())
assert_equal(len(G), 10)
assert_equal(sum(1 for _ in G.edges()), 0)
G = generator(10, 0.1, directed=True)
assert_true(G.is_directed())
assert_equal(len(G), 10)
G = generator(10, 1.1, directed=True)
assert_true(G.is_directed())
assert_equal(len(G), 10)
assert_equal(sum(1 for _ in G.edges()), 90)
# assert that random graphs generate all edges for p close to 1
edges = 0
runs = 100
for i in range(runs):
edges += sum(1 for _ in generator(10, 0.99999, directed=True).edges())
assert_almost_equal(edges/float(runs), 90, delta=runs*2.0/100)
def test_gnm(self):
G=gnm_random_graph(10,3)
assert_equal(len(G),10)
assert_equal(sum(1 for _ in G.edges()), 3)
G=gnm_random_graph(10,3,seed=42)
assert_equal(len(G),10)
assert_equal(sum(1 for _ in G.edges()), 3)
G=gnm_random_graph(10,100)
assert_equal(len(G),10)
assert_equal(sum(1 for _ in G.edges()), 45)
G=gnm_random_graph(10,100,directed=True)
assert_equal(len(G),10)
assert_equal(sum(1 for _ in G.edges()),90)
G=gnm_random_graph(10,-1.1)
assert_equal(len(G),10)
assert_equal(sum(1 for _ in G.edges()),0)
def test_watts_strogatz_big_k(self):
assert_raises(networkx.exception.NetworkXError,
watts_strogatz_graph, 10, 10, 0.25)
assert_raises(networkx.exception.NetworkXError,
newman_watts_strogatz_graph, 10, 10, 0.25)
# could create an infinite loop, now doesn't
# infinite loop used to occur when a node has degree n-1 and needs to rewire
watts_strogatz_graph(10, 9, 0.25, seed=0)
newman_watts_strogatz_graph(10, 9, 0.5, seed=0)
def test_random_kernel_graph(self):
def integral(u, w, z):
return c*(z-w)
def root(u, w, r):
return r/c+w
c = 1
graph = random_kernel_graph(1000, integral, root)
assert_equal(len(graph), 1000)
| bsd-3-clause | 1,429,658,134,924,292,900 | 33.849315 | 86 | 0.56761 | false |
earwig/earwigbot | earwigbot/commands/help.py | 1 | 3230 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2009-2015 Ben Kurtovic <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from platform import python_version
import re
from earwigbot import __version__
from earwigbot.commands import Command
class Help(Command):
"""Displays information about the bot."""
name = "help"
commands = ["help", "version"]
def check(self, data):
if data.is_command:
if data.command in self.commands:
return True
if not data.command and data.trigger == data.my_nick:
return True
return False
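        # e.g. "!help" and "!version" match via data.command above, while
        # addressing the bot by its nick alone ("EarwigBot" is illustrative)
        # matches via data.trigger == data.my_nick.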
def process(self, data):
if not data.command:
self.do_hello(data)
elif data.command == "version":
self.do_version(data)
elif data.args:
self.do_command_help(data)
else:
self.do_main_help(data)
def do_main_help(self, data):
"""Give the user a general help message with a list of all commands."""
msg = "Hi, I'm a bot! I have {0} commands loaded: {1}. You can get help for any command with '!help <command>'."
cmnds = sorted([cmnd.name for cmnd in self.bot.commands])
msg = msg.format(len(cmnds), ', '.join(cmnds))
self.reply(data, msg)
def do_command_help(self, data):
"""Give the user help for a specific command."""
target = data.args[0]
for command in self.bot.commands:
if command.name == target or target in command.commands:
if command.__doc__:
doc = command.__doc__.replace("\n", "")
doc = re.sub(r"\s\s+", " ", doc)
msg = 'Help for command \x0303{0}\x0F: "{1}"'
self.reply(data, msg.format(target, doc))
return
msg = "Sorry, no help for \x0303{0}\x0F.".format(target)
self.reply(data, msg)
def do_hello(self, data):
self.say(data.chan, "Yes, {0}?".format(data.nick))
def do_version(self, data):
vers = "EarwigBot v{bot} on Python {python}: https://github.com/earwig/earwigbot"
self.reply(data, vers.format(bot=__version__, python=python_version()))
| mit | -3,154,985,194,490,449,000 | 39.375 | 120 | 0.635913 | false |
qPCR4vir/orange3 | Orange/widgets/classify/owclassificationtree.py | 1 | 2938 | from collections import OrderedDict
from Orange.data import Table
from Orange.classification.tree import TreeLearner
from Orange.widgets import gui
from Orange.widgets.settings import Setting
from Orange.widgets.utils.owlearnerwidget import OWBaseLearner
class OWClassificationTree(OWBaseLearner):
name = "Classification Tree"
icon = "icons/ClassificationTree.svg"
description = "Classification tree algorithm with forward pruning."
priority = 30
LEARNER = TreeLearner
attribute_score = Setting(0)
limit_min_leaf = Setting(True)
min_leaf = Setting(2)
limit_min_internal = Setting(True)
min_internal = Setting(5)
limit_depth = Setting(True)
max_depth = Setting(100)
scores = (("Entropy", "entropy"), ("Gini Index", "gini"))
def add_main_layout(self):
gui.comboBox(self.controlArea, self, "attribute_score",
box='Feature Selection',
items=[name for name, _ in self.scores],
callback=self.settings_changed)
box = gui.vBox(self.controlArea, 'Pruning')
gui.spin(box, self, "min_leaf", 1, 1000,
label="Min. instances in leaves: ", checked="limit_min_leaf",
callback=self.settings_changed)
gui.spin(box, self, "min_internal", 1, 1000,
label="Stop splitting nodes with less instances than: ",
checked="limit_min_internal",
callback=self.settings_changed)
gui.spin(box, self, "max_depth", 1, 1000,
label="Limit the depth to: ", checked="limit_depth",
callback=self.settings_changed)
def create_learner(self):
return self.LEARNER(
criterion=self.scores[self.attribute_score][1],
max_depth=self.max_depth if self.limit_depth else None,
min_samples_split=(self.min_internal if self.limit_min_internal
else 2),
min_samples_leaf=(self.min_leaf if self.limit_min_leaf else 1),
preprocessors=self.preprocessors
)
def get_learner_parameters(self):
from Orange.canvas.report import plural_w
items = OrderedDict()
items["Split selection"] = self.scores[self.attribute_score][0]
items["Pruning"] = ", ".join(s for s, c in (
(plural_w("at least {number} instance{s} in leaves", self.min_leaf),
self.limit_min_leaf),
(plural_w("at least {number} instance{s} in internal nodes", self.min_internal),
self.limit_min_internal),
("maximum depth {}".format(self.max_depth), self.limit_depth)) if c) or "None"
return items
if __name__ == "__main__":
import sys
from PyQt4.QtGui import QApplication
a = QApplication(sys.argv)
ow = OWClassificationTree()
d = Table('iris')
ow.set_data(d)
ow.show()
a.exec_()
ow.saveSettings()
| bsd-2-clause | 3,757,405,505,065,211,000 | 36.666667 | 92 | 0.614364 | false |
saltastro/pysalt | saltspec/InterIdentify.py | 1 | 46552 | # Copyright (c) 2009, South African Astronomical Observatory (SAAO) #
# All rights reserved. See LICENSE for more details #
"""INTERIDENTIFY provides an interactive method for identifying
lines in an arc image. The task displays the full image, a
line extracted from the image, and the residuals of the fit to that line.
The task displays the total image so the user can select the lines
to be fit, or the user can automate the process so only certain lines are
fit by hand. On the next tab, the task displays the arc line
and the fit to the line, including which lines have been detected and
are being used for the fit. Finally the task displays the residuals of
the fit, and the user can select different options to be displayed.
Author Version Date
-----------------------------------------------
S. M. Crawford (SAAO) 1.0 10 Oct 2009
TODO
----
LIMITATIONS
-----------
"""
# Ensure Python 2.5 compatibility
from __future__ import with_statement
# General imports
import os
import sys
import copy
import numpy as np
import pyfits
from pyraf import iraf
from pyraf.iraf import pysalt
# Gui library imports
from PyQt4 import QtGui, QtCore
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT
# Salt imports
import saltsafeio
from saltgui import ImageDisplay, MplCanvas
from salterror import SaltIOError
from PySpectrograph.Spectra import Spectrum, apext
import WavelengthSolution
import spectools as st
import AutoIdentify as ai
from spectools import SALTSpecError
class InterIdentifyWindow(QtGui.QMainWindow):
"""Main application window."""
def __init__(self, xarr, specarr, slines, sfluxes, ws, hmin=150, wmin=400, mdiff=20,
filename=None, res=2.0, dres=0.1, dc=20, ndstep=20, sigma=5, smooth=0, niter=5, istart=None,
nrows=1, rstep=100, method='Zeropoint', ivar=None, cmap='gray', scale='zscale', contrast=1.0,
subback=0, textcolor='green', preprocess=False, log=None, verbose=True):
"""Default constructor."""
# set up the variables
if istart is None:
self.y1 = int(0.5 * len(specarr))
else:
self.y1 = istart
self.y2 = self.y1 + nrows
self.specarr = specarr
self.xarr = xarr
self.ivar = ivar
self.slines = slines
self.sfluxes = sfluxes
self.hmin = hmin
self.wmin = wmin
self.ws = ws
self.res = res
self.dres = dres
self.mdiff = mdiff
self.sigma = sigma
self.niter = int(niter)
self.nrows = nrows
self.rstep = rstep
self.dc = dc
self.ndstep = ndstep
self.method = method
self.cmap = cmap
self.scale = scale
self.contrast = contrast
self.smooth = smooth
self.subback = subback
self.filename = filename
self.ImageSolution = {}
self.textcolor = textcolor
self.preprocess = preprocess
self.log = log
self.verbose = verbose
# Setup widget
QtGui.QMainWindow.__init__(self)
# Set main widget
self.main = QtGui.QWidget(self)
# Set window title
self.setWindowTitle("InterIdentify")
# create the Image page
self.imagePage = imageWidget(self.specarr, y1=self.y1, y2=self.y2, hmin=self.hmin, wmin=self.wmin, cmap=self.cmap,
rstep=self.rstep, name=self.filename, scale=self.scale, contrast=self.contrast, log=self.log)
# set up the arc page
self.farr = apext.makeflat(self.specarr, self.y1, self.y2)
self.farr = st.flatspectrum(self.xarr, self.farr, order=self.subback)
# set up variables
self.arcdisplay = ArcDisplay(xarr, self.farr, slines, sfluxes, self.ws, specarr=self.specarr,
res=self.res, dres=self.dres, dc=self.dc, ndstep=self.ndstep, xp=[], wp=[],
method=self.method, smooth=self.smooth, niter=self.niter, mdiff=self.mdiff,
sigma=self.sigma, textcolor=self.textcolor, preprocess=self.preprocess,
log=self.log, verbose=self.verbose)
self.arcPage = arcWidget(
self.arcdisplay,
hmin=hmin,
wmin=wmin,
y1=self.y1,
y2=self.y2,
name=self.filename)
# set up the residual page
self.errPage = errWidget(self.arcdisplay, hmin=hmin, wmin=wmin)
# create the tabs
self.tabWidget = QtGui.QTabWidget()
self.tabWidget.addTab(self.imagePage, 'Image')
self.tabWidget.addTab(self.arcPage, 'Arc')
self.tabWidget.addTab(self.errPage, 'Residual')
# layout the widgets
mainLayout = QtGui.QVBoxLayout(self.main)
mainLayout.addWidget(self.tabWidget)
# self.setLayout(mainLayout)
# Set focus to main widget
# self.main.setFocus()
# Set the main widget as the central widget
self.setCentralWidget(self.main)
# Destroy widget on close
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
# Close when config dialog is closed
# self.connect(self.conf, QtCore.SIGNAL('destroyed()'),
# self, QtCore.SLOT('close()'))
self.connect(self.tabWidget, QtCore.SIGNAL('currentChanged(int)'),
self.currentChanged)
self.connect(self.imagePage, QtCore.SIGNAL('regionChange(int,int)'),
self.regionChange)
self.connect(self.imagePage, QtCore.SIGNAL('runauto(int, int, int)'),
self.runauto)
self.connect(self.arcPage, QtCore.SIGNAL('savews()'), self.saveWS)
self.connect(self.arcdisplay, QtCore.SIGNAL('quit()'), self.close)
    def keyPressEvent(self, event):
        # this receives a Qt QKeyEvent, so compare the integer key code
        if event.key() == QtCore.Qt.Key_Q:
            self.close()
def currentChanged(self, event):
# print event
pass
def regionChange(self, y1, y2):
self.y1 = y1
self.y2 = y2
self.farr = apext.makeflat(self.specarr, self.y1, self.y2)
self.farr = st.flatspectrum(self.xarr, self.farr, order=self.subback)
# set up variables
self.ws = self.newWS(0.5 * (self.y1 + self.y2))
self.arcdisplay = ArcDisplay(
self.xarr,
self.farr,
self.slines,
self.sfluxes,
self.ws,
specarr=self.specarr,
res=self.res,
dres=self.dres,
smooth=self.smooth,
niter=self.niter,
sigma=self.sigma,
xp=[],
wp=[],
textcolor=self.textcolor,
preprocess=self.preprocess,
log=self.log,
verbose=self.verbose)
self.arcPage = arcWidget(
self.arcdisplay,
hmin=self.hmin,
wmin=self.wmin,
y1=self.y1,
y2=self.y2)
self.connect(self.arcPage, QtCore.SIGNAL('savews()'), self.saveWS)
# set up the residual page
self.errPage = errWidget(
self.arcdisplay,
hmin=self.hmin,
wmin=self.wmin)
# reset the pages
self.tabWidget.removeTab(2)
self.tabWidget.removeTab(1)
self.tabWidget.insertTab(1, self.arcPage, 'Arc')
self.tabWidget.insertTab(2, self.errPage, 'Residual')
def saveWS(self):
self.ws = self.arcdisplay.ws
value = 0.0
k = 0.5 * (self.y1 + self.y2)
xp = np.array(self.arcdisplay.xp)
wp = np.array(self.arcdisplay.wp)
        if len(xp) > 0:
w = self.arcdisplay.ws.value(xp)
value = (wp - w).std()
if self.log is not None:
msg = 'Saving WS value for row %i with rms=%f for %i lines' % (
k, value, len(self.arcdisplay.wp))
self.log.message(msg)
# create a new wavelength solution
nws = copy.deepcopy(self.ws)
        if len(xp) > 0:
nws = WavelengthSolution.WavelengthSolution(
self.ws.x_arr,
self.ws.w_arr,
order=self.ws.order,
function=self.ws.function)
nws.func.func.domain = self.ws.func.func.domain
try:
nws.fit()
except Exception as e:
if self.log is not None:
self.log.warning(
"Unable to save wavelength solution because %s" %
e)
return
self.ImageSolution[k] = nws
# for k in self.ImageSolution: print k,self.ImageSolution[k].coef
def newWS(self, y):
"""Determine the WS closest to the values given by y1 and y2"""
keys = np.array(self.ImageSolution.keys())
try:
i = abs(keys - y).argmin()
ws = self.ImageSolution[keys[i]]
nws = WavelengthSolution.WavelengthSolution(
ws.x_arr,
ws.w_arr,
order=ws.order,
function=ws.function)
nws.func.func.domain = ws.domain
nws.fit()
return nws
        except Exception:
return self.ws
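
        # Worked example of the nearest-row lookup above: with solutions
        # stored at rows {100, 200, 300}, a request for y=260 starts from
        # the row-300 fit (|300 - 260| = 40 is the smallest distance).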
def runauto(self, istart, nrows, rstep):
""" Autoidentify the rest of the lines and produce the image solution"""
self.ImageSolution = self.arcdisplay.autoidentify(
istart=istart,
nrows=nrows,
rstep=rstep,
oneline=False)
class imageWidget(QtGui.QWidget):
def __init__(self, imarr, y1=None, y2=None, nrows=1, rstep=100, hmin=150, wmin=400,
name=None, cmap='Gray', scale='zscale', contrast=0.1, log=None, parent=None):
super(imageWidget, self).__init__(parent)
self.y1 = y1
self.y2 = y2
self.x1 = 0
self.x2 = len(imarr[0])
self.nrows = nrows
self.rstep = rstep
self.log = log
# Add FITS display widget with mouse interaction and overplotting
self.imdisplay = ImageDisplay()
self.imdisplay.setMinimumHeight(hmin)
self.imdisplay.setMinimumWidth(wmin)
# Set colormap
self.imdisplay.setColormap(cmap)
# Set scale mode for dynamic range
self.imdisplay.scale = scale
self.imdisplay.contrast = contrast
self.imdisplay.aspect = 'auto'
self.imdisplay.loadImage(imarr)
self.imdisplay.drawImage()
self.y1line, = self.imdisplay.axes.plot(
[self.x1, self.x2], [self.y1, self.y1], ls='-', color='#00FF00')
self.y2line, = self.imdisplay.axes.plot(
[self.x1, self.x2], [self.y2, self.y2], ls='-', color='#00FF00')
# Add navigation toolbars for each widget to enable zooming
self.toolbar = NavigationToolbar2QT(self.imdisplay, self)
# set up the information panel
self.infopanel = QtGui.QWidget()
# add the name of the file
self.NameLabel = QtGui.QLabel("Filename:")
self.NameLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.NameValueLabel = QtGui.QLabel("%s" % name)
self.NameValueLabel.setFrameStyle(
QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
# add the rows that are extracted
self.y1Label = QtGui.QLabel("Y1:")
self.y1Label.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.y1ValueEdit = QtGui.QLineEdit("%6i" % self.y1)
self.y2Label = QtGui.QLabel("Y2:")
self.y2Label.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.y2ValueEdit = QtGui.QLineEdit("%6i" % self.y2)
self.updateButton = QtGui.QPushButton("Update")
self.updateButton.clicked.connect(self.updatesection)
# add the update for automatically updating it
self.nrLabel = QtGui.QLabel("nrows:")
self.nrLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.nrValueEdit = QtGui.QLineEdit("%5i" % self.nrows)
self.nsLabel = QtGui.QLabel("rstep:")
self.nsLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.nsValueEdit = QtGui.QLineEdit("%6i" % self.rstep)
self.nextButton = QtGui.QPushButton("Next")
self.nextButton.clicked.connect(self.nextsection)
self.autoButton = QtGui.QPushButton("Auto-Identify")
self.autoButton.clicked.connect(self.runauto)
# set up the info panel layout
infoLayout = QtGui.QGridLayout(self.infopanel)
infoLayout.addWidget(self.NameLabel, 0, 0, 1, 1)
infoLayout.addWidget(self.NameValueLabel, 0, 1, 1, 5)
infoLayout.addWidget(self.y1Label, 1, 0, 1, 1)
infoLayout.addWidget(self.y1ValueEdit, 1, 1, 1, 1)
infoLayout.addWidget(self.y2Label, 1, 2, 1, 1)
infoLayout.addWidget(self.y2ValueEdit, 1, 3, 1, 1)
infoLayout.addWidget(self.updateButton, 1, 4, 1, 1)
infoLayout.addWidget(self.nrLabel, 2, 0, 1, 1)
infoLayout.addWidget(self.nrValueEdit, 2, 1, 1, 1)
infoLayout.addWidget(self.nsLabel, 2, 2, 1, 1)
infoLayout.addWidget(self.nsValueEdit, 2, 3, 1, 1)
infoLayout.addWidget(self.nextButton, 2, 4, 1, 1)
infoLayout.addWidget(self.autoButton, 3, 0, 1, 1)
# Set up the layout
mainLayout = QtGui.QVBoxLayout()
mainLayout.addWidget(self.imdisplay)
mainLayout.addWidget(self.toolbar)
mainLayout.addWidget(self.infopanel)
self.setLayout(mainLayout)
def updatesection(self):
self.y1 = int(self.y1ValueEdit.text())
self.y2 = int(self.y2ValueEdit.text())
self.nrows = int(self.nrValueEdit.text())
self.rstep = int(self.nsValueEdit.text())
if abs(self.y1 - self.y2) != self.nrows:
if self.log:
self.log.warning(
"Warning: Update y2 to increase the row sampling")
self.y1line.set_ydata([self.y1, self.y1])
self.y2line.set_ydata([self.y2, self.y2])
self.imdisplay.draw()
self.emit(QtCore.SIGNAL("regionChange(int,int)"), self.y1, self.y2)
def nextsection(self):
self.nrows = int(self.nrValueEdit.text())
self.rstep = int(self.nsValueEdit.text())
self.y1 = self.y1 + self.rstep
self.y2 = self.y1 + self.nrows
self.y1ValueEdit.setText('%6i' % self.y1)
self.y2ValueEdit.setText('%6i' % self.y2)
self.updatesection()
def runauto(self):
if self.log is not None:
self.log.message("Running Auto")
self.emit(
QtCore.SIGNAL("runauto(int, int, int)"),
self.y1,
self.nrows,
self.rstep)
class arcWidget(QtGui.QWidget):
def __init__(self, arcdisplay, hmin=150, wmin=450, name=None,
x1=0, w1=0, y1=None, y2=None, parent=None):
super(arcWidget, self).__init__(parent)
# Add FITS display widget with mouse interaction and overplotting
self.arcdisplay = arcdisplay
self.arcdisplay.arcfigure.setMinimumHeight(hmin)
self.arcdisplay.arcfigure.setMinimumWidth(wmin)
self.arcdisplay.plotArc()
# Add navigation toolbars for each widget to enable zooming
self.toolbar = NavigationToolbar2QT(self.arcdisplay.arcfigure, self)
# set up the information panel
self.infopanel = QtGui.QWidget()
# add the name of the file
self.NameLabel = QtGui.QLabel("Filename:")
self.NameLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.NameValueLabel = QtGui.QLabel("%s" % name)
self.NameValueLabel.setFrameStyle(
QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
# add the rows that are extracted
self.y1Label = QtGui.QLabel("Y1:")
self.y1Label.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.y1ValueLabel = QtGui.QLabel("%6i" % y1)
self.y1ValueLabel.setFrameStyle(
QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
self.y2Label = QtGui.QLabel("Y2:")
self.y2Label.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.y2ValueLabel = QtGui.QLabel("%6i" % y2)
self.y2ValueLabel.setFrameStyle(
QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
# add in what the value is for a x and w position
self.x1Label = QtGui.QLabel("X1:")
self.x1Label.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.w1Label = QtGui.QLabel("w1:")
self.w1Label.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.x1ValueLabel = QtGui.QLabel("%6.2f" % x1)
self.x1ValueLabel.setFrameStyle(
QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
w1 = self.arcdisplay.ws.value(x1)
self.w1ValueEdit = QtGui.QLineEdit("%6i" % w1)
self.addButton = QtGui.QPushButton("Add")
self.addButton.clicked.connect(self.addpoints)
# add in radio buttons for pixel or wavelength
self.pixelradio = QtGui.QRadioButton("Pixel")
self.wavelengthradio = QtGui.QRadioButton("Wavelength")
self.pixelradio.setChecked(True)
# add in information about the order and type of solution
self.funcLabel = QtGui.QLabel("Function:")
self.funcLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.funcComboBox = QtGui.QComboBox()
self.funcComboBox.addItems(self.arcdisplay.ws.func_options)
self.funcComboBox.setCurrentIndex(
self.arcdisplay.ws.func_options.index(
self.arcdisplay.ws.function))
# self.funcComboBox."%s" % self.arcdisplay.ws.function)
self.orderLabel = QtGui.QLabel("Order:")
self.orderLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.orderValueEdit = QtGui.QLineEdit("%2i" % self.arcdisplay.ws.order)
self.updateButton = QtGui.QPushButton("Update")
self.updateButton.clicked.connect(self.updatefunction)
# provide a method for automatically fitting the line
self.methodComboBox = QtGui.QComboBox()
self.methodComboBox.addItems(ai.autoidentify_options)
self.methodComboBox.setCurrentIndex(
ai.autoidentify_options.index(
self.arcdisplay.method))
self.runButton = QtGui.QPushButton("Run")
self.runButton.clicked.connect(self.runauto)
self.saveButton = QtGui.QPushButton("Save")
self.saveButton.clicked.connect(self.savews)
# provide the full layout of the information panel
infoLayout = QtGui.QGridLayout(self.infopanel)
infoLayout.addWidget(self.NameLabel, 0, 0, 1, 1)
infoLayout.addWidget(self.NameValueLabel, 0, 1, 1, 5)
infoLayout.addWidget(self.y1Label, 1, 0, 1, 1)
infoLayout.addWidget(self.y1ValueLabel, 1, 1, 1, 1)
infoLayout.addWidget(self.y2Label, 1, 2, 1, 1)
infoLayout.addWidget(self.y2ValueLabel, 1, 3, 1, 1)
infoLayout.addWidget(self.x1Label, 2, 0, 1, 1)
infoLayout.addWidget(self.x1ValueLabel, 2, 1, 1, 1)
infoLayout.addWidget(self.w1Label, 2, 2, 1, 1)
infoLayout.addWidget(self.w1ValueEdit, 2, 3)
infoLayout.addWidget(self.addButton, 2, 4, 1, 1)
infoLayout.addWidget(self.funcLabel, 3, 0, 1, 1)
infoLayout.addWidget(self.funcComboBox, 3, 1, 1, 1)
infoLayout.addWidget(self.orderLabel, 3, 2, 1, 1)
infoLayout.addWidget(self.orderValueEdit, 3, 3, 1, 1)
infoLayout.addWidget(self.updateButton, 3, 4, 1, 1)
infoLayout.addWidget(self.methodComboBox, 4, 0, 1, 1)
infoLayout.addWidget(self.runButton, 4, 1, 1, 1)
infoLayout.addWidget(self.saveButton, 4, 4, 1, 1)
# infoLayout.addWidget(self.pixelradio, 3, 0, 1, 2)
# infoLayout.addWidget(self.wavelengthradio, 3, 2, 1, 2)
# Set up the layout
mainLayout = QtGui.QVBoxLayout()
mainLayout.addWidget(self.arcdisplay.arcfigure)
mainLayout.addWidget(self.toolbar)
mainLayout.addWidget(self.infopanel)
self.setLayout(mainLayout)
self.connect(
self.arcdisplay,
QtCore.SIGNAL('keyPressEvent'),
self.keyPressEvent)
self.connect(
self.arcdisplay,
QtCore.SIGNAL('updatex(float)'),
self.updatexlabel)
self.connect(
self.funcComboBox,
QtCore.SIGNAL('activated(QString)'),
self.updatefunction)
self.connect(
self.methodComboBox,
QtCore.SIGNAL('activated(QString)'),
self.updatemethod)
def keyPressEvent(self, event):
pass
# print "Arc Widget, keyPress:", event
def updatexlabel(self, value):
try:
self.x1ValueLabel.setText("%6.2f" % value)
self.w1ValueEdit.setText("%6.2f" % self.arcdisplay.ws.value(value))
except TypeError:
pass
def addpoints(self):
"""Add the x and w points to the list of matched points"""
x = float(self.x1ValueLabel.text())
w = float(self.w1ValueEdit.text())
# x=[1904.5, 1687.22, 3124.349, 632.5705]
# w=[4671.225, 4624.275, 4916.512, 4383.901]
self.arcdisplay.addpoints(x, w)
def updatefunction(self):
"""Update the values for the function"""
self.arcdisplay.ws.order = int(self.orderValueEdit.text())
self.arcdisplay.ws.function = self.funcComboBox.currentText()
self.arcdisplay.ws.set_func()
self.arcdisplay.findfit()
def updatemethod(self):
"""Update the values for the method for autoidenitfy"""
self.arcdisplay.method = self.methodComboBox.currentText()
def runauto(self):
"""Run autoidenity on one line"""
self.arcdisplay.dc = 0.5 * self.arcdisplay.rms * self.arcdisplay.ndstep
self.arcdisplay.autoidentify()
def savews(self):
"""Save the wcs to the """
self.emit(QtCore.SIGNAL("savews()"))
class errWidget(QtGui.QWidget):
def __init__(self, arcdisplay, hmin=150, wmin=450, name=None, parent=None):
super(errWidget, self).__init__(parent)
# Add FITS display widget with mouse interaction and overplotting
self.arcdisplay = arcdisplay
self.arcdisplay.errfigure.setMinimumHeight(hmin)
self.arcdisplay.errfigure.setMinimumWidth(wmin)
self.arcdisplay.plotErr()
# Add navigation toolbars for each widget to enable zooming
self.toolbar = NavigationToolbar2QT(self.arcdisplay.errfigure, self)
# set up the information panel
self.infopanel = QtGui.QWidget()
# add the name of the file
self.NameLabel = QtGui.QLabel("Filename:")
self.NameLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.NameValueLabel = QtGui.QLabel("%s" % name)
self.NameValueLabel.setFrameStyle(
QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
# add in the rejection parameters
self.sigmaLabel = QtGui.QLabel("Sigma:")
self.sigmaLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.sigmaValueEdit = QtGui.QLineEdit(
"%2.1f" %
self.arcdisplay.ws.thresh)
self.niterLabel = QtGui.QLabel("Niter:")
self.niterLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.niterValueEdit = QtGui.QLineEdit("%i" % self.arcdisplay.ws.niter)
self.rejectButton = QtGui.QPushButton("Reject")
self.rejectButton.clicked.connect(self.rejectpoints)
# add the labels for the results
self.aveLabel = QtGui.QLabel("Average:")
self.aveLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.aveValueLabel = QtGui.QLabel("")
self.aveValueLabel.setFrameStyle(
QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
self.stdLabel = QtGui.QLabel("Std(A):")
self.stdLabel.setFrameStyle(QtGui.QFrame.Panel | QtGui.QFrame.Raised)
self.stdValueLabel = QtGui.QLabel("")
self.stdValueLabel.setFrameStyle(
QtGui.QFrame.Panel | QtGui.QFrame.Sunken)
# provide the full layout of the information panel
infoLayout = QtGui.QGridLayout(self.infopanel)
infoLayout.addWidget(self.NameLabel, 0, 0, 1, 1)
infoLayout.addWidget(self.NameValueLabel, 0, 1, 1, 5)
infoLayout.addWidget(self.aveLabel, 1, 0)
infoLayout.addWidget(self.aveValueLabel, 1, 1)
infoLayout.addWidget(self.stdLabel, 1, 2)
infoLayout.addWidget(self.stdValueLabel, 1, 3)
infoLayout.addWidget(self.sigmaLabel, 2, 0)
infoLayout.addWidget(self.sigmaValueEdit, 2, 1)
infoLayout.addWidget(self.niterLabel, 2, 2)
infoLayout.addWidget(self.niterValueEdit, 2, 3)
infoLayout.addWidget(self.rejectButton, 2, 4)
# Set up the layout
mainLayout = QtGui.QVBoxLayout()
mainLayout.addWidget(self.arcdisplay.errfigure)
mainLayout.addWidget(self.toolbar)
mainLayout.addWidget(self.infopanel)
self.setLayout(mainLayout)
self.connect(
self.arcdisplay,
QtCore.SIGNAL('fitUpdate()'),
self.fitUpdate)
def fitUpdate(self):
if len(self.arcdisplay.xp) <= 1:
return
try:
xp = np.array(self.arcdisplay.xp)
wp = np.array(self.arcdisplay.wp)
w = self.arcdisplay.ws.value(xp)
value = (wp - w).mean()
self.aveValueLabel.setText("%4.2g" % value)
value = (wp - w).std()
self.stdValueLabel.setText("%4.2g" % value)
except Exception as e:
if self.arcdisplay.log is not None:
self.arcdisplay.log.message(e)
pass
def rejectpoints(self):
self.arcdisplay.ws.set_thresh(float(self.sigmaValueEdit.text()))
self.arcdisplay.ws.set_niter(int(self.niterValueEdit.text()))
self.arcdisplay.findfit()
class ArcDisplay(QtGui.QWidget):
"""Class for displaying Arc Spectra using matplotlib and embedded in a Qt 4 GUI.
"""
def __init__(self, xarr, farr, slines, sfluxes, ws, xp=[], wp=[], mdiff=20, specarr=None,
res=2.0, dres=0.1, dc=20, ndstep=20, sigma=5, smooth=0, niter=5, method='MatchZero',
textcolor='green', preprocess=False, log=None, verbose=True):
"""Default constructor."""
QtGui.QWidget.__init__(self)
# Initialize base class
self.arcfigure = MplCanvas()
self.errfigure = MplCanvas()
# Add central axes instance
self.axes = self.arcfigure.figure.add_subplot(111)
self.erraxes = self.errfigure.figure.add_subplot(111)
# Connect mouse events
self.arcfigure.connectMatplotlibMouseMotion()
self.arcfigure.mpl_connect('button_press_event', self.onButtonPress)
self.arcfigure.mpl_connect('key_press_event', self.onKeyPress)
self.errfigure.connectMatplotlibMouseMotion()
self.errfigure.mpl_connect('button_press_event', self.onButtonPress)
self.errfigure.mpl_connect('key_press_event', self.onKeyPress)
# load the data
self.xarr = xarr
self.farr = farr
self.slines = slines
self.sfluxes = sfluxes
self.ws = ws
self.orig_ws = copy.deepcopy(ws)
self.specarr = specarr
self.mdiff = mdiff
self.sigma = sigma
self.niter = int(niter)
self.smooth = int(smooth)
self.res = res
self.dres = dres
self.dc = dc
self.sections = 6
self.ndstep = ndstep
self.method = method
self.textcolor = textcolor
self.preprocess = preprocess
self.log = log
        self.verbose = verbose
# if asked, smooth the data
if self.smooth > 0:
self.farr = st.smooth_spectra(
self.xarr,
self.farr,
sigma=self.smooth)
self.xp = xp
self.wp = wp
self.rms = res
# set up the artificial spectra
self.spectrum = Spectrum.Spectrum(
self.slines,
self.sfluxes,
dw=self.dres,
stype='line',
sigma=self.res)
self.swarr = self.spectrum.wavelength
self.sfarr = self.spectrum.flux * \
self.farr.max() / self.spectrum.flux.max()
# set up the wavelength solution
if self.ws.function == 'line':
self.ws.set_xarr(self.xarr)
self.ws.farr = self.farr
self.ws.spectrum = self.spectrum
# set up the list of deleted points
self.dxp = []
self.dwp = []
# set up other variables
self.isArt = False
self.isFeature = False
# Set display parameters
self.xmin = self.xarr.min()
self.xmax = self.xarr.max()
self.ymin = self.farr.min()
self.ymax = self.farr.max()
        # preprocess if asked
if self.preprocess:
self.log.message("Preprocessing Spectra", with_header=False)
self.findzpd()
self.findfeatures()
self.findfit()
self.isFeature = True
self.emit(QtCore.SIGNAL("fitUpdate()"))
def help(self):
helpoutput = """
? - Print this file q - Quit the program
c - centroid on line x - print the current position
a - Display spectrum l - display features
b - identify features f - fit solution
p - print features P - print solution
z - zeropoint fit Z - find zeropoint and dispersion
r - redraw spectrum R - reset values
e - add closest line L - show detected peaks
d - delete feature u - undelete feature
X - fit full X-cor
"""
print helpoutput
def onKeyPress(self, event):
"""Emit signal on key press"""
if event.key == '?':
# return the help file
self.help()
elif event.key == 'q':
# exit the task
self.emit(QtCore.SIGNAL("quit()"))
elif event.key == 'c':
# return the centroid
if event.xdata:
self.log.message(str(event.xdata), with_header=False)
cx = st.mcentroid(
self.xarr,
self.farr,
xc=event.xdata,
xdiff=self.mdiff)
self.emit(QtCore.SIGNAL("updatex(float)"), cx)
elif event.key == 'x':
# return the x position
if event.xdata:
self.log.message(str(event.xdata), with_header=False)
self.emit(QtCore.SIGNAL("updatex(float)"), event.xdata)
elif event.key == 'R':
# reset the fit
self.reset()
elif event.key == 'f':
# find the fit
self.findfit()
self.emit(QtCore.SIGNAL("fitUpdate()"))
elif event.key == 'b':
            # auto-identify features
self.isFeature = True
self.findfeatures()
        elif event.key == 'z':
            # Assume the solution is correct and find the zeropoint
            # that best matches it from cross correlation
            self.findzp()
        elif event.key == 'Z':
            # Assume the solution is correct and find the zeropoint
            # and dispersion that best match it from cross correlation
            self.findzpd()
elif event.key == 'X':
# Assume the solution is almost correct
# Fit the full solution using the cross correlation coefficient
self.findxcorfit()
elif event.key == 'e':
# find closest feature from existing fit and line list
# and match it
self.addclosestline(event.xdata)
elif event.key == 'i':
# reset identified features
pass
elif event.key == 't':
# reset identified features
self.isFeature = True
self.testfeatures()
elif event.key == 'l':
# plot the features from existing list
if self.isFeature:
self.isFeature = False
self.redraw_canvas()
else:
self.isFeature = True
self.plotFeatures()
self.redraw_canvas()
elif event.key == 'L':
# plot the sources that are detected
self.plotDetections()
elif event.key == 'p':
# print information about features
for i in range(len(self.xp)):
print self.xp[i], self.wp[i]
elif event.key == 'P':
            # print the current wavelength solution coefficients
print self.ws.coef
elif event.key == 'r':
# redraw graph
self.redraw_canvas()
elif event.key == 'a':
# draw artificial spectrum
self.isArt = not self.isArt
self.redraw_canvas()
elif event.key == 'd':
# Delete feature
save = False
y = None
if event.canvas == self.errfigure:
y = event.ydata
save = True
self.deletepoints(event.xdata, y=y, save=save)
self.redraw_canvas(keepzoom=True)
elif event.key == 'u':
# undelete
self.undeletepoints(event.xdata, y=event.ydata)
self.redraw_canvas(keepzoom=True)
elif event.key:
self.emit(QtCore.SIGNAL("keyPressEvent(string)"), event.key)
def onButtonPress(self, event):
"""Emit signal on selecting valid image position."""
if event.xdata and event.ydata:
self.emit(QtCore.SIGNAL("positionSelected(float, float)"),
float(event.xdata), float(event.ydata))
def plotArc(self):
"""Draw image to canvas."""
# plot the spectra
self.spcurve, = self.axes.plot(
self.xarr, self.farr, linewidth=0.5, linestyle='-', marker='None', color='b')
def plotArt(self):
"""Plot the artificial spectrum"""
self.isArt = True
warr = self.ws.value(self.xarr)
asfarr = st.interpolate(
warr,
self.swarr,
self.sfarr,
left=0.0,
right=0.0)
asfarr = asfarr * self.farr.max() / asfarr.max()
self.fpcurve, = self.axes.plot(self.xarr, asfarr, linewidth=0.5, linestyle='-',
marker='None', color='r')
def plotDetections(self):
"""Plot the lines that are detected"""
xp, xf = st.findpoints(
self.xarr, self.farr, self.sigma, self.niter, sections=self.sections)
print xp
self.axes.plot(xp, xf, ls='', marker='|', ms=20, color='#000000')
def plotFeatures(self):
"""Plot features identified in the line list"""
fl = np.array(self.xp) * 0.0 + 0.25 * self.farr.max()
self.splines = self.axes.plot(
self.xp,
fl,
ls='',
marker='|',
ms=20,
color=self.textcolor)
# set up the text position
tsize = 0.83
self.ymin, self.ymax = self.axes.get_ylim()
ppp = (self.ymax - self.ymin) / (self.arcfigure.figure.get_figheight()
* self.arcfigure.figure.get_dpi())
f = self.ymax - 10 * tsize * ppp
for x, w in zip(self.xp, self.wp):
w = '%6.2f' % float(w)
self.axes.text(
x,
f,
w,
size='small',
rotation='vertical',
color=self.textcolor)
def plotErr(self):
"""Draw image to canvas."""
if self.xp and self.wp:
# plot the spectra
w = self.ws.value(np.array(self.xp))
self.errcurve, = self.erraxes.plot(
self.xp, self.wp - w, linewidth=0.5, linestyle='', marker='o', color='b')
if self.dxp and self.dwp:
# plot the spectra
dw = self.ws.value(np.array(self.dxp))
self.delerrcurve, = self.erraxes.plot(
self.dxp, self.dwp - dw, linewidth=0.5, linestyle='', marker='x', color='b')
def set_wdiff(self):
"""Derive a value for wdiff"""
        try:
            self.wdiff = self.mdiff * self.ws.coef[1]
        except Exception:
            self.wdiff = self.mdiff
def testfeatures(self):
"""Run the test matching algorithm"""
self.set_wdiff()
res = max(self.res * 0.25, 2)
xp, wp = st.crosslinematch(self.xarr, self.farr, self.slines, self.sfluxes, self.ws,
res=res, mdiff=self.mdiff, wdiff=20, sigma=self.sigma,
niter=self.niter, sections=self.sections)
for x, w in zip(xp, wp):
if w not in self.wp and w > -1:
self.xp.append(x)
self.wp.append(w)
self.plotFeatures()
self.redraw_canvas()
def findfeatures(self):
"""Given a set of features, find other features that might
correspond to those features
"""
#self.set_wdiff()
# xp, wp=st.findfeatures(self.xarr, self.farr, self.slines, self.sfluxes,
# self.ws, mdiff=self.mdiff, wdiff=self.wdiff, sigma=self.sigma,
# niter=self.niter, sections=3)
xp, wp = st.crosslinematch(self.xarr, self.farr, self.slines, self.sfluxes, self.ws,
res=max(self.sigma*self.res, 3), mdiff=self.mdiff, wdiff=10,
sections=self.sections, sigma=self.sigma, niter=self.niter)
for x, w in zip(xp, wp):
if w not in self.wp and w > -1:
self.xp.append(x)
self.wp.append(w)
# for i in range(len(self.xp)): print self.xp[i], self.wp[i]
# print
self.plotFeatures()
self.redraw_canvas()
def addclosestline(self, x):
"""Find the closes line to the centroided position and
add it
"""
cx = st.mcentroid(self.xarr, self.farr, xc=x, xdiff=self.mdiff)
w = self.ws.value(cx)
d = abs(self.slines - w)
w = self.slines[d.argmin()]
self.xp.append(x)
self.wp.append(w)
self.plotFeatures()
self.redraw_canvas()
def findzp(self):
"""Find the zeropoint for the source and plot of the new value
"""
dc = 0.5 * self.rms * self.ndstep
self.ws = st.findzeropoint(self.xarr, self.farr, self.swarr, self.sfarr,
self.ws, dc=dc, ndstep=self.ndstep, inttype='interp')
self.plotArt()
self.redraw_canvas()
def findzpd(self):
"""Find the zeropoint and dispersion for the source and plot of the new value
"""
dc = 0.5 * self.rms * self.ndstep
# fixed at 0.1 of the dispersion
dd = 0.1 * self.ws.coef[1]
        # set up the dcoef values
dcoef = self.ws.coef * 0.0
dcoef[0] = dc
dcoef[1] = dd
self.ws = st.findxcor(self.xarr, self.farr, self.swarr, self.sfarr, self.ws,
dcoef=dcoef, ndstep=self.ndstep, best=False, inttype='interp')
self.plotArt()
self.redraw_canvas()
def findxcorfit(self):
"""Maximize the normalized correlation coefficient using the full wavelength solution.
"""
self.ws = st.fitxcor(
self.xarr,
self.farr,
self.swarr,
self.sfarr,
self.ws,
interptype='interp')
self.plotArt()
self.redraw_canvas()
    def findfit(self):
        if len(self.xp) < self.ws.order:
            raise SALTSpecError(
                "Insufficient number of points for the fit")
try:
self.ws = st.findfit(
np.array(
self.xp), np.array(
self.wp), ws=self.ws, thresh=self.ws.thresh)
except SALTSpecError as e:
self.log.warning(e)
return
del_list = []
for i in range(len(self.ws.func.mask)):
if self.ws.func.mask[i] == 0:
self.deletepoints(self.ws.func.x[i], w=self.ws.func.y[i],
save=True)
self.rms = self.ws.sigma(self.ws.x_arr, self.ws.w_arr)
self.redraw_canvas()
def autoidentify(self, rstep=1, istart=None, nrows=1, oneline=True):
"""Run the autoidentify method for the current line"""
# update the line list such that it is only the line list of selected
# lines
if self.wp:
slines = np.array(self.wp)
sfluxes = self.farr[np.array(self.xp, dtype=int)]
# sfluxes=np.zeros(len(slines))
# for i in range(len(slines)):
# try:
# sfluxes[i]=self.sfluxes[self.slines==slines[i]][0]
# except:
# if sfluxes.mean()==0:
# sfluxes[i]=1
# else:
# sfluxes[i]=sfluxes.mean()
else:
slines = self.slines
sfluxes = self.sfluxes
iws = ai.AutoIdentify(self.xarr, self.specarr, slines, sfluxes, self.ws, farr=self.farr,
method=self.method, rstep=rstep, istart=istart, nrows=nrows,
res=self.res, dres=self.dres, mdiff=self.mdiff, sigma=self.sigma,
smooth=self.smooth, niter=self.niter, dc=self.dc, ndstep=self.ndstep,
oneline=oneline, log=self.log, verbose=self.verbose)
if oneline:
self.ws = iws
else:
return iws
def addpoints(self, x, w):
"""Add points to the line list
"""
if isinstance(x, list) and isinstance(w, list):
self.xp.extend(x)
self.wp.extend(w)
else:
self.xp.append(x)
self.wp.append(w)
def deletepoints(self, x, y=None, w=None, save=False):
""" Delete points from the line list
"""
dist = (np.array(self.xp) - x) ** 2
# assumes you are using the error plot
if y is not None:
w = self.ws.value(np.array(self.xp))
norm = self.xarr.max() / abs(self.wp - w).max()
dist += norm * (self.wp - w - y) ** 2
# print y, norm, dist.min()
# print y, dist.min()
elif w is not None:
norm = self.xarr.max() / abs(self.wp - w).max()
dist += norm * (self.wp - w)**2
in_minw = dist.argmin()
if save:
self.dxp.append(self.xp[in_minw])
self.dwp.append(self.wp[in_minw])
self.xp.__delitem__(in_minw)
self.wp.__delitem__(in_minw)
def undeletepoints(self, x, y=None):
""" Delete points from the line list
"""
if len(self.dxp) < 1:
return
if len(self.dxp) == 1:
self.xp.append(self.dxp[0])
self.wp.append(self.dwp[0])
self.dxp.__delitem__(0)
self.dwp.__delitem__(0)
return
        dist = (np.array(self.dxp) - x) ** 2  # dxp is a plain list, convert before arithmetic
if y is not None:
w = self.ws.value(np.array(self.dxp))
# dist += (self.dwp-w-y)**2
in_minw = dist.argmin()
self.xp.append(self.dxp[in_minw])
self.wp.append(self.dwp[in_minw])
self.dxp.__delitem__(in_minw)
self.dwp.__delitem__(in_minw)
return
def reset(self):
self.ws = copy.deepcopy(self.orig_ws)
self.redraw_canvas()
def redraw_canvas(self, keepzoom=False):
if keepzoom:
# Store current zoom level
xmin, xmax = self.axes.get_xlim()
ymin, ymax = self.axes.get_ylim()
# Clear plot
self.axes.clear()
# Draw image
self.plotArc()
# if necessary, redraw the features
if self.isFeature:
self.plotFeatures()
# if necessary, draw the artificial spectrum
if self.isArt:
self.plotArt()
# Restore zoom level
if keepzoom:
self.axes.set_xlim((self.xmin, self.xmax))
self.axes.set_ylim((self.ymin, self.ymax))
# Force redraw
self.arcfigure.draw()
self.err_redraw_canvas()
def err_redraw_canvas(self, keepzoom=False):
if keepzoom:
# Store current zoom level
xmin, xmax = self.erraxes.get_xlim()
ymin, ymax = self.erraxes.get_ylim()
else:
self.xmin, self.xmax = self.axes.get_xlim()
# Clear plot
self.erraxes.clear()
# Draw image
self.plotErr()
# Restore zoom level
if keepzoom:
self.erraxes.set_xlim((xmin, xmax))
self.erraxes.set_ylim((ymin, ymax))
else:
self.erraxes.set_xlim((self.xmin, self.xmax))
self.errfigure.draw()
self.emit(QtCore.SIGNAL("fitUpdate()"))
def InterIdentify(xarr, specarr, slines, sfluxes, ws, mdiff=20, rstep=1, filename=None,
function='poly', order=3, sigma=3, smooth=0, niter=5, res=2, dres=0.1, dc=20, ndstep=20,
istart=None, method='Zeropoint', scale='zscale', cmap='gray', contrast=1.0,
subback=0, textcolor='green', preprocess=False, log=None, verbose=True):
# Create GUI
global App
App = QtGui.QApplication.instance()
if App is None:
App = QtGui.QApplication(sys.argv)
aw = InterIdentifyWindow(xarr, specarr, slines, sfluxes, ws, rstep=rstep, mdiff=mdiff, sigma=sigma, niter=niter,
res=res, dres=dres, dc=dc, ndstep=ndstep, istart=istart, method=method, smooth=smooth,subback=subback,
cmap=cmap, scale=scale, contrast=contrast, filename=filename, textcolor=textcolor, preprocess=preprocess,
log=log)
aw.show()
# Start application event loop
exit = App.exec_()
imsol = aw.ImageSolution.copy()
# Check if GUI was executed succesfully
if exit != 0:
raise SALTSpecError(
'InterIdentify GUI has unexpected exit status ' +
str(exit))
del aw
return imsol
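# Illustrative usage sketch (added; not part of the original module). Assumes
# `xarr`, `specarr`, `slines`, `sfluxes` and an initial wavelength solution
# `ws` have already been prepared from an arc frame and a line list:
#
#     imsol = InterIdentify(xarr, specarr, slines, sfluxes, ws,
#                           rstep=100, method='Zeropoint', verbose=True)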
| bsd-3-clause | -5,777,721,175,345,980,000 | 36.034208 | 135 | 0.576237 | false |
boytm/transparent_proxy | tcp_proxy.py | 1 | 5604 | #!/usr/bin/env python
# vim: set fileencoding=utf-8 :
import sys
import socket
import struct
import tornado.ioloop
import tornado.tcpserver
import tornado.tcpclient
#import tornado.web
from tornado import gen
import functools
class TCPProxyHandler(tornado.tcpserver.TCPServer):
@gen.coroutine
def handle_stream(self, stream, address):
factory = tornado.tcpclient.TCPClient()
if stream.socket.family == socket.AF_INET:
#print stream.socket.getsockopt(socket.SOL_IP, socket.SO_ORIGINAL_DST, 16)
dst = stream.socket.getsockopt(socket.SOL_IP, 80, 16)
srv_port, srv_ip = struct.unpack('!2xH4s8x', dst)
srv_ip = socket.inet_ntoa(srv_ip)
if cmp((srv_ip, srv_port), stream.socket.getsockname()) == 0:
print "ignore not nated stream"
stream.close()
return
try:
remote = yield factory.connect(srv_ip, srv_port)
Relay(stream, remote)
except:
print 'connect error'
stream.close()
return
else:
print 'Unsupported protocol family'
return
class Relay(object):
def __init__(self, local, remote):
self.local = local
self.remote = remote
self.local.set_nodelay(True)
self.remote.set_nodelay(True)
self.local.set_close_callback(self.on_local_close)
self.remote.set_close_callback(self.on_remote_close)
self.local.read_bytes(65536, callback=self.on_local_read, partial=True)
self.remote.read_bytes(65536, callback=self.on_remote_read, partial=True)
def on_local_close(self):
print 'detect local close'
if self.local.error:
print self.local.error
if not self.remote.writing():
self.remote.close()
def on_remote_close(self):
print 'detect remote close'
if self.remote.error:
print self.remote.error
if not self.local.writing():
self.local.close()
def on_local_read(self, data):
self.remote.write(data, callback = self.on_remote_write)
def on_local_write(self):
#if shouldclose:
# self.local.close()
#else:
if self.remote.closed():
print 'remote closed, cancel relay'
return
self.remote.read_bytes(65536, callback=self.on_remote_read, partial=True)
def on_remote_read(self, data):
if self.remote.closed():
print 'remote read %d, but should close' % len(data)
self.local.write(data, callback = self.on_local_write)
def on_remote_write(self):
if self.local.closed():
print 'local closed, cancel relay'
return
self.local.read_bytes(65536, callback=self.on_local_read, partial=True)
class TCPProxyHandler2(tornado.tcpserver.TCPServer):
#@gen.coroutine
def handle_stream(self, stream, address):
factory = tornado.tcpclient.TCPClient()
if stream.socket.family == socket.AF_INET:
#print stream.socket.getsockopt(socket.SOL_IP, socket.SO_ORIGINAL_DST, 16)
dst = stream.socket.getsockopt(socket.SOL_IP, 80, 16)
print struct.unpack('!2xH4s8x', dst)
srv_port, srv_ip = struct.unpack('!2xH4s8x', dst)
srv_ip = socket.inet_ntoa(srv_ip)
if cmp((srv_ip, srv_port), stream.socket.getsockname()) == 0:
print "error connect itself"
stream.close()
return
#remote = yield factory.connect(srv_ip, srv_port)
#Relay2(local, remote)
factory.connect(srv_ip, srv_port, callback=functools.partial(self.on_connect, stream))
else:
return
def on_connect(self, local, remote):
Relay2(local, remote)
class Relay2(object):
def __init__(self, local, remote):
self.local = local
self.remote = remote
self.quit = False
self.local.set_nodelay(True)
self.remote.set_nodelay(True)
self.local.set_close_callback(self.on_local_close)
self.remote.set_close_callback(self.on_remote_close)
self.read_and_write(local, remote)
self.read_and_write(remote, local)
def on_local_close(self):
print 'detect local close'
self.quit = True
if self.local.error:
print self.local.error
if not self.remote.writing():
self.remote.close()
def on_remote_close(self):
print 'detect remote close'
self.quit = True
if self.remote.error:
print self.remote.error
if not self.local.writing():
self.local.close()
@gen.coroutine
def read_and_write(self, read_from, to):
while not self.quit:
try:
data = yield read_from.read_bytes(65536, partial=True)
yield to.write(data)
except Exception as e:
print "error %s, quit relay" % str(e)
break
def main():
#tornado.netutil.Resolver.configure('tornado.netutil.ThreadedResolver')
#tornado.netutil.Resolver.configure('tornado.platform.caresresolver.CaresResolver')
server = TCPProxyHandler()
#server.listen(8888, address='127.0.0.1') # iptables can't DNAT to 127.0.0.1:8888
server.listen(8888)
tornado.ioloop.IOLoop.instance().start()
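    # Added note (not in the original script): for SO_ORIGINAL_DST to work,
    # traffic must first be redirected to this proxy by an iptables NAT rule,
    # for example:
    #   iptables -t nat -A PREROUTING -p tcp --dport 80 -j REDIRECT --to-ports 8888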
if __name__ == "__main__":
if sys.platform == 'linux2':
import os, pwd
os.setuid(pwd.getpwnam('nobody').pw_uid)
main()
| apache-2.0 | 2,496,638,393,646,806,000 | 29.791209 | 98 | 0.597252 | false |
codingvirtual/fullstack-p4-conference | constants.py | 1 | 3218 | __author__ = 'Greg'
from protorpc import messages
from protorpc import message_types
from models import *
""" - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - """
""" Default values for a new conference. Used only if the user creating
the conference doesn't supply values for a given field and only fields
left empty pick up the default (in other words, if the user supplies
a value for one of the fields below, but not the others, the one they
supplied a value for will retain that value and only the others that
were left empty will inherit the default values)"""
DEFAULTS = {
"city": "Default City",
"maxAttendees": 0,
"seatsAvailable": 0,
"topics": [ "Default", "Topic" ]
}
""" As above, defaults for a new session when there are fields left empty"""
SESSION_DEFAULTS = {
"speaker": "Unknown",
"duration": 60,
"typeOfSession": "Keynote",
}
""" Comparison operators used for filter and query operations"""
OPERATORS = {
'EQ': '=',
'GT': '>',
'GTEQ': '>=',
'LT': '<',
'LTEQ': '<=',
'NE': '!='
}
""" Fields present for a conference """
FIELDS = {
'CITY': 'city',
'TOPIC': 'topics',
'MONTH': 'month',
'MAX_ATTENDEES': 'maxAttendees',
}
""" The following list of elements each define a specific request or response
container that is specific to a particular Model in the overall data
scheme. A "websafe" key is a key that has been URL-encoded to preserve
integrity of the key for transmission across the web. Google code
can use this websafe key to get back to the "real" key in order to
access Datastore """
CONF_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
websafeConferenceKey=messages.StringField(1),
)
CONF_POST_REQUEST = endpoints.ResourceContainer(
ConferenceForm,
websafeConferenceKey=messages.StringField(1),
)
SESSIONS_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
conferenceKey=messages.StringField(1),
sessionKey=messages.StringField(2)
)
SESSIONS_POST_REQUEST = endpoints.ResourceContainer(
SessionForm,
conferenceKey=messages.StringField(1),
)
WISHLIST_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
sessionKey=messages.StringField(1, required=True),
)
SPEAKER_GET_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
speaker=messages.StringField(1, required=True),
)
QUERY_POST_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
startTime=messages.StringField(1),
typeOfSession=messages.StringField(2),
)
SESSION_BY_CONF_POST_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
conferenceKey=messages.StringField(1),
)
SESSION_BY_TYPE_POST_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
conferenceKey=messages.StringField(1),
typeOfSession=messages.StringField(2),
)
SESSION_BY_SPEAKER_POST_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
speaker=messages.StringField(1),
)
GET_FEATURED_SPEAKER_REQUEST = endpoints.ResourceContainer(
message_types.VoidMessage,
conf_key=messages.StringField(1, required=True)
) | apache-2.0 | -6,473,297,852,493,102,000 | 28.53211 | 77 | 0.699503 | false |
CoDaS-Lab/image_analysis | demo/demo_features.py | 1 | 1262 | # Copyright 2017 Codas Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np  # required by ArgMaxPixel.extract (np.max)
import skimage.color
from image_analysis.pipeline.feature import Feature
class RGBToGray(Feature):
def __init__(self):
Feature.__init__(self, 'grayscale', frame_op=True)
def extract(self, RGB_frame):
return skimage.color.rgb2gray(RGB_frame)
class BatchOP(Feature):
def __init__(self):
Feature.__init__(self, 'batch_length', batch_op=True)
def extract(self, batch):
return len(batch)
class ArgMaxPixel(Feature):
def __init__(self):
Feature.__init__(self, 'max_pixel', frame_op=True)
def extract(self, frame):
return np.max(frame)
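# Illustrative usage (added; not part of the original demo). The `rgb_frame`
# variable below is an assumed RGB numpy array:
#
#     gray = RGBToGray().extract(rgb_frame)      # grayscale copy of one frame
#     brightest = ArgMaxPixel().extract(gray)    # maximum pixel value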
| apache-2.0 | -6,162,282,703,088,598,000 | 29.047619 | 80 | 0.654517 | false |
edocappelli/crystalpy | crystalpy/diffraction/GeometryType.py | 1 | 2379 | """
Represents geometry types/setups: Bragg diffraction, BraggTransmission, Laue diffraction, Laue transmission.
"""
class GeometryType(object):
def __init__(self, description):
"""
Constructor.
:param description: Description of the geometry type, e.g. "Bragg transmission"
"""
self._description = description
def description(self):
"""
Returns the description of this geometry type.
:return: Description of this geometry type.
"""
return self._description
def __eq__(self, candidate):
"""
Determines if two instances are equal.
:param candidate: Instances to compare to.
:return: True if both instances are equal. Otherwise False.
"""
return self.description() == candidate.description()
def __ne__(self, candidate):
"""
Determines if two instances are not equal.
:param candidate: Instances to compare.
:return: True if both instances are not equal. Otherwise False.
"""
return not self == candidate
def __hash__(self):
"""
Returns the hash value of this instance.
:return: Hash value of this instance.
"""
# As hash value just use the hash of the description.
return hash(self._description)
@staticmethod
def allGeometryTypes():
"""
Returns all possible geometry types.
:return: All possible geometry types.
"""
return [BraggDiffraction(),
LaueDiffraction(),
BraggTransmission(),
LaueTransmission()]
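# Example (added for illustration; not part of the original module):
#
#     BraggDiffraction() == BraggDiffraction()               # True
#     BraggDiffraction() in GeometryType.allGeometryTypes()  # True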
class LaueDiffraction(GeometryType):
"""
Represents Laue diffraction.
"""
def __init__(self):
super(LaueDiffraction, self).__init__("Laue diffraction")
class BraggDiffraction(GeometryType):
"""
Represents Bragg diffraction.
"""
def __init__(self):
super(BraggDiffraction, self).__init__("Bragg diffraction")
class LaueTransmission(GeometryType):
"""
Represents Laue transmission.
"""
def __init__(self):
super(LaueTransmission, self).__init__("Laue transmission")
class BraggTransmission(GeometryType):
"""
Represents Bragg transmission.
"""
def __init__(self):
super(BraggTransmission, self).__init__("Bragg transmission")
| mit | -4,599,577,668,534,421,500 | 26.662791 | 108 | 0.605717 | false |
klahnakoski/TestFailures | pyLibrary/debugs/exceptions.py | 1 | 7358 | # encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import sys
from collections import Mapping
from pyLibrary.dot import Dict, listwrap, unwraplist, set_default, Null
from pyLibrary.jsons.encoder import json_encoder
from pyLibrary.strings import indent, expand_template
FATAL = "FATAL"
ERROR = "ERROR"
WARNING = "WARNING"
ALARM = "ALARM"
UNEXPECTED = "UNEXPECTED"
NOTE = "NOTE"
class Except(Exception):
@staticmethod
def new_instance(desc):
return Except(
desc.type,
desc.template,
desc.params,
[Except.new_instance(c) for c in listwrap(desc.cause)],
desc.trace
)
def __init__(self, type=ERROR, template=Null, params=Null, cause=Null, trace=Null, **kwargs):
Exception.__init__(self)
self.type = type
self.template = template
self.params = set_default(kwargs, params)
self.cause = cause
if not trace:
            self.trace = extract_stack(2)
else:
self.trace = trace
@classmethod
def wrap(cls, e, stack_depth=0):
if e == None:
return Null
elif isinstance(e, (list, Except)):
return e
elif isinstance(e, Mapping):
e.cause = unwraplist([Except.wrap(c) for c in listwrap(e.cause)])
return Except(**e)
else:
if hasattr(e, "message") and e.message:
cause = Except(ERROR, unicode(e.message), trace=_extract_traceback(0))
else:
cause = Except(ERROR, unicode(e), trace=_extract_traceback(0))
trace = extract_stack(stack_depth + 2) # +2 = to remove the caller, and it's call to this' Except.wrap()
cause.trace.extend(trace)
return cause
@property
def message(self):
return expand_template(self.template, self.params)
def __contains__(self, value):
if isinstance(value, basestring):
if self.template.find(value) >= 0 or self.message.find(value) >= 0:
return True
if self.type == value:
return True
for c in listwrap(self.cause):
if value in c:
return True
return False
def __unicode__(self):
output = self.type + ": " + self.template + "\n"
if self.params:
output = expand_template(output, self.params)
if self.trace:
output += indent(format_trace(self.trace))
if self.cause:
cause_strings = []
for c in listwrap(self.cause):
with suppress_exception:
cause_strings.append(unicode(c))
output += "caused by\n\t" + "and caused by\n\t".join(cause_strings)
return output
def __str__(self):
return self.__unicode__().encode('latin1', 'replace')
def as_dict(self):
return Dict(
type=self.type,
template=self.template,
params=self.params,
cause=self.cause,
trace=self.trace
)
def __json__(self):
return json_encoder(self.as_dict())
def extract_stack(start=0):
"""
SNAGGED FROM traceback.py
Extract the raw traceback from the current stack frame.
    Each item in the returned list is a dict with the keys "depth", "line",
    "file" and "method", and the entries are in order
    from newest to oldest
"""
try:
raise ZeroDivisionError
except ZeroDivisionError:
trace = sys.exc_info()[2]
f = trace.tb_frame.f_back
for i in range(start):
f = f.f_back
stack = []
n = 0
while f is not None:
stack.append({
"depth": n,
"line": f.f_lineno,
"file": f.f_code.co_filename,
"method": f.f_code.co_name
})
f = f.f_back
n += 1
return stack
def _extract_traceback(start):
"""
SNAGGED FROM traceback.py
RETURN list OF dicts DESCRIBING THE STACK TRACE
"""
tb = sys.exc_info()[2]
for i in range(start):
tb = tb.tb_next
trace = []
n = 0
while tb is not None:
f = tb.tb_frame
trace.append({
"depth": n,
"file": f.f_code.co_filename,
"line": tb.tb_lineno,
"method": f.f_code.co_name
})
tb = tb.tb_next
n += 1
trace.reverse()
return trace
def format_trace(tbs, start=0):
trace = []
for d in tbs[start::]:
item = expand_template('File "{{file}}", line {{line}}, in {{method}}\n', d)
trace.append(item)
return "".join(trace)
class Suppress(object):
"""
IGNORE EXCEPTIONS
"""
def __init__(self, exception_type):
self.type = exception_type
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
if not exc_val or isinstance(exc_val, self.type):
return True
suppress_exception = Suppress(Exception)
class Explanation(object):
"""
EXPLAIN THE ACTION BEING TAKEN
IF THERE IS AN EXCEPTION WRAP IT WITH THE EXPLANATION
CHAIN EXCEPTION AND RE-RAISE
"""
def __init__(
self,
template, # human readable template
**more_params
):
self.template = template
self.more_params = more_params
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
if isinstance(exc_val, Exception):
from pyLibrary.debugs.logs import Log
Log.error(
template="Failure in " + self.template,
default_params=self.more_params,
cause=exc_val,
stack_depth=1
)
return True
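# Illustrative usage (added; not in the original module): any exception raised
# inside the block is wrapped with the explanation and re-raised via Log.error.
# `filename` and `parse` below are hypothetical:
#
#     with Explanation("parsing {{file}}", file=filename):
#         parse(filename)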
class WarnOnException(object):
"""
EXPLAIN THE ACTION BEING TAKEN
    IF THERE IS AN EXCEPTION, ISSUE A WARNING INSTEAD OF RE-RAISING
"""
def __init__(
self,
template, # human readable template
**more_params
):
self.template = template
self.more_params = more_params
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
if isinstance(exc_val, Exception):
from pyLibrary.debugs.logs import Log
Log.warning(
template="Ignored failure while " + self.template,
default_params=self.more_params,
cause=exc_val,
stack_depth=1
)
return True
class AssertNoException(object):
"""
EXPECT NO EXCEPTION IN THIS BLOCK
"""
def __init__(self):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
if isinstance(exc_val, Exception):
from pyLibrary.debugs.logs import Log
Log.error(
template="Not expected to fail",
cause=exc_val,
stack_depth=1
)
return True
assert_no_exception = AssertNoException()
| mpl-2.0 | 7,010,921,535,276,321,000 | 23.858108 | 117 | 0.554499 | false |
vermouth1992/Leetcode | python/576.out-of-boundary-paths.py | 1 | 2536 | #
# @lc app=leetcode id=576 lang=python3
#
# [576] Out of Boundary Paths
#
# https://leetcode.com/problems/out-of-boundary-paths/description/
#
# algorithms
# Medium (36.32%)
# Total Accepted: 37.6K
# Total Submissions: 103.6K
# Testcase Example: '2\n2\n2\n0\n0'
#
# There is an m x n grid with a ball. The ball is initially at the position
# [startRow, startColumn]. You are allowed to move the ball to one of the four
# adjacent four cells in the grid (possibly out of the grid crossing the grid
# boundary). You can apply at most maxMove moves to the ball.
#
# Given the five integers m, n, maxMove, startRow, startColumn, return the
# number of paths to move the ball out of the grid boundary. Since the answer
# can be very large, return it modulo 10^9 + 7.
#
#
# Example 1:
#
#
# Input: m = 2, n = 2, maxMove = 2, startRow = 0, startColumn = 0
# Output: 6
#
#
# Example 2:
#
#
# Input: m = 1, n = 3, maxMove = 3, startRow = 0, startColumn = 1
# Output: 12
#
#
#
# Constraints:
#
#
# 1 <= m, n <= 50
# 0 <= maxMove <= 50
# 0 <= startRow < m
# 0 <= startColumn < n
#
#
#
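# DP sketch (comment added for clarity): let paths[k][r][c] be the number of
# ways to move out of the grid in exactly k+1 moves starting from (r, c).
# Layer 0 counts direct exits from boundary cells; each later layer sums the
# previous layer over the four neighbours, and the answer accumulates
# paths[k][startRow][startColumn] over all k < maxMove.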
class Solution:
def findPaths(self, m: int, n: int, maxMove: int, startRow: int, startColumn: int) -> int:
if maxMove == 0:
return 0
table = []
for move in range(maxMove):
table.append([])
for row in range(m):
table[move].append([])
for col in range(n):
table[move][row].append(0)
        # seed layer 0: every boundary cell can step out in one move (corner cells twice)
for row in range(m):
table[0][row][0] += 1
table[0][row][n - 1] += 1
for col in range(n):
table[0][0][col] += 1
table[0][m - 1][col] += 1
for move in range(1, maxMove):
for row in range(m):
for col in range(n):
if row > 0:
table[move][row][col] += table[move - 1][row - 1][col]
if row < m - 1:
table[move][row][col] += table[move - 1][row + 1][col]
if col > 0:
table[move][row][col] += table[move - 1][row][col - 1]
if col < n - 1:
table[move][row][col] += table[move - 1][row][col + 1]
result = 0
for move in range(maxMove):
result += table[move][startRow][startColumn]
return result % 1000000007
if __name__ == '__main__':
print(Solution().findPaths(1, 3, 3, 0, 1)) | mit | -6,378,149,178,022,609,000 | 26.879121 | 94 | 0.521293 | false |
richardliaw/ray | rllib/utils/exploration/ornstein_uhlenbeck_noise.py | 1 | 9037 | import numpy as np
from typing import Optional, Union
from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.utils.annotations import override
from ray.rllib.utils.exploration.gaussian_noise import GaussianNoise
from ray.rllib.utils.framework import try_import_tf, try_import_torch, \
get_variable, TensorType
from ray.rllib.utils.schedules import Schedule
tf1, tf, tfv = try_import_tf()
torch, _ = try_import_torch()
class OrnsteinUhlenbeckNoise(GaussianNoise):
"""An exploration that adds Ornstein-Uhlenbeck noise to continuous actions.
If explore=True, returns sampled actions plus a noise term X,
    which evolves according to the discretized update used below:
    X_{t+1} = X_t + theta * (0 - X_t) + sigma * N(0, stddev), i.e. the noise
    decays toward zero at rate theta while fresh Gaussian noise is added at
    each step. Also, some completely random period is possible at the
    beginning.
If explore=False, returns the deterministic action.
"""
def __init__(self,
action_space,
*,
framework: str,
ou_theta: float = 0.15,
ou_sigma: float = 0.2,
ou_base_scale: float = 0.1,
random_timesteps: int = 1000,
initial_scale: float = 1.0,
final_scale: float = 0.02,
scale_timesteps: int = 10000,
scale_schedule: Optional[Schedule] = None,
**kwargs):
"""Initializes an Ornstein-Uhlenbeck Exploration object.
Args:
action_space (Space): The gym action space used by the environment.
ou_theta (float): The theta parameter of the Ornstein-Uhlenbeck
process.
ou_sigma (float): The sigma parameter of the Ornstein-Uhlenbeck
process.
ou_base_scale (float): A fixed scaling factor, by which all OU-
noise is multiplied. NOTE: This is on top of the parent
GaussianNoise's scaling.
random_timesteps (int): The number of timesteps for which to act
completely randomly. Only after this number of timesteps, the
`self.scale` annealing process will start (see below).
initial_scale (float): The initial scaling weight to multiply
the noise with.
final_scale (float): The final scaling weight to multiply
the noise with.
scale_timesteps (int): The timesteps over which to linearly anneal
the scaling factor (after(!) having used random actions for
`random_timesteps` steps.
scale_schedule (Optional[Schedule]): An optional Schedule object
to use (instead of constructing one from the given parameters).
framework (Optional[str]): One of None, "tf", "torch".
"""
super().__init__(
action_space,
framework=framework,
random_timesteps=random_timesteps,
initial_scale=initial_scale,
final_scale=final_scale,
scale_timesteps=scale_timesteps,
scale_schedule=scale_schedule,
stddev=1.0, # Force `self.stddev` to 1.0.
**kwargs)
self.ou_theta = ou_theta
self.ou_sigma = ou_sigma
self.ou_base_scale = ou_base_scale
        # The current OU-state value (gets updated each time an exploration
        # action is computed).
self.ou_state = get_variable(
np.array(self.action_space.low.size * [.0], dtype=np.float32),
framework=self.framework,
tf_name="ou_state",
torch_tensor=True,
device=self.device)
@override(GaussianNoise)
def _get_tf_exploration_action_op(self, action_dist: ActionDistribution,
explore: Union[bool, TensorType],
timestep: Union[int, TensorType]):
ts = timestep if timestep is not None else self.last_timestep
scale = self.scale_schedule(ts)
# The deterministic actions (if explore=False).
deterministic_actions = action_dist.deterministic_sample()
# Apply base-scaled and time-annealed scaled OU-noise to
# deterministic actions.
gaussian_sample = tf.random.normal(
shape=[self.action_space.low.size], stddev=self.stddev)
ou_new = self.ou_theta * -self.ou_state + \
self.ou_sigma * gaussian_sample
if self.framework in ["tf2", "tfe"]:
self.ou_state.assign_add(ou_new)
ou_state_new = self.ou_state
else:
ou_state_new = tf1.assign_add(self.ou_state, ou_new)
high_m_low = self.action_space.high - self.action_space.low
high_m_low = tf.where(
tf.math.is_inf(high_m_low), tf.ones_like(high_m_low), high_m_low)
noise = scale * self.ou_base_scale * ou_state_new * high_m_low
stochastic_actions = tf.clip_by_value(
deterministic_actions + noise,
self.action_space.low * tf.ones_like(deterministic_actions),
self.action_space.high * tf.ones_like(deterministic_actions))
# Stochastic actions could either be: random OR action + noise.
random_actions, _ = \
self.random_exploration.get_tf_exploration_action_op(
action_dist, explore)
exploration_actions = tf.cond(
pred=tf.convert_to_tensor(ts < self.random_timesteps),
true_fn=lambda: random_actions,
false_fn=lambda: stochastic_actions)
# Chose by `explore` (main exploration switch).
action = tf.cond(
pred=tf.constant(explore, dtype=tf.bool)
if isinstance(explore, bool) else explore,
true_fn=lambda: exploration_actions,
false_fn=lambda: deterministic_actions)
# Logp=always zero.
batch_size = tf.shape(deterministic_actions)[0]
logp = tf.zeros(shape=(batch_size, ), dtype=tf.float32)
# Increment `last_timestep` by 1 (or set to `timestep`).
if self.framework in ["tf2", "tfe"]:
if timestep is None:
self.last_timestep.assign_add(1)
else:
self.last_timestep.assign(timestep)
return action, logp
else:
assign_op = (tf1.assign_add(self.last_timestep, 1)
if timestep is None else tf1.assign(
self.last_timestep, timestep))
with tf1.control_dependencies([assign_op, ou_state_new]):
return action, logp
@override(GaussianNoise)
def _get_torch_exploration_action(self, action_dist: ActionDistribution,
explore: bool,
timestep: Union[int, TensorType]):
# Set last timestep or (if not given) increase by one.
self.last_timestep = timestep if timestep is not None else \
self.last_timestep + 1
# Apply exploration.
if explore:
# Random exploration phase.
if self.last_timestep < self.random_timesteps:
action, _ = \
self.random_exploration.get_torch_exploration_action(
action_dist, explore=True)
# Apply base-scaled and time-annealed scaled OU-noise to
# deterministic actions.
else:
det_actions = action_dist.deterministic_sample()
scale = self.scale_schedule(self.last_timestep)
gaussian_sample = scale * torch.normal(
mean=torch.zeros(self.ou_state.size()), std=1.0) \
.to(self.device)
ou_new = self.ou_theta * -self.ou_state + \
self.ou_sigma * gaussian_sample
self.ou_state += ou_new
high_m_low = torch.from_numpy(
self.action_space.high - self.action_space.low). \
to(self.device)
high_m_low = torch.where(
torch.isinf(high_m_low),
torch.ones_like(high_m_low).to(self.device), high_m_low)
noise = scale * self.ou_base_scale * self.ou_state * high_m_low
action = torch.min(
torch.max(
det_actions + noise,
torch.tensor(
self.action_space.low,
dtype=torch.float32,
device=self.device)),
torch.tensor(
self.action_space.high,
dtype=torch.float32,
device=self.device))
# No exploration -> Return deterministic actions.
else:
action = action_dist.deterministic_sample()
# Logp=always zero.
logp = torch.zeros(
(action.size()[0], ), dtype=torch.float32, device=self.device)
return action, logp
| apache-2.0 | -3,410,992,502,494,740,500 | 43.29902 | 79 | 0.565674 | false |
dukhlov/oslo.messaging | oslo_messaging/tests/drivers/zmq/test_zmq_transport_url.py | 1 | 3779 | # Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
import oslo_messaging
from oslo_messaging._drivers import common
from oslo_messaging._drivers.zmq_driver.matchmaker.base import DummyMatchMaker
from oslo_messaging._drivers.zmq_driver.matchmaker import matchmaker_redis
from oslo_messaging._drivers.zmq_driver import zmq_async
from oslo_messaging.tests import utils as test_utils
zmq = zmq_async.import_zmq()
class TestZmqTransportUrl(test_utils.BaseTestCase):
@testtools.skipIf(zmq is None, "zmq not available")
def setUp(self):
super(TestZmqTransportUrl, self).setUp()
def setup_url(self, url):
transport = oslo_messaging.get_transport(self.conf, url)
self.addCleanup(transport.cleanup)
driver = transport._driver
return driver, url
def test_empty_url(self):
driver, url = self.setup_url("zmq:///")
self.assertIs(matchmaker_redis.RedisMatchMaker,
driver.matchmaker.__class__)
self.assertEqual('zmq', driver.matchmaker.url.transport)
def test_error_name(self):
self.assertRaises(common.RPCException, self.setup_url, "zmq+error:///")
def test_dummy_url(self):
driver, url = self.setup_url("zmq+dummy:///")
self.assertIs(DummyMatchMaker,
driver.matchmaker.__class__)
self.assertEqual('zmq+dummy', driver.matchmaker.url.transport)
def test_redis_url(self):
driver, url = self.setup_url("zmq+redis:///")
self.assertIs(matchmaker_redis.RedisMatchMaker,
driver.matchmaker.__class__)
self.assertEqual('zmq+redis', driver.matchmaker.url.transport)
def test_redis_url_no_creds(self):
driver, url = self.setup_url("zmq+redis://host:65123/")
self.assertIs(matchmaker_redis.RedisMatchMaker,
driver.matchmaker.__class__)
self.assertEqual('zmq+redis', driver.matchmaker.url.transport)
self.assertEqual("host", driver.matchmaker.standalone_redis["host"])
self.assertEqual(65123, driver.matchmaker.standalone_redis["port"])
def test_redis_url_no_port(self):
driver, url = self.setup_url("zmq+redis://:p12@host:65123/")
self.assertIs(matchmaker_redis.RedisMatchMaker,
driver.matchmaker.__class__)
self.assertEqual('zmq+redis', driver.matchmaker.url.transport)
self.assertEqual("host", driver.matchmaker.standalone_redis["host"])
self.assertEqual(65123, driver.matchmaker.standalone_redis["port"])
self.assertEqual("p12", driver.matchmaker.standalone_redis["password"])
def test_sentinel_multiple_hosts_url(self):
driver, url = self.setup_url(
"zmq+redis://sentinel1:20001,sentinel2:20001,sentinel3:20001/")
self.assertIs(matchmaker_redis.RedisMatchMaker,
driver.matchmaker.__class__)
self.assertEqual('zmq+redis', driver.matchmaker.url.transport)
self.assertEqual(3, len(driver.matchmaker.sentinel_hosts))
expected = [("sentinel1", 20001), ("sentinel2", 20001),
("sentinel3", 20001)]
self.assertEqual(expected, driver.matchmaker.sentinel_hosts)
| apache-2.0 | -708,235,220,730,619,100 | 42.436782 | 79 | 0.674517 | false |
zejn/babbage | tests/test_model.py | 1 | 2196 | from .util import TestCase, load_json_fixture
from babbage.model import Model
class ModelTestCase(TestCase):
def setUp(self):
super(ModelTestCase, self).setUp()
self.simple_model_data = load_json_fixture('models/simple_model.json')
self.simple_model = Model(self.simple_model_data)
def test_model_concepts(self):
concepts = list(self.simple_model.concepts)
assert len(concepts) == 7, len(concepts)
def test_model_match(self):
concepts = list(self.simple_model.match('foo'))
assert len(concepts) == 1, len(concepts)
def test_model_match_invalid(self):
concepts = list(self.simple_model.match('fooxx'))
assert len(concepts) == 0, len(concepts)
def test_model_aggregates(self):
aggregates = list(self.simple_model.aggregates)
assert len(aggregates) == 2, aggregates
def test_model_fact_table(self):
assert self.simple_model.fact_table_name == 'simple'
assert 'simple' in repr(self.simple_model), repr(self.simple_model)
def test_deref(self):
assert self.simple_model['foo'].name == 'foo'
assert self.simple_model['foo.key'].name == 'key'
assert self.simple_model['amount'].name == 'amount'
assert 'amount' in self.simple_model
assert 'amount.sum' in self.simple_model
assert '_count' in self.simple_model
assert 'yabba' not in self.simple_model
assert 'foo.key' in self.simple_model
def test_repr(self):
assert 'amount' in repr(self.simple_model['amount'])
assert 'amount.sum' in repr(self.simple_model['amount.sum'])
assert 'foo.key' in repr(self.simple_model['foo.key'])
assert 'foo' in repr(self.simple_model['foo'])
assert 'foo' in unicode(self.simple_model['foo'])
assert self.simple_model['foo'] == 'foo'
def test_to_dict(self):
data = self.simple_model.to_dict()
assert 'measures' in data
assert 'amount' in data['measures']
assert 'amount.sum' in data['aggregates']
assert 'ref' in data['measures']['amount']
assert 'dimensions' in data
assert 'foo' in data['dimensions']
| mit | 2,243,494,680,258,463,000 | 37.526316 | 78 | 0.636612 | false |
sebastianlan/wedfairy-api | rsvp/migrations/0001_initial.py | 1 | 1612 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Attendance',
fields=[
('id', models.AutoField(serialize=False, verbose_name=b'id', primary_key=True)),
('user_pic_url', models.TextField()),
('name', models.TextField()),
('people', models.IntegerField()),
('create_date', models.DateField()),
],
options={
'db_table': 'attendance',
},
),
migrations.CreateModel(
name='Rsvp',
fields=[
('id', models.AutoField(serialize=False, verbose_name=b'id', primary_key=True)),
('message', models.TextField()),
('deadline', models.DateField()),
],
options={
'db_table': 'rsvp',
},
),
migrations.CreateModel(
name='UserRsvp',
fields=[
('id', models.AutoField(serialize=False, verbose_name=b'id', primary_key=True)),
('user', models.IntegerField()),
('rsvp', models.ForeignKey(to='rsvp.Rsvp')),
],
options={
'db_table': 'user_rsvp',
},
),
migrations.AddField(
model_name='attendance',
name='rsvp',
field=models.ForeignKey(to='rsvp.Rsvp'),
),
]
| mit | 7,234,744,672,930,323,000 | 29.415094 | 96 | 0.466501 | false |
lgfausak/sqlbridge | sqlbridge/scripts/cli.py | 1 | 4335 | #!/usr/bin/env python
###############################################################################
##
## Copyright (C) 2014 Greg Fausak
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
import sys, os, argparse

from twisted.python import log

from autobahn.twisted.wamp import ApplicationRunner
from autobahn.wamp import types

from sqlbridge.twisted.dbengine import DB
# http://stackoverflow.com/questions/3853722/python-argparse-how-to-insert-newline-the-help-text
class SmartFormatter(argparse.HelpFormatter):
def _split_lines(self, text, width):
# this is the RawTextHelpFormatter._split_lines
if text.startswith('R|'):
return text[2:].splitlines()
return argparse.HelpFormatter._split_lines(self, text, width)
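# Usage note (added; not part of the original script): prefixing an argparse
# help string with "R|" makes SmartFormatter keep embedded newlines, e.g.
#
#     p.add_argument('--mode', help='R|first line\nsecond line')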
def run():
prog = os.path.basename(__file__)
def_wsocket = 'ws://127.0.0.1:8080/ws'
def_user = 'db'
def_secret = 'dbsecret'
def_realm = 'realm1'
def_topic_base = 'com.db'
# http://stackoverflow.com/questions/3853722/python-argparse-how-to-insert-newline-the-help-text
p = argparse.ArgumentParser(description="db admin manager for autobahn", formatter_class=SmartFormatter)
p.add_argument('-w', '--websocket', action='store', dest='wsocket', default=def_wsocket,
help='web socket definition, default is: '+def_wsocket)
p.add_argument('-r', '--realm', action='store', dest='realm', default=def_realm,
help='connect to websocket using realm, default is: '+def_realm)
p.add_argument('-v', '--verbose', action='store_true', dest='verbose',
default=False, help='Verbose logging for debugging')
p.add_argument('-u', '--user', action='store', dest='user', default=def_user,
help='connect to websocket as user, default is: '+def_user)
p.add_argument('-s', '--secret', action='store', dest='password', default=def_secret,
help='users "secret" password')
p.add_argument('-e', '--engine', action='store', dest='engine', default=None,
help='if specified, a database engine will be attached.' +
' Note engine is rooted on --topic.' +
' Valid engine options are PG, MYSQL or SQLITE')
p.add_argument('-d', '--dsn', action='store', dest='dsn', default=None,
help='R|if specified the database in dsn will be connected and ready.\n' +
'dsns are unique to the engine being used. Valid examples:' +
'\n-----------' +
'\nPG: dbname=autobahn host=192.168.200.230 user=autouser password=testpass' +
'\nMYSQL: database=autobahn user=autouser password=passtest' +
'\nSQLITE: Z')
p.add_argument('-t', '--topic', action='store', dest='topic_base', default=def_topic_base,
help='if you specify --dsn then you will need a topic to root it on, the default ' + def_topic_base + ' is fine.')
args = p.parse_args()
if args.verbose:
log.startLogging(sys.stdout)
component_config = types.ComponentConfig(realm=args.realm)
ai = {
'auth_type':'wampcra',
'auth_user':args.user,
'auth_password':args.password
}
mdb = DB(config=component_config,
authinfo=ai,engine=args.engine,topic_base=args.topic_base,dsn=args.dsn, debug=args.verbose)
runner = ApplicationRunner(args.wsocket, args.realm)
runner.run(lambda _: mdb)
if __name__ == '__main__':
run()
| apache-2.0 | 7,169,285,442,915,159,000 | 42.787879 | 138 | 0.599539 | false |
bjura/pisak2 | pisak/resources.py | 1 | 1489 | import os.path
from PyQt5.QtCore import QObject, QStandardPaths, QDir, pyqtSlot, pyqtProperty
from .res import getRes
class Resources(QObject):
appDataDir = QStandardPaths.writableLocation(QStandardPaths.AppDataLocation)
soundFileExt = '.wav'
iconFileExt = '.svg'
homeAppDir = os.path.join(QStandardPaths.standardLocations(QStandardPaths.HomeLocation)[0], '.pisak')
@pyqtSlot(str, result=str)
def getSoundPath(self, soundName):
soundFile = soundName + self.soundFileExt
path = os.path.join(self.appDataDir, soundFile)
if not os.path.exists(path):
path = getRes(os.path.join('sounds', soundFile))
return path
@pyqtSlot(str, result=str)
def getIconPath(self, iconName):
iconFile = iconName + self.iconFileExt
return getRes(os.path.join('icons', iconFile))
@pyqtSlot(str, result=str)
def getResource(self, item):
return getRes(item)
@pyqtProperty(str, constant=True)
def symbolsFolder(self):
return getRes('symbols')
@pyqtProperty(str, constant=True)
def moviesDir(self):
return QStandardPaths.standardLocations(QStandardPaths.MoviesLocation)[0]
@pyqtProperty(str, constant=True)
def musicDir(self):
return QStandardPaths.standardLocations(QStandardPaths.MusicLocation)[0]
@pyqtProperty(str, constant=True)
def photosDir(self):
return QStandardPaths.standardLocations(QStandardPaths.PicturesLocation)[0]
| gpl-3.0 | 8,239,106,199,094,781,000 | 29.387755 | 105 | 0.703156 | false |
nitehawck/dem | dem/dependency/url.py | 1 | 1463 | import os
import wget
from dem.dependency.archive import ArchiveInstaller
from dem.project.reader import Config
class UrlInstaller:
def __init__(self, project, packages, cache):
self._packages = packages
self._project = project
self._download_directory = os.path.join('.devenv', project, 'downloads')
self._config = Config({'remote-locations': [self._download_directory]})
self._cache = cache
def install_packages(self):
installed_packages = []
for p in self._packages:
if 'url' in p:
file_extension = UrlInstaller._get_ext(p['url'])
file_name = '{}-{}{}'.format(p['name'], p['version'], file_extension)
local_file = os.path.join(self._download_directory, file_name)
if not os.path.exists(local_file) and not self._cache.is_package_installed(p['name'], p['version']):
print('Fetching {}'.format(p['url']))
wget.download(p['url'], out=local_file)
print()
installed_packages.append(p)
local_installer = ArchiveInstaller(self._project, self._config, installed_packages, self._cache)
return local_installer.install_packages()
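    # Expected package entry shape (added comment; the field names come from
    # the lookups above, the values are illustrative):
    #     {'name': 'toolchain', 'version': '1.2.0',
    #      'url': 'https://example.com/toolchain-1.2.0.tar.gz'}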
@staticmethod
def _get_ext(url):
root, ext = os.path.splitext(url.split('/')[-1])
if ext in ['.gz', '.bz2']:
ext = os.path.splitext(root)[1] + ext
return ext
| mit | 6,745,438,510,528,294,000 | 37.5 | 116 | 0.583049 | false |
edx/edx-val | edxval/management/commands/verify_pact.py | 1 | 1671 | """
Management command to verify VEM pact.
"""
import logging
from django.conf import settings
from django.core.management.base import BaseCommand
from pact import Verifier
log = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Management command to verify VAL provider pacts.
Example Usage: python manage.py verify_pact --settings=edxval.settings.test
It should be run explicitly with test or test-only settings because the pact verification requires
some database operations that should not occur in a production related database.
"""
help = "Verify the VAL provider pacts"
default_opts = {
'broker_url': getattr(settings, 'PACT_BROKER_BASE_URL', None),
'publish_version': '1',
'publish_verification_results': getattr(settings, 'PUBLISH_VERIFICATION_RESULTS', False)
}
def verify_pact(self):
"""
Verify the pacts with Pact-verifier.
"""
verifier = Verifier(
provider='VAL',
provider_base_url=settings.PROVIDER_BASE_URL
)
if self.default_opts['broker_url']:
verifier.verify_with_broker(
**self.default_opts,
verbose=False,
provider_states_setup_url=settings.PROVIDER_STATES_URL,
)
else:
verifier.verify_pacts(
'edxval/pacts/vem-val.json',
provider_states_setup_url=settings.PROVIDER_STATES_URL,
)
def handle(self, *args, **options):
log.info("Starting pact verification")
self.verify_pact()
log.info('Pact verification completed')
| agpl-3.0 | 2,924,957,101,037,660,700 | 29.381818 | 102 | 0.622382 | false |
rcmorano/gecosws-config-assistant | firstboot/validation.py | 1 | 1642 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# This file is part of Guadalinex
#
# This software is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this package; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
__author__ = "Antonio Hernández <[email protected]>"
__copyright__ = "Copyright (C) 2011, Junta de Andalucía <[email protected]>"
__license__ = "GPL-2"
import re
def is_empty(value):
ret = not(len(value) > 0)
#print '> %s :: %s' % (ret, value)
return ret
def is_qname(value):
m = re.search(r'^[a-zA-Z]([\w-]|\.)+$', value)
#print '> %s :: %s' % (m != None, value)
return m != None
def is_domain(value):
m = re.search(r'[a-zA-Z0-9]{3,}\.[a-z]{2,3}$', value)
return m != None
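# Examples for the pattern above (added; illustrative): 'example.com' and
# 'ab12.org' match, while 'ab.com' does not because at least three
# alphanumeric characters are required before the dot.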
def is_url(value):
m = re.search(r'^(http|https|ftp|ftps|file|ldap|ldaps)://(.+)', value)
#print '> %s :: %s' % (m != None, value)
return m != None
def is_auth_type(value):
return value == 'ldap' or value == 'ad'
def is_password(value):
""" Maybe not necesary """
return True
| gpl-2.0 | 2,564,201,980,378,884,000 | 31.156863 | 83 | 0.656098 | false |
Micronaet/micronaet-product | inventory_field/inventory.py | 1 | 3807 | # -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class ProductProductInventoryCategory(orm.Model):
""" Model name: ProductProductInventoryCategory
"""
_name = 'product.product.inventory.category'
_description = 'Inventory category'
def force_no_code_category(self, cr, uid, ids, context=None):
        ''' Assign this category to every product without a default code
'''
product_pool = self.pool.get('product.product')
current_proxy = self.browse(cr, uid, ids, context=context)[0]
product_ids = product_pool.search(cr, uid, [
('default_code', '=', False)], context=context)
product_pool.write(cr, uid, product_ids, {
'inventory_category_id': current_proxy.id,
}, context=context)
return True
def force_code_category(self, cr, uid, ids, context=None):
        ''' Assign this category to every product whose default code
        appears in the "code" text field (one code per line)
'''
product_pool = self.pool.get('product.product')
current_proxy = self.browse(cr, uid, ids, context=context)[0]
code = current_proxy.code
code_list = code.split('\n')
product_ids = product_pool.search(cr, uid, [
('default_code', 'in', code_list)], context=context)
product_pool.write(cr, uid, product_ids, {
'inventory_category_id': current_proxy.id,
}, context=context)
return True
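    # Example content for the "code" text field (added comment; the codes are
    # illustrative) -- one default_code per line, as consumed by
    # code.split('\n') above:
    #     PROD001
    #     PROD002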
_columns = {
'name': fields.char(
'Name', size=64, required=True),
'note': fields.text('Note'),
'code': fields.text('Force code'),
}
class ProductProduct(orm.Model):
''' Link product to inventory purchase order
'''
_inherit = 'product.product'
_columns = {
# TODO No more use:
'inventory_start': fields.float(
'Inventory start', digits=(16, 3)),
'inventory_delta': fields.float(
'Inventory delta', digits=(16, 3),
help='Delta inventory for post correction retroactive'),
'inventory_date': fields.date('Inventory date'),
# XXX Inventory report (keep in isolated module?)
'inventory_category_id': fields.many2one(
'product.product.inventory.category', 'Inventory category'),
'inventory_excluded': fields.boolean('Inventory excluded'),
}
| agpl-3.0 | 1,827,273,622,032,613,400 | 35.961165 | 79 | 0.628054 | false |
powderblock/PyBad-Translator | translate.py | 1 | 3795 | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# <[email protected]> wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return to Terry Yin.
#
# The idea of this is borrowed from <[email protected]>'s brilliant work
# https://github.com/soimort/google-translate-cli
# He uses "THE BEER-WARE LICENSE". That's why I use it too. So you can buy him a
# beer too.
# ----------------------------------------------------------------------------
'''
This is a simple, yet powerful command line translator with google translate
behind it. You can also use it as a Python module in your code.
'''
import re
import json
from textwrap import wrap
try:
import urllib2 as request
from urllib import quote
except ImportError:
from urllib import request
from urllib.parse import quote
class Translator:
string_pattern = r"\"(([^\"\\]|\\.)*)\""
    match_string = re.compile(
r"\,?\["
+ string_pattern + r"\,"
+ string_pattern + r"\,"
+ string_pattern + r"\,"
+ string_pattern
+r"\]")
def __init__(self, to_lang, from_lang='auto'):
self.from_lang = from_lang
self.to_lang = to_lang
def translate(self, source):
self.source_list = wrap(source, 1000, replace_whitespace=False)
return ' '.join(self._get_translation_from_google(s) for s in self.source_list)
def _get_translation_from_google(self, source):
json5 = self._get_json5_from_google(source)
return self._unescape(self._get_translation_from_json5(json5))
def _get_translation_from_json5(self, content):
result = ""
pos = 2
while True:
m = self.match_string.match(content, pos)
if not m:
break
result += m.group(1)
pos = m.end()
return result
def _get_json5_from_google(self, source):
escaped_source = quote(source, '')
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.168 Safari/535.19'}
req = request.Request(
url="http://translate.google.com/translate_a/t?client=t&ie=UTF-8&oe=UTF-8"
+"&sl=%s&tl=%s&text=%s" % (self.from_lang, self.to_lang, escaped_source)
, headers = headers)
r = request.urlopen(req)
return r.read().decode('utf-8')
def _unescape(self, text):
return json.loads('"%s"' % text)
def main():
import argparse
import sys
import locale
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('texts', metavar='text', nargs='+',
help='a string to translate(use "" when it\'s a sentence)')
parser.add_argument('-t', '--to', dest='to_lang', type=str, default='zh',
help='To language (e.g. zh, zh-TW, en, ja, ko). Default is zh.')
parser.add_argument('-f', '--from', dest='from_lang', type=str, default='auto',
help='From language (e.g. zh, zh-TW, en, ja, ko). Default is auto.')
args = parser.parse_args()
    translator = Translator(from_lang=args.from_lang, to_lang=args.to_lang)
for text in args.texts:
translation = translator.translate(text)
if sys.version_info.major == 2:
            translation = translation.encode(locale.getpreferredencoding())
sys.stdout.write(translation)
sys.stdout.write("\n")
if __name__ == "__main__":
main()
| mit | -4,403,544,834,307,570,700 | 39.37234 | 156 | 0.566535 | false |
trichter/sito | bin/events_Tocopilla.py | 1 | 1889 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# by TR
from sito import Events
from sito.data import IPOC
from obspy.core import UTCDateTime as UTC
import matplotlib.pyplot as plt
import logging
logging.basicConfig()
lat_Toc = -22.170
lon_Toc = -69.971
#events = Events.load(min_datetime="2007-01-01", max_datetime="2008-12-31",
# min_latitude=lat_Toc - 1, max_latitude=lat_Toc + 1,
# min_longitude=lon_Toc - 1., max_longitude=lon_Toc + 1,
# max_results=1000000,
# min_magnitude=None, max_magnitude=None)
#events.write('/home/richter/Data/events/events_Tocopilla.txt')
events = Events.read('/home/richter/Data/events/events_Tocopilla.txt')
events.pick(latitude=lat_Toc, longitude=lon_Toc, minval=0, maxval=100., indegree=False)
#events.plot(lat_Toc, lon_Toc, circles=(1,))
method = 'filter2-20_1bit'
#method = 'filter0.005_1bit'
data = IPOC(xcorr_append='/Tocopilla/' + method, use_local_LVC=True)
t1 = UTC('2007-11-01')
t2 = UTC('2007-12-01')
period = 1800
correlation = ('PB03Z', 'PB03Z')
stream = data.readX(correlation, t1, t2, period=period)
#stream.filter2(2, 20)
stream.setHIForHist(events, period=period)
figsize = (8.267, 11.693)[::-1]
add_to_title = '_againfilter_zoom1'
#save = data.getPlotXCorr(correlation, 'all') + '_againfilter_zoom1 + events.png'
save = False
stream.plotXcorr(0, 50, imshow=True, use_dlognorm=True, filter=(2, 20),
fig=plt.figure(figsize=figsize),
figtitle='station ' + add_to_title,
dateformatter='%y-%m-%d %Hh%M', save=save, show=True, #dateformatter=' % y % b'
plotinfo=('num_events',), #plotinfo_width=0.1, #@UnusedVariable
plotlabel=('# events',), #@UnusedVariable
plotinfowhere=('right',))
plt.show()
| mit | -358,035,704,147,035,970 | 36.039216 | 112 | 0.618846 | false |
SNET-Entrance/Entrance-UM | src/cm/models.py | 1 | 9801 | import uuid
from bootstrap import db, all_attr, policy_mode
from bootstrap.models import Ext, AttrAuth
from um.models import Contact, Attribute
class Container(db.Model, Ext):
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.Text, nullable=False) # name of the container
path = db.Column(db.Text, nullable=False) # path to the container file
type = db.Column(db.Integer, nullable=False) # used by the policy enforcement strategies
files = db.relationship('File', backref='container', cascade='all, delete-orphan', lazy='joined')
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
def __init__(self, name, path, type, user_id):
self.name = name
self.path = path
self.type = type
self.user_id = user_id
# reencrypts a container considering the new attributes sets of contacts
# called whenever a contact is modified
def reencrypt(self, user):
container = Container(self.name, self.path, self.type, user.id)
container.files = self.files
for f in container.files:
f.policy = Policy.generate(f.policy_text, user)
db.session.add(f)
db.session.delete(self)
db.session.add(container)
db.session.commit()
out = container.dict()
out['files'] = list()
for f in container.files:
out['files'].append(f.dict())
aa_param = dict()
aa_param['files'] = list()
for f in out['files']:
aa_param['files'].append({
"path": f['path'],
"type": f['type'],
"policy": f['policy']
})
aa_param['outfile'] = container.path
aa_param['overwriteOutfile'] = True
aa_response = AttrAuth.encrypt_container(container, aa_param)
if aa_response is None:
return None
return True
class File(db.Model, Ext):
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
path = db.Column(db.Text, nullable=False) # path to the file
type = db.Column(db.Text, nullable=False) # always PABE14
policy = db.Column(db.Text, nullable=False) # the actual ABE policy
policy_text = db.Column(db.Text, nullable=False) # the specified policy of the user
container_id = db.Column(db.Integer, db.ForeignKey('container.id'))
def __init__(self, path, type, policy, policy_text, container_id):
self.path = path
self.type = type
self.policy = policy
self.policy_text = policy_text
self.container_id = container_id
class Policy(object):
def __init__(self):
pass
@staticmethod
def evaluate(policy, user):
operators = ['<', '>', '=', '<=', '>='] # list of operators to identify and evaluate numeric attributes
users = Contact.query.filter_by(user_id=user.id).all() # load all contacts of a user
literals = [x.split(':') for x in policy.split(',')] # parse submitted policy
excludes = set(
[x[0].replace('NOT ', '') for x in literals if x[0].startswith('NOT ')]) # identify explicit excludes
allowed_users = [] # initialize the authorized set of contacts
for user in users: # iterate over users list
attrs = set([a.display_name for a in user.attributes]) # extract attributes
for literal in literals: # for each user iterate through literals
if any(any(x in s for s in literal) for x in operators): # if any literal has an operator
condition = True
for l in literal:
operator = ''
for o in operators:
if o in l:
operator = o
if operator == '':
if l not in attrs:
condition = False
continue
else:
continue
attr, cond = l.split(operator)
present = False
for a in attrs:
if attr in a:
present = True
value = a.split('=')[1]
if not eval(
value + operator + cond): # check if the literal is met by the contact's attribute value
condition = False
if not present:
condition = False
if condition: # if condition is met check if user is in exclude list
if len(excludes.intersection(attrs)) == 0:
allowed_users.append(user)
else: # if no numeric attribute is used in literals
if set(literal).issubset(attrs): # simply check if attributes set of contact is subset of literals
if len(excludes.intersection(
attrs)) == 0: # and ensure again that contact is not in exclude list
allowed_users.append(user)
return list(set([a for a in allowed_users])) # return a distinct set of authorized contacts
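    # Illustrative policy string (attribute names are hypothetical):
    #   'finance:manager,board,NOT intern'
    # authorises contacts holding both 'finance' and 'manager', or holding
    # 'board', while any contact carrying the excluded attribute 'intern'
    # is dropped from the result.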
@staticmethod
def convert(policy):
# convert a policy into an actual ABE policy
return ' OR '.join(['(' + ' AND '.join(l) + ')' for l in [x.split(':') for x in policy.split(',')]])
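    # Example (attribute names are hypothetical):
    #   Policy.convert('finance:manager,hr')  ->  '(finance AND manager) OR (hr)'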
@staticmethod
def generate(policy, current_user):
        # generate a policy based on a user-specified policy, dependent on the policy_mode
if policy == all_attr: # if policy is the default policy simply use it
return policy
else:
# otherwise ...
users = Policy.evaluate(policy, current_user) # compute the authorized set of contacts
if policy_mode == 0:
if 'NOT' not in policy: # normal ABE only work if no excludes have been used
return Policy.convert(policy)
                # TODO: else case - what to do if excludes have been used
elif policy_mode == 1: # case: static ciphertext strategy
uuid_attr = 'AAAAA' + str(uuid.uuid4()).replace('-', '') # generate a unique attribute
attr = Attribute(uuid_attr, True, current_user.id) # store this attribute permanently
db.session.add(attr)
db.session.commit()
# and assign it to the authorized contacts
for user in users:
user.attributes.append(attr)
db.session.add(user)
aa_response = AttrAuth.add_attr(user, attr, current_user) # AA communication
if aa_response is None:
db.session.rollback()
db.session.commit()
return uuid_attr
elif policy_mode == 2: # case: static secret key strategy
return ' OR '.join([c.identity for c in
users]) # generate disjunction of identity attribute of authorized contacts
@staticmethod
def check_for(contact, user):
# check_for() is used to determine ciphertexts that have to be updated after a contact has been modified
container = Container.query.filter_by(user_id=user.id)
for c in container: # iterate over all container of a user
if c.type == 0: # case: no strategy used - do nothing
pass
elif c.type == 1: # case: static ciphertext strategy used
for f in c.files: # iterate over all files - for each file
allowed_users = Policy.evaluate(f.policy_text, user) # evaluate the policy of the file
uuid = Attribute.query.filter_by(name=f.policy).first()
if contact not in allowed_users and uuid in contact.attributes: # if contact is not in set of allowed_users after modification
contact.attributes.remove(uuid) # remove uuid attribute from the contact
db.session.add(contact)
db.session.commit()
aa_response = AttrAuth.delete_attr(contact, uuid, user) # inform AA
if aa_response is None:
db.session.rollback()
elif contact in allowed_users and uuid not in contact.attributes: # if contact is in set of allowed_users but has not the corresponding attribute
contact.attributes.append(uuid) # assign attribute to the contact
db.session.add(contact)
db.session.commit()
aa_response = AttrAuth.add_attr(contact, uuid, user) # inform AA
if aa_response is None:
db.session.rollback()
elif c.type == 2: # case: static secret key strategy used
for f in c.files: # iterate through files again
allowed_users = Policy.evaluate(f.policy_text, user) # compute authorized users
                    if contact not in allowed_users and contact.identity in f.policy:  # if user is not intended to have access to the resource after modification
c.reencrypt(user) # reencrypt
if contact in allowed_users and contact.identity not in f.policy: # if user is intended to have access to the resource after the modification
c.reencrypt(user) # reencrypt
# TODO: make this easier
| apache-2.0 | 6,430,938,400,690,601,000 | 49.261538 | 166 | 0.550352 | false |
PXke/invenio | invenio/ext/assets/extensions.py | 1 | 8258 | # -*- coding: utf-8 -*-
## This file is part of Invenio.
## Copyright (C) 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.ext.assets.extensions
-----------------------------
This module contains custom `Jinja2` extensions.
"""
from operator import itemgetter
from jinja2 import nodes
from jinja2.ext import Extension
ENV_PREFIX = '_collected_'
def prepare_tag_bundle(cls, tag):
"""
    Construct a function that returns the collected data specified in a
    jinja2 template like `{% <tag> <value> %}`, in the correct order.
Here is an example that shows the final order when template
inheritance is used::
example.html
------------
{%\ extends 'page.html' %}
{%\ css 'template2.css' %}
{%\ css 'template3.css' %}
page.html
---------
{%\ css 'template1.css' %}
{{ get_css_bundle() }}
Output:
-------
[template1.css, template2.css, template3.css]
"""
def get_bundle(key=None, iterate=False):
def _get_data_by_key(data_, key_):
return map(itemgetter(1), filter(lambda (k, v): k == key_, data_))
data = getattr(cls.environment, ENV_PREFIX+tag)
if iterate:
bundles = sorted(set(map(itemgetter(0), data)))
def _generate_bundles():
for bundle in bundles:
cls._reset(tag, bundle)
yield cls.environment.new_bundle(tag,
_get_data_by_key(data,
bundle),
bundle)
return _generate_bundles()
else:
if key is not None:
data = _get_data_by_key(data, key)
else:
bundles = sorted(set(map(itemgetter(0), data)))
data = [f for bundle in bundles
for f in _get_data_by_key(data, bundle)]
cls._reset(tag, key)
return cls.environment.new_bundle(tag, data, key)
return get_bundle
class CollectionExtension(Extension):
"""
CollectionExtension adds new tags `css` and `js` and functions
``get_css_bundle`` and ``get_js_bundle`` for jinja2 templates.
The ``new_bundle`` method is used to create bundle from
list of file names collected using `css` and `js` tags.
Example: simple case
{% css 'css/invenio.css' %}
{% js 'js/jquery.js' %}
{% js 'js/invenio.js' %}
...
{% assets get_css_bundle() %}
<link rel="stylesheet" type="text/css" href="{{ ASSET_URL }}"></link>
{% endassets %}
{% assets get_js_bundle() %}
In template, use {{ ASSETS_URL }} for printing file URL.
{% endassets %}
Example: named bundles
record.html:
{% extend 'page.html' %}
{% css 'css/may-vary.css' %}
# default bundle name can be changed in application factory
# app.jinja_env.extend(default_bundle_name='90-default')
{% css 'css/record.css', '10-record' %}
{% css 'css/form.css', '10-record' %}
page.html:
{% css 'css/bootstrap.css', '00-base' %}
{% css 'css/invenio.css', '00-base' %}
...
{% for bundle in get_css_bundle(iterate=True) %}
{% assets bundle %}
<link rel="stylesheet" type="text/css" href="{{ ASSET_URL }}"></link>
{% endassets %}
{% endfor %}
Output:
<link rel="stylesheet" type="text/css" href="/css/00-base.css"></link>
<link rel="stylesheet" type="text/css" href="/css/10-record.css"></link>
<link rel="stylesheet" type="text/css" href="/css/90-default.css"></link>
Note:
    If you decide not to use asset bundles but to print stylesheet
    and script html tags directly, you MUST define:
```
_app.jinja_env.extend(
use_bundle = False,
collection_templates = {
'css': '<link rel="stylesheet" type="text/css" href="/%s"></link>',
'js': '<script type="text/javascript" src="/%s"></script>'
})
```
Both callable and string with '%s' are allowed in
``collection_templates``.
"""
tags = set(['css', 'js'])
def __init__(self, environment):
super(CollectionExtension, self).__init__(environment)
ext = dict(('get_%s_bundle' % tag, prepare_tag_bundle(self, tag))
for tag in self.tags)
environment.extend(
default_bundle_name='10-default',
use_bundle=True,
collection_templates=dict((tag, lambda x: x) for tag in self.tags),
new_bundle=lambda tag, collection, name: collection,
**ext)
for tag in self.tags:
self._reset(tag)
def _reset(self, tag, key=None):
"""
Empty list of used scripts.
"""
if key is None:
setattr(self.environment, ENV_PREFIX+tag, [])
else:
data = filter(lambda (k, v): k != key,
getattr(self.environment, ENV_PREFIX+tag))
setattr(self.environment, ENV_PREFIX+tag, data)
def _update(self, tag, value, key, caller=None):
"""
Update list of used scripts.
"""
try:
values = getattr(self.environment, ENV_PREFIX+tag)
values.append((key, value))
except:
values = [(key, value)]
setattr(self.environment, ENV_PREFIX+tag, values)
return ''
def parse(self, parser):
"""
Parse Jinja statement tag defined in `self.tags` (default: css, js).
        This actually tries to build the corresponding html script tag
or collect script file name in jinja2 environment variable.
If you use bundles it is important to call ``get_css_bundle``
or ``get_js_bundle`` in template after all occurrences of
script tags (e.g. {% css ... %}, {% js ...%}).
"""
tag = parser.stream.current.value
lineno = next(parser.stream).lineno
default_bundle_name = u"%s" % (self.environment.default_bundle_name)
default_bundle_name.encode('utf-8')
bundle_name = nodes.Const(default_bundle_name)
#parse filename
if parser.stream.current.type != 'block_end':
value = parser.parse_expression()
# get first optional argument: bundle_name
if parser.stream.skip_if('comma'):
bundle_name = parser.parse_expression()
if isinstance(bundle_name, nodes.Name):
bundle_name = nodes.Name(bundle_name.name, 'load')
else:
value = parser.parse_tuple()
args = [nodes.Const(tag), value, bundle_name]
# Return html tag with link to corresponding script file.
if self.environment.use_bundle is False:
value = value.value
if callable(self.environment.collection_templates[tag]):
node = self.environment.collection_templates[tag](value)
else:
node = self.environment.collection_templates[tag] % value
return nodes.Output([nodes.MarkSafeIfAutoescape(nodes.Const(node))])
# Call :meth:`_update` to collect names of used scripts.
return nodes.CallBlock(self.call_method('_update', args=args,
lineno=lineno),
[], [], '')
| gpl-2.0 | -8,043,150,594,907,580,000 | 35.061135 | 85 | 0.553039 | false |
gr33ndata/dysl | dysl/corpora/corpuslib/train.py | 1 | 3488 | import os
import codecs
import time
from datetime import datetime
class Train:
def __init__(self, root=''):
# Setting root directory for training data
if root:
self.root = root
self.using_builtin_training = False
else:
#self.root = 'corpora/corpus-esaren'
self.root = __file__.rsplit('/',2)[0] + '/corpus-esaren'
self.using_builtin_training = True
#print self.root
self.root_depth = len(self.root.split('/'))
# Set of languages
self.lang_set = set()
# Temp Training Samples
# These are sample adding in run-time
# self.temp_train_data = {
# 'en': ['hello world', 'this is sparta'],
# 'es': ['hasta la vista', 'hola amigos']
# }
self.temp_train_data = {}
def get_corpus(self):
self.corpus = []
self.load()
return self.corpus
def get_corpus_path(self):
return self.root
def get_lang_set(self):
return list(self.lang_set)
def add(self, text=u'', lang=''):
if self.using_builtin_training:
print "Warning: Cannot add training samples to builtin training-set."
return
elif not text or not lang:
raise Exception("Error: No input text given!")
if not lang in self.temp_train_data:
self.temp_train_data[lang] = [text]
else:
self.temp_train_data[lang].append(text)
def save(self, domain='', filename=''):
if self.using_builtin_training:
raise Exception("Failed to save data, use custom training-set instead.")
if not domain:
timestamp = datetime.now().strftime("%y%m%d%H%M%S")
folder_path = self.root + '/batchTS' + timestamp
else:
folder_path = self.root + '/' + domain
try:
os.mkdir(folder_path)
        except OSError:
pass
for lang in self.temp_train_data:
lang_folder_path = folder_path + '/' + lang
try:
os.mkdir(lang_folder_path)
            except OSError:
pass
if not filename:
filename_and_path = lang_folder_path + '/file.txt'
else:
filename_and_path = lang_folder_path + '/' + filename
f = codecs.open(filename_and_path, mode='w', encoding='utf-8')
for sample in self.temp_train_data[lang]:
text = sample + u'\n'
f.write(text)
f.close()
def get_last_modified(self):
# Get corpus last modified timestamp
if self.using_builtin_training:
return 0
else:
return os.path.getmtime(self.root)
def visit(self, arg, dirname, names):
#print dirname
path = dirname.split('/')
#print 'path:', path, len(path)
if len(path) == self.root_depth + 2:
lang = path[-1]
# Update Language Set
self.lang_set.add(lang)
# Ignore hidden files
names = [name for name in names if not name.startswith('.')]
for name in names:
self.corpus.append((lang, dirname + '/' + name))
#print lang, path, dirname + '/' + name
def load(self):
os.path.walk(self.root, self.visit, '') | mit | -8,860,003,400,784,678,000 | 28.820513 | 84 | 0.509461 | false |
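# Expected on-disk corpus layout consumed by visit()/load() (names are
# illustrative):
#
#     <root>/
#         <domain-or-batch>/
#             en/
#                 file.txt
#             es/
#                 file.txt
#
# visit() treats directories exactly two levels below the root as language
# codes and collects every non-hidden file found inside them.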
ondrokrc/gramps | gramps/gen/filters/rules/_hasldsbase.py | 1 | 2527 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2008 Jerome Rapinat
# Copyright (C) 2008 Benny Malengier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# gen.filters.rules/_HasLDSBase.py
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ...const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from . import Rule
#-------------------------------------------------------------------------
#
# HasLDSBase
#
#-------------------------------------------------------------------------
class HasLDSBase(Rule):
"""Rule that checks for object with a LDS event"""
labels = [ _('Number of instances:'), _('Number must be:')]
name = 'Objects with LDS events'
description = "Matches objects with LDS events"
category = _('General filters')
def prepare(self, db):
# things we want to do just once, not for every handle
if self.list[1] == 'less than':
self.count_type = 0
elif self.list[1] == 'greater than':
self.count_type = 2
else:
self.count_type = 1 # "equal to"
self.userSelectedCount = int(self.list[0])
def apply(self, db, obj):
count = len( obj.get_lds_ord_list())
if self.count_type == 0: # "less than"
return count < self.userSelectedCount
elif self.count_type == 2: # "greater than"
return count > self.userSelectedCount
# "equal to"
return count == self.userSelectedCount
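    # Illustrative rule values (hypothetical): with self.list set to
    # ['2', 'greater than'] the rule matches objects that have more than
    # two LDS ordinances attached.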
| gpl-2.0 | -4,215,482,488,620,058,600 | 34.591549 | 79 | 0.540166 | false |
att-comdev/drydock | drydock_provisioner/control/health.py | 1 | 4369 | # Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import falcon
import json
from drydock_provisioner.control.base import StatefulResource
from drydock_provisioner.drivers.node.maasdriver.actions.node import ValidateNodeServices
from drydock_provisioner.objects.healthcheck import HealthCheck
from drydock_provisioner.objects.healthcheck import HealthCheckMessage
from drydock_provisioner.objects.fields import ActionResult
import drydock_provisioner.objects.fields as hd_fields
import drydock_provisioner.policy as policy
class HealthResource(StatefulResource):
"""
    Simple health check; returns an empty response body when Drydock is healthy.
"""
def __init__(self, orchestrator=None, **kwargs):
"""Object initializer.
:param orchestrator: instance of Drydock orchestrator
"""
super().__init__(**kwargs)
self.orchestrator = orchestrator
def on_get(self, req, resp):
"""
        Returns 204 when healthy, otherwise 503, with no response body.
"""
hc = HealthCheckCombined(
state_manager=self.state_manager,
orchestrator=self.orchestrator,
extended=False)
return hc.get(req, resp)
class HealthExtendedResource(StatefulResource):
"""
    Extended health check; returns a response body describing Drydock's health.
"""
def __init__(self, orchestrator=None, **kwargs):
"""Object initializer.
:param orchestrator: instance of Drydock orchestrator
"""
super().__init__(**kwargs)
self.orchestrator = orchestrator
@policy.ApiEnforcer('physical_provisioner:health_data')
def on_get(self, req, resp):
"""
Returns 200 on success, otherwise 503, with a response body.
"""
hc = HealthCheckCombined(
state_manager=self.state_manager,
orchestrator=self.orchestrator,
extended=True)
return hc.get(req, resp)
class HealthCheckCombined(object):
"""
Returns Drydock health check status.
"""
def __init__(self, state_manager=None, orchestrator=None, extended=False):
"""Object initializer.
:param orchestrator: instance of Drydock orchestrator
"""
self.state_manager = state_manager
self.orchestrator = orchestrator
self.extended = extended
def get(self, req, resp):
"""
Returns updated response with body if extended.
"""
health_check = HealthCheck()
# Test database connection
try:
now = self.state_manager.get_now()
if now is None:
raise Exception('None received from database for now()')
except Exception as ex:
hcm = HealthCheckMessage(
msg='Unable to connect to database', error=True)
health_check.add_detail_msg(msg=hcm)
# Test MaaS connection
try:
task = self.orchestrator.create_task(
action=hd_fields.OrchestratorAction.Noop)
maas_validation = ValidateNodeServices(task, self.orchestrator,
self.state_manager)
maas_validation.start()
if maas_validation.task.get_status() == ActionResult.Failure:
raise Exception('MaaS task failure')
except Exception as ex:
hcm = HealthCheckMessage(
msg='Unable to connect to MaaS', error=True)
health_check.add_detail_msg(msg=hcm)
if self.extended:
resp.body = json.dumps(health_check.to_dict())
if health_check.is_healthy() and self.extended:
resp.status = falcon.HTTP_200
elif health_check.is_healthy():
resp.status = falcon.HTTP_204
else:
resp.status = falcon.HTTP_503
| apache-2.0 | 5,482,039,274,584,251,000 | 33.674603 | 89 | 0.642939 | false |
thedemz/python-gems | scrypto.py | 1 | 5153 | #https://bitbucket.org/mhallin/py-scrypt/src
#pip3.4 install scrypt
#pip3.4 install pycrypto
import struct
from binascii import b2a_base64 as e64
from binascii import a2b_base64 as d64
import scrypt
import Crypto.Random
random = Crypto.Random.new().read
#from passlib.utils import consteq
def consteq(left, right):
"""Check two strings/bytes for equality.
This is functionally equivalent to ``left == right``,
but attempts to take constant time relative to the size of the righthand input.
The purpose of this function is to help prevent timing attacks
during digest comparisons: the standard ``==`` operator aborts
after the first mismatched character, causing it's runtime to be
proportional to the longest prefix shared by the two inputs.
If an attacker is able to predict and control one of the two
inputs, repeated queries can be leveraged to reveal information about
the content of the second argument. To minimize this risk, :func:`!consteq`
is designed to take ``THETA(len(right))`` time, regardless
of the contents of the two strings.
It is recommended that the attacker-controlled input
be passed in as the left-hand value.
.. warning::
This function is *not* perfect. Various VM-dependant issues
(e.g. the VM's integer object instantiation algorithm, internal unicode representation, etc),
may still cause the function's run time to be affected by the inputs,
though in a less predictable manner.
*To minimize such risks, this function should not be passed* :class:`unicode`
*inputs that might contain non-* ``ASCII`` *characters*.
.. versionadded:: 1.6
"""
# NOTE:
# resources & discussions considered in the design of this function:
# hmac timing attack --
# http://rdist.root.org/2009/05/28/timing-attack-in-google-keyczar-library/
# python developer discussion surrounding similar function --
# http://bugs.python.org/issue15061
# http://bugs.python.org/issue14955
# validate types
if isinstance(left, str):
        if not isinstance(right, str):
raise TypeError("inputs must be both unicode or both bytes")
is_py3_bytes = False
elif isinstance(left, bytes):
if not isinstance(right, bytes):
raise TypeError("inputs must be both unicode or both bytes")
is_py3_bytes = True #Python3
else:
raise TypeError("inputs must be both unicode or both bytes")
# do size comparison.
# NOTE: the double-if construction below is done deliberately, to ensure
# the same number of operations (including branches) is performed regardless
# of whether left & right are the same size.
same_size = (len(left) == len(right))
if same_size:
# if sizes are the same, setup loop to perform actual check of contents.
tmp = left
result = 0
if not same_size:
# if sizes aren't the same, set 'result' so equality will fail regardless
# of contents. then, to ensure we do exactly 'len(right)' iterations
# of the loop, just compare 'right' against itself.
tmp = right
result = 1
    # run constant-time string comparison
# TODO: use izip instead (but first verify it's faster than zip for this case)
if is_py3_bytes:
for l,r in zip(tmp, right):
result |= l ^ r
else:
for l,r in zip(tmp, right):
result |= ord(l) ^ ord(r)
return result == 0
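# Usage sketch (byte strings are illustrative): the comparison takes time
# proportional to len(right) even when the first byte already differs.
#
#     >>> consteq(b"digest-a", b"digest-a")
#     True
#     >>> consteq(b"digest-a", b"digest-b")
#     False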
_PARAMS = struct.Struct("!BBBB")  # four standard-size (1-byte) unsigned chars
def pack_verifier( logN,r,p, salt, hash):
"""
Return a bytes object containing the values v1, v2, ... packed according to the format string fmt, fmt is !BBBB.
The arguments must match the values required by the format exactly.
"""
packed = _PARAMS.pack(logN,r,p,len(salt)) + salt + hash
return packed
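# Resulting verifier layout (sizes assume the defaults used in
# make_verifier() below: 16-byte salt, 16-byte hash -> 36 bytes total):
#
#     byte 0       logN
#     byte 1       r
#     byte 2       p
#     byte 3       len(salt)
#     bytes 4-19   salt
#     bytes 20-35  hash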
def unpack_verifier(verifier):
logN,r,p,salt_bytes = _PARAMS.unpack_from(verifier)
i = _PARAMS.size+salt_bytes
salt = verifier[_PARAMS.size:i]
hash = verifier[i:]
return logN,r,p,salt,hash
def make_verifier( password, logN=14, r=8, p=1, salt_bytes=16,hash_bytes=16):
"""
    Factory function: returns a packed verifier as a bytes object.
"""
salt = random(salt_bytes)
hash = scrypt.hash(password,salt,1<<logN,r,p,hash_bytes)
return pack_verifier(logN,r,p,salt,hash)
def verify_password( password, verifier ):
logN,r,p,salt,hash = unpack_verifier(verifier)
newhash = scrypt.hash(password,salt,1<<logN,r,p,len(hash))
return consteq(newhash,hash)
def ev( verifier ):
"""
    Create an ev (base64-encoded) string from a verifier.
"""
return e64(verifier).strip()
def get_verifier( ev ):
"""
    Create a verifier from an ev string.
"""
return d64(ev)
if __name__=="__main__":
v = make_verifier( "password" )
print(verify_password( "password", v))#True
print(verify_password( "Password", v))#False
ev = e64(v).strip()
print(ev)#b'DggBECLLfyJNB/HlbT9m6nByPq0334rbufeNV191YNNWOImZ'
# store ev in database
print( verify_password("password",d64(ev)))#True
| mit | 2,875,160,194,208,963,600 | 34.784722 | 116 | 0.668737 | false |
forScie/RAAED | RAAEDServer.py | 1 | 5949 | #!/usr/bin/env python3
# RAAED Server software: v1.0
# A GUI RAAED Server
# Detects a reverse SSH connection bound to port 22 from an RAAED Client.
#
# DESCRIPTION
# The server is designed to continually check for the presence of a reverse SSH session on port 22.
# The GUI will then reflect the presence of the reverse SSH session.
# A Shell in the context of the reverse SSH session can be launched through clicking a button.
#
# SSH REQUIREMENTS
# This script requires an SSH service to be active and running locally.
# /etc/ssh/sshd_config should be configured to allow public key authentication, and operate on port 443.
# a valid private RSA key for the RAAED Client should be placed in ~/.ssh (id_rsa)
# a valid public key with an associated private key on the RAAED Client should be located in ~/.ssh (id_rsa.pub)
#
# THIRD PARTY DEPENDENCIES
# pip3 install psutil
# pip3 install appjar
#
# AUTHOR: forScience ([email protected])
#
# INDENT: TABS
import sys
import os
import threading
import subprocess
import psutil
import time
from appJar import gui
# Checks if port 22 is listening on localhost.
# Called in a thread at launch. Runs in the background.
# If the the port is open then update GUI to reflect change
def connection_check():
    # loop infinitely (in background)
while True:
time.sleep(2)
# retrieve tuples of all local IPv4 connections (in form of [IP, Port])
local = psutil.net_connections('inet4')
        connect = False  # reset the flag on each iteration of the loop
        # iterate through local IPv4 tuples
for address in local:
(ip, port) = address.laddr # assign each tuple to local variables
# check each IP for localhost and Port for 22
if ip == '127.0.0.1' and port == 22:
connect = True # set flag to indicate connection
# if flag has been set then connection exists
if connect:
# only update GUI if port 22 on localhost is found
gui_update("connected")
else:
# otherwise GUI continues to indicate disconnection
gui_update("disconnected")
# Updates GUI to show client connection state
# Called by connection_check() depending on local port activity
# Updates indicator and text to reflect state
def gui_update(update):
if update == "connected":
# update gui to reflect connection
# update indicator
app.setLabel("indicator", "Connected") # update GUI indicator text
app.setLabelBg("indicator", "green") # update GUI indicator colour
# update text
app.setLabelFg("text", "green") # update GUI text colour
text = "Connected to client" # create explanation string
app.setLabel("text", text) # update GUI with explanation string
elif update == "disconnected":
# update gui to reflect disconnection
# update indicator
app.setLabel("indicator", "Disconnected") # update GUI indicator text
app.setLabelBg("indicator", "red") # update GUI indicator colour
# update text
app.setLabelFg("text", "red") # update GUI text colour
text = "No connection from client" # create explanation string
app.setLabel("text", text) # update GUI with explanation string
elif update == "list targets":
# update gui with targets from client
# open retrieved network list file
        with open('/root/Desktop/network.list', 'r') as listfile:
            iplist = listfile.read()  # read the whole file into a string
# display targets in gui
app.setMessage('enumeration', iplist)
# Spawns an SSH session in a new shell
# gnome-terminal only works within the GNOME DE
def spawn_shell(btn):
# terminal remains open after command issued with '-x'
subprocess.call(['gnome-terminal', '-x', 'ssh', 'localhost'])
# Connects via scp to RAAED Client and retrieves a list of
# IPs enumerated on the Clients local network.
# The list is displayed in the GUI
def get_enum(btn):
# define local and remote list locations
localdest = "/root/Desktop/network.list"
remotedest = "/root/Desktop/network.list"
# retrieve enumeration txt files from client
sshcopy = "scp root@localhost:" + remotedest + " " + localdest # build ssh copy command
copyresult = subprocess.call(sshcopy, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) # execute scp command
# if scp was successful
if copyresult == 0:
# update gui and delete localdest file
gui_update('list targets')
delfile = "rm " + localdest # build command to delete local network.list file
subprocess.call(delfile, shell=True) # delete file
# Entry
if __name__ == "__main__":
# put connection_check() in a thread and background
thread = threading.Thread(target=connection_check, args=())
thread.daemon = True # daemonised for clean closure, ok to kill with main
thread.start() # start daemon thread
# <<<<<<<<<<<<<<<<<<<<<<<<<<<<
# GUI ELEMENTS
# >>>>>>>>>>>>>>>>>>>>>>>>>>>>
# create the GUI & set a title
app = gui("RAAED Server")
app.setBg("white")
app.setFont(12, font="Arial")
app.setSticky("nesw")
app.setResizable(canResize=False)
# RAAED CONNECTION STATUS
app.startLabelFrame("Connection Status")
app.setLabelFramePadding("Connection Status", 4, 8)
# connection indicator
app.addLabel("indicator", "Disconnected", 0, 0)
app.setLabelBg("indicator", "red")
app.setLabelFg("indicator", "white")
app.setLabelPadding("indicator", 2, 5)
# explanation text
app.addLabel("text", "No connection from client", 0, 1)
app.setLabelFg("text", "red")
app.setLabelPadding("text", 4, 8)
# end frame
app.stopLabelFrame()
# SPAWN SHELL AND RETRIEVE ENUM BUTTONS
app.startLabelFrame("")
app.setLabelFramePadding("", 4, 8)
# spawn shell button
app.addButton("Spawn Shell", spawn_shell, 0, 0)
# retrieve enumeration button
app.addButton("Show Remote Hosts", get_enum, 0, 1)
# end bottom frame
app.stopLabelFrame()
# REMOTE TARGET LIST
app.startLabelFrame("Remote Network Hosts")
app.setLabelFramePadding("Remote Network Hosts", 4, 8)
# spawn shell button
app.addEmptyMessage("enumeration")
# end bottom frame
app.stopLabelFrame()
# start GUI
app.go() | gpl-3.0 | -6,022,877,912,289,613,000 | 31.872928 | 126 | 0.723651 | false |
nel215/py-sae | dataset.py | 1 | 1536 | #coding: utf-8
import requests
import os.path
import pickle
def get_binary_dataset():
# 0-1 dataset
dataset = requests.get('https://archive.ics.uci.edu/ml/machine-learning-databases/spect/SPECT.train').text
dataset = map(lambda row: row.split(','), dataset.split('\n'))
titles = dataset[0]
dataset = dataset[1:]
dataset = filter(lambda data: len(data) > 1, dataset)
features = map(lambda data: map(float, data[:-1]), dataset)
labels = map(lambda data: map(float, data[-1:]), dataset)
return (features, labels)
def get_mushroom_dataset():
filename = './tmp/mushroom.dat'
if not os.path.exists(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
if os.path.isfile(filename):
f = open(filename, 'r')
return pickle.load(f)
dataset = requests.get('http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/mushrooms').text
num_feature = 112
features = []
labels = []
dataset = filter(lambda data: len(data)>1, dataset.split('\n'))
for data in dataset:
data = data.split(' ')
labels.append([1] if data[0] == '2' else [0])
feature = [0 for f in xrange(num_feature)]
for [bin, _] in map(lambda d: d.split(':'), filter(lambda d: len(d)>1, data[1:])):
feature[int(bin)-1] = 1
features.append(feature)
result = (features, labels)
f = open(filename, 'w')
pickle.dump(result, f)
f.close()
return result
if __name__=='__main__':
get_mushroom_dataset()
| mit | -6,360,485,346,207,833,000 | 31.680851 | 110 | 0.617188 | false |
MGautier/security-sensor | trunk/version-1-0/webapp/secproject/secproject/settings.py | 1 | 3617 | """
Django settings for secproject project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3m&&ig6ksy_fy=sc4n8)foq&*-%ug*la@5d@8m*u1s%fcs2rsz'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'gunicorn',
'react',
'rest_framework',
'secapp.apps.SecappConfig',
]
REACT = {
'RENDER': not DEBUG,
'RENDER_URL': 'http://127.0.0.1:8001/render',
}
# REST_FRAMEWORK = {
# 'DEFAULT_RENDERER_CLASSES': (
# 'rest_framework.renderers.JSONRenderer',
# ),
# 'DEFAULT_PARSER_CLASSES': (
# 'rest_framework.parsers.JSONParser',
# )
# }
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'secproject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'secproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'database.db'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'es-ES'
TIME_ZONE = 'Europe/Madrid'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "secapp/static/")
| mit | -4,357,623,697,613,626,400 | 25.021583 | 91 | 0.679292 | false |
JulyKikuAkita/PythonPrac | cs15211/PrintBinaryTree.py | 1 | 6156 | __source__ = 'https://leetcode.com/problems/print-binary-tree/'
# Time: O(h * 2^h), where h is the tree height (the grid has h rows and 2^h - 1 columns)
# Space: O(h * 2^h) for the output grid
#
# Description: Leetcode # 655. Print Binary Tree
#
# Print a binary tree in an m*n 2D string array following these rules:
#
# The row number m should be equal to the height of the given binary tree.
# The column number n should always be an odd number.
# The root node's value (in string format) should be put in the exact middle of the first row where it can be put.
# The column and the row where the root node belongs will separate the rest space into two parts
# (left-bottom part and right-bottom part).
# You should print the left subtree in the left-bottom part and print the right subtree in the right-bottom part.
# The left-bottom part and the right-bottom part should have the same size.
# Even if one subtree is none while the other is not,
# you don't need to print anything for the none subtree
# but still need to leave the space as large as that for the other subtree.
# However, if two subtrees are none, then you don't need to leave space for both of them.
#
# Each unused space should contain an empty string "".
# Print the subtrees following the same rules.
# Example 1:
# Input:
# 1
# /
# 2
# Output:
# [["", "1", ""],
# ["2", "", ""]]
# Example 2:
# Input:
# 1
# / \
# 2 3
# \
# 4
# Output:
# [["", "", "", "1", "", "", ""],
# ["", "2", "", "", "", "3", ""],
# ["", "", "4", "", "", "", ""]]
# Example 3:
# Input:
# 1
# / \
# 2 5
# /
# 3
# /
# 4
# Output:
#
# [["", "", "", "", "", "", "", "1", "", "", "", "", "", "", ""]
# ["", "", "", "2", "", "", "", "", "", "", "", "5", "", "", ""]
# ["", "3", "", "", "", "", "", "", "", "", "", "", "", "", ""]
# ["4", "", "", "", "", "", "", "", "", "", "", "", "", "", ""]]
# Note: The height of binary tree is in the range of [1, 10].
#
# Companies
# Poynt
# Related Topics
# Tree
#
import unittest
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# 28ms 99.57%
class Solution(object):
def printTree(self, root):
"""
:type root: TreeNode
:rtype: List[List[str]]
"""
if not root: return [""]
def depth(root):
if not root: return 0
return max(depth(root.left), depth(root.right)) + 1
d = depth(root)
self.res = [[""] * (2**d - 1) for _ in xrange(d)]
def helper(node, d, pos):
self.res[-d - 1][pos] = str(node.val)
if node.left: helper(node.left, d - 1, pos - 2**(d - 1))
if node.right: helper(node.right, d - 1, pos + 2**(d - 1))
helper(root, d - 1, 2**(d - 1) - 1)
return self.res
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/print-binary-tree/solution/
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode(int x) { val = x; }
* }
*/
# 4ms 92.65%
class Solution {
public List<List<String>> printTree(TreeNode root) {
List<List<String>> res = new LinkedList<>();
int height = root == null ? 1 : getHeight(root);
int row = height;
int col = (1 << height) - 1; // Math.pow(2, height) - 1
List<String> list = new ArrayList<>();
for (int i = 0; i < col; i++) list.add("");
for (int i = 0; i < row ; i++) res.add(new ArrayList<>(list));
fillColumn(root, res, 0, row, 0, col - 1);
return res;
}
public void fillColumn(TreeNode root, List<List<String>> res, int row, int totalRows, int start, int end) {
if (row == totalRows || root == null) return;
int mid = (int) start + (end - start) / 2;
res.get(row).set(mid, root.val + "");
fillColumn(root.left, res, row + 1, totalRows, start, mid - 1);
fillColumn(root.right, res, row + 1, totalRows, mid + 1, end);
}
public int getHeight(TreeNode root) {
if (root == null) return 0;
return 1 + Math.max(getHeight(root.left), getHeight(root.right));
}
}
Approach #2 Using queue(BFS)[Accepted]
# 4ms 92.65%
class Solution {
class Params {
Params(TreeNode n, int ii, int ll, int rr) {
root = n;
i = ii;
l = ll;
r = rr;
}
TreeNode root;
int i, l, r;
}
public List<List<String>> printTree(TreeNode root) {
int height = getHeight(root);
String[][] res = new String[height][(1 << height) - 1];
for (String[] arr: res) Arrays.fill(arr, "");
List<List<String>> ans = new ArrayList<>();
fill(res, root, 0, 0, res[0].length);
for (String[] arr: res) ans.add(Arrays.asList(arr));
return ans;
}
public void fill(String[][] res, TreeNode root, int i, int l, int r) {
Queue<Params> queue = new LinkedList();
queue.add(new Params(root, 0, 0, res[0].length));
while (!queue.isEmpty()) {
Params p = queue.remove();
res[p.i][(p.l + p.r) / 2] = "" + p.root.val;
if (p.root.left != null)
queue.add(new Params(p.root.left, p.i + 1, p.l, (p.l + p.r) / 2));
if (p.root.right != null)
queue.add(new Params(p.root.right, p.i + 1, (p.l + p.r + 1) / 2, p.r));
}
}
public int getHeight(TreeNode root) {
Queue<TreeNode> queue = new LinkedList();
queue.add(root);
int height = 0;
while (!queue.isEmpty()) {
height++;
Queue<TreeNode> temp = new LinkedList();
while (!queue.isEmpty()) {
TreeNode node = queue.remove();
if (node.left != null)
temp.add(node.left);
if (node.right != null)
temp.add(node.right);
}
queue = temp;
}
return height;
}
}
''' | apache-2.0 | -7,820,780,622,699,906,000 | 30.09596 | 113 | 0.515757 | false |
jwacalex/MULTEX-EAST-PoS-Tagger | MTEDownloader.py | 1 | 3734 | """
Downloader for multex east corpus.
"""
import os
from os.path import expanduser, abspath
import sys
import urllib
import zipfile
import nltk.data
isCustomPath = False
def main():
download()
def download():
try:
__download__()
except KeyboardInterrupt:
print("\nDiscarded download due to keyboard interrupt.\n")
def __getFilePath__():
global isCustomPath
paths = list(zip(range(len(nltk.data.path)+1), nltk.data.path + ["custom"]))
pathStr = ""
try:
pathStr = raw_input("Where should the corpus be saved?" + str(paths) + " [%s]: " % 0)
    except NameError:
pathStr = input("Where should the corpus be saved?" + str(paths) + " [%s]: " % 0)
pathNum = None
if pathStr:
pathNum = int(pathStr)
else:
pathNum = 0
if (pathNum == len(nltk.data.path)):
isCustomPath = True
try:
return abspath(raw_input(
"Please input the directory where you want the files to be saved (NO backslash at the end): ")) + "/"
        except NameError:
return abspath(input(
"Please input the directory where you want the files to be saved (NO backslash at the end): ")) + "/"
else:
return abspath(nltk.data.path[pathNum]) + "/corpora/"
def __download__():
filePath = __getFilePath__()
finished = False
try:
if not os.path.exists(filePath):
os.makedirs(filePath)
except EnvironmentError:
print("Could not create or write to file")
else:
# download zip archive
with open(filePath + "mte_teip5.zip", "wb") as f:
url = "https://www.clarin.si/repository/xmlui/bitstream/handle/11356/1043/MTE1984-ana.zip"
try:
request = urllib.urlopen(url)
            except AttributeError:
request = urllib.request.urlopen(url)
chunk_read_write(f, request, report_hook=chunk_report)
print("Download finished")
# handle "invalid" zip format from clarin.si
with open(filePath + "mte_teip5.zip", "r+b") as f:
content = f.read()
pos = content.rfind(
b'\x50\x4b\x05\x06') # reverse find: this string of bytes is the end of the zip's central directory.
if pos > 0:
                f.seek(pos + 20)  # +20: see section V.I of the ZIP file format specification
f.truncate()
f.write(b'\x00\x00') # Zip file comment length: 0 byte length; tell zip applications to stop reading.
f.seek(0)
# extract zip archive
print("Extracting files...")
with zipfile.ZipFile(filePath + "mte_teip5.zip", "r") as z:
z.extractall(filePath)
os.rename(filePath + "MTE1984-ana", filePath + "mte_teip5")
print("Done")
def chunk_report(bytes_so_far, chunk_size, total_size):
percent = float(bytes_so_far) / total_size
percent = round(percent * 100, 2)
sys.stdout.write("Downloaded %d of %d bytes (%0.2f%%)\r" %
(bytes_so_far, total_size, percent))
if bytes_so_far >= total_size:
sys.stdout.write('\n')
def chunk_read_write(fileHandle, response, chunk_size=8192, report_hook=None):
try:
total_size = response.info().getheader('Content-Length').strip()
    except AttributeError:
total_size = response.getheader('Content-Length').strip()
total_size = int(total_size)
bytes_so_far = 0
while 1:
chunk = response.read(chunk_size)
fileHandle.write(chunk)
bytes_so_far += len(chunk)
if not chunk:
break
if report_hook:
report_hook(bytes_so_far, chunk_size, total_size)
return bytes_so_far
if __name__ == "__main__":
main()
| lgpl-3.0 | 4,770,773,840,565,016,000 | 28.171875 | 118 | 0.580343 | false |
bak1an/django | django/utils/timesince.py | 1 | 2806 | import calendar
import datetime
from django.utils.html import avoid_wrapping
from django.utils.timezone import is_aware, utc
from django.utils.translation import gettext, ngettext_lazy
TIMESINCE_CHUNKS = (
(60 * 60 * 24 * 365, ngettext_lazy('%d year', '%d years')),
(60 * 60 * 24 * 30, ngettext_lazy('%d month', '%d months')),
(60 * 60 * 24 * 7, ngettext_lazy('%d week', '%d weeks')),
(60 * 60 * 24, ngettext_lazy('%d day', '%d days')),
(60 * 60, ngettext_lazy('%d hour', '%d hours')),
(60, ngettext_lazy('%d minute', '%d minutes'))
)
def timesince(d, now=None, reversed=False):
"""
Takes two datetime objects and returns the time between d and now
as a nicely formatted string, e.g. "10 minutes". If d occurs after now,
then "0 minutes" is returned.
Units used are years, months, weeks, days, hours, and minutes.
Seconds and microseconds are ignored. Up to two adjacent units will be
displayed. For example, "2 weeks, 3 days" and "1 year, 3 months" are
possible outputs, but "2 weeks, 3 hours" and "1 year, 5 days" are not.
Adapted from
http://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
"""
# Convert datetime.date to datetime.datetime for comparison.
if not isinstance(d, datetime.datetime):
d = datetime.datetime(d.year, d.month, d.day)
if now and not isinstance(now, datetime.datetime):
now = datetime.datetime(now.year, now.month, now.day)
if not now:
now = datetime.datetime.now(utc if is_aware(d) else None)
if reversed:
d, now = now, d
delta = now - d
    # Deal with leap years by subtracting the number of leap days
leapdays = calendar.leapdays(d.year, now.year)
if leapdays != 0:
if calendar.isleap(d.year):
leapdays -= 1
elif calendar.isleap(now.year):
leapdays += 1
delta -= datetime.timedelta(leapdays)
# ignore microseconds
since = delta.days * 24 * 60 * 60 + delta.seconds
if since <= 0:
# d is in the future compared to now, stop processing.
return avoid_wrapping(gettext('0 minutes'))
for i, (seconds, name) in enumerate(TIMESINCE_CHUNKS):
count = since // seconds
if count != 0:
break
result = avoid_wrapping(name % count)
if i + 1 < len(TIMESINCE_CHUNKS):
# Now get the second item
seconds2, name2 = TIMESINCE_CHUNKS[i + 1]
count2 = (since - (seconds * count)) // seconds2
if count2 != 0:
result += gettext(', ') + avoid_wrapping(name2 % count2)
return result
def timeuntil(d, now=None):
"""
Like timesince, but returns a string measuring the time until
the given time.
"""
return timesince(d, now, reversed=True)
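# Illustrative behaviour (dates chosen for the example, not from a test):
#
#     >>> import datetime
#     >>> d = datetime.datetime(2005, 1, 1)
#     >>> now = datetime.datetime(2005, 1, 18, 5, 0)
#     >>> timesince(d, now)
#     '2\xa0weeks, 3\xa0days'
#
# avoid_wrapping() joins each count and unit with a non-breaking space.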
| bsd-3-clause | -6,810,944,789,384,965,000 | 34.974359 | 101 | 0.629722 | false |
seece/cbpp | minifyoperation.py | 1 | 1252 | import re
from util import *
from operation import Operation, OperationResult
class Replacement:
def __init__(self, regex, substitution):
self.regex = regex
self.substitution = substitution
class MinifyOperation(Operation):
def __init__(self):
self.inMultilineComment = False
pass
def apply(self, line, state):
result = OperationResult(line, False)
if not state.args.minify:
return result
l = stripComments(line)
strings = scanForStrings(l)
commentStart = len(l)
stringRegex = r'(("[^"]+")|(|[^"]*?)([^\s]*?))?'
comments = r'(?P<comment>(|(\'|//)*$))'
def string(s):
if not s:
return ""
return s
def replace(m, group):
if checkIfInsideString(m.start(group), strings):
return string(m.group(0))
return string(m.group(1)) + string(m.group(group))
ops = []
ops.append(Replacement(re.compile(r'' + stringRegex + '\s*(?P<op>[=+\-*/\><,\^]{1,2})\s*'), lambda m: replace(m, "op")))
ops.append(Replacement(re.compile(r'' + stringRegex + r'(?<=\D)(0)(?P<digit>\.\d+)'), lambda m: replace(m, "digit") ))
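        # Illustrative effect of the two rules above (outside of string
        # literals): 'a = 0.5 + b' -> 'a=.5+b'; spacing around operators is
        # removed and the leading zero of a decimal literal is dropped,
        # while matches inside strings are skipped via checkIfInsideString().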
#l = l.lstrip("\t")
for o in ops:
l = o.regex.sub(o.substitution, l)
l = l.rstrip("\r\n")
result.line = strInsert(result.line, 0, commentStart-1, l)
return result
| mit | -1,057,761,309,854,611,000 | 23.54902 | 122 | 0.620607 | false |
SummerLW/Perf-Insight-Report | dashboard/dashboard/bisect_stats.py | 1 | 3742 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""URL endpoints to show bisect stats."""
import datetime
import json
from dashboard import layered_cache
from dashboard import request_handler
from dashboard import utils
_BISECT_STATS_CACHE_KEY = 'bisect_stats'
_NUM_POINTS_TO_DISPLAY = 52
_BISECT_STAT_SERIES_NAME = ['win', 'linux', 'mac', 'android']
class BisectStatsHandler(request_handler.RequestHandler):
"""URL endpoint to get stats about bisect runs."""
def get(self):
"""Renders the UI with charts."""
bisect_stats = layered_cache.GetExternal(_BISECT_STATS_CACHE_KEY)
if not bisect_stats:
bisect_stats = {
'failed': [],
'completed': []
}
series_data = {
'failed': bisect_stats['failed'],
'completed': bisect_stats['completed']
}
total_series_data = {
'failed': self._GetTotalBisectRunSeries(bisect_stats['failed']),
'completed': self._GetTotalBisectRunSeries(bisect_stats['completed'])
}
self.RenderHtml('bisect_stats.html', {
'series_data': json.dumps(series_data),
'total_series_data': json.dumps(total_series_data),
})
def _GetTotalBisectRunSeries(self, series_map):
"""Sums up failed and completed bisect run series.
Args:
series_map: Dictionary of series names to list of data series.
Returns:
A list of data series.
"""
cropped_series_list = []
for key in series_map:
series = series_map[key]
cropped_series_list.append(series[len(series) - _NUM_POINTS_TO_DISPLAY:])
# Sum up series.
series_map = {}
for series in cropped_series_list:
for x_value, y_value in series:
if x_value not in series_map:
series_map[x_value] = y_value
else:
series_map[x_value] += y_value
result_list = []
for key in sorted(series_map):
result_list.append([key, series_map[key]])
return result_list
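# Sketch of the point-wise summing above (made-up values):
#
#   {'a': [[w1, 2], [w2, 3]], 'b': [[w1, 1]]}  ->  [[w1, 3], [w2, 3]]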
def UpdateBisectStats(bot_name, status):
"""Updates bisect run stat by bot name and status.
Bisect stats stored in a layered_cache entity have the form below. Each
tick is one week and count is the sum of failed or completed bisect runs.
{
'failed': {
bot_name: [[week_timestamp, count], [week_timestamp, count]],
},
'completed': {
bot_name: [[week_timestamp, count], [week_timestamp, count]],
}
}
Args:
bot_name: Name of the bisect bot.
status: Bisect status. Either 'failed' or 'completed'.
"""
  # TODO(chrisphan): Add stats for stalled bisects.
if status not in ['failed', 'completed']:
return
series_name = _GetSeriesNameFromBotName(bot_name)
week_timestamp = _GetLastMondayTimestamp()
bisect_stats = layered_cache.GetExternal(_BISECT_STATS_CACHE_KEY)
if not bisect_stats:
bisect_stats = {
'failed': {},
'completed': {},
}
series_map = bisect_stats[status]
if series_name not in series_map:
series_map[series_name] = [[week_timestamp, 1]]
else:
series = series_map[series_name]
if week_timestamp == series[-1][0]:
series[-1][1] += 1
else:
series.append([week_timestamp, 1])
layered_cache.SetExternal(_BISECT_STATS_CACHE_KEY, bisect_stats)
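# Illustrative call (a sketch, not part of this module): one completed run
# for a Linux bot increments, or appends, the current week's bucket:
#
#   UpdateBisectStats('linux_perf_bisect', 'completed')
#   # -> bisect_stats['completed']['linux'] ends with [week_timestamp, n]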
def _GetLastMondayTimestamp():
"""Get timestamp of 00:00 last Monday in milliseconds as an integer."""
today = datetime.date.today()
monday = today - datetime.timedelta(days=today.weekday())
return utils.TimestampMilliseconds(monday)
def _GetSeriesNameFromBotName(bot_name):
for series_name in _BISECT_STAT_SERIES_NAME:
if series_name in bot_name:
return series_name
return 'other'
| bsd-3-clause | -2,050,621,821,403,550,200 | 27.564885 | 79 | 0.656868 | false |
pytorch/fairseq | fairseq/modules/layer_norm.py | 1 | 1500 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
import torch.nn as nn
import torch.nn.functional as F
try:
from apex.normalization import FusedLayerNorm as _FusedLayerNorm
has_fused_layernorm = True
class FusedLayerNorm(_FusedLayerNorm):
@torch.jit.unused
def forward(self, x):
if not x.is_cuda:
return super().forward(x)
else:
with torch.cuda.device(x.device):
return super().forward(x)
except ImportError:
has_fused_layernorm = False
def LayerNorm(normalized_shape, eps=1e-5, elementwise_affine=True, export=False):
if torch.jit.is_scripting():
export = True
if not export and torch.cuda.is_available() and has_fused_layernorm:
return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)
class Fp32LayerNorm(nn.LayerNorm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def forward(self, input):
output = F.layer_norm(
input.float(),
self.normalized_shape,
self.weight.float() if self.weight is not None else None,
self.bias.float() if self.bias is not None else None,
self.eps,
)
return output.type_as(input)
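# Minimal usage sketch (assumed shapes, not part of fairseq itself):
#
#   ln = LayerNorm(512)                # fused kernel on CUDA when apex is present
#   y = ln(torch.randn(8, 16, 512))    # normalizes over the last dimension
#   fp32_ln = Fp32LayerNorm(512)       # computes in float32, casts back to input dtype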
| mit | 4,775,746,740,642,711,000 | 29 | 81 | 0.639333 | false |
allenlavoie/tensorflow | tensorflow/contrib/kfac/python/kernel_tests/estimator_test.py | 1 | 11914 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.contrib.kfac.estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.kfac.python.ops import estimator
from tensorflow.contrib.kfac.python.ops import layer_collection as lc
from tensorflow.contrib.kfac.python.ops import utils
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import training_util
_ALL_ESTIMATION_MODES = ["gradients", "empirical", "curvature_prop", "exact"]
class EstimatorTest(test.TestCase):
def setUp(self):
self._graph = ops.Graph()
with self._graph.as_default():
self.layer_collection = lc.LayerCollection()
self.inputs = random_ops.random_normal((2, 2), dtype=dtypes.float32)
self.weights = variable_scope.get_variable(
"w", shape=(2, 2), dtype=dtypes.float32)
self.bias = variable_scope.get_variable(
"b", initializer=init_ops.zeros_initializer(), shape=(2, 1))
self.output = math_ops.matmul(self.inputs, self.weights) + self.bias
# Only register the weights.
self.layer_collection.register_fully_connected(
params=(self.weights,), inputs=self.inputs, outputs=self.output)
self.outputs = math_ops.tanh(self.output)
self.targets = array_ops.zeros_like(self.outputs)
self.layer_collection.register_categorical_predictive_distribution(
logits=self.outputs, targets=self.targets)
def testEstimatorInitManualRegistration(self):
with self._graph.as_default():
# We should be able to build an estimator for only the registered vars.
estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection
)
# Check that we throw an error if we try to build an estimator for vars
# that were not manually registered.
with self.assertRaises(ValueError):
est = estimator.FisherEstimatorRoundRobin(
variables=[self.weights, self.bias],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection
)
est.make_ops_and_vars()
# Check that we throw an error if we don't include registered variables,
# i.e. self.weights
with self.assertRaises(ValueError):
est = estimator.FisherEstimatorRoundRobin(
variables=[],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection)
est.make_ops_and_vars()
@test.mock.patch.object(utils.SubGraph, "variable_uses", return_value=42)
def testVariableWrongNumberOfUses(self, mock_uses):
with self.assertRaises(ValueError):
est = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection)
est.make_ops_and_vars()
def testInvalidEstimationMode(self):
with self.assertRaises(ValueError):
est = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection,
estimation_mode="not_a_real_mode")
est.make_ops_and_vars()
def testGradientsModeBuild(self):
with self._graph.as_default():
est = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection,
estimation_mode="gradients")
est.make_ops_and_vars()
def testEmpiricalModeBuild(self):
with self._graph.as_default():
est = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection,
estimation_mode="empirical")
est.make_ops_and_vars()
def testCurvaturePropModeBuild(self):
with self._graph.as_default():
est = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection,
estimation_mode="curvature_prop")
est.make_ops_and_vars()
def testExactModeBuild(self):
with self._graph.as_default():
est = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
cov_ema_decay=0.1,
damping=0.2,
layer_collection=self.layer_collection,
estimation_mode="exact")
est.make_ops_and_vars()
def test_cov_update_thunks(self):
"""Ensures covariance update ops run once per global_step."""
with self._graph.as_default(), self.test_session() as sess:
fisher_estimator = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
layer_collection=self.layer_collection,
damping=0.2,
cov_ema_decay=0.0)
# Construct an op that executes one covariance update per step.
global_step = training_util.get_or_create_global_step()
(cov_variable_thunks, cov_update_op_thunks, _,
_) = fisher_estimator.create_ops_and_vars_thunks()
for thunk in cov_variable_thunks:
thunk()
cov_matrices = [
fisher_factor.get_cov()
for fisher_factor in self.layer_collection.get_factors()
]
cov_update_op = control_flow_ops.case(
[(math_ops.equal(global_step, i), thunk)
for i, thunk in enumerate(cov_update_op_thunks)])
increment_global_step = global_step.assign_add(1)
sess.run(variables.global_variables_initializer())
initial_cov_values = sess.run(cov_matrices)
# Ensure there's one update per covariance matrix.
self.assertEqual(len(cov_matrices), len(cov_update_op_thunks))
# Test is no-op if only 1 covariance matrix.
assert len(cov_matrices) > 1
for i in range(len(cov_matrices)):
# Compare new and old covariance values
new_cov_values = sess.run(cov_matrices)
is_cov_equal = [
np.allclose(initial_cov_value, new_cov_value)
for (initial_cov_value,
new_cov_value) in zip(initial_cov_values, new_cov_values)
]
num_cov_equal = sum(is_cov_equal)
# Ensure exactly one covariance matrix changes per step.
self.assertEqual(num_cov_equal, len(cov_matrices) - i)
# Run all covariance update ops.
sess.run(cov_update_op)
sess.run(increment_global_step)
def test_round_robin_placement(self):
"""Check if the ops and variables are placed on devices correctly."""
with self._graph.as_default():
fisher_estimator = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
layer_collection=self.layer_collection,
damping=0.2,
cov_ema_decay=0.0,
cov_devices=["/cpu:{}".format(i) for i in range(2)],
inv_devices=["/cpu:{}".format(i) for i in range(2)])
# Construct an op that executes one covariance update per step.
(cov_update_ops, _, inv_update_ops, _, _,
_) = fisher_estimator.make_ops_and_vars(scope="test")
self.assertEqual(cov_update_ops[0].device, "/device:CPU:0")
self.assertEqual(cov_update_ops[1].device, "/device:CPU:1")
self.assertEqual(inv_update_ops[0].device, "/device:CPU:0")
self.assertEqual(inv_update_ops[1].device, "/device:CPU:1")
cov_matrices = [
fisher_factor.get_cov()
for fisher_factor in self.layer_collection.get_factors()
]
inv_matrices = [
matrix
for fisher_factor in self.layer_collection.get_factors()
for matrix in fisher_factor._matpower_by_exp_and_damping.values()
]
self.assertEqual(cov_matrices[0].device, "/device:CPU:0")
self.assertEqual(cov_matrices[1].device, "/device:CPU:1")
# Inverse matrices need to be explicitly placed.
self.assertEqual(inv_matrices[0].device, "")
self.assertEqual(inv_matrices[1].device, "")
def test_inv_update_thunks(self):
"""Ensures inverse update ops run once per global_step."""
with self._graph.as_default(), self.test_session() as sess:
fisher_estimator = estimator.FisherEstimatorRoundRobin(
variables=[self.weights],
layer_collection=self.layer_collection,
damping=0.2,
cov_ema_decay=0.0)
# Construct op that updates one inverse per global step.
global_step = training_util.get_or_create_global_step()
(cov_variable_thunks, _, inv_variable_thunks,
inv_update_op_thunks) = fisher_estimator.create_ops_and_vars_thunks()
for thunk in cov_variable_thunks:
thunk()
for thunk in inv_variable_thunks:
thunk()
inv_matrices = [
matrix
for fisher_factor in self.layer_collection.get_factors()
for matrix in fisher_factor._matpower_by_exp_and_damping.values()
]
inv_update_op = control_flow_ops.case(
[(math_ops.equal(global_step, i), thunk)
for i, thunk in enumerate(inv_update_op_thunks)])
increment_global_step = global_step.assign_add(1)
sess.run(variables.global_variables_initializer())
initial_inv_values = sess.run(inv_matrices)
# Ensure there's one update per inverse matrix. This is true as long as
# there's no fan-in/fan-out or parameter re-use.
self.assertEqual(len(inv_matrices), len(inv_update_op_thunks))
      # Test is no-op if only 1 inverse matrix.
assert len(inv_matrices) > 1
# Assign each covariance matrix a value other than the identity. This
# ensures that the inverse matrices are updated to something different as
# well.
cov_matrices = [
fisher_factor.get_cov()
for fisher_factor in self.layer_collection.get_factors()
]
sess.run([
cov_matrix.assign(2 * linalg_ops.eye(int(cov_matrix.shape[0])))
for cov_matrix in cov_matrices
])
for i in range(len(inv_matrices)):
# Compare new and old inverse values
new_inv_values = sess.run(inv_matrices)
is_inv_equal = [
np.allclose(initial_inv_value, new_inv_value)
for (initial_inv_value,
new_inv_value) in zip(initial_inv_values, new_inv_values)
]
num_inv_equal = sum(is_inv_equal)
# Ensure exactly one inverse matrix changes per step.
self.assertEqual(num_inv_equal, len(inv_matrices) - i)
# Run all inverse update ops.
sess.run(inv_update_op)
sess.run(increment_global_step)
if __name__ == "__main__":
test.main()
| apache-2.0 | 4,237,093,489,579,139,600 | 37.807818 | 80 | 0.650999 | false |
stuart-knock/tvb-framework | tvb_test/core/base_testcase.py | 1 | 9311 | # -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Calin Pavel <[email protected]>
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
import os
import shutil
from types import FunctionType
from tvb.basic.config.settings import TVBSettings as cfg
from tvb.basic.logger.builder import get_logger
from tvb.core.utils import get_matlab_executable
from tvb.core.entities.storage import dao
from tvb.core.entities.storage.session_maker import SessionMaker
from tvb.core.entities import model
from tvb.core.entities.model_manager import reset_database
from tvb.core.services.initializer import initialize
from tvb.core.services.operation_service import OperationService
LOGGER = get_logger(__name__)
MATLAB_EXECUTABLE = get_matlab_executable()
def init_test_env():
"""
This method prepares all necessary data for tests execution
"""
default_mlab_exe = cfg.MATLAB_EXECUTABLE
cfg.MATLAB_EXECUTABLE = get_matlab_executable()
reset_database()
initialize(["tvb.config", "tvb_test"], load_xml_events=False)
cfg.MATLAB_EXECUTABLE = default_mlab_exe
def transactional_test(func, callback=None):
"""
A decorator to be used in tests which makes sure all database changes are reverted
at the end of the test.
"""
if func.__name__.startswith('test_'):
def dec(*args, **kwargs):
session_maker = SessionMaker()
cfg.ALLOW_NESTED_TRANSACTIONS = True
default_dir = cfg.CURRENT_DIR
default_mlab_exe = cfg.MATLAB_EXECUTABLE
cfg.MATLAB_EXECUTABLE = get_matlab_executable()
session_maker.start_transaction()
try:
try:
if hasattr(args[0], 'setUpTVB'):
args[0].setUpTVB()
result = func(*args, **kwargs)
finally:
if hasattr(args[0], 'tearDownTVB'):
args[0].tearDownTVB()
args[0].delete_project_folders()
finally:
session_maker.rollback_transaction()
session_maker.close_transaction()
cfg.ALLOW_NESTED_TRANSACTIONS = False
cfg.MATLAB_EXECUTABLE = default_mlab_exe
cfg.CURRENT_DIR = default_dir
if callback is not None:
callback(*args, **kwargs)
return result
return dec
else:
return func
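# Usage sketch (hypothetical test case; normally this decorator is applied
# implicitly through TransactionalTestMeta below):
#
#   class MyTest(BaseTestCase):
#       @transactional_test
#       def test_something(self):
#           ...  # all database changes are rolled back afterwards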
class TransactionalTestMeta(type):
"""
New MetaClass.
"""
def __new__(mcs, classname, bases, class_dict):
"""
Called when a new class gets instantiated.
"""
new_class_dict = {}
for attr_name, attribute in class_dict.items():
if (type(attribute) == FunctionType and not (attribute.__name__.startswith('__')
and attribute.__name__.endswith('__'))):
if attr_name.startswith('test_'):
attribute = transactional_test(attribute)
if attr_name in ('setUp', 'tearDown'):
new_class_dict[attr_name + 'TVB'] = attribute
else:
new_class_dict[attr_name] = attribute
else:
new_class_dict[attr_name] = attribute
return type.__new__(mcs, classname, bases, new_class_dict)
# Following code is executed once / tests execution to reduce time.
if "TEST_INITIALIZATION_DONE" not in globals():
init_test_env()
TEST_INITIALIZATION_DONE = True
class BaseTestCase(unittest.TestCase):
"""
This class should implement basic functionality which
is common to all TVB tests.
"""
EXCLUDE_TABLES = ["ALGORITHMS", "ALGORITHM_GROUPS", "ALGORITHM_CATEGORIES", "PORTLETS",
"MAPPED_INTERNAL__CLASS", "MAPPED_MAPPED_TEST_CLASS"]
def assertEqual(self, expected, actual, message=""):
super(BaseTestCase, self).assertEqual(expected, actual,
message + " Expected %s but got %s." % (expected, actual))
def clean_database(self, delete_folders=True):
"""
Deletes data from all tables
"""
self.cancel_all_operations()
LOGGER.warning("Your Database content will be deleted.")
try:
session = SessionMaker()
for table in reversed(model.Base.metadata.sorted_tables):
# We don't delete data from some tables, because those are
# imported only during introspection which is done one time
if table.name not in self.EXCLUDE_TABLES:
try:
session.open_session()
con = session.connection()
LOGGER.debug("Executing Delete From Table " + table.name)
con.execute(table.delete())
session.commit()
except Exception, e:
# We cache exception here, in case some table does not exists and
# to allow the others to be deleted
LOGGER.warning(e)
session.rollback()
finally:
session.close_session()
LOGGER.info("Database was cleanup!")
except Exception, excep:
LOGGER.warning(excep)
raise
# Now if the database is clean we can delete also project folders on disk
if delete_folders:
self.delete_project_folders()
dao.store_entity(model.User(cfg.SYSTEM_USER_NAME,
None, None, True, None))
def cancel_all_operations(self):
"""
To make sure that no running operations are left which could make some other
test started afterwards to fail, cancel all operations after each test.
"""
LOGGER.info("Stopping all operations.")
op_service = OperationService()
operations = self.get_all_entities(model.Operation)
for operation in operations:
op_service.stop_operation(operation.id)
def delete_project_folders(self):
"""
This method deletes folders for all projects from TVB folder.
This is done without any check on database. You might get projects in DB but no folder for them on disk.
"""
if os.path.exists(cfg.TVB_STORAGE):
for current_file in os.listdir(cfg.TVB_STORAGE):
full_path = os.path.join(cfg.TVB_STORAGE, current_file)
if current_file != "db_repo" and os.path.isdir(full_path):
shutil.rmtree(full_path, ignore_errors=True)
def get_all_entities(self, entity_type):
"""
Retrieve all entities of a given type."""
result = []
try:
session = SessionMaker()
session.open_session()
result = session.query(entity_type).all()
except Exception, excep:
LOGGER.warning(excep)
finally:
session.close_session()
return result
def get_all_datatypes(self):
"""Return all DataType entities in DB or []."""
return self.get_all_entities(model.DataType)
def reset_database(self):
init_test_env()
class TransactionalTestCase(BaseTestCase):
"""
This class makes sure that any test case it contains is ran in a transactional
environment and a rollback is issued at the end of that transaction. This should
improve performance for most cases.
WARNING! Do not use this is any test class that has uses multiple threads to do
dao related operations since that might cause errors/leave some dangling sessions.
"""
__metaclass__ = TransactionalTestMeta
| gpl-2.0 | 3,642,143,029,564,304,400 | 38.28692 | 112 | 0.608528 | false |
sadig/DC2 | components/dc2-admincenter/dc2/admincenter/apps/main.py | 1 | 3768 | # -*- coding: utf-8 -*-
#
# (DC)² - DataCenter Deployment Control
# Copyright (C) 2010, 2011, 2012, 2013, 2014 Stephan Adig <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import sys
try:
import web
except ImportError as e:
print(e)
print("You need to install web.py")
sys.exit(1)
try:
from dc2.admincenter.globals import CSS_FILES
from dc2.admincenter.globals import JS_LIBS
from dc2.admincenter.globals import logger
except ImportError as e:
print(e)
print("You are missing the necessary DC2 modules")
sys.exit(1)
try:
from jinja2 import Environment, FileSystemLoader
except ImportError as e:
print(e)
print("You didn't install jinja2 templating engine")
sys.exit(1)
try:
from dc2.lib.web.pages import Page
from dc2.lib.web.csrf import csrf_protected
from dc2.lib.decorators.logger import Logger
except ImportError as e:
print(e)
print("You are missing the necessary DC2 modules")
sys.exit(1)
try:
from settings import TEMPLATE_DIR
from settings import KERBEROS_AUTH_ENABLED
except ImportError as e:
print(e)
print("You don't have a settings file")
sys.exit(1)
try:
from dc2.admincenter.lib.auth import do_kinit
from dc2.admincenter.lib.auth import KerberosAuthError
except ImportError as e:
print(e)
print("There are dc2.admincenter modules missing")
sys.exit(1)
tmpl_env = Environment(loader=FileSystemLoader(TEMPLATE_DIR))
class Home(object):
@Logger(logger=logger)
def GET(self):
page = Page('index.tmpl', tmpl_env, web.ctx)
page.set_title('DC2-AdminCenter - Index')
page.set_cssfiles(CSS_FILES)
page.set_jslibs(JS_LIBS)
if ('authenticated' in web.ctx.session and
web.ctx.session.authenticated):
user_info = {}
user_info['username'] = web.ctx.session.username
user_info['realname'] = web.ctx.session.realname
user_info['is_dc2admin'] = web.ctx.session.is_dc2admin
page.add_page_data({'user': user_info})
return page.render()
class Login(object):
@csrf_protected
@Logger(logger=logger)
def POST(self):
params = web.input()
if 'error' in web.ctx.session:
del web.ctx.session.error
del web.ctx.session.errorno
del web.ctx.session.errormsg
if KERBEROS_AUTH_ENABLED:
try:
do_kinit(params.username, params.password)
web.ctx.session.authenticated = True
web.ctx.session.username = params.username
raise web.seeother('/')
except KerberosAuthError, e:
web.ctx.session.authenticated = False
web.ctx.session.error = True
web.ctx.session.errorno = 1020
web.ctx.session.errormsg = e
raise web.seeother('/')
# TODO: Standard Auth
else:
web.ctx.session.authenticated = True
web.ctx.session.username = params.username
raise web.seeother('/')
| gpl-2.0 | -5,261,619,635,792,575,000 | 31.196581 | 76 | 0.655694 | false |
usc-isi-i2/etk | etk/utilities.py | 1 | 2077 | import datetime
import hashlib
import json
from typing import Dict
import uuid
import warnings
class Utility(object):
@staticmethod
def make_json_serializable(doc: Dict):
"""
Make the document JSON serializable. This is a poor man's implementation that handles dates and nothing else.
This method modifies the given document in place.
Args:
doc: A Python Dictionary, typically a CDR object.
Returns: None
"""
for k, v in doc.items():
if isinstance(v, datetime.date):
doc[k] = v.strftime("%Y-%m-%d")
elif isinstance(v, datetime.datetime):
doc[k] = v.isoformat()
@staticmethod
def create_doc_id_from_json(doc) -> str:
"""
Docs with identical contents get the same ID.
Args:
doc:
Returns: a string with the hash of the given document.
"""
return hashlib.sha256(json.dumps(doc, sort_keys=True).encode('utf-8')).hexdigest()
@staticmethod
def create_doc_id_string(any_string):
"""
Creates sha256 has of a string
:param any_string: input string
:return: sha256 hash of any_string
"""
try:
return hashlib.sha256(any_string).hexdigest()
except:
# probably failed because of unicode
return hashlib.sha256(any_string.encode('utf-8')).hexdigest()
@staticmethod
def create_uuid():
return str(uuid.uuid4())
@staticmethod
def create_description_from_json(doc_json):
        description = '{'  # opening brace matched by the '}' appended below
for key in doc_json:
description += '"' + key + '":"' + str(doc_json[key]) + '", <br/>'
description += '}'
return description
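# Illustrative usage of the helpers above (made-up document):
#
#   doc = {'title': 'a', 'when': datetime.date(2020, 1, 1)}
#   Utility.make_json_serializable(doc)   # the date becomes "2020-01-01"
#   Utility.create_doc_id_from_json(doc)  # stable sha256 hex digest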
def deprecated(msg=''):
def deprecated_decorator(func):
def deprecated_func(*args, **kwargs):
warnings.warn("{}: this function is deprecated. {}".format(func.__name__, msg))
return func(*args, **kwargs)
return deprecated_func
return deprecated_decorator
| mit | 3,378,224,767,070,204,000 | 27.847222 | 117 | 0.585941 | false |
postlund/home-assistant | homeassistant/components/rflink/switch.py | 1 | 2332 | """Support for Rflink switches."""
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchDevice
from homeassistant.const import CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import (
CONF_ALIASES,
CONF_DEVICE_DEFAULTS,
CONF_DEVICES,
CONF_FIRE_EVENT,
CONF_GROUP,
CONF_GROUP_ALIASES,
CONF_NOGROUP_ALIASES,
CONF_SIGNAL_REPETITIONS,
DEVICE_DEFAULTS_SCHEMA,
SwitchableRflinkDevice,
)
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_DEVICE_DEFAULTS, default=DEVICE_DEFAULTS_SCHEMA({})
): DEVICE_DEFAULTS_SCHEMA,
vol.Optional(CONF_DEVICES, default={}): {
cv.string: vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_GROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_NOGROUP_ALIASES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_FIRE_EVENT): cv.boolean,
vol.Optional(CONF_SIGNAL_REPETITIONS): vol.Coerce(int),
vol.Optional(CONF_GROUP, default=True): cv.boolean,
}
)
},
},
extra=vol.ALLOW_EXTRA,
)
def devices_from_config(domain_config):
"""Parse configuration and add Rflink switch devices."""
devices = []
for device_id, config in domain_config[CONF_DEVICES].items():
device_config = dict(domain_config[CONF_DEVICE_DEFAULTS], **config)
device = RflinkSwitch(device_id, **device_config)
devices.append(device)
return devices
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Rflink platform."""
async_add_entities(devices_from_config(config))
# pylint: disable=too-many-ancestors
class RflinkSwitch(SwitchableRflinkDevice, SwitchDevice):
"""Representation of a Rflink switch."""
pass
| apache-2.0 | 2,837,758,458,445,601,000 | 29.684211 | 86 | 0.602487 | false |
fmaguire/ete | ete3/tools/phylobuild_lib/task/phyml.py | 1 | 4152 | from __future__ import absolute_import
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: [email protected]
#
#
# #END_LICENSE#############################################################
import os
import shutil
import sys
import re
import logging
log = logging.getLogger("main")
from ete3.tools.phylobuild_lib.master_task import TreeTask
from ete3.tools.phylobuild_lib.master_job import Job
from ete3.tools.phylobuild_lib.utils import basename, PhyloTree, OrderedDict, GLOBALS, PHYML_CITE, DATATYPES
from ete3.tools.phylobuild_lib import db
__all__ = ["Phyml"]
class Phyml(TreeTask):
def __init__(self, nodeid, alg_phylip_file, constrain_id, model,
seqtype, conf, confname, parts_id=None):
GLOBALS["citator"].add(PHYML_CITE)
base_args = OrderedDict({
"--model": "",
"--no_memory_check": "",
"--quiet": "",
"--constraint_tree": ""})
self.confname = confname
self.conf = conf
self.constrain_tree = None
if constrain_id:
self.constrain_tree = db.get_dataid(constrain_id, DATATYPES.constrain_tree)
self.alg_phylip_file = alg_phylip_file
TreeTask.__init__(self, nodeid, "tree", "Phyml",
base_args, conf[confname])
if seqtype == "aa":
self.model = model or conf[confname]["_aa_model"]
elif seqtype == "nt":
self.model = model or conf[confname]["_nt_model"]
self.seqtype = seqtype
self.lk = None
self.init()
def load_jobs(self):
appname = self.conf[self.confname]["_app"]
args = OrderedDict(self.args)
args["--model"] = self.model
args["--datatype"] = self.seqtype
args["--input"] = self.alg_phylip_file
if self.constrain_tree:
args["--constraint_tree"] = self.constrain_tree
args["-u"] = self.constrain_tree
else:
del args["--constraint_tree"]
job = Job(self.conf["app"][appname], args, parent_ids=[self.nodeid])
job.add_input_file(self.alg_phylip_file, job.jobdir)
if self.constrain_tree:
job.add_input_file(self.constrain_tree, job.jobdir)
job.jobname += "-"+self.model
self.jobs.append(job)
def finish(self):
lks = []
j = self.jobs[0]
tree_file = os.path.join(j.jobdir,
self.alg_phylip_file+"_phyml_tree.txt")
stats_file = os.path.join(j.jobdir,
self.alg_phylip_file+"_phyml_stats.txt")
        m = re.search(r'Log-likelihood:\s+(-?\d+\.\d+)',
                      open(stats_file).read())
lk = float(m.groups()[0])
stats = {"lk": lk}
tree = PhyloTree(tree_file)
TreeTask.store_data(self, tree.write(), stats)
| gpl-3.0 | 8,228,641,529,093,788,000 | 34.186441 | 108 | 0.593931 | false |
Mkebede/OmicsIntegrator | scripts/motif_regression.py | 1 | 11666 | '''
File to handle motif/expression regression
'''
__author__='Anthony Soltis'
__email__='[email protected]'
import sys,os,pickle,re
from optparse import OptionParser
import numpy as np
from scipy import stats
import fileinput
import matplotlib
matplotlib.use('pdf')
from matplotlib import pyplot as plt
def load_tgm(tgm_fn):
'''
Load tgm file and produce output matrix.
Output is transposed numpy array object.
'''
print 'Loading tgm file...'
tgm = []
for line in fileinput.input(tgm_fn):
l = line.strip('\n').split()
tgm.append(l)
    # build the array once, report its shape, then return it
    arr = np.asarray(tgm).T
    s = arr.shape
    print 'TGM file loaded with %d genes by %d motifs.'%(s[0],s[1])
    return arr
def load_ids(ids_fn):
'''
Load ids filename and store as list.
'''
ids = []
for line in fileinput.input(ids_fn):
l = line.strip('\n')
ids.append(l)
return ids
def load_response(data_fn):
'''
Load ydata and return numpy vector.
Input file should have one value per-row.
'''
r_data = []
r_genes = []
for line in fileinput.input(data_fn):
row=line.strip('\n').split('\t')
if len(row)>1:
r_genes.append(row[0])
r_data.append(float(row[1]))
else:
r_data.append(float(row[0]))
# r_data.append(float(line.strip('\n')))
print 'Response data file loaded with %d values.'%(len(r_data))
return np.asarray(r_data),r_genes
def map_data(Xdata,Xnames,Ydata,Ynames):
'''
Map X (predictor) data to Y (response) data using X and Y data ids (i.e. gene names).
'''
# Intersect two gene lists
Xinds = []
Yinds = []
#yn = []
for i,Xgene in enumerate(Xnames):
for j,Ygene in enumerate(Ynames):
if Xgene == Ygene:
Xinds.append(i)
Yinds.append(j)
# yn.append(Ygene)
Xdata_out = Xdata[Xinds,:]
Ydata_out = Ydata[Yinds]
print 'Found %d genes that have binding data and are in the expression output'%(len(Yinds))
#yn.sort()
#print ','.join(yn[0:20])
return Xdata_out,Ydata_out
def perform_regression(X,Y,motif_ids,norm,outdir,plot):
    '''
    Regress the response vector Y against each motif-affinity column of X
    (optionally log-normalizing Y) and return [motif, slope, p-value,
    column-index] lists sorted ascending by p-value. If plot is True, a
    per-motif regression plot is saved in a regression_plots directory
    alongside the output file.
    '''
reg_results = []
for i in range(0,X.shape[1]):
# Set up data
x = np.array(X[:,i],dtype=float)
if norm != None:
if norm == 'log2':
y = np.log2(Y+.1)
elif norm == 'log10':
y = np.log10(Y+.1)
else: y = Y
# Perform regression
slope,intercept,r_val,p_val,std_err = stats.linregress(x,y)
reg_results.append(([motif_ids[i],slope,p_val,i]))
#regression plot
if plot:
fig = plt.figure()
ax1 = fig.add_subplot(111)
ax1.plot(x,y,'bo',x,intercept+slope*x,'k')
ax1.set_title(motif_ids[i])
ax1.set_xlabel('Estimated transcription factor affinity')
ax1.set_ylabel('Expression log fold change')
#checking if a subdirectory is present to save plots
plotdir = os.path.join(os.path.split(outdir)[0],'regression_plots')
if not os.path.isdir(plotdir):
os.makedirs(plotdir)
#cleaning all motif ids to have all alphanumeric name
if not re.match(r'^[A-Za-z0-9.]*$', motif_ids[i]):
motif_ids[i] = "".join(c for c in motif_ids[i] if c not in ('!','$','@','!','%','*','\\','/','_','-'))
#file name must be within max characters supported by os
if len(motif_ids[i])>162:
st = motif_ids[i]
motif_ids[i] = st[0:160]
plotfile = os.path.join(plotdir,motif_ids[i]+'.pdf')
fig.savefig(open(plotfile,'w'),dpi=300)
plt.close()
return sorted(reg_results,key=lambda x: x[2])
def fdr_correction(results):
'''
Compute FDR corrected p-values based on Benjamini-Hochberg procedure.
'''
new_results = []
num_tests = len([r for r in results if str(r[1])!='nan'])
print 'Correcting for '+str(num_tests)+' numeric values'
for i in range(0,num_tests):
tup = results[i]
pval = tup[2]
fdr = num_tests*pval/(i+1)
if fdr > 1.0: fdr = 1.0
tup+=(fdr,)
new_results.append(tup)
return new_results
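# Sketch of the Benjamini-Hochberg step on already p-value-sorted rows
# (made-up numbers; num_tests is 2 here):
#
#   [['M1', 0.8, 0.001, 0], ['M2', -0.5, 0.03, 1]]
#   -> FDR = num_tests * p / rank, capped at 1.0:
#      2*0.001/1 = 0.002 and 2*0.03/2 = 0.03 are appended to each row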
def main():
usage = "%prog [options] <scores.tgm or scores.tgm.pkl> <response_values.tab>"
description = "Script that takes a predicted TF-Gene matrix and uses a linear regression to identify which TFs have binding scores correlated with gene expression changes."
parser = OptionParser(usage=usage,description=description)
##get program directory
progdir=os.path.dirname(os.path.abspath(sys.argv[0]))
# Options
parser.add_option('--outdir','--out',dest="outdir",default='./test_out.txt',
help='Choose output file name. Default is %default.')
parser.add_option('--motif-ids','--motif-ids',dest='motif_ids',default=None,
help='OPTIONAL: If input file is in text format (.tgm), provide motif ids corresponding to tgm file motifs.')
parser.add_option('--tgm-genes',dest='tgm_genes',default=None,
help='OPTIONAL: If input file is in text format (.tgm), provide gene ids corresponding to tgm file genes.')
parser.add_option('--response-genes',dest='response_genes',default=None,
help='OPTIONAL: If two-column file is not provided, add in gene ids corresponding to response values.')
parser.add_option('--norm-type',dest='norm_type',default=None,
help='Choose normalization type for response data. Choices are: "log2", "log10".\
Default is %default.')
parser.add_option('--use-qval',dest='use_qval',action='store_true',default=False,help='If set this the Forest input file will contain -log(qval) instead of -log(pval) and threshold the output using qval. Default:%default')
parser.add_option('--thresh',dest='thresh',type='string',default='0.9',help='P/Q-Value threshold to illustrate results. Default:%default')
parser.add_option('--gifdir',dest='motifs',default=os.path.join(progdir,'../data/matrix_files/gifs'),
help='Directory containing motif GIFs to illustrate results. Default is %default')
parser.add_option('--plot',dest='plot',action='store_true',default=False,help='Enable plot generation for regression results. Default:%default')
# get options, arguments
(opts,args) = parser.parse_args()
# Handle arguments
tgm_fn = args[0]
response_data_fn = args[1]
# Load in Y-vector data (gene expression, fold-changes, etc.)
response_data,response_genes = load_response(response_data_fn)
print 'Trying to get file type...'
ext=tgm_fn.split('.')[-1]
if ext.lower()=='pkl':
print '...found PKL file'
pkl=True
else:
print '...found text file, looking for additional data files in options'
pkl=False
# Handle options
outdir = opts.outdir
motif_ids = opts.motif_ids
if motif_ids == None and not pkl:
print 'Must provide motif ids file or use pickled dictionary. Exiting.'
sys.exit()
tgm_genes = opts.tgm_genes
if tgm_genes == None and not pkl:
print 'Must provide gene ids for motifs file or use pickled dictionary. Exiting.'
sys.exit()
# response_genes = opts.response_genes
if opts.response_genes == None and len(response_genes)==0:
print 'Must provide gene ids for response data or have a two-column data file. Exiting.'
sys.exit()
norm_type = opts.norm_type
valid_norm_types = ['log2','log10']
if norm_type != None:
if norm_type not in valid_norm_types:
print 'Normalization type not valid. Exiting.'
sys.exit()
if pkl:
#load in values from dictionary
tgmdict=pickle.load(open(tgm_fn,'rU'))
tgm_data=tgmdict['matrix'].T
motif_ids=tgmdict['tfs']
tgm_genes=tgmdict['genes']
delim=tgmdict['delim']
else:
# Load in transcription factor affinity matrix and IDs
tgm_data = load_tgm(tgm_fn)
motif_ids = load_ids(motif_ids)
tgm_genes = load_ids(tgm_genes)
delim='.'
#now load response_genes if they're not loaded yet
if len(response_genes)==0:
response_genes = load_ids(opts.response_genes)
# Map predictor data to response data
X,Y=map_data(tgm_data,tgm_genes,response_data,response_genes)
# Perform regression
reg_results=perform_regression(X,Y,motif_ids,norm_type,outdir,opts.plot)
# FDR correction
new_results = fdr_correction(reg_results)
dn=os.path.dirname(outdir)
if dn!='' and dn!='./' and not os.path.exists(dn):
os.system('mkdir '+dn)
# Write to TEXT file complete results
of = open(outdir,'w')
of.writelines('\t'.join(['Motif','Slope','p-val','q-val'])+'\n')
for res in new_results:
if str(res[1])=='nan':
continue
ostr = '\t'.join([res[0],str(res[1]),str(res[2]),str(res[4])]) + '\n'
of.writelines(ostr)
of.close()
##now create HTML writeup
threshold = float(opts.thresh)
of= open(re.sub(outdir.split('.')[-1],'html',outdir),'w')
of.writelines("""<html>
<title>GARNET Results</title>
<h3>GARNET regression results</h3>
<p>This table includes the results for GARNET TF-motif discovery and regression. This Table includes the non-zero results of the linear regression</p>
<table width="90%">
<tr><th style="width:25%">Motif Cluster</th><th style="width:12%">Slope</th><th style="width:12%">P-value</th><th style="width:12%">Q-value</th><th style="width:35%">LOGO</th></tr>
""")
for res in new_results:
if str(res[1])=='nan':
continue
# skip rows that exceed the q-value or p-value threhsold
if (opts.use_qval and res[4]<=threshold) or ((not opts.use_qval) and res[2]<=threshold):
motifgif=os.path.join(opts.motifs,'motif'+str(res[3])+'.gif')
ostr = "<tr><td>"+' '.join(res[0].split('.'))+"</td><td>"+str(res[1])+'</td><td>'+str(res[2])+"</td><td>"+str(res[4])+"</td><td><img src=\""+motifgif+"\" scale=80%></td></tr>\n"
of.writelines(ostr)
of.writelines("</table></html>")
of.close()
##now write to Forest-friendly input file
##collect dictionary of all individual tf names and their regression p-values
##or q-values
regdict={}
for row in new_results:
tfs=[t for t in row[0].split(delim) if t!='' and ' ' not in t]
#print row
if str(row[1])=='nan':
continue
# skip rows that exceed the q-value or p-value threhsold
if opts.use_qval:
if row[4]>threshold:
continue
elif row[2]>threshold:
continue
for tf in tfs:
if row[2]==1:
continue
if opts.use_qval:
lpv=-1.0*np.log2(float(row[4]))#calculate neg log2 qvalue
else:
lpv=-1.0*np.log2(float(row[2]))#calculate neg log2 pvalue
try:
cpv=regdict[tf]
except KeyError:
cpv=0.0
if lpv>cpv:
regdict[tf]=lpv
    print 'Found '+str(len(regdict))+' TF scores for '+str(len(new_results))+' motif results'
of=open(re.sub('.tsv','_FOREST_INPUT.tsv',outdir),'w')
for tf in sorted(regdict.keys()):
val=regdict[tf]
of.write(tf+'\t'+str(val)+'\n')
of.close()
if __name__ == '__main__': main()
| bsd-2-clause | 4,813,545,824,401,240,000 | 36.271565 | 226 | 0.59232 | false |
jodonnell/Minesweeper- | minesweeper/views.py | 1 | 4681 | # Create your views here.
from django.shortcuts import render_to_response
from django import forms
from django import http
from minesweeper.classes.create_board import CreateBoard
from minesweeper.classes.board import Board
from pymongo import Connection, DESCENDING, ASCENDING
import cPickle
import json
import datetime
ROWS = 8
COLUMNS = 8
TOTAL_MINES = 10
connection = Connection('localhost', 27017)
class EmailForm(forms.Form):
email = forms.EmailField(required = True)
def _get_minesweeper_db():
return connection.minesweeper
def index(request):
if 'email' not in request.COOKIES:
return _get_email(request)
email = request.COOKIES['email']
db = _get_minesweeper_db()
game_query = db.minesweeper.find_one({'email':email})
board = _create_new_board()
new_record = {"email": email, "board":cPickle.dumps(board), 'new_game':True}
if game_query is None:
db.minesweeper.insert(new_record)
else:
db.minesweeper.update({"email": email}, new_record)
return render_to_response('index.html', {'num_flags':TOTAL_MINES, 'rows':ROWS, 'columns':COLUMNS})
def clear(request):
"User is attempting to clear a square"
row, column, email = _get_row_column_email_params(request)
board = _get_board(email)
_update_board(email, board)
if board.is_mined(row, column):
return http.HttpResponse(json.dumps({'lost':True}))
num_surronding_mines = board.get_num_surronding_mines(row, column)
if num_surronding_mines:
return http.HttpResponse(json.dumps({'num_surronding_mines':num_surronding_mines}))
clear_area = board.get_clear_area(row, column, [])
return http.HttpResponse(json.dumps({'clear_area':clear_area}))
def _update_board(email, board):
update_row = {"email": email, "board":cPickle.dumps(board), "new_game":False}
db = _get_minesweeper_db()
query = db.minesweeper.find_one({'email':email})
if 'new_game' in query and query['new_game']:
update_row['time'] = datetime.datetime.now()
else:
update_row['time'] = query['time']
db.minesweeper.update({"email": email}, update_row)
def flag(request):
row, column, email = _get_row_column_email_params(request)
board = _get_board(email)
board.place_flag(row, column)
_update_board(email, board)
response = {}
if board.has_won():
high_score = _check_high_score(email)
response = {'won':True, 'high_score': high_score}
return http.HttpResponse(json.dumps(response))
def _get_row_column_email_params(request):
row = int(request.GET['row'])
column = int(request.GET['column'])
email = request.COOKIES['email']
return (row, column, email)
def _check_high_score(email):
db = _get_minesweeper_db()
game = db.minesweeper.find_one({'email':email})
high_scores_query = db.high_scores.find()
high_scores_query.sort('time', DESCENDING)
time_diff = datetime.datetime.now() - game['time']
    # total_seconds() gives fractional seconds; the old string concatenation
    # of seconds and microseconds mis-parsed e.g. 5s 30us as 5.30 seconds
    game_time = time_diff.total_seconds()
high_score = 0
if high_scores_query.count() >= 10 and game_time < high_scores_query[0]['time']:
db.high_scores.remove(high_scores_query[0]['_id'])
db.high_scores.insert({'email':game['email'], 'time':game_time})
high_score = game_time
elif high_scores_query.count() < 10:
db.high_scores.insert({'email':game['email'], 'time':game_time})
high_score = game_time
return high_score
def reset(request):
email = request.COOKIES['email']
board = _create_new_board()
db = _get_minesweeper_db()
db.minesweeper.update({"email": email}, {"email": email, "board":cPickle.dumps(board), 'new_game':True})
return http.HttpResponse(json.dumps([]))
def _create_new_board():
create_board = CreateBoard(ROWS, COLUMNS, TOTAL_MINES)
return Board(create_board)
def view_high_scores(request):
db = _get_minesweeper_db()
high_scores_query = db.high_scores.find()
high_scores_query.sort('time', ASCENDING)
return render_to_response('view_high_scores.html', { 'high_scores': high_scores_query })
def _get_board(email):
db = _get_minesweeper_db()
query = db.minesweeper.find_one({'email':email})
return cPickle.loads(str(query['board']))
def _get_email(request):
if request.method == 'POST':
form = EmailForm(request.POST)
if form.is_valid():
redirect = http.HttpResponseRedirect('/')
redirect.set_cookie('email', form.cleaned_data['email'])
return redirect
else:
form = EmailForm()
return render_to_response('get_email.html', { 'form': form })
| unlicense | -8,034,674,039,468,431,000 | 30.206667 | 108 | 0.657979 | false |
BrechtBa/plottools | plottools/__init__.py | 1 | 1325 | #!/usr/bin/env/ python
################################################################################
# Copyright 2016 Brecht Baeten
# This file is part of plottools.
#
# plottools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# plottools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with plottools. If not, see <http://www.gnu.org/licenses/>.
################################################################################
from .__version__ import version as __version__
from .plottools import *
import cs
import cm
import style
################################################################################
# create default color schemes
################################################################################
color = cs.Colorscheme(cs.default.colors,longnames=cs.default.longnames,cycle=cs.default.cycle)
| gpl-2.0 | -6,697,476,864,000,099,000 | 40.40625 | 95 | 0.549434 | false |
ActiveState/code | recipes/Python/491280_BackgroundCall_Threading_like/recipe-491280.py | 1 | 2587 | def example_BackgroundCall():
import urllib,time
def work():
return urllib.urlopen('http://www.python.org/').read()
bkcall=BackgroundCall(work)
print 'work() executing in background ...'
while not bkcall.is_done():
print '.',
time.sleep(0.010)
print 'done.'
print bkcall.get_return()[:500]
import sys
from time import time as _time, sleep as _sleep
class Full(Exception):pass
class Empty(Exception):pass
class BackgroundCall:
"""BackgroundCall
Example:
bkcall=BackgroundCall( time_consuming_function )
...
if bkcall.is_done():
print "got", bkcall.get_return()
"""
id=None
done=0 #1=returned; 2=exception raised
def __init__(self, func, args=(), kwargs={}):
import thread
def thread_bkcall():
try:
self.ret=func(*args, **kwargs)
self.done=1
except:
self.exc=sys.exc_info()
self.done=2
self.id=thread.start_new(thread_bkcall, ())
def is_done(self):
return self.done
def get_return(self, wait=1, timeout=None, raise_exception=1, alt_return=None):
"""delivers the return value or (by default) echoes the exception of
the call job
wait: 0=no waiting; Attribute error raised if no
1=waits for return value or exception
callable -> waits and wait()-call's while waiting for return
"""
if not self.done and wait:
starttime=_time()
delay=0.0005
while not self.done:
if timeout:
remaining = starttime + timeout - _time()
if remaining <= 0: #time is over
if raise_exception:
raise Empty, "return timed out"
else:
return alt_return
delay = min(delay * 2, remaining, .05)
else:
delay = min(delay * 2, .05)
if callable(wait): wait()
_sleep(delay) #reduce CPU usage by using a sleep
if self.done==2: #we had an exception
exc=self.exc
del self.exc
if raise_exception & 1: #by default exception is raised
raise exc[0],exc[1],exc[2]
else:
return alt_return
return self.ret
def get_exception(self):
return self.exc
if __name__=='__main__':
example_BackgroundCall()
| mit | 7,555,456,405,313,564,000 | 32.166667 | 83 | 0.521067 | false |
area3001/ColliScanner | barcode.py | 1 | 1301 | import io
from threading import Thread
import picamera
from PIL import Image
import zbar
class BarcodeScanner(Thread):
def __init__(self, resolutionX=800, resolutionY=600, callback=None):
self.callback = callback
self.scanner = zbar.ImageScanner()
self.scanner.parse_config("enable")
self.stream = io.BytesIO()
self.camera = picamera.PiCamera()
self.camera.resolution = (resolutionX, resolutionY)
self.quit = False
Thread.__init__(self)
def setCallback(self, callback):
self.callback = callback
def run(self):
self.quit = False
if self.camera.closed:
self.camera.open()
self.scan()
def terminate(self):
self.quit = True
if not self.camera.closed:
self.camera.close()
def scan(self):
while not self.quit and not self.camera.closed:
self.stream = io.BytesIO()
self.camera.capture(self.stream, format="jpeg")
# "Rewind" the stream to the beginning so we can read its content
self.stream.seek(0)
pil = Image.open(self.stream)
# create a reader
pil = pil.convert("L")
width, height = pil.size
raw = pil.tobytes()
# wrap image data
image = zbar.Image(width, height, "Y800", raw)
# scan the image for barcodes
self.scanner.scan(image)
if any(True for _ in image):
self.callback(image)
self.quit = True
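# Usage sketch (hypothetical callback; requires the Pi camera hardware):
#
#   def on_barcode(image):
#       for symbol in image:
#           print(symbol.type, symbol.data)
#
#   scanner = BarcodeScanner(callback=on_barcode)
#   scanner.start()      # runs scan() in the background thread
#   ...
#   scanner.terminate()  # stops the loop and closes the camera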
| lgpl-3.0 | 2,577,924,286,906,595,000 | 22.25 | 69 | 0.691007 | false |
eammx/proyectosWeb | proyectoPython/env/lib/python3.6/site-packages/werkzeug/debug/tbtools.py | 2 | 20363 | # -*- coding: utf-8 -*-
"""
werkzeug.debug.tbtools
~~~~~~~~~~~~~~~~~~~~~~
This module provides various traceback related utility functions.
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
import codecs
import inspect
import json
import os
import re
import sys
import sysconfig
import traceback
from tokenize import TokenError
from .._compat import PY2
from .._compat import range_type
from .._compat import reraise
from .._compat import string_types
from .._compat import text_type
from .._compat import to_native
from .._compat import to_unicode
from ..filesystem import get_filesystem_encoding
from ..utils import cached_property
from ..utils import escape
from .console import Console
_coding_re = re.compile(br"coding[:=]\s*([-\w.]+)")
_line_re = re.compile(br"^(.*?)$", re.MULTILINE)
_funcdef_re = re.compile(r"^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)")
UTF8_COOKIE = b"\xef\xbb\xbf"
system_exceptions = (SystemExit, KeyboardInterrupt)
try:
system_exceptions += (GeneratorExit,)
except NameError:
pass
HEADER = u"""\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>%(title)s // Werkzeug Debugger</title>
<link rel="stylesheet" href="?__debugger__=yes&cmd=resource&f=style.css"
type="text/css">
<!-- We need to make sure this has a favicon so that the debugger does
not by accident trigger a request to /favicon.ico which might
change the application state. -->
<link rel="shortcut icon"
href="?__debugger__=yes&cmd=resource&f=console.png">
<script src="?__debugger__=yes&cmd=resource&f=jquery.js"></script>
<script src="?__debugger__=yes&cmd=resource&f=debugger.js"></script>
<script type="text/javascript">
var TRACEBACK = %(traceback_id)d,
CONSOLE_MODE = %(console)s,
EVALEX = %(evalex)s,
EVALEX_TRUSTED = %(evalex_trusted)s,
SECRET = "%(secret)s";
</script>
</head>
<body style="background-color: #fff">
<div class="debugger">
"""
FOOTER = u"""\
<div class="footer">
Brought to you by <strong class="arthur">DON'T PANIC</strong>, your
friendly Werkzeug powered traceback interpreter.
</div>
</div>
<div class="pin-prompt">
<div class="inner">
<h3>Console Locked</h3>
<p>
The console is locked and needs to be unlocked by entering the PIN.
You can find the PIN printed out on the standard output of your
shell that runs the server.
<form>
<p>PIN:
<input type=text name=pin size=14>
<input type=submit name=btn value="Confirm Pin">
</form>
</div>
</div>
</body>
</html>
"""
PAGE_HTML = (
HEADER
+ u"""\
<h1>%(exception_type)s</h1>
<div class="detail">
<p class="errormsg">%(exception)s</p>
</div>
<h2 class="traceback">Traceback <em>(most recent call last)</em></h2>
%(summary)s
<div class="plain">
<form action="/?__debugger__=yes&cmd=paste" method="post">
<p>
<input type="hidden" name="language" value="pytb">
This is the Copy/Paste friendly version of the traceback. <span
class="pastemessage">You can also paste this traceback into
a <a href="https://gist.github.com/">gist</a>:
<input type="submit" value="create paste"></span>
</p>
<textarea cols="50" rows="10" name="code" readonly>%(plaintext)s</textarea>
</form>
</div>
<div class="explanation">
The debugger caught an exception in your WSGI application. You can now
look at the traceback which led to the error. <span class="nojavascript">
If you enable JavaScript you can also use additional features such as code
execution (if the evalex feature is enabled), automatic pasting of the
exceptions and much more.</span>
</div>
"""
+ FOOTER
+ """
<!--
%(plaintext_cs)s
-->
"""
)
CONSOLE_HTML = (
HEADER
+ u"""\
<h1>Interactive Console</h1>
<div class="explanation">
In this console you can execute Python expressions in the context of the
application. The initial namespace was created by the debugger automatically.
</div>
<div class="console"><div class="inner">The Console requires JavaScript.</div></div>
"""
+ FOOTER
)
SUMMARY_HTML = u"""\
<div class="%(classes)s">
%(title)s
<ul>%(frames)s</ul>
%(description)s
</div>
"""
FRAME_HTML = u"""\
<div class="frame" id="frame-%(id)d">
<h4>File <cite class="filename">"%(filename)s"</cite>,
line <em class="line">%(lineno)s</em>,
in <code class="function">%(function_name)s</code></h4>
<div class="source %(library)s">%(lines)s</div>
</div>
"""
SOURCE_LINE_HTML = u"""\
<tr class="%(classes)s">
<td class=lineno>%(lineno)s</td>
<td>%(code)s</td>
</tr>
"""
def render_console_html(secret, evalex_trusted=True):
return CONSOLE_HTML % {
"evalex": "true",
"evalex_trusted": "true" if evalex_trusted else "false",
"console": "true",
"title": "Console",
"secret": secret,
"traceback_id": -1,
}
def get_current_traceback(
ignore_system_exceptions=False, show_hidden_frames=False, skip=0
):
"""Get the current exception info as `Traceback` object. Per default
calling this method will reraise system exceptions such as generator exit,
system exit or others. This behavior can be disabled by passing `False`
to the function as first parameter.
"""
exc_type, exc_value, tb = sys.exc_info()
if ignore_system_exceptions and exc_type in system_exceptions:
reraise(exc_type, exc_value, tb)
for _ in range_type(skip):
if tb.tb_next is None:
break
tb = tb.tb_next
tb = Traceback(exc_type, exc_value, tb)
if not show_hidden_frames:
tb.filter_hidden_frames()
return tb
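
# Usage sketch (illustrative, not part of the original module): capture the
# active exception and render it with the helpers below.
#
#     try:
#         1 / 0
#     except ZeroDivisionError:
#         tb = get_current_traceback()
#         tb.log()                 # plain-text traceback to stderr
#         html = tb.render_full()  # full HTML debugger page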
class Line(object):
"""Helper for the source renderer."""
__slots__ = ("lineno", "code", "in_frame", "current")
def __init__(self, lineno, code):
self.lineno = lineno
self.code = code
self.in_frame = False
self.current = False
@property
def classes(self):
rv = ["line"]
if self.in_frame:
rv.append("in-frame")
if self.current:
rv.append("current")
return rv
def render(self):
return SOURCE_LINE_HTML % {
"classes": u" ".join(self.classes),
"lineno": self.lineno,
"code": escape(self.code),
}
class Traceback(object):
"""Wraps a traceback."""
def __init__(self, exc_type, exc_value, tb):
self.exc_type = exc_type
self.exc_value = exc_value
self.tb = tb
exception_type = exc_type.__name__
if exc_type.__module__ not in {"builtins", "__builtin__", "exceptions"}:
exception_type = exc_type.__module__ + "." + exception_type
self.exception_type = exception_type
self.groups = []
memo = set()
while True:
self.groups.append(Group(exc_type, exc_value, tb))
memo.add(id(exc_value))
if PY2:
break
exc_value = exc_value.__cause__ or exc_value.__context__
if exc_value is None or id(exc_value) in memo:
break
exc_type = type(exc_value)
tb = exc_value.__traceback__
self.groups.reverse()
self.frames = [frame for group in self.groups for frame in group.frames]
def filter_hidden_frames(self):
"""Remove the frames according to the paste spec."""
for group in self.groups:
group.filter_hidden_frames()
self.frames[:] = [frame for group in self.groups for frame in group.frames]
@property
def is_syntax_error(self):
"""Is it a syntax error?"""
return isinstance(self.exc_value, SyntaxError)
@property
def exception(self):
"""String representation of the final exception."""
return self.groups[-1].exception
def log(self, logfile=None):
"""Log the ASCII traceback into a file object."""
if logfile is None:
logfile = sys.stderr
tb = self.plaintext.rstrip() + u"\n"
logfile.write(to_native(tb, "utf-8", "replace"))
def paste(self):
"""Create a paste and return the paste id."""
data = json.dumps(
{
"description": "Werkzeug Internal Server Error",
"public": False,
"files": {"traceback.txt": {"content": self.plaintext}},
}
).encode("utf-8")
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
rv = urlopen("https://api.github.com/gists", data=data)
resp = json.loads(rv.read().decode("utf-8"))
rv.close()
return {"url": resp["html_url"], "id": resp["id"]}
def render_summary(self, include_title=True):
"""Render the traceback for the interactive console."""
title = ""
classes = ["traceback"]
if not self.frames:
classes.append("noframe-traceback")
frames = []
else:
library_frames = sum(frame.is_library for frame in self.frames)
mark_lib = 0 < library_frames < len(self.frames)
frames = [group.render(mark_lib=mark_lib) for group in self.groups]
if include_title:
if self.is_syntax_error:
title = u"Syntax Error"
else:
title = u"Traceback <em>(most recent call last)</em>:"
if self.is_syntax_error:
description_wrapper = u"<pre class=syntaxerror>%s</pre>"
else:
description_wrapper = u"<blockquote>%s</blockquote>"
return SUMMARY_HTML % {
"classes": u" ".join(classes),
"title": u"<h3>%s</h3>" % title if title else u"",
"frames": u"\n".join(frames),
"description": description_wrapper % escape(self.exception),
}
def render_full(self, evalex=False, secret=None, evalex_trusted=True):
"""Render the Full HTML page with the traceback info."""
exc = escape(self.exception)
return PAGE_HTML % {
"evalex": "true" if evalex else "false",
"evalex_trusted": "true" if evalex_trusted else "false",
"console": "false",
"title": exc,
"exception": exc,
"exception_type": escape(self.exception_type),
"summary": self.render_summary(include_title=False),
"plaintext": escape(self.plaintext),
"plaintext_cs": re.sub("-{2,}", "-", self.plaintext),
"traceback_id": self.id,
"secret": secret,
}
@cached_property
def plaintext(self):
return u"\n".join([group.render_text() for group in self.groups])
@property
def id(self):
return id(self)
class Group(object):
"""A group of frames for an exception in a traceback. On Python 3,
if the exception has a ``__cause__`` or ``__context__``, there are
multiple exception groups.
"""
def __init__(self, exc_type, exc_value, tb):
self.exc_type = exc_type
self.exc_value = exc_value
self.info = None
if not PY2:
if exc_value.__cause__ is not None:
self.info = (
u"The above exception was the direct cause of the"
u" following exception"
)
elif exc_value.__context__ is not None:
self.info = (
u"During handling of the above exception, another"
u" exception occurred"
)
self.frames = []
while tb is not None:
self.frames.append(Frame(exc_type, exc_value, tb))
tb = tb.tb_next
def filter_hidden_frames(self):
new_frames = []
hidden = False
for frame in self.frames:
hide = frame.hide
if hide in ("before", "before_and_this"):
new_frames = []
hidden = False
if hide == "before_and_this":
continue
elif hide in ("reset", "reset_and_this"):
hidden = False
if hide == "reset_and_this":
continue
elif hide in ("after", "after_and_this"):
hidden = True
if hide == "after_and_this":
continue
elif hide or hidden:
continue
new_frames.append(frame)
# if we only have one frame and that frame is from the codeop
# module, remove it.
if len(new_frames) == 1 and self.frames[0].module == "codeop":
del self.frames[:]
        # if the last frame is missing, something went terribly wrong :(
elif self.frames[-1] in new_frames:
self.frames[:] = new_frames
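
        # Illustrative marker (an assumption matching the paste spec handled
        # above): a frame sets, e.g.,
        #
        #     __traceback_hide__ = 'before_and_this'
        #
        # in its locals to trim everything up to and including itself from
        # the rendered traceback.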
@property
def exception(self):
"""String representation of the exception."""
buf = traceback.format_exception_only(self.exc_type, self.exc_value)
rv = "".join(buf).strip()
return to_unicode(rv, "utf-8", "replace")
def render(self, mark_lib=True):
out = []
if self.info is not None:
out.append(u'<li><div class="exc-divider">%s:</div>' % self.info)
for frame in self.frames:
out.append(
u"<li%s>%s"
% (
u' title="%s"' % escape(frame.info) if frame.info else u"",
frame.render(mark_lib=mark_lib),
)
)
return u"\n".join(out)
def render_text(self):
out = []
if self.info is not None:
out.append(u"\n%s:\n" % self.info)
out.append(u"Traceback (most recent call last):")
for frame in self.frames:
out.append(frame.render_text())
out.append(self.exception)
return u"\n".join(out)
class Frame(object):
"""A single frame in a traceback."""
def __init__(self, exc_type, exc_value, tb):
self.lineno = tb.tb_lineno
self.function_name = tb.tb_frame.f_code.co_name
self.locals = tb.tb_frame.f_locals
self.globals = tb.tb_frame.f_globals
fn = inspect.getsourcefile(tb) or inspect.getfile(tb)
if fn[-4:] in (".pyo", ".pyc"):
fn = fn[:-1]
# if it's a file on the file system resolve the real filename.
if os.path.isfile(fn):
fn = os.path.realpath(fn)
self.filename = to_unicode(fn, get_filesystem_encoding())
self.module = self.globals.get("__name__", self.locals.get("__name__"))
self.loader = self.globals.get("__loader__", self.locals.get("__loader__"))
self.code = tb.tb_frame.f_code
# support for paste's traceback extensions
self.hide = self.locals.get("__traceback_hide__", False)
info = self.locals.get("__traceback_info__")
if info is not None:
info = to_unicode(info, "utf-8", "replace")
self.info = info
def render(self, mark_lib=True):
"""Render a single frame in a traceback."""
return FRAME_HTML % {
"id": self.id,
"filename": escape(self.filename),
"lineno": self.lineno,
"function_name": escape(self.function_name),
"lines": self.render_line_context(),
"library": "library" if mark_lib and self.is_library else "",
}
@cached_property
def is_library(self):
return any(
self.filename.startswith(path) for path in sysconfig.get_paths().values()
)
def render_text(self):
return u' File "%s", line %s, in %s\n %s' % (
self.filename,
self.lineno,
self.function_name,
self.current_line.strip(),
)
def render_line_context(self):
before, current, after = self.get_context_lines()
rv = []
def render_line(line, cls):
line = line.expandtabs().rstrip()
stripped_line = line.strip()
prefix = len(line) - len(stripped_line)
rv.append(
'<pre class="line %s"><span class="ws">%s</span>%s</pre>'
% (cls, " " * prefix, escape(stripped_line) or " ")
)
for line in before:
render_line(line, "before")
render_line(current, "current")
for line in after:
render_line(line, "after")
return "\n".join(rv)
def get_annotated_lines(self):
"""Helper function that returns lines with extra information."""
lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)]
# find function definition and mark lines
if hasattr(self.code, "co_firstlineno"):
lineno = self.code.co_firstlineno - 1
while lineno > 0:
if _funcdef_re.match(lines[lineno].code):
break
lineno -= 1
try:
offset = len(inspect.getblock([x.code + "\n" for x in lines[lineno:]]))
except TokenError:
offset = 0
for line in lines[lineno : lineno + offset]:
line.in_frame = True
# mark current line
try:
lines[self.lineno - 1].current = True
except IndexError:
pass
return lines
def eval(self, code, mode="single"):
"""Evaluate code in the context of the frame."""
if isinstance(code, string_types):
if PY2 and isinstance(code, text_type): # noqa
code = UTF8_COOKIE + code.encode("utf-8")
code = compile(code, "<interactive>", mode)
return eval(code, self.globals, self.locals)
@cached_property
def sourcelines(self):
"""The sourcecode of the file as list of unicode strings."""
# get sourcecode from loader or file
source = None
if self.loader is not None:
try:
if hasattr(self.loader, "get_source"):
source = self.loader.get_source(self.module)
elif hasattr(self.loader, "get_source_by_code"):
source = self.loader.get_source_by_code(self.code)
except Exception:
# we munch the exception so that we don't cause troubles
# if the loader is broken.
pass
if source is None:
try:
with open(
to_native(self.filename, get_filesystem_encoding()), mode="rb"
) as f:
source = f.read()
except IOError:
return []
# already unicode? return right away
if isinstance(source, text_type):
return source.splitlines()
# yes. it should be ascii, but we don't want to reject too many
# characters in the debugger if something breaks
charset = "utf-8"
if source.startswith(UTF8_COOKIE):
source = source[3:]
else:
for idx, match in enumerate(_line_re.finditer(source)):
match = _coding_re.search(match.group())
if match is not None:
charset = match.group(1)
break
if idx > 1:
break
# on broken cookies we fall back to utf-8 too
charset = to_native(charset)
try:
codecs.lookup(charset)
except LookupError:
charset = "utf-8"
return source.decode(charset, "replace").splitlines()
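
    # Illustrative PEP 263 cookie the scan above looks for when no UTF-8 BOM
    # is present (assuming _coding_re matches the standard declaration):
    #
    #     # -*- coding: latin-1 -*-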
def get_context_lines(self, context=5):
before = self.sourcelines[self.lineno - context - 1 : self.lineno - 1]
past = self.sourcelines[self.lineno : self.lineno + context]
return (before, self.current_line, past)
@property
def current_line(self):
try:
return self.sourcelines[self.lineno - 1]
except IndexError:
return u""
@cached_property
def console(self):
return Console(self.globals, self.locals)
@property
def id(self):
return id(self)
| mit | 8,006,801,681,762,134,000 | 31.425159 | 87 | 0.559201 | false |
mit-dig/punya | appinventor/misc/emulator-support/aiWinStarter.py | 1 | 5833 | #!/usr/bin/python
from bottle import run,route,app,request,response,template,default_app,Bottle,debug,abort
import sys
import os
import platform
import subprocess
import re
#from flup.server.fcgi import WSGIServer
#from cStringIO import StringIO
#import memcache
app = Bottle()
default_app.push(app)
VERSION = "2.2"
platforms = platform.uname()[0]
print "Platform = %s" % platforms
if platforms == 'Windows': # Windows
PLATDIR = os.environ["ProgramFiles"]
PLATDIR = '"' + PLATDIR + '"'
print "AppInventor tools located here: %s" % PLATDIR
else:
sys.exit(1)
@route('/ping/')
def ping():
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'origin, content-type'
response.headers['Content-Type'] = 'application/json'
return '{ "status" : "OK", "version" : "%s" }' % VERSION
@route('/utest/')
def utest():
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'origin, content-type'
response.headers['Content-Type'] = 'application/json'
device = checkrunning(False)
if device:
return '{ "status" : "OK", "device" : "%s", "version" : "%s" }' % (device, VERSION)
else:
return '{ "status" : "NO", "version" : "%s" }' % VERSION
@route('/start/')
def start():
subprocess.call(PLATDIR + "\\AppInventor\\commands-for-Appinventor\\run-emulator ", shell=True)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'origin, content-type'
return ''
@route('/emulatorreset/')
def emulatorreset():
subprocess.call(PLATDIR + "\\AppInventor\\commands-for-Appinventor\\reset-emulator ", shell=True)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'origin, content-type'
return ''
@route('/echeck/')
def echeck():
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'origin, content-type'
response.headers['Content-Type'] = 'application/json'
device = checkrunning(True)
if device:
return '{ "status" : "OK", "device" : "%s", "version" : "%s"}' % (device, VERSION)
else:
return '{ "status" : "NO", "version" : "%s" }' % VERSION
@route('/ucheck/')
def ucheck():
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'origin, content-type'
response.headers['Content-Type'] = 'application/json'
device = checkrunning(False)
if device:
return '{ "status" : "OK", "device" : "%s", "version" : "%s"}' % (device, VERSION)
else:
return '{ "status" : "NO", "version" : "%s" }' % VERSION
@route('/reset/')
def reset():
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'origin, content-type'
response.headers['Content-Type'] = 'application/json'
killadb()
killemulator()
return '{ "status" : "OK", "version" : "%s" }' % VERSION
@route('/replstart/:device')
def replstart(device=None):
print "Device = %s" % device
try:
subprocess.check_output((PLATDIR + "\\AppInventor\\commands-for-Appinventor\\adb -s %s forward tcp:8001 tcp:8001") % device, shell=True)
if re.match('.*emulat.*', device): # Only fake the menu key for the emulator
subprocess.check_output((PLATDIR + "\\AppInventor\\commands-for-Appinventor\\adb -s %s shell input keyevent 82") % device, shell=True)
subprocess.check_output((PLATDIR + "\\AppInventor\\commands-for-Appinventor\\adb -s %s shell am start -a android.intent.action.VIEW -n edu.mit.appinventor.punya.aicompanion3/.Screen1 --ez rundirect true") % device, shell=True)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'origin, content-type'
return ''
except subprocess.CalledProcessError as e:
print "Problem starting companion app : status %i\n" % e.returncode
return ''
def checkrunning(emulator):
try:
result = subprocess.check_output(PLATDIR + "\\AppInventor\\commands-for-Appinventor\\adb devices", shell=True)
        lines = result.split('\n')
        m = None  # guard: the 'continue' below can skip assignment on the first pass
        for line in lines[1:]:
if emulator:
m = re.search('^(.*emulator-[1-9]+)\t+device.*', line)
else:
if re.search('^(.*emulator-[1-9]+)\t+device.*', line): # We are an emulator
continue # Skip it
m = re.search('^([A-z0-9.:]+.*?)\t+device.*', line)
if m:
break
if m:
return m.group(1)
return False
except subprocess.CalledProcessError as e:
print "Problem checking for devices : status %i\n" % e.returncode
return False
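
# Illustrative `adb devices` output that checkrunning() parses (an assumption
# based on the regexes above):
#
#     List of devices attached
#     emulator-5554	device
#     192.168.56.101:5555	device
#
# checkrunning(True) would return 'emulator-5554'; checkrunning(False) skips
# emulator lines and would return the attached device id instead.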
def killadb():
try:
subprocess.check_output(PLATDIR + "\\AppInventor\\commands-for-Appinventor\\adb kill-server", shell=True)
print "Killed adb\n"
except subprocess.CalledProcessError as e:
print "Problem stopping adb : status %i\n" % e.returncode
return ''
def killemulator():
try:
subprocess.check_output(PLATDIR + "\\AppInventor\\commands-for-Appinventor\\kill-emulator", shell=True)
print "Killed emulator\n"
except subprocess.CalledProcessError as e:
print "Problem stopping emulator : status %i\n" % e.returncode
return ''
def shutdown():
try: # Be quiet...
killadb()
killemulator()
except:
pass
if __name__ == '__main__':
import atexit
atexit.register(shutdown)
run(host='127.0.0.1', port=8004)
##WSGIServer(app).run()
| apache-2.0 | -4,360,912,690,102,491,000 | 37.124183 | 234 | 0.621464 | false |
JeffRoy/mi-dataset | mi/dataset/driver/nutnr_b/dcl_conc/nutnr_b_dcl_conc_telemetered_driver.py | 1 | 1490 | #!/usr/local/bin/python2.7
##
# OOIPLACEHOLDER
#
# Copyright 2014 Raytheon Co.
##
__author__ = 'kustert,mworden'
import os
from mi.logging import config
from mi.core.log import get_logger
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.dataset_driver import DataSetDriver
from mi.dataset.parser.nutnr_b_dcl_conc import NutnrBDclConcTelemeteredParser
from mi.core.versioning import version
@version("15.7.0")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
config.add_configuration(os.path.join(basePythonCodePath, 'res', 'config', 'mi-logging.yml'))
log = get_logger()
parser_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.nutnr_b_particles',
DataSetDriverConfigKeys.PARTICLE_CLASS: None
}
def exception_callback(exception):
log.debug("ERROR: %r", exception)
particleDataHdlrObj.setParticleDataCaptureFailure()
with open(sourceFilePath, 'r') as stream_handle:
parser = NutnrBDclConcTelemeteredParser(parser_config,
stream_handle,
lambda state, ingested: None,
lambda data: None,
exception_callback)
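        # Note: the state and publish callbacks above are deliberate no-ops,
        # presumably because the DataSetDriver below drives the parser and
        # hands particles to particleDataHdlrObj itself.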
driver = DataSetDriver(parser, particleDataHdlrObj)
driver.processFileStream()
return particleDataHdlrObj
| bsd-2-clause | -7,912,399,062,643,848,000 | 32.111111 | 97 | 0.639597 | false |
ReubenAbrams/Chrubix | src/setbright.py | 1 | 3204 | #!/usr/local/bin/python3
'''simple brightness controller for Chrubix
'''
import sys
import os
# import hashlib
from chrubix.utils import logme, read_oneliner_file
# from chrubix import save_distro_record, load_distro_record
try:
from PyQt4.QtCore import QString
except ImportError:
QString = str
TIME_BETWEEN_CHECKS = 200 # .2 seconds
DELAY_BEFORE_HIDING = 3000 # 3 seconds
from PyQt4.QtCore import pyqtSignature, Qt, QTimer
# from PyQt4.Qt import QLineEdit, QPixmap
from PyQt4 import QtGui # , uic
# from PyQt4 import QtCore
# import resources_rc
from ui.ui_BrightnessControl import Ui_BrightnessControlWidget
class BrightnessControlWidget( QtGui.QDialog, Ui_BrightnessControlWidget ):
def __init__( self ):
# self._password = None
self.cycles = 99
self.brightnow_fname = '%s/.brightnow' % ( os.path.expanduser( "~" ) )
super( BrightnessControlWidget, self ).__init__()
self.setupUi( self )
self.setWindowFlags( Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint | Qt.ToolTip ) # QtCore.Qt.Tool )
self.show()
self.raise_()
self.setAttribute( Qt.WA_ShowWithoutActivating )
# self.setBrightness( 0 )
self.hide()
self.old_brightness = None
# self.speaker_width = self.speakeron.width()
# self.speaker_height = self.speakeron.height()
QTimer.singleShot( TIME_BETWEEN_CHECKS, self.monitor )
def monitor( self ):
noof_checks = DELAY_BEFORE_HIDING / TIME_BETWEEN_CHECKS
if self.cycles > noof_checks:
self.hide()
# print( 'hiding again' )
else:
self.cycles += 1
# print( 'checking' )
if os.path.exists( self.brightnow_fname ):
try:
new_brightness = int( read_oneliner_file( self.brightnow_fname ) )
# logme( 'curr bri = %d' % ( new_brightness ) )
if new_brightness != self.old_brightness:
self.setBrightness( new_brightness )
self.old_brightness = new_brightness
# logme( 'Updating brightness to %d' % ( new_brightness ) )
except ValueError:
logme( 'Bad entry for %s' % ( self.brightnow_fname ) )
# else:
# print( 'Waiting for .brightnow to appear' )
QTimer.singleShot( TIME_BETWEEN_CHECKS, self.monitor )
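
    # Note: the ~/.brightnow file is assumed to hold a single integer (the
    # current backlight percentage) written by some other Chrubix component,
    # e.g. (illustrative only):
    #
    #     with open(os.path.expanduser('~/.brightnow'), 'w') as f:
    #         f.write('75')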
def setBrightness( self, brightness ):
# logme( 'setBrightness(%d)' % ( brightness ) )
self.cycles = 0
self.show()
self.progressBar.setValue( brightness )
self.update()
self.repaint()
# self.raise_()
@pyqtSignature( "" )
def closeEvent( self, event ):
event.accept()
sys.exit()
#------------------------------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
app = QtGui.QApplication( sys.argv )
window = BrightnessControlWidget()
screen = QtGui.QDesktopWidget().screenGeometry()
window.setGeometry( screen.width() - window.width() * 2 - 2, screen.height() - 49, window.width(), window.height() )
sys.exit( app.exec_() )
| gpl-3.0 | 2,982,620,355,699,736,600 | 31.693878 | 133 | 0.586454 | false |
jelly/calibre | src/calibre/ebooks/rtf2xml/preamble_div.py | 2 | 22954 | #########################################################################
# #
# #
# copyright 2002 Paul Henry Tremblay #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #
# General Public License for more details. #
# #
# #
#########################################################################
import sys, os
from calibre.ebooks.rtf2xml import copy, override_table, list_table
from calibre.ptempfile import better_mktemp
class PreambleDiv:
"""
Break the preamble into divisions.
"""
def __init__(self, in_file,
bug_handler,
copy=None,
no_namespace=None,
run_level=1,
):
"""
Required:
'file'
Optional:
'copy'-- whether to make a copy of result for debugging
'temp_dir' --where to output temporary results (default is
directory from which the script is run.)
Returns:
nothing
"""
self.__file = in_file
self.__bug_handler = bug_handler
self.__copy = copy
self.__no_namespace = no_namespace
self.__write_to = better_mktemp()
self.__run_level = run_level
def __initiate_values(self):
"""
Set values, including those for the dictionary.
"""
self.__all_lists = {}
self.__page = {
'margin-top' : 72,
'margin-bottom' : 72,
'margin-left' : 90,
'margin-right' : 90,
'gutter' : 0,
}
self.__cb_count = ''
self.__ob_count = ''
self.__state = 'preamble'
self.__rtf_final = ''
self.__close_group_count = ''
self.__found_font_table = 0
self.__list_table_final = ''
self.__override_table_final = ''
self.__revision_table_final = ''
self.__doc_info_table_final = ''
self.__state_dict = {
'default' : self.__default_func,
'rtf_header' : self.__rtf_head_func,
'preamble' : self.__preamble_func,
'font_table' : self.__font_table_func,
'color_table' : self.__color_table_func,
'style_sheet' : self.__style_sheet_func,
'list_table' : self.__list_table_func,
'override_table' : self.__override_table_func,
'revision_table' : self.__revision_table_func,
'doc_info' : self.__doc_info_func,
'body' : self.__body_func,
'ignore' : self.__ignore_func,
'cw<ri<rtf_______' : self.__found_rtf_head_func,
'cw<pf<par-def___' : self.__para_def_func,
'tx<nu<__________' : self.__text_func,
'cw<tb<row-def___' : self.__row_def_func,
'cw<sc<section___' : self.__new_section_func,
'cw<sc<sect-defin' : self.__new_section_func,
'cw<it<font-table' : self.__found_font_table_func,
'cw<it<colr-table' : self.__found_color_table_func,
'cw<ss<style-shet' : self.__found_style_sheet_func,
'cw<it<listtable_' : self.__found_list_table_func,
'cw<it<lovr-table' : self.__found_override_table_func,
'cw<it<revi-table' : self.__found_revision_table_func,
'cw<di<doc-info__' : self.__found_doc_info_func,
'cw<pa<margin-lef' : self.__margin_func,
'cw<pa<margin-rig' : self.__margin_func,
'cw<pa<margin-top' : self.__margin_func,
'cw<pa<margin-bot' : self.__margin_func,
'cw<pa<gutter____' : self.__margin_func,
'cw<pa<paper-widt' : self.__margin_func,
'cw<pa<paper-hght' : self.__margin_func,
# 'cw<tb<columns___' : self.__section_func,
}
self.__margin_dict = {
'margin-lef' : 'margin-left',
'margin-rig' : 'margin-right',
'margin-top' : 'margin-top',
'margin-bot' : 'margin-bottom',
'gutter____' : 'gutter',
'paper-widt' : 'paper-width',
'paper-hght' : 'paper-height',
}
self.__translate_sec = {
'columns___' : 'column',
}
self.__section = {}
# self.__write_obj.write(self.__color_table_final)
self.__color_table_final = ''
self.__style_sheet_final = ''
self.__individual_font = 0
self.__old_font = 0
self.__ob_group = 0 # depth of group
self.__font_table_final = 0
self.__list_table_obj = list_table.ListTable(
run_level=self.__run_level,
bug_handler=self.__bug_handler,
)
def __ignore_func(self, line):
"""
Ignore all lines, until the bracket is found that marks the end of
the group.
"""
if self.__ignore_num == self.__cb_count:
self.__state = self.__previous_state
def __found_rtf_head_func(self, line):
self.__state = 'rtf_header'
def __rtf_head_func(self, line):
if self.__ob_count == '0002':
self.__rtf_final = (
'mi<mk<rtfhed-beg\n' +
self.__rtf_final +
'mi<mk<rtfhed-end\n'
)
self.__state = 'preamble'
elif self.__token_info == 'tx<nu<__________' or \
self.__token_info == 'cw<pf<par-def___':
self.__state = 'body'
self.__rtf_final = (
'mi<mk<rtfhed-beg\n' +
self.__rtf_final +
'mi<mk<rtfhed-end\n'
)
self.__make_default_font_table()
self.__write_preamble()
self.__write_obj.write(line)
else:
self.__rtf_final = self.__rtf_final + line
def __make_default_font_table(self):
"""
        If no font table is found, we need to write a default one out.
"""
self.__font_table_final = 'mi<tg<open______<font-table\n'
self.__font_table_final += 'mi<mk<fonttb-beg\n'
self.__font_table_final += 'mi<mk<fontit-beg\n'
self.__font_table_final += 'cw<ci<font-style<nu<0\n'
self.__font_table_final += 'tx<nu<__________<Times;\n'
self.__font_table_final += 'mi<mk<fontit-end\n'
self.__font_table_final += 'mi<mk<fonttb-end\n'
self.__font_table_final += 'mi<tg<close_____<font-table\n'
def __make_default_color_table(self):
"""
If no color table is found, write a string for a default one
"""
self.__color_table_final = 'mi<tg<open______<color-table\n'
self.__color_table_final += 'mi<mk<clrtbl-beg\n'
self.__color_table_final += 'cw<ci<red_______<nu<00\n'
self.__color_table_final += 'cw<ci<green_____<nu<00\n'
self.__color_table_final += 'cw<ci<blue______<en<00\n'
self.__color_table_final += 'mi<mk<clrtbl-end\n'
self.__color_table_final += 'mi<tg<close_____<color-table\n'
def __make_default_style_table(self):
"""
        If no style sheet is found, make a string for a default one
"""
"""
self.__style_sheet_final = 'mi<tg<open______<style-table\n'
self.__style_sheet_final +=
self.__style_sheet_final +=
self.__style_sheet_final +=
self.__style_sheet_final +=
self.__style_sheet_final +=
self.__style_sheet_final += 'mi<tg<close_____<style-table\n'
"""
self.__style_sheet_final = """mi<tg<open______<style-table
mi<mk<styles-beg
mi<mk<stylei-beg
cw<ci<font-style<nu<0
tx<nu<__________<Normal;
mi<mk<stylei-end
mi<mk<stylei-beg
cw<ss<char-style<nu<0
tx<nu<__________<Default Paragraph Font;
mi<mk<stylei-end
mi<mk<styles-end
mi<tg<close_____<style-table
"""
def __found_font_table_func(self, line):
if self.__found_font_table:
self.__state = 'ignore'
else:
self.__state = 'font_table'
self.__font_table_final = ''
self.__close_group_count = self.__ob_count
self.__cb_count = 0
self.__found_font_table = 1
def __font_table_func(self, line):
"""
        Keep adding to the self.__font_table_final string until the end of
        the group is found. If a bracket is found, check that it is only one
        bracket deep.
If it is, then set the marker for an individual font. If it is not,
then ignore all data in this group.
cw<ci<font-style<nu<0
"""
if self.__cb_count == self.__close_group_count:
self.__state = 'preamble'
self.__font_table_final = 'mi<tg<open______<font-table\n' + \
'mi<mk<fonttb-beg\n' + self.__font_table_final
self.__font_table_final += \
'mi<mk<fonttb-end\n' + 'mi<tg<close_____<font-table\n'
elif self.__token_info == 'ob<nu<open-brack':
if int(self.__ob_count) == int(self.__close_group_count) + 1:
self.__font_table_final += \
'mi<mk<fontit-beg\n'
self.__individual_font = 1
else:
# ignore
self.__previous_state = 'font_table'
self.__state = 'ignore'
self.__ignore_num = self.__ob_count
elif self.__token_info == 'cb<nu<clos-brack':
if int(self.__cb_count) == int(self.__close_group_count) + 1:
self.__individual_font = 0
self.__font_table_final += \
'mi<mk<fontit-end\n'
elif self.__individual_font:
if self.__old_font and self.__token_info == 'tx<nu<__________':
if ';' in line:
self.__font_table_final += line
self.__font_table_final += 'mi<mk<fontit-end\n'
self.__individual_font = 0
else:
self.__font_table_final += line
elif self.__token_info == 'cw<ci<font-style':
self.__old_font = 1
self.__individual_font = 1
self.__font_table_final += 'mi<mk<fontit-beg\n'
self.__font_table_final += line
def __old_font_func(self, line):
"""
Required:
line --line to parse
Returns:
nothing
Logic:
used for older forms of RTF:
\f3\fswiss\fcharset77 Helvetica-Oblique;\f4\fnil\fcharset77 Geneva;}
Note how each font is not divided by a bracket
"""
def __found_color_table_func(self, line):
"""
all functions that start with __found operate the same. They set the
state, initiate a string, determine the self.__close_group_count, and
set self.__cb_count to zero.
"""
self.__state = 'color_table'
self.__color_table_final = ''
self.__close_group_count = self.__ob_count
self.__cb_count = 0
def __color_table_func(self, line):
if int(self.__cb_count) == int(self.__close_group_count):
self.__state = 'preamble'
self.__color_table_final = 'mi<tg<open______<color-table\n' + \
'mi<mk<clrtbl-beg\n' + self.__color_table_final
self.__color_table_final += \
'mi<mk<clrtbl-end\n' + 'mi<tg<close_____<color-table\n'
else:
self.__color_table_final += line
def __found_style_sheet_func(self, line):
self.__state = 'style_sheet'
self.__style_sheet_final = ''
self.__close_group_count = self.__ob_count
self.__cb_count = 0
def __style_sheet_func(self, line):
"""
Same logic as the font_table_func.
"""
if self.__cb_count == self.__close_group_count:
self.__state = 'preamble'
self.__style_sheet_final = 'mi<tg<open______<style-table\n' + \
'mi<mk<styles-beg\n' + self.__style_sheet_final
self.__style_sheet_final += \
'mi<mk<styles-end\n' + 'mi<tg<close_____<style-table\n'
elif self.__token_info == 'ob<nu<open-brack':
if int(self.__ob_count) == int(self.__close_group_count) + 1:
self.__style_sheet_final += \
'mi<mk<stylei-beg\n'
elif self.__token_info == 'cb<nu<clos-brack':
if int(self.__cb_count) == int(self.__close_group_count) + 1:
self.__style_sheet_final += \
'mi<mk<stylei-end\n'
else:
self.__style_sheet_final += line
def __found_list_table_func(self, line):
self.__state = 'list_table'
self.__list_table_final = ''
self.__close_group_count = self.__ob_count
self.__cb_count = 0
def __list_table_func(self, line):
if self.__cb_count == self.__close_group_count:
self.__state = 'preamble'
self.__list_table_final, self.__all_lists =\
self.__list_table_obj.parse_list_table(
self.__list_table_final)
# sys.stderr.write(repr(all_lists))
elif self.__token_info == '':
pass
else:
self.__list_table_final += line
pass
def __found_override_table_func(self, line):
self.__override_table_obj = override_table.OverrideTable(
run_level=self.__run_level,
list_of_lists=self.__all_lists,
)
self.__state = 'override_table'
self.__override_table_final = ''
self.__close_group_count = self.__ob_count
self.__cb_count = 0
# cw<it<lovr-table
def __override_table_func(self, line):
if self.__cb_count == self.__close_group_count:
self.__state = 'preamble'
self.__override_table_final, self.__all_lists =\
self.__override_table_obj.parse_override_table(self.__override_table_final)
elif self.__token_info == '':
pass
else:
self.__override_table_final += line
def __found_revision_table_func(self, line):
self.__state = 'revision_table'
self.__revision_table_final = ''
self.__close_group_count = self.__ob_count
self.__cb_count = 0
def __revision_table_func(self, line):
if int(self.__cb_count) == int(self.__close_group_count):
self.__state = 'preamble'
self.__revision_table_final = 'mi<tg<open______<revision-table\n' + \
'mi<mk<revtbl-beg\n' + self.__revision_table_final
self.__revision_table_final += \
'mi<mk<revtbl-end\n' + 'mi<tg<close_____<revision-table\n'
else:
self.__revision_table_final += line
def __found_doc_info_func(self, line):
self.__state = 'doc_info'
self.__doc_info_table_final = ''
self.__close_group_count = self.__ob_count
self.__cb_count = 0
def __doc_info_func(self, line):
if self.__cb_count == self.__close_group_count:
self.__state = 'preamble'
self.__doc_info_table_final = 'mi<tg<open______<doc-information\n' + \
'mi<mk<doc-in-beg\n' + self.__doc_info_table_final
self.__doc_info_table_final += \
'mi<mk<doc-in-end\n' + 'mi<tg<close_____<doc-information\n'
elif self.__token_info == 'ob<nu<open-brack':
if int(self.__ob_count) == int(self.__close_group_count) + 1:
self.__doc_info_table_final += \
'mi<mk<docinf-beg\n'
elif self.__token_info == 'cb<nu<clos-brack':
if int(self.__cb_count) == int(self.__close_group_count) + 1:
self.__doc_info_table_final += \
'mi<mk<docinf-end\n'
else:
self.__doc_info_table_final += line
def __margin_func(self, line):
"""
Handles lines that describe page info. Add the apporpriate info in the
token to the self.__margin_dict dicitonary.
"""
info = line[6:16]
changed = self.__margin_dict.get(info)
if changed is None:
print 'woops!'
else:
self.__page[changed] = line[20:-1]
# cw<pa<margin-lef<nu<1728
def __print_page_info(self):
self.__write_obj.write('mi<tg<empty-att_<page-definition')
for key in self.__page.keys():
self.__write_obj.write(
'<%s>%s' % (key, self.__page[key])
)
self.__write_obj.write('\n')
# mi<tg<open-att__<footn
def __print_sec_info(self):
"""
Check if there is any section info. If so, print it out.
If not, print out an empty tag to satisfy the dtd.
"""
if len(self.__section.keys()) == 0:
self.__write_obj.write(
'mi<tg<open______<section-definition\n'
)
else:
self.__write_obj.write(
'mi<tg<open-att__<section-definition')
keys = self.__section.keys()
for key in keys:
self.__write_obj.write(
'<%s>%s' % (key, self.__section[key])
)
self.__write_obj.write('\n')
def __section_func(self, line):
"""
Add info pertaining to section to the self.__section dictionary, to be
printed out later.
"""
info = self.__translate_sec.get(line[6:16])
if info is None:
sys.stderr.write('woops!\n')
else:
self.__section[info] = 'true'
def __body_func(self, line):
self.__write_obj.write(line)
def __default_func(self, line):
# either in preamble or in body
pass
def __para_def_func(self, line):
# if self.__ob_group == 1
# this tells dept of group
if self.__cb_count == '0002':
self.__state = 'body'
self.__write_preamble()
self.__write_obj.write(line)
def __text_func(self, line):
"""
If the cb_count is less than 1, you have hit the body
For older RTF
Newer RTF should never have to use this function
"""
if self.__cb_count == '':
cb_count = '0002'
else:
cb_count = self.__cb_count
# ignore previous lines
# should be
# if self.__ob_group == 1
# this tells dept of group
if cb_count == '0002':
self.__state = 'body'
self.__write_preamble()
self.__write_obj.write(line)
def __row_def_func(self, line):
# if self.__ob_group == 1
# this tells dept of group
if self.__cb_count == '0002':
self.__state = 'body'
self.__write_preamble()
self.__write_obj.write(line)
def __new_section_func(self, line):
"""
This is new. The start of a section marks the end of the preamble
"""
if self.__cb_count == '0002':
self.__state = 'body'
self.__write_preamble()
else:
sys.stderr.write('module is preamble_div\n')
sys.stderr.write('method is __new_section_func\n')
sys.stderr.write('bracket count should be 2?\n')
self.__write_obj.write(line)
def __write_preamble(self):
"""
Write all the strings, which represent all the data in the preamble.
Write a body and section beginning.
"""
if self.__no_namespace:
self.__write_obj.write(
'mi<tg<open______<doc\n'
)
else:
self.__write_obj.write(
'mi<tg<open-att__<doc<xmlns>http://rtf2xml.sourceforge.net/\n')
self.__write_obj.write('mi<tg<open______<preamble\n')
self.__write_obj.write(self.__rtf_final)
if not self.__color_table_final:
self.__make_default_color_table()
if not self.__font_table_final:
self.__make_default_font_table()
self.__write_obj.write(self.__font_table_final)
self.__write_obj.write(self.__color_table_final)
if not self.__style_sheet_final:
self.__make_default_style_table()
self.__write_obj.write(self.__style_sheet_final)
self.__write_obj.write(self.__list_table_final)
self.__write_obj.write(self.__override_table_final)
self.__write_obj.write(self.__revision_table_final)
self.__write_obj.write(self.__doc_info_table_final)
self.__print_page_info()
self.__write_obj.write('ob<nu<open-brack<0001\n')
self.__write_obj.write('ob<nu<open-brack<0002\n')
self.__write_obj.write('cb<nu<clos-brack<0002\n')
self.__write_obj.write('mi<tg<close_____<preamble\n')
self.__write_obj.write('mi<tg<open______<body\n')
# self.__write_obj.write('mi<tg<open-att__<section<num>1\n')
# self.__print_sec_info()
# self.__write_obj.write('mi<tg<open______<headers-and-footers\n')
# self.__write_obj.write('mi<mk<head_foot_<\n')
# self.__write_obj.write('mi<tg<close_____<headers-and-footers\n')
self.__write_obj.write('mi<mk<body-open_\n')
def __preamble_func(self, line):
"""
Check if the token info belongs to the dictionary. If so, take the
appropriate action.
"""
action = self.__state_dict.get(self.__token_info)
if action:
action(line)
def make_preamble_divisions(self):
self.__initiate_values()
read_obj = open(self.__file, 'r')
self.__write_obj = open(self.__write_to, 'w')
line_to_read = 1
while line_to_read:
line_to_read = read_obj.readline()
line = line_to_read
self.__token_info = line[:16]
if self.__token_info == 'ob<nu<open-brack':
self.__ob_count = line[-5:-1]
self.__ob_group += 1
if self.__token_info == 'cb<nu<clos-brack':
self.__cb_count = line[-5:-1]
self.__ob_group -= 1
action = self.__state_dict.get(self.__state)
if action is None:
print self.__state
action(line)
read_obj.close()
self.__write_obj.close()
copy_obj = copy.Copy(bug_handler=self.__bug_handler)
if self.__copy:
copy_obj.copy_file(self.__write_to, "preamble_div.data")
copy_obj.rename(self.__write_to, self.__file)
os.remove(self.__write_to)
return self.__all_lists
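
# Usage sketch (illustrative, not part of calibre): rewrites the tokenised RTF
# file in place and returns the parsed list table.
#
#     pd = PreambleDiv(in_file='tokens.data', bug_handler=my_bug_handler)
#     all_lists = pd.make_preamble_divisions()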
| gpl-3.0 | -456,268,684,472,462,900 | 38.037415 | 91 | 0.500218 | false |
ubibene/mwclient | mwclient/ex.py | 2 | 2582 | import client
import requests
def read_config(config_files, **predata):
cfg = {}
for config_file in config_files:
cfg.update(_read_config_file(
config_file, predata))
return cfg
def _read_config_file(_config_file, predata):
_file = open(_config_file)
exec(_file, globals(), predata)
_file.close()
for _k, _v in predata.iteritems():
if not _k.startswith('_'):
yield _k, _v
for _k, _v in locals().iteritems():
if not _k.startswith('_'):
yield _k, _v
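
# Illustrative config file for read_config()/ConfiguredSite (an assumption
# about the expected layout; the file is exec'd as Python, so plain
# assignments become config keys and `sites` is the injected SiteList):
#
#     host = 'en.wikipedia.org'
#     path = '/w/'
#     sites['enwiki']['username'] = 'BotName'
#     sites['enwiki']['password'] = 'hunter2'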
class SiteList(object):
def __init__(self):
self.sites = {}
def __getitem__(self, key):
if key not in self.sites:
self.sites[key] = {}
return self.sites[key]
def __iter__(self):
return self.sites.itervalues()
class ConfiguredSite(client.Site):
def __init__(self, *config_files, **kwargs):
self.config = read_config(config_files, sites=SiteList())
if 'name' in kwargs:
self.config.update(self.config['sites'][kwargs['name']])
do_login = 'username' in self.config and 'password' in self.config
super(ConfiguredSite, self).__init__(
host=self.config['host'],
path=self.config['path'],
ext=self.config.get('ext', '.php'),
do_init=not do_login,
retry_timeout=self.config.get('retry_timeout', 30),
max_retries=self.config.get('max_retries', -1),
)
if do_login:
self.login(self.config['username'],
self.config['password'])
class ConfiguredPool(list):
def __init__(self, *config_files):
self.config = read_config(config_files, sites=SiteList())
self.pool = requests.Session()
config = dict([(k, v) for k, v in self.config.iteritems()
if k != 'sites'])
for site in self.config['sites']:
cfg = config.copy()
cfg.update(site)
site.update(cfg)
do_login = 'username' in site and 'password' in site
self.append(client.Site(host=site['host'],
path=site['path'], ext=site.get('ext', '.php'),
pool=self.pool, do_init=not do_login,
retry_timeout=site.get('retry_timeout', 30),
max_retries=site.get('max_retries', -1)))
if do_login:
self[-1].login(site['username'], site['password'])
self[-1].config = site
| mit | -6,205,772,578,056,202,000 | 28.678161 | 83 | 0.522851 | false |
bigent/sinemalar-python | sinemalar/api.py | 1 | 13305 | import datetime, json
import requests
from .core import CallObject
from . import str2bool
class Artist(object):
def __init__(self, artist):
if type(artist) is not dict:
raise TypeError("Type of 'artist' must be 'dict'.")
self.id = int(artist['id'])
self.nameSurname = artist['nameSurname']
self.characterName = str2bool(artist['characterName'], True)
self.image = artist['image']
class Comment(object):
def __init__(self, comment):
if type(comment) is not dict:
raise TypeError("Type of 'comment' must be 'dict'.")
self.id = int(comment['id'])
self.username = comment['username']
self.comment = comment['comment']
self.addDate = datetime.datetime.strptime(comment['addDate'], '%Y-%m-%d %H:%M:%S')
class Movie(CallObject):
def __init__(self, movie_id=0, display_artists=False, display_comments=False, movie=None, to_gallery=False):
if type(movie_id) is not int:
raise TypeError("Type of 'movie_id' must be 'int'.")
if movie and movie_id:
raise ValueError("Only one can set a value.")
if not movie and not movie_id:
raise ValueError("You should set a value to 'movie_id' or 'movie'.")
CallObject.__init__(self)
self.to_gallery = to_gallery
self._path_name = "movie"
self.movie_id = movie_id
if movie:
if type(movie) is not dict:
raise TypeError("Type of 'movie' must be 'dict'.")
self.id = movie['id']
self.name = movie['name']
self.orgName = movie['orgName']
try:
self.image = movie['image']
self.rating = float(movie['rating'])
except:
pass
try:
self.type = movie['type']
self.seances = []
for i in movie['seances']:
self.seances.append(datetime.datetime.strptime(i, '%H:%M'))
self.selected = int(movie['selected'])
except:
pass
try:
self.director = movie['director']
except:
pass
elif not to_gallery:
if type(display_artists) is not bool:
raise TypeError("Type of 'display_artist' must be 'boolean'.")
if type(display_comments) is not bool:
raise TypeError("Type of 'display_comments' must be 'boolean'.")
self.display_artists = display_artists
self.display_comments = display_comments
if str2bool(self.show()[self._path_name]['id'], True):
self.id = int(self.show()[self._path_name]['id'])
else:
raise ValueError("Not found any movie of this ID.")
self.name = self.show()[self._path_name]['name']
self.orgName = self.show()[self._path_name]['orgName']
self.image = self.show()[self._path_name]['image']
self.rating = float(self.show()[self._path_name]['rating'])
self.type = self.show()[self._path_name]['type']
self.director = self.show()[self._path_name]['director']
self.summary = str2bool(self.show()[self._path_name]['summary'], True)
self.duration = str2bool(self.show()[self._path_name]['duration'], True)
self.produceYear = int(self.show()[self._path_name]['produceYear'])
self.week = str2bool(self.show()[self._path_name]['week'], True)
self.pubDate = str2bool(self.show()[self._path_name]['pubDate'], True)
self.embedId = str2bool(self.show()[self._path_name]['embedId'], True)
self.embedTitle = str2bool(self.show()[self._path_name]['embedTitle'], True)
self.trailerUrl = self.show()[self._path_name]['trailerUrl']
#artists
if display_artists:
self.artists = []
for i in self.show()['artists']:
self.artists.append(Artist(i))
#comments
if display_comments:
self.comments = []
for i in self.show()['comments']:
self.comments.append(Comment(i))
else:
if type(to_gallery) is not bool:
raise TypeError("Type of 'to_gallery' must be 'boolean'.")
self.gallery = []
if str2bool(self.show(), True):
for i in self.show():
self.gallery.append(i)
else:
raise ValueError("Not found any movie of this ID.")
def show(self):
if self.to_gallery:
return self.GET(
'gallery',
self._path_name,
self.movie_id,
)
else:
return self.GET(
self._path_name,
self.movie_id,
self.is_True(self.display_artists),
self.is_True(self.display_comments)
)
class Theatre(object):
def __init__(self, theatre):
if type(theatre) is not dict:
raise TypeError("Type of 'theatre' must be 'dict'.")
self.id = int(theatre['id'])
self.name = theatre['name']
try:
self.seances = []
for i in theatre['seances'][0]:
self.seances.append(datetime.datetime.strptime(i, '%H:%M'))
self.selected = theatre['selected']
except:
pass
try:
self.city = theatre['city']
self.latitude = float(theatre['latitude'])
self.longitude = float(theatre['longitude'])
self.phone = theatre['phone']
self.address = theatre['address']
except:
pass
try:
self.ad = theatre['ad']
#seances
self.movies = []
for i in theatre['movies']:
self.movies.append(Movie(i))
except:
pass
try:
self.town = theatre['town']
self.distance = theatre['distance']
except:
pass
try:
self.cityId = int(theatre['cityId'])
except:
pass
class Theatres(CallObject):
def __init__(self, theatre_id=0, city_id=0, city_count=1000):
if type(theatre_id) is not int:
raise TypeError("Type of 'theatre_id' must be 'int'.")
if type(city_id) is not int:
raise TypeError("Type of 'city_id' must be 'int'.")
if type(city_count) is not int:
raise TypeError("Type of 'city_count' must be 'int'.")
if theatre_id and city_id:
raise ValueError("Only one can set a value.")
if not theatre_id and not city_id:
raise ValueError("You should set a value to 'theatre_id' or 'city_id'.")
CallObject.__init__(self)
self._path_name = "theatre"
self.theatre_id = theatre_id
self.city_id = city_id
self.city_count = city_count
if city_id:
if str2bool(self.show()[0]['id'], True):
self.theatres = []
for i in self.show():
self.theatres.append(Theatre(i))
else:
raise ValueError("Not found any city of this ID.")
else:
if str2bool(self.show()[0]['id'], True):
self.theatre = Theatre(self.show())
else:
raise ValueError("Not found any theatre of this ID.")
def show(self):
if self.city_id:
return self.GET(
self._path_name,
0,
1,
self.city_id,
self.city_count
)
else:
return self.GET(
self._path_name,
self.theatre_id,
)[0]
class NearTheatre(CallObject):
def __init__(self, lat=41.0, lng=30.0):
if type(lat) is not float:
if type(lat) is not int:
raise TypeError("Type of 'lat' must be 'float' or 'int'.")
if type(lng) is not float:
if type(lng) is not int:
raise TypeError("Type of 'lng' must be 'float' or 'int'.")
CallObject.__init__(self)
self._path_name = "nearTheatre"
self._latitude = str(lat)
self._longitude = str(lng)
try:
self.show()
except:
raise ValueError("Not found any near theatre in this latitude and longitude.")
if str2bool(self.show()['tenPlus'], True) is not False:
self.theatres = []
for i in self.show()['tenPlus']:
self.theatres.append(Theatre(i))
if str2bool(self.show()['five'], True) is not False:
self.theatres = []
for i in self.show()['five']:
self.theatres.append(Theatre(i))
def show(self):
return self.GET(
"gps",
self._path_name,
self._latitude,
self._longitude
)
class City(object):
def __init__(self, city):
if type(city) is not dict:
raise TypeError("Type of 'city' must be 'dict'.")
self.id = int(city['id'])
self.name = city['name']
class Cities(CallObject):
def __init__(self):
CallObject.__init__(self)
self._path_name = "cities"
#cities
self.cities = []
for i in self.show():
self.cities.append(City(city=i))
def show(self):
return self.GET(
self._path_name,
"0",
)
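
# Usage sketch (illustrative; IDs are placeholders and these calls hit the
# live API):
#
#     for city in Cities().cities:
#         print city.id, city.name
#     movie = Movie(movie_id=1, display_artists=True)
#     print movie.name, movie.rating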
class PlayingMovies(CallObject):
def __init__(self):
CallObject.__init__(self)
self.api_domain = "www.sinemalar.com"
self._path_name = "playingMovies"
self.sections = []
for i in self.show()['sections']:
self.sections.append(i)
self.movies = []
for i in self.show()['movies']:
for z in i:
self.movies.append(Movie(movie=z))
def show(self):
return self.GET(
self._path_name,
)
class PlayingMoviesRemain(PlayingMovies):
def __init__(self):
PlayingMovies.__init__(self)
self._path_name = "playingMoviesRemain"
class ComingSoon(PlayingMovies):
def __init__(self):
PlayingMovies.__init__(self)
self._path_name = "comingSoon"
class NearestSeances(CallObject):
def __init__(self, movie_id, lat=41.0, lng=30.0):
if type(movie_id) is not int:
raise TypeError("Type of 'movie_id' must be 'int'.")
if type(lat) is not float:
if type(lat) is not int:
raise TypeError("Type of 'lat' must be 'float' or 'int'.")
if type(lng) is not float:
if type(lng) is not int:
raise TypeError("Type of 'lng' must be 'float' or 'int'.")
CallObject.__init__(self)
self._path_name = "seance"
self.movie_id = movie_id
self._latitude = str(lat)
self._longitude = str(lng)
try:
self.show()
except:
raise ValueError("Not found the nearest seance of the movie in this latitude and longitude.")
self.seances = []
for i in self.show()['seances']:
self.seances.append(datetime.datetime.strptime(i, '%H:%M'))
self.selected = self.show()['selected']
self.cinema = Theatre(self.show()['cinema'])
def show(self):
return self.GET(
"gps",
self._path_name,
self._latitude,
self._longitude,
self.movie_id
)
class TheatreSeance(CallObject):
def __init__(self, city_id, movie_id):
if type(city_id) is not int:
raise TypeError("Type of 'city_id' must be 'int'.")
if type(movie_id) is not int:
raise TypeError("Type of 'movie_id' must be 'int'.")
CallObject.__init__(self)
self._path_name = "theatreSeance"
self.city_id = city_id
self.movie_id = movie_id
if not str2bool(self.show()['movie']['id'], True):
raise ValueError("Not found any movie of this ID.")
self.movie = Movie(movie=self.show()['movie'])
self.theatres = []
for i in self.show()['theatre']:
self.theatres.append(Theatre(i))
def show(self):
return self.GET(
self._path_name,
self.city_id,
self.movie_id
)
class ArtistGallery(CallObject):
def __init__(self, artist_id):
if type(artist_id) is not int:
raise TypeError("Type of 'artist_id' must be 'int'.")
CallObject.__init__(self)
self._path_name = "artist"
self.artist_id = artist_id
if not str2bool(self.show(), True):
raise ValueError("Not found any artist of this ID.")
self.gallery = []
for i in self.show():
self.gallery.append(i)
def show(self):
return self.GET(
"gallery",
self._path_name,
self.artist_id,
)
| mit | 7,511,655,941,144,440,000 | 29.798611 | 112 | 0.514543 | false |
sammosummo/sammosummo.github.io | assets/_scripts/variable-precision.py | 1 | 4851 | # approximate sum of two von mises with a single von mises
import numpy as np
import matplotlib.pyplot as plt  # needed by the plot=True branch of sim()
from scipy.stats import vonmises
def sim(a, b, plot=False, n=int(1e8)):
unwrapped = vonmises.rvs(a, size=n) + vonmises.rvs(b, size=n)
wrapped = (unwrapped + np.pi) % (2 * np.pi) - np.pi
kappa, _, _ = vonmises.fit(wrapped, floc=0, fscale=1)
if plot is True:
plt.hist(wrapped, normed=True, bins=100)
x = np.linspace(-np.pi, np.pi)
y = vonmises.pdf(x, kappa)
plt.plot(x, y)
return kappa
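
# Example (illustrative; n is reduced from the 1e8 default so it runs quickly):
#
#     k = sim(4.0, 4.0, plot=False, n=int(1e5))
#     print(k)  # fitted concentration of the single approximating von Mises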
# import numpy as np
# import pymc3 as pm
# import matplotlib.pyplot as plt
# import theano.tensor as tt
# from scipy.stats import norm, vonmises
# from scipy.integrate import quad
#
#
# n = 10000
# mu = 3
# sigma = 3
#
# k = np.exp(norm.rvs(mu, sigma, size=n))
# x = vonmises.rvs(kappa=k, size=n)
#
# with pm.Model():
#
# mu = pm.Normal(name="mu", mu=0, sigma=10)
# sigma = pm.HalfCauchy(name="sigma", beta=1)
# delta = pm.Normal(name="delta", mu=0, sigma=1, shape=n)
# kappa = tt.exp(mu + delta * sigma) # IMPORTANT! Use non-centered parameterization
# pm.VonMises(name="obs", mu=0, kappa=kappa, observed=x)
# trace = pm.sample(10000, tune=5000, chains=2)
# pm.traceplot(trace, compact=True, var_names=["mu", "sigma"])
# plt.savefig("tmp.png")
#
# # hist(x, bins=100, normed=True)
# #
# # x = np.linspace(-np.pi, np.pi, 100)
# #
# # def pdf(x, mu, sigma, a):
# # g = 1
# # v = vonmises.pdf(x, kappa=mu)
# # def f(k, x):
# # g = gamma.pdf(k, mu**2 / sigma**2, scale=1. / (mu / sigma**2))
# # v = vonmises.pdf(x, kappa=k)
# # return g * v
# # return [quad(f, 0, a, _x)[0] for _x in x]
# #
# # def logpdf(x, mu, sigma, a):
# # g = 1
# # v = vonmises.pdf(x, kappa=mu)
# # def f(k, x):
# # g = gamma.logpdf(k, mu**2 / sigma**2, scale=1. / (mu / sigma**2))
# # v = vonmises.logpdf(x, kappa=k)
# # return g * v
# # return [quad(f, 0, a, _x)[0] for _x in x]
# #
# # [plot(x, pdf(x, mu, sigma, a)) for a in [500]]
# #
# #
# # plot(x, np.log(pdf(x, mu, sigma)))
#
#
#
#
#
#
# # from scipy.integrate import quad
# # import theano
# # import theano.tensor as tt
# # import numpy as np
# # import pymc3 as pm
# #
# #
# # class Integrate(theano.Op):
# # def __init__(self, expr, var, *extra_vars):
# # super().__init__()
# # self._expr = expr
# # self._var = var
# # self._extra_vars = extra_vars
# # self._func = theano.function(
# # [var] + list(extra_vars),
# # self._expr,
# # on_unused_input='ignore')
# #
# # def make_node(self, start, stop, *extra_vars):
# # self._extra_vars_node = extra_vars
# # assert len(self._extra_vars) == len(extra_vars)
# # self._start = start
# # self._stop = stop
# # vars = [start, stop] + list(extra_vars)
# # # vars = list(extra_vars)
# # return theano.Apply(self, vars, [tt.dscalar().type()])
# #
# # def perform(self, node, inputs, out):
# # start, stop, *args = inputs
# # val = quad(self._func, start, stop, args=tuple(args))[0]
# # out[0][0] = np.array(val)
# #
# # def grad(self, inputs, grads):
# # start, stop, *args = inputs
# # out, = grads
# # replace = dict(zip(self._extra_vars, args))
# #
# # replace_ = replace.copy()
# # replace_[self._var] = start
# # dstart = out * theano.clone(-self._expr, replace=replace_)
# #
# # replace_ = replace.copy()
# # replace_[self._var] = stop
# # dstop = out * theano.clone(self._expr, replace=replace_)
# #
# # grads = tt.grad(self._expr, self._extra_vars)
# # dargs = []
# # for grad in grads:
# # integrate = Integrate(grad, self._var, *self._extra_vars)
# # darg = out * integrate(start, stop, *args)
# # dargs.append(darg)
# #
# # return [dstart, dstop] + dargs
# #
# #
# # y_obs = 8.3
# #
# # start = theano.shared(1.)
# # stop = theano.shared(2.)
# # with pm.Model() as basic_model:
# # a = pm.Uniform('a', 1.5, 3.5)
# # b = pm.Uniform('b', 4., 6.)
# #
# # # Define the function to integrate in plain theano
# # t = tt.dscalar('t')
# # t.tag.test_value = np.zeros(())
# # a_ = tt.dscalar('a_')
# # a_.tag.test_value = np.ones(())*2.
# # b_ = tt.dscalar('b_')
# # b_.tag.test_value = np.ones(())*5.
# # func = t**a_ + b_
# # integrate = Integrate(func, t, a_, b_)
# #
# # # Now we plug in the values from the model.
# # # The `a_` and `b_` from above corresponds to the `a` and `b` here.
# # mu = integrate(start, stop, a, b)
# # y = pm.Normal('y', mu=mu, sd=0.4, observed=y_obs)
# # trace = pm.sample(1500, tune=500, cores=2, chains=2) | mit | 1,727,707,249,573,825,000 | 30.506494 | 88 | 0.527726 | false |
googleapis/googleapis-gen | google/cloud/retail/v2alpha/retail-v2alpha-py/google/cloud/retail_v2alpha/types/product.py | 1 | 11878 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.retail_v2alpha.types import common
from google.protobuf import timestamp_pb2 # type: ignore
from google.protobuf import wrappers_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.cloud.retail.v2alpha',
manifest={
'Product',
},
)
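
# Usage sketch (illustrative; values are placeholders, see the field
# constraints documented on the class below):
#
#     product = Product(
#         id='id_1',
#         title='Example product title',
#         categories=['Shoes & Accessories > Shoes'],
#     )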
class Product(proto.Message):
r"""Product captures all metadata information of items to be
recommended or searched.
Attributes:
name (str):
Immutable. Full resource name of the product, such as
"projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/product_id".
The branch ID must be "default_branch".
id (str):
Immutable. [Product][google.cloud.retail.v2alpha.Product]
identifier, which is the final component of
[name][google.cloud.retail.v2alpha.Product.name]. For
example, this field is "id_1", if
[name][google.cloud.retail.v2alpha.Product.name] is
"projects/*/locations/global/catalogs/default_catalog/branches/default_branch/products/id_1".
This field must be a UTF-8 encoded string with a length
limit of 128 characters. Otherwise, an INVALID_ARGUMENT
error is returned.
Google Merchant Center property
`id <https://support.google.com/merchants/answer/6324405>`__.
Schema.org Property
`Product.sku <https://schema.org/sku>`__.
type_ (google.cloud.retail_v2alpha.types.Product.Type):
Immutable. The type of the product. This
field is output-only.
primary_product_id (str):
Variant group identifier. Must be an
[id][google.cloud.retail.v2alpha.Product.id], with the same
parent branch with this product. Otherwise, an error is
thrown.
For
[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY]
[Product][google.cloud.retail.v2alpha.Product]s, this field
can only be empty or set to the same value as
[id][google.cloud.retail.v2alpha.Product.id].
For VARIANT [Product][google.cloud.retail.v2alpha.Product]s,
this field cannot be empty. A maximum of 2,000 products are
allowed to share the same
[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY]
[Product][google.cloud.retail.v2alpha.Product]. Otherwise,
an INVALID_ARGUMENT error is returned.
Google Merchant Center Property
`item_group_id <https://support.google.com/merchants/answer/6324507>`__.
Schema.org Property
`Product.inProductGroupWithID <https://schema.org/inProductGroupWithID>`__.
This field must be enabled before it can be used. `Learn
more </recommendations-ai/docs/catalog#item-group-id>`__.
categories (Sequence[str]):
Product categories. This field is repeated for supporting
one product belonging to several parallel categories.
Strongly recommended using the full path for better search /
recommendation quality.
To represent full path of category, use '>' sign to separate
different hierarchies. If '>' is part of the category name,
please replace it with other character(s).
For example, if a shoes product belongs to both ["Shoes &
Accessories" -> "Shoes"] and ["Sports & Fitness" ->
"Athletic Clothing" -> "Shoes"], it could be represented as:
::
"categories": [
"Shoes & Accessories > Shoes",
"Sports & Fitness > Athletic Clothing > Shoes"
]
Must be set for
[Type.PRIMARY][google.cloud.retail.v2alpha.Product.Type.PRIMARY]
[Product][google.cloud.retail.v2alpha.Product] otherwise an
INVALID_ARGUMENT error is returned.
At most 250 values are allowed per
[Product][google.cloud.retail.v2alpha.Product]. Empty values
are not allowed. Each value must be a UTF-8 encoded string
with a length limit of 5,000 characters. Otherwise, an
INVALID_ARGUMENT error is returned.
Google Merchant Center property
`google_product_category <https://support.google.com/merchants/answer/6324436>`__.
Schema.org property [Product.category]
(https://schema.org/category).
title (str):
Required. Product title.
This field must be a UTF-8 encoded string with a length
limit of 128 characters. Otherwise, an INVALID_ARGUMENT
error is returned.
Google Merchant Center property
`title <https://support.google.com/merchants/answer/6324415>`__.
Schema.org property
`Product.name <https://schema.org/name>`__.
description (str):
Product description.
This field must be a UTF-8 encoded string with a length
limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT
error is returned.
Google Merchant Center property
`description <https://support.google.com/merchants/answer/6324468>`__.
schema.org property
`Product.description <https://schema.org/description>`__.
attributes (Sequence[google.cloud.retail_v2alpha.types.Product.AttributesEntry]):
Highly encouraged. Extra product attributes to be included.
For example, for products, this could include the store
name, vendor, style, color, etc. These are very strong
signals for recommendation model, thus we highly recommend
providing the attributes here.
Features that can take on one of a limited number of
possible values. Two types of features can be set are:
Textual features. some examples would be the brand/maker of
a product, or country of a customer. Numerical features.
Some examples would be the height/weight of a product, or
age of a customer.
For example:
``{ "vendor": {"text": ["vendor123", "vendor456"]}, "lengths_cm": {"numbers":[2.3, 15.4]}, "heights_cm": {"numbers":[8.1, 6.4]} }``.
A maximum of 150 attributes are allowed. Otherwise, an
INVALID_ARGUMENT error is returned.
The key must be a UTF-8 encoded string with a length limit
of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is
returned.
tags (Sequence[str]):
Custom tags associated with the product.
At most 250 values are allowed per
[Product][google.cloud.retail.v2alpha.Product]. This value
must be a UTF-8 encoded string with a length limit of 1,000
characters. Otherwise, an INVALID_ARGUMENT error is
returned.
This tag can be used for filtering recommendation results by
passing the tag as part of the
[PredictRequest.filter][google.cloud.retail.v2alpha.PredictRequest.filter].
Google Merchant Center property
`custom_label_0–4 <https://support.google.com/merchants/answer/6324473>`__.
price_info (google.cloud.retail_v2alpha.types.PriceInfo):
Product price and cost information.
Google Merchant Center property
`price <https://support.google.com/merchants/answer/6324371>`__.
available_time (google.protobuf.timestamp_pb2.Timestamp):
The timestamp when this
[Product][google.cloud.retail.v2alpha.Product] becomes
available recommendation and search.
availability (google.cloud.retail_v2alpha.types.Product.Availability):
The online availability of the
[Product][google.cloud.retail.v2alpha.Product]. Default to
[Availability.IN_STOCK][google.cloud.retail.v2alpha.Product.Availability.IN_STOCK].
Google Merchant Center Property
`availability <https://support.google.com/merchants/answer/6324448>`__.
Schema.org Property
`Offer.availability <https://schema.org/availability>`__.
available_quantity (google.protobuf.wrappers_pb2.Int32Value):
The available quantity of the item.
uri (str):
Canonical URL directly linking to the product detail page.
This field must be a UTF-8 encoded string with a length
limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT
error is returned.
Google Merchant Center property
`link <https://support.google.com/merchants/answer/6324416>`__.
Schema.org property `Offer.url <https://schema.org/url>`__.
images (Sequence[google.cloud.retail_v2alpha.types.Image]):
Product images for the product.
A maximum of 300 images are allowed.
Google Merchant Center property
`image_link <https://support.google.com/merchants/answer/6324350>`__.
Schema.org property
`Product.image <https://schema.org/image>`__.
"""
class Type(proto.Enum):
r"""The type of this product."""
TYPE_UNSPECIFIED = 0
PRIMARY = 1
VARIANT = 2
COLLECTION = 3
class Availability(proto.Enum):
r"""Product availability. If this field is unspecified, the
product is assumed to be in stock.
"""
AVAILABILITY_UNSPECIFIED = 0
IN_STOCK = 1
OUT_OF_STOCK = 2
PREORDER = 3
BACKORDER = 4
name = proto.Field(
proto.STRING,
number=1,
)
id = proto.Field(
proto.STRING,
number=2,
)
type_ = proto.Field(
proto.ENUM,
number=3,
enum=Type,
)
primary_product_id = proto.Field(
proto.STRING,
number=4,
)
categories = proto.RepeatedField(
proto.STRING,
number=7,
)
title = proto.Field(
proto.STRING,
number=8,
)
description = proto.Field(
proto.STRING,
number=10,
)
attributes = proto.MapField(
proto.STRING,
proto.MESSAGE,
number=12,
message=common.CustomAttribute,
)
tags = proto.RepeatedField(
proto.STRING,
number=13,
)
price_info = proto.Field(
proto.MESSAGE,
number=14,
message=common.PriceInfo,
)
available_time = proto.Field(
proto.MESSAGE,
number=18,
message=timestamp_pb2.Timestamp,
)
availability = proto.Field(
proto.ENUM,
number=19,
enum=Availability,
)
available_quantity = proto.Field(
proto.MESSAGE,
number=20,
message=wrappers_pb2.Int32Value,
)
uri = proto.Field(
proto.STRING,
number=22,
)
images = proto.RepeatedField(
proto.MESSAGE,
number=23,
message=common.Image,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 2,006,343,473,132,579,000 | 37.309677 | 144 | 0.616706 | false |
gkonstantyno/construct | construct/debug.py | 1 | 4154 | """
Debugging utilities for constructs
"""
import sys
import traceback
import pdb
import inspect
from construct.core import Construct, Subconstruct
from construct.lib import HexString, Container, ListContainer
class Probe(Construct):
"""
A probe: dumps the context, stack frames, and stream content to the screen
to aid the debugging process.
.. seealso:: :class:`Debugger`.
:param name: the display name
:param show_stream: whether or not to show stream contents. default is True. the stream must be seekable.
:param show_context: whether or not to show the context. default is True.
:param show_stack: whether or not to show the upper stack frames. default is True.
:param stream_lookahead: the number of bytes to dump when show_stack is set. default is 100.
Example::
Struct("foo",
UBInt8("a"),
Probe("between a and b"),
UBInt8("b"),
)
"""
__slots__ = [
"printname", "show_stream", "show_context", "show_stack",
"stream_lookahead"
]
counter = 0
def __init__(self, name = None, show_stream = True,
show_context = True, show_stack = True,
stream_lookahead = 100):
super(Probe, self).__init__(None)
if name is None:
Probe.counter += 1
name = "<unnamed %d>" % (Probe.counter,)
self.printname = name
self.show_stream = show_stream
self.show_context = show_context
self.show_stack = show_stack
self.stream_lookahead = stream_lookahead
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.printname)
def _parse(self, stream, context):
self.printout(stream, context)
def _build(self, obj, stream, context):
self.printout(stream, context)
def _sizeof(self, context):
return 0
def printout(self, stream, context):
obj = Container()
if self.show_stream:
obj.stream_position = stream.tell()
follows = stream.read(self.stream_lookahead)
if not follows:
obj.following_stream_data = "EOF reached"
else:
stream.seek(-len(follows), 1)
obj.following_stream_data = HexString(follows)
print("")
if self.show_context:
obj.context = context
if self.show_stack:
obj.stack = ListContainer()
frames = [s[0] for s in inspect.stack()][1:-1]
frames.reverse()
for f in frames:
a = Container()
a.__update__(f.f_locals)
obj.stack.append(a)
print("=" * 80)
print("Probe %s" % (self.printname,))
print(obj)
print("=" * 80)
class Debugger(Subconstruct):
"""
A pdb-based debugger. When an exception occurs in the subcon, a debugger
will appear and allow you to debug the error (and even fix on-the-fly).
:param subcon: the subcon to debug
Example::
Debugger(
Enum(UBInt8("foo"),
a = 1,
b = 2,
c = 3
)
)
"""
__slots__ = ["retval"]
def _parse(self, stream, context):
try:
return self.subcon._parse(stream, context)
except Exception:
self.retval = NotImplemented
self.handle_exc("(you can set the value of 'self.retval', "
"which will be returned)")
if self.retval is NotImplemented:
raise
else:
return self.retval
def _build(self, obj, stream, context):
try:
self.subcon._build(obj, stream, context)
except Exception:
self.handle_exc()
def handle_exc(self, msg = None):
print("=" * 80)
print("Debugging exception of %s:" % (self.subcon,))
print("".join(traceback.format_exception(*sys.exc_info())[1:]))
if msg:
print(msg)
pdb.post_mortem(sys.exc_info()[2])
print("=" * 80)
| mit | -8,919,516,380,459,991,000 | 30.233083 | 109 | 0.545258 | false |
liuenyan/django-blog | blog/views.py | 1 | 8259 | """
博客应用的视图函数。
"""
import json
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, InvalidPage
from django.contrib import messages
from django.conf import settings
from django.http import HttpResponse
from django.views.decorators.http import require_POST
from blog.models import Post, Comment, Tag, Category
from blog.forms import PostForm, CommentForm, EditProfileForm, CategoryForm, TagForm
from blog.tools import clean_html_tags, convert_to_html
# Create your views here.
def index(request):
"""首页的视图函数"""
post_list = Post.objects.all().order_by('-id')
paginator = Paginator(post_list, 10)
page = request.GET.get('page')
try:
posts = paginator.page(page)
except InvalidPage:
posts = paginator.page(1)
return render(request, "index.html", context={'posts': posts})
def post_detail(request, slug):
"""文章页面的视图函数"""
post = get_object_or_404(Post, slug=slug)
context = {
'comments_provider': settings.DEFAULT_COMMENTS_PROVIDER,
'post': post,
}
if settings.DEFAULT_COMMENTS_PROVIDER == 'default':
if request.method == 'POST':
form = CommentForm(request.POST)
if form.is_valid():
comment = Comment(
name=form.cleaned_data['name'],
url=form.cleaned_data['url'],
email=form.cleaned_data['email'],
comment=clean_html_tags(form.cleaned_data['comment']),
post=post
)
comment.save()
return redirect('post', slug)
else:
messages.add_message(request, messages.ERROR, form.errors)
form = CommentForm()
comments = Comment.objects.filter(post=post)
context['form'] = form
context['comments'] = comments
return render(request, 'post.html', context)
@login_required
def edit_post(request, slug):
"""文章编辑页面的视图函数"""
post = get_object_or_404(Post, slug=slug)
if request.user.id != post.author.id:
return redirect('post', slug)
if request.method == 'POST':
post_form = PostForm(request.POST, instance=post)
if post_form.is_valid():
post.body_html = convert_to_html(post_form.cleaned_data['body_markdown'])
post_form.save()
messages.add_message(request, messages.SUCCESS, '文章已更新')
return redirect('post', post.slug)
else:
messages.add_message(request, messages.ERROR, post_form.errors)
context = {
'post_form': post_form,
'category_form': CategoryForm(),
'tag_form': TagForm(),
}
return render(request, 'edit_post.html', context)
context = {
'post_form': PostForm(instance=post),
'category_form': CategoryForm(),
'tag_form': TagForm(),
}
return render(request, 'edit_post.html', context)
@login_required
def new_post(request):
"""文章新建页面的视图函数"""
if request.method == 'POST':
post_form = PostForm(request.POST)
if post_form.is_valid():
post = post_form.save(commit=False)
post.body_html = convert_to_html(post_form.cleaned_data['body_markdown'])
post.author = request.user
post.save()
post_form.save_m2m()
messages.add_message(request, messages.SUCCESS, '文章已发布')
return redirect('post', post.slug)
else:
messages.add_message(request, messages.ERROR, post_form.errors)
context = {
'post_form': post_form,
'category_form': CategoryForm(),
'tag_form': TagForm(),
}
return render(request, 'edit_post.html', context)
context = {
'post_form': PostForm(),
'category_form': CategoryForm(),
'tag_form': TagForm(),
}
return render(request, 'edit_post.html', context)
@login_required
def delete_post(request, slug):
"""文章删除的视图函数"""
post = get_object_or_404(Post, id=slug)
if request.user.id != post.author.id:
return redirect('post', slug)
post.delete()
return redirect('index')
def category_posts(request, category_name):
"""分类页面的视图函数"""
category_object = get_object_or_404(Category, category=category_name)
post_list = category_object.post_set.order_by('-id')
paginator = Paginator(post_list, 10)
page = request.GET.get('page')
try:
posts = paginator.page(page)
except InvalidPage:
posts = paginator.page(1)
title = '分类为{0}的文章'.format(category_name)
return render(request, 'index.html', context={'title': title, 'posts': posts})
@login_required
@require_POST
def new_category(request):
"""新建分类的处理函数"""
form = CategoryForm(request.POST)
if form.is_valid():
category = form.save()
result = {
'status': 'success',
'category': {
'id': category.id,
'category': category.category,
},
}
return HttpResponse(json.dumps(result), content_type="text/json")
else:
result = {
'status': 'fail',
'errors': form.category.errors,
}
return HttpResponse(json.dumps(result), content="text/json")
@login_required
@require_POST
def new_tag(request):
"""新建标签的处理函数"""
form = TagForm(request.POST)
if form.is_valid():
tag = form.save()
result = {
'status': 'success',
'tag': {
'id': tag.id,
'tag': tag.tag,
}
}
return HttpResponse(json.dumps(result), content_type="text/json")
else:
result = {
'status': 'fail',
'errors': form.errors,
}
return HttpResponse(json.dumps(result), content="text/json")
def tag_posts(request, tagname):
"""标签页面的视图函数"""
tag_object = get_object_or_404(Tag, tag=tagname)
post_list = tag_object.post_set.order_by('-id')
paginator = Paginator(post_list, 10)
page = request.GET.get('page')
try:
posts = paginator.page(page)
except InvalidPage:
posts = paginator.page(1)
title = '标签为{0}的文章'.format(tagname)
return render(request, 'index.html', context={'title': title, 'posts': posts})
def archive(request, year, month):
"""归档页面的视图函数"""
post_list = Post.objects.filter(
creation_time__year=year,
creation_time__month=month
).order_by('-id')
paginator = Paginator(post_list, 10)
page = request.GET.get('page')
try:
posts = paginator.page(page)
except InvalidPage:
posts = paginator.page(1)
title = '{0}年{1}月的归档'.format(year, month)
return render(request, 'index.html', context={'title': title, 'posts': posts})
@login_required
def profile(request):
"""个人资料页面的视图函数"""
return render(request, 'profile.html')
@login_required
def change_profile(request):
"""修改个人资料的视图函数"""
current_user = request.user
if request.method == 'POST':
form = EditProfileForm(request.POST)
if form.is_valid():
current_user.first_name = form.cleaned_data['first_name']
current_user.last_name = form.cleaned_data['last_name']
current_user.email = form.cleaned_data['email']
current_user.save()
messages.add_message(request, messages.SUCCESS, '个人资料已更新')
return redirect('profile')
else:
messages.add_message(request, messages.ERROR, form.errors)
data = {
'first_name': current_user.first_name,
'last_name': current_user.last_name,
'email': current_user.email
}
form = EditProfileForm(data)
return render(request, 'change_profile.html', context={'form': form})
| mit | 8,883,726,643,625,573,000 | 31.687243 | 85 | 0.591716 | false |
twisted/mantissa | axiom/plugins/webcmd.py | 1 | 5974 | # -*- test-case-name: xmantissa.test.test_webcmd -*-
import os
import sys
from twisted.python import reflect
from twisted.python.usage import UsageError
from axiom import item, attributes
from axiom.dependency import installOn, onlyInstallPowerups
from axiom.scripts import axiomatic
from xmantissa.web import SiteConfiguration
from xmantissa.website import StaticSite, APIKey
from xmantissa import ixmantissa, webadmin
from xmantissa.plugins.baseoff import baseOffering
class WebConfiguration(axiomatic.AxiomaticCommand):
name = 'web'
description = 'Web. Yay.'
optParameters = [
('http-log', 'h', None,
'Filename (relative to files directory of the store) to which to log '
'HTTP requests (empty string to disable)'),
('hostname', 'H', None,
'Canonical hostname for this server (used in URL generation).'),
('urchin-key', '', None,
'Google Analytics API key for this site')]
def __init__(self, *a, **k):
super(WebConfiguration, self).__init__(*a, **k)
self.staticPaths = []
didSomething = 0
def postOptions(self):
siteStore = self.parent.getStore()
# Make sure the base mantissa offering is installed.
offeringTech = ixmantissa.IOfferingTechnician(siteStore)
offerings = offeringTech.getInstalledOfferingNames()
if baseOffering.name not in offerings:
raise UsageError(
"This command can only be used on Mantissa databases.")
# It is, we can make some simplifying assumptions. Specifically,
# there is exactly one SiteConfiguration installed.
site = siteStore.findUnique(SiteConfiguration)
if self['http-log'] is not None:
if self['http-log']:
site.httpLog = siteStore.filesdir.preauthChild(
self['http-log'])
else:
site.httpLog = None
if self['hostname'] is not None:
if self['hostname']:
site.hostname = self.decodeCommandLine(self['hostname'])
else:
raise UsageError("Hostname may not be empty.")
if self['urchin-key'] is not None:
# Install the API key for Google Analytics, to enable tracking for
# this site.
APIKey.setKeyForAPI(
siteStore, APIKey.URCHIN, self['urchin-key'].decode('ascii'))
# Set up whatever static content was requested.
for webPath, filePath in self.staticPaths:
staticSite = siteStore.findFirst(
StaticSite, StaticSite.prefixURL == webPath)
if staticSite is not None:
staticSite.staticContentPath = filePath
else:
staticSite = StaticSite(
store=siteStore,
staticContentPath=filePath,
prefixURL=webPath,
sessionless=True)
onlyInstallPowerups(staticSite, siteStore)
def opt_static(self, pathMapping):
webPath, filePath = self.decodeCommandLine(pathMapping).split(os.pathsep, 1)
if webPath.startswith('/'):
webPath = webPath[1:]
self.staticPaths.append((webPath, os.path.abspath(filePath)))
def opt_list(self):
self.didSomething = 1
s = self.parent.getStore()
for ws in s.query(SiteConfiguration):
print 'The hostname is', ws.hostname
if ws.httpLog is not None:
print 'Logging HTTP requests to', ws.httpLog
break
else:
print 'No configured webservers.'
def powerupsWithPriorityFor(interface):
for cable in s.query(
item._PowerupConnector,
attributes.AND(item._PowerupConnector.interface == unicode(reflect.qual(interface)),
item._PowerupConnector.item == s),
sort=item._PowerupConnector.priority.descending):
yield cable.powerup, cable.priority
print 'Sessionless plugins:'
for srp, prio in powerupsWithPriorityFor(ixmantissa.ISessionlessSiteRootPlugin):
print ' %s (prio. %d)' % (srp, prio)
print 'Sessioned plugins:'
for srp, prio in powerupsWithPriorityFor(ixmantissa.ISiteRootPlugin):
print ' %s (prio. %d)' % (srp, prio)
sys.exit(0)
opt_static.__doc__ = """
Add an element to the mapping of web URLs to locations of static
content on the filesystem (webpath%sfilepath)
""" % (os.pathsep,)
class WebAdministration(axiomatic.AxiomaticCommand):
name = 'web-admin'
description = 'Administrative controls for the web'
optFlags = [
('admin', 'a', 'Enable administrative controls'),
('developer', 'd', 'Enable developer controls'),
('disable', 'D', 'Remove the indicated options, instead of enabling them.'),
]
def postOptions(self):
s = self.parent.getStore()
didSomething = False
if self['admin']:
didSomething = True
if self['disable']:
for app in s.query(webadmin.AdminStatsApplication):
app.deleteFromStore()
break
else:
raise UsageError('Administrator controls already disabled.')
else:
installOn(webadmin.AdminStatsApplication(store=s), s)
if self['developer']:
didSomething = True
if self['disable']:
for app in s.query(webadmin.DeveloperApplication):
app.deleteFromStore()
break
else:
raise UsageError('Developer controls already disabled.')
else:
installOn(webadmin.DeveloperApplication(store=s), s)
if not didSomething:
raise UsageError("Specify something or I won't do anything.")
| mit | 752,348,731,789,274,400 | 34.349112 | 100 | 0.597255 | false |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/sympy/utilities/lambdify.py | 1 | 18165 | """
This module provides convenient functions to transform sympy expressions to
lambda functions which can be used to calculate numerical values very fast.
"""
from __future__ import print_function, division
from sympy.external import import_module
from sympy.core.compatibility import exec_, is_sequence, iterable, string_types
import inspect
# These are the namespaces the lambda functions will use.
MATH = {}
MPMATH = {}
NUMPY = {}
SYMPY = {}
# Default namespaces, letting us define translations that can't be defined
# by simple variable maps, like I => 1j
# These are separate from the names above because the above names are modified
# throughout this file, whereas these should remain unmodified.
MATH_DEFAULT = {}
MPMATH_DEFAULT = {}
NUMPY_DEFAULT = {"I": 1j}
SYMPY_DEFAULT = {}
# Mappings between sympy and other modules function names.
MATH_TRANSLATIONS = {
"Abs": "fabs",
"ceiling": "ceil",
"E": "e",
"ln": "log",
}
MPMATH_TRANSLATIONS = {
"Abs": "fabs",
"elliptic_k": "ellipk",
"elliptic_f": "ellipf",
"elliptic_e": "ellipe",
"elliptic_pi": "ellippi",
"ceiling": "ceil",
"chebyshevt": "chebyt",
"chebyshevu": "chebyu",
"E": "e",
"I": "j",
"ln": "log",
#"lowergamma":"lower_gamma",
"oo": "inf",
#"uppergamma":"upper_gamma",
"LambertW": "lambertw",
"Matrix": "matrix",
"MutableDenseMatrix": "matrix",
"ImmutableMatrix": "matrix",
"conjugate": "conj",
"dirichlet_eta": "altzeta",
"Ei": "ei",
"Shi": "shi",
"Chi": "chi",
"Si": "si",
"Ci": "ci"
}
NUMPY_TRANSLATIONS = {
"Abs": "abs",
"acos": "arccos",
"acosh": "arccosh",
"arg": "angle",
"asin": "arcsin",
"asinh": "arcsinh",
"atan": "arctan",
"atan2": "arctan2",
"atanh": "arctanh",
"ceiling": "ceil",
"E": "e",
"im": "imag",
"ln": "log",
"Matrix": "matrix",
"MutableDenseMatrix": "matrix",
"ImmutableMatrix": "matrix",
"Max": "amax",
"Min": "amin",
"oo": "inf",
"re": "real",
}
# Available modules:
MODULES = {
"math": (MATH, MATH_DEFAULT, MATH_TRANSLATIONS, ("from math import *",)),
"mpmath": (MPMATH, MPMATH_DEFAULT, MPMATH_TRANSLATIONS, ("from sympy.mpmath import *",)),
"numpy": (NUMPY, NUMPY_DEFAULT, NUMPY_TRANSLATIONS, ("import_module('numpy')",)),
"sympy": (SYMPY, SYMPY_DEFAULT, {}, (
"from sympy.functions import *",
"from sympy.matrices import *",
"from sympy import Integral, pi, oo, nan, zoo, E, I",)),
}
def _import(module, reload="False"):
"""
Creates a global translation dictionary for module.
The argument module has to be one of the following strings: "math",
"mpmath", "numpy", "sympy".
These dictionaries map names of python functions to their equivalent in
other modules.
"""
try:
namespace, namespace_default, translations, import_commands = MODULES[
module]
except KeyError:
raise NameError(
"'%s' module can't be used for lambdification" % module)
# Clear namespace or exit
if namespace != namespace_default:
# The namespace was already generated, don't do it again if not forced.
if reload:
namespace.clear()
namespace.update(namespace_default)
else:
return
for import_command in import_commands:
if import_command.startswith('import_module'):
module = eval(import_command)
if module is not None:
namespace.update(module.__dict__)
continue
else:
try:
exec_(import_command, {}, namespace)
continue
except ImportError:
pass
raise ImportError(
"can't import '%s' with '%s' command" % (module, import_command))
# Add translated names to namespace
for sympyname, translation in translations.items():
namespace[sympyname] = namespace[translation]
def lambdify(args, expr, modules=None, printer=None, use_imps=True, dummify=True):
"""
Returns a lambda function for fast calculation of numerical values.
If not specified differently by the user, SymPy functions are replaced as
far as possible by either python-math, numpy (if available) or mpmath
functions - exactly in this order. To change this behavior, the "modules"
argument can be used. It accepts:
- the strings "math", "mpmath", "numpy", "sympy"
- any modules (e.g. math)
- dictionaries that map names of sympy functions to arbitrary functions
- lists that contain a mix of the arguments above, with higher priority
given to entries appearing first.
The default behavior is to substitute all arguments in the provided
expression with dummy symbols. This allows for applied functions (e.g.
f(t)) to be supplied as arguments. Call the function with dummify=False if
dummy substitution is unwanted (and `args` is not a string). If you want
to view the lambdified function or provide "sympy" as the module, you
should probably set dummify=False.
Usage
=====
(1) Use one of the provided modules:
>>> from sympy import lambdify, sin, gamma
>>> from sympy.utilities.lambdify import lambdastr
>>> from sympy.abc import x
>>> f = lambdify(x, sin(x), "math")
Attention: Functions that are not in the math module will throw a name
error when the lambda function is evaluated! So this would
be better:
>>> f = lambdify(x, sin(x)*gamma(x), ("math", "mpmath", "sympy"))
(2) Use some other module:
>> import numpy
>> f = lambdify((x,y), tan(x*y), numpy)
Attention: There are naming differences between numpy and sympy. So if
you simply take the numpy module, e.g. sympy.atan will not be
translated to numpy.arctan. Use the modified module instead
by passing the string "numpy":
>> f = lambdify((x,y), tan(x*y), "numpy")
>> f(1, 2)
-2.18503986326
>> from numpy import array
>> f(array([1, 2, 3]), array([2, 3, 5]))
[-2.18503986 -0.29100619 -0.8559934 ]
(3) Use a dictionary defining custom functions:
>>> def my_cool_function(x): return 'sin(%s) is cool' % x
>>> myfuncs = {"sin" : my_cool_function}
>>> f = lambdify(x, sin(x), myfuncs); f(1)
'sin(1) is cool'
Examples
========
>>> from sympy.utilities.lambdify import implemented_function, lambdify
>>> from sympy import sqrt, sin, Matrix
>>> from sympy import Function
>>> from sympy.abc import w, x, y, z
>>> f = lambdify(x, x**2)
>>> f(2)
4
>>> f = lambdify((x, y, z), [z, y, x])
>>> f(1,2,3)
[3, 2, 1]
>>> f = lambdify(x, sqrt(x))
>>> f(4)
2.0
>>> f = lambdify((x, y), sin(x*y)**2)
>>> f(0, 5)
0.0
>>> row = lambdify((x, y), Matrix((x, x + y)).T, modules='sympy')
>>> row(1, 2)
Matrix([[1, 3]])
Tuple arguments are handled and the lambdified function should
be called with the same type of arguments as were used to create
the function.:
>>> f = lambdify((x, (y, z)), x + y)
>>> f(1, (2, 4))
3
A more robust way of handling this is to always work with flattened
arguments:
>>> from sympy.utilities.iterables import flatten
>>> args = w, (x, (y, z))
>>> vals = 1, (2, (3, 4))
>>> f = lambdify(flatten(args), w + x + y + z)
>>> f(*flatten(vals))
10
Functions present in `expr` can also carry their own numerical
implementations, in a callable attached to the ``_imp_``
attribute. Usually you attach this using the
``implemented_function`` factory:
>>> f = implemented_function(Function('f'), lambda x: x+1)
>>> func = lambdify(x, f(x))
>>> func(4)
5
``lambdify`` always prefers ``_imp_`` implementations to implementations
in other namespaces, unless the ``use_imps`` input parameter is False.
"""
from sympy.core.symbol import Symbol
from sympy.utilities.iterables import flatten
# If the user hasn't specified any modules, use what is available.
module_provided = True
if modules is None:
module_provided = False
# Use either numpy (if available) or python.math where possible.
# XXX: This leads to different behaviour on different systems and
# might be the reason for irreproducible errors.
modules = ["math", "mpmath", "sympy"]
try:
_import("numpy")
except ImportError:
pass
else:
modules.insert(1, "numpy")
# Get the needed namespaces.
namespaces = []
# First find any function implementations
if use_imps:
namespaces.append(_imp_namespace(expr))
# Check for dict before iterating
if isinstance(modules, (dict, str)) or not hasattr(modules, '__iter__'):
namespaces.append(modules)
else:
namespaces += list(modules)
# fill namespace with first having highest priority
namespace = {}
for m in namespaces[::-1]:
buf = _get_namespace(m)
namespace.update(buf)
if hasattr(expr, "atoms"):
#Try if you can extract symbols from the expression.
#Move on if expr.atoms in not implemented.
syms = expr.atoms(Symbol)
for term in syms:
namespace.update({str(term): term})
# Create lambda function.
lstr = lambdastr(args, expr, printer=printer, dummify=dummify)
flat = '__flatten_args__'
if flat in lstr:
import itertools
namespace.update({flat: flatten})
return eval(lstr, namespace)
def _get_namespace(m):
"""
This is used by _lambdify to parse its arguments.
"""
if isinstance(m, str):
_import(m)
return MODULES[m][0]
elif isinstance(m, dict):
return m
elif hasattr(m, "__dict__"):
return m.__dict__
else:
raise TypeError("Argument must be either a string, dict or module but it is: %s" % m)
def lambdastr(args, expr, printer=None, dummify=False):
"""
Returns a string that can be evaluated to a lambda function.
Examples
========
>>> from sympy.abc import x, y, z
>>> from sympy.utilities.lambdify import lambdastr
>>> lambdastr(x, x**2)
'lambda x: (x**2)'
>>> lambdastr((x,y,z), [z,y,x])
'lambda x,y,z: ([z, y, x])'
Although tuples may not appear as arguments to lambda in Python 3,
lambdastr will create a lambda function that will unpack the original
arguments so that nested arguments can be handled:
>>> lambdastr((x, (y, z)), x + y)
'lambda _0,_1: (lambda x,y,z: (x + y))(*list(__flatten_args__([_0,_1])))'
"""
# Transforming everything to strings.
from sympy.matrices import DeferredVector
from sympy import Dummy, sympify, Symbol, Function, flatten
if printer is not None:
if inspect.isfunction(printer):
lambdarepr = printer
else:
if inspect.isclass(printer):
lambdarepr = lambda expr: printer().doprint(expr)
else:
lambdarepr = lambda expr: printer.doprint(expr)
else:
#XXX: This has to be done here because of circular imports
from sympy.printing.lambdarepr import lambdarepr
def sub_args(args, dummies_dict):
if isinstance(args, str):
return args
elif isinstance(args, DeferredVector):
return str(args)
elif iterable(args):
dummies = flatten([sub_args(a, dummies_dict) for a in args])
return ",".join(str(a) for a in dummies)
else:
if isinstance(args, Function):
dummies = Dummy()
dummies_dict.update({args : dummies})
return str(dummies)
else:
return str(args)
def sub_expr(expr, dummies_dict):
try:
expr = sympify(expr).xreplace(dummies_dict)
except:
if isinstance(expr, DeferredVector):
pass
elif isinstance(expr, dict):
k = [sub_expr(sympify(a), dummies_dict) for a in expr.keys()]
v = [sub_expr(sympify(a), dummies_dict) for a in expr.values()]
expr = dict(zip(k, v))
elif isinstance(expr, tuple):
expr = tuple(sub_expr(sympify(a), dummies_dict) for a in expr)
elif isinstance(expr, list):
expr = [sub_expr(sympify(a), dummies_dict) for a in expr]
return expr
# Transform args
def isiter(l):
return iterable(l, exclude=(str, DeferredVector))
if isiter(args) and any(isiter(i) for i in args):
from sympy.utilities.iterables import flatten
import re
dum_args = [str(Dummy(str(i))) for i in range(len(args))]
iter_args = ','.join([i if isiter(a) else i
for i, a in zip(dum_args, args)])
lstr = lambdastr(flatten(args), expr, printer=printer, dummify=dummify)
flat = '__flatten_args__'
rv = 'lambda %s: (%s)(*list(%s([%s])))' % (
','.join(dum_args), lstr, flat, iter_args)
if len(re.findall(r'\b%s\b' % flat, rv)) > 1:
raise ValueError('the name %s is reserved by lambdastr' % flat)
return rv
dummies_dict = {}
if dummify:
args = sub_args(args, dummies_dict)
else:
if isinstance(args, str):
pass
elif iterable(args, exclude=DeferredVector):
args = ",".join(str(a) for a in args)
# Transform expr
if dummify:
if isinstance(expr, str):
pass
else:
expr = sub_expr(expr, dummies_dict)
expr = lambdarepr(expr)
return "lambda %s: (%s)" % (args, expr)
def _imp_namespace(expr, namespace=None):
""" Return namespace dict with function implementations
We need to search for functions in anything that can be thrown at
us - that is - anything that could be passed as `expr`. Examples
include sympy expressions, as well as tuples, lists and dicts that may
contain sympy expressions.
Parameters
----------
expr : object
Something passed to lambdify, that will generate valid code from
``str(expr)``.
namespace : None or mapping
Namespace to fill. None results in new empty dict
Returns
-------
namespace : dict
dict with keys of implemented function names within `expr` and
corresponding values being the numerical implementation of
function
Examples
--------
>>> from sympy.abc import x
>>> from sympy.utilities.lambdify import implemented_function, _imp_namespace
>>> from sympy import Function
>>> f = implemented_function(Function('f'), lambda x: x+1)
>>> g = implemented_function(Function('g'), lambda x: x*10)
>>> namespace = _imp_namespace(f(g(x)))
>>> sorted(namespace.keys())
['f', 'g']
"""
# Delayed import to avoid circular imports
from sympy.core.function import FunctionClass
if namespace is None:
namespace = {}
# tuples, lists, dicts are valid expressions
if is_sequence(expr):
for arg in expr:
_imp_namespace(arg, namespace)
return namespace
elif isinstance(expr, dict):
for key, val in expr.items():
# functions can be in dictionary keys
_imp_namespace(key, namespace)
_imp_namespace(val, namespace)
return namespace
# sympy expressions may be Functions themselves
func = getattr(expr, 'func', None)
if isinstance(func, FunctionClass):
imp = getattr(func, '_imp_', None)
if imp is not None:
name = expr.func.__name__
if name in namespace and namespace[name] != imp:
raise ValueError('We found more than one '
'implementation with name '
'"%s"' % name)
namespace[name] = imp
# and / or they may take Functions as arguments
if hasattr(expr, 'args'):
for arg in expr.args:
_imp_namespace(arg, namespace)
return namespace
def implemented_function(symfunc, implementation):
""" Add numerical ``implementation`` to function ``symfunc``.
``symfunc`` can be an ``UndefinedFunction`` instance, or a name string.
In the latter case we create an ``UndefinedFunction`` instance with that
name.
Be aware that this is a quick workaround, not a general method to create
special symbolic functions. If you want to create a symbolic function to be
used by all the machinery of sympy you should subclass the ``Function``
class.
Parameters
----------
symfunc : ``str`` or ``UndefinedFunction`` instance
If ``str``, then create new ``UndefinedFunction`` with this as
name. If `symfunc` is a sympy function, attach implementation to it.
implementation : callable
numerical implementation to be called by ``evalf()`` or ``lambdify``
Returns
-------
afunc : sympy.FunctionClass instance
function with attached implementation
Examples
--------
>>> from sympy.abc import x
>>> from sympy.utilities.lambdify import lambdify, implemented_function
>>> from sympy import Function
>>> f = implemented_function(Function('f'), lambda x: x+1)
>>> lam_f = lambdify(x, f(x))
>>> lam_f(4)
5
"""
# Delayed import to avoid circular imports
from sympy.core.function import UndefinedFunction
# if name, create function to hold implementation
if isinstance(symfunc, string_types):
symfunc = UndefinedFunction(symfunc)
elif not isinstance(symfunc, UndefinedFunction):
raise ValueError('symfunc should be either a string or'
' an UndefinedFunction instance.')
# We need to attach as a method because symfunc will be a class
symfunc._imp_ = staticmethod(implementation)
return symfunc
| gpl-3.0 | 8,490,795,049,944,381,000 | 32.14781 | 93 | 0.600606 | false |
ContributeToScience/participant-booking-app | booking/scientist/migrations/0006_auto__add_field_research_is_complete.py | 1 | 15358 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Research.is_complete'
db.add_column(u'scientist_research', 'is_complete',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Research.is_complete'
db.delete_column(u'scientist_research', 'is_complete')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'scientist.category': {
'Meta': {'ordering': "('tree_id', 'lft')", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Category'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['scientist.Category']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
u'scientist.locationinfo': {
'Meta': {'object_name': 'LocationInfo'},
'address': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'lng': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'})
},
u'scientist.participantresearch': {
'Meta': {'object_name': 'ParticipantResearch'},
'award_credit': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'donate_credit': ('django.db.models.fields.FloatField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'participant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'participant_resp': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'participant_resp_dt': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'payment_account': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'payment_resp': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'payment_status': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'payment_type': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'research': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.Research']"}),
'scientist_award_dt': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'superuser_award_dt': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'scientist.participantresearchevent': {
'Meta': {'object_name': 'ParticipantResearchEvent'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'participant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'research_event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.ResearchEvent']"})
},
u'scientist.remindparticipantinfo': {
'Meta': {'object_name': 'RemindParticipantInfo'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'research': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.Research']"}),
'time': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'time_type': ('django.db.models.fields.CharField', [], {'default': "'minutes'", 'max_length': '20'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'email'", 'max_length': '20'})
},
u'scientist.remindscientistinfo': {
'Meta': {'object_name': 'RemindScientistInfo'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'research': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.Research']"}),
'time': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'time_type': ('django.db.models.fields.CharField', [], {'default': "'minutes'", 'max_length': '20'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'email'", 'max_length': '20'})
},
u'scientist.research': {
'Meta': {'object_name': 'Research'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'$'", 'max_length': '1'}),
'default_event_duration': ('django.db.models.fields.PositiveIntegerField', [], {'default': '45', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {}),
'ethical_permission': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'further_ethical_permisson_info': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_credit_scheme': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_on_web': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_paid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_publish': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.LocationInfo']", 'null': 'True', 'blank': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'need_participant_num': ('django.db.models.fields.PositiveIntegerField', [], {}),
'non_ethical_permission_reason': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'payment_dt': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'payment_type': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'remind_participant': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'remind_research': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'remuneration': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'restrictions': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'room': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {}),
'total_credit': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'scientist.researchcategory': {
'Meta': {'object_name': 'ResearchCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.Category']"}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'research': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.Research']"})
},
u'scientist.researchevent': {
'Meta': {'object_name': 'ResearchEvent'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'end': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_participant_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_scientist_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'research': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.Research']"}),
'start': ('django.db.models.fields.DateTimeField', [], {})
},
u'scientist.scientistresearch': {
'Meta': {'object_name': 'ScientistResearch'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'research': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scientist.Research']"}),
'scientist': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
}
}
complete_apps = ['scientist'] | gpl-2.0 | -6,620,653,928,510,723,000 | 79.836842 | 187 | 0.564657 | false |
daevaorn/kombu | kombu/transport/zmq.py | 1 | 8458 | """
kombu.transport.zmq
===================
ZeroMQ transport.
"""
from __future__ import absolute_import
import errno
import os
import socket
try:
import zmq
from zmq import ZMQError
except ImportError:
zmq = ZMQError = None # noqa
from kombu.five import Empty
from kombu.log import get_logger
from kombu.serialization import pickle
from kombu.utils import cached_property
from kombu.utils.eventio import poll, READ
from . import virtual
logger = get_logger('kombu.transport.zmq')
DEFAULT_PORT = 5555
DEFAULT_HWM = 128
DEFAULT_INCR = 1
dumps, loads = pickle.dumps, pickle.loads
class MultiChannelPoller(object):
eventflags = READ
def __init__(self):
# active channels
self._channels = set()
# file descriptor -> channel map
self._fd_to_chan = {}
# poll implementation (epoll/kqueue/select)
self.poller = poll()
def close(self):
for fd in self._fd_to_chan:
try:
self.poller.unregister(fd)
except KeyError:
pass
self._channels.clear()
self._fd_to_chan.clear()
self.poller = None
def add(self, channel):
self._channels.add(channel)
def discard(self, channel):
self._channels.discard(channel)
self._fd_to_chan.pop(channel.client.connection.fd, None)
def _register(self, channel):
conn = channel.client.connection
self._fd_to_chan[conn.fd] = channel
self.poller.register(conn.fd, self.eventflags)
def on_poll_start(self):
for channel in self._channels:
self._register(channel)
def on_readable(self, fileno):
chan = self._fd_to_chan[fileno]
return chan.drain_events(), chan
def get(self, timeout=None):
self.on_poll_start()
events = self.poller.poll(timeout)
for fileno, _ in events or []:
return self.on_readable(fileno)
raise Empty()
@property
def fds(self):
return self._fd_to_chan
class Client(object):
def __init__(self, uri='tcp://127.0.0.1', port=DEFAULT_PORT,
hwm=DEFAULT_HWM, swap_size=None, enable_sink=True,
context=None):
try:
scheme, parts = uri.split('://')
except ValueError:
scheme = 'tcp'
parts = uri
endpoints = parts.split(';')
self.port = port
if scheme != 'tcp':
raise NotImplementedError('Currently only TCP can be used')
self.context = context or zmq.Context.instance()
if enable_sink:
self.sink = self.context.socket(zmq.PULL)
self.sink.bind('tcp://*:{0.port}'.format(self))
else:
self.sink = None
self.vent = self.context.socket(zmq.PUSH)
if hasattr(zmq, 'SNDHWM'):
self.vent.setsockopt(zmq.SNDHWM, hwm)
else:
self.vent.setsockopt(zmq.HWM, hwm)
if swap_size:
self.vent.setsockopt(zmq.SWAP, swap_size)
for endpoint in endpoints:
if scheme == 'tcp' and ':' not in endpoint:
endpoint += ':' + str(DEFAULT_PORT)
endpoint = ''.join([scheme, '://', endpoint])
self.connect(endpoint)
def connect(self, endpoint):
self.vent.connect(endpoint)
def get(self, queue=None, timeout=None):
sink = self.sink
try:
if timeout is not None:
prev_timeout, sink.RCVTIMEO = sink.RCVTIMEO, timeout
try:
return sink.recv()
finally:
sink.RCVTIMEO = prev_timeout
else:
return sink.recv()
except ZMQError as exc:
if exc.errno == zmq.EAGAIN:
raise socket.error(errno.EAGAIN, exc.strerror)
else:
raise
def put(self, queue, message, **kwargs):
return self.vent.send(message)
def close(self):
if self.sink and not self.sink.closed:
self.sink.close()
if not self.vent.closed:
self.vent.close()
@property
def connection(self):
if self.sink:
return self.sink
return self.vent
class Channel(virtual.Channel):
Client = Client
hwm = DEFAULT_HWM
swap_size = None
enable_sink = True
port_incr = DEFAULT_INCR
from_transport_options = (
virtual.Channel.from_transport_options +
('hwm', 'swap_size', 'enable_sink', 'port_incr')
)
def __init__(self, *args, **kwargs):
super_ = super(Channel, self)
super_.__init__(*args, **kwargs)
# Evaluate socket
self.client.connection.closed
self.connection.cycle.add(self)
self.connection_errors = self.connection.connection_errors
def _get(self, queue, timeout=None):
try:
return loads(self.client.get(queue, timeout))
except socket.error as exc:
if exc.errno == errno.EAGAIN and timeout != 0:
raise Empty()
else:
raise
def _put(self, queue, message, **kwargs):
self.client.put(queue, dumps(message, -1), **kwargs)
def _purge(self, queue):
return 0
def _poll(self, cycle, timeout=None):
return cycle.get(timeout=timeout)
def close(self):
if not self.closed:
self.connection.cycle.discard(self)
try:
self.__dict__['client'].close()
except KeyError:
pass
super(Channel, self).close()
def _prepare_port(self, port):
return (port + self.channel_id - 1) * self.port_incr
def _create_client(self):
conninfo = self.connection.client
port = self._prepare_port(conninfo.port or DEFAULT_PORT)
return self.Client(uri=conninfo.hostname or 'tcp://127.0.0.1',
port=port,
hwm=self.hwm,
swap_size=self.swap_size,
enable_sink=self.enable_sink,
context=self.connection.context)
@cached_property
def client(self):
return self._create_client()
class Transport(virtual.Transport):
Channel = Channel
can_parse_url = True
default_port = DEFAULT_PORT
driver_type = 'zeromq'
driver_name = 'zmq'
connection_errors = virtual.Transport.connection_errors + (ZMQError,)
implements = virtual.Transport.implements.extend(
async=True,
)
polling_interval = None
def __init__(self, *args, **kwargs):
if zmq is None:
raise ImportError('The zmq library is not installed')
super(Transport, self).__init__(*args, **kwargs)
self.cycle = MultiChannelPoller()
def driver_version(self):
return zmq.__version__
def register_with_event_loop(self, connection, loop):
cycle = self.cycle
cycle.poller = loop.poller
add_reader = loop.add_reader
on_readable = self.on_readable
cycle_poll_start = cycle.on_poll_start
def on_poll_start():
cycle_poll_start()
[add_reader(fd, on_readable, fd) for fd in cycle.fds]
loop.on_tick.add(on_poll_start)
def on_readable(self, fileno):
self._handle_event(self.cycle.on_readable(fileno))
def drain_events(self, connection, timeout=None):
more_to_read = False
for channel in connection.channels:
try:
evt = channel.cycle.get(timeout=timeout)
except socket.error as exc:
if exc.errno == errno.EAGAIN:
continue
raise
else:
connection._handle_event((evt, channel))
more_to_read = True
if not more_to_read:
raise socket.error(errno.EAGAIN, os.strerror(errno.EAGAIN))
def _handle_event(self, evt):
item, channel = evt
self._deliver(*item)
def establish_connection(self):
self.context.closed
return super(Transport, self).establish_connection()
def close_connection(self, connection):
super(Transport, self).close_connection(connection)
try:
connection.__dict__['context'].term()
except KeyError:
pass
@cached_property
def context(self):
return zmq.Context(1)
| bsd-3-clause | -779,977,540,349,261,000 | 26.196141 | 73 | 0.567155 | false |
daviddaub/pyssllabs | pyssllabs.py | 1 | 21480 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import urllib2
import json
import time
from datetime import datetime
import sys
SSLLABS_API_ENTRYPOINT = 'https://api.ssllabs.com/api/v2/'
_FMT = '%Y-%m-%d %H:%M:%S'
hasColorama = False
def _c(c):
return c if hasColorama else ''
def _parse_args():
ap = argparse.ArgumentParser(description='SSL Server Test with the ssllabs.com API')
meg = ap.add_mutually_exclusive_group(required=True)
meg.add_argument('-i', '--info', action='store_true', help='Info')
meg.add_argument('-H', '--host', dest='host', type=str, metavar='<host>',
help='Test a single host e.g. www.example.com')
    meg.add_argument('-S', '--statuscodes', action='store_true',
                     help='Show available status codes and their details')
    meg.add_argument('-file', '--file', action='store_true',
                     help='Show available status codes and their details')
ap.add_argument('-n', '--nocolor', action='store_true',
help='Omit colorized output')
ap.add_argument('-g', '--grade', action='store_true',
help='Output the grade in the form <fqdn>:<grade>')
ap.add_argument('-s', '--startnew', action='store_true',
help='Start new scan. Don\'t deliver cached results.')
return ap
def _format_timestamp(t):
return time.strftime(_FMT, time.localtime(t / 1000))
class Info(object):
version = None
criteriaVersion = None
maxAssessments = None
currentAssessments = None
messages = None
clientMaxAssessments = None
class Host(object):
host = None
port = None
protocol = None
isPublic = None
status = None
statusMessage = None
startTime = None
testTime = None
engineVersion = None
criteriaVersion = None
cacheExpiryTime = None
endpoints = []
certHostnames = []
class EndPoint(object):
ipAddress = None
serverName = None
statusMessage = None
statusDetails = None
statusDetailsMessage = None
grade = None
hasWarnings = None
isExceptional = None
progress = None
duration = None
eta = None
delegation = None
details = None
class Key(object):
size = None
strength = None
alg = None
debianFlaw = None
q = None
class Cert(object):
subject = None
commonNames = []
altNames = []
notBefore = None
notAfter = None
issuerSubject = None
sigAlg = None
revocationInfo = None
crlURIs = []
ocspURIs = []
revocationStatus = None
sgc = None
validationType = None
issues = None
class Chain(object):
certs = []
issues = None
class Suites(object):
_list = []
preference = None
class SimDetails(object):
results = []
class EndpointDetails(object):
hostStartTime = None
key = Key()
cert = Cert()
chain = Chain()
protocols = []
suites = Suites()
serverSignature = None
prefixDelegation = None
nonPrefixDelegation = None
vulnBeast = None
renegSupport = None
stsResponseHeader = None
stsMaxAge = None
stsSubdomains = None
pkpResponseHeader = None
sessionResumption = None
compressionMethods = None
supportsNpn = None
npnProtocols = None
sessionTickets = None
ocspStapling = None
sniRequired = None
httpStatusCode = None
httpForwarding = None
supportsRc4 = None
forwardSecrecy = None
rc4WithModern = None
sims = SimDetails()
heartbleed = None
heartbeat = None
openSslCcs = None
poodleTls = None
fallbackScsv = None
freak = None
class ChainCert(object):
subject = None
label = None
notBefore = None
notAfter = None
issuerSubject = None
issuerLabel = None
sigAlg = None
issues = None
keyAlg = None
keySize = None
keyStrength = None
raw = None
class Protocol(object):
_id = None
name = None
version = None
v2SuitesDisabled = None
q = None
class SimClient(object):
_id = None
name = None
platform = None
version = None
isReference = None
class Simulation(object):
client = None
errorCode = None
attempts = None
protocolId = None
suiteId = None
class Suite(object):
_id = None
name = None
cipherStrength = None
dhStrength = None
dhP = None
    dhG = None
dhYs = None
ecdhBits = None
ecdhStrength = None
q = None
class StatusCodes(object):
statusDetails = None
class SSLLabs(object):
def info(self):
f = urllib2.urlopen(SSLLABS_API_ENTRYPOINT + 'info')
jsn = json.loads(f.read())
f.close()
i = Info()
i.version = jsn.get('engineVersion')
i.criteriaVersion = jsn.get('criteriaVersion')
i.maxAssessments = jsn.get('maxAssessments')
i.currentAssessments = jsn.get('currentAssessments')
i.messages = jsn.get('messages')
i.clientMaxAssessments = jsn.get('clientMaxAssessments')
return i
def analyze(self, host='www.ssllabs.com', publish='off', startNew='off',
fromCache='off', maxAge='1', _all='on', ignoreMismatch='off'):
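        # Polls the ssllabs.com 'analyze' endpoint once and maps the JSON
        # response onto the Host/EndPoint classes above; callers should call
        # again until Host.status is 'READY'.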
# TODO: catch HTTP errors
f = urllib2.urlopen(SSLLABS_API_ENTRYPOINT + 'analyze?' +
'host=' + host + '&' +
'publish=' + publish + '&' +
'startNew=' + startNew + '&' +
'fromCache=' + fromCache + '&' +
'maxAge=' + maxAge + '&' +
'all=' + _all + '&' +
'ignoreMismatch=' + ignoreMismatch)
jsn = json.loads(f.read())
f.close()
        h = Host()
        h.endpoints = []  # avoid appending to the shared class-level list
h.host = jsn.get('host')
h.port = jsn.get('port')
h.protocol = jsn.get('protocol')
h.isPublic = jsn.get('isPublic')
h.status = jsn.get('status')
h.statusMessage = jsn.get('statusMessage')
h.startTime = jsn.get('startTime')
h.testTime = jsn.get('testTime')
h.engineVersion = jsn.get('engineVersion')
h.criteriaVersion = jsn.get('criteriaVersion')
h.cacheExpiryTime = jsn.get('cacheExpiryTime')
if h.status != 'READY':
return h
for e in jsn.get('endpoints'):
endpoint = EndPoint()
endpoint.ipAddress = e.get('ipAddress')
endpoint.serverName = e.get('serverName')
endpoint.statusMessage = e.get('statusMessage')
endpoint.statusDetails = e.get('statusDetails')
endpoint.statusDetailsMessage = e.get('statusDetailsMessage')
endpoint.grade = e.get('grade')
endpoint.hasWarnings = e.get('hasWarnings')
endpoint.isExceptional = e.get('isExceptional')
endpoint.progress = e.get('progress')
endpoint.duration = e.get('duration')
endpoint.eta = e.get('eta')
endpoint.delegation = e.get('delegation')
if _all == 'on':
endpoint.details = EndpointDetails()
endpoint.details.hostStartTime = e.get('details').get('hostStartTime')
endpoint.details.key = Key()
endpoint.details.key.size = e.get('details').get('key').get('size')
endpoint.details.key.strength = e.get('details').get('key').get('strength')
endpoint.details.key.alg = e.get('details').get('key').get('alg')
endpoint.details.key.debianFlaw = e.get('details').get('key').get('debianFlaw')
endpoint.details.key.q = e.get('details').get('key').get('q')
endpoint.details.cert = Cert()
endpoint.details.cert.subject = e.get('details').get('cert').get('subject')
endpoint.details.cert.commonNames = e.get('details').get('cert').get('commonNames')
endpoint.details.cert.altNames = e.get('details').get('cert').get('altNames')
                endpoint.details.cert.notBefore = e.get('details').get('cert').get('notBefore')
                endpoint.details.cert.notAfter = e.get('details').get('cert').get('notAfter')
endpoint.details.cert.issuerSubject = e.get('details').get('cert').get('issuerSubject')
endpoint.details.cert.sigAlg = e.get('details').get('cert').get('sigAlg')
endpoint.details.cert.issuerLabel = e.get('details').get('cert').get('issuerLabel')
endpoint.details.cert.revocationInfo = e.get('details').get('cert').get('revocationInfo')
endpoint.details.cert.crlURIs = e.get('details').get('cert').get('crlURIs')
endpoint.details.cert.ocspURIs = e.get('details').get('cert').get('ocspURIs')
endpoint.details.cert.revocationStatus = e.get('details').get('cert').get('revocationStatus')
endpoint.details.cert.sgc = e.get('details').get('cert').get('sgc')
endpoint.details.cert.validationType = e.get('details').get('cert').get('validationType')
endpoint.details.cert.issues = e.get('details').get('cert').get('issues')
endpoint.details.chain = Chain()
endpoint.details.chain.certs = []
for c in e.get('details').get('chain').get('certs'):
cc = ChainCert()
cc.subject = c.get('subject')
cc.label = c.get('label')
cc.notBefore = c.get('notBefore')
cc.notAfter = c.get('notAfter')
cc.issuerSubject = c.get('issuerSubject')
cc.issuerLabel = c.get('issuerLabel')
cc.sigAlg = c.get('sigAlg')
cc.issues = c.get('issues')
cc.keyAlg = c.get('keyAlg')
cc.keySize = c.get('keySize')
                    cc.keyStrength = c.get('keyStrength')
                    cc.raw = c.get('raw')
endpoint.details.chain.certs.append(cc)
endpoint.details.chain.issues = e.get('details').get('chain').get('issues')
endpoint.details.protocols = []
for i in e.get('details').get('protocols'):
p = Protocol()
p._id = i.get('id')
p.name = i.get('name')
p.version = i.get('version')
p.v2SuitesDisabled = i.get('v2SuitesDisabled')
p.q = i.get('q')
endpoint.details.protocols.append(p)
endpoint.details.suites = Suites()
endpoint.details.suites._list = []
for i in e.get('details').get('suites').get('list'):
s = Suite()
s._id = i.get('id')
s.name = i.get('name')
s.cipherStrength = i.get('cipherStrength')
s.dhStrength = i.get('dhStrength')
s.dhP = i.get('dhP')
s.dhG = i.get('dhG')
s.dhYs = i.get('dhYs')
s.ecdhBits = i.get('ecdhBits')
s.ecdhStrength = i.get('ecdhStrength')
s.q = i.get('q')
endpoint.details.suites._list.append(s)
endpoint.details.serverSignature = e.get('details').get('serverSignature')
endpoint.details.prefixDelegation = e.get('details').get('prefixDelegation')
endpoint.details.nonPrefixDelegation = e.get('details').get('nonPrefixDelegation')
endpoint.details.vulnBeast = e.get('details').get('vulnBeast')
endpoint.details.renegSupport = e.get('details').get('renegSupport')
endpoint.details.stsResponseHeader = e.get('details').get('stsResponseHeader')
endpoint.details.stsMaxAge = e.get('details').get('stsMaxAge')
endpoint.details.stsSubdomains = e.get('details').get('stsSubdomains')
endpoint.details.pkpResponseHeader = e.get('details').get('pkpResponseHeader')
endpoint.details.sessionResumption = e.get('details').get('sessionResumption')
endpoint.details.compressionMethods = e.get('details').get('compressionMethods')
endpoint.details.supportsNpn = e.get('details').get('supportsNpn')
endpoint.details.npnProtocols = e.get('details').get('npnProtocols')
endpoint.details.sessionTickets = e.get('details').get('sessionTickets')
endpoint.details.ocspStapling = e.get('details').get('ocspStapling')
endpoint.details.sniRequired = e.get('details').get('sniRequired')
endpoint.details.httpStatusCode = e.get('details').get('httpStatusCode')
endpoint.details.httpForwarding = e.get('details').get('httpForwarding')
endpoint.details.supportsRc4 = e.get('details').get('supportsRc4')
endpoint.details.forwardSecrecy = e.get('details').get('forwardSecrecy')
endpoint.details.rc4WithModern = e.get('details').get('rc4WithModern')
endpoint.details.sims = SimDetails()
endpoint.details.sims.results = []
for i in e.get('details').get('sims').get('results'):
s = Simulation()
s.client = SimClient()
s.client._id = i.get('client').get('id')
                    s.client.name = i.get('client').get('name')
s.client.platform = i.get('client').get('platform')
s.client.version = i.get('client').get('version')
s.client.isReference = i.get('client').get('isReference')
s._id = i.get('id')
s.errorCode = i.get('errorCode')
s.attempts = i.get('attempts')
s.protocolId = i.get('protocolId')
s.suiteId = i.get('suiteId')
endpoint.details.sims.results.append(s)
endpoint.details.heartbleed = e.get('details').get('heartbleed')
endpoint.details.heartbeat = e.get('details').get('heartbeat')
endpoint.details.openSslCcs = e.get('details').get('openSslCcs')
endpoint.details.poodleTls = e.get('details').get('poodleTls')
endpoint.details.fallbackScsv = e.get('details').get('fallbackScsv')
endpoint.details.freak = e.get('details').get('freak')
h.endpoints.append(endpoint)
return h
def getStatusCodes(self):
f = urllib2.urlopen(SSLLABS_API_ENTRYPOINT + 'getStatusCodes')
jsn = json.loads(f.read())
f.close()
s = StatusCodes()
s.statusDetails = jsn
return s
if __name__ == '__main__':
args = _parse_args().parse_args()
try:
from colorama import Fore, Style, init
init(autoreset=True)
hasColorama = True
except ImportError:
print('No color support. Falling back to normal output.')
args.nocolor = True
if args.info:
s = SSLLabs()
i = s.info()
if args.nocolor:
hasColorama = False
print(_c(Fore.WHITE) + i.messages[0] + '\n')
print(_c(Fore.BLUE) + 'Criteria Version: ' + '\t' +
_c(Fore.CYAN) + i.criteriaVersion)
print(_c(Fore.BLUE) + 'Maximum Assessments: ' + '\t' +
_c(Fore.CYAN) + str(i.maxAssessments))
print(_c(Fore.BLUE) + 'Current Assessments: ' + '\t' +
_c(Fore.CYAN) + str(i.currentAssessments))
        print(_c(Fore.BLUE) + 'Engine Version: ' + '\t' +
_c(Fore.CYAN) + str(i.version))
elif args.statuscodes:
s = SSLLabs()
c = s.getStatusCodes()
for key, value in c.statusDetails['statusDetails'].iteritems():
print(_c(Fore.BLUE) + key + ': ' + _c(Fore.YELLOW) + value)
elif args.host:
s = SSLLabs()
h = s.analyze(args.host, startNew = 'on' if args.startnew else 'off')
if args.nocolor:
hasColorama = False
if h.status == 'READY':
for endpoint in h.endpoints:
if not args.grade:
msg = endpoint.serverName + ' (' + endpoint.ipAddress + ')' + ':'
print(_c(Style.BRIGHT) + _c(Fore.WHITE) + msg)
print(len(msg) * '-')
c = None
if endpoint.grade in [ 'A+', 'A', 'A-' ]:
c = Fore.GREEN
elif endpoint.grade in [ 'B', 'C', 'D', 'E' ]:
c = Fore.YELLOW
elif endpoint.grade in [ 'F', 'T', 'M' ]:
c = Fore.RED
if args.grade:
print(_c(Fore.WHITE) + endpoint.serverName + ': ' + _c(c) + endpoint.grade)
break
if endpoint.grade == 'T':
print(_c(Fore.BLUE) + 'Rating: ' + '\t\t' + _c(c) +
_c(Style.BRIGHT) + endpoint.grade + ' (no trust)')
elif endpoint.grade == 'M':
print(_c(Fore.BLUE) + 'Rating: ' + '\t\t' + _c(c) +
_c(Style.BRIGHT) +
endpoint.grade + ' (certificate name mismatch)')
elif endpoint.grade == 'F':
print(_c(Fore.BLUE) + 'Rating: ' + '\t\t' + _c(c) +
_c(Style.BRIGHT) + endpoint.grade)
else:
print(_c(Fore.BLUE) + 'Rating: ' + '\t\t' + _c(c) +
endpoint.grade)
print('')
if endpoint.details.supportsRc4:
print(_c(Fore.BLUE) + 'RC4: ' + '\t\t\t' +
_c(Fore.RED) + 'supported')
else:
print(_c(Fore.BLUE) + 'RC4: ' + '\t\t\t' +
_c(Fore.GREEN) + 'not supported')
if endpoint.details.heartbleed:
print(_c(Fore.BLUE) + 'Heartbleed: ' + '\t\t' +
_c(Fore.RED) + 'vulnerable')
else:
print(_c(Fore.BLUE) + 'Heartbleed: ' + '\t\t' +
_c(Fore.GREEN) + 'not vulnerable')
if endpoint.details.poodleTls == -1:
print(_c(Fore.BLUE) + 'POODLE: ' + '\t\t' +
_c(Fore.YELLOW) + 'test failed')
                elif endpoint.details.poodleTls == 0:
print(_c(Fore.BLUE) + 'POODLE: ' + '\t\t' +
_c(Fore.YELLOW) + 'unknown')
elif endpoint.details.poodleTls == 1:
print(_c(Fore.BLUE) + 'POODLE: ' + '\t\t' +
_c(Fore.GREEN) + 'not vulnerable')
elif endpoint.details.poodleTls == 2:
print(_c(Fore.BLUE) + 'POODLE: ' + '\t\t' +
_c(Fore.RED) + 'vulnerable')
if endpoint.details.freak:
print(_c(Fore.BLUE) + 'FREAK: ' + '\t\t\t' +
_c(Fore.RED) + 'vulnerable')
else:
print(_c(Fore.BLUE) + 'FREAK: ' + '\t\t\t' +
_c(Fore.GREEN) + 'not vulnerable')
print('')
if not args.grade:
print(_c(Fore.BLUE) + 'Test starting time: ' + '\t' +
_c(Fore.CYAN) + _format_timestamp(h.startTime))
print(_c(Fore.BLUE) + 'Test completion time: ' + '\t' +
_c(Fore.CYAN) + _format_timestamp(h.testTime))
print(_c(Fore.BLUE) + 'Test duration: ' + '\t\t' +
_c(Fore.CYAN) +
str(datetime.strptime(_format_timestamp(h.testTime), _FMT) -
datetime.strptime(_format_timestamp(h.startTime), _FMT)))
if h.cacheExpiryTime:
print(_c(Fore.BLUE) + 'Cache expiry time: ' + '\t' +
_c(Fore.CYAN) + _format_timestamp(h.cacheExpiryTime))
sys.exit(0)
elif h.status == 'ERROR':
print(_c(Fore.RED) + h.statusMessage)
sys.exit(1)
elif h.status == 'DNS':
            print(_c(Fore.CYAN) + h.statusMessage + '. ' +
                  'Please try again in a few minutes.')
sys.exit(2)
elif h.status == 'IN_PROGRESS':
msg = 'Assessment is in Progress. Please try again in a few minutes.'
print(_c(Fore.WHITE) + msg)
print('')
print(_c(Fore.BLUE) + 'Test starting time: ' + '\t' +
_c(Fore.CYAN) + _format_timestamp(h.startTime))
sys.exit(3)
else:
msg = 'Unknown Status'
print(_c(Fore.RED) + msg)
sys.exit(255)
| unlicense | 5,589,052,208,164,433,000 | 36.552448 | 113 | 0.506657 | false |
geraldinepascal/FROGS | assessment/bin/treeSampling.py | 1 | 11792 | #!/usr/bin/env python2.7
#
# Copyright (C) 2016 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__author__ = 'Frederic Escudie - Plateforme bioinformatique Toulouse'
__copyright__ = 'Copyright (C) 2016 INRA'
__license__ = 'GNU General Public License'
__version__ = '0.2.0'
__email__ = '[email protected]'
__status__ = 'dev'
import sys
import random
import argparse
import numpy as np
from frogsNode import Node
from frogsSequenceIO import FastaIO
##################################################################################################################################################
#
# FUNCTIONS
#
##################################################################################################################################################
def write_subset( in_path, out_path, selected ):
FH_in = FastaIO(in_path)
FH_out = FastaIO(out_path, "w")
for record in FH_in:
if record.id in selected:
FH_out.write(record)
FH_in.close()
FH_out.close()
def print_summary( tree ):
max_depth = databank_tree.get_leaves()[0].get_depth()
print "##########################################################\n" + \
"# Representation\n" + \
"#\n"
print "Rank_depth\tNb_taxa\tNb_selected"
for idx in range(max_depth):
depth = idx + 1
rank_nodes = databank_tree.get_descendants(depth)
nb_node_selected = 0
for node in rank_nodes:
for leaf in node.get_leaves():
if leaf.metadata["selected"]:
nb_node_selected += 1
break
        print str(depth) + "\t" + str(len(rank_nodes)) + "\t" + str(nb_node_selected)
print ""
print "##########################################################\n" + \
"# Distribution\n" + \
"#\n"
distrib_data = dict()
for idx in range(max_depth -1):
depth = idx + 1
nb_selected = list()
rank_nodes = databank_tree.get_descendants(depth)
for node in rank_nodes:
nb_children_selected = 0
for child in node.get_children():
for leaf in child.get_leaves():
if leaf.metadata["selected"]:
nb_children_selected += 1
break
nb_selected.append(nb_children_selected)
distrib_data[str(depth)] = {
"all": distrib(nb_selected),
"non-zero": distrib(filter(lambda a: a != 0, nb_selected))
}
print "Distribution in all nodes:"
print "\t" + "\t".join(sorted(distrib_data))
    for field in ["min", "10/100", "25/100", "50/100", "75/100", "90/100", "max"]:
print field + ":\t" + "\t".join([str(distrib_data[depth]["all"][field]) for depth in sorted(distrib_data)])
print "\nDistribution in represented nodes:"
print "\t" + "\t".join(sorted(distrib_data))
    for field in ["min", "10/100", "25/100", "50/100", "75/100", "90/100", "max"]:
print field + ":\t" + "\t".join([str(distrib_data[depth]["non-zero"][field]) for depth in sorted(distrib_data)])
def distrib( data ):
return {
"min": min(data),
"10/100": np.percentile(data, 10),
"25/100": np.percentile(data, 25),
"50/100": np.percentile(data, 50),
"75/100": np.percentile(data, 75),
"90/100": np.percentile(data, 90),
"max": max(data)
}
def ascending_walk(node, max_selection):
selected_leaf = list()
if max_selection > 0:
if random.randint(1, args.climb_prob) != 1:
log[-1].append("ascending")
parent = node.get_parent()
if parent is not None: # Node is not root
brothers = parent.get_children()
if len(brothers) > 1: # Node has brother(s)
if random.randint(1, args.neighbor_prob) != 1: # Brother recruitment
neighbors_leaves = list()
for brother in brothers:
if brother is not node:
for leaf in brother.get_leaves():
if not leaf.metadata["selected"]:
neighbors_leaves.append( leaf )
if len(neighbors_leaves) > 0:
selected_idx = random.randint(1, len(neighbors_leaves)) -1
log[-1].append("neighbor_selection: " + neighbors_leaves[selected_idx].metadata["retained_seq_id"])
selected_leaf.append( neighbors_leaves[selected_idx] )
max_selection -= 1
# Go to parent
selected_leaf.extend( ascending_walk(parent, max_selection) )
return selected_leaf
def descending_walk( node ):
selected_leaf = None
if not node.has_child():
selected_leaf = node
else:
accessible_children = list()
for child in node.get_children():
for leaf in child.get_leaves():
if not leaf.metadata["selected"]:
accessible_children.append(child)
break
selected_idx = random.randint(1, len(accessible_children)) -1
selected_leaf = descending_walk( accessible_children[selected_idx] )
return selected_leaf
def rank_sampling( tree, rank ):
selected_leaf = None
accessible_leaves = list()
for rank_node in tree.get_descendants(rank):
        rank_is_accessible = rank_node.has_child()
for leaf in rank_node.get_leaves():
if leaf.metadata["selected"]:
rank_is_accessible = False
break
if rank_is_accessible:
accessible_leaves.extend( rank_node.get_leaves() )
selected_idx = random.randint(1, len(accessible_leaves)) -1
selected_leaf = accessible_leaves[selected_idx]
return selected_leaf
def get_tree_from_fasta( in_fasta ):
"""
@warning: The root node must be present
"""
databank_tree = None
FH_databank = FastaIO(in_fasta)
for record in FH_databank:
if record.description.endswith(";"):
record.description = record.description[:-1]
taxonomy = record.description.split(";")
if databank_tree is None:
databank_tree = Node(taxonomy[0])
parent = databank_tree
for rank_depth, taxa in enumerate(taxonomy[1:]):
if not parent.has_child( taxa ):
taxa_node = Node(taxa, parent)
if (rank_depth+1) == (len(taxonomy)-1): # Current node is leaf
taxa_node.metadata["seq_ids"] = [record.id]
else:
if (rank_depth+1) == (len(taxonomy)-1): # Current node is leaf
taxa_node = parent.get_child(taxa)
taxa_node.metadata["seq_ids"].append(record.id)
parent = parent.get_child(taxa)
FH_databank.close()
return databank_tree
##################################################################################################################################################
#
# MAIN
#
##################################################################################################################################################
if __name__ == "__main__":
# Manage parameters
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=('''Produce a random subset of species (with near and distant species) from a databank.''')
)
    parser.add_argument( '-c', '--climb-prob', default=4, type=int, help='Probability, when you have selected a node, of going to the parent node (to eventually select a neighbor node) [DEFAULT: 4]. Example: -c 4 you have 1/4 chance to stop the ascension ; -c 5 you have 1/5 chance to stop the ascension.' )
    parser.add_argument( '-n', '--neighbor-prob', default=4, type=int, help='Probability, when you have gone up to a parent, of selecting a neighbor node among its children [DEFAULT: 4]. Example: -n 4 you have 1/4 chance to skip the selection at this level ; -n 5 you have 1/5 chance to skip the selection at this level.' )
parser.add_argument( '-e', '--expected-nb', default=100, type=int, help='Number of selected sequences.' )
parser.add_argument( '-f', '--select-from', default="bottom", choices=['top', 'bottom', 'mix'], help='Select in "top", "bottom" or "mix". With top the less divided branch is favored ; with bottom the most divided branch is favored ; with mix the less divided and most divided are favored. [Default: bottom]' )
parser.add_argument( '-v', '--version', action='version', version=__version__ )
group_input = parser.add_argument_group( 'Inputs' ) # Inputs
    group_input.add_argument( '-d', '--databank', required=True, help='The reference databank (format : FASTA). Each sequence must have the same number of taxonomy levels and the header must have this format: "ID<TAB>TAX_LVL1;TAX_LVL2".' )
group_output = parser.add_argument_group( 'Outputs' ) # Outputs
group_output.add_argument( '-o', '--output', required=True, help='The selected sequences (format : FASTA).' )
args = parser.parse_args()
log = list()
# Build tree
databank_tree = get_tree_from_fasta(args.databank)
# Select one sequence ID by leaf
for leaf in databank_tree.get_leaves():
nb_seq_ids = len(leaf.metadata["seq_ids"])
leaf.metadata["selected"] = False
leaf.metadata["retained_seq_id"] = leaf.metadata["seq_ids"][0]
if nb_seq_ids > 1:
leaf.metadata["retained_seq_id"] = leaf.metadata["seq_ids"][random.randint(0, nb_seq_ids-1)]
# Select leaves
current_nb = 0
next_from_top = (args.select_from == "top")
nb_asc = 0
selected_leaves_id = list()
while args.expected_nb > current_nb:
# Random selection
current_selected_leaf = None
if next_from_top:
current_selected_leaf = descending_walk(databank_tree)
log.append(["from_top_selection: " + current_selected_leaf.metadata["retained_seq_id"]])
else:
            current_selected_leaf = rank_sampling(databank_tree, 6)  # TODO: make the sampling rank depth a parameter
log.append(["from_bottom_selection: " + current_selected_leaf.metadata["retained_seq_id"]])
nb_asc += 1
if args.select_from == "mix":
if nb_asc == 2:
nb_asc = 0
next_from_top = True
else:
next_from_top = False
current_selected_leaf.metadata["selected"] = True
selected_leaves_id.append( current_selected_leaf.metadata["retained_seq_id"] )
current_nb += 1
# Neighbor selection
current_selected_leaves = ascending_walk(current_selected_leaf, (args.expected_nb-current_nb))
for leaf in current_selected_leaves:
leaf.metadata["selected"] = True
selected_leaves_id.append( leaf.metadata["retained_seq_id"] )
current_nb += 1
# Write selection
write_subset(args.databank, args.output, selected_leaves_id)
# Log
for action in log:
print action
# Summary
print_summary(databank_tree)
| gpl-3.0 | -7,714,959,664,225,086,000 | 43.164794 | 313 | 0.556988 | false |
bmsauer/pyenvi | test.py | 1 | 8046 | import json
import unittest
from unittest.mock import MagicMock
from pyenvi import PyEnvi
from pyenvi.exceptions import *
class PyEnviTest(unittest.TestCase):
"""
PyEnviTest: a test class for PyEnvi.
TODO: These tests are a little sketchy, due to the static nature of PyEnvi.
Furthermore, they do not test any expected values from the subprocess, which
should be added.
"""
def setUp(self):
"""
Clear any outstanding processes, instances, and start a new one.
"""
PyEnvi.cleanup()
PyEnvi._instance = None
PyEnvi.get_instance()
def tearDown(self):
"""
Close gracefully, then call cleanup just in case.
"""
try:
PyEnvi.get_instance().stop()
except NotRunningError:
pass
PyEnvi.cleanup()
def test_get_instance(self):
"""
PyEnvi.get_instance() test plan:
-Make sure a call to get_instance is not none
-Make sure second call equals first call
-If starts at none, should be not none
"""
first_instance = PyEnvi.get_instance()
self.assertNotEqual(first_instance,None)
second_instance = PyEnvi.get_instance()
self.assertEqual(first_instance,second_instance)
def test_constructor(self):
"""
PyEnvi.__init__() test plan:
-Make sure variables are all set
-make sure constructor sets _instance to itself
-make sure subp is none
-make sure exception raised if called twice
"""
pyenvi = PyEnvi.get_instance()
self.assertEqual(pyenvi.subp,None)
pyenvi.start()
self.assertNotEqual(pyenvi._instance,None)
self.assertEqual(pyenvi,pyenvi._instance)
self.assertRaises(MultipleInstanceError, PyEnvi.__init__,{})
def test_str(self):
"""
PyEnvi.__str__() test plan:
-ensure a string is returned with the environment variables.
"""
pyenvi = PyEnvi.get_instance()
pyenvi.start()
pyenvi.set("test","value")
self.assertEqual(str(pyenvi),"PyEnvi: " + str(pyenvi.environment_variables))
def test_is_running(self):
"""
PyEnvi.is_running() test plan:
-set subp to different things, make sure return value is right
"""
pyenvi = PyEnvi.get_instance()
self.assertEqual(pyenvi.is_running(),False)
pyenvi.start()
self.assertEqual(pyenvi.is_running(),True)
def test_start(self):
"""
PyEnvi.start() test plan:
-make sure subp is a subprocess once stared
-make sure on second call exception is raised
"""
pyenvi = PyEnvi.get_instance()
pyenvi.start()
self.assertEqual(str(type(pyenvi.subp)),"<class 'subprocess.Popen'>")
self.assertRaises(AlreadyRunningError,pyenvi.start)
def test_stop(self):
"""
PyEnvi.stop() test plan:
-raise exception if not running
-mock send_message, return ok,
-ensure subp set to none
-ensure return value matches mock
"""
def mock_send_message(message):
return "Mock value"
pyenvi = PyEnvi.get_instance()
self.assertRaises(NotRunningError,pyenvi.stop)
pyenvi.start()
pyenvi.send_message = mock_send_message
response = pyenvi.stop()
self.assertEqual(pyenvi.subp,None)
self.assertEqual(response,"Mock value")
def test_set(self):
"""
PyEnvi.set() test plan:
-if not running, exception should be raised
-mock send_message, make sure returned.
"""
def mock_send_message(message):
return "Mock value"
pyenvi = PyEnvi.get_instance()
self.assertRaises(NotRunningError,pyenvi.set,"one","two")
pyenvi.send_message = mock_send_message
pyenvi.start()
response = pyenvi.set("one","two")
self.assertEqual(response,"Mock value")
def test_get(self):
"""
PyeEvi.get() test plan:
-if not running, exception should be raised
-mock response to return "_NOT_SET" or "OK"
-on NOT_SET, should raise not set error
-one set, should return "OK"
"""
def mock_send_message(message):
return "Mock value"
def mock_send_message_not_set(message):
return "_NOT_SET"
pyenvi = PyEnvi.get_instance()
self.assertRaises(NotRunningError,pyenvi.get,"one")
pyenvi.send_message = mock_send_message_not_set
pyenvi.start()
self.assertRaises(NotSetError,pyenvi.get,"one")
pyenvi.send_message = mock_send_message
self.assertEqual(pyenvi.get("one"),"Mock value")
def test_exists(self):
"""
PyEnvi.exists() test plan:
-if not running, raise exception
-mock send message to return yes and no, return value should reflect that
"""
def mock_send_message_no(message):
return "NO"
def mock_send_message_yes(message):
return "YES"
pyenvi = PyEnvi.get_instance()
self.assertRaises(NotRunningError,pyenvi.exists,"one")
pyenvi.start()
pyenvi.send_message = mock_send_message_no
self.assertEqual(pyenvi.exists("one"),False)
pyenvi.send_message = mock_send_message_yes
self.assertEqual(pyenvi.exists("one"),True)
def test_parse_response(self):
"""
PyEnvi.parse_response() test plan:
-equivalence table:
type left padding right padding result
bytes 0 0 string
object 0 0 attribute error
bytes 1 0 string
bytes 0 1 string
bytes 1 1 string
"""
pyenvi = PyEnvi.get_instance()
response = bytes("this is a response","UTF-8")
self.assertEqual(pyenvi.parse_response(response),"this is a response")
response = 4
self.assertRaises(AttributeError,pyenvi.parse_response,response)
response = bytes(" this is a response","UTF-8")
self.assertEqual(pyenvi.parse_response(response),"this is a response")
response = bytes("this is a response ","UTF-8")
self.assertEqual(pyenvi.parse_response(response),"this is a response")
response = bytes(" this is a response \n","UTF-8")
self.assertEqual(pyenvi.parse_response(response),"this is a response")
def test_create_message(self):
"""
PyEnvi.create_message() test plan:
-make sure return value is bytes
-make sure that last character is \n
-make sure json has proper structure
"""
pyenvi = PyEnvi.get_instance()
value = pyenvi.create_message("actionname","datathing")
self.assertEqual(str(type(value)),"<class 'bytes'>")
value = value.decode("utf-8")
self.assertEqual(value[-1],"\n")
value = json.loads(value)
self.assertEqual(value["action"],"actionname")
self.assertEqual(value["data"],"datathing")
def test_send_message(self):
"""
PyEnvi.send_message() test plan:
-send bogus message to subp, get unknown action
"""
        def mock_parse_response(response):
            return "value"
pyenvi = PyEnvi.get_instance()
pyenvi.start()
        pyenvi.parse_response = mock_parse_response
response = pyenvi.send_message(pyenvi.create_message("actionname","datathing"))
self.assertEqual(response,"_UNKNOWN_ACTION")
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | -3,098,476,493,365,333,500 | 34.60177 | 87 | 0.570594 | false |
symarroun/FSND-movie-trailers | fav_movies.py | 1 | 4818 | import media
import fav_movies_web
# Instances of my favorite movies:
# Deadpool movie: movie title, sotryline, poster image and movie trailer
deadpool = media.Movie("Deadpool",
""" Wade Wilson (Ryan Reynolds) is a former Special Forces
operative who now works as a mercenary. His world comes
crashing down when evil scientist Ajax (Ed Skrein)
tortures, disfigures and transforms him into Deadpool.
The rogue experiment leaves Deadpool with accelerated
healing powers and a twisted sense of humor. With help
from mutant allies Colossus and Negasonic Teenage
Warhead (Brianna Hildebrand), Deadpool uses his new
skills to hunt down the man who nearly destroyed
his life""",
"https://www.flickeringmyth.com/wp-content/uploads/2016/01/Deadpool-poster-1.jpg", # NOQA
"https://www.youtube.com/watch?v=Xithigfg7dA"
) # NOQA
# Focus movie: movie title, sotryline, poster image and movie trailer
focus = media.Movie("Focus",
"""Nicky (Will Smith), a veteran con artist, takes a
                    novice named Jess (Margot Robbie) under his wing. While
Nicky teaches Jess the tricks of the trade, the pair
become romantically involved; but, when Jess gets
uncomfortably close, Nicky ends their relationship.""",
"http://static.rogerebert.com/uploads/movie/movie_poster/focus-2015/large_focus_ver2.jpg", # NOQA
"https://www.youtube.com/watch?v=MxCRgtdAuBo"
) # NOQA
# Mechanic: Resurrection movie: movie title, storyline, poster image and
# movie trailer
mechanic = media.Movie("Mechanic: Resurrection",
"""Living under cover in Brazil, master assassin Arthur
Bishop(Jason Statham) springs back into action after an
old enemySam Hazeldine) kidnaps the woman (Jessica Alba)
he loves. To saveher life, Bishop must kill an
imprisoned African warlord, a humantrafficker (Toby
Eddington) and an arms dealer (Tommy Lee Jones),all
while making the deaths look like accidents. When things
don't goexactly as planned, Bishop turns the tables on
the people who forcedhim out of retirement.""",
"https://images-na.ssl-images-amazon.com/images/M/MV5BMjYwODExNzUwMV5BMl5BanBnXkFtZTgwNTgwNjUyOTE@._V1_UY1200_CR90,0,630,1200_AL_.jpg", # NOQA
"https://www.youtube.com/watch?v=G-P3f_wDXvs"
) # NOQA
# Enemy movie: movie title, storyline, poster image and movie trailer
enemy = media.Movie("Enemy",
"""A mild-mannered college professor (Jake Gyllenhaal)
discovers a look-alike actor and delves into the other
man's private affairs.""",
"http://www.impawards.com/intl/misc/2014/posters/enemy.jpg", # NOQA
"https://www.youtube.com/watch?v=FJuaAWrgoUY"
) # NOQA
# Wonder Woman movie: movie title, storyline, poster image and movie trailer
wonder_woman = media.Movie("Wonder Woman",
"""Before she was Wonder Woman (Gal Gadot), she was
Diana, princess ofthe Amazons, trained to be an
unconquerable warrior. Raised on asheltered island
paradise, Diana meets an American pilot (Chris Pine)
who tells her about the massive conflict that's
raging in the outsideworld. Convinced that she can
stop the threat, Diana leaves herhome for the first
time. Fighting alongside men in a war to end
allwars, she finally discovers her full powers and
true destiny""",
"http://cdn2-www.comingsoon.net/assets/uploads/gallery/wonder-woman/wwposter5.jpg", # NOQA
"https://www.youtube.com/watch?v=1Q8fG0TtVAY"
) # NOQA
# Ghost in the Shell movie: movie title, storyline, poster image and movie
# trailer
ghost_in_the_shell = media.Movie("Ghost in the Shell",
"""In the near future, Major is the first of
herkind: a human who iscyber-enhanced to be a
perfect soldier devoted to stopping theworld's
most dangerous criminals. When terrorism
reaches a newlevel that includes the ability
to hack into people's minds and control them,
Major is uniquely qualified to stop it. As
sheprepares to face a new enemy, Major
discovers that her life was stoleninstead of
saved. Now, she will stop at nothing to
recover her pastwhile punishing those who did
this to her.""",
"http://cdn2-www.comingsoon.net/assets/uploads/gallery/ghost-in-the-shell/ghostinshellposter.jpg", # NOQA
"https://www.youtube.com/watch?v=G4VmJcZR0Yg"
) # NOQA
# All instances grouped together in a list
# The list is the set of movies that will be passed to fav_movies_web
movies = [
deadpool,
focus,
mechanic,
    enemy,
    wonder_woman,
ghost_in_the_shell
]
# Open the HTML file in a webbrowser via the fav_movies_web.py
fav_movies_web.open_movies_page(movies) # the array/list (argument)
| mit | -5,561,107,779,786,455,000 | 45.326923 | 149 | 0.706932 | false |
statsmaths/stat665 | psets/pset01/pset01_starter.py | 1 | 1819 | """ Problem Set 01 starter code
Please make sure your code runs on Python version 3.5.0
Due date: 2016-02-05 13:00
"""
import numpy as np
from scipy import spatial
from scipy.stats import norm
def my_knn(X, y, k=1):
""" Basic k-nearest neighbor functionality
k-nearest neighbor regression for a numeric test
matrix. Prediction are returned for the same data matrix
used for training. For each row of the input, the k
closest rows (using the l2 distance) in the training
set are identified. The mean of the observations y
is used for the predicted value of a new observation.
Args:
X: an n by p numpy array; the data matrix of predictors
y: a length n numpy array; the observed response
k: integer giving the number of neighbors to include
Returns:
a 1d numpy array of predicted responses for each row of the input matrix X
"""
    # Reference implementation (one possible approach, assumptions noted):
    # build the full pairwise l2 distance matrix, then average y over the
    # k closest rows. Each row is its own nearest neighbor (distance 0),
    # so the point itself is included among the k neighbors.
    distmat = spatial.distance.squareform(spatial.distance.pdist(X))
    yhat = np.zeros(X.shape[0])
    for i in range(X.shape[0]):
        neighbors = np.argsort(distmat[i])[:k]
        yhat[i] = np.mean(y[neighbors])
    return yhat
def my_ksmooth(X, y, sigma=1.0):
""" Kernel smoothing function
kernel smoother for a numeric test matrix with a Gaussian
kernel. Prediction are returned for the same data matrix
used for training. For each row of the input, a weighted
average of the input y is used for prediction. The weights
are given by the density of the normal distribution for
the distance of a training point to the input.
Args:
X: an n by p numpy array; the data matrix of predictors
y: a length n numpy vector; the observed response
sigma: the standard deviation of the normal density function
used for the weighting scheme
Returns:
a 1d numpy array of predicted responses for each row of the input matrix X
"""
    # Reference implementation (one possible approach): weight every
    # training response by the normal density evaluated at its distance
    # from the query row, then take the normalized weighted average.
    distmat = spatial.distance.squareform(spatial.distance.pdist(X))
    weights = norm(scale=sigma).pdf(distmat)  # normal density at each distance
    yhat = weights.dot(y) / weights.sum(axis=1)
    return yhat
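
if __name__ == "__main__":
    # Minimal smoke test (illustrative only, not part of the assignment):
    # random inputs, so this just checks that both functions run and
    # return arrays of the expected shape.
    rng = np.random.RandomState(0)
    X = rng.rand(20, 3)
    y = rng.rand(20)
    print(my_knn(X, y, k=3))
    print(my_ksmooth(X, y, sigma=0.5))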
| gpl-2.0 | 2,707,504,578,652,846,000 | 32.072727 | 80 | 0.706432 | false |
vhavlena/appreal | netbench/pattern_match/parser.py | 1 | 6267 | ###############################################################################
# parser.py: Module for PATTERN MATCH, mataclass wrapping any parser based
# on nfa_parser base class.
# Copyright (C) 2011 Brno University of Technology, ANT @ FIT
# Author(s): Vlastimil Kosar <[email protected]>
###############################################################################
#
# LICENSE TERMS
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. All advertising materials mentioning features or use of this software
# or firmware must display the following acknowledgement:
#
# This product includes software developed by the University of
# Technology, Faculty of Information Technology, Brno and its
# contributors.
#
# 4. Neither the name of the Company nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# This software or firmware is provided ``as is'', and any express or implied
# warranties, including, but not limited to, the implied warranties of
# merchantability and fitness for a particular purpose are disclaimed.
# In no event shall the company or contributors be liable for any
# direct, indirect, incidental, special, exemplary, or consequential
# damages (including, but not limited to, procurement of substitute
# goods or services; loss of use, data, or profits; or business
# interruption) however caused and on any theory of liability, whether
# in contract, strict liability, or tort (including negligence or
# otherwise) arising in any way out of the use of this software, even
# if advised of the possibility of such damage.
#
# $Id$
import copy
import sys
from nfa_parser import nfa_parser
import pcre_parser
import pattern_exceptions
class parser(nfa_parser):
"""
    A meta class wrapping under a single interface any class for parsing of \
regular expressions based on base class nfa_parser.
:param selected_parser: Which class is used for parsing of regular \
expressions. Defaults to "pcre_parser". This parameter can be either \
name of parser class (eg. pcre_parser) or object of class based on \
nfa_parser class.
:type selected_parser: string or nfa_parser
    :param args: any parser parameters. NOTE: The caller is supposed to pass \
    correct parameters of correct types. If the parameters exceed the number \
    of accepted parameters, they are discarded.
:type args: list(Any type)
"""
def __init__(self, selected_parser = "pcre_parser", *args):
"""
Class constructor
"""
self.parser = None
if isinstance(selected_parser, str):
if selected_parser == "msfm_parser":
sys.stderr.write("ERROR: The class msfm_parser and coresponding \
RE parser was removed as deprecated. Use the class pcre_parser.\
\n")
exit()
elif selected_parser == "pcre_parser":
self.parser = pcre_parser.pcre_parser(*args)
else:
raise pattern_exceptions.unknown_parser(selected_parser)
else:
if isinstance(selected_parser, nfa_parser):
self.parser = selected_parser
else:
raise pattern_exceptions.unknown_parser(repr(selected_parser))
def load_file(self, filename):
"""
        This function is used to specify the input file and load the whole file into the input text attribute.
:param filename: Name of file.
:type filename: string
"""
self.parser.load_file(filename)
def set_text(self, input_text):
"""
Set text to parse - can have multiple text lines
:param input_text: Regular expressions.
:type input_text: string
"""
self.parser.set_text(input_text)
def get_nfa(self):
"""
        Parses the current line and returns the parsed NFA.
:returns: Created automaton in nfa_data format. Returns None if failure happens.
:rtype: nfa_data or None
"""
return self.parser.get_nfa()
def next_line(self):
"""
Move to the next line (next regular expression)
        :returns: True if the move was performed, otherwise False.
:rtype: boolean
"""
return self.parser.next_line()
def move_to_line(self, line):
"""
Move to the specified line
:param line: Line number.
:type line: int
        :returns: True if the move was performed, otherwise False.
:rtype: boolean
"""
return self.parser.move_to_line(line)
def num_lines(self):
"""
Returns number of lines.
        :returns: Number of lines. Each line corresponds to a single regular expression.
:rtype: int
"""
return self.parser.num_lines()
def reset(self):
"""
Reset the position counter to 0. Parsing will continue from the begining.
"""
return self.parser.reset()
def get_position(self):
"""
Returns position in ruleset.
:returns: Position in ruleset.
:rtype: int
"""
return self.parser.get_position()
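
# Example usage (illustrative; assumes a file "rules.pcre" with one
# PCRE-style regular expression per line):
#
#     p = parser("pcre_parser")
#     p.load_file("rules.pcre")
#     while True:
#         nfa = p.get_nfa()
#         # ... consume the nfa_data object ...
#         if not p.next_line():
#             break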
###############################################################################
# End of File parser.py #
############################################################################### | gpl-2.0 | -6,552,689,261,439,421,000 | 37.219512 | 109 | 0.588001 | false |
ReneHollander/rep0st | rep0st/index/post.py | 1 | 2758 | from typing import Iterable, NamedTuple
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Date, Document, InnerDoc, Integer, Keyword, Nested
from injector import Module, inject
from rep0st.analyze.feature_vector_analyzer import TYPE_NAME as FEATURE_VECTOR_TYPE
from rep0st.config.rep0st_elasticsearch import Rep0stElasticsearchModule
from rep0st.db.post import Post as DBPost
from rep0st.framework.data.elasticsearch import ElastiknnDenseFloatVectorL2LSHMapping, Index
class PostIndexModule(Module):
def configure(self, binder):
binder.install(Rep0stElasticsearchModule)
binder.bind(PostIndex)
class Frame(InnerDoc):
id = Integer()
feature_vector = ElastiknnDenseFloatVectorL2LSHMapping(108, 180, 5, 3)
class Post(Document):
created = Date()
flags = Keyword()
type = Keyword()
tags = Keyword()
frames = Nested(Frame)
# TODO: Figure out how to disable dynamic mappings.
# dynamic = False
class Index:
name = 'posts'
settings = {
'number_of_shards': 6,
'elastiknn': True,
}
class SearchResult(NamedTuple):
score: float
id: int
class PostIndex(Index[Post]):
@inject
def __init__(self, elasticsearch: Elasticsearch):
super().__init__(Post, elasticsearch=elasticsearch)
def _index_post_from_post(self, post: DBPost) -> Post:
index_post = Post()
index_post.meta.id = post.id
index_post.created = post.created
index_post.type = post.type.value
index_post.flags = [flag.value for flag in post.get_flags()]
index_post.tags = [tag.tag for tag in post.tags]
index_post.frames = [
Frame(
id=feature.id,
feature_vector=[float(n / 255.0)
for n in feature.data])
for feature in post.features
if feature.type == FEATURE_VECTOR_TYPE
]
return index_post
  def add_posts(self, posts: Iterable[DBPost]):
def _it():
for post in posts:
yield self._index_post_from_post(post)
self.save_all(_it())
def find_posts(self, feature_vector):
response = self.search().update_from_dict({
'size': 50,
'fields': [],
'_source': False,
'min_score': 0.3,
'query': {
'nested': {
'path': 'frames',
'query': {
'elastiknn_nearest_neighbors': {
'field': 'frames.feature_vector',
'vec': feature_vector,
'model': 'lsh',
'similarity': 'l2',
'candidates': 500
},
},
},
},
}).execute()
for post in response:
yield SearchResult(post.meta.score, post.meta.id)
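
# Illustrative usage (hypothetical wiring; names depend on the framework):
#
#     index = injector.get(PostIndex)
#     for result in index.find_posts(query_vector):
#         print(result.score, result.id)
#
# where `query_vector` is a 108-dimensional list of floats in [0, 1],
# matching the elastiknn mapping declared on Frame.feature_vector.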
| mit | -2,390,979,145,973,501,000 | 25.776699 | 92 | 0.603336 | false |
Janzert/halite_ranking | rating_stats.py | 1 | 8861 | #!/usr/bin/env python3
import argparse
import json
import math
import sys
from collections import defaultdict
import trueskill
import matplotlib.pyplot as plot
import utility
def phi(x):
"""Cumulative distribution function for the standard normal distribution
Taken from python math module documentation"""
return (1.0 + math.erf(x / math.sqrt(2.0))) / 2.0
def ts_winp(a, b, env=None):
"""Win probability of player a over b given their trueskill ratings.
Formula found at https://github.com/sublee/trueskill/issues/1#issuecomment-244699989"""
if not env:
env = trueskill.global_env()
epsilon = trueskill.calc_draw_margin(env.draw_probability, 2)
denom = math.sqrt(a.sigma**2 + b.sigma**2 + (2 * env.beta**2))
return phi((a.mu - b.mu - epsilon) / denom)
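
# Sanity check (illustrative): with a zero draw probability the draw margin
# is 0, so two fresh default ratings give exactly phi(0) == 0.5:
#
#     trueskill.setup(draw_probability=0.)
#     ts_winp(trueskill.Rating(), trueskill.Rating())  # -> 0.5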
def wl_winp(a, b):
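    # Logistic (Bradley-Terry style) win probability for Weng-Lin ratings;
    # 25/6 is the default TrueSkill beta, so 2*(25/6)**2 plays the role of
    # the combined performance variance.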
ciq = math.sqrt(a.sigma**2 + b.sigma**2 + (2 * (25/6)**2))
return 1 / (1 + math.exp((b.mu - a.mu) / ciq))
def pl_winp(a, b):
"""Win probability of player a over b given their PL ratings."""
return a / (a + b)
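
# Example: pl_winp(2.0, 1.0) == 2/3, i.e. a player with twice the
# Plackett-Luce strength wins about 66.7% of pairwise matchups.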
def ratings_rmse(game_results, ratings, winp_func, subjects=None):
sum_errors = 0
num_missed = 0
num_predictions = 0
for game in game_results:
gameranks = sorted(game.items(), key=lambda x: x[1])
for pix, (player, prank) in enumerate(gameranks[:-1]):
for opp, orank in gameranks[pix+1:]:
if subjects and player not in subjects and opp not in subjects:
continue
if player not in ratings or opp not in ratings:
num_missed += 1
continue
winp = winp_func(ratings[player], ratings[opp])
winr = 1 if prank < orank else 0
sum_errors += (winp - winr)**2
num_predictions += 1
if num_missed:
print("Could not make a prediction for %d pairs." % (
num_missed,))
print("With %d predictions made." % (num_predictions,))
return math.sqrt(sum_errors / num_predictions)
def ts_order(a, b):
return (a.mu - (a.sigma * 3)) > (b.mu - (b.sigma * 3))
def pl_order(a, b):
return a > b
def ratings_order_error(game_results, ratings, rank_order, subjects=None):
num_wrong = 0
num_missed = 0
num_predictions = 0
for game in game_results:
gameranks = sorted(game.items(), key=lambda x: x[1])
for pix, (player, prank) in enumerate(gameranks[:-1]):
for opp, orank in gameranks[pix+1:]:
if subjects and player not in subjects and opp not in subjects:
continue
if player not in ratings or opp not in ratings:
num_missed += 1
continue
better = rank_order(ratings[player], ratings[opp])
worse = rank_order(ratings[opp], ratings[player])
# if player rating is indecisive, count as wrong prediction
# see Weng and Lin 2011 Section 6
if (better == worse) or (better != (prank < orank)):
num_wrong += 1
num_predictions += 1
if num_missed:
print("Could not make a prediction for %d pairs." % (
num_missed,))
print("With %d predictions made." % (num_predictions,))
return num_wrong / num_predictions
def best_scores(game_results):
player_wins = defaultdict(lambda: defaultdict(int))
for game in game_results:
for player, prank in game.items():
for opp, orank in game.items():
if player == opp:
continue
if prank < orank:
player_wins[player][opp] += 1
ratings = {p: p for g in game_results for p in g}
def pwin(a, b):
if player_wins[a][b] == 0:
return 0
if player_wins[b][a] == 0:
return 1
return player_wins[a][b] / (player_wins[a][b] + player_wins[b][a])
def rank_order(a, b):
return player_wins[a][b] > player_wins[b][a]
rmse = ratings_rmse(game_results, ratings, pwin)
print("True probability RMSE %f" % (rmse,))
order_ratio = ratings_order_error(game_results, ratings, rank_order)
print("True probability incorrectly ordered %f%% results" % (order_ratio * 100,))
def load_ts_ratings(filename):
ratings = dict()
with open(filename) as rfile:
for line in rfile:
rank, player, score, mu, sigma = line.split(",")
rating = trueskill.Rating(mu=float(mu), sigma=float(sigma))
ratings[player.strip()] = rating
return ratings
def load_pl_ratings(filename):
ratings = dict()
with open(filename) as rfile:
for line in rfile:
rank, player, rating = line.split(",")
ratings[player.strip()] = float(rating)
return ratings
def main(args=sys.argv[1:]):
parser = argparse.ArgumentParser("Gather various performance statistics from ratings.")
parser.add_argument("game_files", nargs="+",
help="Json files containing game data.")
parser.add_argument("-n", "--num-games", type=int,
help="Limit the number of games used (positive for first, negative for last")
parser.add_argument("--remove-suspect", action="store_true",
help="Filter out suspect games based on workerID.")
parser.add_argument("--no-error", action="store_true",
help="Filter out games that had bot errors.")
parser.add_argument("-r", "--ratings", required=True,
help="File with ratings of players.")
parser.add_argument("--subjects",
help="File with players to include.")
parser.add_argument("--subjects-num", type=int,
help="Only use first n subjects.")
parser.add_argument("--calc-best", action="store_true",
help="Calculate best possible rates using true win percentages.")
parser.add_argument("--type", choices=["ts", "wl"],
help="Type of ratings, ts=trueskill or wl=Weng-Lin.")
config = parser.parse_args(args)
with open(config.ratings) as rfile:
line = rfile.readline()
fnum = len(line.split(","))
if fnum == 3:
load_ratings = load_pl_ratings
winp = pl_winp
rank_order = pl_order
print("Detected plackett-luce ratings.")
elif fnum == 5:
load_ratings = load_ts_ratings
if not config.type:
print("Rating type not given, use --type argument.")
return
if config.type == "ts":
winp = ts_winp
rank_order = ts_order
print("Detected trueskill ratings.")
elif config.type == "wl":
winp = wl_winp
rank_order = ts_order
print("Detected Weng-Lin ratings.")
ratings = load_ratings(config.ratings)
print("Loaded ratings for %d players." % (len(ratings)))
if config.subjects:
with open(config.subjects) as sfile:
slines = sfile.readlines()
if len(slines[0].split(",")) > 1:
slines = [l.split(",")[1] for l in slines]
if config.subjects_num:
if config.subjects_num > 0:
slines = slines[:config.subjects_num]
else:
slines = slines[config.subjects_num:]
subjects = frozenset(l.strip() for l in slines)
print("Restricting stats to %d players" % (len(subjects),))
else:
subjects = None
games = utility.load_games(config.game_files)
if config.no_error:
games = utility.filter_error_games(games)
print("Filtered out error games, leaving %d" % (len(games),))
if config.remove_suspect:
start_num = len(games)
games = utility.filter_suspect_games(games)
print("Filtered out %d suspect games, leaving %d" % (
start_num - len(games), len(games)))
game_results = [{"%s (%s)" % (u['username'], u['userID']): int(u['rank'])
for u in g['users']}
for g in games]
if config.num_games:
if config.num_games > 0:
game_results = game_results[:config.num_games]
print("Using first %d games." % (len(game_results),))
else:
game_results = game_results[config.num_games:]
print("Using last %d games." % (len(game_results),))
trueskill.setup(draw_probability = 0.)
rmse = ratings_rmse(game_results, ratings, winp, subjects)
print("Given ratings RMSE %f" % (rmse,))
ordering_ratio = ratings_order_error(game_results, ratings, rank_order, subjects)
print("Given ratings incorrectly ordered %.2f%% results" % (
ordering_ratio * 100,))
if config.calc_best:
best_scores(game_results)
if __name__ == "__main__":
main()
| mit | -1,659,496,511,031,441,400 | 38.035242 | 91 | 0.576684 | false |
TACC/tacc_stats | tacc_stats/site/machine/update_db.py | 1 | 7979 | #!/usr/bin/env python
import os,sys, pwd
from datetime import timedelta, datetime
from dateutil.parser import parse
from fcntl import flock, LOCK_EX, LOCK_NB
os.environ['DJANGO_SETTINGS_MODULE']='tacc_stats.site.tacc_stats_site.settings'
import django
django.setup()
from tacc_stats.site.machine.models import Job, Host, Libraries
from tacc_stats.site.xalt.models import run, join_run_object, lib
from tacc_stats.analysis.metrics import metrics
import tacc_stats.cfg as cfg
from tacc_stats.progress import progress
from tacc_stats.daterange import daterange
import pytz, calendar
import pickle as p
import traceback
import csv
import hostlist
def update_acct(date, rerun = False):
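    # ftr converts an "HH:MM:SS" Timelimit into seconds when zipped with
    # the split time fields below (days are handled separately).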
ftr = [3600,60,1]
tz = pytz.timezone('US/Central')
ctr = 0
with open(os.path.join(cfg.acct_path, date.strftime("%Y-%m-%d") + '.txt'), encoding = "latin1") as fd:
nrecords = sum(1 for record in csv.DictReader(fd))
fd.seek(0)
for job in csv.DictReader(fd, delimiter = '|'):
if '+' in job['JobID']:
jid, rid = job['JobID'].split('+')
                job['JobID'] = str(int(jid) + int(rid))
if '_' in job['JobID']:
job['JobID'] = job['JobID'].split('_')[0]
if rerun:
pass
elif Job.objects.filter(id = job['JobID']).exists():
ctr += 1
continue
json = {}
json['id'] = job['JobID']
json['project'] = job['Account']
json['start_time'] = tz.localize(parse(job['Start']))
json['end_time'] = tz.localize(parse(job['End']))
json['start_epoch'] = calendar.timegm(json['start_time'].utctimetuple())
json['end_epoch'] = calendar.timegm(json['end_time'].utctimetuple())
json['run_time'] = json['end_epoch'] - json['start_epoch']
try:
if '-' in job['Timelimit']:
days, time = job['Timelimit'].split('-')
else:
time = job['Timelimit']
days = 0
json['requested_time'] = (int(days) * 86400 +
sum([a*b for a,b in zip(ftr, [int(i) for i in time.split(":")])]))/60
except: pass
json['queue_time'] = int(parse(job['Submit']).strftime('%s'))
try:
json['queue'] = job['Partition']
json['name'] = job['JobName'][0:128]
json['status'] = job['State'].split()[0]
json['nodes'] = int(job['NNodes'])
json['cores'] = int(job['ReqCPUS'])
json['wayness'] = json['cores']/json['nodes']
json['date'] = json['end_time'].date()
json['user'] = job['User']
except:
print(job)
continue
if "user" in json:
try:
json['uid'] = int(pwd.getpwnam(json['user']).pw_uid)
except: pass
host_list = hostlist.expand_hostlist(job['NodeList'])
del job['NodeList']
Job.objects.filter(id=json['id']).delete()
try:
obj, created = Job.objects.update_or_create(**json)
except:
continue
### If xalt is available add data to the DB
xd = None
try:
#xd = run.objects.using('xalt').filter(job_id = json['id'])[0]
for r in run.objects.using('xalt').filter(job_id = json['id']):
if "usr" in r.exec_path.split('/'): continue
xd = r
except: pass
if xd:
obj.exe = xd.exec_path.split('/')[-1][0:128]
obj.exec_path = xd.exec_path
obj.cwd = xd.cwd[0:128]
obj.threads = xd.num_threads
obj.save()
for join in join_run_object.objects.using('xalt').filter(run_id = xd.run_id):
object_path = lib.objects.using('xalt').get(obj_id = join.obj_id).object_path
module_name = lib.objects.using('xalt').get(obj_id = join.obj_id).module_name
if not module_name: module_name = 'none'
library = Libraries(object_path = object_path, module_name = module_name)
library.save()
library.jobs.add(obj)
### Build host table
for host_name in host_list:
h = Host(name=host_name)
h.save()
h.jobs.add(obj)
ctr += 1
progress(ctr, nrecords, date)
try:
with open(os.path.join(cfg.pickles_dir, date.strftime("%Y-%m-%d"), "validated")) as fd:
for line in fd.readlines():
Job.objects.filter(id = int(line)).update(validated = True)
except: pass
def update_metrics(date, pickles_dir, processes, rerun = False):
min_time = 60
metric_names = [
"avg_ethbw", "avg_cpi", "avg_freq", "avg_loads", "avg_l1loadhits",
"avg_l2loadhits", "avg_llcloadhits", "avg_sf_evictrate", "max_sf_evictrate",
"avg_mbw", "avg_page_hitrate", "time_imbalance",
"mem_hwm", "max_packetrate", "avg_packetsize", "node_imbalance",
"avg_flops_32b", "avg_flops_64b", "avg_vector_width_32b", "vecpercent_32b", "avg_vector_width_64b", "vecpercent_64b",
"avg_cpuusage", "max_mds", "avg_lnetmsgs", "avg_lnetbw", "max_lnetbw", "avg_fabricbw",
"max_fabricbw", "avg_mdcreqs", "avg_mdcwait", "avg_oscreqs",
"avg_oscwait", "avg_openclose", "avg_mcdrambw", "avg_blockbw", "max_load15", "avg_gpuutil"
]
aud = metrics.Metrics(metric_names, processes = processes)
print("Run the following tests for:",date)
for name in aud.metric_list:
print(name)
jobs_list = Job.objects.filter(date = date).exclude(run_time__lt = min_time)
#jobs_list = Job.objects.filter(date = date, queue__in = ['rtx', 'rtx-dev']).exclude(run_time__lt = min_time)
# Use avg_cpuusage to see if job was tested. It will always exist
if not rerun:
jobs_list = jobs_list.filter(avg_cpuusage = None)
paths = []
for job in jobs_list:
paths.append(os.path.join(pickles_dir,
job.date.strftime("%Y-%m-%d"),
str(job.id)))
num_jobs = jobs_list.count()
print("# Jobs to be tested:",num_jobs)
if num_jobs == 0 : return
for jobid, metric_dict in aud.run(paths):
try:
if metric_dict: jobs_list.filter(id = jobid).update(**metric_dict)
except: pass
if __name__ == "__main__":
import argparse
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), "update_db_lock"), "w") as fd:
try:
flock(fd, LOCK_EX | LOCK_NB)
except IOError:
print("update_db is already running")
sys.exit()
parser = argparse.ArgumentParser(description='Run database update')
parser.add_argument('start', type = parse, nargs='?', default = datetime.now(),
help = 'Start (YYYY-mm-dd)')
parser.add_argument('end', type = parse, nargs='?', default = False,
help = 'End (YYYY-mm-dd)')
parser.add_argument('-p', '--processes', type = int, default = 1,
help = 'number of processes')
parser.add_argument('-d', '--directory', type = str,
help='Directory to read data', default = cfg.pickles_dir)
args = parser.parse_args()
start = args.start
end = args.end
if not end: end = start
for date in daterange(start, end):
update_acct(date, rerun = False)
update_metrics(date, args.directory, args.processes, rerun = False)
| lgpl-2.1 | -1,956,235,082,091,073,300 | 40.557292 | 126 | 0.523499 | false |
andrewlrogers/srvy | collection/setup_db.py | 1 | 1042 | #!/usr/bin/python
""" checks to see if srvy.db exists in ../archive. If not it creates the db and appropriate table """
import sqlite3
from sqlite3 import Error
def create_conection(db_file):
try:
conn = sqlite3.connect(db_file)
print(sqlite3.version)
except Error as e:
print(e)
finally:
conn.close()
def create_table(db_file,create_table_sql):
try:
conn = sqlite3.connect(db_file)
c = conn.cursor()
c.execute(create_table_sql)
except Error as e:
print(e)
finally:
conn.close
def main():
database = "../archive/srvy.db"
create_conection(database)
create_srvy_table = """ CREATE TABLE IF NOT EXISTS responses (response_key INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
pythonDateTime TEXT NOT NULL,
unixTime REAL NOT NULL,
question TEXT NOT NULL,
opinion INTEGER NOT NULL
);"""
create_table(database, create_srvy_table)
main()
| mit | -6,962,049,424,461,966,000 | 27.162162 | 122 | 0.596929 | false |