repo_name | ref | path | copies | content
---|---|---|---|---
jklughammer/RefFreeDMA | refs/heads/master | scripts/mergeRevComp.py | 1 | #! /usr/bin/env python
__author__ = 'jklughammer'
import sys
import collections
import re
import os
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
infile_name = str(sys.argv[1]) # needs ordered input (by sequence length)
working_dir=str(sys.argv[2])
infile = open(infile_name, 'r')
outfile = open(infile_name + "-res", 'w')
# make 2 letter lists for RC and FW
print("making search dicts...")
RC = collections.OrderedDict()
RC_rev = collections.OrderedDict()
FW = collections.OrderedDict()
count = 0
for line in infile:
seq = line.split("\t")[1]
name = line.split("\t")[0]
#RC
rc_seq = str(Seq(seq, generic_dna).reverse_complement())
rc_seq = rc_seq.replace("C", "T")
rc_seq = rc_seq.replace("G", "A")
RC[name] = [rc_seq, seq]
if rc_seq not in RC_rev:
RC_rev[rc_seq] = dict()
RC_rev[rc_seq][name] = seq
else:
RC_rev[rc_seq][name] = seq
#print("key " + str(rc_seq) + " already exists: " + str(RC_rev[rc_seq]) + ". Adding " + str(name))
#FW
fw_seq = seq.replace("C", "T")
fw_seq = fw_seq.replace("G", "A")
FW[name] = [fw_seq, seq]
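# Worked example (for illustration): for seq = "ACGG" the reverse complement
# is "CCGT"; after the bisulfite-style collapse (C->T, G->A) rc_seq becomes
# "TTAT" and fw_seq becomes "ATAA".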
count = count + 1
print("now finding matches...")
origEntries = len(FW)
count = 0
hitcount = 0
already_popped = 0
keep_correct=dict()
for fw_key, fw_both in list(FW.items()): # snapshot, since FW is popped inside the loop
fw_val=fw_both[0]
fw_orig=fw_both[1]
count += 1
if count % 10000 == 0:
print(count)
if fw_val in RC_rev and (not fw_key in RC_rev[fw_val] or len(RC_rev[fw_val])>1):
hitcount += 1
fw_orig_l=list(fw_orig)
temp=RC_rev[fw_val]
if len(RC_rev[fw_val])>1:
similarity = dict()
for RC_rev_key, RC_rev_val in RC_rev[fw_val].items():
if RC_rev_key == fw_key:
continue
similarity[RC_rev_key]= 0
RC_rev_val_l = list(str(Seq(RC_rev_val, generic_dna).reverse_complement()))
for i in range(0,len(fw_orig_l)-1):
if (fw_orig_l[i] == "T" and RC_rev_val_l[i] in ["T","C"]) or (fw_orig_l[i] == "C" and RC_rev_val_l[i] in ["C"])\
or (fw_orig_l[i] == "G" and RC_rev_val_l[i] in ["G","A"])or (fw_orig_l[i] == "A" and RC_rev_val_l[i] in ["A"]):
similarity[RC_rev_key]+= 1
rw_key = max(similarity, key=similarity.get) # key with the highest similarity score, not the lexicographically largest name
else:
rw_key = list(RC_rev[fw_val].keys())[0]
rw_orig = RC_rev[fw_val][rw_key]
rw_orig_l=list(str(Seq(rw_orig, generic_dna).reverse_complement()))
new_fw_l=list(fw_orig)
for i in range(0,len(fw_orig_l)-1):
if fw_orig_l[i] == "T" and rw_orig_l[i] == "C":
new_fw_l[i] = "C"
new_fw = "".join(new_fw_l)
keep_correct[fw_key] = new_fw
try: FW.pop(rw_key) #prevent also finding the inverse combination
except KeyError: already_popped+=1
print(str(hitcount) + " RC-combies found")
print(str(already_popped) + " multihits found")
print(str(len(FW)) + " from " + str(origEntries) + " left")
print("now printing RC-reduced output")
printCount = 0
infile.seek(0)
for line in infile:
spl_line=line.split("\t")
name = spl_line[0]
if name in FW:
printCount += 1
if name in keep_correct:
spl_line[1] = keep_correct[name]
join_line = "\t".join(spl_line)
outfile.writelines(join_line)
keep_correct.pop(name)
else:
outfile.writelines(line)
FW.pop(name)
if printCount % 10000 == 0:
print(printCount)
print(str(printCount) + " lines printed")
open(os.path.join(working_dir,"mergeRevComp.done"), 'a').close() |
thirdwing/SFrame | refs/heads/master | oss_src/unity/python/sframe/deps/nltk_mock.py | 10 | """
Dummy mocking module for nltk.
When nltk is not available we will import this module as graphlab.deps.nltk,
and set HAS_NLTK to false. All methods that access nltk should check the
HAS_NLTK flag; therefore, attributes/classes/methods in this module should
never actually be used.
"""
'''
Copyright (C) 2015 Dato, Inc.
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
'''
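# A minimal sketch (hypothetical, not part of this module) of how the
# importer described in the docstring above is expected to wire this mock up:
#
#     try:
#         import nltk
#         HAS_NLTK = True
#     except ImportError:
#         from . import nltk_mock as nltk
#         HAS_NLTK = False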
|
Tehsmash/nova | refs/heads/master | nova/tests/unit/api/test_wsgi.py | 111 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test WSGI basics and provide some helper functions for other WSGI tests.
"""
from nova import test
import routes
import webob
from nova import wsgi
class Test(test.NoDBTestCase):
def test_debug(self):
class Application(wsgi.Application):
"""Dummy application to test debug."""
def __call__(self, environ, start_response):
start_response("200", [("X-Test", "checking")])
return ['Test result']
application = wsgi.Debug(Application())
result = webob.Request.blank('/').get_response(application)
self.assertEqual(result.body, "Test result")
def test_router(self):
class Application(wsgi.Application):
"""Test application to call from router."""
def __call__(self, environ, start_response):
start_response("200", [])
return ['Router result']
class Router(wsgi.Router):
"""Test router."""
def __init__(self):
mapper = routes.Mapper()
mapper.connect("/test", controller=Application())
super(Router, self).__init__(mapper)
result = webob.Request.blank('/test').get_response(Router())
self.assertEqual(result.body, "Router result")
result = webob.Request.blank('/bad').get_response(Router())
self.assertNotEqual(result.body, "Router result")
|
egabancho/invenio | refs/heads/pu | invenio/modules/formatter/format_elements/bfe_record_url.py | 11 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints the record URL.
"""
__revision__ = "$Id$"
from invenio.config import \
CFG_SITE_URL, \
CFG_SITE_RECORD
def format_element(bfo, with_ln="yes"):
"""
Prints the record URL.
@param with_ln: if "yes" include "ln" attribute in the URL
"""
url = CFG_SITE_URL + "/" + CFG_SITE_RECORD + "/" + bfo.control_field('001')
if with_ln.lower() == "yes":
url += "?ln=" + bfo.lang
return url
|
MarshMue/NCAAMarchMadness | refs/heads/master | DataFetch.py | 1 | # DataFetch gets statistics about teams from ESPN and structures it within a database for use in a meaningful way
# Author: Marshall Mueller
# Date created: 3/1/2016
from bs4 import BeautifulSoup
import requests
from BTrees.OOBTree import OOBTree
from ZODB import FileStorage, DB
import transaction
from persistent import Persistent
storage = FileStorage.FileStorage("teams.fs")
db = DB(storage)
conn = db.open()
dbroot = conn.root()
if "teams" not in dbroot:
dbroot["teams"] = OOBTree()
Teams = dbroot["teams"]
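# Anything reachable from the ZODB root is persisted; Team instances added
# to this OOBTree are written to teams.fs on transaction.commit().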
class Team(Persistent):
def __init__(self, name, statsUrl):
self.name = name
self.statsUrl = statsUrl
self.players = []
self.playerStats = {}
self.ppg = 0
self.wins = 0
self.losses = 0
def update(self):
response2 = requests.get("http://espn.go.com" + self.statsUrl)
soup2 = BeautifulSoup(response2.text, 'html5lib')
container2 = soup2.findAll("div", {"class": "mod-content"})
tr = container2[0].findAll("tr")
tr2 = container2[1].findAll("tr")
index = 2
while tr[index]["class"][0] != u'total':
td = tr[index].findAll("td")
self.players.append(td[0].text.strip())
stats = {}
# convert stat column names to strings for game stats
statname = tr[1].findAll("td")
gind = 0
while gind < len(statname):
statname[gind] = statname[gind].text.strip()
gind += 1
# do the same for season stats
statname2 = tr2[2].findAll("td")
sind = 0
while sind < len(statname2):
statname2[sind] = statname2[sind].text.strip()
sind += 1
# get each player stats and add to stats dictionary, with stat name as key value
statind = 1
statsrow = tr[index].findAll("td")
while statind < len(statname): # bound by the header columns, not the raw <tr> child count
stats[statname[statind]] = float(statsrow[statind].text.strip())
statind += 1
# do the same for season stats
statind2 = 1
statsrow2 = tr2[index + 1].findAll("td")
while statind2 < len(statname2): # likewise for the season-stats columns
stats[statname2[statind2]] = float(statsrow2[statind2].text.strip())
statind2 += 1
# add stats to player dictionary, indexed by player name
self.playerStats[td[0].text.strip()] = stats
index += 1
# update overall team specific stats
totals = soup2.find("tr", {"class": "total"})
self.ppg = totals.findAll("td")[3].text.strip()
subtitle = soup2.find("div", {"class", "sub-title"})
subtitle = subtitle.text.strip()
record = subtitle[:subtitle.index(",")]
self.wins = record[:record.index("-")]
self.losses = record[record.index("-") + 1:]
if len(Teams) == 0:
# set up soup for team page and get all urls for team stats
url = "http://espn.go.com/mens-college-basketball/teams"
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html5lib')
container = soup.find("div", {"class" : "span-4"})
teamURLs = container.findAll("li")
# iterate through all the teams stats and pull relevant info
for teamURL in teamURLs:
links = teamURL.findAll("a")
name = links[0].text.strip()
statsUrl = links[1]['href']
team = Team(name, statsUrl)
team.update()
# add team to database
Teams[name] = team
transaction.commit()
# print for debug and verifying data
print team.name
for player in team.players:
print player + ": " + str(team.playerStats[player])
print ""
else:
# data already in database, print from it
for team in Teams:
print team
pind = 0
for player in Teams[team].players:
print str(player) + ": " + str(Teams[team].playerStats[player])
print ""
|
edgarcosta92/ns3 | refs/heads/master | src/core/bindings/callbacks_list.py | 153 | callback_classes = [
['void', 'unsigned char*', 'long', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'std::string', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
ricardogsilva/QGIS | refs/heads/master | python/core/auto_additions/qgsfieldproxymodel.py | 56 | # The following has been generated automatically from src/core/qgsfieldproxymodel.h
QgsFieldProxyModel.Filters.baseClass = QgsFieldProxyModel
Filters = QgsFieldProxyModel # dirty hack since SIP seems to introduce the flags in module
|
akosyakov/intellij-community | refs/heads/master | python/testData/inspections/PyNumpyType/Sort.py | 79 | def sort(self, axis=-1, kind='quicksort', order=None): # real signature unknown; restored from __doc__
"""
a.sort(axis=-1, kind='quicksort', order=None)
Sort an array, in-place.
Parameters
----------
axis : int, optional
Axis along which to sort. Default is -1, which means sort along the
last axis.
kind : {'quicksort', 'mergesort', 'heapsort'}, optional
Sorting algorithm. Default is 'quicksort'.
order : list, optional
When `a` is an array with fields defined, this argument specifies
which fields to compare first, second, etc. Not all fields need be
specified.
"""
pass
a = np.array([('a', 2), ('c', 1)], dtype=[('x', 'S1'), ('y', int)])
print(sort(a, order='y')) |
juangj/selenium | refs/heads/master | py/test/unit/selenium/__init__.py | 2454 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
|
arikpoz/mxnet | refs/heads/master | example/reinforcement-learning/dqn/dqn_demo.py | 15 | import mxnet as mx
import mxnet.ndarray as nd
import numpy
from base import Base
from operators import *
from atari_game import AtariGame
from utils import *
import logging
import argparse
# stdlib modules used directly below (sys.stdout, os.path, time.time)
import os
import sys
import time
root = logging.getLogger()
root.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)
mx.random.seed(100)
npy_rng = get_numpy_rng()
class DQNInitializer(mx.initializer.Xavier):
def _init_bias(self, _, arr):
arr[:] = .1
def main():
parser = argparse.ArgumentParser(description='Script to test the trained network on a game.')
parser.add_argument('-r', '--rom', required=False, type=str,
default=os.path.join('roms', 'breakout.bin'),
help='Path of the ROM File.')
parser.add_argument('-v', '--visualization', required=False, type=int, default=0,
help='Visualize the runs.')
parser.add_argument('--lr', required=False, type=float, default=0.01,
help='Learning rate of the AdaGrad optimizer')
parser.add_argument('--eps', required=False, type=float, default=0.01,
help='Eps of the AdaGrad optimizer')
parser.add_argument('--clip-gradient', required=False, type=float, default=None,
help='Clip threshold of the AdaGrad optimizer')
parser.add_argument('--double-q', required=False, type=bool, default=False,
help='Use Double DQN')
parser.add_argument('--wd', required=False, type=float, default=0.0,
help='Weight of the L2 Regularizer')
parser.add_argument('-c', '--ctx', required=False, type=str, default='gpu',
help='Running Context. E.g `-c gpu` or `-c gpu1` or `-c cpu`')
parser.add_argument('-d', '--dir-path', required=False, type=str, default='',
help='Saving directory of model files.')
parser.add_argument('--start-eps', required=False, type=float, default=1.0,
help='Eps of the epsilon-greedy policy at the beginning')
parser.add_argument('--replay-start-size', required=False, type=int, default=50000,
help='The step that the training starts')
parser.add_argument('--kvstore-update-period', required=False, type=int, default=1,
help='The period that the worker updates the parameters from the sever')
parser.add_argument('--kv-type', required=False, type=str, default=None,
help='type of kvstore, default will not use kvstore, could also be dist_async')
parser.add_argument('--optimizer', required=False, type=str, default="adagrad",
help='type of optimizer')
args = parser.parse_args()
if args.dir_path == '':
rom_name = os.path.splitext(os.path.basename(args.rom))[0]
args.dir_path = 'dqn-%s-lr%g' % (rom_name, args.lr)
replay_start_size = args.replay_start_size
max_start_nullops = 30
replay_memory_size = 1000000
history_length = 4
rows = 84
cols = 84
ctx = parse_ctx(args.ctx)
q_ctx = mx.Context(*ctx[0])
game = AtariGame(rom_path=args.rom, resize_mode='scale', replay_start_size=replay_start_size,
resized_rows=rows, resized_cols=cols, max_null_op=max_start_nullops,
replay_memory_size=replay_memory_size, display_screen=args.visualization,
history_length=history_length)
##RUN NATURE
freeze_interval = 10000
epoch_num = 200
steps_per_epoch = 250000
update_interval = 4
discount = 0.99
eps_start = args.start_eps
eps_min = 0.1
eps_decay = (eps_start - eps_min) / 1000000
eps_curr = eps_start
freeze_interval /= update_interval
minibatch_size = 32
action_num = len(game.action_set)
data_shapes = {'data': (minibatch_size, history_length) + (rows, cols),
'dqn_action': (minibatch_size,), 'dqn_reward': (minibatch_size,)}
dqn_sym = dqn_sym_nature(action_num)
qnet = Base(data_shapes=data_shapes, sym_gen=dqn_sym, name='QNet',
initializer=DQNInitializer(factor_type="in"),
ctx=q_ctx)
target_qnet = qnet.copy(name="TargetQNet", ctx=q_ctx)
use_easgd = False
optimizer = mx.optimizer.create(name=args.optimizer, learning_rate=args.lr, eps=args.eps,
clip_gradient=args.clip_gradient,
rescale_grad=1.0, wd=args.wd)
updater = mx.optimizer.get_updater(optimizer)
qnet.print_stat()
target_qnet.print_stat()
# Begin Playing Game
training_steps = 0
total_steps = 0
for epoch in range(epoch_num):
# Run Epoch
steps_left = steps_per_epoch
episode = 0
epoch_reward = 0
start = time.time()
game.start()
while steps_left > 0:
# Running New Episode
episode += 1
episode_loss = 0.0
episode_q_value = 0.0
episode_update_step = 0
episode_action_step = 0
time_episode_start = time.time()
game.begin_episode(steps_left)
while not game.episode_terminate:
# 1. We need to choose a new action based on the current game status
if game.state_enabled and game.replay_memory.sample_enabled:
do_exploration = (npy_rng.rand() < eps_curr)
eps_curr = max(eps_curr - eps_decay, eps_min)
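# eps decays linearly from start_eps to eps_min over ~1M steps (see
# eps_decay above), so exploration gradually gives way to exploitation.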
if do_exploration:
action = npy_rng.randint(action_num)
else:
# TODO Here we can in fact play multiple gaming instances simultaneously and make actions for each
# We can simply stack the current_state() of gaming instances and give prediction for all of them
# We need to wait after calling calc_score(.), which makes the program slow
# TODO Profiling the speed of this part!
current_state = game.current_state()
state = nd.array(current_state.reshape((1,) + current_state.shape),
ctx=q_ctx) / float(255.0)
qval_npy = qnet.forward(is_train=False, data=state)[0].asnumpy()
action = numpy.argmax(qval_npy)
episode_q_value += qval_npy[0, action]
episode_action_step += 1
else:
action = npy_rng.randint(action_num)
# 2. Play the game for a single mega-step (Inside the game, the action may be repeated for several times)
game.play(action)
total_steps += 1
# 3. Update our Q network if we can start sampling from the replay memory
# Also, we update every `update_interval`
if total_steps % update_interval == 0 and game.replay_memory.sample_enabled:
# 3.1 Draw sample from the replay_memory
training_steps += 1
episode_update_step += 1
states, actions, rewards, next_states, terminate_flags \
= game.replay_memory.sample(batch_size=minibatch_size)
states = nd.array(states, ctx=q_ctx) / float(255.0)
next_states = nd.array(next_states, ctx=q_ctx) / float(255.0)
actions = nd.array(actions, ctx=q_ctx)
rewards = nd.array(rewards, ctx=q_ctx)
terminate_flags = nd.array(terminate_flags, ctx=q_ctx)
# 3.2 Use the target network to compute the scores and
# get the corresponding target rewards
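# Standard DQN selects and evaluates the greedy action with the target
# network alone; Double DQN (below) selects with the online network and
# evaluates with the target network to reduce overestimation bias.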
if not args.double_q:
target_qval = target_qnet.forward(is_train=False, data=next_states)[0]
target_rewards = rewards + nd.choose_element_0index(target_qval,
nd.argmax_channel(target_qval))\
* (1.0 - terminate_flags) * discount
else:
target_qval = target_qnet.forward(is_train=False, data=next_states)[0]
qval = qnet.forward(is_train=False, data=next_states)[0]
target_rewards = rewards + nd.choose_element_0index(target_qval,
nd.argmax_channel(qval))\
* (1.0 - terminate_flags) * discount
outputs = qnet.forward(is_train=True,
data=states,
dqn_action=actions,
dqn_reward=target_rewards)
qnet.backward()
qnet.update(updater=updater)
# 3.3 Calculate Loss
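# Huber-style loss as in the Nature DQN setup: quadratic for |diff| <= 1,
# linear beyond; computed here only for the episode statistics logged below.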
diff = nd.abs(nd.choose_element_0index(outputs[0], actions) - target_rewards)
quadratic_part = nd.clip(diff, -1, 1)
loss = 0.5 * nd.sum(nd.square(quadratic_part)).asnumpy()[0] +\
nd.sum(diff - quadratic_part).asnumpy()[0]
episode_loss += loss
# 3.4 Update the target network every freeze_interval
if training_steps % freeze_interval == 0:
qnet.copy_params_to(target_qnet)
steps_left -= game.episode_step
time_episode_end = time.time()
# Update the statistics
epoch_reward += game.episode_reward
info_str = "Epoch:%d, Episode:%d, Steps Left:%d/%d, Reward:%f, fps:%f, Exploration:%f" \
% (epoch, episode, steps_left, steps_per_epoch, game.episode_reward,
game.episode_step / (time_episode_end - time_episode_start), eps_curr)
if episode_update_step > 0:
info_str += ", Avg Loss:%f/%d" % (episode_loss / episode_update_step,
episode_update_step)
if episode_action_step > 0:
info_str += ", Avg Q Value:%f/%d" % (episode_q_value / episode_action_step,
episode_action_step)
if episode % 100 == 0:
logging.info(info_str)
end = time.time()
fps = steps_per_epoch / (end - start)
qnet.save_params(dir_path=args.dir_path, epoch=epoch)
logging.info("Epoch:%d, FPS:%f, Avg Reward: %f/%d"
% (epoch, fps, epoch_reward / float(episode), episode))
if __name__ == '__main__':
main()
|
zerothi/sids | refs/heads/master | sisl/linalg/tests/test_solve.py | 1 | import pytest
import numpy as np
import scipy.linalg as sl
from sisl.linalg import solve, solve_destroy
pytestmark = [pytest.mark.linalg, pytest.mark.eig]
def test_solve1():
a = np.random.rand(10, 10)
b = np.random.rand(10, 10)
xs = sl.solve(a, b)
x = solve(a, b)
assert np.allclose(xs, x)
def test_solve2():
a = np.random.rand(10, 10)
ac = a.copy()
b = np.random.rand(10)
bc = b.copy()
xs = sl.solve(a, b)
x = solve(a, b)
assert np.allclose(xs, x)
assert x.shape == (10, )
assert np.allclose(a, ac)
assert np.allclose(b, bc)
def test_solve3():
a = np.random.rand(10, 2)
b = np.random.rand(10)
with pytest.raises(ValueError):
solve(a, b)
def test_solve4():
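# solve_destroy may overwrite its inputs in place (hence the name), so a
# and b should not be reused after this call.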
a = np.random.rand(10, 10)
b = np.random.rand(10)
xs = sl.solve(a, b)
x = solve_destroy(a, b)
assert np.allclose(xs, x)
assert x.shape == (10, )
|
menscool/nested-sets-yii2 | refs/heads/master | vendor/justinrainbow/json-schema/docs/conf.py | 74 | # -*- coding: utf-8 -*-
#
# JsonSchema documentation build configuration file, created by
# sphinx-quickstart on Sat Dec 10 15:34:44 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'JsonSchema'
copyright = u'2011, Justin Rainbow, Bruno Prieto Reis'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'JsonSchemadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'JsonSchema.tex', u'JsonSchema Documentation',
u'Justin Rainbow, Bruno Prieto Reis', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'jsonschema', u'JsonSchema Documentation',
[u'Justin Rainbow, Bruno Prieto Reis'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'JsonSchema', u'JsonSchema Documentation', u'Justin Rainbow, Bruno Prieto Reis',
'JsonSchema', 'One line description of project.', 'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
zhangqifan/findSomething | refs/heads/master | FindSomething/Pods/AVOSCloudCrashReporting/Breakpad/src/tools/gyp/test/same-target-name-different-directory/src/touch.py | 679 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
f = open(sys.argv[1], 'w+')
f.write('Hello from touch.py\n')
f.close()
|
pdellaert/ansible | refs/heads/devel | lib/ansible/modules/network/f5/bigip_software_install.py | 15 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_software_install
short_description: Install software images on a BIG-IP
description:
- Install new images on a BIG-IP.
version_added: 2.7
options:
image:
description:
- Image to install on the remote device.
type: str
volume:
description:
- The volume to install the software image to.
type: str
state:
description:
- When C(installed), ensures that the software is installed on the volume
and the volume is set to be booted from. The device is B(not) rebooted
into the new software.
- When C(activated), performs the same operation as C(installed), but
the system is rebooted to the new software.
type: str
choices:
- activated
- installed
default: activated
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Ensure an existing image is installed in specified volume
bigip_software_install:
image: BIGIP-13.0.0.0.0.1645.iso
volume: HD1.2
state: installed
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Ensure an existing image is activated in specified volume
bigip_software_install:
image: BIGIP-13.0.0.0.0.1645.iso
state: activated
volume: HD1.2
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
# only common fields returned
'''
import time
import ssl
from ansible.module_utils.six.moves.urllib.error import URLError
from ansible.module_utils.urls import urlparse
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
class Parameters(AnsibleF5Parameters):
api_map = {
}
api_attributes = [
'options',
'volume',
]
returnables = [
]
updatables = [
]
class ApiParameters(Parameters):
@property
def image_names(self):
result = []
result += self.read_image_from_device('image')
result += self.read_image_from_device('hotfix')
return result
def read_image_from_device(self, t):
uri = "https://{0}:{1}/mgmt/tm/sys/software/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
t,
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return []
if 'code' in response and response['code'] == 400:
if 'message' in response:
return []
else:
return []
if 'items' not in response:
return []
return [x['name'].split('/')[0] for x in response['items']]
class ModuleParameters(Parameters):
@property
def version(self):
if self._values['version']:
return self._values['version']
self._values['version'] = self.image_info['version']
return self._values['version']
@property
def build(self):
# Return cached copy if we have it
if self._values['build']:
return self._values['build']
# Otherwise, get copy from image info cache
self._values['build'] = self.image_info['build']
return self._values['build']
@property
def image_info(self):
if self._values['image_info']:
image = self._values['image_info']
else:
# Otherwise, get a new copy and store in cache
image = self.read_image()
self._values['image_info'] = image
return image
@property
def image_type(self):
if self._values['image_type']:
return self._values['image_type']
if 'software:image' in self.image_info['kind']:
self._values['image_type'] = 'image'
else:
self._values['image_type'] = 'hotfix'
return self._values['image_type']
def read_image(self):
image = self.read_image_from_device(type='image')
if image:
return image
image = self.read_image_from_device(type='hotfix')
if image:
return image
return None
def read_image_from_device(self, type):
uri = "https://{0}:{1}/mgmt/tm/sys/software/{2}/".format(
self.client.provider['server'],
self.client.provider['server_port'],
type,
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'items' in response:
for item in response['items']:
if item['name'].startswith(self.image):
return item
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params, client=self.client)
self.have = ApiParameters(client=self.client)
self.changes = UsableChanges()
self.volume_url = None
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
result = dict()
changed = self.present()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return False
else:
return self.update()
def _set_volume_url(self, item):
path = urlparse(item['selfLink']).path
self.volume_url = "https://{0}:{1}{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
path
)
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/sys/software/volume/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.get(uri)
try:
collection = resp.json()
except ValueError:
return False
for item in collection['items']:
if item['name'].startswith(self.want.volume):
self._set_volume_url(item)
break
if not self.volume_url:
self.volume_url = uri + self.want.volume
resp = self.client.api.get(self.volume_url)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
# version key can be missing in the event that an existing volume has
# no installed software in it.
if self.want.version != response.get('version', None):
return False
if self.want.build != response.get('build', None):
return False
if self.want.state == 'installed':
return True
if self.want.state == 'activated':
if 'defaultBootLocation' in response['media'][0]:
return True
return False
def volume_exists(self):
resp = self.client.api.get(self.volume_url)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update(self):
if self.module.check_mode:
return True
if self.want.image and self.want.image not in self.have.image_names:
raise F5ModuleError(
"The specified image was not found on the device."
)
options = list()
if not self.volume_exists():
options.append({'create-volume': True})
if self.want.state == 'activated':
options.append({'reboot': True})
self.want.update({'options': options})
self.update_on_device()
self.wait_for_software_install_on_device()
if self.want.state == 'activated':
self.wait_for_device_reboot()
return True
def update_on_device(self):
params = {
"command": "install",
"name": self.want.image,
}
params.update(self.want.api_params())
uri = "https://{0}:{1}/mgmt/tm/sys/software/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.image_type
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
if 'commandResult' in response and len(response['commandResult'].strip()) > 0:
raise F5ModuleError(response['commandResult'])
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return True
def wait_for_device_reboot(self):
while True:
time.sleep(5)
try:
self.client.reconnect()
volume = self.read_volume_from_device()
if volume is None:
continue
if 'active' in volume and volume['active'] is True:
break
except F5ModuleError:
# Handle all exceptions because if the system is offline (for a
# reboot) the REST client will raise exceptions about
# connections
pass
def wait_for_software_install_on_device(self):
# We need to delay this slightly in case the volume needs to be
# created first
for dummy in range(10):
try:
if self.volume_exists():
break
except F5ModuleError:
pass
time.sleep(5)
while True:
time.sleep(10)
volume = self.read_volume_from_device()
if volume is None or 'status' not in volume:
self.client.reconnect()
continue
if volume['status'] == 'complete':
break
elif volume['status'] == 'failed':
raise F5ModuleError
def read_volume_from_device(self):
try:
resp = self.client.api.get(self.volume_url)
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
except ssl.SSLError:
# Suggests BIG-IP is still in the middle of restarting itself or
# restjavad is restarting.
return None
except URLError:
# At times during reboot BIG-IP will reset or timeout connections so we catch and pass this here.
return None
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
image=dict(),
volume=dict(),
state=dict(
default='activated',
choices=['activated', 'installed']
),
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
AdvancedClimateSystems/python-modbus | refs/heads/master | umodbus/client/serial/rtu.py | 2 | """
.. note:: This section is based on `MODBUS over Serial Line Specification and
Implementation Guide V1.02`_.
The ADU for Modbus RTU messages differs from that of Modbus TCP/IP messages.
Messages sent over RTU don't have an MBAP header; instead they have an Address field.
This field contains the slave id. A CRC is appended to the message. Below all
parts of a Modbus RTU message are listed together with their byte size:
+---------------+-----------------+
| **Component** | **Size** (bytes)|
+---------------+-----------------+
| Address field | 1 |
+---------------+-----------------+
| PDU | N |
+---------------+-----------------+
| CRC | 2 |
+---------------+-----------------+
The CRC is calculated from the Address field and the PDU.
Below you see a hexadecimal presentation of a request over RTU with Modbus
function code 1. It requests data of the slave with id 1, starting at coil 100,
for the length of 3 coils:
..
Note: the backslash in the bytes below are escaped using an extra back
slash. Without escaping the bytes aren't printed correctly in the HTML
output of this docs.
To work with the bytes in Python you need to remove the escape sequences.
`b'\\x01\\x00d'` -> `b'\x01\x00d'`
.. code-block:: python
>>> # Read coils, starting from coil 100 for the length of 3 coils.
>>> adu = b'\\x01\\x01\\x00d\\x00\\x03=\\xd4'
The length of this ADU is 8 bytes::
>>> len(adu)
8
"""
import struct
from umodbus.client.serial.redundancy_check import get_crc, validate_crc
from umodbus.functions import (create_function_from_response_pdu,
expected_response_pdu_size_from_request_pdu,
pdu_to_function_code_or_raise_error, ReadCoils,
ReadDiscreteInputs, ReadHoldingRegisters,
ReadInputRegisters, WriteSingleCoil,
WriteSingleRegister, WriteMultipleCoils,
WriteMultipleRegisters)
from umodbus.utils import recv_exactly
def _create_request_adu(slave_id, req_pdu):
""" Return request ADU for Modbus RTU.
:param slave_id: Slave id.
:param req_pdu: Byte array with PDU.
:return: Byte array with ADU.
"""
first_part_adu = struct.pack('>B', slave_id) + req_pdu
return first_part_adu + get_crc(first_part_adu)
def read_coils(slave_id, starting_address, quantity):
""" Return ADU for Modbus function code 01: Read Coils.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = ReadCoils()
function.starting_address = starting_address
function.quantity = quantity
return _create_request_adu(slave_id, function.request_pdu)
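# Example usage (a sketch; assumes a pyserial ``Serial`` instance opened
# elsewhere):
#
#     adu = read_coils(slave_id=1, starting_address=100, quantity=3)
#     data = send_message(adu, serial_port)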
def read_discrete_inputs(slave_id, starting_address, quantity):
""" Return ADU for Modbus function code 02: Read Discrete Inputs.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = ReadDiscreteInputs()
function.starting_address = starting_address
function.quantity = quantity
return _create_request_adu(slave_id, function.request_pdu)
def read_holding_registers(slave_id, starting_address, quantity):
""" Return ADU for Modbus function code 03: Read Holding Registers.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = ReadHoldingRegisters()
function.starting_address = starting_address
function.quantity = quantity
return _create_request_adu(slave_id, function.request_pdu)
def read_input_registers(slave_id, starting_address, quantity):
""" Return ADU for Modbus function code 04: Read Input Registers.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = ReadInputRegisters()
function.starting_address = starting_address
function.quantity = quantity
return _create_request_adu(slave_id, function.request_pdu)
def write_single_coil(slave_id, address, value):
""" Return ADU for Modbus function code 05: Write Single Coil.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = WriteSingleCoil()
function.address = address
function.value = value
return _create_request_adu(slave_id, function.request_pdu)
def write_single_register(slave_id, address, value):
""" Return ADU for Modbus function code 06: Write Single Register.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = WriteSingleRegister()
function.address = address
function.value = value
return _create_request_adu(slave_id, function.request_pdu)
def write_multiple_coils(slave_id, starting_address, values):
""" Return ADU for Modbus function code 15: Write Multiple Coils.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = WriteMultipleCoils()
function.starting_address = starting_address
function.values = values
return _create_request_adu(slave_id, function.request_pdu)
def write_multiple_registers(slave_id, starting_address, values):
""" Return ADU for Modbus function code 16: Write Multiple Registers.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = WriteMultipleRegisters()
function.starting_address = starting_address
function.values = values
return _create_request_adu(slave_id, function.request_pdu)
def parse_response_adu(resp_adu, req_adu=None):
""" Parse response ADU and return response data. Some functions require
request ADU to fully understand request ADU.
:param resp_adu: Resonse ADU.
:param req_adu: Request ADU, default None.
:return: Response data.
"""
resp_pdu = resp_adu[1:-2]
validate_crc(resp_adu)
req_pdu = None
if req_adu is not None:
req_pdu = req_adu[1:-2]
function = create_function_from_response_pdu(resp_pdu, req_pdu)
return function.data
def raise_for_exception_adu(resp_adu):
""" Check a response ADU for error
:param resp_adu: Response ADU.
:raises ModbusError: When a response contains an error code.
"""
resp_pdu = resp_adu[1:-2]
pdu_to_function_code_or_raise_error(resp_pdu)
def send_message(adu, serial_port):
""" Send ADU over serial to to server and return parsed response.
:param adu: Request ADU.
:param sock: Serial port instance.
:return: Parsed response from server.
"""
serial_port.write(adu)
serial_port.flush()
# Check exception ADU (which is shorter than all other responses) first.
exception_adu_size = 5
response_error_adu = recv_exactly(serial_port.read, exception_adu_size)
raise_for_exception_adu(response_error_adu)
expected_response_size = \
expected_response_pdu_size_from_request_pdu(adu[1:-2]) + 3
response_remainder = recv_exactly(
serial_port.read, expected_response_size - exception_adu_size)
return parse_response_adu(response_error_adu + response_remainder, adu)
|
mitya57/django | refs/heads/master | tests/generic_inline_admin/tests.py | 47 | from django.contrib import admin
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from django.contrib.contenttypes.admin import GenericTabularInline
from django.contrib.contenttypes.forms import generic_inlineformset_factory
from django.contrib.contenttypes.models import ContentType
from django.forms.formsets import DEFAULT_MAX_NUM
from django.forms.models import ModelForm
from django.test import (
RequestFactory, SimpleTestCase, TestCase, override_settings,
)
from django.urls import reverse
from .admin import MediaInline, MediaPermanentInline, site as admin_site
from .models import Category, Episode, EpisodePermanent, Media, PhoneNumber
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
# Set DEBUG to True to ensure {% include %} will raise exceptions.
# That is how inlines are rendered and #9498 will bubble up if it is an issue.
@override_settings(DEBUG=True, ROOT_URLCONF='generic_inline_admin.urls')
class GenericAdminViewTest(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
e = Episode.objects.create(name='This Week in Django')
self.episode_pk = e.pk
m = Media(content_object=e, url='http://example.com/podcast.mp3')
m.save()
self.mp3_media_pk = m.pk
m = Media(content_object=e, url='http://example.com/logo.png')
m.save()
self.png_media_pk = m.pk
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse('admin:generic_inline_admin_episode_add'))
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse('admin:generic_inline_admin_episode_change', args=(self.episode_pk,))
)
self.assertEqual(response.status_code, 200)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "This Week in Django",
# inline data
"generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "1",
"generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "0",
"generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:generic_inline_admin_episode_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
post_data = {
"name": "This Week in Django",
# inline data
"generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "3",
"generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "2",
"generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0",
"generic_inline_admin-media-content_type-object_id-0-id": "%d" % self.mp3_media_pk,
"generic_inline_admin-media-content_type-object_id-0-url": "http://example.com/podcast.mp3",
"generic_inline_admin-media-content_type-object_id-1-id": "%d" % self.png_media_pk,
"generic_inline_admin-media-content_type-object_id-1-url": "http://example.com/logo.png",
"generic_inline_admin-media-content_type-object_id-2-id": "",
"generic_inline_admin-media-content_type-object_id-2-url": "",
}
url = reverse('admin:generic_inline_admin_episode_change', args=(self.episode_pk,))
response = self.client.post(url, post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_generic_inline_formset(self):
EpisodeMediaFormSet = generic_inlineformset_factory(
Media,
can_delete=False,
exclude=['description', 'keywords'],
extra=3,
)
e = Episode.objects.get(name='This Week in Django')
# Works with no queryset
formset = EpisodeMediaFormSet(instance=e)
self.assertEqual(len(formset.forms), 5)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">'
'Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-0-url" '
'type="url" name="generic_inline_admin-media-content_type-object_id-0-url" '
'value="http://example.com/podcast.mp3" maxlength="200" />'
'<input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" '
'value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>'
% self.mp3_media_pk
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">'
'Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" '
'type="url" name="generic_inline_admin-media-content_type-object_id-1-url" '
'value="http://example.com/logo.png" maxlength="200" />'
'<input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" '
'value="%s" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>'
% self.png_media_pk
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_generic_inline_admin-media-content_type-object_id-2-url">Url:</label>'
'<input id="id_generic_inline_admin-media-content_type-object_id-2-url" type="url" '
'name="generic_inline_admin-media-content_type-object_id-2-url" maxlength="200" />'
'<input type="hidden" name="generic_inline_admin-media-content_type-object_id-2-id" '
'id="id_generic_inline_admin-media-content_type-object_id-2-id" /></p>'
)
# A queryset can be used to alter display ordering
formset = EpisodeMediaFormSet(instance=e, queryset=Media.objects.order_by('url'))
self.assertEqual(len(formset.forms), 5)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label>'
'<input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="url" '
'name="generic_inline_admin-media-content_type-object_id-0-url"'
'value="http://example.com/logo.png" maxlength="200" />'
'<input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" '
'value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>'
% self.png_media_pk
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">Url:</label>'
'<input id="id_generic_inline_admin-media-content_type-object_id-1-url" type="url" '
'name="generic_inline_admin-media-content_type-object_id-1-url" '
'value="http://example.com/podcast.mp3" maxlength="200" />'
'<input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" '
'value="%s" id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>'
% self.mp3_media_pk
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_generic_inline_admin-media-content_type-object_id-2-url">'
'Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-2-url" '
'type="url" name="generic_inline_admin-media-content_type-object_id-2-url" maxlength="200" />'
'<input type="hidden" name="generic_inline_admin-media-content_type-object_id-2-id" '
'id="id_generic_inline_admin-media-content_type-object_id-2-id" /></p>'
)
# Works with a queryset that omits items
formset = EpisodeMediaFormSet(instance=e, queryset=Media.objects.filter(url__endswith=".png"))
self.assertEqual(len(formset.forms), 4)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_generic_inline_admin-media-content_type-object_id-0-url">Url:</label>'
' <input id="id_generic_inline_admin-media-content_type-object_id-0-url" type="url" '
'name="generic_inline_admin-media-content_type-object_id-0-url" '
'value="http://example.com/logo.png" maxlength="200" />'
'<input type="hidden" name="generic_inline_admin-media-content_type-object_id-0-id" '
'value="%s" id="id_generic_inline_admin-media-content_type-object_id-0-id" /></p>'
% self.png_media_pk
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_generic_inline_admin-media-content_type-object_id-1-url">'
'Url:</label> <input id="id_generic_inline_admin-media-content_type-object_id-1-url" '
'type="url" name="generic_inline_admin-media-content_type-object_id-1-url" maxlength="200" />'
'<input type="hidden" name="generic_inline_admin-media-content_type-object_id-1-id" '
'id="id_generic_inline_admin-media-content_type-object_id-1-id" /></p>'
)
def test_generic_inline_formset_factory(self):
# Regression test for #10522.
inline_formset = generic_inlineformset_factory(Media, exclude=('url',))
# Regression test for #12340.
e = Episode.objects.get(name='This Week in Django')
formset = inline_formset(instance=e)
self.assertTrue(formset.get_queryset().ordered)
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class GenericInlineAdminParametersTest(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
self.factory = RequestFactory()
def _create_object(self, model):
"""
Create a model with an attached Media object via GFK. We can't
load content via a fixture (since the GenericForeignKey relies on
content type IDs, which will vary depending on what other tests
have been run), thus we do it here.
"""
e = model.objects.create(name='This Week in Django')
Media.objects.create(content_object=e, url='http://example.com/podcast.mp3')
return e
def test_no_param(self):
"""
With one initial form, extra (default) at 3, there should be 4 forms.
"""
e = self._create_object(Episode)
response = self.client.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
formset = response.context['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 4)
self.assertEqual(formset.initial_form_count(), 1)
def test_extra_param(self):
"""
With extra=0, there should be one form.
"""
class ExtraInline(GenericTabularInline):
model = Media
extra = 0
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [ExtraInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 1)
self.assertEqual(formset.initial_form_count(), 1)
    def test_max_num_param(self):
"""
With extra=5 and max_num=2, there should be only 2 forms.
"""
class MaxNumInline(GenericTabularInline):
model = Media
extra = 5
max_num = 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [MaxNumInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 2)
self.assertEqual(formset.initial_form_count(), 1)
def test_min_num_param(self):
"""
With extra=3 and min_num=2, there should be five forms.
"""
class MinNumInline(GenericTabularInline):
model = Media
extra = 3
min_num = 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [MinNumInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 5)
self.assertEqual(formset.initial_form_count(), 1)
def test_get_extra(self):
class GetExtraInline(GenericTabularInline):
model = Media
extra = 4
def get_extra(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetExtraInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.extra, 2)
def test_get_min_num(self):
class GetMinNumInline(GenericTabularInline):
model = Media
min_num = 5
def get_min_num(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetMinNumInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.min_num, 2)
def test_get_max_num(self):
class GetMaxNumInline(GenericTabularInline):
model = Media
extra = 5
def get_max_num(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetMaxNumInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.max_num, 2)
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class GenericInlineAdminWithUniqueTogetherTest(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_add(self):
category_id = Category.objects.create(name='male').pk
post_data = {
"name": "John Doe",
# inline data
"generic_inline_admin-phonenumber-content_type-object_id-TOTAL_FORMS": "1",
"generic_inline_admin-phonenumber-content_type-object_id-INITIAL_FORMS": "0",
"generic_inline_admin-phonenumber-content_type-object_id-MAX_NUM_FORMS": "0",
"generic_inline_admin-phonenumber-content_type-object_id-0-id": "",
"generic_inline_admin-phonenumber-content_type-object_id-0-phone_number": "555-555-5555",
"generic_inline_admin-phonenumber-content_type-object_id-0-category": "%s" % category_id,
}
response = self.client.get(reverse('admin:generic_inline_admin_contact_add'))
self.assertEqual(response.status_code, 200)
response = self.client.post(reverse('admin:generic_inline_admin_contact_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_delete(self):
from .models import Contact
c = Contact.objects.create(name='foo')
PhoneNumber.objects.create(
object_id=c.id,
content_type=ContentType.objects.get_for_model(Contact),
phone_number="555-555-5555",
)
response = self.client.post(reverse('admin:generic_inline_admin_contact_delete', args=[c.pk]))
self.assertContains(response, 'Are you sure you want to delete')
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class NoInlineDeletionTest(SimpleTestCase):
def test_no_deletion(self):
inline = MediaPermanentInline(EpisodePermanent, admin_site)
fake_request = object()
formset = inline.get_formset(fake_request)
self.assertFalse(formset.can_delete)
class MockRequest:
pass
class MockSuperUser:
def has_perm(self, perm):
return True
request = MockRequest()
request.user = MockSuperUser()
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class GenericInlineModelAdminTest(SimpleTestCase):
def setUp(self):
self.site = AdminSite()
def test_get_formset_kwargs(self):
media_inline = MediaInline(Media, AdminSite())
# Create a formset with default arguments
formset = media_inline.get_formset(request)
self.assertEqual(formset.max_num, DEFAULT_MAX_NUM)
self.assertIs(formset.can_order, False)
# Create a formset with custom keyword arguments
formset = media_inline.get_formset(request, max_num=100, can_order=True)
self.assertEqual(formset.max_num, 100)
self.assertIs(formset.can_order, True)
def test_custom_form_meta_exclude_with_readonly(self):
"""
The custom ModelForm's `Meta.exclude` is respected when
used in conjunction with `GenericInlineModelAdmin.readonly_fields`
and when no `ModelAdmin.exclude` is defined.
"""
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ['url']
class MediaInline(GenericTabularInline):
readonly_fields = ['description']
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
MediaInline
]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['keywords', 'id', 'DELETE'])
def test_custom_form_meta_exclude(self):
"""
The custom ModelForm's `Meta.exclude` is respected by
`GenericInlineModelAdmin.get_formset`, and overridden if
`ModelAdmin.exclude` or `GenericInlineModelAdmin.exclude` are defined.
Refs #15907.
"""
# First with `GenericInlineModelAdmin` -----------------
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ['url']
class MediaInline(GenericTabularInline):
exclude = ['description']
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
MediaInline
]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['url', 'keywords', 'id', 'DELETE'])
# Then, only with `ModelForm` -----------------
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
MediaInline
]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['description', 'keywords', 'id', 'DELETE'])
def test_get_fieldsets(self):
# get_fieldsets is called when figuring out form fields.
# Refs #18681.
class MediaForm(ModelForm):
class Meta:
model = Media
fields = '__all__'
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
can_delete = False
def get_fieldsets(self, request, obj=None):
return [(None, {'fields': ['url', 'description']})]
ma = MediaInline(Media, self.site)
form = ma.get_formset(None).form
self.assertEqual(form._meta.fields, ['url', 'description'])
def test_get_formsets_with_inlines_returns_tuples(self):
"""
get_formsets_with_inlines() returns the correct tuples.
"""
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ['url']
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
class AlternateInline(GenericTabularInline):
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
AlternateInline, MediaInline
]
ma = EpisodeAdmin(Episode, self.site)
inlines = ma.get_inline_instances(request)
for (formset, inline), other_inline in zip(ma.get_formsets_with_inlines(request), inlines):
self.assertIsInstance(formset, other_inline.get_formset(request).__class__)
|
trondhindenes/ansible-modules-core | refs/heads/devel | cloud/amazon/ec2_snapshot.py | 53 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_snapshot
short_description: creates a snapshot from an existing volume
description:
- creates an EC2 snapshot from an existing EBS volume
version_added: "1.5"
options:
volume_id:
description:
- volume from which to take the snapshot
required: false
description:
description:
- description to be applied to the snapshot
required: false
instance_id:
description:
- instance that has the required volume to snapshot mounted
required: false
device_name:
description:
- device name of a mounted volume to be snapshotted
required: false
snapshot_tags:
description:
- a hash/dictionary of tags to add to the snapshot
required: false
version_added: "1.6"
wait:
description:
- wait for the snapshot to be ready
choices: ['yes', 'no']
required: false
default: yes
version_added: "1.5.1"
wait_timeout:
description:
- how long before wait gives up, in seconds
- specify 0 to wait forever
required: false
default: 0
version_added: "1.5.1"
state:
description:
        - whether to create or delete the snapshot
required: false
default: present
choices: ['absent', 'present']
version_added: "1.9"
snapshot_id:
description:
- snapshot id to remove
required: false
version_added: "1.9"
last_snapshot_min_age:
description:
      - If the volume's most recent snapshot was started less than 'last_snapshot_min_age' minutes ago, a new snapshot will not be created.
required: false
default: 0
version_added: "2.0"
author: "Will Thames (@willthames)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Simple snapshot of volume using volume_id
- ec2_snapshot:
volume_id: vol-abcdef12
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume mounted on device_name attached to instance_id
- ec2_snapshot:
instance_id: i-12345678
device_name: /dev/sdb1
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume with tagging
- ec2_snapshot:
instance_id: i-12345678
device_name: /dev/sdb1
snapshot_tags:
frequency: hourly
source: /data
# Remove a snapshot
- local_action:
module: ec2_snapshot
snapshot_id: snap-abcd1234
state: absent
# Create a snapshot only if the most recent one is older than 1 hour
- local_action:
module: ec2_snapshot
volume_id: vol-abcdef12
last_snapshot_min_age: 60
'''
import time
import datetime
try:
import boto.ec2
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
# Find the most recent snapshot
def _get_snapshot_starttime(snap):
return datetime.datetime.strptime(snap.start_time, '%Y-%m-%dT%H:%M:%S.000Z')
def _get_most_recent_snapshot(snapshots, max_snapshot_age_secs=None, now=None):
"""
Gets the most recently created snapshot and optionally filters the result
if the snapshot is too old
:param snapshots: list of snapshots to search
:param max_snapshot_age_secs: filter the result if its older than this
:param now: simulate time -- used for unit testing
:return:
"""
if len(snapshots) == 0:
return None
if not now:
now = datetime.datetime.utcnow()
youngest_snapshot = max(snapshots, key=_get_snapshot_starttime)
    # See if the snapshot is younger than the given max age
    snapshot_start = _get_snapshot_starttime(youngest_snapshot)
snapshot_age = now - snapshot_start
if max_snapshot_age_secs is not None:
if snapshot_age.total_seconds() > max_snapshot_age_secs:
return None
return youngest_snapshot
def _create_with_wait(snapshot, wait_timeout_secs, sleep_func=time.sleep):
"""
Wait for the snapshot to be created
:param snapshot:
:param wait_timeout_secs: fail this step after this many seconds
:param sleep_func:
:return:
"""
time_waited = 0
snapshot.update()
while snapshot.status != 'completed':
sleep_func(3)
snapshot.update()
time_waited += 3
if wait_timeout_secs and time_waited > wait_timeout_secs:
return False
return True
def create_snapshot(module, ec2, state=None, description=None, wait=None,
wait_timeout=None, volume_id=None, instance_id=None,
snapshot_id=None, device_name=None, snapshot_tags=None,
last_snapshot_min_age=None):
snapshot = None
changed = False
required = [volume_id, snapshot_id, instance_id]
if required.count(None) != len(required) - 1: # only 1 must be set
module.fail_json(msg='One and only one of volume_id or instance_id or snapshot_id must be specified')
    if (instance_id and not device_name) or (device_name and not instance_id):
module.fail_json(msg='Instance ID and device name must both be specified')
if instance_id:
try:
volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id, 'attachment.device': device_name})
except boto.exception.BotoServerError as e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
if not volumes:
module.fail_json(msg="Could not find volume with name %s attached to instance %s" % (device_name, instance_id))
volume_id = volumes[0].id
if state == 'absent':
if not snapshot_id:
module.fail_json(msg = 'snapshot_id must be set when state is absent')
try:
ec2.delete_snapshot(snapshot_id)
except boto.exception.BotoServerError as e:
# exception is raised if snapshot does not exist
if e.error_code == 'InvalidSnapshot.NotFound':
module.exit_json(changed=False)
else:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
# successful delete
module.exit_json(changed=True)
if last_snapshot_min_age > 0:
try:
current_snapshots = ec2.get_all_snapshots(filters={'volume_id': volume_id})
except boto.exception.BotoServerError as e:
module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))
last_snapshot_min_age = last_snapshot_min_age * 60 # Convert to seconds
snapshot = _get_most_recent_snapshot(current_snapshots,
max_snapshot_age_secs=last_snapshot_min_age)
try:
# Create a new snapshot if we didn't find an existing one to use
if snapshot is None:
snapshot = ec2.create_snapshot(volume_id, description=description)
changed = True
if wait:
if not _create_with_wait(snapshot, wait_timeout):
module.fail_json(msg='Timed out while creating snapshot.')
if snapshot_tags:
for k, v in snapshot_tags.items():
snapshot.add_tag(k, v)
except boto.exception.BotoServerError as e:
module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))
module.exit_json(changed=changed,
snapshot_id=snapshot.id,
volume_id=snapshot.volume_id,
volume_size=snapshot.volume_size,
tags=snapshot.tags.copy())
def create_snapshot_ansible_module():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
volume_id = dict(),
description = dict(),
instance_id = dict(),
snapshot_id = dict(),
device_name = dict(),
wait = dict(type='bool', default=True),
wait_timeout = dict(type='int', default=0),
last_snapshot_min_age = dict(type='int', default=0),
snapshot_tags = dict(type='dict', default=dict()),
state = dict(choices=['absent','present'], default='present'),
)
)
module = AnsibleModule(argument_spec=argument_spec)
return module
def main():
module = create_snapshot_ansible_module()
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
volume_id = module.params.get('volume_id')
snapshot_id = module.params.get('snapshot_id')
description = module.params.get('description')
instance_id = module.params.get('instance_id')
device_name = module.params.get('device_name')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
last_snapshot_min_age = module.params.get('last_snapshot_min_age')
snapshot_tags = module.params.get('snapshot_tags')
state = module.params.get('state')
ec2 = ec2_connect(module)
create_snapshot(
module=module,
state=state,
description=description,
wait=wait,
wait_timeout=wait_timeout,
ec2=ec2,
volume_id=volume_id,
instance_id=instance_id,
snapshot_id=snapshot_id,
device_name=device_name,
snapshot_tags=snapshot_tags,
last_snapshot_min_age=last_snapshot_min_age
)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
sienatime/python_koans | refs/heads/master | python3/koans/about_with_statements.py | 33 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutSandwichCode in the Ruby Koans
#
from runner.koan import *
import re # For regular expression string comparisons
class AboutWithStatements(Koan):
def count_lines(self, file_name):
try:
file = open(file_name)
try:
count = 0
for line in file.readlines():
count += 1
return count
finally:
file.close()
except IOError:
# should never happen
self.fail()
def test_counting_lines(self):
self.assertEqual(__, self.count_lines("example_file.txt"))
# ------------------------------------------------------------------
def find_line(self, file_name):
try:
file = open(file_name)
try:
for line in file.readlines():
match = re.search('e', line)
if match:
return line
finally:
file.close()
except IOError:
# should never happen
self.fail()
def test_finding_lines(self):
self.assertEqual(__, self.find_line("example_file.txt"))
## ------------------------------------------------------------------
## THINK ABOUT IT:
##
## The count_lines and find_line are similar, and yet different.
## They both follow the pattern of "sandwich code".
##
## Sandwich code is code that comes in three parts: (1) the top slice
## of bread, (2) the meat, and (3) the bottom slice of bread.
## The bread part of the sandwich almost always goes together, but
## the meat part changes all the time.
##
## Because the changing part of the sandwich code is in the middle,
## abstracting the top and bottom bread slices to a library can be
## difficult in many languages.
##
## (Aside for C++ programmers: The idiom of capturing allocated
## pointers in a smart pointer constructor is an attempt to deal with
## the problem of sandwich code for resource allocation.)
##
## Python solves the problem using Context Managers. Consider the
## following code:
##
class FileContextManager():
def __init__(self, file_name):
self._file_name = file_name
self._file = None
def __enter__(self):
self._file = open(self._file_name)
return self._file
def __exit__(self, cls, value, tb):
self._file.close()
# Now we write:
def count_lines2(self, file_name):
with self.FileContextManager(file_name) as file:
count = 0
for line in file.readlines():
count += 1
return count
def test_counting_lines2(self):
self.assertEqual(__, self.count_lines2("example_file.txt"))
# ------------------------------------------------------------------
def find_line2(self, file_name):
# Rewrite find_line using the Context Manager.
pass
def test_finding_lines2(self):
self.assertEqual(__, self.find_line2("example_file.txt"))
self.assertNotEqual(__, self.find_line2("example_file.txt"))
# ------------------------------------------------------------------
def count_lines3(self, file_name):
with open(file_name) as file:
count = 0
for line in file.readlines():
count += 1
return count
def test_open_already_has_its_own_built_in_context_manager(self):
self.assertEqual(__, self.count_lines3("example_file.txt"))
|
curtiszimmerman/orp | refs/heads/master | third-party/qemu-orp/tests/image-fuzzer/runner.py | 66 | #!/usr/bin/env python
# Tool for running fuzz tests
#
# Copyright (C) 2014 Maria Kustova <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import os
import signal
import subprocess
import random
import shutil
from itertools import count
import time
import getopt
import StringIO
import resource
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
print >>sys.stderr, \
"Warning: Module for JSON processing is not found.\n" \
"'--config' and '--command' options are not supported."
# Backing file sizes in MB
MAX_BACKING_FILE_SIZE = 10
MIN_BACKING_FILE_SIZE = 1
def multilog(msg, *output):
""" Write an object to all of specified file descriptors."""
for fd in output:
fd.write(msg)
fd.flush()
def str_signal(sig):
""" Convert a numeric value of a system signal to the string one
defined by the current operational system.
"""
for k, v in signal.__dict__.items():
if v == sig:
return k
def run_app(fd, q_args):
"""Start an application with specified arguments and return its exit code
or kill signal depending on the result of execution.
"""
class Alarm(Exception):
"""Exception for signal.alarm events."""
pass
def handler(*args):
"""Notify that an alarm event occurred."""
raise Alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(600)
term_signal = signal.SIGKILL
devnull = open('/dev/null', 'r+')
process = subprocess.Popen(q_args, stdin=devnull,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
try:
out, err = process.communicate()
signal.alarm(0)
fd.write(out)
fd.write(err)
fd.flush()
return process.returncode
except Alarm:
os.kill(process.pid, term_signal)
fd.write('The command was terminated by timeout.\n')
fd.flush()
return -term_signal
class TestException(Exception):
"""Exception for errors risen by TestEnv objects."""
pass
class TestEnv(object):
"""Test object.
    The class sets up the test environment, generates backing and test images
    and executes the application under test with the specified arguments and a
    test image provided.
All logs are collected.
The summary log will contain short descriptions and statuses of tests in
a run.
The test log will include application (e.g. 'qemu-img') logs besides info
sent to the summary log.
"""
def __init__(self, test_id, seed, work_dir, run_log,
cleanup=True, log_all=False):
"""Set test environment in a specified work directory.
Path to qemu-img and qemu-io will be retrieved from 'QEMU_IMG' and
'QEMU_IO' environment variables.
"""
if seed is not None:
self.seed = seed
else:
self.seed = str(random.randint(0, sys.maxint))
random.seed(self.seed)
self.init_path = os.getcwd()
self.work_dir = work_dir
self.current_dir = os.path.join(work_dir, 'test-' + test_id)
self.qemu_img = \
os.environ.get('QEMU_IMG', 'qemu-img').strip().split(' ')
self.qemu_io = os.environ.get('QEMU_IO', 'qemu-io').strip().split(' ')
self.commands = [['qemu-img', 'check', '-f', 'qcow2', '$test_img'],
['qemu-img', 'info', '-f', 'qcow2', '$test_img'],
['qemu-io', '$test_img', '-c', 'read $off $len'],
['qemu-io', '$test_img', '-c', 'write $off $len'],
['qemu-io', '$test_img', '-c',
'aio_read $off $len'],
['qemu-io', '$test_img', '-c',
'aio_write $off $len'],
['qemu-io', '$test_img', '-c', 'flush'],
['qemu-io', '$test_img', '-c',
'discard $off $len'],
['qemu-io', '$test_img', '-c',
'truncate $off']]
for fmt in ['raw', 'vmdk', 'vdi', 'qcow2', 'file', 'qed', 'vpc']:
self.commands.append(
['qemu-img', 'convert', '-f', 'qcow2', '-O', fmt,
'$test_img', 'converted_image.' + fmt])
try:
os.makedirs(self.current_dir)
except OSError, e:
print >>sys.stderr, \
"Error: The working directory '%s' cannot be used. Reason: %s"\
% (self.work_dir, e[1])
raise TestException
self.log = open(os.path.join(self.current_dir, "test.log"), "w")
self.parent_log = open(run_log, "a")
self.failed = False
self.cleanup = cleanup
self.log_all = log_all
def _create_backing_file(self):
"""Create a backing file in the current directory.
Return a tuple of a backing file name and format.
Format of a backing file is randomly chosen from all formats supported
by 'qemu-img create'.
"""
# All formats supported by the 'qemu-img create' command.
backing_file_fmt = random.choice(['raw', 'vmdk', 'vdi', 'qcow2',
'file', 'qed', 'vpc'])
backing_file_name = 'backing_img.' + backing_file_fmt
backing_file_size = random.randint(MIN_BACKING_FILE_SIZE,
MAX_BACKING_FILE_SIZE) * (1 << 20)
cmd = self.qemu_img + ['create', '-f', backing_file_fmt,
backing_file_name, str(backing_file_size)]
temp_log = StringIO.StringIO()
retcode = run_app(temp_log, cmd)
if retcode == 0:
temp_log.close()
return (backing_file_name, backing_file_fmt)
else:
multilog("Warning: The %s backing file was not created.\n\n"
% backing_file_fmt, sys.stderr, self.log, self.parent_log)
self.log.write("Log for the failure:\n" + temp_log.getvalue() +
'\n\n')
temp_log.close()
return (None, None)
def execute(self, input_commands=None, fuzz_config=None):
""" Execute a test.
The method creates backing and test images, runs test app and analyzes
its exit status. If the application was killed by a signal, the test
is marked as failed.
"""
if input_commands is None:
commands = self.commands
else:
commands = input_commands
os.chdir(self.current_dir)
backing_file_name, backing_file_fmt = self._create_backing_file()
img_size = image_generator.create_image(
'test.img', backing_file_name, backing_file_fmt, fuzz_config)
for item in commands:
shutil.copy('test.img', 'copy.img')
            # 'off' and 'len' are multiples of the sector size
sector_size = 512
start = random.randrange(0, img_size + 1, sector_size)
end = random.randrange(start, img_size + 1, sector_size)
if item[0] == 'qemu-img':
current_cmd = list(self.qemu_img)
elif item[0] == 'qemu-io':
current_cmd = list(self.qemu_io)
else:
multilog("Warning: test command '%s' is not defined.\n"
% item[0], sys.stderr, self.log, self.parent_log)
continue
# Replace all placeholders with their real values
for v in item[1:]:
c = (v
.replace('$test_img', 'copy.img')
.replace('$off', str(start))
.replace('$len', str(end - start)))
current_cmd.append(c)
# Log string with the test header
test_summary = "Seed: %s\nCommand: %s\nTest directory: %s\n" \
"Backing file: %s\n" \
% (self.seed, " ".join(current_cmd),
self.current_dir, backing_file_name)
temp_log = StringIO.StringIO()
try:
retcode = run_app(temp_log, current_cmd)
except OSError, e:
multilog("%sError: Start of '%s' failed. Reason: %s\n\n"
% (test_summary, os.path.basename(current_cmd[0]),
e[1]),
sys.stderr, self.log, self.parent_log)
raise TestException
if retcode < 0:
self.log.write(temp_log.getvalue())
multilog("%sFAIL: Test terminated by signal %s\n\n"
% (test_summary, str_signal(-retcode)),
sys.stderr, self.log, self.parent_log)
self.failed = True
else:
if self.log_all:
self.log.write(temp_log.getvalue())
multilog("%sPASS: Application exited with the code " \
"'%d'\n\n" % (test_summary, retcode),
sys.stdout, self.log, self.parent_log)
temp_log.close()
os.remove('copy.img')
def finish(self):
"""Restore the test environment after a test execution."""
self.log.close()
self.parent_log.close()
os.chdir(self.init_path)
if self.cleanup and not self.failed:
shutil.rmtree(self.current_dir)
if __name__ == '__main__':
def usage():
print """
Usage: runner.py [OPTION...] TEST_DIR IMG_GENERATOR
Set up test environment in TEST_DIR and run a test in it. A module for
test image generation should be specified via IMG_GENERATOR.
Example:
runner.py -c '[["qemu-img", "info", "$test_img"]]' /tmp/test qcow2
Optional arguments:
-h, --help display this help and exit
-d, --duration=NUMBER finish tests after NUMBER of seconds
-c, --command=JSON run tests for all commands specified in
the JSON array
-s, --seed=STRING seed for a test image generation,
by default will be generated randomly
--config=JSON take fuzzer configuration from the JSON
array
-k, --keep_passed don't remove folders of passed tests
-v, --verbose log information about passed tests
JSON:
    '--command' accepts a JSON array of commands. Each command represents
    an application under test with all its parameters as a list of strings,
e.g. ["qemu-io", "$test_img", "-c", "write $off $len"].
Supported application aliases: 'qemu-img' and 'qemu-io'.
Supported argument aliases: $test_img for the fuzzed image, $off
for an offset, $len for length.
Values for $off and $len will be generated based on the virtual disk
size of the fuzzed image.
    Paths to 'qemu-img' and 'qemu-io' are retrieved from 'QEMU_IMG' and
'QEMU_IO' environment variables.
'--config' accepts a JSON array of fields to be fuzzed, e.g.
'[["header"], ["header", "version"]]'.
Each of the list elements can consist of a complex image element only
as ["header"] or ["feature_name_table"] or an exact field as
["header", "version"]. In the first case random portion of the element
fields will be fuzzed, in the second one the specified field will be
fuzzed always.
If '--config' argument is specified, fields not listed in
the configuration array will not be fuzzed.
"""
def run_test(test_id, seed, work_dir, run_log, cleanup, log_all,
command, fuzz_config):
"""Setup environment for one test and execute this test."""
try:
test = TestEnv(test_id, seed, work_dir, run_log, cleanup,
log_all)
except TestException:
sys.exit(1)
# Python 2.4 doesn't support 'finally' and 'except' in the same 'try'
# block
try:
try:
test.execute(command, fuzz_config)
except TestException:
sys.exit(1)
finally:
test.finish()
def should_continue(duration, start_time):
"""Return True if a new test can be started and False otherwise."""
current_time = int(time.time())
return (duration is None) or (current_time - start_time < duration)
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'c:hs:kvd:',
['command=', 'help', 'seed=', 'config=',
'keep_passed', 'verbose', 'duration='])
except getopt.error, e:
print >>sys.stderr, \
"Error: %s\n\nTry 'runner.py --help' for more information" % e
sys.exit(1)
command = None
cleanup = True
log_all = False
seed = None
config = None
duration = None
for opt, arg in opts:
if opt in ('-h', '--help'):
usage()
sys.exit()
elif opt in ('-c', '--command'):
try:
command = json.loads(arg)
except (TypeError, ValueError, NameError), e:
print >>sys.stderr, \
"Error: JSON array of test commands cannot be loaded.\n" \
"Reason: %s" % e
sys.exit(1)
elif opt in ('-k', '--keep_passed'):
cleanup = False
elif opt in ('-v', '--verbose'):
log_all = True
elif opt in ('-s', '--seed'):
seed = arg
elif opt in ('-d', '--duration'):
duration = int(arg)
elif opt == '--config':
try:
config = json.loads(arg)
except (TypeError, ValueError, NameError), e:
print >>sys.stderr, \
"Error: JSON array with the fuzzer configuration cannot" \
" be loaded\nReason: %s" % e
sys.exit(1)
if not len(args) == 2:
print >>sys.stderr, \
"Expected two parameters\nTry 'runner.py --help'" \
" for more information."
sys.exit(1)
work_dir = os.path.realpath(args[0])
# run_log is created in 'main', because multiple tests are expected to
# log in it
run_log = os.path.join(work_dir, 'run.log')
# Add the path to the image generator module to sys.path
sys.path.append(os.path.realpath(os.path.dirname(args[1])))
# Remove a script extension from image generator module if any
generator_name = os.path.splitext(os.path.basename(args[1]))[0]
try:
image_generator = __import__(generator_name)
except ImportError, e:
print >>sys.stderr, \
"Error: The image generator '%s' cannot be imported.\n" \
"Reason: %s" % (generator_name, e)
sys.exit(1)
# Enable core dumps
resource.setrlimit(resource.RLIMIT_CORE, (-1, -1))
# If a seed is specified, only one test will be executed.
# Otherwise runner will terminate after a keyboard interruption
start_time = int(time.time())
test_id = count(1)
while should_continue(duration, start_time):
try:
run_test(str(test_id.next()), seed, work_dir, run_log, cleanup,
log_all, command, config)
except (KeyboardInterrupt, SystemExit):
sys.exit(1)
if seed is not None:
break
|
developersociety/django-glitter | refs/heads/master | glitter/assets/migrations/0002_image_category_field_optional.py | 2 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('glitter_assets', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='image',
name='category',
field=models.ForeignKey(null=True, to='glitter_assets.ImageCategory', blank=True),
),
]
|
weidel-p/nest-simulator | refs/heads/master | pynest/examples/clopath_synapse_spike_pairing.py | 13 | # -*- coding: utf-8 -*-
#
# clopath_synapse_spike_pairing.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Clopath Rule: Spike pairing experiment
----------------------------------------
This script simulates one ``aeif_psc_delta_clopath`` neuron that receives its
input through a Clopath connection [1]_. The synapse receives pairs of a pre-
and a postsynaptic spike that are separated by either 10 ms (pre before post)
or -10 ms (post before pre). The change of the synaptic weight is measured
after five such pairs. This experiment is repeated five times with different
repetition rates of the spike pairs: 10 Hz, 20 Hz, 30 Hz, 40 Hz, and 50 Hz.
References
~~~~~~~~~~~
.. [1] Clopath C, Büsing L, Vasilaki E, Gerstner W (2010). Connectivity reflects coding:
a model of voltage-based STDP with homeostasis.
Nature Neuroscience 13:3, 344--352
"""
import numpy as np
import matplotlib.pyplot as plt
import nest
##############################################################################
# First we specify the neuron parameters. To enable the voltage-dependent
# prefactor ``A_LTD(u_bar_bar)``, add ``A_LTD_const: False`` to the dictionary.
nrn_params = {'V_m': -70.6,
'E_L': -70.6,
'C_m': 281.0,
'theta_minus': -70.6,
'theta_plus': -45.3,
'A_LTD': 14.0e-5,
'A_LTP': 8.0e-5,
'tau_minus': 10.0,
'tau_plus': 7.0,
'delay_u_bars': 4.0,
'a': 4.0,
'b': 0.0805,
'V_reset': -70.6 + 21.0,
'V_clamp': 33.0,
't_clamp': 2.0,
't_ref': 0.0,
}
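# A minimal sketch of the voltage-dependent variant mentioned above, kept
# commented out so that the experiment below runs unchanged:
#
# nrn_params_voltage_dependent = dict(nrn_params, A_LTD_const=False)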
##############################################################################
# Hardcoded spike times of presynaptic spike generator
spike_times_pre = [
# Presynaptic spike before the postsynaptic
[20., 120., 220., 320., 420.],
[20., 70., 120., 170., 220.],
[20., 53.3, 86.7, 120., 153.3],
[20., 45., 70., 95., 120.],
[20., 40., 60., 80., 100.],
# Presynaptic spike after the postsynaptic
[120., 220., 320., 420., 520., 620.],
[70., 120., 170., 220., 270., 320.],
[53.3, 86.6, 120., 153.3, 186.6, 220.],
[45., 70., 95., 120., 145., 170.],
[40., 60., 80., 100., 120., 140.]]
##############################################################################
# Hardcoded spike times of postsynaptic spike generator
spike_times_post = [
[10., 110., 210., 310., 410.],
[10., 60., 110., 160., 210.],
[10., 43.3, 76.7, 110., 143.3],
[10., 35., 60., 85., 110.],
[10., 30., 50., 70., 90.],
[130., 230., 330., 430., 530., 630.],
[80., 130., 180., 230., 280., 330.],
[63.3, 96.6, 130., 163.3, 196.6, 230.],
[55., 80., 105., 130., 155., 180.],
[50., 70., 90., 110., 130., 150.]]
init_w = 0.5
syn_weights = []
resolution = 0.1
##############################################################################
# Loop over pairs of spike trains
for (s_t_pre, s_t_post) in zip(spike_times_pre, spike_times_post):
nest.ResetKernel()
nest.SetKernelStatus({"resolution": resolution})
# Create one neuron
nrn = nest.Create("aeif_psc_delta_clopath", 1, nrn_params)
# We need a parrot neuron since spike generators can only
# be connected with static connections
prrt_nrn = nest.Create("parrot_neuron", 1)
# Create and connect spike generators
spike_gen_pre = nest.Create("spike_generator", 1, {
"spike_times": s_t_pre})
nest.Connect(spike_gen_pre, prrt_nrn,
syn_spec={"delay": resolution})
spike_gen_post = nest.Create("spike_generator", 1, {
"spike_times": s_t_post})
nest.Connect(spike_gen_post, nrn, syn_spec={
"delay": resolution, "weight": 80.0})
# Create weight recorder
wr = nest.Create('weight_recorder', 1)
# Create Clopath connection with weight recorder
nest.CopyModel("clopath_synapse", "clopath_synapse_rec",
{"weight_recorder": wr})
syn_dict = {"synapse_model": "clopath_synapse_rec",
"weight": init_w, "delay": resolution}
nest.Connect(prrt_nrn, nrn, syn_spec=syn_dict)
# Simulation
simulation_time = (10.0 + max(s_t_pre[-1], s_t_post[-1]))
nest.Simulate(simulation_time)
# Extract and save synaptic weights
weights = wr.get("events", "weights")
syn_weights.append(weights[-1])
syn_weights = np.array(syn_weights)
# scaling of the weights so that they are comparable to [1]
syn_weights = 100.0*15.0*(syn_weights - init_w)/init_w + 100.0
# Plot results
fig1, axA = plt.subplots(1, sharex=False)
axA.plot([10., 20., 30., 40., 50.], syn_weights[5:], color='b', lw=2.5, ls='-',
label="pre-post pairing")
axA.plot([10., 20., 30., 40., 50.], syn_weights[:5], color='g', lw=2.5, ls='-',
label="post-pre pairing")
axA.set_ylabel("normalized weight change")
axA.set_xlabel("rho (Hz)")
axA.legend()
axA.set_title("synaptic weight")
plt.show()
|
kuri65536/python-for-android | refs/heads/master | python-build/python-libs/gdata/build/lib/gdata/tlslite/utils/Python_AES.py | 359 | """Pure-Python AES implementation."""
from cryptomath import *
from AES import *
from rijndael import rijndael
def new(key, mode, IV):
return Python_AES(key, mode, IV)
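# Usage sketch (illustrative, not part of the library): this class only
# implements CBC chaining, and mode 2 is assumed to be the CBC constant,
# matching PyCrypto's AES.MODE_CBC. The key and IV below are made-up
# 16-byte values.
#
# cipher = new('0123456789abcdef', 2, '\x00' * 16)
# ciphertext = cipher.encrypt('sixteen byte blk')  # multiple of 16 bytes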
class Python_AES(AES):
def __init__(self, key, mode, IV):
AES.__init__(self, key, mode, IV, "python")
self.rijndael = rijndael(key, 16)
self.IV = IV
def encrypt(self, plaintext):
AES.encrypt(self, plaintext)
plaintextBytes = stringToBytes(plaintext)
chainBytes = stringToBytes(self.IV)
#CBC Mode: For each block...
for x in range(len(plaintextBytes)/16):
#XOR with the chaining block
blockBytes = plaintextBytes[x*16 : (x*16)+16]
for y in range(16):
blockBytes[y] ^= chainBytes[y]
blockString = bytesToString(blockBytes)
#Encrypt it
encryptedBytes = stringToBytes(self.rijndael.encrypt(blockString))
#Overwrite the input with the output
for y in range(16):
plaintextBytes[(x*16)+y] = encryptedBytes[y]
#Set the next chaining block
chainBytes = encryptedBytes
self.IV = bytesToString(chainBytes)
return bytesToString(plaintextBytes)
def decrypt(self, ciphertext):
AES.decrypt(self, ciphertext)
ciphertextBytes = stringToBytes(ciphertext)
chainBytes = stringToBytes(self.IV)
#CBC Mode: For each block...
for x in range(len(ciphertextBytes)/16):
#Decrypt it
blockBytes = ciphertextBytes[x*16 : (x*16)+16]
blockString = bytesToString(blockBytes)
decryptedBytes = stringToBytes(self.rijndael.decrypt(blockString))
#XOR with the chaining block and overwrite the input with output
for y in range(16):
decryptedBytes[y] ^= chainBytes[y]
ciphertextBytes[(x*16)+y] = decryptedBytes[y]
#Set the next chaining block
chainBytes = blockBytes
self.IV = bytesToString(chainBytes)
return bytesToString(ciphertextBytes)
|
neilLasrado/erpnext | refs/heads/develop | erpnext/hr/doctype/leave_policy/leave_policy.py | 18 | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
class LeavePolicy(Document):
def validate(self):
if self.leave_policy_details:
for lp_detail in self.leave_policy_details:
max_leaves_allowed = frappe.db.get_value("Leave Type", lp_detail.leave_type, "max_leaves_allowed")
if max_leaves_allowed > 0 and lp_detail.annual_allocation > max_leaves_allowed:
frappe.throw(_("Maximum leave allowed in the leave type {0} is {1}").format(lp_detail.leave_type, max_leaves_allowed))
|
rrooij/youtube-dl | refs/heads/master | youtube_dl/extractor/cspan.py | 23 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
ExtractorError,
extract_attributes,
find_xpath_attr,
get_element_by_class,
int_or_none,
smuggle_url,
unescapeHTML,
)
from .senateisvp import SenateISVPIE
from .ustream import UstreamIE
class CSpanIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?c-span\.org/video/\?(?P<id>[0-9a-f]+)'
IE_DESC = 'C-SPAN'
_TESTS = [{
'url': 'http://www.c-span.org/video/?313572-1/HolderonV',
'md5': '94b29a4f131ff03d23471dd6f60b6a1d',
'info_dict': {
'id': '315139',
'title': 'Attorney General Eric Holder on Voting Rights Act Decision',
},
'playlist_mincount': 2,
'skip': 'Regularly fails on travis, for unknown reasons',
}, {
'url': 'http://www.c-span.org/video/?c4486943/cspan-international-health-care-models',
# md5 is unstable
'info_dict': {
'id': 'c4486943',
'ext': 'mp4',
'title': 'CSPAN - International Health Care Models',
'description': 'md5:7a985a2d595dba00af3d9c9f0783c967',
}
}, {
'url': 'http://www.c-span.org/video/?318608-1/gm-ignition-switch-recall',
'info_dict': {
'id': '342759',
'title': 'General Motors Ignition Switch Recall',
},
'playlist_mincount': 6,
}, {
# Video from senate.gov
'url': 'http://www.c-span.org/video/?104517-1/immigration-reforms-needed-protect-skilled-american-workers',
'info_dict': {
'id': 'judiciary031715',
'ext': 'mp4',
'title': 'Immigration Reforms Needed to Protect Skilled American Workers',
},
'params': {
'skip_download': True, # m3u8 downloads
}
}, {
# Ustream embedded video
'url': 'https://www.c-span.org/video/?114917-1/armed-services',
'info_dict': {
'id': '58428542',
'ext': 'flv',
'title': 'USHR07 Armed Services Committee',
'description': 'hsas00-2118-20150204-1000et-07\n\n\nUSHR07 Armed Services Committee',
'timestamp': 1423060374,
'upload_date': '20150204',
'uploader': 'HouseCommittee',
'uploader_id': '12987475',
},
}, {
# Audio Only
'url': 'https://www.c-span.org/video/?437336-1/judiciary-antitrust-competition-policy-consumer-rights',
'only_matching': True,
}]
BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/%s/%s_%s/index.html?videoId=%s'
def _real_extract(self, url):
video_id = self._match_id(url)
video_type = None
webpage = self._download_webpage(url, video_id)
ustream_url = UstreamIE._extract_url(webpage)
if ustream_url:
return self.url_result(ustream_url, UstreamIE.ie_key())
if '&vod' not in url:
bc = self._search_regex(
r"(<[^>]+id='brightcove-player-embed'[^>]+>)",
webpage, 'brightcove embed', default=None)
if bc:
bc_attr = extract_attributes(bc)
bc_url = self.BRIGHTCOVE_URL_TEMPLATE % (
bc_attr.get('data-bcaccountid', '3162030207001'),
bc_attr.get('data-noprebcplayerid', 'SyGGpuJy3g'),
bc_attr.get('data-newbcplayerid', 'default'),
bc_attr['data-bcid'])
return self.url_result(smuggle_url(bc_url, {'source_url': url}))
# We first look for clipid, because clipprog always appears before
patterns = [r'id=\'clip(%s)\'\s*value=\'([0-9]+)\'' % t for t in ('id', 'prog')]
results = list(filter(None, (re.search(p, webpage) for p in patterns)))
if results:
matches = results[0]
video_type, video_id = matches.groups()
video_type = 'clip' if video_type == 'id' else 'program'
else:
m = re.search(r'data-(?P<type>clip|prog)id=["\'](?P<id>\d+)', webpage)
if m:
video_id = m.group('id')
video_type = 'program' if m.group('type') == 'prog' else 'clip'
else:
senate_isvp_url = SenateISVPIE._search_iframe_url(webpage)
if senate_isvp_url:
title = self._og_search_title(webpage)
surl = smuggle_url(senate_isvp_url, {'force_title': title})
return self.url_result(surl, 'SenateISVP', video_id, title)
video_id = self._search_regex(
r'jwsetup\.clipprog\s*=\s*(\d+);',
webpage, 'jwsetup program id', default=None)
if video_id:
video_type = 'program'
if video_type is None or video_id is None:
error_message = get_element_by_class('VLplayer-error-message', webpage)
if error_message:
raise ExtractorError(error_message)
raise ExtractorError('unable to find video id and type')
def get_text_attr(d, attr):
return d.get(attr, {}).get('#text')
data = self._download_json(
'http://www.c-span.org/assets/player/ajax-player.php?os=android&html5=%s&id=%s' % (video_type, video_id),
video_id)['video']
if data['@status'] != 'Success':
raise ExtractorError('%s said: %s' % (self.IE_NAME, get_text_attr(data, 'error')), expected=True)
doc = self._download_xml(
'http://www.c-span.org/common/services/flashXml.php?%sid=%s' % (video_type, video_id),
video_id)
description = self._html_search_meta('description', webpage)
title = find_xpath_attr(doc, './/string', 'name', 'title').text
thumbnail = find_xpath_attr(doc, './/string', 'name', 'poster').text
files = data['files']
capfile = get_text_attr(data, 'capfile')
entries = []
for partnum, f in enumerate(files):
formats = []
for quality in f.get('qualities', []):
formats.append({
'format_id': '%s-%sp' % (get_text_attr(quality, 'bitrate'), get_text_attr(quality, 'height')),
'url': unescapeHTML(get_text_attr(quality, 'file')),
'height': int_or_none(get_text_attr(quality, 'height')),
'tbr': int_or_none(get_text_attr(quality, 'bitrate')),
})
if not formats:
path = unescapeHTML(get_text_attr(f, 'path'))
if not path:
continue
formats = self._extract_m3u8_formats(
path, video_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id='hls') if determine_ext(path) == 'm3u8' else [{'url': path, }]
self._sort_formats(formats)
entries.append({
'id': '%s_%d' % (video_id, partnum + 1),
'title': (
title if len(files) == 1 else
'%s part %d' % (title, partnum + 1)),
'formats': formats,
'description': description,
'thumbnail': thumbnail,
'duration': int_or_none(get_text_attr(f, 'length')),
'subtitles': {
'en': [{
'url': capfile,
'ext': determine_ext(capfile, 'dfxp')
}],
} if capfile else None,
})
if len(entries) == 1:
entry = dict(entries[0])
entry['id'] = 'c' + video_id if video_type == 'clip' else video_id
return entry
else:
return {
'_type': 'playlist',
'entries': entries,
'title': title,
'id': 'c' + video_id if video_type == 'clip' else video_id,
}
|
eugene1g/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/mux.py | 636 | # Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file provides classes and helper functions for multiplexing extension.
Specification:
http://tools.ietf.org/html/draft-ietf-hybi-websocket-multiplexing-06
"""
import collections
import copy
import email
import email.parser
import logging
import math
import struct
import threading
import traceback
from mod_pywebsocket import common
from mod_pywebsocket import handshake
from mod_pywebsocket import util
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_hybi import Frame
from mod_pywebsocket._stream_hybi import Stream
from mod_pywebsocket._stream_hybi import StreamOptions
from mod_pywebsocket._stream_hybi import create_binary_frame
from mod_pywebsocket._stream_hybi import create_closing_handshake_body
from mod_pywebsocket._stream_hybi import create_header
from mod_pywebsocket._stream_hybi import create_length_header
from mod_pywebsocket._stream_hybi import parse_frame
from mod_pywebsocket.handshake import hybi
_CONTROL_CHANNEL_ID = 0
_DEFAULT_CHANNEL_ID = 1
_MUX_OPCODE_ADD_CHANNEL_REQUEST = 0
_MUX_OPCODE_ADD_CHANNEL_RESPONSE = 1
_MUX_OPCODE_FLOW_CONTROL = 2
_MUX_OPCODE_DROP_CHANNEL = 3
_MUX_OPCODE_NEW_CHANNEL_SLOT = 4
_MAX_CHANNEL_ID = 2 ** 29 - 1
_INITIAL_NUMBER_OF_CHANNEL_SLOTS = 64
_INITIAL_QUOTA_FOR_CLIENT = 8 * 1024
_HANDSHAKE_ENCODING_IDENTITY = 0
_HANDSHAKE_ENCODING_DELTA = 1
# We need only these status code for now.
_HTTP_BAD_RESPONSE_MESSAGES = {
common.HTTP_STATUS_BAD_REQUEST: 'Bad Request',
}
# DropChannel reason code
# TODO(bashi): Define all reason code defined in -05 draft.
_DROP_CODE_NORMAL_CLOSURE = 1000
_DROP_CODE_INVALID_ENCAPSULATING_MESSAGE = 2001
_DROP_CODE_CHANNEL_ID_TRUNCATED = 2002
_DROP_CODE_ENCAPSULATED_FRAME_IS_TRUNCATED = 2003
_DROP_CODE_UNKNOWN_MUX_OPCODE = 2004
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK = 2005
_DROP_CODE_CHANNEL_ALREADY_EXISTS = 2006
_DROP_CODE_NEW_CHANNEL_SLOT_VIOLATION = 2007
_DROP_CODE_UNKNOWN_REQUEST_ENCODING = 2010
_DROP_CODE_SEND_QUOTA_VIOLATION = 3005
_DROP_CODE_SEND_QUOTA_OVERFLOW = 3006
_DROP_CODE_ACKNOWLEDGED = 3008
_DROP_CODE_BAD_FRAGMENTATION = 3009
class MuxUnexpectedException(Exception):
"""Exception in handling multiplexing extension."""
pass
# Temporary
class MuxNotImplementedException(Exception):
"""Raised when a flow enters unimplemented code path."""
pass
class LogicalConnectionClosedException(Exception):
"""Raised when logical connection is gracefully closed."""
pass
class PhysicalConnectionError(Exception):
"""Raised when there is a physical connection error."""
def __init__(self, drop_code, message=''):
super(PhysicalConnectionError, self).__init__(
'code=%d, message=%r' % (drop_code, message))
self.drop_code = drop_code
self.message = message
class LogicalChannelError(Exception):
"""Raised when there is a logical channel error."""
def __init__(self, channel_id, drop_code, message=''):
super(LogicalChannelError, self).__init__(
'channel_id=%d, code=%d, message=%r' % (
channel_id, drop_code, message))
self.channel_id = channel_id
self.drop_code = drop_code
self.message = message
def _encode_channel_id(channel_id):
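    # Variable-length channel id encoding: ids below 2^7 fit in one byte
    # (top bit 0), below 2^14 in two bytes (prefix 0b10), below 2^21 in
    # three bytes (prefix 0b110), and below 2^29 in four bytes
    # (prefix 0b111).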
if channel_id < 0:
raise ValueError('Channel id %d must not be negative' % channel_id)
if channel_id < 2 ** 7:
return chr(channel_id)
if channel_id < 2 ** 14:
return struct.pack('!H', 0x8000 + channel_id)
if channel_id < 2 ** 21:
first = chr(0xc0 + (channel_id >> 16))
return first + struct.pack('!H', channel_id & 0xffff)
if channel_id < 2 ** 29:
return struct.pack('!L', 0xe0000000 + channel_id)
raise ValueError('Channel id %d is too large' % channel_id)
def _encode_number(number):
return create_length_header(number, False)
def _create_add_channel_response(channel_id, encoded_handshake,
encoding=0, rejected=False):
if encoding != 0 and encoding != 1:
raise ValueError('Invalid encoding %d' % encoding)
first_byte = ((_MUX_OPCODE_ADD_CHANNEL_RESPONSE << 5) |
(rejected << 4) | encoding)
block = (chr(first_byte) +
_encode_channel_id(channel_id) +
_encode_number(len(encoded_handshake)) +
encoded_handshake)
return block
def _create_drop_channel(channel_id, code=None, message=''):
if len(message) > 0 and code is None:
raise ValueError('Code must be specified if message is specified')
first_byte = _MUX_OPCODE_DROP_CHANNEL << 5
block = chr(first_byte) + _encode_channel_id(channel_id)
if code is None:
block += _encode_number(0) # Reason size
else:
reason = struct.pack('!H', code) + message
reason_size = _encode_number(len(reason))
block += reason_size + reason
return block
def _create_flow_control(channel_id, replenished_quota):
first_byte = _MUX_OPCODE_FLOW_CONTROL << 5
block = (chr(first_byte) +
_encode_channel_id(channel_id) +
_encode_number(replenished_quota))
return block
def _create_new_channel_slot(slots, send_quota):
if slots < 0 or send_quota < 0:
raise ValueError('slots and send_quota must be non-negative.')
first_byte = _MUX_OPCODE_NEW_CHANNEL_SLOT << 5
block = (chr(first_byte) +
_encode_number(slots) +
_encode_number(send_quota))
return block
def _create_fallback_new_channel_slot():
first_byte = (_MUX_OPCODE_NEW_CHANNEL_SLOT << 5) | 1 # Set the F flag
block = (chr(first_byte) + _encode_number(0) + _encode_number(0))
return block
def _parse_request_text(request_text):
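    # Splits a serialized HTTP request such as
    #   'GET /chat HTTP/1.1\r\nHost: server.example.com\r\n\r\n'
    # into its Request-Line parts plus a parsed header object.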
request_line, header_lines = request_text.split('\r\n', 1)
words = request_line.split(' ')
if len(words) != 3:
raise ValueError('Bad Request-Line syntax %r' % request_line)
[command, path, version] = words
if version != 'HTTP/1.1':
raise ValueError('Bad request version %r' % version)
# email.parser.Parser() parses RFC 2822 (RFC 822) style headers.
# RFC 6455 refers RFC 2616 for handshake parsing, and RFC 2616 refers
# RFC 822.
headers = email.parser.Parser().parsestr(header_lines)
return command, path, version, headers
class _ControlBlock(object):
"""A structure that holds parsing result of multiplexing control block.
Control block specific attributes will be added by _MuxFramePayloadParser.
(e.g. encoded_handshake will be added for AddChannelRequest and
AddChannelResponse)
"""
def __init__(self, opcode):
self.opcode = opcode
class _MuxFramePayloadParser(object):
"""A class that parses multiplexed frame payload."""
def __init__(self, payload):
self._data = payload
self._read_position = 0
self._logger = util.get_class_logger(self)
def read_channel_id(self):
"""Reads channel id.
Raises:
ValueError: when the payload doesn't contain
valid channel id.
"""
remaining_length = len(self._data) - self._read_position
pos = self._read_position
if remaining_length == 0:
raise ValueError('Invalid channel id format')
channel_id = ord(self._data[pos])
channel_id_length = 1
if channel_id & 0xe0 == 0xe0:
if remaining_length < 4:
raise ValueError('Invalid channel id format')
channel_id = struct.unpack('!L',
self._data[pos:pos+4])[0] & 0x1fffffff
channel_id_length = 4
elif channel_id & 0xc0 == 0xc0:
if remaining_length < 3:
raise ValueError('Invalid channel id format')
channel_id = (((channel_id & 0x1f) << 16) +
struct.unpack('!H', self._data[pos+1:pos+3])[0])
channel_id_length = 3
elif channel_id & 0x80 == 0x80:
if remaining_length < 2:
raise ValueError('Invalid channel id format')
channel_id = struct.unpack('!H',
self._data[pos:pos+2])[0] & 0x3fff
channel_id_length = 2
self._read_position += channel_id_length
return channel_id
def read_inner_frame(self):
"""Reads an inner frame.
Raises:
PhysicalConnectionError: when the inner frame is invalid.
"""
if len(self._data) == self._read_position:
raise PhysicalConnectionError(
_DROP_CODE_ENCAPSULATED_FRAME_IS_TRUNCATED)
bits = ord(self._data[self._read_position])
self._read_position += 1
fin = (bits & 0x80) == 0x80
rsv1 = (bits & 0x40) == 0x40
rsv2 = (bits & 0x20) == 0x20
rsv3 = (bits & 0x10) == 0x10
opcode = bits & 0xf
payload = self.remaining_data()
# Consume rest of the message which is payload data of the original
# frame.
self._read_position = len(self._data)
return fin, rsv1, rsv2, rsv3, opcode, payload
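# The first byte parsed above reuses the base WebSocket frame layout
# (bit 7 FIN, bits 6-4 RSV1-3, bits 3-0 opcode); length and masking fields
# are absent because they belong to the encapsulating physical frame.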
def _read_number(self):
if self._read_position + 1 > len(self._data):
raise ValueError(
'Cannot read the first byte of number field')
number = ord(self._data[self._read_position])
if number & 0x80 == 0x80:
raise ValueError(
'The most significant bit of the first byte of number should '
'be unset')
self._read_position += 1
pos = self._read_position
if number == 127:
if pos + 8 > len(self._data):
raise ValueError('Invalid number field')
self._read_position += 8
number = struct.unpack('!Q', self._data[pos:pos+8])[0]
if number > 0x7FFFFFFFFFFFFFFF:
raise ValueError('Encoded number(%d) >= 2^63' % number)
if number <= 0xFFFF:
raise ValueError(
'%d should not be encoded by 9 bytes encoding' % number)
return number
if number == 126:
if pos + 2 > len(self._data):
raise ValueError('Invalid number field')
self._read_position += 2
number = struct.unpack('!H', self._data[pos:pos+2])[0]
if number <= 125:
raise ValueError(
'%d should not be encoded by 3 bytes encoding' % number)
return number
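# The number encoding above mirrors the WebSocket payload-length scheme with
# the mask bit reserved (sketch):
#   '\x05'         -> 5    (values 0-125 fit in one byte)
#   '\x7e\x01\x00' -> 256  (126 announces a 2-byte '!H' field)
#   '\x7f' + 8B    -> up to 2**63 - 1 (127 announces an 8-byte '!Q' field)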
def _read_size_and_contents(self):
"""Reads data that consists of followings:
- the size of the contents encoded the same way as payload length
of the WebSocket Protocol with 1 bit padding at the head.
- the contents.
"""
try:
size = self._read_number()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
str(e))
pos = self._read_position
if pos + size > len(self._data):
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Cannot read %d bytes data' % size)
self._read_position += size
return self._data[pos:pos+size]
def _read_add_channel_request(self, first_byte, control_block):
reserved = (first_byte >> 2) & 0x7
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
# Invalid encoding will be handled by MuxHandler.
encoding = first_byte & 0x3
try:
control_block.channel_id = self.read_channel_id()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK)
control_block.encoding = encoding
encoded_handshake = self._read_size_and_contents()
control_block.encoded_handshake = encoded_handshake
return control_block
def _read_add_channel_response(self, first_byte, control_block):
reserved = (first_byte >> 2) & 0x3
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
control_block.accepted = (first_byte >> 4) & 1
control_block.encoding = first_byte & 0x3
try:
control_block.channel_id = self.read_channel_id()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK)
control_block.encoded_handshake = self._read_size_and_contents()
return control_block
def _read_flow_control(self, first_byte, control_block):
reserved = first_byte & 0x1f
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
try:
control_block.channel_id = self.read_channel_id()
control_block.send_quota = self._read_number()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
str(e))
return control_block
def _read_drop_channel(self, first_byte, control_block):
reserved = first_byte & 0x1f
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
try:
control_block.channel_id = self.read_channel_id()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK)
reason = self._read_size_and_contents()
if len(reason) == 0:
control_block.drop_code = None
control_block.drop_message = ''
elif len(reason) >= 2:
control_block.drop_code = struct.unpack('!H', reason[:2])[0]
control_block.drop_message = reason[2:]
else:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Received DropChannel that contains only a 1-byte reason')
return control_block
def _read_new_channel_slot(self, first_byte, control_block):
reserved = first_byte & 0x1e
if reserved != 0:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Reserved bits must be unset')
control_block.fallback = first_byte & 1
try:
control_block.slots = self._read_number()
control_block.send_quota = self._read_number()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
str(e))
return control_block
def read_control_blocks(self):
"""Reads control block(s).
Raises:
PhysicalConnectionError: when the payload contains invalid control
block(s).
StopIteration: when no control blocks are left.
"""
while self._read_position < len(self._data):
first_byte = ord(self._data[self._read_position])
self._read_position += 1
opcode = (first_byte >> 5) & 0x7
control_block = _ControlBlock(opcode=opcode)
if opcode == _MUX_OPCODE_ADD_CHANNEL_REQUEST:
yield self._read_add_channel_request(first_byte, control_block)
elif opcode == _MUX_OPCODE_ADD_CHANNEL_RESPONSE:
yield self._read_add_channel_response(
first_byte, control_block)
elif opcode == _MUX_OPCODE_FLOW_CONTROL:
yield self._read_flow_control(first_byte, control_block)
elif opcode == _MUX_OPCODE_DROP_CHANNEL:
yield self._read_drop_channel(first_byte, control_block)
elif opcode == _MUX_OPCODE_NEW_CHANNEL_SLOT:
yield self._read_new_channel_slot(first_byte, control_block)
else:
raise PhysicalConnectionError(
_DROP_CODE_UNKNOWN_MUX_OPCODE,
'Invalid opcode %d' % opcode)
assert self._read_position == len(self._data)
raise StopIteration
def remaining_data(self):
"""Returns remaining data."""
return self._data[self._read_position:]
class _LogicalRequest(object):
"""Mimics mod_python request."""
def __init__(self, channel_id, command, path, protocol, headers,
connection):
"""Constructs an instance.
Args:
channel_id: the channel id of the logical channel.
command: HTTP request command.
path: HTTP request path.
protocol: HTTP protocol version (e.g. 'HTTP/1.1').
headers: HTTP headers.
connection: _LogicalConnection instance.
"""
self.channel_id = channel_id
self.method = command
self.uri = path
self.protocol = protocol
self.headers_in = headers
self.connection = connection
self.server_terminated = False
self.client_terminated = False
def is_https(self):
"""Mimics request.is_https(). Returns False because this method is
used only by old protocols (hixie and hybi00).
"""
return False
class _LogicalConnection(object):
"""Mimics mod_python mp_conn."""
# For details, see the comment of set_read_state().
STATE_ACTIVE = 1
STATE_GRACEFULLY_CLOSED = 2
STATE_TERMINATED = 3
def __init__(self, mux_handler, channel_id):
"""Constructs an instance.
Args:
mux_handler: _MuxHandler instance.
channel_id: channel id of this connection.
"""
self._mux_handler = mux_handler
self._channel_id = channel_id
self._incoming_data = ''
# - Protects _waiting_write_completion
# - Signals the thread waiting for completion of write by mux handler
self._write_condition = threading.Condition()
self._waiting_write_completion = False
self._read_condition = threading.Condition()
self._read_state = self.STATE_ACTIVE
def get_local_addr(self):
"""Getter to mimic mp_conn.local_addr."""
return self._mux_handler.physical_connection.get_local_addr()
local_addr = property(get_local_addr)
def get_remote_addr(self):
"""Getter to mimic mp_conn.remote_addr."""
return self._mux_handler.physical_connection.get_remote_addr()
remote_addr = property(get_remote_addr)
def get_memorized_lines(self):
"""Gets memorized lines. Not supported."""
raise MuxUnexpectedException('_LogicalConnection does not support '
'get_memorized_lines')
def write(self, data):
"""Writes data. mux_handler sends data asynchronously. The caller will
be suspended until write done.
Args:
data: data to be written.
Raises:
MuxUnexpectedException: when called before finishing the previous
write.
"""
try:
self._write_condition.acquire()
if self._waiting_write_completion:
raise MuxUnexpectedException(
'Logical connection %d is already waiting the completion '
'of write' % self._channel_id)
self._waiting_write_completion = True
self._mux_handler.send_data(self._channel_id, data)
self._write_condition.wait()
# TODO(tyoshino): Raise an exception if woke up by on_writer_done.
finally:
self._write_condition.release()
def write_control_data(self, data):
"""Writes data via the control channel. Don't wait finishing write
because this method can be called by mux dispatcher.
Args:
data: data to be written.
"""
self._mux_handler.send_control_data(data)
def on_write_data_done(self):
"""Called when sending data is completed."""
try:
self._write_condition.acquire()
if not self._waiting_write_completion:
raise MuxUnexpectedException(
'Invalid call of on_write_data_done for logical '
'connection %d' % self._channel_id)
self._waiting_write_completion = False
self._write_condition.notify()
finally:
self._write_condition.release()
def on_writer_done(self):
"""Called by the mux handler when the writer thread has finished."""
try:
self._write_condition.acquire()
self._waiting_write_completion = False
self._write_condition.notify()
finally:
self._write_condition.release()
def append_frame_data(self, frame_data):
"""Appends incoming frame data. Called when mux_handler dispatches
frame data to the corresponding application.
Args:
frame_data: incoming frame data.
"""
self._read_condition.acquire()
self._incoming_data += frame_data
self._read_condition.notify()
self._read_condition.release()
def read(self, length):
"""Reads data. Blocks until enough data has arrived via physical
connection.
Args:
length: length of data to be read.
Raises:
LogicalConnectionClosedException: when closing handshake for this
logical channel has been received.
ConnectionTerminatedException: when the physical connection has
closed, or an error is caused on the reader thread.
"""
self._read_condition.acquire()
while (self._read_state == self.STATE_ACTIVE and
len(self._incoming_data) < length):
self._read_condition.wait()
try:
if self._read_state == self.STATE_GRACEFULLY_CLOSED:
raise LogicalConnectionClosedException(
'Logical channel %d has closed.' % self._channel_id)
elif self._read_state == self.STATE_TERMINATED:
raise ConnectionTerminatedException(
'Receiving %d bytes failed. Logical channel (%d) closed' %
(length, self._channel_id))
value = self._incoming_data[:length]
self._incoming_data = self._incoming_data[length:]
finally:
self._read_condition.release()
return value
def set_read_state(self, new_state):
"""Sets the state of this connection. Called when an event for this
connection has occurred.
Args:
new_state: state to be set. new_state must be one of the following:
- STATE_GRACEFULLY_CLOSED: when closing handshake for this
connection has been received.
- STATE_TERMINATED: when the physical connection has closed or
DropChannel of this connection has received.
"""
self._read_condition.acquire()
self._read_state = new_state
self._read_condition.notify()
self._read_condition.release()
class _InnerMessage(object):
"""Holds the result of _InnerMessageBuilder.build().
"""
def __init__(self, opcode, payload):
self.opcode = opcode
self.payload = payload
class _InnerMessageBuilder(object):
"""A class that holds the context of inner message fragmentation and
builds a message from fragmented inner frame(s).
"""
def __init__(self):
self._control_opcode = None
self._pending_control_fragments = []
self._message_opcode = None
self._pending_message_fragments = []
self._frame_handler = self._handle_first
def _handle_first(self, frame):
if frame.opcode == common.OPCODE_CONTINUATION:
raise InvalidFrameException('Sending invalid continuation opcode')
if common.is_control_opcode(frame.opcode):
return self._process_first_fragmented_control(frame)
else:
return self._process_first_fragmented_message(frame)
def _process_first_fragmented_control(self, frame):
self._control_opcode = frame.opcode
self._pending_control_fragments.append(frame.payload)
if not frame.fin:
self._frame_handler = self._handle_fragmented_control
return None
return self._reassemble_fragmented_control()
def _process_first_fragmented_message(self, frame):
self._message_opcode = frame.opcode
self._pending_message_fragments.append(frame.payload)
if not frame.fin:
self._frame_handler = self._handle_fragmented_message
return None
return self._reassemble_fragmented_message()
def _handle_fragmented_control(self, frame):
if frame.opcode != common.OPCODE_CONTINUATION:
raise InvalidFrameException(
'Sending invalid opcode %d while sending fragmented control '
'message' % frame.opcode)
self._pending_control_fragments.append(frame.payload)
if not frame.fin:
return None
return self._reassemble_fragmented_control()
def _reassemble_fragmented_control(self):
opcode = self._control_opcode
payload = ''.join(self._pending_control_fragments)
self._control_opcode = None
self._pending_control_fragments = []
if self._message_opcode is not None:
self._frame_handler = self._handle_fragmented_message
else:
self._frame_handler = self._handle_first
return _InnerMessage(opcode, payload)
def _handle_fragmented_message(self, frame):
# Sender can interleave a control message while sending fragmented
# messages.
if common.is_control_opcode(frame.opcode):
if self._control_opcode is not None:
raise MuxUnexpectedException(
'Should not reach here (bug in builder)')
return self._process_first_fragmented_control(frame)
if frame.opcode != common.OPCODE_CONTINUATION:
raise InvalidFrameException(
'Sending invalid opcode %d while sending fragmented message' %
frame.opcode)
self._pending_message_fragments.append(frame.payload)
if not frame.fin:
return None
return self._reassemble_fragmented_message()
def _reassemble_fragmented_message(self):
opcode = self._message_opcode
payload = ''.join(self._pending_message_fragments)
self._message_opcode = None
self._pending_message_fragments = []
self._frame_handler = self._handle_first
return _InnerMessage(opcode, payload)
def build(self, frame):
"""Build an inner message. Returns an _InnerMessage instance when
the given frame is the last fragmented frame. Returns None otherwise.
Args:
frame: an inner frame.
Raises:
InvalidFrameException: when an invalid opcode is received (e.g. a
non-continuation data opcode arrives although the fin flag of
the previous inner frame was not set).
"""
return self._frame_handler(frame)
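# Typical use (a sketch; `inner_frames` and `handle` are hypothetical names):
#   builder = _InnerMessageBuilder()
#   for frame in inner_frames:
#       message = builder.build(frame)
#       if message is not None:
#           handle(message.opcode, message.payload)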
class _LogicalStream(Stream):
"""Mimics the Stream class. This class interprets multiplexed WebSocket
frames.
"""
def __init__(self, request, stream_options, send_quota, receive_quota):
"""Constructs an instance.
Args:
request: _LogicalRequest instance.
stream_options: StreamOptions instance.
send_quota: Initial send quota.
receive_quota: Initial receive quota.
"""
# Physical stream is responsible for masking.
stream_options.unmask_receive = False
Stream.__init__(self, request, stream_options)
self._send_closed = False
self._send_quota = send_quota
# - Protects _send_closed and _send_quota
# - Signals the thread waiting for send quota replenished
self._send_condition = threading.Condition()
# The opcode of the first frame in messages.
self._message_opcode = common.OPCODE_TEXT
# True when the last message was fragmented.
self._last_message_was_fragmented = False
self._receive_quota = receive_quota
self._write_inner_frame_semaphore = threading.Semaphore()
self._inner_message_builder = _InnerMessageBuilder()
def _create_inner_frame(self, opcode, payload, end=True):
frame = Frame(fin=end, opcode=opcode, payload=payload)
for frame_filter in self._options.outgoing_frame_filters:
frame_filter.filter(frame)
if len(payload) != len(frame.payload):
raise MuxUnexpectedException(
'Mux extension must not be used after extensions which change '
'frame boundary')
first_byte = ((frame.fin << 7) | (frame.rsv1 << 6) |
(frame.rsv2 << 5) | (frame.rsv3 << 4) | frame.opcode)
return chr(first_byte) + frame.payload
def _write_inner_frame(self, opcode, payload, end=True):
payload_length = len(payload)
write_position = 0
try:
# An inner frame will be fragmented if there is not enough send
# quota. This semaphore ensures that fragmented inner frames are
# sent in order on the logical channel.
# Note that frames that come from other logical channels or
# multiplexing control blocks can be inserted between fragmented
# inner frames on the physical channel.
self._write_inner_frame_semaphore.acquire()
# Consume an octet quota when this is the first fragmented frame.
if opcode != common.OPCODE_CONTINUATION:
try:
self._send_condition.acquire()
while (not self._send_closed) and self._send_quota == 0:
self._send_condition.wait()
if self._send_closed:
raise BadOperationException(
'Logical connection %d is closed' %
self._request.channel_id)
self._send_quota -= 1
finally:
self._send_condition.release()
while write_position < payload_length:
try:
self._send_condition.acquire()
while (not self._send_closed) and self._send_quota == 0:
self._logger.debug(
'No quota. Waiting FlowControl message for %d.' %
self._request.channel_id)
self._send_condition.wait()
if self._send_closed:
raise BadOperationException(
'Logical connection %d is closed' %
self._request.channel_id)
remaining = payload_length - write_position
write_length = min(self._send_quota, remaining)
inner_frame_end = (
end and
(write_position + write_length == payload_length))
inner_frame = self._create_inner_frame(
opcode,
payload[write_position:write_position+write_length],
inner_frame_end)
self._send_quota -= write_length
self._logger.debug('Consumed quota=%d, remaining=%d' %
(write_length, self._send_quota))
finally:
self._send_condition.release()
# Writing data will block the worker so we need to release
# _send_condition before writing.
self._logger.debug('Sending inner frame: %r' % inner_frame)
self._request.connection.write(inner_frame)
write_position += write_length
opcode = common.OPCODE_CONTINUATION
except ValueError, e:
raise BadOperationException(e)
finally:
self._write_inner_frame_semaphore.release()
def replenish_send_quota(self, send_quota):
"""Replenish send quota."""
try:
self._send_condition.acquire()
if self._send_quota + send_quota > 0x7FFFFFFFFFFFFFFF:
self._send_quota = 0
raise LogicalChannelError(
self._request.channel_id, _DROP_CODE_SEND_QUOTA_OVERFLOW)
self._send_quota += send_quota
self._logger.debug('Replenished send quota for channel id %d: %d' %
(self._request.channel_id, self._send_quota))
finally:
self._send_condition.notify()
self._send_condition.release()
def consume_receive_quota(self, amount):
"""Consumes receive quota. Returns False on failure."""
if self._receive_quota < amount:
self._logger.debug('Violate quota on channel id %d: %d < %d' %
(self._request.channel_id,
self._receive_quota, amount))
return False
self._receive_quota -= amount
return True
def send_message(self, message, end=True, binary=False):
"""Override Stream.send_message."""
if self._request.server_terminated:
raise BadOperationException(
'Requested send_message after sending out a closing handshake')
if binary and isinstance(message, unicode):
raise BadOperationException(
'Message for binary frame must be instance of str')
if binary:
opcode = common.OPCODE_BINARY
else:
opcode = common.OPCODE_TEXT
message = message.encode('utf-8')
for message_filter in self._options.outgoing_message_filters:
message = message_filter.filter(message, end, binary)
if self._last_message_was_fragmented:
if opcode != self._message_opcode:
raise BadOperationException('Message types are different in '
'frames for the same message')
opcode = common.OPCODE_CONTINUATION
else:
self._message_opcode = opcode
self._write_inner_frame(opcode, message, end)
self._last_message_was_fragmented = not end
def _receive_frame(self):
"""Overrides Stream._receive_frame.
In addition to call Stream._receive_frame, this method adds the amount
of payload to receiving quota and sends FlowControl to the client.
We need to do it here because Stream.receive_message() handles
control frames internally.
"""
opcode, payload, fin, rsv1, rsv2, rsv3 = Stream._receive_frame(self)
amount = len(payload)
# Replenish one extra octet when receiving the first fragmented frame.
if opcode != common.OPCODE_CONTINUATION:
amount += 1
self._receive_quota += amount
frame_data = _create_flow_control(self._request.channel_id,
amount)
self._logger.debug('Sending flow control for %d, replenished=%d' %
(self._request.channel_id, amount))
self._request.connection.write_control_data(frame_data)
return opcode, payload, fin, rsv1, rsv2, rsv3
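# Worked example (sketch): a first, non-continuation frame carrying 100
# payload bytes replenishes 101 octets, matching the one extra octet the
# peer is expected to have consumed for that frame (see the mirror-image
# accounting in _write_inner_frame above), so both quota ledgers stay in
# sync.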
def _get_message_from_frame(self, frame):
"""Overrides Stream._get_message_from_frame.
"""
try:
inner_message = self._inner_message_builder.build(frame)
except InvalidFrameException:
raise LogicalChannelError(
self._request.channel_id, _DROP_CODE_BAD_FRAGMENTATION)
if inner_message is None:
return None
self._original_opcode = inner_message.opcode
return inner_message.payload
def receive_message(self):
"""Overrides Stream.receive_message."""
# Just call Stream.receive_message(), but catch
# LogicalConnectionClosedException, which is raised when the logical
# connection has closed gracefully.
try:
return Stream.receive_message(self)
except LogicalConnectionClosedException, e:
self._logger.debug('%s', e)
return None
def _send_closing_handshake(self, code, reason):
"""Overrides Stream._send_closing_handshake."""
body = create_closing_handshake_body(code, reason)
self._logger.debug('Sending closing handshake for %d: (%r, %r)' %
(self._request.channel_id, code, reason))
self._write_inner_frame(common.OPCODE_CLOSE, body, end=True)
self._request.server_terminated = True
def send_ping(self, body=''):
"""Overrides Stream.send_ping"""
self._logger.debug('Sending ping on logical channel %d: %r' %
(self._request.channel_id, body))
self._write_inner_frame(common.OPCODE_PING, body, end=True)
self._ping_queue.append(body)
def _send_pong(self, body):
"""Overrides Stream._send_pong"""
self._logger.debug('Sending pong on logical channel %d: %r' %
(self._request.channel_id, body))
self._write_inner_frame(common.OPCODE_PONG, body, end=True)
def close_connection(self, code=common.STATUS_NORMAL_CLOSURE, reason=''):
"""Overrides Stream.close_connection."""
# TODO(bashi): Implement
self._logger.debug('Closing logical connection %d' %
self._request.channel_id)
self._request.server_terminated = True
def stop_sending(self):
"""Stops accepting new send operation (_write_inner_frame)."""
self._send_condition.acquire()
self._send_closed = True
self._send_condition.notify()
self._send_condition.release()
class _OutgoingData(object):
"""A structure that holds data to be sent via physical connection and
origin of the data.
"""
def __init__(self, channel_id, data):
self.channel_id = channel_id
self.data = data
class _PhysicalConnectionWriter(threading.Thread):
"""A thread that is responsible for writing data to physical connection.
TODO(bashi): Make sure there is no thread-safety problem when the reader
thread reads data from the same socket at the same time.
"""
def __init__(self, mux_handler):
"""Constructs an instance.
Args:
mux_handler: _MuxHandler instance.
"""
threading.Thread.__init__(self)
self._logger = util.get_class_logger(self)
self._mux_handler = mux_handler
self.setDaemon(True)
# When set, make this thread stop accepting new data, flush pending
# data and exit.
self._stop_requested = False
# The close code of the physical connection.
self._close_code = common.STATUS_NORMAL_CLOSURE
# Deque for passing write data. It's protected by _deque_condition
# until _stop_requested is set.
self._deque = collections.deque()
# - Protects _deque, _stop_requested and _close_code
# - Signals threads waiting for them to be available
self._deque_condition = threading.Condition()
def put_outgoing_data(self, data):
"""Puts outgoing data.
Args:
data: _OutgoingData instance.
Raises:
BadOperationException: when the thread has been requested to
terminate.
"""
try:
self._deque_condition.acquire()
if self._stop_requested:
raise BadOperationException('Cannot write data anymore')
self._deque.append(data)
self._deque_condition.notify()
finally:
self._deque_condition.release()
def _write_data(self, outgoing_data):
message = (_encode_channel_id(outgoing_data.channel_id) +
outgoing_data.data)
try:
self._mux_handler.physical_stream.send_message(
message=message, end=True, binary=True)
except Exception, e:
util.prepend_message_to_exception(
'Failed to send message to %r: ' %
(self._mux_handler.physical_connection.remote_addr,), e)
raise
# TODO(bashi): It would be better to block the thread that sends
# control data as well.
if outgoing_data.channel_id != _CONTROL_CHANNEL_ID:
self._mux_handler.notify_write_data_done(outgoing_data.channel_id)
def run(self):
try:
self._deque_condition.acquire()
while not self._stop_requested:
if len(self._deque) == 0:
self._deque_condition.wait()
continue
outgoing_data = self._deque.popleft()
self._deque_condition.release()
self._write_data(outgoing_data)
self._deque_condition.acquire()
# Flush deque.
#
# At this point, self._deque_condition is always acquired.
try:
while len(self._deque) > 0:
outgoing_data = self._deque.popleft()
self._write_data(outgoing_data)
finally:
self._deque_condition.release()
# Close physical connection.
try:
# Don't wait for the response here. The response will be read
# by the reader thread.
self._mux_handler.physical_stream.close_connection(
self._close_code, wait_response=False)
except Exception, e:
util.prepend_message_to_exception(
'Failed to close the physical connection: %r' % e)
raise
finally:
self._mux_handler.notify_writer_done()
def stop(self, close_code=common.STATUS_NORMAL_CLOSURE):
"""Stops the writer thread."""
self._deque_condition.acquire()
self._stop_requested = True
self._close_code = close_code
self._deque_condition.notify()
self._deque_condition.release()
class _PhysicalConnectionReader(threading.Thread):
"""A thread that is responsible for reading data from physical connection.
"""
def __init__(self, mux_handler):
"""Constructs an instance.
Args:
mux_handler: _MuxHandler instance.
"""
threading.Thread.__init__(self)
self._logger = util.get_class_logger(self)
self._mux_handler = mux_handler
self.setDaemon(True)
def run(self):
while True:
try:
physical_stream = self._mux_handler.physical_stream
message = physical_stream.receive_message()
if message is None:
break
# The code below runs only when a data message is received.
opcode = physical_stream.get_last_received_opcode()
if opcode != common.OPCODE_BINARY:
self._mux_handler.fail_physical_connection(
_DROP_CODE_INVALID_ENCAPSULATING_MESSAGE,
'Received a text message on physical connection')
break
except ConnectionTerminatedException, e:
self._logger.debug('%s', e)
break
try:
self._mux_handler.dispatch_message(message)
except PhysicalConnectionError, e:
self._mux_handler.fail_physical_connection(
e.drop_code, e.message)
break
except LogicalChannelError, e:
self._mux_handler.fail_logical_channel(
e.channel_id, e.drop_code, e.message)
except Exception, e:
self._logger.debug(traceback.format_exc())
break
self._mux_handler.notify_reader_done()
class _Worker(threading.Thread):
"""A thread that is responsible for running the corresponding application
handler.
"""
def __init__(self, mux_handler, request):
"""Constructs an instance.
Args:
mux_handler: _MuxHandler instance.
request: _LogicalRequest instance.
"""
threading.Thread.__init__(self)
self._logger = util.get_class_logger(self)
self._mux_handler = mux_handler
self._request = request
self.setDaemon(True)
def run(self):
self._logger.debug('Logical channel worker started. (id=%d)' %
self._request.channel_id)
try:
# Non-critical exceptions will be handled by dispatcher.
self._mux_handler.dispatcher.transfer_data(self._request)
except LogicalChannelError, e:
self._mux_handler.fail_logical_channel(
e.channel_id, e.drop_code, e.message)
finally:
self._mux_handler.notify_worker_done(self._request.channel_id)
class _MuxHandshaker(hybi.Handshaker):
"""Opening handshake processor for multiplexing."""
_DUMMY_WEBSOCKET_KEY = 'dGhlIHNhbXBsZSBub25jZQ=='
def __init__(self, request, dispatcher, send_quota, receive_quota):
"""Constructs an instance.
Args:
request: _LogicalRequest instance.
dispatcher: Dispatcher instance (dispatch.Dispatcher).
send_quota: Initial send quota.
receive_quota: Initial receive quota.
"""
hybi.Handshaker.__init__(self, request, dispatcher)
self._send_quota = send_quota
self._receive_quota = receive_quota
# Append headers which should not be included in handshake field of
# AddChannelRequest.
# TODO(bashi): Decide whether we should raise an exception when
# these headers are already included.
request.headers_in[common.UPGRADE_HEADER] = (
common.WEBSOCKET_UPGRADE_TYPE)
request.headers_in[common.SEC_WEBSOCKET_VERSION_HEADER] = (
str(common.VERSION_HYBI_LATEST))
request.headers_in[common.SEC_WEBSOCKET_KEY_HEADER] = (
self._DUMMY_WEBSOCKET_KEY)
def _create_stream(self, stream_options):
"""Override hybi.Handshaker._create_stream."""
self._logger.debug('Creating logical stream for %d' %
self._request.channel_id)
return _LogicalStream(
self._request, stream_options, self._send_quota,
self._receive_quota)
def _create_handshake_response(self, accept):
"""Override hybi._create_handshake_response."""
response = []
response.append('HTTP/1.1 101 Switching Protocols\r\n')
# Upgrade and Sec-WebSocket-Accept should be excluded.
response.append('%s: %s\r\n' % (
common.CONNECTION_HEADER, common.UPGRADE_CONNECTION_TYPE))
if self._request.ws_protocol is not None:
response.append('%s: %s\r\n' % (
common.SEC_WEBSOCKET_PROTOCOL_HEADER,
self._request.ws_protocol))
if (self._request.ws_extensions is not None and
len(self._request.ws_extensions) != 0):
response.append('%s: %s\r\n' % (
common.SEC_WEBSOCKET_EXTENSIONS_HEADER,
common.format_extensions(self._request.ws_extensions)))
response.append('\r\n')
return ''.join(response)
def _send_handshake(self, accept):
"""Override hybi.Handshaker._send_handshake."""
# Don't send handshake response for the default channel
if self._request.channel_id == _DEFAULT_CHANNEL_ID:
return
handshake_response = self._create_handshake_response(accept)
frame_data = _create_add_channel_response(
self._request.channel_id,
handshake_response)
self._logger.debug('Sending handshake response for %d: %r' %
(self._request.channel_id, frame_data))
self._request.connection.write_control_data(frame_data)
class _LogicalChannelData(object):
"""A structure that holds information about logical channel.
"""
def __init__(self, request, worker):
self.request = request
self.worker = worker
self.drop_code = _DROP_CODE_NORMAL_CLOSURE
self.drop_message = ''
class _HandshakeDeltaBase(object):
"""A class that holds information for delta-encoded handshake."""
def __init__(self, headers):
self._headers = headers
def create_headers(self, delta=None):
"""Creates request headers for an AddChannelRequest that has
delta-encoded handshake.
Args:
delta: headers to be overridden.
"""
headers = copy.copy(self._headers)
if delta:
for key, value in delta.items():
# The spec requires that a header with an empty value is
# removed from the delta base.
if len(value) == 0 and headers.has_key(key):
del headers[key]
else:
headers[key] = value
return headers
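# Example (sketch with hypothetical header names): with a base of
# {'Cookie': 'a=1', 'X-Foo': 'bar'} and a delta of
# {'X-Foo': '', 'X-Baz': 'qux'}, the empty value removes X-Foo and the
# result is {'Cookie': 'a=1', 'X-Baz': 'qux'}.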
class _MuxHandler(object):
"""Multiplexing handler. When a handler starts, it launches three
threads: the reader thread, the writer thread, and a worker thread.
The reader thread reads data from the physical stream, i.e., the
ws_stream object of the underlying websocket connection. The reader
thread interprets multiplexed frames and dispatches them to logical
channels. Methods of this class are mostly called by the reader thread.
The writer thread sends multiplexed frames which are created by
logical channels via the physical connection.
The worker thread launched at the starting point handles the
"Implicitly Opened Connection". If multiplexing handler receives
an AddChannelRequest and accepts it, the handler will launch a new worker
thread and dispatch the request to it.
"""
def __init__(self, request, dispatcher):
"""Constructs an instance.
Args:
request: mod_python request of the physical connection.
dispatcher: Dispatcher instance (dispatch.Dispatcher).
"""
self.original_request = request
self.dispatcher = dispatcher
self.physical_connection = request.connection
self.physical_stream = request.ws_stream
self._logger = util.get_class_logger(self)
self._logical_channels = {}
self._logical_channels_condition = threading.Condition()
# Holds client's initial quota
self._channel_slots = collections.deque()
self._handshake_base = None
self._worker_done_notify_received = False
self._reader = None
self._writer = None
def start(self):
"""Starts the handler.
Raises:
MuxUnexpectedException: when the handler has already started, or when
opening handshake of the default channel fails.
"""
if self._reader or self._writer:
raise MuxUnexpectedException('MuxHandler already started')
self._reader = _PhysicalConnectionReader(self)
self._writer = _PhysicalConnectionWriter(self)
self._reader.start()
self._writer.start()
# Create "Implicitly Opened Connection".
logical_connection = _LogicalConnection(self, _DEFAULT_CHANNEL_ID)
headers = copy.copy(self.original_request.headers_in)
# Add extensions for logical channel.
headers[common.SEC_WEBSOCKET_EXTENSIONS_HEADER] = (
common.format_extensions(
self.original_request.mux_processor.extensions()))
self._handshake_base = _HandshakeDeltaBase(headers)
logical_request = _LogicalRequest(
_DEFAULT_CHANNEL_ID,
self.original_request.method,
self.original_request.uri,
self.original_request.protocol,
self._handshake_base.create_headers(),
logical_connection)
# Client's send quota for the implicitly opened connection is zero,
# but we will send FlowControl later so set the initial quota to
# _INITIAL_QUOTA_FOR_CLIENT.
self._channel_slots.append(_INITIAL_QUOTA_FOR_CLIENT)
send_quota = self.original_request.mux_processor.quota()
if not self._do_handshake_for_logical_request(
logical_request, send_quota=send_quota):
raise MuxUnexpectedException(
'Failed handshake on the default channel id')
self._add_logical_channel(logical_request)
# Send FlowControl for the implicitly opened connection.
frame_data = _create_flow_control(_DEFAULT_CHANNEL_ID,
_INITIAL_QUOTA_FOR_CLIENT)
logical_request.connection.write_control_data(frame_data)
def add_channel_slots(self, slots, send_quota):
"""Adds channel slots.
Args:
slots: number of slots to be added.
send_quota: initial send quota for slots.
"""
self._channel_slots.extend([send_quota] * slots)
# Send NewChannelSlot to client.
frame_data = _create_new_channel_slot(slots, send_quota)
self.send_control_data(frame_data)
def wait_until_done(self, timeout=None):
"""Waits until all workers are done. Returns False when timeout has
occurred. Returns True on success.
Args:
timeout: timeout in sec.
"""
self._logical_channels_condition.acquire()
try:
while len(self._logical_channels) > 0:
self._logger.debug('Waiting workers(%d)...' %
len(self._logical_channels))
self._worker_done_notify_received = False
self._logical_channels_condition.wait(timeout)
if not self._worker_done_notify_received:
self._logger.debug('Waiting worker(s) timed out')
return False
finally:
self._logical_channels_condition.release()
# Flush pending outgoing data
self._writer.stop()
self._writer.join()
return True
def notify_write_data_done(self, channel_id):
"""Called by the writer thread when a write operation has done.
Args:
channel_id: target channel id.
"""
try:
self._logical_channels_condition.acquire()
if channel_id in self._logical_channels:
channel_data = self._logical_channels[channel_id]
channel_data.request.connection.on_write_data_done()
else:
self._logger.debug('Seems that logical channel for %d has gone'
% channel_id)
finally:
self._logical_channels_condition.release()
def send_control_data(self, data):
"""Sends data via the control channel.
Args:
data: data to be sent.
"""
self._writer.put_outgoing_data(_OutgoingData(
channel_id=_CONTROL_CHANNEL_ID, data=data))
def send_data(self, channel_id, data):
"""Sends data via given logical channel. This method is called by
worker threads.
Args:
channel_id: target channel id.
data: data to be sent.
"""
self._writer.put_outgoing_data(_OutgoingData(
channel_id=channel_id, data=data))
def _send_drop_channel(self, channel_id, code=None, message=''):
frame_data = _create_drop_channel(channel_id, code, message)
self._logger.debug(
'Sending drop channel for channel id %d' % channel_id)
self.send_control_data(frame_data)
def _send_error_add_channel_response(self, channel_id, status=None):
if status is None:
status = common.HTTP_STATUS_BAD_REQUEST
if status in _HTTP_BAD_RESPONSE_MESSAGES:
message = _HTTP_BAD_RESPONSE_MESSAGES[status]
else:
self._logger.debug('Response message for %d is not found' % status)
message = '???'
response = 'HTTP/1.1 %d %s\r\n\r\n' % (status, message)
frame_data = _create_add_channel_response(channel_id,
encoded_handshake=response,
encoding=0, rejected=True)
self.send_control_data(frame_data)
def _create_logical_request(self, block):
if block.channel_id == _CONTROL_CHANNEL_ID:
# TODO(bashi): Raise PhysicalConnectionError with code 2006
# instead of MuxUnexpectedException.
raise MuxUnexpectedException(
'Received the control channel id (0) as the target channel '
'id for AddChannel')
if block.encoding > _HANDSHAKE_ENCODING_DELTA:
raise PhysicalConnectionError(
_DROP_CODE_UNKNOWN_REQUEST_ENCODING)
method, path, version, headers = _parse_request_text(
block.encoded_handshake)
if block.encoding == _HANDSHAKE_ENCODING_DELTA:
headers = self._handshake_base.create_headers(headers)
connection = _LogicalConnection(self, block.channel_id)
request = _LogicalRequest(block.channel_id, method, path, version,
headers, connection)
return request
def _do_handshake_for_logical_request(self, request, send_quota=0):
try:
receive_quota = self._channel_slots.popleft()
except IndexError:
raise LogicalChannelError(
request.channel_id, _DROP_CODE_NEW_CHANNEL_SLOT_VIOLATION)
handshaker = _MuxHandshaker(request, self.dispatcher,
send_quota, receive_quota)
try:
handshaker.do_handshake()
except handshake.VersionException, e:
self._logger.info('%s', e)
self._send_error_add_channel_response(
request.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
return False
except handshake.HandshakeException, e:
# TODO(bashi): Should we _Fail the Logical Channel_ with 3001
# instead?
self._logger.info('%s', e)
self._send_error_add_channel_response(request.channel_id,
status=e.status)
return False
except handshake.AbortedByUserException, e:
self._logger.info('%s', e)
self._send_error_add_channel_response(request.channel_id)
return False
return True
def _add_logical_channel(self, logical_request):
try:
self._logical_channels_condition.acquire()
if logical_request.channel_id in self._logical_channels:
self._logger.debug('Channel id %d already exists' %
logical_request.channel_id)
raise PhysicalConnectionError(
_DROP_CODE_CHANNEL_ALREADY_EXISTS,
'Channel id %d already exists' %
logical_request.channel_id)
worker = _Worker(self, logical_request)
channel_data = _LogicalChannelData(logical_request, worker)
self._logical_channels[logical_request.channel_id] = channel_data
worker.start()
finally:
self._logical_channels_condition.release()
def _process_add_channel_request(self, block):
try:
logical_request = self._create_logical_request(block)
except ValueError, e:
self._logger.debug('Failed to create logical request: %r' % e)
self._send_error_add_channel_response(
block.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
return
if self._do_handshake_for_logical_request(logical_request):
if block.encoding == _HANDSHAKE_ENCODING_IDENTITY:
# Update handshake base.
# TODO(bashi): Make sure this is the right place to update
# handshake base.
self._handshake_base = _HandshakeDeltaBase(
logical_request.headers_in)
self._add_logical_channel(logical_request)
else:
self._send_error_add_channel_response(
block.channel_id, status=common.HTTP_STATUS_BAD_REQUEST)
def _process_flow_control(self, block):
try:
self._logical_channels_condition.acquire()
if not block.channel_id in self._logical_channels:
return
channel_data = self._logical_channels[block.channel_id]
channel_data.request.ws_stream.replenish_send_quota(
block.send_quota)
finally:
self._logical_channels_condition.release()
def _process_drop_channel(self, block):
self._logger.debug(
'DropChannel received for %d: code=%r, reason=%r' %
(block.channel_id, block.drop_code, block.drop_message))
try:
self._logical_channels_condition.acquire()
if not block.channel_id in self._logical_channels:
return
channel_data = self._logical_channels[block.channel_id]
channel_data.drop_code = _DROP_CODE_ACKNOWLEDGED
# Close the logical channel
channel_data.request.connection.set_read_state(
_LogicalConnection.STATE_TERMINATED)
channel_data.request.ws_stream.stop_sending()
finally:
self._logical_channels_condition.release()
def _process_control_blocks(self, parser):
for control_block in parser.read_control_blocks():
opcode = control_block.opcode
self._logger.debug('control block received, opcode: %d' % opcode)
if opcode == _MUX_OPCODE_ADD_CHANNEL_REQUEST:
self._process_add_channel_request(control_block)
elif opcode == _MUX_OPCODE_ADD_CHANNEL_RESPONSE:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Received AddChannelResponse')
elif opcode == _MUX_OPCODE_FLOW_CONTROL:
self._process_flow_control(control_block)
elif opcode == _MUX_OPCODE_DROP_CHANNEL:
self._process_drop_channel(control_block)
elif opcode == _MUX_OPCODE_NEW_CHANNEL_SLOT:
raise PhysicalConnectionError(
_DROP_CODE_INVALID_MUX_CONTROL_BLOCK,
'Received NewChannelSlot')
else:
raise MuxUnexpectedException(
'Unexpected opcode %r' % opcode)
def _process_logical_frame(self, channel_id, parser):
self._logger.debug('Received a frame. channel id=%d' % channel_id)
try:
self._logical_channels_condition.acquire()
if not channel_id in self._logical_channels:
# We must ignore the message for an inactive channel.
return
channel_data = self._logical_channels[channel_id]
fin, rsv1, rsv2, rsv3, opcode, payload = parser.read_inner_frame()
consuming_byte = len(payload)
if opcode != common.OPCODE_CONTINUATION:
consuming_byte += 1
if not channel_data.request.ws_stream.consume_receive_quota(
consuming_byte):
# The client violated its quota. Close the logical channel.
raise LogicalChannelError(
channel_id, _DROP_CODE_SEND_QUOTA_VIOLATION)
header = create_header(opcode, len(payload), fin, rsv1, rsv2, rsv3,
mask=False)
frame_data = header + payload
channel_data.request.connection.append_frame_data(frame_data)
finally:
self._logical_channels_condition.release()
def dispatch_message(self, message):
"""Dispatches message. The reader thread calls this method.
Args:
message: a message that contains an encapsulated frame.
Raises:
PhysicalConnectionError: if the message contains physical
connection level errors.
LogicalChannelError: if the message contains logical channel
level errors.
"""
parser = _MuxFramePayloadParser(message)
try:
channel_id = parser.read_channel_id()
except ValueError, e:
raise PhysicalConnectionError(_DROP_CODE_CHANNEL_ID_TRUNCATED)
if channel_id == _CONTROL_CHANNEL_ID:
self._process_control_blocks(parser)
else:
self._process_logical_frame(channel_id, parser)
def notify_worker_done(self, channel_id):
"""Called when a worker has finished.
Args:
channel_id: channel id associated with the worker.
"""
self._logger.debug('Worker for channel id %d terminated' % channel_id)
try:
self._logical_channels_condition.acquire()
if not channel_id in self._logical_channels:
raise MuxUnexpectedException(
'Channel id %d not found' % channel_id)
channel_data = self._logical_channels.pop(channel_id)
finally:
self._worker_done_notify_received = True
self._logical_channels_condition.notify()
self._logical_channels_condition.release()
if not channel_data.request.server_terminated:
self._send_drop_channel(
channel_id, code=channel_data.drop_code,
message=channel_data.drop_message)
def notify_reader_done(self):
"""This method is called by the reader thread when the reader has
finished.
"""
self._logger.debug(
'Terminating all logical connections waiting for incoming data '
'...')
self._logical_channels_condition.acquire()
for channel_data in self._logical_channels.values():
try:
channel_data.request.connection.set_read_state(
_LogicalConnection.STATE_TERMINATED)
except Exception:
self._logger.debug(traceback.format_exc())
self._logical_channels_condition.release()
def notify_writer_done(self):
"""This method is called by the writer thread when the writer has
finished.
"""
self._logger.debug(
'Terminating all logical connections waiting for write '
'completion ...')
self._logical_channels_condition.acquire()
for channel_data in self._logical_channels.values():
try:
channel_data.request.connection.on_writer_done()
except Exception:
self._logger.debug(traceback.format_exc())
self._logical_channels_condition.release()
def fail_physical_connection(self, code, message):
"""Fail the physical connection.
Args:
code: drop reason code.
message: drop message.
"""
self._logger.debug('Failing the physical connection...')
self._send_drop_channel(_CONTROL_CHANNEL_ID, code, message)
self._writer.stop(common.STATUS_INTERNAL_ENDPOINT_ERROR)
def fail_logical_channel(self, channel_id, code, message):
"""Fail a logical channel.
Args:
channel_id: channel id.
code: drop reason code.
message: drop message.
"""
self._logger.debug('Failing logical channel %d...' % channel_id)
try:
self._logical_channels_condition.acquire()
if channel_id in self._logical_channels:
channel_data = self._logical_channels[channel_id]
# Close the logical channel. notify_worker_done() will be
# called later and it will send DropChannel.
channel_data.drop_code = code
channel_data.drop_message = message
channel_data.request.connection.set_read_state(
_LogicalConnection.STATE_TERMINATED)
channel_data.request.ws_stream.stop_sending()
else:
self._send_drop_channel(channel_id, code, message)
finally:
self._logical_channels_condition.release()
def use_mux(request):
return hasattr(request, 'mux_processor') and (
request.mux_processor.is_active())
def start(request, dispatcher):
mux_handler = _MuxHandler(request, dispatcher)
mux_handler.start()
mux_handler.add_channel_slots(_INITIAL_NUMBER_OF_CHANNEL_SLOTS,
_INITIAL_QUOTA_FOR_CLIENT)
mux_handler.wait_until_done()
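# Typical entry point (sketch): a dispatcher that detected the mux extension
# during the physical opening handshake hands the request over like this;
# start() blocks in wait_until_done() until every logical channel finishes.
#   if use_mux(request):
#       start(request, dispatcher)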
# vi:sts=4 sw=4 et
|
GiovanniConserva/TestDeploy | refs/heads/master | venv/Lib/site-packages/pytz/tzfile.py | 480 | #!/usr/bin/env python
'''
$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
'''
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from datetime import datetime, timedelta
from struct import unpack, calcsize
from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
from pytz.tzinfo import memorized_datetime, memorized_timedelta
def _byte_string(s):
"""Cast a string or byte string to an ASCII byte string."""
return s.encode('US-ASCII')
_NULL = _byte_string('\0')
def _std_string(s):
"""Cast a string or byte string to an ASCII string."""
return str(s.decode('US-ASCII'))
def build_tzinfo(zone, fp):
head_fmt = '>4s c 15x 6l'
head_size = calcsize(head_fmt)
(magic, format, ttisgmtcnt, ttisstdcnt, leapcnt, timecnt,
typecnt, charcnt) = unpack(head_fmt, fp.read(head_size))
# Make sure it is a tzfile(5) file
assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic)
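# Size sanity check (sketch): '>4s c 15x 6l' unpacks the fixed tzfile(5)
# header, 4 + 1 + 15 + 6*4 = 44 bytes, before the variable-length
# transition data read below.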
# Read out the transition times, localtime indices and ttinfo structures.
data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
timecnt=timecnt, ttinfo='lBB'*typecnt, charcnt=charcnt)
data_size = calcsize(data_fmt)
data = unpack(data_fmt, fp.read(data_size))
# make sure we unpacked the right number of values
assert len(data) == 2 * timecnt + 3 * typecnt + 1
transitions = [memorized_datetime(trans)
for trans in data[:timecnt]]
lindexes = list(data[timecnt:2 * timecnt])
ttinfo_raw = data[2 * timecnt:-1]
tznames_raw = data[-1]
del data
# Process ttinfo into separate structs
ttinfo = []
tznames = {}
i = 0
while i < len(ttinfo_raw):
# have we looked up this timezone name yet?
tzname_offset = ttinfo_raw[i+2]
if tzname_offset not in tznames:
nul = tznames_raw.find(_NULL, tzname_offset)
if nul < 0:
nul = len(tznames_raw)
tznames[tzname_offset] = _std_string(
tznames_raw[tzname_offset:nul])
ttinfo.append((ttinfo_raw[i],
bool(ttinfo_raw[i+1]),
tznames[tzname_offset]))
i += 3
# Now build the timezone object
if len(transitions) == 0:
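# The bare subscript on the next line is a no-op expression; its only
# visible effect is to raise IndexError early if ttinfo is empty (an
# interpretation of the intent here, not documented behaviour).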
ttinfo[0][0], ttinfo[0][2]
cls = type(zone, (StaticTzInfo,), dict(
zone=zone,
_utcoffset=memorized_timedelta(ttinfo[0][0]),
_tzname=ttinfo[0][2]))
else:
# Early dates use the first standard time ttinfo
i = 0
while ttinfo[i][1]:
i += 1
if ttinfo[i] == ttinfo[lindexes[0]]:
transitions[0] = datetime.min
else:
transitions.insert(0, datetime.min)
lindexes.insert(0, i)
# calculate transition info
transition_info = []
for i in range(len(transitions)):
inf = ttinfo[lindexes[i]]
utcoffset = inf[0]
if not inf[1]:
dst = 0
else:
for j in range(i-1, -1, -1):
prev_inf = ttinfo[lindexes[j]]
if not prev_inf[1]:
break
dst = inf[0] - prev_inf[0] # dst offset
# Bad dst? Look further. DST > 24 hours happens when
# a timezone has moved across the international dateline.
if dst <= 0 or dst > 3600*3:
for j in range(i+1, len(transitions)):
stdinf = ttinfo[lindexes[j]]
if not stdinf[1]:
dst = inf[0] - stdinf[0]
if dst > 0:
break # Found a useful std time.
tzname = inf[2]
# Round utcoffset and dst to the nearest minute or the
# datetime library will complain. Conversions to these timezones
# might be up to plus or minus 30 seconds out, but it is
# the best we can do.
utcoffset = int((utcoffset + 30) // 60) * 60
dst = int((dst + 30) // 60) * 60
transition_info.append(memorized_ttinfo(utcoffset, dst, tzname))
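# e.g. a raw utcoffset of 8429 s (2:20:29) becomes
# int((8429 + 30) // 60) * 60 == 8400 s (2:20:00).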
cls = type(zone, (DstTzInfo,), dict(
zone=zone,
_utc_transition_times=transitions,
_transition_info=transition_info))
return cls()
if __name__ == '__main__':
import os.path
from pprint import pprint
base = os.path.join(os.path.dirname(__file__), 'zoneinfo')
tz = build_tzinfo('Australia/Melbourne',
open(os.path.join(base,'Australia','Melbourne'), 'rb'))
tz = build_tzinfo('US/Eastern',
open(os.path.join(base,'US','Eastern'), 'rb'))
pprint(tz._utc_transition_times)
#print tz.asPython(4)
#print tz.transitions_mapping
|
gef756/scipy | refs/heads/master | scipy/sparse/linalg/isolve/setup.py | 108 | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import
from os.path import join
def configuration(parent_package='',top_path=None):
from numpy.distutils.system_info import get_info, NotFoundError
from numpy.distutils.misc_util import Configuration
from scipy._build_utils import get_g77_abi_wrappers
config = Configuration('isolve',parent_package,top_path)
lapack_opt = get_info('lapack_opt')
if not lapack_opt:
raise NotFoundError('no lapack/blas resources found')
# iterative methods
methods = ['BiCGREVCOM.f.src',
'BiCGSTABREVCOM.f.src',
'CGREVCOM.f.src',
'CGSREVCOM.f.src',
# 'ChebyREVCOM.f.src',
'GMRESREVCOM.f.src',
# 'JacobiREVCOM.f.src',
'QMRREVCOM.f.src',
# 'SORREVCOM.f.src'
]
Util = ['STOPTEST2.f.src','getbreak.f.src']
sources = Util + methods + ['_iterative.pyf.src']
sources = [join('iterative', x) for x in sources]
sources += get_g77_abi_wrappers(lapack_opt)
config.add_extension('_iterative',
sources=sources,
extra_info=lapack_opt)
config.add_data_dir('tests')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
foss-transportationmodeling/rettina-server | refs/heads/master | flask/lib/python2.7/site-packages/sqlalchemy/testing/pickleable.py | 81 | # testing/pickleable.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Classes used in pickling tests, need to be at the module level for
unpickling.
"""
from . import fixtures
class User(fixtures.ComparableEntity):
pass
class Order(fixtures.ComparableEntity):
pass
class Dingaling(fixtures.ComparableEntity):
pass
class EmailUser(User):
pass
class Address(fixtures.ComparableEntity):
pass
# TODO: these are kind of arbitrary....
class Child1(fixtures.ComparableEntity):
pass
class Child2(fixtures.ComparableEntity):
pass
class Parent(fixtures.ComparableEntity):
pass
class Screen(object):
def __init__(self, obj, parent=None):
self.obj = obj
self.parent = parent
class Foo(object):
def __init__(self, moredata):
self.data = 'im data'
self.stuff = 'im stuff'
self.moredata = moredata
__hash__ = object.__hash__
def __eq__(self, other):
return other.data == self.data and \
other.stuff == self.stuff and \
other.moredata == self.moredata
class Bar(object):
def __init__(self, x, y):
self.x = x
self.y = y
__hash__ = object.__hash__
def __eq__(self, other):
return other.__class__ is self.__class__ and \
other.x == self.x and \
other.y == self.y
def __str__(self):
return "Bar(%d, %d)" % (self.x, self.y)
class OldSchool:
def __init__(self, x, y):
self.x = x
self.y = y
def __eq__(self, other):
return other.__class__ is self.__class__ and \
other.x == self.x and \
other.y == self.y
class OldSchoolWithoutCompare:
def __init__(self, x, y):
self.x = x
self.y = y
class BarWithoutCompare(object):
def __init__(self, x, y):
self.x = x
self.y = y
def __str__(self):
return "Bar(%d, %d)" % (self.x, self.y)
class NotComparable(object):
def __init__(self, data):
self.data = data
def __hash__(self):
return id(self)
def __eq__(self, other):
return NotImplemented
def __ne__(self, other):
return NotImplemented
class BrokenComparable(object):
def __init__(self, data):
self.data = data
def __hash__(self):
return id(self)
def __eq__(self, other):
raise NotImplementedError
def __ne__(self, other):
raise NotImplementedError
|
gxk/libnl | refs/heads/master | python/netlink/route/tc.py | 3 | #
# Copyright (c) 2011 Thomas Graf <[email protected]>
#
__all__ = [
'TcCache',
'Tc',
'QdiscCache',
'Qdisc',
'TcClassCache',
'TcClass']
import socket
import sys
import netlink.core as netlink
import netlink.capi as core_capi
import netlink.route.capi as capi
import netlink.util as util
import netlink.route.link as Link
TC_PACKETS = 0
TC_BYTES = 1
TC_RATE_BPS = 2
TC_RATE_PPS = 3
TC_QLEN = 4
TC_BACKLOG = 5
TC_DROPS = 6
TC_REQUEUES = 7
TC_OVERLIMITS = 9
TC_H_ROOT = 0xFFFFFFFF
TC_H_INGRESS = 0xFFFFFFF1
STAT_PACKETS = 0
STAT_BYTES = 1
STAT_RATE_BPS = 2
STAT_RATE_PPS = 3
STAT_QLEN = 4
STAT_BACKLOG = 5
STAT_DROPS = 6
STAT_REQUEUES = 7
STAT_OVERLIMITS = 8
STAT_MAX = STAT_OVERLIMITS
###########################################################################
# Handle
class Handle(object):
""" Traffic control handle
Representation of a traffic control handle which uniquely identifies
each traffic control object in its link namespace.
handle = tc.Handle('10:20')
handle = tc.handle('root')
print int(handle)
print str(handle)
"""
def __init__(self, val=None):
if type(val) is str:
val = capi.tc_str2handle(val)
elif not val:
val = 0
self._val = int(val)
def __cmp__(self, other):
if other is None:
other = 0
if isinstance(other, Handle):
return int(self) - int(other)
elif isinstance(other, int):
return int(self) - other
else:
raise TypeError()
def __int__(self):
return self._val
def __str__(self):
return capi.rtnl_tc_handle2str(self._val, 64)[0]
def isroot(self):
return self._val == TC_H_ROOT or self._val == TC_H_INGRESS
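# Handles follow the kernel's major:minor packing (sketch): the major
# number fills the upper 16 bits and the minor the lower 16, so
# Handle('10:20') wraps 0x00100020 while Handle('root') wraps TC_H_ROOT.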
###########################################################################
# TC Cache
class TcCache(netlink.Cache):
"""Cache of traffic control object"""
def __getitem__(self, key):
raise NotImplementedError()
###########################################################################
# Tc Object
class Tc(netlink.Object):
def __cmp__(self, other):
diff = self.ifindex - other.ifindex
if diff == 0:
diff = int(self.handle) - int(other.handle)
return diff
def _tc_module_lookup(self):
self._module_lookup(self._module_path + self.kind,
'init_' + self._name)
@property
def root(self):
"""True if tc object is a root object"""
return self.parent.isroot()
#####################################################################
# ifindex
@property
def ifindex(self):
"""interface index"""
return capi.rtnl_tc_get_ifindex(self._rtnl_tc)
@ifindex.setter
def ifindex(self, value):
capi.rtnl_tc_set_ifindex(self._rtnl_tc, int(value))
#####################################################################
# link
@property
def link(self):
link = capi.rtnl_tc_get_link(self._rtnl_tc)
if not link:
return None
return Link.Link.from_capi(link)
@link.setter
def link(self, value):
capi.rtnl_tc_set_link(self._rtnl_tc, value._link)
#####################################################################
# mtu
@property
def mtu(self):
return capi.rtnl_tc_get_mtu(self._rtnl_tc)
@mtu.setter
def mtu(self, value):
capi.rtnl_tc_set_mtu(self._rtnl_tc, int(value))
#####################################################################
# mpu
@property
def mpu(self):
return capi.rtnl_tc_get_mpu(self._rtnl_tc)
@mpu.setter
def mpu(self, value):
capi.rtnl_tc_set_mpu(self._rtnl_tc, int(value))
#####################################################################
# overhead
@property
def overhead(self):
return capi.rtnl_tc_get_overhead(self._rtnl_tc)
@overhead.setter
def overhead(self, value):
capi.rtnl_tc_set_overhead(self._rtnl_tc, int(value))
#####################################################################
# linktype
@property
def linktype(self):
return capi.rtnl_tc_get_linktype(self._rtnl_tc)
@linktype.setter
def linktype(self, value):
capi.rtnl_tc_set_linktype(self._rtnl_tc, int(value))
#####################################################################
# handle
@property
def handle(self):
return Handle(capi.rtnl_tc_get_handle(self._rtnl_tc))
@handle.setter
def handle(self, value):
capi.rtnl_tc_set_handle(self._rtnl_tc, int(value))
#####################################################################
# parent
@property
def parent(self):
return Handle(capi.rtnl_tc_get_parent(self._rtnl_tc))
@parent.setter
def parent(self, value):
capi.rtnl_tc_set_parent(self._rtnl_tc, int(value))
#####################################################################
# kind
@property
def kind(self):
return capi.rtnl_tc_get_kind(self._rtnl_tc)
@kind.setter
def kind(self, value):
capi.rtnl_tc_set_kind(self._rtnl_tc, value)
self._tc_module_lookup()
def get_stat(self, id):
return capi.rtnl_tc_get_stat(self._rtnl_tc, id)
@property
def _dev(self):
buf = util.kw('dev') + ' '
if self.link:
return buf + util.string(self.link.name)
else:
return buf + util.num(self.ifindex)
def brief(self, title, nodev=False, noparent=False):
ret = title + ' {a|kind} {a|handle}'
if not nodev:
ret += ' {a|_dev}'
if not noparent:
ret += ' {t|parent}'
return ret + self._module_brief()
def details(self):
return '{t|mtu} {t|mpu} {t|overhead} {t|linktype}'
@property
def packets(self):
return self.get_stat(STAT_PACKETS)
@property
def bytes(self):
return self.get_stat(STAT_BYTES)
@property
def qlen(self):
return self.get_stat(STAT_QLEN)
def stats(self, fmt):
return fmt.nl('{t|packets} {t|bytes} {t|qlen}')
###########################################################################
# Queueing discipline cache
class QdiscCache(netlink.Cache):
"""Cache of qdiscs"""
def __init__(self, cache=None):
if not cache:
cache = self._alloc_cache_name("route/qdisc")
self._protocol = netlink.NETLINK_ROUTE
self._nl_cache = cache
# (placeholder sketch; these lookup calls still point at the link
# cache API and would need their rtnl_qdisc_* equivalents)
# def __getitem__(self, key):
# if type(key) is int:
# qdisc = capi.rtnl_link_get(self._this, key)
# elif type(key) is str:
# qdisc = capi.rtnl_link_get_by_name(self._this, key)
#
# if qdisc is None:
# raise KeyError()
# else:
# return Qdisc._from_capi(capi.qdisc2obj(qdisc))
def _new_object(self, obj):
return Qdisc(obj)
def _new_cache(self, cache):
return QdiscCache(cache=cache)
###########################################################################
# Qdisc Object
class Qdisc(Tc):
"""Queueing discipline"""
def __init__(self, obj=None):
netlink.Object.__init__(self, "route/qdisc", "qdisc", obj)
self._module_path = 'netlink.route.qdisc.'
self._rtnl_qdisc = self._obj2type(self._nl_object)
self._rtnl_tc = capi.obj2tc(self._nl_object)
netlink.add_attr('qdisc.handle', fmt=util.handle)
netlink.add_attr('qdisc.parent', fmt=util.handle)
netlink.add_attr('qdisc.kind', fmt=util.bold)
if self.kind:
self._tc_module_lookup()
@classmethod
def from_capi(cls, obj):
return cls(capi.qdisc2obj(obj))
def _obj2type(self, obj):
return capi.obj2qdisc(obj)
def _new_instance(self, obj):
if not obj:
raise ValueError()
return Qdisc(obj)
@property
def childs(self):
ret = []
if int(self.handle):
ret += get_cls(self.ifindex, parent=self.handle)
if self.root:
ret += get_class(self.ifindex, parent=TC_H_ROOT)
ret += get_class(self.ifindex, parent=self.handle)
return ret
# #####################################################################
# # add()
# def add(self, socket, flags=None):
# if not flags:
# flags = netlink.NLM_F_CREATE
#
# ret = capi.rtnl_link_add(socket._sock, self._link, flags)
# if ret < 0:
# raise netlink.KernelError(ret)
#
# #####################################################################
# # change()
# def change(self, socket, flags=0):
# """Commit changes made to the link object"""
# if not self._orig:
# raise NetlinkError("Original link not available")
# ret = capi.rtnl_link_change(socket._sock, self._orig, self._link, flags)
# if ret < 0:
# raise netlink.KernelError(ret)
#
# #####################################################################
# # delete()
# def delete(self, socket):
# """Attempt to delete this link in the kernel"""
# ret = capi.rtnl_link_delete(socket._sock, self._link)
# if ret < 0:
# raise netlink.KernelError(ret)
###################################################################
#
# format(details=False, stats=False)
#
def format(self, details=False, stats=False, nodev=False,
noparent=False, indent=''):
"""Return qdisc as formatted text"""
fmt = util.MyFormatter(self, indent)
buf = fmt.format(self.brief('qdisc', nodev, noparent))
if details:
buf += fmt.nl('\t' + self.details())
if stats:
buf += self.stats(fmt)
# if stats:
# l = [['Packets', RX_PACKETS, TX_PACKETS],
# ['Bytes', RX_BYTES, TX_BYTES],
# ['Errors', RX_ERRORS, TX_ERRORS],
# ['Dropped', RX_DROPPED, TX_DROPPED],
# ['Compressed', RX_COMPRESSED, TX_COMPRESSED],
# ['FIFO Errors', RX_FIFO_ERR, TX_FIFO_ERR],
# ['Length Errors', RX_LEN_ERR, None],
# ['Over Errors', RX_OVER_ERR, None],
# ['CRC Errors', RX_CRC_ERR, None],
# ['Frame Errors', RX_FRAME_ERR, None],
# ['Missed Errors', RX_MISSED_ERR, None],
# ['Abort Errors', None, TX_ABORT_ERR],
# ['Carrier Errors', None, TX_CARRIER_ERR],
# ['Heartbeat Errors', None, TX_HBEAT_ERR],
# ['Window Errors', None, TX_WIN_ERR],
# ['Collisions', None, COLLISIONS],
# ['Multicast', None, MULTICAST],
# ['', None, None],
# ['Ipv6:', None, None],
# ['Packets', IP6_INPKTS, IP6_OUTPKTS],
# ['Bytes', IP6_INOCTETS, IP6_OUTOCTETS],
# ['Discards', IP6_INDISCARDS, IP6_OUTDISCARDS],
# ['Multicast Packets', IP6_INMCASTPKTS, IP6_OUTMCASTPKTS],
# ['Multicast Bytes', IP6_INMCASTOCTETS, IP6_OUTMCASTOCTETS],
# ['Broadcast Packets', IP6_INBCASTPKTS, IP6_OUTBCASTPKTS],
# ['Broadcast Bytes', IP6_INBCASTOCTETS, IP6_OUTBCASTOCTETS],
# ['Delivers', IP6_INDELIVERS, None],
# ['Forwarded', None, IP6_OUTFORWDATAGRAMS],
# ['No Routes', IP6_INNOROUTES, IP6_OUTNOROUTES],
# ['Header Errors', IP6_INHDRERRORS, None],
# ['Too Big Errors', IP6_INTOOBIGERRORS, None],
# ['Address Errors', IP6_INADDRERRORS, None],
# ['Unknown Protocol', IP6_INUNKNOWNPROTOS, None],
# ['Truncated Packets', IP6_INTRUNCATEDPKTS, None],
# ['Reasm Timeouts', IP6_REASMTIMEOUT, None],
# ['Reasm Requests', IP6_REASMREQDS, None],
# ['Reasm Failures', IP6_REASMFAILS, None],
# ['Reasm OK', IP6_REASMOKS, None],
# ['Frag Created', None, IP6_FRAGCREATES],
# ['Frag Failures', None, IP6_FRAGFAILS],
# ['Frag OK', None, IP6_FRAGOKS],
# ['', None, None],
# ['ICMPv6:', None, None],
# ['Messages', ICMP6_INMSGS, ICMP6_OUTMSGS],
# ['Errors', ICMP6_INERRORS, ICMP6_OUTERRORS]]
#
# buf += '\n\t%s%s%s%s\n' % (33 * ' ', util.title('RX'),
# 15 * ' ', util.title('TX'))
#
# for row in l:
# row[0] = util.kw(row[0])
# row[1] = self.get_stat(row[1]) if row[1] else ''
# row[2] = self.get_stat(row[2]) if row[2] else ''
# buf += '\t{0:27} {1:>16} {2:>16}\n'.format(*row)
return buf
###########################################################################
# Traffic class cache
class TcClassCache(netlink.Cache):
"""Cache of traffic classes"""
def __init__(self, ifindex, cache=None):
if not cache:
cache = self._alloc_cache_name("route/class")
self._protocol = netlink.NETLINK_ROUTE
self._nl_cache = cache
self._set_arg1(ifindex)
def _new_object(self, obj):
return TcClass(obj)
def _new_cache(self, cache):
return TcClassCache(self.arg1, cache=cache)
###########################################################################
# Traffic Class Object
class TcClass(Tc):
"""Traffic Class"""
def __init__(self, obj=None):
netlink.Object.__init__(self, "route/class", "class", obj)
self._module_path = 'netlink.route.qdisc.'
self._rtnl_class = self._obj2type(self._nl_object)
self._rtnl_tc = capi.obj2tc(self._nl_object)
netlink.add_attr('class.handle', fmt=util.handle)
netlink.add_attr('class.parent', fmt=util.handle)
netlink.add_attr('class.kind', fmt=util.bold)
if self.kind:
self._tc_module_lookup()
@classmethod
def from_capi(cls, obj):
return cls(capi.class2obj(obj))
def _obj2type(self, obj):
return capi.obj2class(obj)
def _new_instance(self, obj):
if not obj:
raise ValueError()
return TcClass(obj)
@property
def childs(self):
ret = []
# classes can have classifiers, child classes and leaf
# qdiscs
ret += get_cls(self.ifindex, parent=self.handle)
ret += get_class(self.ifindex, parent=self.handle)
ret += get_qdisc(self.ifindex, parent=self.handle)
return ret
###################################################################
#
# format(details=False, stats=False)
#
def format(self, details=False, stats=False, nodev=False,
noparent=False, indent=''):
"""Return class as formatted text"""
fmt = util.MyFormatter(self, indent)
buf = fmt.format(self.brief('class', nodev, noparent))
if details:
buf += fmt.nl('\t' + self.details())
return buf
###########################################################################
# Classifier Cache
class ClassifierCache(netlink.Cache):
"""Cache of traffic classifiers objects"""
def __init__(self, ifindex, parent, cache=None):
if not cache:
cache = self._alloc_cache_name("route/cls")
self._protocol = netlink.NETLINK_ROUTE
self._nl_cache = cache
self._set_arg1(ifindex)
self._set_arg2(int(parent))
def _new_object(self, obj):
return Classifier(obj)
def _new_cache(self, cache):
return ClassifierCache(self.arg1, self.arg2, cache=cache)
###########################################################################
# Classifier Object
class Classifier(Tc):
"""Classifier"""
def __init__(self, obj=None):
netlink.Object.__init__(self, "route/cls", "cls", obj)
self._module_path = 'netlink.route.cls.'
self._rtnl_cls = self._obj2type(self._nl_object)
self._rtnl_tc = capi.obj2tc(self._nl_object)
netlink.add_attr('cls.handle', fmt=util.handle)
netlink.add_attr('cls.parent', fmt=util.handle)
netlink.add_attr('cls.kind', fmt=util.bold)
@classmethod
def from_capi(cls, obj):
return cls(capi.cls2obj(obj))
def _obj2type(self, obj):
return capi.obj2cls(obj)
def _new_instance(self, obj):
if not obj:
raise ValueError()
return Classifier(obj)
#####################################################################
# priority
@property
def priority(self):
return capi.rtnl_cls_get_prio(self._rtnl_cls)
@priority.setter
def priority(self, value):
capi.rtnl_cls_set_prio(self._rtnl_cls, int(value))
#####################################################################
# protocol
@property
def protocol(self):
return capi.rtnl_cls_get_protocol(self._rtnl_cls)
@protocol.setter
def protocol(self, value):
capi.rtnl_cls_set_protocol(self._rtnl_cls, int(value))
@property
def childs(self):
return []
###################################################################
#
# format(details=False, stats=False)
#
def format(self, details=False, stats=False, nodev=False,
noparent=False, indent=''):
"""Return class as formatted text"""
fmt = util.MyFormatter(self, indent)
buf = fmt.format(self.brief('classifier', nodev, noparent))
buf += fmt.format(' {t|priority} {t|protocol}')
if details:
buf += fmt.nl('\t' + self.details())
return buf
_qdisc_cache = QdiscCache()
def get_qdisc(ifindex, handle=None, parent=None):
l = []
_qdisc_cache.refill()
for qdisc in _qdisc_cache:
if qdisc.ifindex == ifindex and \
(handle is None or qdisc.handle == handle) and \
(parent is None or qdisc.parent == parent):
l.append(qdisc)
return l
_class_cache = {}
def get_class(ifindex, parent, handle=None):
l = []
try:
cache = _class_cache[ifindex]
except KeyError:
cache = TcClassCache(ifindex)
_class_cache[ifindex] = cache
cache.refill()
for cl in cache:
if (parent is None or cl.parent == parent) and \
(handle is None or cl.handle == handle):
l.append(cl)
return l
_cls_cache = {}
def get_cls(ifindex, parent, handle=None):
l = []
if ifindex not in _cls_cache:
_cls_cache[ifindex] = {}
try:
cache = _cls_cache[ifindex][parent]
except KeyError:
cache = ClassifierCache(ifindex, parent)
_cls_cache[ifindex][parent] = cache
cache.refill()
for cls in cache:
if handle is None or cls.handle == handle:
l.append(cls)
return l
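# Example (sketch) tying the helpers above together; ifindex 2 is a
# placeholder for a real interface index:
#
#   import netlink.route.tc as tc
#   for qdisc in tc.get_qdisc(2):            # all qdiscs on ifindex 2
#       print qdisc.format(details=True)
#       for child in qdisc.childs:           # classes/filters beneath it
#           print child.format(indent='\t')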
|
devs4v/devs4v-information-retrieval15 | refs/heads/master | project/venv/lib/python2.7/site-packages/django/contrib/gis/db/backends/postgis/models.py | 396 | """
The GeometryColumns and SpatialRefSys models for the PostGIS backend.
"""
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class PostGISGeometryColumns(models.Model):
"""
The 'geometry_columns' table from PostGIS. See the PostGIS
documentation at Ch. 4.3.2.
On PostGIS 2, this is a view.
"""
f_table_catalog = models.CharField(max_length=256)
f_table_schema = models.CharField(max_length=256)
f_table_name = models.CharField(max_length=256)
f_geometry_column = models.CharField(max_length=256)
coord_dimension = models.IntegerField()
srid = models.IntegerField(primary_key=True)
type = models.CharField(max_length=30)
class Meta:
app_label = 'gis'
db_table = 'geometry_columns'
managed = False
@classmethod
def table_name_col(cls):
"""
Returns the name of the metadata column used to store the feature table
name.
"""
return 'f_table_name'
@classmethod
def geom_col_name(cls):
"""
Returns the name of the metadata column used to store the feature
geometry column.
"""
return 'f_geometry_column'
def __str__(self):
return "%s.%s - %dD %s field (SRID: %d)" % \
(self.f_table_name, self.f_geometry_column,
self.coord_dimension, self.type, self.srid)
class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin):
"""
The 'spatial_ref_sys' table from PostGIS. See the PostGIS
documentation at Ch. 4.2.1.
"""
srid = models.IntegerField(primary_key=True)
auth_name = models.CharField(max_length=256)
auth_srid = models.IntegerField()
srtext = models.CharField(max_length=2048)
proj4text = models.CharField(max_length=2048)
class Meta:
app_label = 'gis'
db_table = 'spatial_ref_sys'
managed = False
@property
def wkt(self):
return self.srtext
@classmethod
def wkt_col(cls):
return 'srtext'
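# Usage sketch: both models are unmanaged, read-only mirrors of PostGIS
# metadata tables and are typically only queried (4326, the well-known
# WGS84 SRID, is used purely as an example):
#
#   srs = PostGISSpatialRefSys.objects.get(srid=4326)
#   srs.wkt                                  # contents of the srtext column
#   PostGISGeometryColumns.table_name_col()  # -> 'f_table_name'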
|
HtmlUnit/selenium | refs/heads/master | py/test/selenium/webdriver/common/alerts_tests.py | 5 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from selenium.common.exceptions import ElementNotVisibleException
from selenium.common.exceptions import InvalidElementStateException
from selenium.common.exceptions import NoAlertPresentException
from selenium.common.exceptions import UnexpectedAlertPresentException
import unittest
class AlertsTest(unittest.TestCase):
def testShouldBeAbleToOverrideTheWindowAlertMethod(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.execute_script(
"window.alert = function(msg) { document.getElementById('text').innerHTML = msg; }")
self.driver.find_element(by=By.ID, value="alert").click()
try:
self.assertEqual(self.driver.find_element_by_id('text').text, "cheese")
except Exception as e:
# if we're here, likely the alert is displayed
# not dismissing it will affect other tests
try:
self._waitForAlert().dismiss()
except Exception:
pass
raise e
def testShouldAllowUsersToAcceptAnAlertManually(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(by=By.ID, value="alert").click()
alert = self._waitForAlert()
alert.accept()
# If we can perform any action, we're good to go
self.assertEqual("Testing Alerts", self.driver.title)
def testShouldAllowUsersToAcceptAnAlertWithNoTextManually(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(By.ID, "empty-alert").click()
alert = self._waitForAlert()
alert.accept()
# If we can perform any action, we're good to go
self.assertEqual("Testing Alerts", self.driver.title)
def testShouldGetTextOfAlertOpenedInSetTimeout(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element_by_id("slow-alert").click()
# DO NOT WAIT OR SLEEP HERE
# This is a regression test for a bug where only the first switchTo call would throw,
# and only if it happens before the alert actually loads.
alert = self._waitForAlert()
try:
self.assertEqual("Slow", alert.text)
finally:
alert.accept()
@pytest.mark.ignore_chrome
def testShouldAllowUsersToDismissAnAlertManually(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(by=By.ID, value="alert").click()
alert = self._waitForAlert()
alert.dismiss()
# If we can perform any action, we're good to go
self.assertEqual("Testing Alerts", self.driver.title)
def testShouldAllowAUserToAcceptAPrompt(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(by=By.ID, value="prompt").click()
alert = self._waitForAlert()
alert.accept()
# If we can perform any action, we're good to go
self.assertEqual("Testing Alerts", self.driver.title)
def testShouldAllowAUserToDismissAPrompt(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(by=By.ID, value="prompt").click()
alert = self._waitForAlert()
alert.dismiss()
# If we can perform any action, we're good to go
self.assertEqual("Testing Alerts", self.driver.title)
def testShouldAllowAUserToSetTheValueOfAPrompt(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(by=By.ID, value="prompt").click()
alert = self._waitForAlert()
alert.send_keys("cheese")
alert.accept()
result = self.driver.find_element(by=By.ID, value="text").text
self.assertEqual("cheese", result)
def testSettingTheValueOfAnAlertThrows(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(By.ID, "alert").click()
alert = self._waitForAlert()
try:
alert.send_keys("cheese")
self.fail("Expected exception")
except ElementNotVisibleException:
pass
except InvalidElementStateException:
pass
finally:
alert.accept()
def testAlertShouldNotAllowAdditionalCommandsIfDismissed(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(By.ID, "alert").click()
alert = self._waitForAlert()
alert.dismiss()
try:
alert.text
self.fail("Expected NoAlertPresentException")
except NoAlertPresentException:
pass
def testShouldAllowUsersToAcceptAnAlertInAFrame(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.switch_to.frame(self.driver.find_element(By.NAME, "iframeWithAlert"))
self.driver.find_element_by_id("alertInFrame").click()
alert = self._waitForAlert()
alert.accept()
self.assertEqual("Testing Alerts", self.driver.title)
def testShouldAllowUsersToAcceptAnAlertInANestedFrame(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.switch_to.frame(self.driver.find_element(By.NAME, "iframeWithIframe"))
self.driver.switch_to.frame(self.driver.find_element(By.NAME, "iframeWithAlert"))
self.driver.find_element_by_id("alertInFrame").click()
alert = self._waitForAlert()
alert.accept()
self.assertEqual("Testing Alerts", self.driver.title)
def testShouldThrowAnExceptionIfAnAlertHasNotBeenDealtWithAndDismissTheAlert(self):
pass
# TODO(David): Complete this test
def testPromptShouldUseDefaultValueIfNoKeysSent(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(By.ID, "prompt-with-default").click()
alert = self._waitForAlert()
alert.accept()
txt = self.driver.find_element(By.ID, "text").text
self.assertEqual("This is a default value", txt)
def testPromptShouldHaveNullValueIfDismissed(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(By.ID, "prompt-with-default").click()
alert = self._waitForAlert()
alert.dismiss()
self.assertEqual("null", self.driver.find_element(By.ID, "text").text)
def testHandlesTwoAlertsFromOneInteraction(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(By.ID, "double-prompt").click()
alert1 = self._waitForAlert()
alert1.send_keys("brie")
alert1.accept()
alert2 = self._waitForAlert()
alert2.send_keys("cheddar")
alert2.accept()
self.assertEqual(self.driver.find_element(By.ID, "text1").text, "brie")
self.assertEqual(self.driver.find_element(By.ID, "text2").text, "cheddar")
def testShouldHandleAlertOnPageLoad(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(By.ID, "open-page-with-onload-alert").click()
alert = self._waitForAlert()
value = alert.text
alert.accept()
self.assertEqual("onload", value)
def testShouldAllowTheUserToGetTheTextOfAnAlert(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(by=By.ID, value="alert").click()
alert = self._waitForAlert()
value = alert.text
alert.accept()
self.assertEqual("cheese", value)
def testUnexpectedAlertPresentExceptionContainsAlertText(self):
if self.driver.capabilities['browserName'] == 'phantomjs':
pytest.xfail("phantomjs driver does not support alerts")
self._loadPage("alerts")
self.driver.find_element(by=By.ID, value="alert").click()
alert = self._waitForAlert()
value = alert.text
try:
self._loadPage("simpleTest")
raise Exception("UnexpectedAlertPresentException should have been thrown")
except UnexpectedAlertPresentException as uape:
self.assertEqual(value, uape.alert_text)
self.assertTrue(str(uape).startswith("Alert Text: %s" % value))
def _waitForAlert(self):
return WebDriverWait(self.driver, 3).until(EC.alert_is_present())
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
try:
# just in case a previous test left open an alert
self.driver.switch_to.alert.dismiss()
except:
pass
self.driver.get(self._pageURL(name))
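# The interaction pattern the tests above exercise, in miniature
# (sketch; 'driver' stands for any configured WebDriver instance):
#
#   driver.find_element(By.ID, "alert").click()
#   alert = WebDriverWait(driver, 3).until(EC.alert_is_present())
#   value = alert.text
#   alert.accept()    # or alert.dismiss() / alert.send_keys("...")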
|
XiaosongWei/chromium-crosswalk | refs/heads/master | tools/perf/profile_creators/extension_profile_extender_unittest.py | 7 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import tempfile
from profile_creators import extension_profile_extender
from telemetry import decorators
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case
class ExtensionProfileExtenderUnitTest(page_test_test_case.PageTestTestCase):
"""Smoke test for creating an extension profile.
Creates an extension profile and verifies that it has non-empty contents.
"""
@decorators.Enabled('mac') # Extension generation only works on Mac for now.
def testExtensionProfileCreation(self):
tmp_dir = tempfile.mkdtemp()
files_in_crx_dir = 0
try:
options = options_for_unittests.GetCopy()
options.output_profile_path = tmp_dir
extender = extension_profile_extender.ExtensionProfileExtender(options)
extender.Run()
crx_dir = os.path.join(tmp_dir, 'external_extensions_crx')
files_in_crx_dir = len(os.listdir(crx_dir))
finally:
shutil.rmtree(tmp_dir)
self.assertGreater(files_in_crx_dir, 0)
|
idea4bsd/idea4bsd | refs/heads/idea4bsd-master | python/helpers/pydev/tests_pydevd/test_pydev_ipython_011.py | 14 | import sys
import unittest
import threading
import os
from nose.tools import eq_
from _pydev_bundle.pydev_imports import StringIO, SimpleXMLRPCServer
from _pydev_bundle.pydev_localhost import get_localhost
from _pydev_bundle.pydev_console_utils import StdIn
import socket
from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend
import time
try:
xrange
except NameError:
xrange = range
class TestBase(unittest.TestCase):
def setUp(self):
# PyDevFrontEnd depends on singleton in IPython, so you
# can't make multiple versions. So we reuse self.front_end for
# all the tests
self.front_end = get_pydev_frontend(get_localhost(), 0)
from pydev_ipython.inputhook import set_return_control_callback
set_return_control_callback(lambda: True)
self.front_end.clear_buffer()
def tearDown(self):
pass
def add_exec(self, code, expected_more=False):
more = self.front_end.add_exec(code)
eq_(expected_more, more)
def redirect_stdout(self):
from IPython.utils import io
self.original_stdout = sys.stdout
sys.stdout = io.stdout = StringIO()
def restore_stdout(self):
from IPython.utils import io
io.stdout = sys.stdout = self.original_stdout
class TestPyDevFrontEnd(TestBase):
def testAddExec_1(self):
self.add_exec('if True:', True)
def testAddExec_2(self):
#Change: 'more' must now be controlled in the client side after the initial 'True' returned.
self.add_exec('if True:\n testAddExec_a = 10\n', False)
assert 'testAddExec_a' in self.front_end.get_namespace()
def testAddExec_3(self):
assert 'testAddExec_x' not in self.front_end.get_namespace()
self.add_exec('if True:\n testAddExec_x = 10\n\n')
assert 'testAddExec_x' in self.front_end.get_namespace()
eq_(self.front_end.get_namespace()['testAddExec_x'], 10)
def test_get_namespace(self):
assert 'testGetNamespace_a' not in self.front_end.get_namespace()
self.add_exec('testGetNamespace_a = 10')
assert 'testGetNamespace_a' in self.front_end.get_namespace()
eq_(self.front_end.get_namespace()['testGetNamespace_a'], 10)
def test_complete(self):
unused_text, matches = self.front_end.complete('%')
assert len(matches) > 1, 'at least one magic should appear in completions'
def test_complete_does_not_do_python_matches(self):
# Test that IPython's completions do not do the things that
# PyDev's completions will handle
self.add_exec('testComplete_a = 5')
self.add_exec('testComplete_b = 10')
self.add_exec('testComplete_c = 15')
unused_text, matches = self.front_end.complete('testComplete_')
assert len(matches) == 0
def testGetCompletions_1(self):
# Test the merged completions include the standard completions
self.add_exec('testComplete_a = 5')
self.add_exec('testComplete_b = 10')
self.add_exec('testComplete_c = 15')
res = self.front_end.getCompletions('testComplete_', 'testComplete_')
matches = [f[0] for f in res]
assert len(matches) == 3
eq_(set(['testComplete_a', 'testComplete_b', 'testComplete_c']), set(matches))
def testGetCompletions_2(self):
# Test that we get IPython completions in results
# we do this by checking kw completion which PyDev does
# not do by default
self.add_exec('def ccc(ABC=123): pass')
res = self.front_end.getCompletions('ccc(', '')
matches = [f[0] for f in res]
assert 'ABC=' in matches
def testGetCompletions_3(self):
# Test that magics return IPYTHON magic as type
res = self.front_end.getCompletions('%cd', '%cd')
assert len(res) == 1
eq_(res[0][3], '12') # '12' == IToken.TYPE_IPYTHON_MAGIC
assert len(res[0][1]) > 100, 'docstring for %cd should be a reasonably long string'
class TestRunningCode(TestBase):
def test_print(self):
self.redirect_stdout()
try:
self.add_exec('print("output")')
eq_(sys.stdout.getvalue(), 'output\n')
finally:
self.restore_stdout()
def testQuestionMark_1(self):
self.redirect_stdout()
try:
self.add_exec('?')
assert len(sys.stdout.getvalue()) > 1000, 'IPython help should be pretty big'
finally:
self.restore_stdout()
def testQuestionMark_2(self):
self.redirect_stdout()
try:
self.add_exec('int?')
assert sys.stdout.getvalue().find('Convert') != -1
finally:
self.restore_stdout()
def test_gui(self):
try:
import Tkinter
except:
return
else:
from pydev_ipython.inputhook import get_inputhook
assert get_inputhook() is None
self.add_exec('%gui tk')
# we can't test the GUI works here because we aren't connected to XML-RPC so
# nowhere for hook to run
assert get_inputhook() is not None
self.add_exec('%gui none')
assert get_inputhook() is None
def test_history(self):
''' Make sure commands are added to IPython's history '''
self.redirect_stdout()
try:
self.add_exec('a=1')
self.add_exec('b=2')
_ih = self.front_end.get_namespace()['_ih']
eq_(_ih[-1], 'b=2')
eq_(_ih[-2], 'a=1')
self.add_exec('history')
hist = sys.stdout.getvalue().split('\n')
eq_(hist[-1], '')
eq_(hist[-2], 'history')
eq_(hist[-3], 'b=2')
eq_(hist[-4], 'a=1')
finally:
self.restore_stdout()
def test_edit(self):
''' Make sure we can issue an edit command'''
called_RequestInput = [False]
called_IPythonEditor = [False]
def start_client_thread(client_port):
class ClientThread(threading.Thread):
def __init__(self, client_port):
threading.Thread.__init__(self)
self.client_port = client_port
def run(self):
class HandleRequestInput:
def RequestInput(self):
called_RequestInput[0] = True
return '\n'
def IPythonEditor(self, name, line):
called_IPythonEditor[0] = (name, line)
return True
handle_request_input = HandleRequestInput()
from _pydev_bundle import pydev_localhost
self.client_server = client_server = SimpleXMLRPCServer(
(pydev_localhost.get_localhost(), self.client_port), logRequests=False)
client_server.register_function(handle_request_input.RequestInput)
client_server.register_function(handle_request_input.IPythonEditor)
client_server.serve_forever()
def shutdown(self):
# Note: the early return leaves the XML-RPC server running; the
# shutdown() call below is unreachable (the daemon thread exits
# with the test process anyway).
return
self.client_server.shutdown()
client_thread = ClientThread(client_port)
client_thread.setDaemon(True)
client_thread.start()
return client_thread
# PyDevFrontEnd depends on singleton in IPython, so you
# can't make multiple versions. So we reuse self.front_end for
# all the tests
s = socket.socket()
s.bind(('', 0))
self.client_port = client_port = s.getsockname()[1]
s.close()
self.front_end = get_pydev_frontend(get_localhost(), client_port)
client_thread = start_client_thread(self.client_port)
orig_stdin = sys.stdin
sys.stdin = StdIn(self, get_localhost(), self.client_port, orig_stdin)
try:
filename = 'made_up_file.py'
self.add_exec('%edit ' + filename)
for i in xrange(10):
if called_IPythonEditor[0] == (os.path.abspath(filename), '0'):
break
time.sleep(.1)
if not called_IPythonEditor[0]:
# File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/interactiveshell.py", line 2883, in run_code
# exec(code_obj, self.user_global_ns, self.user_ns)
# File "<ipython-input-15-09583ca3bce1>", line 1, in <module>
# get_ipython().magic('edit made_up_file.py')
# File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/interactiveshell.py", line 2205, in magic
# return self.run_line_magic(magic_name, magic_arg_s)
# File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/interactiveshell.py", line 2126, in run_line_magic
# result = fn(*args,**kwargs)
# File "<string>", line 2, in edit
# File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/magic.py", line 193, in <lambda>
# call = lambda f, *a, **k: f(*a, **k)
# File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/magics/code.py", line 662, in edit
# self.shell.hooks.editor(filename,lineno)
# File "/home/travis/build/fabioz/PyDev.Debugger/pydev_ipython_console_011.py", line 70, in call_editor
# server.IPythonEditor(filename, str(line))
# File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1090, in __call__
# return self.__send(self.__name, args)
# File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1419, in __request
# verbose=self.__verbose
# File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1132, in request
# return self.single_request(host, handler, request_body, verbose)
# File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1143, in single_request
# http_conn = self.send_request(host, handler, request_body, verbose)
# File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1255, in send_request
# self.send_content(connection, request_body)
# File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1285, in send_content
# connection.endheaders(request_body)
# File "/home/travis/miniconda/lib/python3.3/http/client.py", line 1061, in endheaders
# self._send_output(message_body)
# File "/home/travis/miniconda/lib/python3.3/http/client.py", line 906, in _send_output
# self.send(msg)
# File "/home/travis/miniconda/lib/python3.3/http/client.py", line 844, in send
# self.connect()
# File "/home/travis/miniconda/lib/python3.3/http/client.py", line 822, in connect
# self.timeout, self.source_address)
# File "/home/travis/miniconda/lib/python3.3/socket.py", line 435, in create_connection
# raise err
# File "/home/travis/miniconda/lib/python3.3/socket.py", line 426, in create_connection
# sock.connect(sa)
# ConnectionRefusedError: [Errno 111] Connection refused
# I.e.: just warn that the test failing, don't actually fail.
sys.stderr.write('Test failed: this test is brittle in travis because sometimes the connection is refused (as above) and we do not have a callback.\n')
return
eq_(called_IPythonEditor[0], (os.path.abspath(filename), '0'))
assert called_RequestInput[0], "Make sure the 'wait' parameter has been respected"
finally:
sys.stdin = orig_stdin
client_thread.shutdown()
if __name__ == '__main__':
#Just doing unittest.main() was not working when run directly (not sure why),
#and the test that does "from pydev_ipython.inputhook import get_inputhook, set_stdin_file"
#fails when run this way (it works when run from PyDev with Ctrl+F9, which is puzzling).
unittest.TextTestRunner(verbosity=1).run(unittest.makeSuite(TestRunningCode))
unittest.TextTestRunner(verbosity=1).run(unittest.makeSuite(TestPyDevFrontEnd))
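# Minimal interactive sketch of the front end exercised above (assumes
# the same localhost setup as TestBase.setUp):
#
#   fe = get_pydev_frontend(get_localhost(), 0)
#   fe.add_exec('if True:')                # -> True: needs more input
#   fe.add_exec('if True:\n    x = 1\n')   # complete block -> False
#   fe.get_namespace()['x']                # -> 1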
|
curlyjr25/FrogWebsite | refs/heads/master | frogs/apps/surgery/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
bdrung/audacity | refs/heads/master | lib-src/lv2/lv2/plugins/eg-metro.lv2/waflib/Tools/fc_scan.py | 183 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import re
from waflib import Utils,Task,TaskGen,Logs
from waflib.TaskGen import feature,before_method,after_method,extension
from waflib.Configure import conf
INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
re_inc=re.compile(INC_REGEX,re.I)
re_use=re.compile(USE_REGEX,re.I)
re_mod=re.compile(MOD_REGEX,re.I)
class fortran_parser(object):
def __init__(self,incpaths):
self.seen=[]
self.nodes=[]
self.names=[]
self.incpaths=incpaths
def find_deps(self,node):
txt=node.read()
incs=[]
uses=[]
mods=[]
for line in txt.splitlines():
m=re_inc.search(line)
if m:
incs.append(m.group(1))
m=re_use.search(line)
if m:
uses.append(m.group(1))
m=re_mod.search(line)
if m:
mods.append(m.group(1))
return(incs,uses,mods)
def start(self,node):
self.waiting=[node]
while self.waiting:
nd=self.waiting.pop(0)
self.iter(nd)
def iter(self,node):
path=node.abspath()
incs,uses,mods=self.find_deps(node)
for x in incs:
if x in self.seen:
continue
self.seen.append(x)
self.tryfind_header(x)
for x in uses:
name="USE@%s"%x
if not name in self.names:
self.names.append(name)
for x in mods:
name="MOD@%s"%x
if not name in self.names:
self.names.append(name)
def tryfind_header(self,filename):
found=None
for n in self.incpaths:
found=n.find_resource(filename)
if found:
self.nodes.append(found)
self.waiting.append(found)
break
if not found:
if not filename in self.names:
self.names.append(filename)
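# Illustrative matches for the three regexes above (example lines only,
# not part of waf):
#
#   "INCLUDE 'mpif.h'"   -> re_inc captures 'mpif.h'
#   "use iso_c_binding"  -> re_use captures 'iso_c_binding'
#   "module mymod"       -> re_mod captures 'mymod'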
|
gauribhoite/personfinder | refs/heads/master | env/google_appengine/lib/django-1.5/django/contrib/gis/management/commands/ogrinspect.py | 126 | import os
from optparse import make_option
from django.contrib.gis import gdal
from django.core.management.base import LabelCommand, CommandError
def layer_option(option, opt, value, parser):
"""
Callback for `make_option` for the `ogrinspect` `layer_key`
keyword option which may be an integer or a string.
"""
try:
dest = int(value)
except ValueError:
dest = value
setattr(parser.values, option.dest, dest)
def list_option(option, opt, value, parser):
"""
Callback for `make_option` for `ogrinspect` keywords that require
a string list. If the string is 'True'/'true' then the option
value will be a boolean instead.
"""
if value.lower() == 'true':
dest = True
else:
dest = [s for s in value.split(',')]
setattr(parser.values, option.dest, dest)
class Command(LabelCommand):
help = ('Inspects the given OGR-compatible data source (e.g., a shapefile) and outputs\n'
'a GeoDjango model with the given model name. For example:\n'
' ./manage.py ogrinspect zipcode.shp Zipcode')
args = '[data_source] [model_name]'
option_list = LabelCommand.option_list + (
make_option('--blank', dest='blank', type='string', action='callback',
callback=list_option, default=False,
help='Use a comma-separated list of OGR field names to add '
'the `blank=True` option to the field definition. Set to '
'`true` to apply to all applicable fields.'),
make_option('--decimal', dest='decimal', type='string', action='callback',
callback=list_option, default=False,
help='Use a comma-separated list of OGR float fields to '
'generate `DecimalField` instead of the default '
'`FloatField`. Set to `true` to apply to all OGR float fields.'),
make_option('--geom-name', dest='geom_name', type='string', default='geom',
help='Specifies the model name for the Geometry Field '
'(defaults to `geom`)'),
make_option('--layer', dest='layer_key', type='string', action='callback',
callback=layer_option, default=0,
help='The key for specifying which layer in the OGR data '
'source to use. Defaults to 0 (the first layer). May be '
'an integer or a string identifier for the layer.'),
make_option('--multi-geom', action='store_true', dest='multi_geom', default=False,
help='Treat the geometry in the data source as a geometry collection.'),
make_option('--name-field', dest='name_field',
help='Specifies a field name to return for the `__unicode__` function.'),
make_option('--no-imports', action='store_false', dest='imports', default=True,
help='Do not include `from django.contrib.gis.db import models` '
'statement.'),
make_option('--null', dest='null', type='string', action='callback',
callback=list_option, default=False,
help='Use a comma-separated list of OGR field names to add '
'the `null=True` option to the field definition. Set to '
'`true` to apply to all applicable fields.'),
make_option('--srid', dest='srid',
help='The SRID to use for the Geometry Field. If it can be '
'determined, the SRID of the data source is used.'),
make_option('--mapping', action='store_true', dest='mapping',
help='Generate mapping dictionary for use with `LayerMapping`.')
)
requires_model_validation = False
def handle(self, *args, **options):
try:
data_source, model_name = args
except ValueError:
raise CommandError('Invalid arguments, must provide: %s' % self.args)
if not gdal.HAS_GDAL:
raise CommandError('GDAL is required to inspect geospatial data sources.')
# Removing options with `None` values.
options = dict([(k, v) for k, v in options.items() if v is not None])
# Getting the OGR DataSource from the string parameter.
try:
ds = gdal.DataSource(data_source)
except gdal.OGRException as msg:
raise CommandError(msg)
# Whether the user wants to generate the LayerMapping dictionary as well.
show_mapping = options.pop('mapping', False)
# Getting rid of settings that `_ogrinspect` doesn't like.
verbosity = options.pop('verbosity', False)
settings = options.pop('settings', False)
# Returning the output of ogrinspect with the given arguments
# and options.
from django.contrib.gis.utils.ogrinspect import _ogrinspect, mapping
output = [s for s in _ogrinspect(ds, model_name, **options)]
if show_mapping:
# Constructing the keyword arguments for `mapping`, and
# calling it on the data source.
kwargs = {'geom_name' : options['geom_name'],
'layer_key' : options['layer_key'],
'multi_geom' : options['multi_geom'],
}
mapping_dict = mapping(ds, **kwargs)
# This extra legwork is so that the dictionary definition comes
# out in the same order as the fields in the model definition.
rev_mapping = dict([(v, k) for k, v in mapping_dict.items()])
output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
'%s_mapping = {' % model_name.lower()])
output.extend([" '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields])
output.extend([" '%s' : '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}'])
return '\n'.join(output) + '\n'
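# Command-line sketch (file and model names are placeholders):
#
#   ./manage.py ogrinspect zipcode.shp Zipcode --srid=4326 --mapping \
#       --multi-geom --name-field=name
#
# emits the GeoDjango model plus a `zipcode_mapping` dictionary that can
# be passed to `LayerMapping(Zipcode, 'zipcode.shp', zipcode_mapping)`.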
|
kelvin13/shifty-octocat | refs/heads/master | pygments/lexers/_scilab_builtins.py | 48 | # -*- coding: utf-8 -*-
"""
pygments.lexers._scilab_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Builtin list for the ScilabLexer.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# Autogenerated
commands_kw = (
'abort',
'apropos',
'break',
'case',
'catch',
'continue',
'do',
'else',
'elseif',
'end',
'endfunction',
'for',
'function',
'help',
'if',
'pause',
'quit',
'select',
'then',
'try',
'while',
)
functions_kw = (
'!!_invoke_',
'%H5Object_e',
'%H5Object_fieldnames',
'%H5Object_p',
'%XMLAttr_6',
'%XMLAttr_e',
'%XMLAttr_i_XMLElem',
'%XMLAttr_length',
'%XMLAttr_p',
'%XMLAttr_size',
'%XMLDoc_6',
'%XMLDoc_e',
'%XMLDoc_i_XMLList',
'%XMLDoc_p',
'%XMLElem_6',
'%XMLElem_e',
'%XMLElem_i_XMLDoc',
'%XMLElem_i_XMLElem',
'%XMLElem_i_XMLList',
'%XMLElem_p',
'%XMLList_6',
'%XMLList_e',
'%XMLList_i_XMLElem',
'%XMLList_i_XMLList',
'%XMLList_length',
'%XMLList_p',
'%XMLList_size',
'%XMLNs_6',
'%XMLNs_e',
'%XMLNs_i_XMLElem',
'%XMLNs_p',
'%XMLSet_6',
'%XMLSet_e',
'%XMLSet_length',
'%XMLSet_p',
'%XMLSet_size',
'%XMLValid_p',
'%_EClass_6',
'%_EClass_e',
'%_EClass_p',
'%_EObj_0',
'%_EObj_1__EObj',
'%_EObj_1_b',
'%_EObj_1_c',
'%_EObj_1_i',
'%_EObj_1_s',
'%_EObj_2__EObj',
'%_EObj_2_b',
'%_EObj_2_c',
'%_EObj_2_i',
'%_EObj_2_s',
'%_EObj_3__EObj',
'%_EObj_3_b',
'%_EObj_3_c',
'%_EObj_3_i',
'%_EObj_3_s',
'%_EObj_4__EObj',
'%_EObj_4_b',
'%_EObj_4_c',
'%_EObj_4_i',
'%_EObj_4_s',
'%_EObj_5',
'%_EObj_6',
'%_EObj_a__EObj',
'%_EObj_a_b',
'%_EObj_a_c',
'%_EObj_a_i',
'%_EObj_a_s',
'%_EObj_d__EObj',
'%_EObj_d_b',
'%_EObj_d_c',
'%_EObj_d_i',
'%_EObj_d_s',
'%_EObj_disp',
'%_EObj_e',
'%_EObj_g__EObj',
'%_EObj_g_b',
'%_EObj_g_c',
'%_EObj_g_i',
'%_EObj_g_s',
'%_EObj_h__EObj',
'%_EObj_h_b',
'%_EObj_h_c',
'%_EObj_h_i',
'%_EObj_h_s',
'%_EObj_i__EObj',
'%_EObj_j__EObj',
'%_EObj_j_b',
'%_EObj_j_c',
'%_EObj_j_i',
'%_EObj_j_s',
'%_EObj_k__EObj',
'%_EObj_k_b',
'%_EObj_k_c',
'%_EObj_k_i',
'%_EObj_k_s',
'%_EObj_l__EObj',
'%_EObj_l_b',
'%_EObj_l_c',
'%_EObj_l_i',
'%_EObj_l_s',
'%_EObj_m__EObj',
'%_EObj_m_b',
'%_EObj_m_c',
'%_EObj_m_i',
'%_EObj_m_s',
'%_EObj_n__EObj',
'%_EObj_n_b',
'%_EObj_n_c',
'%_EObj_n_i',
'%_EObj_n_s',
'%_EObj_o__EObj',
'%_EObj_o_b',
'%_EObj_o_c',
'%_EObj_o_i',
'%_EObj_o_s',
'%_EObj_p',
'%_EObj_p__EObj',
'%_EObj_p_b',
'%_EObj_p_c',
'%_EObj_p_i',
'%_EObj_p_s',
'%_EObj_q__EObj',
'%_EObj_q_b',
'%_EObj_q_c',
'%_EObj_q_i',
'%_EObj_q_s',
'%_EObj_r__EObj',
'%_EObj_r_b',
'%_EObj_r_c',
'%_EObj_r_i',
'%_EObj_r_s',
'%_EObj_s__EObj',
'%_EObj_s_b',
'%_EObj_s_c',
'%_EObj_s_i',
'%_EObj_s_s',
'%_EObj_t',
'%_EObj_x__EObj',
'%_EObj_x_b',
'%_EObj_x_c',
'%_EObj_x_i',
'%_EObj_x_s',
'%_EObj_y__EObj',
'%_EObj_y_b',
'%_EObj_y_c',
'%_EObj_y_i',
'%_EObj_y_s',
'%_EObj_z__EObj',
'%_EObj_z_b',
'%_EObj_z_c',
'%_EObj_z_i',
'%_EObj_z_s',
'%_eigs',
'%_load',
'%b_1__EObj',
'%b_2__EObj',
'%b_3__EObj',
'%b_4__EObj',
'%b_a__EObj',
'%b_d__EObj',
'%b_g__EObj',
'%b_h__EObj',
'%b_i_XMLList',
'%b_i__EObj',
'%b_j__EObj',
'%b_k__EObj',
'%b_l__EObj',
'%b_m__EObj',
'%b_n__EObj',
'%b_o__EObj',
'%b_p__EObj',
'%b_q__EObj',
'%b_r__EObj',
'%b_s__EObj',
'%b_x__EObj',
'%b_y__EObj',
'%b_z__EObj',
'%c_1__EObj',
'%c_2__EObj',
'%c_3__EObj',
'%c_4__EObj',
'%c_a__EObj',
'%c_d__EObj',
'%c_g__EObj',
'%c_h__EObj',
'%c_i_XMLAttr',
'%c_i_XMLDoc',
'%c_i_XMLElem',
'%c_i_XMLList',
'%c_i__EObj',
'%c_j__EObj',
'%c_k__EObj',
'%c_l__EObj',
'%c_m__EObj',
'%c_n__EObj',
'%c_o__EObj',
'%c_p__EObj',
'%c_q__EObj',
'%c_r__EObj',
'%c_s__EObj',
'%c_x__EObj',
'%c_y__EObj',
'%c_z__EObj',
'%ce_i_XMLList',
'%fptr_i_XMLList',
'%h_i_XMLList',
'%hm_i_XMLList',
'%i_1__EObj',
'%i_2__EObj',
'%i_3__EObj',
'%i_4__EObj',
'%i_a__EObj',
'%i_abs',
'%i_cumprod',
'%i_cumsum',
'%i_d__EObj',
'%i_diag',
'%i_g__EObj',
'%i_h__EObj',
'%i_i_XMLList',
'%i_i__EObj',
'%i_j__EObj',
'%i_k__EObj',
'%i_l__EObj',
'%i_m__EObj',
'%i_matrix',
'%i_max',
'%i_maxi',
'%i_min',
'%i_mini',
'%i_mput',
'%i_n__EObj',
'%i_o__EObj',
'%i_p',
'%i_p__EObj',
'%i_prod',
'%i_q__EObj',
'%i_r__EObj',
'%i_s__EObj',
'%i_sum',
'%i_tril',
'%i_triu',
'%i_x__EObj',
'%i_y__EObj',
'%i_z__EObj',
'%ip_i_XMLList',
'%l_i_XMLList',
'%l_i__EObj',
'%lss_i_XMLList',
'%mc_i_XMLList',
'%msp_full',
'%msp_i_XMLList',
'%msp_spget',
'%p_i_XMLList',
'%ptr_i_XMLList',
'%r_i_XMLList',
'%s_1__EObj',
'%s_2__EObj',
'%s_3__EObj',
'%s_4__EObj',
'%s_a__EObj',
'%s_d__EObj',
'%s_g__EObj',
'%s_h__EObj',
'%s_i_XMLList',
'%s_i__EObj',
'%s_j__EObj',
'%s_k__EObj',
'%s_l__EObj',
'%s_m__EObj',
'%s_n__EObj',
'%s_o__EObj',
'%s_p__EObj',
'%s_q__EObj',
'%s_r__EObj',
'%s_s__EObj',
'%s_x__EObj',
'%s_y__EObj',
'%s_z__EObj',
'%sp_i_XMLList',
'%spb_i_XMLList',
'%st_i_XMLList',
'Calendar',
'ClipBoard',
'Matplot',
'Matplot1',
'PlaySound',
'TCL_DeleteInterp',
'TCL_DoOneEvent',
'TCL_EvalFile',
'TCL_EvalStr',
'TCL_ExistArray',
'TCL_ExistInterp',
'TCL_ExistVar',
'TCL_GetVar',
'TCL_GetVersion',
'TCL_SetVar',
'TCL_UnsetVar',
'TCL_UpVar',
'_',
'_code2str',
'_d',
'_str2code',
'about',
'abs',
'acos',
'addModulePreferences',
'addcolor',
'addf',
'addhistory',
'addinter',
'addlocalizationdomain',
'amell',
'and',
'argn',
'arl2_ius',
'ascii',
'asin',
'atan',
'backslash',
'balanc',
'banner',
'base2dec',
'basename',
'bdiag',
'beep',
'besselh',
'besseli',
'besselj',
'besselk',
'bessely',
'beta',
'bezout',
'bfinit',
'blkfc1i',
'blkslvi',
'bool2s',
'browsehistory',
'browsevar',
'bsplin3val',
'buildDoc',
'buildouttb',
'bvode',
'c_link',
'call',
'callblk',
'captions',
'cd',
'cdfbet',
'cdfbin',
'cdfchi',
'cdfchn',
'cdff',
'cdffnc',
'cdfgam',
'cdfnbn',
'cdfnor',
'cdfpoi',
'cdft',
'ceil',
'champ',
'champ1',
'chdir',
'chol',
'clc',
'clean',
'clear',
'clearfun',
'clearglobal',
'closeEditor',
'closeEditvar',
'closeXcos',
'code2str',
'coeff',
'color',
'comp',
'completion',
'conj',
'contour2di',
'contr',
'conv2',
'convstr',
'copy',
'copyfile',
'corr',
'cos',
'coserror',
'createdir',
'cshep2d',
'csvDefault',
'csvIsnum',
'csvRead',
'csvStringToDouble',
'csvTextScan',
'csvWrite',
'ctree2',
'ctree3',
'ctree4',
'cumprod',
'cumsum',
'curblock',
'curblockc',
'daskr',
'dasrt',
'dassl',
'data2sig',
'datatipCreate',
'datatipManagerMode',
'datatipMove',
'datatipRemove',
'datatipSetDisplay',
'datatipSetInterp',
'datatipSetOrientation',
'datatipSetStyle',
'datatipToggle',
'dawson',
'dct',
'debug',
'dec2base',
'deff',
'definedfields',
'degree',
'delbpt',
'delete',
'deletefile',
'delip',
'delmenu',
'det',
'dgettext',
'dhinf',
'diag',
'diary',
'diffobjs',
'disp',
'dispbpt',
'displayhistory',
'disposefftwlibrary',
'dlgamma',
'dnaupd',
'dneupd',
'double',
'drawaxis',
'drawlater',
'drawnow',
'driver',
'dsaupd',
'dsearch',
'dseupd',
'dst',
'duplicate',
'editvar',
'emptystr',
'end_scicosim',
'ereduc',
'erf',
'erfc',
'erfcx',
'erfi',
'errcatch',
'errclear',
'error',
'eval_cshep2d',
'exec',
'execstr',
'exists',
'exit',
'exp',
'expm',
'exportUI',
'export_to_hdf5',
'eye',
'fadj2sp',
'fec',
'feval',
'fft',
'fftw',
'fftw_flags',
'fftw_forget_wisdom',
'fftwlibraryisloaded',
'figure',
'file',
'filebrowser',
'fileext',
'fileinfo',
'fileparts',
'filesep',
'find',
'findBD',
'findfiles',
'fire_closing_finished',
'floor',
'format',
'fort',
'fprintfMat',
'freq',
'frexp',
'fromc',
'fromjava',
'fscanfMat',
'fsolve',
'fstair',
'full',
'fullpath',
'funcprot',
'funptr',
'gamma',
'gammaln',
'geom3d',
'get',
'getURL',
'get_absolute_file_path',
'get_fftw_wisdom',
'getblocklabel',
'getcallbackobject',
'getdate',
'getdebuginfo',
'getdefaultlanguage',
'getdrives',
'getdynlibext',
'getenv',
'getfield',
'gethistory',
'gethistoryfile',
'getinstalledlookandfeels',
'getio',
'getlanguage',
'getlongpathname',
'getlookandfeel',
'getmd5',
'getmemory',
'getmodules',
'getos',
'getpid',
'getrelativefilename',
'getscicosvars',
'getscilabmode',
'getshortpathname',
'gettext',
'getvariablesonstack',
'getversion',
'glist',
'global',
'glue',
'grand',
'graphicfunction',
'grayplot',
'grep',
'gsort',
'gstacksize',
'h5attr',
'h5close',
'h5cp',
'h5dataset',
'h5dump',
'h5exists',
'h5flush',
'h5get',
'h5group',
'h5isArray',
'h5isAttr',
'h5isCompound',
'h5isFile',
'h5isGroup',
'h5isList',
'h5isRef',
'h5isSet',
'h5isSpace',
'h5isType',
'h5isVlen',
'h5label',
'h5ln',
'h5ls',
'h5mount',
'h5mv',
'h5open',
'h5read',
'h5readattr',
'h5rm',
'h5umount',
'h5write',
'h5writeattr',
'havewindow',
'helpbrowser',
'hess',
'hinf',
'historymanager',
'historysize',
'host',
'htmlDump',
'htmlRead',
'htmlReadStr',
'htmlWrite',
'iconvert',
'ieee',
'ilib_verbose',
'imag',
'impl',
'import_from_hdf5',
'imult',
'inpnvi',
'int',
'int16',
'int2d',
'int32',
'int3d',
'int8',
'interp',
'interp2d',
'interp3d',
'intg',
'intppty',
'inttype',
'inv',
'invoke_lu',
'is_handle_valid',
'is_hdf5_file',
'isalphanum',
'isascii',
'isdef',
'isdigit',
'isdir',
'isequal',
'isequalbitwise',
'iserror',
'isfile',
'isglobal',
'isletter',
'isnum',
'isreal',
'iswaitingforinput',
'jallowClassReloading',
'jarray',
'jautoTranspose',
'jautoUnwrap',
'javaclasspath',
'javalibrarypath',
'jcast',
'jcompile',
'jconvMatrixMethod',
'jcreatejar',
'jdeff',
'jdisableTrace',
'jenableTrace',
'jexists',
'jgetclassname',
'jgetfield',
'jgetfields',
'jgetinfo',
'jgetmethods',
'jimport',
'jinvoke',
'jinvoke_db',
'jnewInstance',
'jremove',
'jsetfield',
'junwrap',
'junwraprem',
'jwrap',
'jwrapinfloat',
'kron',
'lasterror',
'ldiv',
'ldivf',
'legendre',
'length',
'lib',
'librarieslist',
'libraryinfo',
'light',
'linear_interpn',
'lines',
'link',
'linmeq',
'list',
'listvar_in_hdf5',
'load',
'loadGui',
'loadScicos',
'loadXcos',
'loadfftwlibrary',
'loadhistory',
'log',
'log1p',
'lsq',
'lsq_splin',
'lsqrsolve',
'lsslist',
'lstcat',
'lstsize',
'ltitr',
'lu',
'ludel',
'lufact',
'luget',
'lusolve',
'macr2lst',
'macr2tree',
'matfile_close',
'matfile_listvar',
'matfile_open',
'matfile_varreadnext',
'matfile_varwrite',
'matrix',
'max',
'maxfiles',
'mclearerr',
'mclose',
'meof',
'merror',
'messagebox',
'mfprintf',
'mfscanf',
'mget',
'mgeti',
'mgetl',
'mgetstr',
'min',
'mlist',
'mode',
'model2blk',
'mopen',
'move',
'movefile',
'mprintf',
'mput',
'mputl',
'mputstr',
'mscanf',
'mseek',
'msprintf',
'msscanf',
'mtell',
'mtlb_mode',
'mtlb_sparse',
'mucomp',
'mulf',
'name2rgb',
'nearfloat',
'newaxes',
'newest',
'newfun',
'nnz',
'norm',
'notify',
'number_properties',
'ode',
'odedc',
'ones',
'openged',
'opentk',
'optim',
'or',
'ordmmd',
'parallel_concurrency',
'parallel_run',
'param3d',
'param3d1',
'part',
'pathconvert',
'pathsep',
'phase_simulation',
'plot2d',
'plot2d1',
'plot2d2',
'plot2d3',
'plot2d4',
'plot3d',
'plot3d1',
'plotbrowser',
'pointer_xproperty',
'poly',
'ppol',
'pppdiv',
'predef',
'preferences',
'print',
'printf',
'printfigure',
'printsetupbox',
'prod',
'progressionbar',
'prompt',
'pwd',
'qld',
'qp_solve',
'qr',
'raise_window',
'rand',
'rankqr',
'rat',
'rcond',
'rdivf',
'read',
'read4b',
'read_csv',
'readb',
'readgateway',
'readmps',
'real',
'realtime',
'realtimeinit',
'regexp',
'relocate_handle',
'remez',
'removeModulePreferences',
'removedir',
'removelinehistory',
'res_with_prec',
'resethistory',
'residu',
'resume',
'return',
'ricc',
'rlist',
'roots',
'rotate_axes',
'round',
'rpem',
'rtitr',
'rubberbox',
'save',
'saveGui',
'saveafterncommands',
'saveconsecutivecommands',
'savehistory',
'schur',
'sci_haltscicos',
'sci_tree2',
'sci_tree3',
'sci_tree4',
'sciargs',
'scicos_debug',
'scicos_debug_count',
'scicos_time',
'scicosim',
'scinotes',
'sctree',
'semidef',
'set',
'set_blockerror',
'set_fftw_wisdom',
'set_xproperty',
'setbpt',
'setdefaultlanguage',
'setenv',
'setfield',
'sethistoryfile',
'setlanguage',
'setlookandfeel',
'setmenu',
'sfact',
'sfinit',
'show_window',
'sident',
'sig2data',
'sign',
'simp',
'simp_mode',
'sin',
'size',
'slash',
'sleep',
'sorder',
'sparse',
'spchol',
'spcompack',
'spec',
'spget',
'splin',
'splin2d',
'splin3d',
'splitURL',
'spones',
'sprintf',
'sqrt',
'stacksize',
'str2code',
'strcat',
'strchr',
'strcmp',
'strcspn',
'strindex',
'string',
'stringbox',
'stripblanks',
'strncpy',
'strrchr',
'strrev',
'strsplit',
'strspn',
'strstr',
'strsubst',
'strtod',
'strtok',
'subf',
'sum',
'svd',
'swap_handles',
'symfcti',
'syredi',
'system_getproperty',
'system_setproperty',
'ta2lpd',
'tan',
'taucs_chdel',
'taucs_chfact',
'taucs_chget',
'taucs_chinfo',
'taucs_chsolve',
'tempname',
'testmatrix',
'timer',
'tlist',
'tohome',
'tokens',
'toolbar',
'toprint',
'tr_zer',
'tril',
'triu',
'type',
'typename',
'uiDisplayTree',
'uicontextmenu',
'uicontrol',
'uigetcolor',
'uigetdir',
'uigetfile',
'uigetfont',
'uimenu',
'uint16',
'uint32',
'uint8',
'uipopup',
'uiputfile',
'uiwait',
'ulink',
'umf_ludel',
'umf_lufact',
'umf_luget',
'umf_luinfo',
'umf_lusolve',
'umfpack',
'unglue',
'unix',
'unsetmenu',
'unzoom',
'updatebrowsevar',
'usecanvas',
'useeditor',
'user',
'var2vec',
'varn',
'vec2var',
'waitbar',
'warnBlockByUID',
'warning',
'what',
'where',
'whereis',
'who',
'winsid',
'with_module',
'writb',
'write',
'write4b',
'write_csv',
'x_choose',
'x_choose_modeless',
'x_dialog',
'x_mdialog',
'xarc',
'xarcs',
'xarrows',
'xchange',
'xchoicesi',
'xclick',
'xcos',
'xcosAddToolsMenu',
'xcosConfigureXmlFile',
'xcosDiagramToScilab',
'xcosPalCategoryAdd',
'xcosPalDelete',
'xcosPalDisable',
'xcosPalEnable',
'xcosPalGenerateIcon',
'xcosPalGet',
'xcosPalLoad',
'xcosPalMove',
'xcosSimulationStarted',
'xcosUpdateBlock',
'xdel',
'xend',
'xfarc',
'xfarcs',
'xfpoly',
'xfpolys',
'xfrect',
'xget',
'xgetmouse',
'xgraduate',
'xgrid',
'xinit',
'xlfont',
'xls_open',
'xls_read',
'xmlAddNs',
'xmlAppend',
'xmlAsNumber',
'xmlAsText',
'xmlDTD',
'xmlDelete',
'xmlDocument',
'xmlDump',
'xmlElement',
'xmlFormat',
'xmlGetNsByHref',
'xmlGetNsByPrefix',
'xmlGetOpenDocs',
'xmlIsValidObject',
'xmlName',
'xmlNs',
'xmlRead',
'xmlReadStr',
'xmlRelaxNG',
'xmlRemove',
'xmlSchema',
'xmlSetAttributes',
'xmlValidate',
'xmlWrite',
'xmlXPath',
'xname',
'xpause',
'xpoly',
'xpolys',
'xrect',
'xrects',
'xs2bmp',
'xs2emf',
'xs2eps',
'xs2gif',
'xs2jpg',
'xs2pdf',
'xs2png',
'xs2ppm',
'xs2ps',
'xs2svg',
'xsegs',
'xset',
'xstring',
'xstringb',
'xtitle',
'zeros',
'znaupd',
'zneupd',
'zoom_rect',
)
macros_kw = (
'!_deff_wrapper',
'%0_i_st',
'%3d_i_h',
'%Block_xcosUpdateBlock',
'%TNELDER_p',
'%TNELDER_string',
'%TNMPLOT_p',
'%TNMPLOT_string',
'%TOPTIM_p',
'%TOPTIM_string',
'%TSIMPLEX_p',
'%TSIMPLEX_string',
'%_EVoid_p',
'%_gsort',
'%_listvarinfile',
'%_rlist',
'%_save',
'%_sodload',
'%_strsplit',
'%_unwrap',
'%ar_p',
'%asn',
'%b_a_b',
'%b_a_s',
'%b_c_s',
'%b_c_spb',
'%b_cumprod',
'%b_cumsum',
'%b_d_s',
'%b_diag',
'%b_e',
'%b_f_s',
'%b_f_spb',
'%b_g_s',
'%b_g_spb',
'%b_grand',
'%b_h_s',
'%b_h_spb',
'%b_i_b',
'%b_i_ce',
'%b_i_h',
'%b_i_hm',
'%b_i_s',
'%b_i_sp',
'%b_i_spb',
'%b_i_st',
'%b_iconvert',
'%b_l_b',
'%b_l_s',
'%b_m_b',
'%b_m_s',
'%b_matrix',
'%b_n_hm',
'%b_o_hm',
'%b_p_s',
'%b_prod',
'%b_r_b',
'%b_r_s',
'%b_s_b',
'%b_s_s',
'%b_string',
'%b_sum',
'%b_tril',
'%b_triu',
'%b_x_b',
'%b_x_s',
'%bicg',
'%bicgstab',
'%c_a_c',
'%c_b_c',
'%c_b_s',
'%c_diag',
'%c_dsearch',
'%c_e',
'%c_eye',
'%c_f_s',
'%c_grand',
'%c_i_c',
'%c_i_ce',
'%c_i_h',
'%c_i_hm',
'%c_i_lss',
'%c_i_r',
'%c_i_s',
'%c_i_st',
'%c_matrix',
'%c_n_l',
'%c_n_st',
'%c_o_l',
'%c_o_st',
'%c_ones',
'%c_rand',
'%c_tril',
'%c_triu',
'%cblock_c_cblock',
'%cblock_c_s',
'%cblock_e',
'%cblock_f_cblock',
'%cblock_p',
'%cblock_size',
'%ce_6',
'%ce_c_ce',
'%ce_e',
'%ce_f_ce',
'%ce_i_ce',
'%ce_i_s',
'%ce_i_st',
'%ce_matrix',
'%ce_p',
'%ce_size',
'%ce_string',
'%ce_t',
'%cgs',
'%champdat_i_h',
'%choose',
'%diagram_xcos',
'%dir_p',
'%fptr_i_st',
'%grand_perm',
'%grayplot_i_h',
'%h_i_st',
'%hmS_k_hmS_generic',
'%hm_1_hm',
'%hm_1_s',
'%hm_2_hm',
'%hm_2_s',
'%hm_3_hm',
'%hm_3_s',
'%hm_4_hm',
'%hm_4_s',
'%hm_5',
'%hm_a_hm',
'%hm_a_r',
'%hm_a_s',
'%hm_abs',
'%hm_and',
'%hm_bool2s',
'%hm_c_hm',
'%hm_ceil',
'%hm_conj',
'%hm_cos',
'%hm_cumprod',
'%hm_cumsum',
'%hm_d_hm',
'%hm_d_s',
'%hm_degree',
'%hm_dsearch',
'%hm_e',
'%hm_exp',
'%hm_eye',
'%hm_f_hm',
'%hm_find',
'%hm_floor',
'%hm_g_hm',
'%hm_grand',
'%hm_gsort',
'%hm_h_hm',
'%hm_i_b',
'%hm_i_ce',
'%hm_i_h',
'%hm_i_hm',
'%hm_i_i',
'%hm_i_p',
'%hm_i_r',
'%hm_i_s',
'%hm_i_st',
'%hm_iconvert',
'%hm_imag',
'%hm_int',
'%hm_isnan',
'%hm_isreal',
'%hm_j_hm',
'%hm_j_s',
'%hm_k_hm',
'%hm_k_s',
'%hm_log',
'%hm_m_p',
'%hm_m_r',
'%hm_m_s',
'%hm_matrix',
'%hm_max',
'%hm_mean',
'%hm_median',
'%hm_min',
'%hm_n_b',
'%hm_n_c',
'%hm_n_hm',
'%hm_n_i',
'%hm_n_p',
'%hm_n_s',
'%hm_o_b',
'%hm_o_c',
'%hm_o_hm',
'%hm_o_i',
'%hm_o_p',
'%hm_o_s',
'%hm_ones',
'%hm_or',
'%hm_p',
'%hm_prod',
'%hm_q_hm',
'%hm_r_s',
'%hm_rand',
'%hm_real',
'%hm_round',
'%hm_s',
'%hm_s_hm',
'%hm_s_r',
'%hm_s_s',
'%hm_sign',
'%hm_sin',
'%hm_size',
'%hm_sqrt',
'%hm_stdev',
'%hm_string',
'%hm_sum',
'%hm_x_hm',
'%hm_x_p',
'%hm_x_s',
'%hm_zeros',
'%i_1_s',
'%i_2_s',
'%i_3_s',
'%i_4_s',
'%i_Matplot',
'%i_a_i',
'%i_a_s',
'%i_and',
'%i_ascii',
'%i_b_s',
'%i_bezout',
'%i_champ',
'%i_champ1',
'%i_contour',
'%i_contour2d',
'%i_d_i',
'%i_d_s',
'%i_dsearch',
'%i_e',
'%i_fft',
'%i_g_i',
'%i_gcd',
'%i_grand',
'%i_h_i',
'%i_i_ce',
'%i_i_h',
'%i_i_hm',
'%i_i_i',
'%i_i_s',
'%i_i_st',
'%i_j_i',
'%i_j_s',
'%i_l_s',
'%i_lcm',
'%i_length',
'%i_m_i',
'%i_m_s',
'%i_mfprintf',
'%i_mprintf',
'%i_msprintf',
'%i_n_s',
'%i_o_s',
'%i_or',
'%i_p_i',
'%i_p_s',
'%i_plot2d',
'%i_plot2d1',
'%i_plot2d2',
'%i_q_s',
'%i_r_i',
'%i_r_s',
'%i_round',
'%i_s_i',
'%i_s_s',
'%i_sign',
'%i_string',
'%i_x_i',
'%i_x_s',
'%ip_a_s',
'%ip_i_st',
'%ip_m_s',
'%ip_n_ip',
'%ip_o_ip',
'%ip_p',
'%ip_part',
'%ip_s_s',
'%ip_string',
'%k',
'%l_i_h',
'%l_i_s',
'%l_i_st',
'%l_isequal',
'%l_n_c',
'%l_n_l',
'%l_n_m',
'%l_n_p',
'%l_n_s',
'%l_n_st',
'%l_o_c',
'%l_o_l',
'%l_o_m',
'%l_o_p',
'%l_o_s',
'%l_o_st',
'%lss_a_lss',
'%lss_a_p',
'%lss_a_r',
'%lss_a_s',
'%lss_c_lss',
'%lss_c_p',
'%lss_c_r',
'%lss_c_s',
'%lss_e',
'%lss_eye',
'%lss_f_lss',
'%lss_f_p',
'%lss_f_r',
'%lss_f_s',
'%lss_i_ce',
'%lss_i_lss',
'%lss_i_p',
'%lss_i_r',
'%lss_i_s',
'%lss_i_st',
'%lss_inv',
'%lss_l_lss',
'%lss_l_p',
'%lss_l_r',
'%lss_l_s',
'%lss_m_lss',
'%lss_m_p',
'%lss_m_r',
'%lss_m_s',
'%lss_n_lss',
'%lss_n_p',
'%lss_n_r',
'%lss_n_s',
'%lss_norm',
'%lss_o_lss',
'%lss_o_p',
'%lss_o_r',
'%lss_o_s',
'%lss_ones',
'%lss_r_lss',
'%lss_r_p',
'%lss_r_r',
'%lss_r_s',
'%lss_rand',
'%lss_s',
'%lss_s_lss',
'%lss_s_p',
'%lss_s_r',
'%lss_s_s',
'%lss_size',
'%lss_t',
'%lss_v_lss',
'%lss_v_p',
'%lss_v_r',
'%lss_v_s',
'%lt_i_s',
'%m_n_l',
'%m_o_l',
'%mc_i_h',
'%mc_i_s',
'%mc_i_st',
'%mc_n_st',
'%mc_o_st',
'%mc_string',
'%mps_p',
'%mps_string',
'%msp_a_s',
'%msp_abs',
'%msp_e',
'%msp_find',
'%msp_i_s',
'%msp_i_st',
'%msp_length',
'%msp_m_s',
'%msp_maxi',
'%msp_n_msp',
'%msp_nnz',
'%msp_o_msp',
'%msp_p',
'%msp_sparse',
'%msp_spones',
'%msp_t',
'%p_a_lss',
'%p_a_r',
'%p_c_lss',
'%p_c_r',
'%p_cumprod',
'%p_cumsum',
'%p_d_p',
'%p_d_r',
'%p_d_s',
'%p_det',
'%p_e',
'%p_f_lss',
'%p_f_r',
'%p_grand',
'%p_i_ce',
'%p_i_h',
'%p_i_hm',
'%p_i_lss',
'%p_i_p',
'%p_i_r',
'%p_i_s',
'%p_i_st',
'%p_inv',
'%p_j_s',
'%p_k_p',
'%p_k_r',
'%p_k_s',
'%p_l_lss',
'%p_l_p',
'%p_l_r',
'%p_l_s',
'%p_m_hm',
'%p_m_lss',
'%p_m_r',
'%p_matrix',
'%p_n_l',
'%p_n_lss',
'%p_n_r',
'%p_o_l',
'%p_o_lss',
'%p_o_r',
'%p_o_sp',
'%p_p_s',
'%p_part',
'%p_prod',
'%p_q_p',
'%p_q_r',
'%p_q_s',
'%p_r_lss',
'%p_r_p',
'%p_r_r',
'%p_r_s',
'%p_s_lss',
'%p_s_r',
'%p_simp',
'%p_string',
'%p_sum',
'%p_v_lss',
'%p_v_p',
'%p_v_r',
'%p_v_s',
'%p_x_hm',
'%p_x_r',
'%p_y_p',
'%p_y_r',
'%p_y_s',
'%p_z_p',
'%p_z_r',
'%p_z_s',
'%pcg',
'%plist_p',
'%plist_string',
'%r_0',
'%r_a_hm',
'%r_a_lss',
'%r_a_p',
'%r_a_r',
'%r_a_s',
'%r_c_lss',
'%r_c_p',
'%r_c_r',
'%r_c_s',
'%r_clean',
'%r_cumprod',
'%r_cumsum',
'%r_d_p',
'%r_d_r',
'%r_d_s',
'%r_det',
'%r_diag',
'%r_e',
'%r_eye',
'%r_f_lss',
'%r_f_p',
'%r_f_r',
'%r_f_s',
'%r_i_ce',
'%r_i_hm',
'%r_i_lss',
'%r_i_p',
'%r_i_r',
'%r_i_s',
'%r_i_st',
'%r_inv',
'%r_j_s',
'%r_k_p',
'%r_k_r',
'%r_k_s',
'%r_l_lss',
'%r_l_p',
'%r_l_r',
'%r_l_s',
'%r_m_hm',
'%r_m_lss',
'%r_m_p',
'%r_m_r',
'%r_m_s',
'%r_matrix',
'%r_n_lss',
'%r_n_p',
'%r_n_r',
'%r_n_s',
'%r_norm',
'%r_o_lss',
'%r_o_p',
'%r_o_r',
'%r_o_s',
'%r_ones',
'%r_p',
'%r_p_s',
'%r_prod',
'%r_q_p',
'%r_q_r',
'%r_q_s',
'%r_r_lss',
'%r_r_p',
'%r_r_r',
'%r_r_s',
'%r_rand',
'%r_s',
'%r_s_hm',
'%r_s_lss',
'%r_s_p',
'%r_s_r',
'%r_s_s',
'%r_simp',
'%r_size',
'%r_string',
'%r_sum',
'%r_t',
'%r_tril',
'%r_triu',
'%r_v_lss',
'%r_v_p',
'%r_v_r',
'%r_v_s',
'%r_varn',
'%r_x_p',
'%r_x_r',
'%r_x_s',
'%r_y_p',
'%r_y_r',
'%r_y_s',
'%r_z_p',
'%r_z_r',
'%r_z_s',
'%s_1_hm',
'%s_1_i',
'%s_2_hm',
'%s_2_i',
'%s_3_hm',
'%s_3_i',
'%s_4_hm',
'%s_4_i',
'%s_5',
'%s_a_b',
'%s_a_hm',
'%s_a_i',
'%s_a_ip',
'%s_a_lss',
'%s_a_msp',
'%s_a_r',
'%s_a_sp',
'%s_and',
'%s_b_i',
'%s_b_s',
'%s_bezout',
'%s_c_b',
'%s_c_cblock',
'%s_c_lss',
'%s_c_r',
'%s_c_sp',
'%s_d_b',
'%s_d_i',
'%s_d_p',
'%s_d_r',
'%s_d_sp',
'%s_e',
'%s_f_b',
'%s_f_cblock',
'%s_f_lss',
'%s_f_r',
'%s_f_sp',
'%s_g_b',
'%s_g_s',
'%s_gcd',
'%s_grand',
'%s_h_b',
'%s_h_s',
'%s_i_b',
'%s_i_c',
'%s_i_ce',
'%s_i_h',
'%s_i_hm',
'%s_i_i',
'%s_i_lss',
'%s_i_p',
'%s_i_r',
'%s_i_s',
'%s_i_sp',
'%s_i_spb',
'%s_i_st',
'%s_j_i',
'%s_k_hm',
'%s_k_p',
'%s_k_r',
'%s_k_sp',
'%s_l_b',
'%s_l_hm',
'%s_l_i',
'%s_l_lss',
'%s_l_p',
'%s_l_r',
'%s_l_s',
'%s_l_sp',
'%s_lcm',
'%s_m_b',
'%s_m_hm',
'%s_m_i',
'%s_m_ip',
'%s_m_lss',
'%s_m_msp',
'%s_m_r',
'%s_matrix',
'%s_n_hm',
'%s_n_i',
'%s_n_l',
'%s_n_lss',
'%s_n_r',
'%s_n_st',
'%s_o_hm',
'%s_o_i',
'%s_o_l',
'%s_o_lss',
'%s_o_r',
'%s_o_st',
'%s_or',
'%s_p_b',
'%s_p_i',
'%s_pow',
'%s_q_hm',
'%s_q_i',
'%s_q_p',
'%s_q_r',
'%s_q_sp',
'%s_r_b',
'%s_r_i',
'%s_r_lss',
'%s_r_p',
'%s_r_r',
'%s_r_s',
'%s_r_sp',
'%s_s_b',
'%s_s_hm',
'%s_s_i',
'%s_s_ip',
'%s_s_lss',
'%s_s_r',
'%s_s_sp',
'%s_simp',
'%s_v_lss',
'%s_v_p',
'%s_v_r',
'%s_v_s',
'%s_x_b',
'%s_x_hm',
'%s_x_i',
'%s_x_r',
'%s_y_p',
'%s_y_r',
'%s_y_sp',
'%s_z_p',
'%s_z_r',
'%s_z_sp',
'%sn',
'%sp_a_s',
'%sp_a_sp',
'%sp_and',
'%sp_c_s',
'%sp_ceil',
'%sp_conj',
'%sp_cos',
'%sp_cumprod',
'%sp_cumsum',
'%sp_d_s',
'%sp_d_sp',
'%sp_det',
'%sp_diag',
'%sp_e',
'%sp_exp',
'%sp_f_s',
'%sp_floor',
'%sp_grand',
'%sp_gsort',
'%sp_i_ce',
'%sp_i_h',
'%sp_i_s',
'%sp_i_sp',
'%sp_i_st',
'%sp_int',
'%sp_inv',
'%sp_k_s',
'%sp_k_sp',
'%sp_l_s',
'%sp_l_sp',
'%sp_length',
'%sp_max',
'%sp_min',
'%sp_norm',
'%sp_or',
'%sp_p_s',
'%sp_prod',
'%sp_q_s',
'%sp_q_sp',
'%sp_r_s',
'%sp_r_sp',
'%sp_round',
'%sp_s_s',
'%sp_s_sp',
'%sp_sin',
'%sp_sqrt',
'%sp_string',
'%sp_sum',
'%sp_tril',
'%sp_triu',
'%sp_y_s',
'%sp_y_sp',
'%sp_z_s',
'%sp_z_sp',
'%spb_and',
'%spb_c_b',
'%spb_cumprod',
'%spb_cumsum',
'%spb_diag',
'%spb_e',
'%spb_f_b',
'%spb_g_b',
'%spb_g_spb',
'%spb_h_b',
'%spb_h_spb',
'%spb_i_b',
'%spb_i_ce',
'%spb_i_h',
'%spb_i_st',
'%spb_or',
'%spb_prod',
'%spb_sum',
'%spb_tril',
'%spb_triu',
'%st_6',
'%st_c_st',
'%st_e',
'%st_f_st',
'%st_i_b',
'%st_i_c',
'%st_i_fptr',
'%st_i_h',
'%st_i_i',
'%st_i_ip',
'%st_i_lss',
'%st_i_msp',
'%st_i_p',
'%st_i_r',
'%st_i_s',
'%st_i_sp',
'%st_i_spb',
'%st_i_st',
'%st_matrix',
'%st_n_c',
'%st_n_l',
'%st_n_mc',
'%st_n_p',
'%st_n_s',
'%st_o_c',
'%st_o_l',
'%st_o_mc',
'%st_o_p',
'%st_o_s',
'%st_o_tl',
'%st_p',
'%st_size',
'%st_string',
'%st_t',
'%ticks_i_h',
'%xls_e',
'%xls_p',
'%xlssheet_e',
'%xlssheet_p',
'%xlssheet_size',
'%xlssheet_string',
'DominationRank',
'G_make',
'IsAScalar',
'NDcost',
'OS_Version',
'PlotSparse',
'ReadHBSparse',
'TCL_CreateSlave',
'abcd',
'abinv',
'accept_func_default',
'accept_func_vfsa',
'acf',
'acosd',
'acosh',
'acoshm',
'acosm',
'acot',
'acotd',
'acoth',
'acsc',
'acscd',
'acsch',
'add_demo',
'add_help_chapter',
'add_module_help_chapter',
'add_param',
'add_profiling',
'adj2sp',
'aff2ab',
'ana_style',
'analpf',
'analyze',
'aplat',
'arhnk',
'arl2',
'arma2p',
'arma2ss',
'armac',
'armax',
'armax1',
'arobasestring2strings',
'arsimul',
'ascii2string',
'asciimat',
'asec',
'asecd',
'asech',
'asind',
'asinh',
'asinhm',
'asinm',
'assert_checkalmostequal',
'assert_checkequal',
'assert_checkerror',
'assert_checkfalse',
'assert_checkfilesequal',
'assert_checktrue',
'assert_comparecomplex',
'assert_computedigits',
'assert_cond2reltol',
'assert_cond2reqdigits',
'assert_generror',
'atand',
'atanh',
'atanhm',
'atanm',
'atomsAutoload',
'atomsAutoloadAdd',
'atomsAutoloadDel',
'atomsAutoloadList',
'atomsCategoryList',
'atomsCheckModule',
'atomsDepTreeShow',
'atomsGetConfig',
'atomsGetInstalled',
'atomsGetInstalledPath',
'atomsGetLoaded',
'atomsGetLoadedPath',
'atomsInstall',
'atomsIsInstalled',
'atomsIsLoaded',
'atomsList',
'atomsLoad',
'atomsQuit',
'atomsRemove',
'atomsRepositoryAdd',
'atomsRepositoryDel',
'atomsRepositoryList',
'atomsRestoreConfig',
'atomsSaveConfig',
'atomsSearch',
'atomsSetConfig',
'atomsShow',
'atomsSystemInit',
'atomsSystemUpdate',
'atomsTest',
'atomsUpdate',
'atomsVersion',
'augment',
'auread',
'auwrite',
'balreal',
'bench_run',
'bilin',
'bilt',
'bin2dec',
'binomial',
'bitand',
'bitcmp',
'bitget',
'bitor',
'bitset',
'bitxor',
'black',
'blanks',
'bloc2exp',
'bloc2ss',
'block_parameter_error',
'bode',
'bode_asymp',
'bstap',
'buttmag',
'bvodeS',
'bytecode',
'bytecodewalk',
'cainv',
'calendar',
'calerf',
'calfrq',
'canon',
'casc',
'cat',
'cat_code',
'cb_m2sci_gui',
'ccontrg',
'cell',
'cell2mat',
'cellstr',
'center',
'cepstrum',
'cfspec',
'char',
'chart',
'cheb1mag',
'cheb2mag',
'check_gateways',
'check_modules_xml',
'check_versions',
'chepol',
'chfact',
'chsolve',
'classmarkov',
'clean_help',
'clock',
'cls2dls',
'cmb_lin',
'cmndred',
'cmoment',
'coding_ga_binary',
'coding_ga_identity',
'coff',
'coffg',
'colcomp',
'colcompr',
'colinout',
'colregul',
'companion',
'complex',
'compute_initial_temp',
'cond',
'cond2sp',
'condestsp',
'configure_msifort',
'configure_msvc',
'conjgrad',
'cont_frm',
'cont_mat',
'contrss',
'conv',
'convert_to_float',
'convertindex',
'convol',
'convol2d',
'copfac',
'correl',
'cosd',
'cosh',
'coshm',
'cosm',
'cotd',
'cotg',
'coth',
'cothm',
'cov',
'covar',
'createXConfiguration',
'createfun',
'createstruct',
'cross',
'crossover_ga_binary',
'crossover_ga_default',
'csc',
'cscd',
'csch',
'csgn',
'csim',
'cspect',
'ctr_gram',
'czt',
'dae',
'daeoptions',
'damp',
'datafit',
'date',
'datenum',
'datevec',
'dbphi',
'dcf',
'ddp',
'dec2bin',
'dec2hex',
'dec2oct',
'del_help_chapter',
'del_module_help_chapter',
'demo_begin',
'demo_choose',
'demo_compiler',
'demo_end',
'demo_file_choice',
'demo_folder_choice',
'demo_function_choice',
'demo_gui',
'demo_run',
'demo_viewCode',
'denom',
'derivat',
'derivative',
'des2ss',
'des2tf',
'detectmsifort64tools',
'detectmsvc64tools',
'determ',
'detr',
'detrend',
'devtools_run_builder',
'dhnorm',
'diff',
'diophant',
'dir',
'dirname',
'dispfiles',
'dllinfo',
'dscr',
'dsimul',
'dt_ility',
'dtsi',
'edit',
'edit_error',
'editor',
'eigenmarkov',
'eigs',
'ell1mag',
'enlarge_shape',
'entropy',
'eomday',
'epred',
'eqfir',
'eqiir',
'equil',
'equil1',
'erfinv',
'etime',
'eval',
'evans',
'evstr',
'example_run',
'expression2code',
'extract_help_examples',
'factor',
'factorial',
'factors',
'faurre',
'ffilt',
'fft2',
'fftshift',
'fieldnames',
'filt_sinc',
'filter',
'findABCD',
'findAC',
'findBDK',
'findR',
'find_freq',
'find_links',
'find_scicos_version',
'findm',
'findmsifortcompiler',
'findmsvccompiler',
'findx0BD',
'firstnonsingleton',
'fix',
'fixedpointgcd',
'flipdim',
'flts',
'fminsearch',
'formatBlackTip',
'formatBodeMagTip',
'formatBodePhaseTip',
'formatGainplotTip',
'formatHallModuleTip',
'formatHallPhaseTip',
'formatNicholsGainTip',
'formatNicholsPhaseTip',
'formatNyquistTip',
'formatPhaseplotTip',
'formatSgridDampingTip',
'formatSgridFreqTip',
'formatZgridDampingTip',
'formatZgridFreqTip',
'format_txt',
'fourplan',
'frep2tf',
'freson',
'frfit',
'frmag',
'fseek_origin',
'fsfirlin',
'fspec',
'fspecg',
'fstabst',
'ftest',
'ftuneq',
'fullfile',
'fullrf',
'fullrfk',
'fun2string',
'g_margin',
'gainplot',
'gamitg',
'gcare',
'gcd',
'gencompilationflags_unix',
'generateBlockImage',
'generateBlockImages',
'generic_i_ce',
'generic_i_h',
'generic_i_hm',
'generic_i_s',
'generic_i_st',
'genlib',
'genmarkov',
'geomean',
'getDiagramVersion',
'getModelicaPath',
'getPreferencesValue',
'get_file_path',
'get_function_path',
'get_param',
'get_profile',
'get_scicos_version',
'getd',
'getscilabkeywords',
'getshell',
'gettklib',
'gfare',
'gfrancis',
'givens',
'glever',
'gmres',
'group',
'gschur',
'gspec',
'gtild',
'h2norm',
'h_cl',
'h_inf',
'h_inf_st',
'h_norm',
'hallchart',
'halt',
'hank',
'hankelsv',
'harmean',
'haveacompiler',
'head_comments',
'help_from_sci',
'help_skeleton',
'hermit',
'hex2dec',
'hilb',
'hilbert',
'histc',
'horner',
'householder',
'hrmt',
'htrianr',
'hypermat',
'idct',
'idst',
'ifft',
'ifftshift',
'iir',
'iirgroup',
'iirlp',
'iirmod',
'ilib_build',
'ilib_build_jar',
'ilib_compile',
'ilib_for_link',
'ilib_gen_Make',
'ilib_gen_Make_unix',
'ilib_gen_cleaner',
'ilib_gen_gateway',
'ilib_gen_loader',
'ilib_include_flag',
'ilib_mex_build',
'im_inv',
'importScicosDiagram',
'importScicosPal',
'importXcosDiagram',
'imrep2ss',
'ind2sub',
'inistate',
'init_ga_default',
'init_param',
'initial_scicos_tables',
'input',
'instruction2code',
'intc',
'intdec',
'integrate',
'interp1',
'interpln',
'intersect',
'intl',
'intsplin',
'inttrap',
'inv_coeff',
'invr',
'invrs',
'invsyslin',
'iqr',
'isLeapYear',
'is_absolute_path',
'is_param',
'iscell',
'iscellstr',
'iscolumn',
'isempty',
'isfield',
'isinf',
'ismatrix',
'isnan',
'isrow',
'isscalar',
'issparse',
'issquare',
'isstruct',
'isvector',
'jmat',
'justify',
'kalm',
'karmarkar',
'kernel',
'kpure',
'krac2',
'kroneck',
'lattn',
'lattp',
'launchtest',
'lcf',
'lcm',
'lcmdiag',
'leastsq',
'leqe',
'leqr',
'lev',
'levin',
'lex_sort',
'lft',
'lin',
'lin2mu',
'lincos',
'lindquist',
'linf',
'linfn',
'linsolve',
'linspace',
'list2vec',
'list_param',
'listfiles',
'listfunctions',
'listvarinfile',
'lmisolver',
'lmitool',
'loadXcosLibs',
'loadmatfile',
'loadwave',
'log10',
'log2',
'logm',
'logspace',
'lqe',
'lqg',
'lqg2stan',
'lqg_ltr',
'lqr',
'ls',
'lyap',
'm2sci_gui',
'm_circle',
'macglov',
'macrovar',
'mad',
'makecell',
'manedit',
'mapsound',
'markp2ss',
'matfile2sci',
'mdelete',
'mean',
'meanf',
'median',
'members',
'mese',
'meshgrid',
'mfft',
'mfile2sci',
'minreal',
'minss',
'mkdir',
'modulo',
'moment',
'mrfit',
'msd',
'mstr2sci',
'mtlb',
'mtlb_0',
'mtlb_a',
'mtlb_all',
'mtlb_any',
'mtlb_axes',
'mtlb_axis',
'mtlb_beta',
'mtlb_box',
'mtlb_choices',
'mtlb_close',
'mtlb_colordef',
'mtlb_cond',
'mtlb_cov',
'mtlb_cumprod',
'mtlb_cumsum',
'mtlb_dec2hex',
'mtlb_delete',
'mtlb_diag',
'mtlb_diff',
'mtlb_dir',
'mtlb_double',
'mtlb_e',
'mtlb_echo',
'mtlb_error',
'mtlb_eval',
'mtlb_exist',
'mtlb_eye',
'mtlb_false',
'mtlb_fft',
'mtlb_fftshift',
'mtlb_filter',
'mtlb_find',
'mtlb_findstr',
'mtlb_fliplr',
'mtlb_fopen',
'mtlb_format',
'mtlb_fprintf',
'mtlb_fread',
'mtlb_fscanf',
'mtlb_full',
'mtlb_fwrite',
'mtlb_get',
'mtlb_grid',
'mtlb_hold',
'mtlb_i',
'mtlb_ifft',
'mtlb_image',
'mtlb_imp',
'mtlb_int16',
'mtlb_int32',
'mtlb_int8',
'mtlb_is',
'mtlb_isa',
'mtlb_isfield',
'mtlb_isletter',
'mtlb_isspace',
'mtlb_l',
'mtlb_legendre',
'mtlb_linspace',
'mtlb_logic',
'mtlb_logical',
'mtlb_loglog',
'mtlb_lower',
'mtlb_max',
'mtlb_mean',
'mtlb_median',
'mtlb_mesh',
'mtlb_meshdom',
'mtlb_min',
'mtlb_more',
'mtlb_num2str',
'mtlb_ones',
'mtlb_pcolor',
'mtlb_plot',
'mtlb_prod',
'mtlb_qr',
'mtlb_qz',
'mtlb_rand',
'mtlb_randn',
'mtlb_rcond',
'mtlb_realmax',
'mtlb_realmin',
'mtlb_s',
'mtlb_semilogx',
'mtlb_semilogy',
'mtlb_setstr',
'mtlb_size',
'mtlb_sort',
'mtlb_sortrows',
'mtlb_sprintf',
'mtlb_sscanf',
'mtlb_std',
'mtlb_strcmp',
'mtlb_strcmpi',
'mtlb_strfind',
'mtlb_strrep',
'mtlb_subplot',
'mtlb_sum',
'mtlb_t',
'mtlb_toeplitz',
'mtlb_tril',
'mtlb_triu',
'mtlb_true',
'mtlb_type',
'mtlb_uint16',
'mtlb_uint32',
'mtlb_uint8',
'mtlb_upper',
'mtlb_var',
'mtlb_zeros',
'mu2lin',
'mutation_ga_binary',
'mutation_ga_default',
'mvcorrel',
'mvvacov',
'nancumsum',
'nand2mean',
'nanmax',
'nanmean',
'nanmeanf',
'nanmedian',
'nanmin',
'nanreglin',
'nanstdev',
'nansum',
'narsimul',
'ndgrid',
'ndims',
'nehari',
'neigh_func_csa',
'neigh_func_default',
'neigh_func_fsa',
'neigh_func_vfsa',
'neldermead_cget',
'neldermead_configure',
'neldermead_costf',
'neldermead_defaultoutput',
'neldermead_destroy',
'neldermead_function',
'neldermead_get',
'neldermead_log',
'neldermead_new',
'neldermead_restart',
'neldermead_search',
'neldermead_updatesimp',
'nextpow2',
'nfreq',
'nicholschart',
'nlev',
'nmplot_cget',
'nmplot_configure',
'nmplot_contour',
'nmplot_destroy',
'nmplot_function',
'nmplot_get',
'nmplot_historyplot',
'nmplot_log',
'nmplot_new',
'nmplot_outputcmd',
'nmplot_restart',
'nmplot_search',
'nmplot_simplexhistory',
'noisegen',
'nonreg_test_run',
'now',
'nthroot',
'null',
'num2cell',
'numderivative',
'numdiff',
'numer',
'nyquist',
'nyquistfrequencybounds',
'obs_gram',
'obscont',
'observer',
'obsv_mat',
'obsvss',
'oct2dec',
'odeoptions',
'optim_ga',
'optim_moga',
'optim_nsga',
'optim_nsga2',
'optim_sa',
'optimbase_cget',
'optimbase_checkbounds',
'optimbase_checkcostfun',
'optimbase_checkx0',
'optimbase_configure',
'optimbase_destroy',
'optimbase_function',
'optimbase_get',
'optimbase_hasbounds',
'optimbase_hasconstraints',
'optimbase_hasnlcons',
'optimbase_histget',
'optimbase_histset',
'optimbase_incriter',
'optimbase_isfeasible',
'optimbase_isinbounds',
'optimbase_isinnonlincons',
'optimbase_log',
'optimbase_logshutdown',
'optimbase_logstartup',
'optimbase_new',
'optimbase_outputcmd',
'optimbase_outstruct',
'optimbase_proj2bnds',
'optimbase_set',
'optimbase_stoplog',
'optimbase_terminate',
'optimget',
'optimplotfunccount',
'optimplotfval',
'optimplotx',
'optimset',
'optimsimplex_center',
'optimsimplex_check',
'optimsimplex_compsomefv',
'optimsimplex_computefv',
'optimsimplex_deltafv',
'optimsimplex_deltafvmax',
'optimsimplex_destroy',
'optimsimplex_dirmat',
'optimsimplex_fvmean',
'optimsimplex_fvstdev',
'optimsimplex_fvvariance',
'optimsimplex_getall',
'optimsimplex_getallfv',
'optimsimplex_getallx',
'optimsimplex_getfv',
'optimsimplex_getn',
'optimsimplex_getnbve',
'optimsimplex_getve',
'optimsimplex_getx',
'optimsimplex_gradientfv',
'optimsimplex_log',
'optimsimplex_new',
'optimsimplex_reflect',
'optimsimplex_setall',
'optimsimplex_setallfv',
'optimsimplex_setallx',
'optimsimplex_setfv',
'optimsimplex_setn',
'optimsimplex_setnbve',
'optimsimplex_setve',
'optimsimplex_setx',
'optimsimplex_shrink',
'optimsimplex_size',
'optimsimplex_sort',
'optimsimplex_xbar',
'orth',
'output_ga_default',
'output_moga_default',
'output_nsga2_default',
'output_nsga_default',
'p_margin',
'pack',
'pareto_filter',
'parrot',
'pbig',
'pca',
'pcg',
'pdiv',
'pen2ea',
'pencan',
'pencost',
'penlaur',
'perctl',
'perl',
'perms',
'permute',
'pertrans',
'pfactors',
'pfss',
'phasemag',
'phaseplot',
'phc',
'pinv',
'playsnd',
'plotprofile',
'plzr',
'pmodulo',
'pol2des',
'pol2str',
'polar',
'polfact',
'prbs_a',
'prettyprint',
'primes',
'princomp',
'profile',
'proj',
'projsl',
'projspec',
'psmall',
'pspect',
'qmr',
'qpsolve',
'quart',
'quaskro',
'rafiter',
'randpencil',
'range',
'rank',
'readxls',
'recompilefunction',
'recons',
'reglin',
'regress',
'remezb',
'remove_param',
'remove_profiling',
'repfreq',
'replace_Ix_by_Fx',
'repmat',
'reset_profiling',
'resize_matrix',
'returntoscilab',
'rhs2code',
'ric_desc',
'riccati',
'rmdir',
'routh_t',
'rowcomp',
'rowcompr',
'rowinout',
'rowregul',
'rowshuff',
'rref',
'sample',
'samplef',
'samwr',
'savematfile',
'savewave',
'scanf',
'sci2exp',
'sciGUI_init',
'sci_sparse',
'scicos_getvalue',
'scicos_simulate',
'scicos_workspace_init',
'scisptdemo',
'scitest',
'sdiff',
'sec',
'secd',
'sech',
'selection_ga_elitist',
'selection_ga_random',
'sensi',
'setPreferencesValue',
'set_param',
'setdiff',
'sgrid',
'show_margins',
'show_pca',
'showprofile',
'signm',
'sinc',
'sincd',
'sind',
'sinh',
'sinhm',
'sinm',
'sm2des',
'sm2ss',
'smga',
'smooth',
'solve',
'sound',
'soundsec',
'sp2adj',
'spaninter',
'spanplus',
'spantwo',
'specfact',
'speye',
'sprand',
'spzeros',
'sqroot',
'sqrtm',
'squarewave',
'squeeze',
'srfaur',
'srkf',
'ss2des',
'ss2ss',
'ss2tf',
'sskf',
'ssprint',
'ssrand',
'st_deviation',
'st_i_generic',
'st_ility',
'stabil',
'statgain',
'stdev',
'stdevf',
'steadycos',
'strange',
'strcmpi',
'struct',
'sub2ind',
'sva',
'svplot',
'sylm',
'sylv',
'sysconv',
'sysdiag',
'sysfact',
'syslin',
'syssize',
'system',
'systmat',
'tabul',
'tand',
'tanh',
'tanhm',
'tanm',
'tbx_build_blocks',
'tbx_build_cleaner',
'tbx_build_gateway',
'tbx_build_gateway_clean',
'tbx_build_gateway_loader',
'tbx_build_help',
'tbx_build_help_loader',
'tbx_build_loader',
'tbx_build_localization',
'tbx_build_macros',
'tbx_build_pal_loader',
'tbx_build_src',
'tbx_builder',
'tbx_builder_gateway',
'tbx_builder_gateway_lang',
'tbx_builder_help',
'tbx_builder_help_lang',
'tbx_builder_macros',
'tbx_builder_src',
'tbx_builder_src_lang',
'tbx_generate_pofile',
'temp_law_csa',
'temp_law_default',
'temp_law_fsa',
'temp_law_huang',
'temp_law_vfsa',
'test_clean',
'test_on_columns',
'test_run',
'test_run_level',
'testexamples',
'tf2des',
'tf2ss',
'thrownan',
'tic',
'time_id',
'toc',
'toeplitz',
'tokenpos',
'toolboxes',
'trace',
'trans',
'translatepaths',
'tree2code',
'trfmod',
'trianfml',
'trimmean',
'trisolve',
'trzeros',
'typeof',
'ui_observer',
'union',
'unique',
'unit_test_run',
'unix_g',
'unix_s',
'unix_w',
'unix_x',
'unobs',
'unpack',
'unwrap',
'variance',
'variancef',
'vec2list',
'vectorfind',
'ver',
'warnobsolete',
'wavread',
'wavwrite',
'wcenter',
'weekday',
'wfir',
'wfir_gui',
'whereami',
'who_user',
'whos',
'wiener',
'wigner',
'window',
'winlist',
'with_javasci',
'with_macros_source',
'with_modelica_compiler',
'with_tk',
'xcorr',
'xcosBlockEval',
'xcosBlockInterface',
'xcosCodeGeneration',
'xcosConfigureModelica',
'xcosPal',
'xcosPalAdd',
'xcosPalAddBlock',
'xcosPalExport',
'xcosPalGenerateAllIcons',
'xcosShowBlockWarning',
'xcosValidateBlockSet',
'xcosValidateCompareBlock',
'xcos_compile',
'xcos_debug_gui',
'xcos_run',
'xcos_simulate',
'xcov',
'xmltochm',
'xmltoformat',
'xmltohtml',
'xmltojar',
'xmltopdf',
'xmltops',
'xmltoweb',
'yulewalk',
'zeropen',
'zgrid',
'zpbutt',
'zpch1',
'zpch2',
'zpell',
)
variables_kw = (
'$',
'%F',
'%T',
'%e',
'%eps',
'%f',
'%fftw',
'%gui',
'%i',
'%inf',
'%io',
'%modalWarning',
'%nan',
'%pi',
'%s',
'%t',
'%tk',
'%toolboxes',
'%toolboxes_dir',
'%z',
'PWD',
'SCI',
'SCIHOME',
'TMPDIR',
'arnoldilib',
'assertlib',
'atomslib',
'cacsdlib',
'compatibility_functilib',
'corelib',
'data_structureslib',
'demo_toolslib',
'development_toolslib',
'differential_equationlib',
'dynamic_linklib',
'elementary_functionslib',
'enull',
'evoid',
'external_objectslib',
'fd',
'fileiolib',
'functionslib',
'genetic_algorithmslib',
'helptoolslib',
'home',
'integerlib',
'interpolationlib',
'iolib',
'jnull',
'jvoid',
'linear_algebralib',
'm2scilib',
'matiolib',
'modules_managerlib',
'neldermeadlib',
'optimbaselib',
'optimizationlib',
'optimsimplexlib',
'output_streamlib',
'overloadinglib',
'parameterslib',
'polynomialslib',
'preferenceslib',
'randliblib',
'scicos_autolib',
'scicos_utilslib',
'scinoteslib',
'signal_processinglib',
'simulated_annealinglib',
'soundlib',
'sparselib',
'special_functionslib',
'spreadsheetlib',
'statisticslib',
'stringlib',
'tclscilib',
'timelib',
'umfpacklib',
'xcoslib',
)
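# Illustrative sketch (an assumption, not part of the generated data): a
# Pygments lexer typically folds these tuples into its token rules with
# `words()`.  The class below is a demonstration only, not the real
# ScilabLexer shipped with Pygments.
def _sketch_lexer(): # pragma: no cover
    from pygments.lexer import RegexLexer, words
    from pygments.token import Name
    class SketchScilabLexer(RegexLexer):
        tokens = {
            'root': [
                # overloading macros such as '%hm_sum' -> function names
                (words(macros_kw, suffix=r'\b'), Name.Function),
                # predefined variables such as '%pi' -> constants
                (words(variables_kw, suffix=r'\b'), Name.Constant),
            ],
        }
    return SketchScilabLexer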
if __name__ == '__main__': # pragma: no cover
import subprocess
from pygments.util import format_lines, duplicates_removed
mapping = {'variables': 'builtin'}
def extract_completion(var_type):
s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = s.communicate('''\
fd = mopen("/dev/stderr", "wt");
mputl(strcat(completion("", "%s"), "||"), fd);
mclose(fd)\n''' % var_type)
if '||' not in output[1]:
raise Exception(output[0])
# Invalid DISPLAY causes this to be output:
text = output[1].strip()
if text.startswith('Error: unable to open display \n'):
text = text[len('Error: unable to open display \n'):]
return text.split('||')
new_data = {}
seen = set() # only keep first type for a given word
for t in ('functions', 'commands', 'macros', 'variables'):
new_data[t] = duplicates_removed(extract_completion(t), seen)
seen.update(set(new_data[t]))
with open(__file__) as f:
content = f.read()
header = content[:content.find('# Autogenerated')]
footer = content[content.find("if __name__ == '__main__':"):]
with open(__file__, 'w') as f:
f.write(header)
f.write('# Autogenerated\n\n')
for k, v in sorted(new_data.iteritems()):
f.write(format_lines(k + '_kw', v) + '\n\n')
f.write(footer)
|
feeds/igraph | refs/heads/master | tools/seqdict/mdict.py | 7 | ################################################################################
# Sequential Dictionary Class #
# #
# by Wolfgang Grafen #
# #
# Version 0.2    11. February 2004                                             #
# #
# email to: [email protected] #
# #
################################################################################
from ndict import seqdict #Sequential Single Value Dictionary
from UserList import UserList
class MyUserList(UserList):
def __init__(self,parent,liste=None):
UserList.__init__(self,liste)
self.parent = parent #remember parent for call-back
def __delitem__(self, i):
del self.data[i]
if self.data==[]: #call-back, deletes item of parent
index = self.parent.values().index([])
del self.parent[index:index+1]
class mseqdict(seqdict): #Sequential Multiple Value Dictionary
def __init__(self,List=[],Dict={}):
self.list = []
self.dict = {}
if not List:
pass
elif type(List)==type({}):
for key,value in List.items():
self.__setitem__(key,value)
elif List and not Dict: #dict.items()
for key,value in List:
if isinstance(value,MyUserList):
for v in value:
self.__setitem__(key,v)
else:
self.__setitem__(key,value)
elif type(List)==type(Dict)==type([]):
for key,value in map(None,List,Dict):
self.__setitem__(key,value)
else:
if isinstance(Dict.values()[0],MyUserList):
self.dict = Dict
self.list = List
else:
for key in List:
value = Dict[key]
if type(value)==type([]):
for v in value:
self.__setitem__(key,v)
else:
self.__setitem__(key,value)
self_list = self.list
self_dict = self.dict
for k in self_list:
assert self_dict.has_key(k),"key %r not in self.dict" % k
for k in self_dict.keys():
if k not in self_list:
self_list.append(k)
def __setitem__(self,key,value):
if not self.dict.has_key(key):
self.list.append(key)
if isinstance(value,MyUserList):
self.dict[key] = value
else:
self.dict[key]=MyUserList(self,[value])
else:
values = self.dict[key]
if isinstance(value,MyUserList):
for v in value:
if not v in values:
values.extend(MyUserList(self,[v]))
            else:
                # append only when this exact object is absent (identity
                # test, matching the commented-out membership check above)
                for v in values:
                    if v is value:
                        break
                else:
                    values.extend(MyUserList(self, [value]))
def __delitem__(self, key):
del self.dict[key]
self.list.remove(key)
def append(self,key,value):
self.__setitem__(key,value)
def __setslice__(self,start,stop,newdict):
start = max(start,0); stop = max(stop,0)
delindexes = []
for key in newdict.keys():
if self.dict.has_key(key):
index = self.list.index(key)
delindexes.append(index)
if index < start:
start = start - 1
stop = stop - 1
elif index >= stop:
pass
else:
stop = stop - 1
else:
                self.dict[key] = MyUserList(self) # empty value list with call-back parent
delindexes.sort()
delindexes.reverse()
for index in delindexes:
key = self.list[index]
#del self.dict[key]
del self.list[index]
self.list[start:stop] = newdict.list[:]
self.dict.update(newdict.dict)
def copy(self):
values = map(lambda x:x[:],self.values())
return self.__class__(self.list,values)
def count(self,value):
vallist = self.dict.values()
return map(lambda x,y=value:x.count(y),vallist).count(1)
def filter(self,function,filtervalues=0):
if filtervalues == 1: #consider key and all keyvalues at once
dict = self.__class__()
for key,values in self.items():
if function(key,values):
dict[key]=values
return dict
elif filtervalues == 2: #consider key and every keyvalue for itself
dict = self.__class__()
for key,values in self.items():
for value in values:
if function(key,value):
dict[key]=value
return dict
else: #consider key only
liste=filter(function,self.list)
dict = {}
for i in liste:
dict[i]=self.dict[i]
return self.__class__(liste,dict)
def map(self,function,mapvalues=2):
if mapvalues == 1: #consider key and all keyvalues at once
dict = self.__class__()
for key,values in self.items():
k,v = function(key,values)
dict[k]=v
return dict
else: #if mapvalues!=1: #consider key and every keyvalue for itself
dict = self.__class__()
for key,values in self.items():
for value in values:
k,v = function(key,value)
dict[k]=v
return dict
def pop(self,key='...None',value='...None'):
if value=='...None':
if key=='...None':
pos = -1
key = self.list[pos]
else:
pos = self.list.index(key)
tmp = self.dict[key]
del self.dict[key]
return {self.list.pop(pos):tmp}
else:
val = self.dict[key]
index = val.index(value)
tmp = val[index]
del val[index]
return {key:tmp}
def remove(self,key,value='...None'):
if value=='...None':
del self[key]
else:
index = self[key].index(value)
del self[key][index]
    def sort(self,func1=None,func2=None):
        if not func1:
            self.list.sort()
        else:
            self.list.sort(func1)
        if func2:
            for value in self.values():
                value.sort(func2)
def swap(self):
tmp = self.__class__()
for key,values in self.items():
for value in values:
tmp[value]=key
self.list,self.dict = tmp.list,tmp.dict
del tmp
def __repr__(self):return 'mseqdict(\n%s,\n%s)'%(self.list,self.dict)
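if __name__ == '__main__':
    # Minimal usage sketch (illustrative; the keys and values below are
    # arbitrary examples, not part of the original distribution).
    d = mseqdict()
    d['a'] = 1
    d['a'] = 2 # a second assignment appends instead of overwriting
    d['b'] = 3
    print d.list # ['a', 'b'] - keys keep insertion order
    print d.dict # {'a': [1, 2], 'b': [3]} - every value is kept
    print d.count(3) # 1: the value 3 occurs in exactly one key's list
    d.swap() # values become keys: 1, 2 -> 'a'; 3 -> 'b'
    print d.list # [1, 2, 3]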
|
heinrich5991/django-denorm | refs/heads/master | docs/conf.py | 28 | # -*- coding: utf-8 -*-
#
# django-denorm documentation build configuration file, created by
# sphinx-quickstart on Fri May 22 22:07:05 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If your extensions (or modules documented by autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings_sqlite'
sys.path.append(os.path.abspath('../../../'))
sys.path.append(os.path.abspath('../test_project/'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc','sphinxtogithub']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-denorm'
copyright = u'2009, Christian Schilling'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['.build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-denormdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'django-denorm.tex', ur'django-denorm Documentation',
ur'Christian Schilling', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
|
carlohamalainen/nipype | refs/heads/master | nipype/interfaces/slicer/legacy/tests/test_auto_BSplineDeformableRegistration.py | 5 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.slicer.legacy.registration import BSplineDeformableRegistration
def test_BSplineDeformableRegistration_inputs():
input_map = dict(FixedImageFileName=dict(argstr='%s',
position=-2,
),
MovingImageFileName=dict(argstr='%s',
position=-1,
),
args=dict(argstr='%s',
),
constrain=dict(argstr='--constrain ',
),
default=dict(argstr='--default %d',
),
environ=dict(nohash=True,
usedefault=True,
),
gridSize=dict(argstr='--gridSize %d',
),
histogrambins=dict(argstr='--histogrambins %d',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
initialtransform=dict(argstr='--initialtransform %s',
),
iterations=dict(argstr='--iterations %d',
),
maximumDeformation=dict(argstr='--maximumDeformation %f',
),
outputtransform=dict(argstr='--outputtransform %s',
hash_files=False,
),
outputwarp=dict(argstr='--outputwarp %s',
hash_files=False,
),
resampledmovingfilename=dict(argstr='--resampledmovingfilename %s',
hash_files=False,
),
spatialsamples=dict(argstr='--spatialsamples %d',
),
terminal_output=dict(mandatory=True,
nohash=True,
),
)
inputs = BSplineDeformableRegistration.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_BSplineDeformableRegistration_outputs():
output_map = dict(outputtransform=dict(),
outputwarp=dict(),
resampledmovingfilename=dict(),
)
outputs = BSplineDeformableRegistration.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
felipenaselva/repo.felipe | refs/heads/master | plugin.video.sembilhete.tv/resources/lib/requests/packages/urllib3/connectionpool.py | 196 | from __future__ import absolute_import
import errno
import logging
import sys
import warnings
from socket import error as SocketError, timeout as SocketTimeout
import socket
try: # Python 3
from queue import LifoQueue, Empty, Full
except ImportError:
from Queue import LifoQueue, Empty, Full
# Queue is imported for side effects on MS Windows
import Queue as _unused_module_Queue # noqa: unused
from .exceptions import (
ClosedPoolError,
ProtocolError,
EmptyPoolError,
HeaderParsingError,
HostChangedError,
LocationValueError,
MaxRetryError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
InsecureRequestWarning,
NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .connection import (
port_by_scheme,
DummyConnection,
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host, Url
xrange = six.moves.xrange
log = logging.getLogger(__name__)
_Default = object()
# Pool objects
class ConnectionPool(object):
"""
Base class for all connection pools, such as
:class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
"""
scheme = None
QueueCls = LifoQueue
def __init__(self, host, port=None):
if not host:
raise LocationValueError("No host specified.")
self.host = host
self.port = port
def __str__(self):
return '%s(host=%r, port=%r)' % (type(self).__name__,
self.host, self.port)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
# Return False to re-raise any potential exceptions
return False
    def close(self):
"""
Close all pooled connections and disable the pool.
"""
pass
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Thread-safe connection pool for one host.
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
:class:`httplib.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
into :class:`httplib.HTTPConnection`.
:param strict:
Causes BadStatusLine to be raised if the status line can't be parsed
as a valid HTTP/1.0 or 1.1 status line, passed into
:class:`httplib.HTTPConnection`.
.. note::
Only works in Python 2. This parameter is ignored in Python 3.
:param timeout:
Socket timeout in seconds for each individual connection. This can
be a float or integer, which sets the timeout for the HTTP request,
or an instance of :class:`urllib3.util.Timeout` which gives you more
fine-grained control over request timeouts. After the constructor has
been parsed, this is always a `urllib3.util.Timeout` object.
:param maxsize:
Number of connections to save that can be reused. More than 1 is useful
in multithreaded situations. If ``block`` is set to False, more
connections will be created but they will not be saved once they've
been used.
:param block:
If set to True, no more than ``maxsize`` connections will be used at
a time. When no free connections are available, the call will block
until a connection has been released. This is a useful side effect for
particular multithreaded situations where one does not want to use more
than maxsize connections per host to prevent flooding.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param retries:
Retry configuration to use by default with requests in this pool.
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`
:param \**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
:class:`urllib3.connection.HTTPSConnection` instances.
"""
scheme = 'http'
ConnectionCls = HTTPConnection
def __init__(self, host, port=None, strict=False,
timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
headers=None, retries=None,
_proxy=None, _proxy_headers=None,
**conn_kw):
ConnectionPool.__init__(self, host, port)
RequestMethods.__init__(self, headers)
self.strict = strict
if not isinstance(timeout, Timeout):
timeout = Timeout.from_float(timeout)
if retries is None:
retries = Retry.DEFAULT
self.timeout = timeout
self.retries = retries
self.pool = self.QueueCls(maxsize)
self.block = block
self.proxy = _proxy
self.proxy_headers = _proxy_headers or {}
# Fill the queue up so that doing get() on it will block properly
for _ in xrange(maxsize):
self.pool.put(None)
# These are mostly for testing and debugging purposes.
self.num_connections = 0
self.num_requests = 0
self.conn_kw = conn_kw
if self.proxy:
# Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
# We cannot know if the user has added default socket options, so we cannot replace the
# list.
self.conn_kw.setdefault('socket_options', [])
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
"""
self.num_connections += 1
log.info("Starting new HTTP connection (%d): %s" %
(self.num_connections, self.host))
conn = self.ConnectionCls(host=self.host, port=self.port,
timeout=self.timeout.connect_timeout,
strict=self.strict, **self.conn_kw)
return conn
def _get_conn(self, timeout=None):
"""
Get a connection. Will return a pooled connection if one is available.
If no connections are available and :prop:`.block` is ``False``, then a
fresh connection is returned.
:param timeout:
Seconds to wait before giving up and raising
:class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
:prop:`.block` is ``True``.
"""
conn = None
try:
conn = self.pool.get(block=self.block, timeout=timeout)
except AttributeError: # self.pool is None
raise ClosedPoolError(self, "Pool is closed.")
except Empty:
if self.block:
raise EmptyPoolError(self,
"Pool reached maximum size and no more "
"connections are allowed.")
pass # Oh well, we'll create a new connection then
# If this is a persistent connection, check if it got disconnected
if conn and is_connection_dropped(conn):
log.info("Resetting dropped connection: %s" % self.host)
conn.close()
if getattr(conn, 'auto_open', 1) == 0:
# This is a proxied connection that has been mutated by
# httplib._tunnel() and cannot be reused (since it would
# attempt to bypass the proxy)
conn = None
return conn or self._new_conn()
def _put_conn(self, conn):
"""
Put a connection back into the pool.
:param conn:
Connection object for the current host and port as returned by
:meth:`._new_conn` or :meth:`._get_conn`.
If the pool is already full, the connection is closed and discarded
because we exceeded maxsize. If connections are discarded frequently,
then maxsize should be increased.
If the pool is closed, then the connection will be closed and discarded.
"""
try:
self.pool.put(conn, block=False)
return # Everything is dandy, done.
except AttributeError:
# self.pool is None.
pass
except Full:
# This should never happen if self.block == True
log.warning(
"Connection pool is full, discarding connection: %s" %
self.host)
# Connection never got put back into the pool, close it.
if conn:
conn.close()
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
pass
def _prepare_proxy(self, conn):
# Nothing to do for HTTP connections.
pass
def _get_timeout(self, timeout):
""" Helper that always returns a :class:`urllib3.util.Timeout` """
if timeout is _Default:
return self.timeout.clone()
if isinstance(timeout, Timeout):
return timeout.clone()
else:
# User passed us an int/float. This is for backwards compatibility,
# can be removed later
return Timeout.from_float(timeout)
def _raise_timeout(self, err, url, timeout_value):
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
if isinstance(err, SocketTimeout):
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
# See the above comment about EAGAIN in Python 3. In Python 2 we have
# to specifically catch it and throw the timeout error
if hasattr(err, 'errno') and err.errno in _blocking_errnos:
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
# Catch possible read timeouts thrown as SSL errors. If not the
# case, rethrow the original. We need to do this because of:
# http://bugs.python.org/issue10272
if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
def _make_request(self, conn, method, url, timeout=_Default,
**httplib_request_kw):
"""
Perform a request on a given urllib connection object taken from our
pool.
:param conn:
a connection from one of our connection pools
:param timeout:
Socket timeout in seconds for the request. This can be a
float or integer, which will set the same timeout value for
the socket connect and the socket read, or an instance of
:class:`urllib3.util.Timeout`, which gives you more fine-grained
control over your timeouts.
"""
self.num_requests += 1
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
conn.timeout = timeout_obj.connect_timeout
# Trigger any extra validation we need to do.
try:
self._validate_conn(conn)
except (SocketTimeout, BaseSSLError) as e:
# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
# conn.request() calls httplib.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
conn.request(method, url, **httplib_request_kw)
# Reset the timeout for the recv() on the socket
read_timeout = timeout_obj.read_timeout
# App Engine doesn't have a sock attr
if getattr(conn, 'sock', None):
# In Python 3 socket.py will catch EAGAIN and return None when you
# try and read into the file pointer created by http.client, which
# instead raises a BadStatusLine exception. Instead of catching
# the exception and assuming all BadStatusLine exceptions are read
# timeouts, check for a zero timeout before making the request.
if read_timeout == 0:
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % read_timeout)
if read_timeout is Timeout.DEFAULT_TIMEOUT:
conn.sock.settimeout(socket.getdefaulttimeout())
else: # None or a value
conn.sock.settimeout(read_timeout)
# Receive the response from the server
try:
try: # Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
except TypeError: # Python 2.6 and older
httplib_response = conn.getresponse()
except (SocketTimeout, BaseSSLError, SocketError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
# AppEngine doesn't have a version attr.
http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
httplib_response.status,
httplib_response.length))
try:
assert_header_parsing(httplib_response.msg)
except HeaderParsingError as hpe: # Platform-specific: Python 3
log.warning(
'Failed to parse headers (url=%s): %s',
self._absolute_url(url), hpe, exc_info=True)
return httplib_response
def _absolute_url(self, path):
return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
def close(self):
"""
Close all pooled connections and disable the pool.
"""
# Disable access to the pool
old_pool, self.pool = self.pool, None
try:
while True:
conn = old_pool.get(block=False)
if conn:
conn.close()
except Empty:
pass # Done.
def is_same_host(self, url):
"""
Check if the given ``url`` is a member of the same host as this
connection pool.
"""
if url.startswith('/'):
return True
# TODO: Add optional support for socket.gethostbyname checking.
scheme, host, port = get_host(url)
# Use explicit default port for comparison when none is given
if self.port and not port:
port = port_by_scheme.get(scheme)
elif not self.port and port == port_by_scheme.get(scheme):
port = None
return (scheme, host, port) == (self.scheme, self.host, self.port)
def urlopen(self, method, url, body=None, headers=None, retries=None,
redirect=True, assert_same_host=True, timeout=_Default,
pool_timeout=None, release_conn=None, **response_kw):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param body:
Data to send in the request body (useful for creating
POST requests, see HTTPConnectionPool.post_url for
more convenience).
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When False, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param \**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get('preload_content', True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
conn = None
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
if self.scheme == 'http':
headers = headers.copy()
headers.update(self.proxy_headers)
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
if is_new_proxy_conn:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
httplib_response = self._make_request(conn, method, url,
timeout=timeout_obj,
body=body, headers=headers)
# If we're going to release the connection in ``finally:``, then
# the request doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = not release_conn and conn
# Import httplib's response into our own wrapper object
response = HTTPResponse.from_httplib(httplib_response,
pool=self,
connection=response_conn,
**response_kw)
# else:
# The connection will be put back into the pool when
# ``response.release_conn()`` is called (implicitly by
# ``response.read()``)
except Empty:
# Timed out by queue.
raise EmptyPoolError(self, "No pool connections are available.")
except (BaseSSLError, CertificateError) as e:
# Close the connection. If a connection is reused on which there
# was a Certificate error, the next request will certainly raise
# another Certificate error.
conn = conn and conn.close()
release_conn = True
raise SSLError(e)
except SSLError:
# Treat SSLError separately from BaseSSLError to preserve
# traceback.
conn = conn and conn.close()
release_conn = True
raise
except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
# Discard the connection for these exceptions. It will be
# be replaced during the next _get_conn() call.
conn = conn and conn.close()
release_conn = True
if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
e = ProxyError('Cannot connect to proxy.', e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError('Connection aborted.', e)
retries = retries.increment(method, url, error=e, _pool=self,
_stacktrace=sys.exc_info()[2])
retries.sleep()
# Keep track of the error for the retry warning.
err = e
finally:
if release_conn:
# Put the connection back to be reused. If the connection is
# expired then it will be None, which will get replaced with a
# fresh connection during _get_conn.
self._put_conn(conn)
if not conn:
# Try again
log.warning("Retrying (%r) after connection "
"broken by '%r': %s" % (retries, err, url))
return self.urlopen(method, url, body, headers, retries,
redirect, assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
if response.status == 303:
method = 'GET'
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
# Release the connection for this response, since we're not
# returning it to be released manually.
response.release_conn()
raise
return response
log.info("Redirecting %s -> %s" % (url, redirect_location))
return self.urlopen(
method, redirect_location, body, headers,
retries=retries, redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
# Check if we should retry the HTTP response.
if retries.is_forced_retry(method, status_code=response.status):
retries = retries.increment(method, url, response=response, _pool=self)
retries.sleep()
log.info("Forced retry: %s" % url)
return self.urlopen(
method, url, body, headers,
retries=retries, redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
return response
class HTTPSConnectionPool(HTTPConnectionPool):
"""
Same as :class:`.HTTPConnectionPool`, but HTTPS.
When Python is compiled with the :mod:`ssl` module, then
:class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
instead of :class:`.HTTPSConnection`.
:class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
``assert_hostname`` and ``host`` in this order to verify connections.
If ``assert_hostname`` is False, no verification is done.
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
the connection socket into an SSL socket.
"""
scheme = 'https'
ConnectionCls = HTTPSConnection
def __init__(self, host, port=None,
strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
block=False, headers=None, retries=None,
_proxy=None, _proxy_headers=None,
key_file=None, cert_file=None, cert_reqs=None,
ca_certs=None, ssl_version=None,
assert_hostname=None, assert_fingerprint=None,
ca_cert_dir=None, **conn_kw):
HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
block, headers, retries, _proxy, _proxy_headers,
**conn_kw)
if ca_certs and cert_reqs is None:
cert_reqs = 'CERT_REQUIRED'
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.ca_certs = ca_certs
self.ca_cert_dir = ca_cert_dir
self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
def _prepare_conn(self, conn):
"""
Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
and establish the tunnel if proxy is used.
"""
if isinstance(conn, VerifiedHTTPSConnection):
conn.set_cert(key_file=self.key_file,
cert_file=self.cert_file,
cert_reqs=self.cert_reqs,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
assert_hostname=self.assert_hostname,
assert_fingerprint=self.assert_fingerprint)
conn.ssl_version = self.ssl_version
return conn
def _prepare_proxy(self, conn):
"""
Establish tunnel connection early, because otherwise httplib
would improperly set Host: header to proxy's IP:port.
"""
# Python 2.7+
try:
set_tunnel = conn.set_tunnel
except AttributeError: # Platform-specific: Python 2.6
set_tunnel = conn._set_tunnel
if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
set_tunnel(self.host, self.port)
else:
set_tunnel(self.host, self.port, self.proxy_headers)
conn.connect()
def _new_conn(self):
"""
Return a fresh :class:`httplib.HTTPSConnection`.
"""
self.num_connections += 1
log.info("Starting new HTTPS connection (%d): %s"
% (self.num_connections, self.host))
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
raise SSLError("Can't connect to HTTPS URL because the SSL "
"module is not available.")
actual_host = self.host
actual_port = self.port
if self.proxy is not None:
actual_host = self.proxy.host
actual_port = self.proxy.port
conn = self.ConnectionCls(host=actual_host, port=actual_port,
timeout=self.timeout.connect_timeout,
strict=self.strict, **self.conn_kw)
return self._prepare_conn(conn)
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
super(HTTPSConnectionPool, self)._validate_conn(conn)
# Force connect early to allow us to validate the connection.
if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
conn.connect()
if not conn.is_verified:
warnings.warn((
'Unverified HTTPS request is being made. '
'Adding certificate verification is strongly advised. See: '
'https://urllib3.readthedocs.org/en/latest/security.html'),
InsecureRequestWarning)
def connection_from_url(url, **kw):
"""
Given a url, return an :class:`.ConnectionPool` instance of its host.
This is a shortcut for not having to parse out the scheme, host, and port
of the url before creating an :class:`.ConnectionPool` instance.
:param url:
Absolute URL string that must include the scheme. Port is optional.
:param \**kw:
Passes additional parameters to the constructor of the appropriate
:class:`.ConnectionPool`. Useful for specifying things like
timeout, maxsize, headers, etc.
Example::
>>> conn = connection_from_url('http://google.com/')
>>> r = conn.request('GET', '/')
"""
scheme, host, port = get_host(url)
if scheme == 'https':
return HTTPSConnectionPool(host, port=port, **kw)
else:
return HTTPConnectionPool(host, port=port, **kw)
|
ol-loginov/intellij-community | refs/heads/master | python/testData/resolve/multiFile/resolveQualifiedSuperClassInPackage/foo/baz.py | 83 | class SuperDuper(object):
def copy(self): print "duper"
|
peoplepower/botengine | refs/heads/master | com.ppc.Bot/narrative.py | 1 | '''
Created on June 28, 2016
This file is subject to the terms and conditions defined in the
file 'LICENSE.txt', which is part of this source code package.
@author: David Moss
'''
class Narrative:
"""
December 3, 2019 - Deprecated.
SHOULD BE SAFE TO DELETE THIS FILE AFTER MARCH, 2020. TEST BY RUNNING YOUR BOT.
This file remains active for now while its object is still being referenced by existing bots with dill/pickle.
We've updated the location.py file to reference utilities.narrative for future narrative object creation,
but we expect it will take a while for everyone's current bots to clear out those old references.
"""
def __init__(self, narrative_id, narrative_time, admin):
"""
Constructor
:param narrative_id: Narrative ID
:param narrative_time: Narrative timestamp
:param to_admin: True if this is to the admin, False if it's to a user
"""
# Narrative ID
self.narrative_id = narrative_id
# Narrative Timestamp
self.narrative_time = narrative_time
# To admin
self.admin = admin
def resolve(self, botengine):
"""
Resolve this narrative
:param botengine: BotEngine environment
"""
response = botengine.narrate(update_narrative_id=self.narrative_id, update_narrative_timestamp=self.narrative_time, admin=self.admin, status=2)
if response is not None:
self.narrative_id = response['narrativeId']
self.narrative_time = response['narrativeTime']
def add_comment(self, botengine, comment):
"""
Add a comment to this narrative
:param botengine: BotEngine environment
:param comment: Comment to add
:return:
"""
narrative_content = botengine.get_narration(self.narrative_id, self.admin)
if narrative_content is None:
return
else:
if 'target' not in narrative_content:
narrative_content['target'] = {}
if 'comment' not in narrative_content['target']:
narrative_content['target']['comment'] = ""
narrative_content['target']['comment'] += comment + "\n"
response = botengine.narrate(update_narrative_id=self.narrative_id, update_narrative_timestamp=self.narrative_time, admin=self.admin, extra_json_dict=narrative_content['target'])
if response is not None:
self.narrative_id = response['narrativeId']
self.narrative_time = response['narrativeTime']
def update_description(self, botengine, description):
"""
Update the description of an existing narrative
:param botengine: BotEngine environment
:param description: New description
"""
response = botengine.narrate(update_narrative_id=self.narrative_id, update_narrative_timestamp=self.narrative_time, admin=self.admin, description=description)
if response is not None:
self.narrative_id = response['narrativeId']
self.narrative_time = response['narrativeTime']
def delete(self, botengine):
"""
Delete this narrative
:param botengine: BotEngine environment
"""
botengine.delete_narration(self.narrative_id, self.narrative_time) |
GISPPU/GrenadaLandInformation | refs/heads/master | geonode/version.py | 9 | #########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import datetime
import os
import subprocess
def get_version(version=None):
"Returns a PEP 386-compliant version number from VERSION."
if version is None:
from geonode import __version__ as version
else:
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] == 'alpha' and version[4] == 0:
git_changeset = get_git_changeset()
if git_changeset:
sub = '.dev%s' % git_changeset
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return main + sub
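# Illustrative results (sketch; the alpha/dev case depends on the timestamp of
# the latest local git commit, and falls back to the plain main version when
# no git changeset is available):
#
#   get_version((1, 5, 1, 'final', 0)) -> '1.5.1'
#   get_version((0, 1, 0, 'rc', 1)) -> '0.1c1'
#   get_version((2, 0, 0, 'alpha', 0)) -> '2.0.dev20160101120000' (varies)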
def get_git_changeset():
"""Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
"""
repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
git_show = subprocess.Popen('git show --pretty=format:%ct --quiet HEAD',
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, cwd=repo_dir, universal_newlines=True)
timestamp = git_show.communicate()[0].partition('\n')[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None
return timestamp.strftime('%Y%m%d%H%M%S')
|
DNSUsher/securethenews | refs/heads/master | securethenews/search/views.py | 36 | from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
|
wuhengzhi/chromium-crosswalk | refs/heads/master | tools/idl_parser/idl_node.py | 125 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
#
# IDL Node
#
# IDL Node defines the IDLAttribute and IDLNode objects which are constructed
# by the parser as it processes the various 'productions'. The IDLAttribute
# objects are assigned to the IDLNode's property dictionary instead of being
# applied as children of the IDLNodes, so they do not exist in the final tree.
# The AST of IDLNodes is the output from the parsing stage and will be used
# as the source data by the various generators.
#
#
# CopyToList
#
# Takes an input item, list, or None, and returns a new list of that set.
def CopyToList(item):
# If the item is 'Empty' make it an empty list
if not item:
item = []
# If the item is not a list
if type(item) is not list:
item = [item]
# Make a copy we can modify
return list(item)
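# Illustrative behavior:
#   CopyToList(None) -> []
#   CopyToList('x') -> ['x']
#   CopyToList(['a', 'b']) -> ['a', 'b'] (a fresh copy, safe to mutate)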
# IDLSearch
#
# A base class for AST traversals. Subclasses override Enter/Exit, which
# IDLNode.Traverse calls on each node, while 'depth' tracks nesting.
#
class IDLSearch(object):
def __init__(self):
self.depth = 0
def Enter(self, node):
pass
def Exit(self, node):
pass
# IDLAttribute
#
# A temporary object used by the parsing process to hold an Extended Attribute
# which will be passed as a child to a standard IDLNode.
#
class IDLAttribute(object):
def __init__(self, name, value):
self._cls = 'Property'
self.name = name
self.value = value
def __str__(self):
return '%s=%s' % (self.name, self.value)
def GetClass(self):
return self._cls
#
# IDLNode
#
# This class implements the AST tree, providing the associations between
# parents and children. It also contains a namepsace and propertynode to
# allow for look-ups. IDLNode is derived from IDLRelease, so it is
# version aware.
#
class IDLNode(object):
def __init__(self, cls, filename, lineno, pos, children=None):
self._cls = cls
self._properties = {
'ERRORS' : [],
'WARNINGS': [],
'FILENAME': filename,
'LINENO' : lineno,
'POSITION': pos,
}
self._children = []
self._parent = None
self.AddChildren(children)
#
#
#
# Return a string representation of this node
def __str__(self):
name = self.GetProperty('NAME','')
return '%s(%s)' % (self._cls, name)
def GetLogLine(self, msg):
filename, lineno = self.GetFileAndLine()
return '%s(%d) : %s\n' % (filename, lineno, msg)
# Log an error for this object
def Error(self, msg):
self.GetProperty('ERRORS').append(msg)
sys.stderr.write(self.GetLogLine('error: ' + msg))
# Log a warning for this object
def Warning(self, msg):
self.GetProperty('WARNINGS').append(msg)
sys.stdout.write(self.GetLogLine('warning: ' + msg))
# Return file and line number for where node was defined
def GetFileAndLine(self):
return self.GetProperty('FILENAME'), self.GetProperty('LINENO')
def GetClass(self):
return self._cls
def GetName(self):
return self.GetProperty('NAME')
def GetParent(self):
return self._parent
def Traverse(self, search, filter_nodes):
if self._cls in filter_nodes:
return ''
search.Enter(self)
search.depth += 1
for child in self._children:
child.Traverse(search, filter_nodes)
search.depth -= 1
search.Exit(self)
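# A traversal sketch (hypothetical subclass, not part of this module): collect
# the class of every node in pre-order, skipping comment nodes.
#
#   class ClassCollector(IDLSearch):
#     def __init__(self):
#       IDLSearch.__init__(self)
#       self.classes = []
#     def Enter(self, node):
#       self.classes.append(node.GetClass())
#
#   search = ClassCollector()
#   root.Traverse(search, filter_nodes=['Comment'])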
def Tree(self, filter_nodes=None, accept_props=None):
class DumpTreeSearch(IDLSearch):
def __init__(self, props):
IDLSearch.__init__(self)
self.out = []
self.props = props
def Enter(self, node):
tab = ''.rjust(self.depth * 2)
self.out.append(tab + str(node))
if self.props:
proplist = []
for key, value in node.GetProperties().iteritems():
if key in self.props:
proplist.append(tab + ' %s: %s' % (key, str(value)))
if proplist:
self.out.append(tab + ' PROPERTIES')
self.out.extend(proplist)
if filter_nodes is None:
filter_nodes = ['Comment', 'Copyright']
search = DumpTreeSearch(accept_props)
self.Traverse(search, filter_nodes)
return search.out
#
# Search related functions
#
# Check if node is of a given type
def IsA(self, *typelist):
if self._cls in typelist:
return True
return False
# Get a list of all children
def GetChildren(self):
return self._children
def GetListOf(self, *keys):
out = []
for child in self.GetChildren():
if child.GetClass() in keys:
out.append(child)
return out
def GetOneOf(self, *keys):
out = self.GetListOf(*keys)
if out:
return out[0]
return None
def AddChildren(self, children):
children = CopyToList(children)
for child in children:
if not child:
continue
if type(child) == IDLAttribute:
self.SetProperty(child.name, child.value)
continue
if type(child) == IDLNode:
child._parent = self
self._children.append(child)
continue
raise RuntimeError('Adding child of type %s.\n' % type(child).__name__)
#
# Property Functions
#
def SetProperty(self, name, val):
self._properties[name] = val
def GetProperty(self, name, default=None):
return self._properties.get(name, default)
def GetProperties(self):
return self._properties
|
ProfessionalIT/professionalit-webiste | refs/heads/master | sdk/google_appengine/google/appengine/tools/devappserver2/wsgi_request_info_test.py | 8 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for google.apphosting.tools.devappserver2.wsgi_request_info."""
import re
import unittest
import wsgiref.util
from google.appengine.tools.devappserver2 import wsgi_request_info
class TestWSGIRequestInfo(unittest.TestCase):
"""Tests for WSGIRequestInfo."""
def setUp(self):
self.dispatcher = object()
self.request_info = wsgi_request_info.WSGIRequestInfo(self.dispatcher)
def _assert_request_id(self, request_id):
self.assertTrue(re.match('[a-zA-Z]{10}$', request_id),
'invalid request id: %r' % request_id)
def _create_environ(self, scheme, host, path='', query=''):
environ = {'wsgi.url_scheme': scheme,
'HTTP_HOST': host,
'PATH_INFO': path,
'QUERY_STRING': query}
wsgiref.util.setup_testing_defaults(environ)
return environ
def _create_module_configuration(self, module_name, version_id):
class ModuleConfiguration(object):
pass
config = ModuleConfiguration()
config.major_version = version_id
config.module_name = module_name
return config
def test_get_request_url(self):
with self.request_info.request(
self._create_environ('https', 'machine:8080',
'/foo', 'bar=baz'),
self._create_module_configuration('default', '1')) as request_id:
self._assert_request_id(request_id)
self.assertEqual('https://machine:8080/foo?bar=baz',
self.request_info.get_request_url(request_id))
def test_get_request_environ(self):
environ = object()
with self.request_info.request(
environ,
self._create_module_configuration('default', '1')) as request_id:
self._assert_request_id(request_id)
self.assertIs(environ, self.request_info.get_request_environ(request_id))
def test_get_dispatcher(self):
with self.request_info.request(
self._create_environ('https', 'machine:8080',
'/foo', 'bar=baz'),
self._create_module_configuration('default', '1')) as request_id:
self._assert_request_id(request_id)
self.assertEqual(self.dispatcher,
self.request_info.get_dispatcher())
def test_get_module(self):
with self.request_info.request(
self._create_environ('https', 'machine:8080',
'/foo', 'bar=baz'),
self._create_module_configuration('default', '1')) as request_id:
self._assert_request_id(request_id)
self.assertEqual('default', self.request_info.get_module(request_id))
def test_get_version(self):
with self.request_info.request(
self._create_environ('https', 'machine:8080',
'/foo', 'bar=baz'),
self._create_module_configuration('default', '1')) as request_id:
self._assert_request_id(request_id)
self.assertEqual('1', self.request_info.get_version(request_id))
def test_get_instance_unset(self):
with self.request_info.request(
self._create_environ('https', 'machine:8080',
'/foo', 'bar=baz'),
self._create_module_configuration('default', '1')) as request_id:
self._assert_request_id(request_id)
self.assertIsNone(self.request_info.get_instance(request_id))
def test_get_instance(self):
with self.request_info.request(
self._create_environ('https', 'machine:8080',
'/foo', 'bar=baz'),
self._create_module_configuration('default', '1')) as request_id:
instance = object()
self.request_info.set_request_instance(request_id, instance)
self._assert_request_id(request_id)
self.assertEqual(instance, self.request_info.get_instance(request_id))
def test_concurrent_requests(self):
request_id1 = self.request_info.start_request(
self._create_environ('http', 'machine:8081'),
self._create_module_configuration('default', '1'))
request_id2 = self.request_info.start_request(
self._create_environ('http', 'machine:8082'),
self._create_module_configuration('default', '2'))
request_id3 = self.request_info.start_request(
self._create_environ('http', 'machine:8083'),
self._create_module_configuration('other', '1'))
self._assert_request_id(request_id1)
self._assert_request_id(request_id2)
self._assert_request_id(request_id3)
self.assertTrue(request_id1 != request_id2 != request_id3)
self.assertEqual('http://machine:8081/',
self.request_info.get_request_url(request_id1))
self.assertEqual(self.dispatcher,
self.request_info.get_dispatcher())
self.assertEqual('default', self.request_info.get_module(request_id1))
self.assertEqual('1', self.request_info.get_version(request_id1))
self.assertEqual('http://machine:8082/',
self.request_info.get_request_url(request_id2))
self.assertEqual(self.dispatcher,
self.request_info.get_dispatcher())
self.assertEqual('default', self.request_info.get_module(request_id2))
self.assertEqual('2', self.request_info.get_version(request_id2))
self.request_info.end_request(request_id1)
self.request_info.end_request(request_id2)
self.assertEqual('http://machine:8083/',
self.request_info.get_request_url(request_id3))
self.assertEqual(self.dispatcher,
self.request_info.get_dispatcher())
self.assertEqual('other', self.request_info.get_module(request_id3))
self.assertEqual('1', self.request_info.get_version(request_id3))
if __name__ == '__main__':
unittest.main()
|
pavelchristof/gomoku-ai | refs/heads/master | tensorflow/contrib/learn/python/learn/monitors_test.py | 40 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Monitors tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import shutil
import tempfile
import time
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib import testing
from tensorflow.contrib.framework.python.framework import checkpoint_utils
from tensorflow.contrib.learn.python import learn
from tensorflow.contrib.learn.python.learn import estimators
from tensorflow.python.client import session as session_lib
from tensorflow.python.estimator import estimator as core_estimator
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver
from tensorflow.python.training import training_util
class _MyEveryN(learn.monitors.EveryN):
def __init__(self, every_n_steps=100, first_n_steps=1):
super(_MyEveryN, self).__init__(
every_n_steps=every_n_steps, first_n_steps=first_n_steps)
self._steps_begun = []
self._steps_ended = []
self._post_steps = []
@property
def steps_begun(self):
return self._steps_begun
@property
def steps_ended(self):
return self._steps_ended
@property
def post_steps(self):
return self._post_steps
def every_n_step_begin(self, step):
super(_MyEveryN, self).every_n_step_begin(step)
self._steps_begun.append(step)
return []
def every_n_step_end(self, step, outputs):
super(_MyEveryN, self).every_n_step_end(step, outputs)
self._steps_ended.append(step)
return False
def every_n_post_step(self, step, session):
super(_MyEveryN, self).every_n_post_step(step, session)
self._post_steps.append(step)
return False
class MonitorsTest(test.TestCase):
"""Monitors tests."""
def setUp(self):
# Mock out logging calls so we can verify whether correct tensors are being
# monitored.
self._actual_log = logging.info
def mockLog(*args, **kwargs): # pylint: disable=invalid-name
self.logged_message = args
self._actual_log(*args, **kwargs)
logging.info = mockLog
def tearDown(self):
logging.info = self._actual_log
def _run_monitor(self,
monitor,
num_epochs=3,
num_steps_per_epoch=10,
pass_max_steps=True):
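# Drives the monitor through a simulated training loop in the same order a
# real fit() would: begin(), then for each epoch epoch_begin(), a
# step_begin()/step_end()/post_step() triple per step, epoch_end(), and
# finally end(). Tensors requested by step_begin() are evaluated and fed
# back to step_end() keyed by tensor name.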
if pass_max_steps:
max_steps = num_epochs * num_steps_per_epoch - 1
else:
max_steps = None
monitor.begin(max_steps=max_steps)
for epoch in xrange(num_epochs):
monitor.epoch_begin(epoch)
should_stop = False
step = epoch * num_steps_per_epoch
next_epoch_step = step + num_steps_per_epoch
while (not should_stop) and (step < next_epoch_step):
tensors = monitor.step_begin(step)
output = ops.get_default_session().run(tensors) if tensors else {}
output = dict(
zip([t.name if isinstance(t, ops.Tensor) else t for t in tensors],
output))
should_stop = monitor.step_end(step=step, output=output)
monitor.post_step(step=step, session=None)
step += 1
monitor.epoch_end(epoch)
monitor.end()
def test_base_monitor(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._run_monitor(learn.monitors.BaseMonitor())
def test_every_0(self):
monitor = _MyEveryN(every_n_steps=0, first_n_steps=-1)
with ops.Graph().as_default() as g, self.test_session(g):
self._run_monitor(monitor, num_epochs=3, num_steps_per_epoch=10)
expected_steps = list(range(30))
self.assertAllEqual(expected_steps, monitor.steps_begun)
self.assertAllEqual(expected_steps, monitor.steps_ended)
self.assertAllEqual(expected_steps, monitor.post_steps)
def test_every_1(self):
monitor = _MyEveryN(every_n_steps=1, first_n_steps=-1)
with ops.Graph().as_default() as g, self.test_session(g):
self._run_monitor(monitor, num_epochs=3, num_steps_per_epoch=10)
expected_steps = list(range(1, 30))
self.assertEqual(expected_steps, monitor.steps_begun)
self.assertEqual(expected_steps, monitor.steps_ended)
self.assertEqual(expected_steps, monitor.post_steps)
def test_every_2(self):
monitor = _MyEveryN(every_n_steps=2, first_n_steps=-1)
with ops.Graph().as_default() as g, self.test_session(g):
self._run_monitor(monitor, num_epochs=3, num_steps_per_epoch=10)
expected_steps = list(range(2, 29, 2)) + [29]
self.assertEqual(expected_steps, monitor.steps_begun)
self.assertEqual(expected_steps, monitor.steps_ended)
self.assertEqual(expected_steps, monitor.post_steps)
def test_every_8(self):
monitor = _MyEveryN(every_n_steps=8, first_n_steps=2)
with ops.Graph().as_default() as g, self.test_session(g):
self._run_monitor(monitor, num_epochs=3, num_steps_per_epoch=10)
expected_steps = [0, 1, 2, 10, 18, 26, 29]
self.assertEqual(expected_steps, monitor.steps_begun)
self.assertEqual(expected_steps, monitor.steps_ended)
self.assertEqual(expected_steps, monitor.post_steps)
def test_every_8_no_max_steps(self):
monitor = _MyEveryN(every_n_steps=8, first_n_steps=2)
with ops.Graph().as_default() as g, self.test_session(g):
self._run_monitor(
monitor, num_epochs=3, num_steps_per_epoch=10, pass_max_steps=False)
begin_end_steps = [0, 1, 2, 10, 18, 26]
post_steps = [0, 1, 2, 10, 18, 26, 29]
self.assertEqual(begin_end_steps, monitor.steps_begun)
self.assertEqual(begin_end_steps, monitor.steps_ended)
self.assertEqual(post_steps, monitor.post_steps)
def test_every_8_recovered_after_step_begin(self):
monitor = _MyEveryN(every_n_steps=8)
with ops.Graph().as_default() as g, self.test_session(g):
for step in [8, 16]:
monitor.step_begin(step)
monitor.step_begin(step)
monitor.step_end(step, output=None)
monitor.post_step(step, session=None)
# It should call begin again, since end was not called
self.assertEqual([8, 8, 16, 16], monitor.steps_begun)
self.assertEqual([8, 16], monitor.steps_ended)
self.assertEqual([8, 16], monitor.post_steps)
def test_every_8_recovered_after_step_end(self):
monitor = _MyEveryN(every_n_steps=8)
with ops.Graph().as_default() as g, self.test_session(g):
for step in [8, 16]:
monitor.step_begin(step)
monitor.step_end(step, output=None)
monitor.post_step(step, session=None)
monitor.step_begin(step)
monitor.step_end(step, output=None)
monitor.post_step(step, session=None)
# It should not call begin twice since end was called
self.assertEqual([8, 16], monitor.steps_begun)
self.assertEqual([8, 16], monitor.steps_ended)
self.assertEqual([8, 16], monitor.post_steps)
def test_every_8_call_post_step_at_the_end(self):
monitor = _MyEveryN(every_n_steps=8)
with ops.Graph().as_default() as g, self.test_session(g):
monitor.begin()
for step in [8, 16]:
monitor.step_begin(step)
monitor.step_end(step, output=None)
monitor.post_step(step, session=None)
monitor.step_begin(19)
monitor.step_end(19, output=None)
monitor.post_step(19, session=None)
monitor.end(session=None)
# It should not call begin twice since end was called
self.assertEqual([8, 16], monitor.steps_begun)
self.assertEqual([8, 16], monitor.steps_ended)
self.assertEqual([8, 16, 19], monitor.post_steps)
def test_every_8_call_post_step_should_not_be_called_twice(self):
monitor = _MyEveryN(every_n_steps=8)
with ops.Graph().as_default() as g, self.test_session(g):
monitor.begin()
for step in [8, 16]:
monitor.step_begin(step)
monitor.step_end(step, output=None)
monitor.post_step(step, session=None)
monitor.step_begin(16)
monitor.step_end(16, output=None)
monitor.post_step(16, session=None)
monitor.end(session=None)
# It should not call begin twice since end was called
self.assertEqual([8, 16], monitor.steps_begun)
self.assertEqual([8, 16], monitor.steps_ended)
self.assertEqual([8, 16], monitor.post_steps)
def test_print(self):
with ops.Graph().as_default() as g, self.test_session(g):
t = constant_op.constant(42.0, name='foo')
self._run_monitor(learn.monitors.PrintTensor(tensor_names=[t.name]))
self.assertRegexpMatches(str(self.logged_message), t.name)
def test_logging_trainable(self):
with ops.Graph().as_default() as g, self.test_session(g):
var = variables.Variable(constant_op.constant(42.0), name='foo')
var.initializer.run()
cof = constant_op.constant(1.0)
loss = math_ops.subtract(
math_ops.multiply(var, cof), constant_op.constant(1.0))
train_step = gradient_descent.GradientDescentOptimizer(0.5).minimize(loss)
ops.get_default_session().run(train_step)
self._run_monitor(learn.monitors.LoggingTrainable('foo'))
self.assertRegexpMatches(str(self.logged_message), var.name)
def test_summary_saver(self):
with ops.Graph().as_default() as g, self.test_session(g):
log_dir = 'log/dir'
summary_writer = testing.FakeSummaryWriter(log_dir, g)
var = variables.Variable(0.0)
var.initializer.run()
tensor = state_ops.assign_add(var, 1.0)
summary_op = summary.scalar('my_summary', tensor)
self._run_monitor(
learn.monitors.SummarySaver(
summary_op=summary_op,
save_steps=8,
summary_writer=summary_writer),
num_epochs=3,
num_steps_per_epoch=10)
summary_writer.assert_summaries(
test_case=self,
expected_logdir=log_dir,
expected_graph=g,
expected_summaries={
0: {
'my_summary': 1.0
},
1: {
'my_summary': 2.0
},
9: {
'my_summary': 3.0
},
17: {
'my_summary': 4.0
},
25: {
'my_summary': 5.0
},
29: {
'my_summary': 6.0
},
})
def _assert_validation_monitor(self,
monitor,
expected_early_stopped=False,
expected_best_step=None,
expected_best_value=None,
expected_best_metrics=None):
self.assertEqual(expected_early_stopped, monitor.early_stopped)
self.assertEqual(expected_best_step, monitor.best_step)
self.assertEqual(expected_best_value, monitor.best_value)
self.assertEqual(expected_best_metrics, monitor.best_metrics)
def test_validation_monitor_no_estimator(self):
monitor = learn.monitors.ValidationMonitor(
x=constant_op.constant(2.0), every_n_steps=0)
self._assert_validation_monitor(monitor)
with ops.Graph().as_default() as g, self.test_session(g):
with self.assertRaisesRegexp(ValueError, 'set_estimator'):
self._run_monitor(monitor)
@test.mock.patch.object(estimators, 'Estimator', autospec=True)
@test.mock.patch.object(saver, 'latest_checkpoint')
def test_validation_monitor_no_ckpt(self, mock_latest_checkpoint,
mock_estimator_class):
estimator = mock_estimator_class()
model_dir = 'model/dir'
estimator.model_dir = model_dir
mock_latest_checkpoint.return_value = None
# Do nothing with no checkpoint.
monitor = learn.monitors.ValidationMonitor(
x=constant_op.constant(2.0), every_n_steps=0)
self._assert_validation_monitor(monitor)
monitor.set_estimator(estimator)
with ops.Graph().as_default() as g, self.test_session(g):
self._run_monitor(monitor)
self._assert_validation_monitor(monitor)
mock_latest_checkpoint.assert_called_with(model_dir)
@test.mock.patch.object(estimators, 'Estimator', autospec=True)
@test.mock.patch.object(saver, 'latest_checkpoint')
def test_validation_monitor_no_early_stopping_rounds(self,
mock_latest_checkpoint,
mock_estimator_class):
estimator = mock_estimator_class()
model_dir = 'model/dir'
estimator.model_dir = model_dir
estimator.evaluate.return_value = {}
mock_latest_checkpoint.return_value = '%s/ckpt' % model_dir
# Do nothing with early_stopping_rounds=None.
monitor = learn.monitors.ValidationMonitor(
x=constant_op.constant(2.0), every_n_steps=0)
self._assert_validation_monitor(monitor)
monitor.set_estimator(estimator)
with ops.Graph().as_default() as g, self.test_session(g):
self._run_monitor(monitor)
self._assert_validation_monitor(monitor)
@test.mock.patch.object(estimators, 'Estimator', autospec=True)
@test.mock.patch.object(saver, 'latest_checkpoint')
def test_validation_monitor_invalid_metric(self, mock_latest_checkpoint,
mock_estimator_class):
estimator = mock_estimator_class()
model_dir = 'model/dir'
estimator.model_dir = model_dir
estimator.evaluate.return_value = {}
mock_latest_checkpoint.return_value = '%s/ckpt' % model_dir
# Fail for missing metric.
monitor = learn.monitors.ValidationMonitor(
x=constant_op.constant(2.0), every_n_steps=0, early_stopping_rounds=1)
self._assert_validation_monitor(monitor)
monitor.set_estimator(estimator)
with ops.Graph().as_default() as g, self.test_session(g):
with self.assertRaisesRegexp(ValueError, 'missing from outputs'):
self._run_monitor(monitor, num_epochs=1, num_steps_per_epoch=1)
@test.mock.patch.object(estimators, 'Estimator', autospec=True)
@test.mock.patch.object(saver, 'latest_checkpoint')
def test_validation_monitor(self, mock_latest_checkpoint,
mock_estimator_class):
estimator = mock_estimator_class()
model_dir = 'model/dir'
estimator.model_dir = model_dir
validation_outputs = {'loss': None, 'auc': None}
estimator.evaluate.return_value = validation_outputs
monitor = learn.monitors.ValidationMonitor(
x=constant_op.constant(2.0), every_n_steps=0, early_stopping_rounds=2)
self._assert_validation_monitor(monitor)
monitor.set_estimator(estimator)
with ops.Graph().as_default() as g, self.test_session(g):
monitor.begin(max_steps=100)
monitor.epoch_begin(epoch=0)
self.assertEqual(0, estimator.evaluate.call_count)
# Step 0, initial loss.
step = 0
mock_latest_checkpoint.return_value = '%s/ckpt.%s' % (model_dir, step)
validation_outputs['loss'] = 42.0
validation_outputs['auc'] = 0.5
self.assertEqual(0, len(monitor.step_begin(step=step)))
self.assertFalse(monitor.step_end(step=step, output={}))
self.assertEqual(1, estimator.evaluate.call_count)
self._assert_validation_monitor(
monitor, expected_best_step=0, expected_best_value=42.0,
expected_best_metrics={'loss': 42.0, 'auc': 0.5})
monitor.post_step(step=step, session=None)
# Step 1, same checkpoint, no eval.
step = 1
self.assertEqual(0, len(monitor.step_begin(step=step)))
self.assertFalse(monitor.step_end(step=step, output={}))
self.assertEqual(1, estimator.evaluate.call_count)
self._assert_validation_monitor(
monitor, expected_best_step=0, expected_best_value=42.0,
expected_best_metrics={'loss': 42.0, 'auc': 0.5})
monitor.post_step(step=step, session=None)
# Step 2, lower loss.
step = 2
mock_latest_checkpoint.return_value = '%s/ckpt.%s' % (model_dir, step)
validation_outputs['loss'] = 40.0
validation_outputs['auc'] = 0.6
self.assertEqual(0, len(monitor.step_begin(step=step)))
self.assertFalse(monitor.step_end(step=step, output={}))
self.assertEqual(2, estimator.evaluate.call_count)
self._assert_validation_monitor(
monitor, expected_best_step=2, expected_best_value=40.0,
expected_best_metrics={'loss': 40.0, 'auc': 0.6})
monitor.post_step(step=step, session=None)
# Step 3, higher loss.
step = 3
mock_latest_checkpoint.return_value = '%s/ckpt.%s' % (model_dir, step)
validation_outputs['loss'] = 44.0
validation_outputs['auc'] = 0.7
self.assertEqual(0, len(monitor.step_begin(step=step)))
self.assertFalse(monitor.step_end(step=step, output={}))
self.assertEqual(3, estimator.evaluate.call_count)
self._assert_validation_monitor(
monitor, expected_best_step=2, expected_best_value=40.0,
expected_best_metrics={'loss': 40.0, 'auc': 0.6})
monitor.post_step(step=step, session=None)
# Step 4, higher loss for 2 steps, early stopping.
step = 4
mock_latest_checkpoint.return_value = '%s/ckpt.%s' % (model_dir, step)
validation_outputs['loss'] = 43.0
self.assertEqual(0, len(monitor.step_begin(step=step)))
self.assertTrue(monitor.step_end(step=step, output={}))
self.assertEqual(4, estimator.evaluate.call_count)
self._assert_validation_monitor(
monitor,
expected_early_stopped=True,
expected_best_step=2,
expected_best_value=40.0,
expected_best_metrics={'loss': 40.0, 'auc': 0.6})
monitor.post_step(step=step, session=None)
monitor.epoch_end(epoch=0)
monitor.end()
@test.mock.patch.object(saver, 'latest_checkpoint')
def test_validation_monitor_with_core_estimator(self, mock_latest_checkpoint):
estimator = test.mock.Mock(spec=core_estimator.Estimator)
model_dir = 'model/dir'
estimator.model_dir = model_dir
validation_outputs = {'loss': None, 'auc': None}
estimator.evaluate.return_value = validation_outputs
monitor = learn.monitors.ValidationMonitor(
input_fn=lambda: constant_op.constant(2.0),
every_n_steps=0, early_stopping_rounds=2)
self._assert_validation_monitor(monitor)
monitor.set_estimator(estimator)
with ops.Graph().as_default() as g, self.test_session(g):
monitor.begin(max_steps=100)
monitor.epoch_begin(epoch=0)
self.assertEqual(0, estimator.evaluate.call_count)
# Step 0, initial loss.
step = 0
mock_latest_checkpoint.return_value = '%s/ckpt.%s' % (model_dir, step)
validation_outputs['loss'] = 42.0
validation_outputs['auc'] = 0.5
self.assertEqual(0, len(monitor.step_begin(step=step)))
self.assertFalse(monitor.step_end(step=step, output={}))
self.assertEqual(1, estimator.evaluate.call_count)
self._assert_validation_monitor(
monitor, expected_best_step=0, expected_best_value=42.0,
expected_best_metrics={'loss': 42.0, 'auc': 0.5})
monitor.post_step(step=step, session=None)
@test.mock.patch.object(saver, 'latest_checkpoint')
def test_validation_monitor_fail_with_core_estimator_and_metrics(
self, mock_latest_checkpoint):
estimator = test.mock.Mock(spec=core_estimator.Estimator)
model_dir = 'model/dir'
estimator.model_dir = model_dir
validation_outputs = {'loss': None}
estimator.evaluate.return_value = validation_outputs
monitor = learn.monitors.ValidationMonitor(
input_fn=lambda: constant_op.constant(2.0),
metrics=constant_op.constant(2.0),
every_n_steps=0, early_stopping_rounds=2)
monitor.set_estimator(estimator)
with ops.Graph().as_default() as g, self.test_session(g):
monitor.begin(max_steps=100)
monitor.epoch_begin(epoch=0)
with self.assertRaisesRegexp(
ValueError,
'tf.estimator.Estimator does not support .* metrics'):
step = 0
mock_latest_checkpoint.return_value = '%s/ckpt.%s' % (model_dir, step)
validation_outputs['loss'] = 42.0
self.assertEqual(0, len(monitor.step_begin(step=step)))
self.assertFalse(monitor.step_end(step=step, output={}))
def test_graph_dump(self):
monitor0 = learn.monitors.GraphDump()
monitor1 = learn.monitors.GraphDump()
with ops.Graph().as_default() as g, self.test_session(g):
const_var = variables.Variable(42.0, name='my_const')
counter_var = variables.Variable(0.0, name='my_counter')
assign_add = state_ops.assign_add(counter_var, 1.0, name='my_assign_add')
variables.global_variables_initializer().run()
self._run_monitor(monitor0, num_epochs=3, num_steps_per_epoch=10)
self.assertEqual({
step: {
const_var.name: 42.0,
counter_var.name: step + 1.0,
assign_add.name: step + 1.0,
}
for step in xrange(30)
}, monitor0.data)
self._run_monitor(monitor1, num_epochs=3, num_steps_per_epoch=10)
self.assertEqual({
step: {
const_var.name: 42.0,
counter_var.name: step + 31.0,
assign_add.name: step + 31.0,
}
for step in xrange(30)
}, monitor1.data)
for step in xrange(30):
matched, non_matched = monitor1.compare(monitor0, step=step)
self.assertEqual([const_var.name], matched)
self.assertEqual({
assign_add.name: (step + 31.0, step + 1.0),
counter_var.name: (step + 31.0, step + 1.0),
}, non_matched)
matched, non_matched = monitor0.compare(monitor1, step=step)
self.assertEqual([const_var.name], matched)
self.assertEqual({
assign_add.name: (step + 1.0, step + 31.0),
counter_var.name: (step + 1.0, step + 31.0),
}, non_matched)
def test_capture_variable(self):
monitor = learn.monitors.CaptureVariable(
var_name='my_assign_add:0', every_n=8, first_n=2)
with ops.Graph().as_default() as g, self.test_session(g):
var = variables.Variable(0.0, name='my_var')
var.initializer.run()
state_ops.assign_add(var, 1.0, name='my_assign_add')
self._run_monitor(monitor, num_epochs=3, num_steps_per_epoch=10)
self.assertEqual({
0: 1.0,
1: 2.0,
2: 3.0,
10: 4.0,
18: 5.0,
26: 6.0,
29: 7.0,
}, monitor.values)
class StopAtStepTest(test.TestCase):
def test_raise_in_both_last_step_and_num_steps(self):
with self.assertRaises(ValueError):
learn.monitors.StopAtStep(num_steps=10, last_step=20)
def test_stop_based_on_last_step(self):
m = learn.monitors.StopAtStep(last_step=10)
m.step_begin(5)
self.assertFalse(m.step_end(5, None))
m.step_begin(9)
self.assertFalse(m.step_end(9, None))
m.step_begin(10)
self.assertTrue(m.step_end(10, None))
m.step_begin(11)
self.assertTrue(m.step_end(11, None))
def test_stop_based_on_num_step(self):
m = learn.monitors.StopAtStep(num_steps=10)
m.step_begin(5)
self.assertFalse(m.step_end(5, None))
m.step_begin(13)
self.assertFalse(m.step_end(13, None))
m.step_begin(14)
self.assertTrue(m.step_end(14, None))
m.step_begin(15)
self.assertTrue(m.step_end(15, None))
class CheckpointSaverTest(test.TestCase):
def setUp(self):
self.model_dir = tempfile.mkdtemp()
self.graph = ops.Graph()
with self.graph.as_default():
self.scaffold = monitored_session.Scaffold()
self.global_step = training_util.get_or_create_global_step()
self.train_op = state_ops.assign_add(self.global_step, 1)
def tearDown(self):
shutil.rmtree(self.model_dir, ignore_errors=True)
def _run(self, monitor, step, train_op, sess):
monitor.step_begin(step)
sess.run(train_op)
monitor.post_step(step, sess)
def test_raise_in_both_secs_and_steps(self):
with self.assertRaises(ValueError):
learn.monitors.CheckpointSaver(
self.model_dir, save_secs=10, save_steps=20)
def test_raise_in_none_secs_and_steps(self):
with self.assertRaises(ValueError):
learn.monitors.CheckpointSaver(self.model_dir)
def test_save_secs_saves_in_first_step(self):
with self.graph.as_default():
monitor = learn.monitors.CheckpointSaver(
self.model_dir, save_secs=2, scaffold=self.scaffold)
monitor.begin()
self.scaffold.finalize()
with session_lib.Session() as sess:
sess.run(self.scaffold.init_op)
self._run(monitor, 1, self.train_op, sess)
self.assertEqual(1,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
# TODO(gunan): Reenable this test after b/32446874 is fixed.
def disabled_test_save_secs_saves_periodically(self):
with self.graph.as_default():
monitor = learn.monitors.CheckpointSaver(
self.model_dir, save_secs=2, scaffold=self.scaffold)
monitor.begin()
self.scaffold.finalize()
with session_lib.Session() as sess:
sess.run(self.scaffold.init_op)
self._run(monitor, 1, self.train_op, sess)
self._run(monitor, 2, self.train_op, sess)
# Not saved
self.assertEqual(1,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
time.sleep(2.5)
self._run(monitor, 3, self.train_op, sess)
# saved
self.assertEqual(3,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
self._run(monitor, 4, self.train_op, sess)
self._run(monitor, 5, self.train_op, sess)
# Not saved
self.assertEqual(3,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
time.sleep(2.5)
self._run(monitor, 6, self.train_op, sess)
# saved
self.assertEqual(6,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
def test_save_steps_saves_in_first_step(self):
with self.graph.as_default():
monitor = learn.monitors.CheckpointSaver(
self.model_dir, save_steps=2, scaffold=self.scaffold)
monitor.begin()
self.scaffold.finalize()
with session_lib.Session() as sess:
sess.run(self.scaffold.init_op)
self._run(monitor, 1, self.train_op, sess)
self.assertEqual(1,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
def test_save_steps_saves_periodically(self):
with self.graph.as_default():
monitor = learn.monitors.CheckpointSaver(
self.model_dir, save_steps=2, scaffold=self.scaffold)
monitor.begin()
self.scaffold.finalize()
with session_lib.Session() as sess:
sess.run(self.scaffold.init_op)
self._run(monitor, 1, self.train_op, sess)
self._run(monitor, 2, self.train_op, sess)
# Not saved
self.assertEqual(1,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
self._run(monitor, 3, self.train_op, sess)
# saved
self.assertEqual(3,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
self._run(monitor, 4, self.train_op, sess)
# Not saved
self.assertEqual(3,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
self._run(monitor, 5, self.train_op, sess)
# saved
self.assertEqual(5,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
def test_save_saves_at_end(self):
with self.graph.as_default():
monitor = learn.monitors.CheckpointSaver(
self.model_dir, save_secs=2, scaffold=self.scaffold)
monitor.begin()
self.scaffold.finalize()
with session_lib.Session() as sess:
sess.run(self.scaffold.init_op)
self._run(monitor, 1, self.train_op, sess)
self._run(monitor, 2, self.train_op, sess)
monitor.end(sess)
self.assertEqual(2,
checkpoint_utils.load_variable(self.model_dir,
self.global_step.name))
class FakeMonitor(learn.monitors.BaseMonitor):
def __init__(self):
learn.monitors.BaseMonitor.__init__(self)
self.should_stop = False
self.requested_tensors = []
self.call_counter = collections.Counter()
self.last_begin_step = None
self.last_end_step = None
self.last_post_step = None
def begin(self, max_steps):
self.call_counter['begin'] += 1
def end(self, session):
self.call_counter['end'] += 1
def step_begin(self, step):
self.call_counter['step_begin'] += 1
self.last_begin_step = step
return self.requested_tensors
def step_end(self, step, output):
self.call_counter['step_end'] += 1
self.last_end_step = step
self.output = output
return self.should_stop
def post_step(self, step, session):
self.call_counter['post_step'] += 1
self.last_post_step = step
self.session = session
class RunHookAdapterForMonitorsTest(test.TestCase):
def test_calls_and_steps(self):
with ops.Graph().as_default(), session_lib.Session() as sess:
global_step_tensor = training_util.create_global_step()
inc_5 = state_ops.assign_add(global_step_tensor, 5)
mock_mon = FakeMonitor()
mock_mon2 = FakeMonitor()
hook = learn.monitors.RunHookAdapterForMonitors([mock_mon, mock_mon2])
hook.begin()
for mon in [mock_mon, mock_mon2]:
self.assertEqual(mon.call_counter['begin'], 1)
sess.run(variables.global_variables_initializer())
sess.run(global_step_tensor.assign(10))
mon_sess = monitored_session._HookedSession(sess=sess, hooks=[hook])
mon_sess.run(inc_5)
for mon in [mock_mon, mock_mon2]:
self.assertEqual(mon.output, {})
self.assertEqual(mon.last_begin_step, 11)
self.assertEqual(mon.last_end_step, 11)
self.assertEqual(mon.last_post_step, 11)
self.assertEqual(mon.call_counter['step_end'], 1)
self.assertEqual(mon.call_counter['step_begin'], 1)
self.assertEqual(mon.call_counter['post_step'], 1)
mon_sess.run(inc_5)
for mon in [mock_mon, mock_mon2]:
self.assertEqual(mon.output, {})
self.assertEqual(mon.last_begin_step, 16)
self.assertEqual(mon.last_end_step, 16)
self.assertEqual(mon.last_post_step, 16)
self.assertEqual(mon.call_counter['step_end'], 2)
self.assertEqual(mon.call_counter['step_begin'], 2)
self.assertEqual(mon.call_counter['post_step'], 2)
hook.end(sess)
for mon in [mock_mon, mock_mon2]:
self.assertEqual(mon.call_counter['end'], 1)
def test_requests(self):
with ops.Graph().as_default(), session_lib.Session() as sess:
training_util.create_global_step()
mock_mon = FakeMonitor()
mock_mon2 = FakeMonitor()
hook = learn.monitors.RunHookAdapterForMonitors([mock_mon, mock_mon2])
hook.begin()
mon_sess = monitored_session._HookedSession(sess=sess, hooks=[hook])
a_tensor = constant_op.constant([0], name='a_tensor')
constant_op.constant([5], name='another_tensor')
constant_op.constant([10], name='third_tensor')
mock_mon.requested_tensors = ['another_tensor']
mock_mon2.requested_tensors = ['third_tensor']
sess.run(variables.global_variables_initializer())
output = mon_sess.run(a_tensor)
self.assertEqual(output, [0])
self.assertEqual(mock_mon.output['another_tensor'], [5])
self.assertEqual(mock_mon2.output['third_tensor'], [10])
if __name__ == '__main__':
test.main()
|
kenshay/ImageScript | refs/heads/master | ProgramData/SystemFiles/Python/Lib/site-packages/sphinx/jinja2glue.py | 4 | # -*- coding: utf-8 -*-
"""
sphinx.jinja2glue
~~~~~~~~~~~~~~~~~
Glue code for the jinja2 templating engine.
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from os import path
from pprint import pformat
from six import string_types
from jinja2 import FileSystemLoader, BaseLoader, TemplateNotFound, \
contextfunction
from jinja2.utils import open_if_exists
from jinja2.sandbox import SandboxedEnvironment
from sphinx.application import TemplateBridge
from sphinx.util.osutil import mtimes_of_files
def _tobool(val):
if isinstance(val, string_types):
return val.lower() in ('true', '1', 'yes', 'on')
return bool(val)
def _toint(val):
try:
return int(val)
except ValueError:
return 0
def _slice_index(values, slices):
seq = list(values)
length = 0
for value in values:
length += 1 + len(value[1][1]) # count includes subitems
items_per_slice = length // slices
offset = 0
for slice_number in range(slices):
count = 0
start = offset
if slices == slice_number + 1: # last column
offset = len(seq)
else:
for value in values[offset:]:
count += 1 + len(value[1][1])
offset += 1
if count >= items_per_slice:
break
yield seq[start:offset]
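# An illustrative sketch (hypothetical input): items are weighted as
# 1 + len(subitems), so an entry with two subitems fills a column by itself.
#
#   values = [('a', (None, ['s1', 's2'])), ('b', (None, [])), ('c', (None, []))]
#   list(_slice_index(values, 2)) -> [[('a', ...)], [('b', ...), ('c', ...)]]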
def accesskey(context, key):
"""Helper to output each access key only once."""
if '_accesskeys' not in context:
context.vars['_accesskeys'] = {}
if key and key not in context.vars['_accesskeys']:
context.vars['_accesskeys'][key] = 1
return 'accesskey="%s"' % key
return ''
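# Illustrative template usage (sketch): the first occurrence of a key renders
# the attribute, later occurrences in the same render return ''.
#
#   {{ accesskey('n') }} -> 'accesskey="n"'
#   {{ accesskey('n') }} -> ''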
class idgen(object):
def __init__(self):
self.id = 0
def current(self):
return self.id
def __next__(self):
self.id += 1
return self.id
next = __next__ # Python 2/Jinja compatibility
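# Illustrative template usage (sketch): 'idgen' is installed as a template
# global below, so templates can mint unique ids per render:
#
#   {% set gen = idgen() %}
#   {{ gen.next() }} -> 1
#   {{ gen.next() }} -> 2
#   {{ gen.current() }} -> 2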
class SphinxFileSystemLoader(FileSystemLoader):
"""
FileSystemLoader subclass that is not so strict about '..' entries in
template names.
"""
def get_source(self, environment, template):
for searchpath in self.searchpath:
filename = path.join(searchpath, template)
f = open_if_exists(filename)
if f is None:
continue
with f:
contents = f.read().decode(self.encoding)
mtime = path.getmtime(filename)
def uptodate():
try:
return path.getmtime(filename) == mtime
except OSError:
return False
return contents, filename, uptodate
raise TemplateNotFound(template)
class BuiltinTemplateLoader(TemplateBridge, BaseLoader):
"""
Interfaces the rendering environment of jinja2 for use in Sphinx.
"""
# TemplateBridge interface
def init(self, builder, theme=None, dirs=None):
# create a chain of paths to search
if theme:
# the theme's own dir and its bases' dirs
pathchain = theme.get_dirchain()
# then the theme parent paths
loaderchain = pathchain + theme.themepath
elif dirs:
pathchain = list(dirs)
loaderchain = list(dirs)
else:
pathchain = []
loaderchain = []
# prepend explicit template paths
self.templatepathlen = len(builder.config.templates_path)
if builder.config.templates_path:
cfg_templates_path = [path.join(builder.confdir, tp)
for tp in builder.config.templates_path]
pathchain[0:0] = cfg_templates_path
loaderchain[0:0] = cfg_templates_path
# store it for use in newest_template_mtime
self.pathchain = pathchain
# make the paths into loaders
self.loaders = [SphinxFileSystemLoader(x) for x in loaderchain]
use_i18n = builder.app.translator is not None
extensions = use_i18n and ['jinja2.ext.i18n'] or []
self.environment = SandboxedEnvironment(loader=self,
extensions=extensions)
self.environment.filters['tobool'] = _tobool
self.environment.filters['toint'] = _toint
self.environment.filters['slice_index'] = _slice_index
self.environment.globals['debug'] = contextfunction(pformat)
self.environment.globals['accesskey'] = contextfunction(accesskey)
self.environment.globals['idgen'] = idgen
if use_i18n:
self.environment.install_gettext_translations(
builder.app.translator)
def render(self, template, context):
return self.environment.get_template(template).render(context)
def render_string(self, source, context):
return self.environment.from_string(source).render(context)
def newest_template_mtime(self):
return max(mtimes_of_files(self.pathchain, '.html'))
# Loader interface
def get_source(self, environment, template):
loaders = self.loaders
# exclamation mark starts search from theme
if template.startswith('!'):
loaders = loaders[self.templatepathlen:]
template = template[1:]
for loader in loaders:
try:
return loader.get_source(environment, template)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
|
tengyifei/grpc | refs/heads/master | src/python/grpcio/grpc_core_dependencies.py | 1 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!!
CORE_SOURCE_FILES = [
'src/core/lib/profiling/basic_timers.c',
'src/core/lib/profiling/stap_timers.c',
'src/core/lib/support/alloc.c',
'src/core/lib/support/avl.c',
'src/core/lib/support/backoff.c',
'src/core/lib/support/cmdline.c',
'src/core/lib/support/cpu_iphone.c',
'src/core/lib/support/cpu_linux.c',
'src/core/lib/support/cpu_posix.c',
'src/core/lib/support/cpu_windows.c',
'src/core/lib/support/env_linux.c',
'src/core/lib/support/env_posix.c',
'src/core/lib/support/env_windows.c',
'src/core/lib/support/histogram.c',
'src/core/lib/support/host_port.c',
'src/core/lib/support/log.c',
'src/core/lib/support/log_android.c',
'src/core/lib/support/log_linux.c',
'src/core/lib/support/log_posix.c',
'src/core/lib/support/log_windows.c',
'src/core/lib/support/murmur_hash.c',
'src/core/lib/support/slice.c',
'src/core/lib/support/slice_buffer.c',
'src/core/lib/support/stack_lockfree.c',
'src/core/lib/support/string.c',
'src/core/lib/support/string_posix.c',
'src/core/lib/support/string_util_windows.c',
'src/core/lib/support/string_windows.c',
'src/core/lib/support/subprocess_posix.c',
'src/core/lib/support/subprocess_windows.c',
'src/core/lib/support/sync.c',
'src/core/lib/support/sync_posix.c',
'src/core/lib/support/sync_windows.c',
'src/core/lib/support/thd.c',
'src/core/lib/support/thd_posix.c',
'src/core/lib/support/thd_windows.c',
'src/core/lib/support/time.c',
'src/core/lib/support/time_posix.c',
'src/core/lib/support/time_precise.c',
'src/core/lib/support/time_windows.c',
'src/core/lib/support/tls_pthread.c',
'src/core/lib/support/tmpfile_msys.c',
'src/core/lib/support/tmpfile_posix.c',
'src/core/lib/support/tmpfile_windows.c',
'src/core/lib/support/wrap_memcpy.c',
'src/core/lib/surface/init.c',
'src/core/lib/channel/channel_args.c',
'src/core/lib/channel/channel_stack.c',
'src/core/lib/channel/channel_stack_builder.c',
'src/core/lib/channel/compress_filter.c',
'src/core/lib/channel/connected_channel.c',
'src/core/lib/channel/handshaker.c',
'src/core/lib/channel/http_client_filter.c',
'src/core/lib/channel/http_server_filter.c',
'src/core/lib/compression/compression.c',
'src/core/lib/compression/message_compress.c',
'src/core/lib/debug/trace.c',
'src/core/lib/http/format_request.c',
'src/core/lib/http/httpcli.c',
'src/core/lib/http/parser.c',
'src/core/lib/iomgr/closure.c',
'src/core/lib/iomgr/endpoint.c',
'src/core/lib/iomgr/endpoint_pair_posix.c',
'src/core/lib/iomgr/endpoint_pair_windows.c',
'src/core/lib/iomgr/error.c',
'src/core/lib/iomgr/ev_epoll_linux.c',
'src/core/lib/iomgr/ev_poll_and_epoll_posix.c',
'src/core/lib/iomgr/ev_poll_posix.c',
'src/core/lib/iomgr/ev_posix.c',
'src/core/lib/iomgr/exec_ctx.c',
'src/core/lib/iomgr/executor.c',
'src/core/lib/iomgr/iocp_windows.c',
'src/core/lib/iomgr/iomgr.c',
'src/core/lib/iomgr/iomgr_posix.c',
'src/core/lib/iomgr/iomgr_windows.c',
'src/core/lib/iomgr/load_file.c',
'src/core/lib/iomgr/network_status_tracker.c',
'src/core/lib/iomgr/polling_entity.c',
'src/core/lib/iomgr/pollset_set_windows.c',
'src/core/lib/iomgr/pollset_windows.c',
'src/core/lib/iomgr/resolve_address_posix.c',
'src/core/lib/iomgr/resolve_address_windows.c',
'src/core/lib/iomgr/sockaddr_utils.c',
'src/core/lib/iomgr/socket_utils_common_posix.c',
'src/core/lib/iomgr/socket_utils_linux.c',
'src/core/lib/iomgr/socket_utils_posix.c',
'src/core/lib/iomgr/socket_windows.c',
'src/core/lib/iomgr/tcp_client_posix.c',
'src/core/lib/iomgr/tcp_client_windows.c',
'src/core/lib/iomgr/tcp_posix.c',
'src/core/lib/iomgr/tcp_server_posix.c',
'src/core/lib/iomgr/tcp_server_windows.c',
'src/core/lib/iomgr/tcp_windows.c',
'src/core/lib/iomgr/time_averaged_stats.c',
'src/core/lib/iomgr/timer.c',
'src/core/lib/iomgr/timer_heap.c',
'src/core/lib/iomgr/udp_server.c',
'src/core/lib/iomgr/unix_sockets_posix.c',
'src/core/lib/iomgr/unix_sockets_posix_noop.c',
'src/core/lib/iomgr/wakeup_fd_eventfd.c',
'src/core/lib/iomgr/wakeup_fd_nospecial.c',
'src/core/lib/iomgr/wakeup_fd_pipe.c',
'src/core/lib/iomgr/wakeup_fd_posix.c',
'src/core/lib/iomgr/workqueue_posix.c',
'src/core/lib/iomgr/workqueue_windows.c',
'src/core/lib/json/json.c',
'src/core/lib/json/json_reader.c',
'src/core/lib/json/json_string.c',
'src/core/lib/json/json_writer.c',
'src/core/lib/surface/alarm.c',
'src/core/lib/surface/api_trace.c',
'src/core/lib/surface/byte_buffer.c',
'src/core/lib/surface/byte_buffer_reader.c',
'src/core/lib/surface/call.c',
'src/core/lib/surface/call_details.c',
'src/core/lib/surface/call_log_batch.c',
'src/core/lib/surface/channel.c',
'src/core/lib/surface/channel_init.c',
'src/core/lib/surface/channel_ping.c',
'src/core/lib/surface/channel_stack_type.c',
'src/core/lib/surface/completion_queue.c',
'src/core/lib/surface/event_string.c',
'src/core/lib/surface/lame_client.c',
'src/core/lib/surface/metadata_array.c',
'src/core/lib/surface/server.c',
'src/core/lib/surface/validate_metadata.c',
'src/core/lib/surface/version.c',
'src/core/lib/transport/byte_stream.c',
'src/core/lib/transport/connectivity_state.c',
'src/core/lib/transport/metadata.c',
'src/core/lib/transport/metadata_batch.c',
'src/core/lib/transport/static_metadata.c',
'src/core/lib/transport/timeout_encoding.c',
'src/core/lib/transport/transport.c',
'src/core/lib/transport/transport_op_string.c',
'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.c',
'src/core/ext/transport/chttp2/transport/bin_decoder.c',
'src/core/ext/transport/chttp2/transport/bin_encoder.c',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.c',
'src/core/ext/transport/chttp2/transport/chttp2_transport.c',
'src/core/ext/transport/chttp2/transport/frame_data.c',
'src/core/ext/transport/chttp2/transport/frame_goaway.c',
'src/core/ext/transport/chttp2/transport/frame_ping.c',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.c',
'src/core/ext/transport/chttp2/transport/frame_settings.c',
'src/core/ext/transport/chttp2/transport/frame_window_update.c',
'src/core/ext/transport/chttp2/transport/hpack_encoder.c',
'src/core/ext/transport/chttp2/transport/hpack_parser.c',
'src/core/ext/transport/chttp2/transport/hpack_table.c',
'src/core/ext/transport/chttp2/transport/huffsyms.c',
'src/core/ext/transport/chttp2/transport/incoming_metadata.c',
'src/core/ext/transport/chttp2/transport/parsing.c',
'src/core/ext/transport/chttp2/transport/status_conversion.c',
'src/core/ext/transport/chttp2/transport/stream_lists.c',
'src/core/ext/transport/chttp2/transport/stream_map.c',
'src/core/ext/transport/chttp2/transport/varint.c',
'src/core/ext/transport/chttp2/transport/writing.c',
'src/core/ext/transport/chttp2/alpn/alpn.c',
'src/core/lib/http/httpcli_security_connector.c',
'src/core/lib/security/context/security_context.c',
'src/core/lib/security/credentials/composite/composite_credentials.c',
'src/core/lib/security/credentials/credentials.c',
'src/core/lib/security/credentials/credentials_metadata.c',
'src/core/lib/security/credentials/fake/fake_credentials.c',
'src/core/lib/security/credentials/google_default/credentials_posix.c',
'src/core/lib/security/credentials/google_default/credentials_windows.c',
'src/core/lib/security/credentials/google_default/google_default_credentials.c',
'src/core/lib/security/credentials/iam/iam_credentials.c',
'src/core/lib/security/credentials/jwt/json_token.c',
'src/core/lib/security/credentials/jwt/jwt_credentials.c',
'src/core/lib/security/credentials/jwt/jwt_verifier.c',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.c',
'src/core/lib/security/credentials/plugin/plugin_credentials.c',
'src/core/lib/security/credentials/ssl/ssl_credentials.c',
'src/core/lib/security/transport/client_auth_filter.c',
'src/core/lib/security/transport/handshake.c',
'src/core/lib/security/transport/secure_endpoint.c',
'src/core/lib/security/transport/security_connector.c',
'src/core/lib/security/transport/server_auth_filter.c',
'src/core/lib/security/transport/tsi_error.c',
'src/core/lib/security/util/b64.c',
'src/core/lib/security/util/json_util.c',
'src/core/lib/surface/init_secure.c',
'src/core/lib/tsi/fake_transport_security.c',
'src/core/lib/tsi/ssl_transport_security.c',
'src/core/lib/tsi/transport_security.c',
'src/core/ext/transport/chttp2/client/secure/secure_channel_create.c',
'src/core/ext/client_config/channel_connectivity.c',
'src/core/ext/client_config/client_channel.c',
'src/core/ext/client_config/client_channel_factory.c',
'src/core/ext/client_config/client_config.c',
'src/core/ext/client_config/client_config_plugin.c',
'src/core/ext/client_config/connector.c',
'src/core/ext/client_config/default_initial_connect_string.c',
'src/core/ext/client_config/initial_connect_string.c',
'src/core/ext/client_config/lb_policy.c',
'src/core/ext/client_config/lb_policy_factory.c',
'src/core/ext/client_config/lb_policy_registry.c',
'src/core/ext/client_config/parse_address.c',
'src/core/ext/client_config/resolver.c',
'src/core/ext/client_config/resolver_factory.c',
'src/core/ext/client_config/resolver_registry.c',
'src/core/ext/client_config/subchannel.c',
'src/core/ext/client_config/subchannel_call_holder.c',
'src/core/ext/client_config/subchannel_index.c',
'src/core/ext/client_config/uri_parser.c',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.c',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.c',
'src/core/ext/transport/chttp2/client/insecure/channel_create.c',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.c',
'src/core/ext/lb_policy/grpclb/grpclb.c',
'src/core/ext/lb_policy/grpclb/load_balancer_api.c',
'src/core/ext/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c',
'third_party/nanopb/pb_common.c',
'third_party/nanopb/pb_decode.c',
'third_party/nanopb/pb_encode.c',
'src/core/ext/lb_policy/pick_first/pick_first.c',
'src/core/ext/lb_policy/round_robin/round_robin.c',
'src/core/ext/resolver/dns/native/dns_resolver.c',
'src/core/ext/resolver/sockaddr/sockaddr_resolver.c',
'src/core/ext/load_reporting/load_reporting.c',
'src/core/ext/load_reporting/load_reporting_filter.c',
'src/core/ext/census/base_resources.c',
'src/core/ext/census/context.c',
'src/core/ext/census/gen/census.pb.c',
'src/core/ext/census/grpc_context.c',
'src/core/ext/census/grpc_filter.c',
'src/core/ext/census/grpc_plugin.c',
'src/core/ext/census/initialize.c',
'src/core/ext/census/mlog.c',
'src/core/ext/census/operation.c',
'src/core/ext/census/placeholders.c',
'src/core/ext/census/resource.c',
'src/core/ext/census/tracing.c',
'src/core/plugin_registry/grpc_plugin_registry.c',
'src/boringssl/err_data.c',
'third_party/boringssl/crypto/aes/aes.c',
'third_party/boringssl/crypto/aes/mode_wrappers.c',
'third_party/boringssl/crypto/asn1/a_bitstr.c',
'third_party/boringssl/crypto/asn1/a_bool.c',
'third_party/boringssl/crypto/asn1/a_bytes.c',
'third_party/boringssl/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl/crypto/asn1/a_dup.c',
'third_party/boringssl/crypto/asn1/a_enum.c',
'third_party/boringssl/crypto/asn1/a_gentm.c',
'third_party/boringssl/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl/crypto/asn1/a_int.c',
'third_party/boringssl/crypto/asn1/a_mbstr.c',
'third_party/boringssl/crypto/asn1/a_object.c',
'third_party/boringssl/crypto/asn1/a_octet.c',
'third_party/boringssl/crypto/asn1/a_print.c',
'third_party/boringssl/crypto/asn1/a_strnid.c',
'third_party/boringssl/crypto/asn1/a_time.c',
'third_party/boringssl/crypto/asn1/a_type.c',
'third_party/boringssl/crypto/asn1/a_utctm.c',
'third_party/boringssl/crypto/asn1/a_utf8.c',
'third_party/boringssl/crypto/asn1/asn1_lib.c',
'third_party/boringssl/crypto/asn1/asn1_par.c',
'third_party/boringssl/crypto/asn1/asn_pack.c',
'third_party/boringssl/crypto/asn1/bio_asn1.c',
'third_party/boringssl/crypto/asn1/bio_ndef.c',
'third_party/boringssl/crypto/asn1/f_enum.c',
'third_party/boringssl/crypto/asn1/f_int.c',
'third_party/boringssl/crypto/asn1/f_string.c',
'third_party/boringssl/crypto/asn1/t_bitst.c',
'third_party/boringssl/crypto/asn1/t_pkey.c',
'third_party/boringssl/crypto/asn1/tasn_dec.c',
'third_party/boringssl/crypto/asn1/tasn_enc.c',
'third_party/boringssl/crypto/asn1/tasn_fre.c',
'third_party/boringssl/crypto/asn1/tasn_new.c',
'third_party/boringssl/crypto/asn1/tasn_prn.c',
'third_party/boringssl/crypto/asn1/tasn_typ.c',
'third_party/boringssl/crypto/asn1/tasn_utl.c',
'third_party/boringssl/crypto/asn1/x_bignum.c',
'third_party/boringssl/crypto/asn1/x_long.c',
'third_party/boringssl/crypto/base64/base64.c',
'third_party/boringssl/crypto/bio/bio.c',
'third_party/boringssl/crypto/bio/bio_mem.c',
'third_party/boringssl/crypto/bio/buffer.c',
'third_party/boringssl/crypto/bio/connect.c',
'third_party/boringssl/crypto/bio/fd.c',
'third_party/boringssl/crypto/bio/file.c',
'third_party/boringssl/crypto/bio/hexdump.c',
'third_party/boringssl/crypto/bio/pair.c',
'third_party/boringssl/crypto/bio/printf.c',
'third_party/boringssl/crypto/bio/socket.c',
'third_party/boringssl/crypto/bio/socket_helper.c',
'third_party/boringssl/crypto/bn/add.c',
'third_party/boringssl/crypto/bn/asm/x86_64-gcc.c',
'third_party/boringssl/crypto/bn/bn.c',
'third_party/boringssl/crypto/bn/bn_asn1.c',
'third_party/boringssl/crypto/bn/cmp.c',
'third_party/boringssl/crypto/bn/convert.c',
'third_party/boringssl/crypto/bn/ctx.c',
'third_party/boringssl/crypto/bn/div.c',
'third_party/boringssl/crypto/bn/exponentiation.c',
'third_party/boringssl/crypto/bn/gcd.c',
'third_party/boringssl/crypto/bn/generic.c',
'third_party/boringssl/crypto/bn/kronecker.c',
'third_party/boringssl/crypto/bn/montgomery.c',
'third_party/boringssl/crypto/bn/mul.c',
'third_party/boringssl/crypto/bn/prime.c',
'third_party/boringssl/crypto/bn/random.c',
'third_party/boringssl/crypto/bn/rsaz_exp.c',
'third_party/boringssl/crypto/bn/shift.c',
'third_party/boringssl/crypto/bn/sqrt.c',
'third_party/boringssl/crypto/buf/buf.c',
'third_party/boringssl/crypto/bytestring/asn1_compat.c',
'third_party/boringssl/crypto/bytestring/ber.c',
'third_party/boringssl/crypto/bytestring/cbb.c',
'third_party/boringssl/crypto/bytestring/cbs.c',
'third_party/boringssl/crypto/chacha/chacha_generic.c',
'third_party/boringssl/crypto/chacha/chacha_vec.c',
'third_party/boringssl/crypto/cipher/aead.c',
'third_party/boringssl/crypto/cipher/cipher.c',
'third_party/boringssl/crypto/cipher/derive_key.c',
'third_party/boringssl/crypto/cipher/e_aes.c',
'third_party/boringssl/crypto/cipher/e_chacha20poly1305.c',
'third_party/boringssl/crypto/cipher/e_des.c',
'third_party/boringssl/crypto/cipher/e_null.c',
'third_party/boringssl/crypto/cipher/e_rc2.c',
'third_party/boringssl/crypto/cipher/e_rc4.c',
'third_party/boringssl/crypto/cipher/e_ssl3.c',
'third_party/boringssl/crypto/cipher/e_tls.c',
'third_party/boringssl/crypto/cipher/tls_cbc.c',
'third_party/boringssl/crypto/cmac/cmac.c',
'third_party/boringssl/crypto/conf/conf.c',
'third_party/boringssl/crypto/cpu-arm.c',
'third_party/boringssl/crypto/cpu-intel.c',
'third_party/boringssl/crypto/crypto.c',
'third_party/boringssl/crypto/curve25519/curve25519.c',
'third_party/boringssl/crypto/curve25519/x25519-x86_64.c',
'third_party/boringssl/crypto/des/des.c',
'third_party/boringssl/crypto/dh/check.c',
'third_party/boringssl/crypto/dh/dh.c',
'third_party/boringssl/crypto/dh/dh_asn1.c',
'third_party/boringssl/crypto/dh/params.c',
'third_party/boringssl/crypto/digest/digest.c',
'third_party/boringssl/crypto/digest/digests.c',
'third_party/boringssl/crypto/directory_posix.c',
'third_party/boringssl/crypto/directory_win.c',
'third_party/boringssl/crypto/dsa/dsa.c',
'third_party/boringssl/crypto/dsa/dsa_asn1.c',
'third_party/boringssl/crypto/ec/ec.c',
'third_party/boringssl/crypto/ec/ec_asn1.c',
'third_party/boringssl/crypto/ec/ec_key.c',
'third_party/boringssl/crypto/ec/ec_montgomery.c',
'third_party/boringssl/crypto/ec/oct.c',
'third_party/boringssl/crypto/ec/p224-64.c',
'third_party/boringssl/crypto/ec/p256-64.c',
'third_party/boringssl/crypto/ec/p256-x86_64.c',
'third_party/boringssl/crypto/ec/simple.c',
'third_party/boringssl/crypto/ec/util-64.c',
'third_party/boringssl/crypto/ec/wnaf.c',
'third_party/boringssl/crypto/ecdh/ecdh.c',
'third_party/boringssl/crypto/ecdsa/ecdsa.c',
'third_party/boringssl/crypto/ecdsa/ecdsa_asn1.c',
'third_party/boringssl/crypto/engine/engine.c',
'third_party/boringssl/crypto/err/err.c',
'third_party/boringssl/crypto/err/err_data.c',
'third_party/boringssl/crypto/evp/algorithm.c',
'third_party/boringssl/crypto/evp/digestsign.c',
'third_party/boringssl/crypto/evp/evp.c',
'third_party/boringssl/crypto/evp/evp_asn1.c',
'third_party/boringssl/crypto/evp/evp_ctx.c',
'third_party/boringssl/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl/crypto/evp/p_ec.c',
'third_party/boringssl/crypto/evp/p_ec_asn1.c',
'third_party/boringssl/crypto/evp/p_rsa.c',
'third_party/boringssl/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl/crypto/evp/pbkdf.c',
'third_party/boringssl/crypto/evp/sign.c',
'third_party/boringssl/crypto/ex_data.c',
'third_party/boringssl/crypto/hkdf/hkdf.c',
'third_party/boringssl/crypto/hmac/hmac.c',
'third_party/boringssl/crypto/lhash/lhash.c',
'third_party/boringssl/crypto/md4/md4.c',
'third_party/boringssl/crypto/md5/md5.c',
'third_party/boringssl/crypto/mem.c',
'third_party/boringssl/crypto/modes/cbc.c',
'third_party/boringssl/crypto/modes/cfb.c',
'third_party/boringssl/crypto/modes/ctr.c',
'third_party/boringssl/crypto/modes/gcm.c',
'third_party/boringssl/crypto/modes/ofb.c',
'third_party/boringssl/crypto/obj/obj.c',
'third_party/boringssl/crypto/obj/obj_xref.c',
'third_party/boringssl/crypto/pem/pem_all.c',
'third_party/boringssl/crypto/pem/pem_info.c',
'third_party/boringssl/crypto/pem/pem_lib.c',
'third_party/boringssl/crypto/pem/pem_oth.c',
'third_party/boringssl/crypto/pem/pem_pk8.c',
'third_party/boringssl/crypto/pem/pem_pkey.c',
'third_party/boringssl/crypto/pem/pem_x509.c',
'third_party/boringssl/crypto/pem/pem_xaux.c',
'third_party/boringssl/crypto/pkcs8/p5_pbe.c',
'third_party/boringssl/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl/crypto/pkcs8/p8_pkey.c',
'third_party/boringssl/crypto/pkcs8/pkcs8.c',
'third_party/boringssl/crypto/poly1305/poly1305.c',
'third_party/boringssl/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl/crypto/rand/rand.c',
'third_party/boringssl/crypto/rand/urandom.c',
'third_party/boringssl/crypto/rand/windows.c',
'third_party/boringssl/crypto/rc4/rc4.c',
'third_party/boringssl/crypto/refcount_c11.c',
'third_party/boringssl/crypto/refcount_lock.c',
'third_party/boringssl/crypto/rsa/blinding.c',
'third_party/boringssl/crypto/rsa/padding.c',
'third_party/boringssl/crypto/rsa/rsa.c',
'third_party/boringssl/crypto/rsa/rsa_asn1.c',
'third_party/boringssl/crypto/rsa/rsa_impl.c',
'third_party/boringssl/crypto/sha/sha1.c',
'third_party/boringssl/crypto/sha/sha256.c',
'third_party/boringssl/crypto/sha/sha512.c',
'third_party/boringssl/crypto/stack/stack.c',
'third_party/boringssl/crypto/thread.c',
'third_party/boringssl/crypto/thread_none.c',
'third_party/boringssl/crypto/thread_pthread.c',
'third_party/boringssl/crypto/thread_win.c',
'third_party/boringssl/crypto/time_support.c',
'third_party/boringssl/crypto/x509/a_digest.c',
'third_party/boringssl/crypto/x509/a_sign.c',
'third_party/boringssl/crypto/x509/a_strex.c',
'third_party/boringssl/crypto/x509/a_verify.c',
'third_party/boringssl/crypto/x509/asn1_gen.c',
'third_party/boringssl/crypto/x509/by_dir.c',
'third_party/boringssl/crypto/x509/by_file.c',
'third_party/boringssl/crypto/x509/i2d_pr.c',
'third_party/boringssl/crypto/x509/pkcs7.c',
'third_party/boringssl/crypto/x509/t_crl.c',
'third_party/boringssl/crypto/x509/t_req.c',
'third_party/boringssl/crypto/x509/t_x509.c',
'third_party/boringssl/crypto/x509/t_x509a.c',
'third_party/boringssl/crypto/x509/x509.c',
'third_party/boringssl/crypto/x509/x509_att.c',
'third_party/boringssl/crypto/x509/x509_cmp.c',
'third_party/boringssl/crypto/x509/x509_d2.c',
'third_party/boringssl/crypto/x509/x509_def.c',
'third_party/boringssl/crypto/x509/x509_ext.c',
'third_party/boringssl/crypto/x509/x509_lu.c',
'third_party/boringssl/crypto/x509/x509_obj.c',
'third_party/boringssl/crypto/x509/x509_r2x.c',
'third_party/boringssl/crypto/x509/x509_req.c',
'third_party/boringssl/crypto/x509/x509_set.c',
'third_party/boringssl/crypto/x509/x509_trs.c',
'third_party/boringssl/crypto/x509/x509_txt.c',
'third_party/boringssl/crypto/x509/x509_v3.c',
'third_party/boringssl/crypto/x509/x509_vfy.c',
'third_party/boringssl/crypto/x509/x509_vpm.c',
'third_party/boringssl/crypto/x509/x509cset.c',
'third_party/boringssl/crypto/x509/x509name.c',
'third_party/boringssl/crypto/x509/x509rset.c',
'third_party/boringssl/crypto/x509/x509spki.c',
'third_party/boringssl/crypto/x509/x509type.c',
'third_party/boringssl/crypto/x509/x_algor.c',
'third_party/boringssl/crypto/x509/x_all.c',
'third_party/boringssl/crypto/x509/x_attrib.c',
'third_party/boringssl/crypto/x509/x_crl.c',
'third_party/boringssl/crypto/x509/x_exten.c',
'third_party/boringssl/crypto/x509/x_info.c',
'third_party/boringssl/crypto/x509/x_name.c',
'third_party/boringssl/crypto/x509/x_pkey.c',
'third_party/boringssl/crypto/x509/x_pubkey.c',
'third_party/boringssl/crypto/x509/x_req.c',
'third_party/boringssl/crypto/x509/x_sig.c',
'third_party/boringssl/crypto/x509/x_spki.c',
'third_party/boringssl/crypto/x509/x_val.c',
'third_party/boringssl/crypto/x509/x_x509.c',
'third_party/boringssl/crypto/x509/x_x509a.c',
'third_party/boringssl/crypto/x509v3/pcy_cache.c',
'third_party/boringssl/crypto/x509v3/pcy_data.c',
'third_party/boringssl/crypto/x509v3/pcy_lib.c',
'third_party/boringssl/crypto/x509v3/pcy_map.c',
'third_party/boringssl/crypto/x509v3/pcy_node.c',
'third_party/boringssl/crypto/x509v3/pcy_tree.c',
'third_party/boringssl/crypto/x509v3/v3_akey.c',
'third_party/boringssl/crypto/x509v3/v3_akeya.c',
'third_party/boringssl/crypto/x509v3/v3_alt.c',
'third_party/boringssl/crypto/x509v3/v3_bcons.c',
'third_party/boringssl/crypto/x509v3/v3_bitst.c',
'third_party/boringssl/crypto/x509v3/v3_conf.c',
'third_party/boringssl/crypto/x509v3/v3_cpols.c',
'third_party/boringssl/crypto/x509v3/v3_crld.c',
'third_party/boringssl/crypto/x509v3/v3_enum.c',
'third_party/boringssl/crypto/x509v3/v3_extku.c',
'third_party/boringssl/crypto/x509v3/v3_genn.c',
'third_party/boringssl/crypto/x509v3/v3_ia5.c',
'third_party/boringssl/crypto/x509v3/v3_info.c',
'third_party/boringssl/crypto/x509v3/v3_int.c',
'third_party/boringssl/crypto/x509v3/v3_lib.c',
'third_party/boringssl/crypto/x509v3/v3_ncons.c',
'third_party/boringssl/crypto/x509v3/v3_pci.c',
'third_party/boringssl/crypto/x509v3/v3_pcia.c',
'third_party/boringssl/crypto/x509v3/v3_pcons.c',
'third_party/boringssl/crypto/x509v3/v3_pku.c',
'third_party/boringssl/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl/crypto/x509v3/v3_prn.c',
'third_party/boringssl/crypto/x509v3/v3_purp.c',
'third_party/boringssl/crypto/x509v3/v3_skey.c',
'third_party/boringssl/crypto/x509v3/v3_sxnet.c',
'third_party/boringssl/crypto/x509v3/v3_utl.c',
'third_party/boringssl/ssl/custom_extensions.c',
'third_party/boringssl/ssl/d1_both.c',
'third_party/boringssl/ssl/d1_clnt.c',
'third_party/boringssl/ssl/d1_lib.c',
'third_party/boringssl/ssl/d1_meth.c',
'third_party/boringssl/ssl/d1_pkt.c',
'third_party/boringssl/ssl/d1_srtp.c',
'third_party/boringssl/ssl/d1_srvr.c',
'third_party/boringssl/ssl/dtls_record.c',
'third_party/boringssl/ssl/pqueue/pqueue.c',
'third_party/boringssl/ssl/s3_both.c',
'third_party/boringssl/ssl/s3_clnt.c',
'third_party/boringssl/ssl/s3_enc.c',
'third_party/boringssl/ssl/s3_lib.c',
'third_party/boringssl/ssl/s3_meth.c',
'third_party/boringssl/ssl/s3_pkt.c',
'third_party/boringssl/ssl/s3_srvr.c',
'third_party/boringssl/ssl/ssl_aead_ctx.c',
'third_party/boringssl/ssl/ssl_asn1.c',
'third_party/boringssl/ssl/ssl_buffer.c',
'third_party/boringssl/ssl/ssl_cert.c',
'third_party/boringssl/ssl/ssl_cipher.c',
'third_party/boringssl/ssl/ssl_ecdh.c',
'third_party/boringssl/ssl/ssl_file.c',
'third_party/boringssl/ssl/ssl_lib.c',
'third_party/boringssl/ssl/ssl_rsa.c',
'third_party/boringssl/ssl/ssl_session.c',
'third_party/boringssl/ssl/ssl_stat.c',
'third_party/boringssl/ssl/t1_enc.c',
'third_party/boringssl/ssl/t1_lib.c',
'third_party/boringssl/ssl/tls_record.c',
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
]
|
openhatch/oh-mainline | refs/heads/master | vendor/packages/Django/tests/modeltests/proxy_models/models.py | 51 | """
By specifying the 'proxy' Meta attribute, model subclasses can specify that
they will take data directly from the table of their base class table rather
than using a new table of their own. This allows them to act as simple proxies,
providing a modified interface to the data from the base class.
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# A couple of managers for testing managing overriding in proxy model cases.
class PersonManager(models.Manager):
def get_query_set(self):
return super(PersonManager, self).get_query_set().exclude(name="fred")
class SubManager(models.Manager):
def get_query_set(self):
return super(SubManager, self).get_query_set().exclude(name="wilma")
@python_2_unicode_compatible
class Person(models.Model):
"""
A simple concrete base class.
"""
name = models.CharField(max_length=50)
objects = PersonManager()
def __str__(self):
return self.name
class Abstract(models.Model):
"""
A simple abstract base class, to be used for error checking.
"""
data = models.CharField(max_length=10)
class Meta:
abstract = True
class MyPerson(Person):
"""
A proxy subclass, this should not get a new table. Overrides the default
manager.
"""
class Meta:
proxy = True
ordering = ["name"]
permissions = (
("display_users", "May display users information"),
)
objects = SubManager()
other = PersonManager()
def has_special_name(self):
return self.name.lower() == "special"
class ManagerMixin(models.Model):
excluder = SubManager()
class Meta:
abstract = True
class OtherPerson(Person, ManagerMixin):
"""
    A class with the default manager from Person, plus a secondary manager.
"""
class Meta:
proxy = True
ordering = ["name"]
class StatusPerson(MyPerson):
"""
A non-proxy subclass of a proxy, it should get a new table.
"""
status = models.CharField(max_length=80)
# We can even have proxies of proxies (and subclass of those).
class MyPersonProxy(MyPerson):
class Meta:
proxy = True
class LowerStatusPerson(MyPersonProxy):
status = models.CharField(max_length=80)
@python_2_unicode_compatible
class User(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class UserProxy(User):
class Meta:
proxy = True
class UserProxyProxy(UserProxy):
class Meta:
proxy = True
# We can still use `select_related()` to include related models in our querysets.
class Country(models.Model):
name = models.CharField(max_length=50)
@python_2_unicode_compatible
class State(models.Model):
name = models.CharField(max_length=50)
country = models.ForeignKey(Country)
def __str__(self):
return self.name
class StateProxy(State):
class Meta:
proxy = True
# Proxy models still work with filters (on related fields)
# and select_related, even when mixed with model inheritance
class BaseUser(models.Model):
name = models.CharField(max_length=255)
class TrackerUser(BaseUser):
status = models.CharField(max_length=50)
class ProxyTrackerUser(TrackerUser):
class Meta:
proxy = True
@python_2_unicode_compatible
class Issue(models.Model):
summary = models.CharField(max_length=255)
assignee = models.ForeignKey(TrackerUser)
def __str__(self):
        return ':'.join((self.__class__.__name__, self.summary))
class Bug(Issue):
version = models.CharField(max_length=50)
reporter = models.ForeignKey(BaseUser)
class ProxyBug(Bug):
"""
    A proxy of an inherited class.
"""
class Meta:
proxy = True
class ProxyProxyBug(ProxyBug):
"""
    A proxy of a proxy model with a related field.
"""
class Meta:
proxy = True
class Improvement(Issue):
"""
    A model that has a relation to a proxy model
    or to a proxy of a proxy model.
"""
version = models.CharField(max_length=50)
reporter = models.ForeignKey(ProxyTrackerUser)
associated_bug = models.ForeignKey(ProxyProxyBug)
class ProxyImprovement(Improvement):
class Meta:
proxy = True
|
jayceyxc/hue | refs/heads/master | desktop/core/src/desktop/windmilltests.py | 38 | # Generated by the windmill services transformer
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from desktop.lib.windmill_util import logged_in_client, logout
import time
def test_login_and_logout():
client = logged_in_client()
# TODO: there's some race condition here :(
time.sleep(2.5)
logout(client)
|
cgstudiomap/cgstudiomap | refs/heads/develop | main/eggs/pbr-1.8.1-py2.7.egg/pbr/tests/util.py | 64 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2013 Association of Universities for Research in Astronomy
# (AURA)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
import contextlib
import os
import shutil
import stat
try:
import ConfigParser as configparser
except ImportError:
import configparser
@contextlib.contextmanager
def open_config(filename):
cfg = configparser.SafeConfigParser()
cfg.read(filename)
yield cfg
with open(filename, 'w') as fp:
cfg.write(fp)
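# A sketch of typical use (hypothetical file and option names):
#
#     with open_config('setup.cfg') as cfg:
#         cfg.set('metadata', 'name', 'demo')
#
# The parser is written back to the same file when the block exits.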
def rmtree(path):
"""shutil.rmtree() with error handler.
Handle 'access denied' from trying to delete read-only files.
"""
def onerror(func, path, exc_info):
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
func(path)
else:
raise
return shutil.rmtree(path, onerror=onerror)
|
MeetMe/selenium | refs/heads/master | py/test/selenium/webdriver/chrome/chrome_launcher_tests.py | 52 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import logging
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
class ChromeLauncherTests(unittest.TestCase):
def testLaunchAndCloseBrowser(self):
self.webdriver = webdriver.Chrome()
self.webdriver.quit()
    def test_we_can_launch_multiple_chrome_instances(self):
self.webdriver1 = webdriver.Chrome()
self.webdriver2 = webdriver.Chrome()
self.webdriver3 = webdriver.Chrome()
self.webdriver1.quit()
self.webdriver2.quit()
self.webdriver3.quit()
def test_launch_chrome_do_not_affect_default_capabilities(self):
expected = DesiredCapabilities.CHROME.copy()
self.webdriver1 = webdriver.Chrome()
actual = DesiredCapabilities.CHROME.copy()
self.webdriver1.quit()
assert actual == expected
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
unittest.main()
|
lombritz/odoo | refs/heads/8.0 | openerp/tools/pdf_utils.py | 456 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Copyright (c) 2003-2007 LOGILAB S.A. (Paris, FRANCE).
http://www.logilab.fr/ -- mailto:[email protected]
manipulate pdf and fdf files. pdftk recommended.
Notes regarding pdftk, pdf forms and fdf files (form definition file)
field names can be extracted with:
pdftk orig.pdf generate_fdf output truc.fdf
to merge fdf and pdf:
pdftk orig.pdf fill_form test.fdf output result.pdf [flatten]
without flatten, one could further edit the resulting form.
with flatten, everything is turned into text.
"""
from __future__ import with_statement
import os
import tempfile
HEAD="""%FDF-1.2
%\xE2\xE3\xCF\xD3
1 0 obj
<<
/FDF
<<
/Fields [
"""
TAIL="""]
>>
>>
endobj
trailer
<<
/Root 1 0 R
>>
%%EOF
"""
def output_field(f):
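    # Encode the field name as UTF-16BE with a byte-order mark, a string
    # encoding FDF accepts for field names.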
return "\xfe\xff" + "".join( [ "\x00"+c for c in f ] )
def extract_keys(lines):
keys = []
for line in lines:
if line.startswith('/V'):
pass #print 'value',line
elif line.startswith('/T'):
key = line[7:-2]
key = ''.join(key.split('\x00'))
keys.append( key )
return keys
def write_field(out, key, value):
out.write("<<\n")
if value:
out.write("/V (%s)\n" %value)
else:
out.write("/V /\n")
out.write("/T (%s)\n" % output_field(key) )
out.write(">> \n")
def write_fields(out, fields):
out.write(HEAD)
for key in fields:
value = fields[key]
write_field(out, key, value)
# write_field(out, key+"a", value) # pour copie-carbone sur autres pages
out.write(TAIL)
def extract_keys_from_pdf(filename):
# what about using 'pdftk filename dump_data_fields' and parsing the output ?
tmp_file = tempfile.mkstemp(".fdf")[1]
try:
        os.system('pdftk "%s" generate_fdf output "%s"' % (filename, tmp_file))
with open(tmp_file, "r") as ofile:
lines = ofile.readlines()
finally:
try:
os.remove(tmp_file)
except Exception:
pass # nothing to do
return extract_keys(lines)
def fill_pdf(infile, outfile, fields):
tmp_file = tempfile.mkstemp(".fdf")[1]
try:
with open(tmp_file, "w") as ofile:
write_fields(ofile, fields)
        os.system('pdftk "%s" fill_form "%s" output "%s" flatten' % (infile, tmp_file, outfile))
finally:
try:
os.remove(tmp_file)
except Exception:
pass # nothing to do
def testfill_pdf(infile, outfile):
keys = extract_keys_from_pdf(infile)
    # fill_pdf() iterates the fields and looks values up by key, so build a
    # mapping with every field left empty rather than a list of tuples.
    fields = {}
    for key in keys:
        fields[key] = ''
    fill_pdf(infile, outfile, fields)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
demonkit/it-com-location-map | refs/heads/master | config.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DATA_DIR = os.path.join(BASE_DIR, 'data')
# Celery broker/backend configuration
BROKER = "amqp://10.16.45.109:55672//"
BACKEND = "amqp://10.16.45.109:55672//"
# database config
# SQLAlchemy sqlite URLs need "sqlite:///" before the path (four slashes in
# total for an absolute path such as BASE_DIR).
SQLALCHEMY_DATABASE_URI = "sqlite:///%s/%s" % (BASE_DIR, "it_company.db")
|
mrquim/repository.mrquim | refs/heads/master | repo/service.subtitles.unacs/resources/lib/common.py | 4 | # -*- coding: utf-8 -*-
import sys
import os
from bs4 import BeautifulSoup
import urllib, urllib2
import BaseHTTPServer
import gzip
from StringIO import StringIO
import re
import imp
from httplib import *
try:
import xbmc
import xbmcgui
from ga import ga
run_from_xbmc = True
except ImportError:
run_from_xbmc = False
pass
if run_from_xbmc:
import xbmcvfs
import xbmcaddon
import xbmcgui
import xbmcplugin
path = ''
list_key = ['rating', 'fps', 'url', 'cds', 'info', 'id']
tv_show_list_re = [
r'^(?P<tvshow>[\S\s].*?)(?:s)(?P<season>\d{1,2})[_\.\s]?(?:e)(?P<episode>\d{1,2})(?P<title>[\S\s]*)$',
r'^(?P<tvshow>[\S\s].*?)(?P<season>\d{1,2})(?P<episode>\d{2})(?P<title>[\S\s]*)$',
r'^(?P<tvshow>[\S\s].*?)(?P<season>\d{1,2})(?:x)(?P<episode>\d{1,2})(?P<title>[\S\s]*)$',
r'^(?P<season>\d{1,2})(?:x)(?P<episode>\d{1,2})\s(?P<tvshow>[\S\s].*?)$',
]
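# The patterns above match, in order, names like "Show S01E02 Title",
# "Show 0102", "Show 1x02" and "1x02 Show".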
movie_name_re = [
r'(\(?(?:19[789]\d|20[01]\d)\)?)',
r'(\[\/?B\])',
r'(\[\/?COLOR.*?\])',
r'\s(X{0,3})(IX|IV|V?I{0,3}):', # Roman numeral followed by a colon
r'(\:)',
r'(part[\s\S]\d+)'
]
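# These patterns strip search noise from movie titles: years such as
# "(1999)", Kodi [B]/[COLOR] formatting tags, Roman numerals before a colon,
# colons themselves and "part N" suffixes.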
search_re = [
(r'(\.)', ' '),
(r'(\s+)', ' '),
]
def log_my(*msg):
    if run_from_xbmc:
xbmc.log((u"*** %s" % (msg,)).encode('utf-8'),level=xbmc.LOGNOTICE)
#xbmc.log((u"*** %s" % (msg,)).encode('utf-8'),level=xbmc.LOGERROR)
else:
for m in msg:
print m,
print
def get_search_string(item):
    search_string = item['title']
    if item['mansearch']:
        # A manual search string is used verbatim, skipping the cleanup below.
        return item['mansearchstr']
for name_clean in movie_name_re:
search_string = re.sub(name_clean, '', search_string)
if not item['tvshow']:
for tv_match in tv_show_list_re:
m = re.match(tv_match, search_string, re.IGNORECASE)
if m:
item['tvshow'] = m.group('tvshow')
item['season'] = m.group('season')
                item['episode'] = m.group('episode')
try: item['title'] = m.group('title')
                except IndexError: pass  # the pattern has no 'title' group
break
if item['tvshow']:
if item['season'] and item['episode']:
search_string = re.sub(r'\s+(.\d{1,2}.*?\d{2}[\s\S]*)$', '', item['tvshow'])
if int(item['season']) == 0:
# search for special episodes by episode title
search_string += ' ' + item['title']
else:
search_string += ' %#02dx%#02d' % (int(item['season']), int(item['episode']))
else:
search_string = item['tvshow']
for f, r in search_re:
search_string = re.sub(f, r, search_string)
return search_string
def update(name, act_ev, dat, crash=None):
payload = {}
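    # The payload keys appear to follow the Google Analytics Measurement
    # Protocol: ec = event category, ea = event action, ev = event value,
    # dl = document location; an/av below are the app name and version.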
payload['ec'] = name
payload['ea'] = act_ev
payload['ev'] = '1'
payload['dl'] = urllib.quote_plus(dat.encode('utf-8'))
    if run_from_xbmc:
payload['an'] = xbmcaddon.Addon().getAddonInfo('name')
payload['av'] = xbmcaddon.Addon().getAddonInfo('version')
ga().update(payload, crash)
else:
print payload
def get_info(it):
    info = 'Fps:{0} Cd:{1} - {2}'.format(it['fps'], it['cds'], it['info'])
    # Normalize non-breaking spaces (U+00A0, the assumed intent of the
    # original space-for-space substitution) to plain spaces.
    return re.sub(u'\u00a0', u' ', info)
def savetofile(d, name):
    if not run_from_xbmc:
        n = os.path.join(path, name)
        f = open(n, 'wb')
        f.write(d)
        f.close()
def dump_src(s, name):
    if not run_from_xbmc:
f = open(name,'wb')
f.write(s.prettify().encode('utf-8', 'replace'))
f.close()
|
Titulacion-Sistemas/PythonTitulacion-EV | refs/heads/master | Lib/site-packages/django/template/defaulttags.py | 104 | """Default tags used by the template system, available to all templates."""
from __future__ import unicode_literals
import os
import sys
import re
from datetime import datetime
from itertools import groupby, cycle as itertools_cycle
import warnings
from django.conf import settings
from django.template.base import (Node, NodeList, Template, Context, Library,
TemplateSyntaxError, VariableDoesNotExist, InvalidTemplateLibrary,
BLOCK_TAG_START, BLOCK_TAG_END, VARIABLE_TAG_START, VARIABLE_TAG_END,
SINGLE_BRACE_START, SINGLE_BRACE_END, COMMENT_TAG_START, COMMENT_TAG_END,
VARIABLE_ATTRIBUTE_SEPARATOR, get_library, token_kwargs, kwarg_re,
render_value_in_context)
from django.template.smartif import IfParser, Literal
from django.template.defaultfilters import date
from django.utils.encoding import smart_text
from django.utils.safestring import mark_safe
from django.utils.html import format_html
from django.utils import six
from django.utils import timezone
register = Library()
class AutoEscapeControlNode(Node):
"""Implements the actions of the autoescape tag."""
def __init__(self, setting, nodelist):
self.setting, self.nodelist = setting, nodelist
def render(self, context):
old_setting = context.autoescape
context.autoescape = self.setting
output = self.nodelist.render(context)
context.autoescape = old_setting
if self.setting:
return mark_safe(output)
else:
return output
class CommentNode(Node):
def render(self, context):
return ''
class CsrfTokenNode(Node):
def render(self, context):
csrf_token = context.get('csrf_token', None)
if csrf_token:
if csrf_token == 'NOTPROVIDED':
return format_html("")
else:
return format_html("<input type='hidden' name='csrfmiddlewaretoken' value='{0}' />", csrf_token)
else:
# It's very probable that the token is missing because of
# misconfiguration, so we raise a warning
from django.conf import settings
if settings.DEBUG:
warnings.warn("A {% csrf_token %} was used in a template, but the context did not provide the value. This is usually caused by not using RequestContext.")
return ''
class CycleNode(Node):
def __init__(self, cyclevars, variable_name=None, silent=False, escape=False):
self.cyclevars = cyclevars
self.variable_name = variable_name
self.silent = silent
self.escape = escape # only while the "future" version exists
def render(self, context):
if self not in context.render_context:
# First time the node is rendered in template
context.render_context[self] = itertools_cycle(self.cyclevars)
cycle_iter = context.render_context[self]
value = next(cycle_iter).resolve(context)
if self.variable_name:
context[self.variable_name] = value
if self.silent:
return ''
if not self.escape:
value = mark_safe(value)
return render_value_in_context(value, context)
class DebugNode(Node):
def render(self, context):
from pprint import pformat
output = [pformat(val) for val in context]
output.append('\n\n')
output.append(pformat(sys.modules))
return ''.join(output)
class FilterNode(Node):
def __init__(self, filter_expr, nodelist):
self.filter_expr, self.nodelist = filter_expr, nodelist
def render(self, context):
output = self.nodelist.render(context)
# Apply filters.
context.update({'var': output})
filtered = self.filter_expr.resolve(context)
context.pop()
return filtered
class FirstOfNode(Node):
def __init__(self, variables, escape=False):
self.vars = variables
self.escape = escape # only while the "future" version exists
def render(self, context):
for var in self.vars:
value = var.resolve(context, True)
if value:
if not self.escape:
value = mark_safe(value)
return render_value_in_context(value, context)
return ''
class ForNode(Node):
child_nodelists = ('nodelist_loop', 'nodelist_empty')
def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
self.loopvars, self.sequence = loopvars, sequence
self.is_reversed = is_reversed
self.nodelist_loop = nodelist_loop
if nodelist_empty is None:
self.nodelist_empty = NodeList()
else:
self.nodelist_empty = nodelist_empty
def __repr__(self):
reversed_text = ' reversed' if self.is_reversed else ''
return "<For Node: for %s in %s, tail_len: %d%s>" % \
(', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
reversed_text)
def __iter__(self):
for node in self.nodelist_loop:
yield node
for node in self.nodelist_empty:
yield node
def render(self, context):
if 'forloop' in context:
parentloop = context['forloop']
else:
parentloop = {}
context.push()
try:
values = self.sequence.resolve(context, True)
except VariableDoesNotExist:
values = []
if values is None:
values = []
if not hasattr(values, '__len__'):
values = list(values)
len_values = len(values)
if len_values < 1:
context.pop()
return self.nodelist_empty.render(context)
nodelist = NodeList()
if self.is_reversed:
values = reversed(values)
unpack = len(self.loopvars) > 1
# Create a forloop value in the context. We'll update counters on each
# iteration just below.
loop_dict = context['forloop'] = {'parentloop': parentloop}
for i, item in enumerate(values):
# Shortcuts for current loop iteration number.
loop_dict['counter0'] = i
loop_dict['counter'] = i+1
# Reverse counter iteration numbers.
loop_dict['revcounter'] = len_values - i
loop_dict['revcounter0'] = len_values - i - 1
# Boolean values designating first and last times through loop.
loop_dict['first'] = (i == 0)
loop_dict['last'] = (i == len_values - 1)
pop_context = False
if unpack:
# If there are multiple loop variables, unpack the item into
# them.
try:
unpacked_vars = dict(zip(self.loopvars, item))
except TypeError:
pass
else:
pop_context = True
context.update(unpacked_vars)
else:
context[self.loopvars[0]] = item
# In TEMPLATE_DEBUG mode provide source of the node which
# actually raised the exception
if settings.TEMPLATE_DEBUG:
for node in self.nodelist_loop:
try:
nodelist.append(node.render(context))
except Exception as e:
if not hasattr(e, 'django_template_source'):
e.django_template_source = node.source
raise
else:
for node in self.nodelist_loop:
nodelist.append(node.render(context))
if pop_context:
# The loop variables were pushed on to the context so pop them
# off again. This is necessary because the tag lets the length
# of loopvars differ to the length of each set of items and we
# don't want to leave any vars from the previous loop on the
# context.
context.pop()
context.pop()
return nodelist.render(context)
class IfChangedNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, nodelist_true, nodelist_false, *varlist):
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self._varlist = varlist
def render(self, context):
# Init state storage
state_frame = self._get_context_stack_frame(context)
if self not in state_frame:
state_frame[self] = None
nodelist_true_output = None
try:
if self._varlist:
# Consider multiple parameters. This automatically behaves
# like an OR evaluation of the multiple variables.
compare_to = [var.resolve(context, True) for var in self._varlist]
else:
# The "{% ifchanged %}" syntax (without any variables) compares the rendered output.
compare_to = nodelist_true_output = self.nodelist_true.render(context)
except VariableDoesNotExist:
compare_to = None
if compare_to != state_frame[self]:
state_frame[self] = compare_to
return nodelist_true_output or self.nodelist_true.render(context) # render true block if not already rendered
elif self.nodelist_false:
return self.nodelist_false.render(context)
return ''
def _get_context_stack_frame(self, context):
# The Context object behaves like a stack where each template tag can create a new scope.
# Find the place where to store the state to detect changes.
if 'forloop' in context:
# Ifchanged is bound to the local for loop.
# When there is a loop-in-loop, the state is bound to the inner loop,
# so it resets when the outer loop continues.
return context['forloop']
else:
# Using ifchanged outside loops. Effectively this is a no-op because the state is associated with 'self'.
return context.render_context
class IfEqualNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
self.var1, self.var2 = var1, var2
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self.negate = negate
def __repr__(self):
return "<IfEqualNode>"
def render(self, context):
val1 = self.var1.resolve(context, True)
val2 = self.var2.resolve(context, True)
if (self.negate and val1 != val2) or (not self.negate and val1 == val2):
return self.nodelist_true.render(context)
return self.nodelist_false.render(context)
class IfNode(Node):
def __init__(self, conditions_nodelists):
self.conditions_nodelists = conditions_nodelists
def __repr__(self):
return "<IfNode>"
def __iter__(self):
for _, nodelist in self.conditions_nodelists:
for node in nodelist:
yield node
@property
def nodelist(self):
return NodeList(node for _, nodelist in self.conditions_nodelists for node in nodelist)
def render(self, context):
for condition, nodelist in self.conditions_nodelists:
if condition is not None: # if / elif clause
try:
match = condition.eval(context)
except VariableDoesNotExist:
match = None
else: # else clause
match = True
if match:
return nodelist.render(context)
return ''
class RegroupNode(Node):
def __init__(self, target, expression, var_name):
self.target, self.expression = target, expression
self.var_name = var_name
def resolve_expression(self, obj, context):
# This method is called for each object in self.target. See regroup()
# for the reason why we temporarily put the object in the context.
context[self.var_name] = obj
return self.expression.resolve(context, True)
def render(self, context):
obj_list = self.target.resolve(context, True)
        if obj_list is None:
# target variable wasn't found in context; fail silently.
context[self.var_name] = []
return ''
# List of dictionaries in the format:
# {'grouper': 'key', 'list': [list of contents]}.
context[self.var_name] = [
{'grouper': key, 'list': list(val)}
for key, val in
groupby(obj_list, lambda obj: self.resolve_expression(obj, context))
]
return ''
def include_is_allowed(filepath):
filepath = os.path.abspath(filepath)
for root in settings.ALLOWED_INCLUDE_ROOTS:
if filepath.startswith(root):
return True
return False
class SsiNode(Node):
def __init__(self, filepath, parsed):
self.filepath = filepath
self.parsed = parsed
def render(self, context):
filepath = self.filepath.resolve(context)
if not include_is_allowed(filepath):
if settings.DEBUG:
return "[Didn't have permission to include file]"
else:
return '' # Fail silently for invalid includes.
try:
with open(filepath, 'r') as fp:
output = fp.read()
except IOError:
output = ''
if self.parsed:
try:
t = Template(output, name=filepath)
return t.render(context)
except TemplateSyntaxError as e:
if settings.DEBUG:
return "[Included template had syntax error: %s]" % e
else:
return '' # Fail silently for invalid included templates.
return output
class LoadNode(Node):
def render(self, context):
return ''
class NowNode(Node):
def __init__(self, format_string):
self.format_string = format_string
def render(self, context):
tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
return date(datetime.now(tz=tzinfo), self.format_string)
class SpacelessNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
from django.utils.html import strip_spaces_between_tags
return strip_spaces_between_tags(self.nodelist.render(context).strip())
class TemplateTagNode(Node):
mapping = {'openblock': BLOCK_TAG_START,
'closeblock': BLOCK_TAG_END,
'openvariable': VARIABLE_TAG_START,
'closevariable': VARIABLE_TAG_END,
'openbrace': SINGLE_BRACE_START,
'closebrace': SINGLE_BRACE_END,
'opencomment': COMMENT_TAG_START,
'closecomment': COMMENT_TAG_END,
}
def __init__(self, tagtype):
self.tagtype = tagtype
def render(self, context):
return self.mapping.get(self.tagtype, '')
class URLNode(Node):
def __init__(self, view_name, args, kwargs, asvar):
self.view_name = view_name
self.args = args
self.kwargs = kwargs
self.asvar = asvar
def render(self, context):
from django.core.urlresolvers import reverse, NoReverseMatch
args = [arg.resolve(context) for arg in self.args]
kwargs = dict([(smart_text(k, 'ascii'), v.resolve(context))
for k, v in self.kwargs.items()])
view_name = self.view_name.resolve(context)
if not view_name:
raise NoReverseMatch("'url' requires a non-empty first argument. "
"The syntax changed in Django 1.5, see the docs.")
# Try to look up the URL twice: once given the view name, and again
# relative to what we guess is the "main" app. If they both fail,
# re-raise the NoReverseMatch unless we're using the
# {% url ... as var %} construct in which case return nothing.
url = ''
try:
url = reverse(view_name, args=args, kwargs=kwargs, current_app=context.current_app)
except NoReverseMatch:
exc_info = sys.exc_info()
if settings.SETTINGS_MODULE:
project_name = settings.SETTINGS_MODULE.split('.')[0]
try:
url = reverse(project_name + '.' + view_name,
args=args, kwargs=kwargs,
current_app=context.current_app)
except NoReverseMatch:
if self.asvar is None:
# Re-raise the original exception, not the one with
# the path relative to the project. This makes a
# better error message.
six.reraise(*exc_info)
else:
if self.asvar is None:
raise
if self.asvar:
context[self.asvar] = url
return ''
else:
return url
class VerbatimNode(Node):
def __init__(self, content):
self.content = content
def render(self, context):
return self.content
class WidthRatioNode(Node):
def __init__(self, val_expr, max_expr, max_width):
self.val_expr = val_expr
self.max_expr = max_expr
self.max_width = max_width
def render(self, context):
try:
value = self.val_expr.resolve(context)
max_value = self.max_expr.resolve(context)
max_width = int(self.max_width.resolve(context))
except VariableDoesNotExist:
return ''
except (ValueError, TypeError):
raise TemplateSyntaxError("widthratio final argument must be a number")
try:
value = float(value)
max_value = float(max_value)
ratio = (value / max_value) * max_width
except ZeroDivisionError:
return '0'
except (ValueError, TypeError):
return ''
return str(int(round(ratio)))
class WithNode(Node):
def __init__(self, var, name, nodelist, extra_context=None):
self.nodelist = nodelist
# var and name are legacy attributes, being left in case they are used
# by third-party subclasses of this Node.
self.extra_context = extra_context or {}
if name:
self.extra_context[name] = var
def __repr__(self):
return "<WithNode>"
def render(self, context):
values = dict([(key, val.resolve(context)) for key, val in
six.iteritems(self.extra_context)])
context.update(values)
output = self.nodelist.render(context)
context.pop()
return output
@register.tag
def autoescape(parser, token):
"""
Force autoescape behavior for this block.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
args = token.contents.split()
if len(args) != 2:
raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
arg = args[1]
if arg not in ('on', 'off'):
raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
nodelist = parser.parse(('endautoescape',))
parser.delete_first_token()
return AutoEscapeControlNode((arg == 'on'), nodelist)
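# Illustrative usage sketch, not part of Django (never called at import
# time; assumes a configured Django environment, e.g. settings.configure()
# was called): with autoescaping forced off, markup passes through intact.
def _autoescape_usage_sketch():
    from django.template import Context, Template
    t = Template("{% autoescape off %}{{ raw }}{% endautoescape %}")
    return t.render(Context({'raw': '<b>hi</b>'}))  # -> '<b>hi</b>'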
@register.tag
def comment(parser, token):
"""
Ignores everything between ``{% comment %}`` and ``{% endcomment %}``.
"""
parser.skip_past('endcomment')
return CommentNode()
@register.tag
def cycle(parser, token, escape=False):
"""
Cycles among the given strings each time this tag is encountered.
Within a loop, cycles among the given strings each time through
the loop::
{% for o in some_list %}
<tr class="{% cycle 'row1' 'row2' %}">
...
</tr>
{% endfor %}
Outside of a loop, give the values a unique name the first time you call
    it, then use that name each successive time through::
<tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
You can use any number of values, separated by spaces. Commas can also
be used to separate values; if a comma is used, the cycle values are
interpreted as literal strings.
The optional flag "silent" can be used to prevent the cycle declaration
from returning any value::
{% for o in some_list %}
{% cycle 'row1' 'row2' as rowcolors silent %}
<tr class="{{ rowcolors }}">{% include "subtemplate.html " %}</tr>
{% endfor %}
"""
if not escape:
        warnings.warn(
            "The `cycle` template tag is changing to escape its arguments; "
            "the non-autoescaping version is deprecated. Load it "
            "from the `future` tag library to start using the new behavior.",
            PendingDeprecationWarning, stacklevel=2)
# Note: This returns the exact same node on each {% cycle name %} call;
# that is, the node object returned from {% cycle a b c as name %} and the
# one returned from {% cycle name %} are the exact same object. This
# shouldn't cause problems (heh), but if it does, now you know.
#
# Ugly hack warning: This stuffs the named template dict into parser so
# that names are only unique within each template (as opposed to using
# a global variable, which would make cycle names have to be unique across
    # *all* templates).
args = token.split_contents()
if len(args) < 2:
raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
if ',' in args[1]:
# Backwards compatibility: {% cycle a,b %} or {% cycle a,b as foo %}
# case.
args[1:2] = ['"%s"' % arg for arg in args[1].split(",")]
if len(args) == 2:
# {% cycle foo %} case.
name = args[1]
if not hasattr(parser, '_namedCycleNodes'):
raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
        if name not in parser._namedCycleNodes:
raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
return parser._namedCycleNodes[name]
as_form = False
if len(args) > 4:
# {% cycle ... as foo [silent] %} case.
if args[-3] == "as":
if args[-1] != "silent":
raise TemplateSyntaxError("Only 'silent' flag is allowed after cycle's name, not '%s'." % args[-1])
as_form = True
silent = True
args = args[:-1]
elif args[-2] == "as":
as_form = True
silent = False
if as_form:
name = args[-1]
values = [parser.compile_filter(arg) for arg in args[1:-2]]
node = CycleNode(values, name, silent=silent, escape=escape)
if not hasattr(parser, '_namedCycleNodes'):
parser._namedCycleNodes = {}
parser._namedCycleNodes[name] = node
else:
values = [parser.compile_filter(arg) for arg in args[1:]]
node = CycleNode(values, escape=escape)
return node
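# Illustrative usage sketch, not part of Django (never called at import
# time; assumes a configured Django environment): within one render, the
# cycle state advances on each loop iteration.
def _cycle_usage_sketch():
    from django.template import Context, Template
    t = Template("{% for x in items %}{% cycle 'row1' 'row2' %} {% endfor %}")
    # Three iterations alternate through the two values:
    return t.render(Context({'items': [1, 2, 3]}))  # -> 'row1 row2 row1 '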
@register.tag
def csrf_token(parser, token):
return CsrfTokenNode()
@register.tag
def debug(parser, token):
"""
Outputs a whole load of debugging information, including the current
context and imported modules.
Sample usage::
<pre>
{% debug %}
</pre>
"""
return DebugNode()
@register.tag('filter')
def do_filter(parser, token):
"""
Filters the contents of the block through variable filters.
Filters can also be piped through each other, and they can have
arguments -- just like in variable syntax.
Sample usage::
{% filter force_escape|lower %}
This text will be HTML-escaped, and will appear in lowercase.
{% endfilter %}
Note that the ``escape`` and ``safe`` filters are not acceptable arguments.
Instead, use the ``autoescape`` tag to manage autoescaping for blocks of
template code.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
_, rest = token.contents.split(None, 1)
filter_expr = parser.compile_filter("var|%s" % (rest))
for func, unused in filter_expr.filters:
filter_name = getattr(func, '_filter_name', None)
if filter_name in ('escape', 'safe'):
raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % filter_name)
nodelist = parser.parse(('endfilter',))
parser.delete_first_token()
return FilterNode(filter_expr, nodelist)
@register.tag
def firstof(parser, token, escape=False):
"""
Outputs the first variable passed that is not False, without escaping.
Outputs nothing if all the passed variables are False.
Sample usage::
{% firstof var1 var2 var3 %}
This is equivalent to::
{% if var1 %}
{{ var1|safe }}
{% elif var2 %}
{{ var2|safe }}
{% elif var3 %}
{{ var3|safe }}
{% endif %}
but obviously much cleaner!
You can also use a literal string as a fallback value in case all
passed variables are False::
{% firstof var1 var2 var3 "fallback value" %}
If you want to escape the output, use a filter tag::
{% filter force_escape %}
{% firstof var1 var2 var3 "fallback value" %}
{% endfilter %}
"""
if not escape:
        warnings.warn(
            "The `firstof` template tag is changing to escape its arguments; "
            "the non-autoescaping version is deprecated. Load it "
            "from the `future` tag library to start using the new behavior.",
            PendingDeprecationWarning, stacklevel=2)
bits = token.split_contents()[1:]
if len(bits) < 1:
raise TemplateSyntaxError("'firstof' statement requires at least one argument")
return FirstOfNode([parser.compile_filter(bit) for bit in bits], escape=escape)
@register.tag('for')
def do_for(parser, token):
"""
Loops over each item in an array.
For example, to display a list of athletes given ``athlete_list``::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
</ul>
You can loop over a list in reverse by using
``{% for obj in list reversed %}``.
You can also unpack multiple values from a two-dimensional array::
{% for key,value in dict.items %}
{{ key }}: {{ value }}
{% endfor %}
The ``for`` tag can take an optional ``{% empty %}`` clause that will
be displayed if the given array is empty or could not be found::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% empty %}
<li>Sorry, no athletes in this list.</li>
{% endfor %}
<ul>
The above is equivalent to -- but shorter, cleaner, and possibly faster
than -- the following::
<ul>
          {% if athlete_list %}
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
{% else %}
<li>Sorry, no athletes in this list.</li>
{% endif %}
</ul>
The for loop sets a number of variables available within the loop:
========================== ================================================
Variable Description
========================== ================================================
``forloop.counter`` The current iteration of the loop (1-indexed)
``forloop.counter0`` The current iteration of the loop (0-indexed)
``forloop.revcounter`` The number of iterations from the end of the
loop (1-indexed)
``forloop.revcounter0`` The number of iterations from the end of the
loop (0-indexed)
``forloop.first`` True if this is the first time through the loop
``forloop.last`` True if this is the last time through the loop
``forloop.parentloop`` For nested loops, this is the loop "above" the
current one
========================== ================================================
"""
bits = token.split_contents()
if len(bits) < 4:
raise TemplateSyntaxError("'for' statements should have at least four"
" words: %s" % token.contents)
is_reversed = bits[-1] == 'reversed'
in_index = -3 if is_reversed else -2
if bits[in_index] != 'in':
raise TemplateSyntaxError("'for' statements should use the format"
" 'for x in y': %s" % token.contents)
loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
for var in loopvars:
if not var or ' ' in var:
raise TemplateSyntaxError("'for' tag received an invalid argument:"
" %s" % token.contents)
sequence = parser.compile_filter(bits[in_index+1])
nodelist_loop = parser.parse(('empty', 'endfor',))
token = parser.next_token()
if token.contents == 'empty':
nodelist_empty = parser.parse(('endfor',))
parser.delete_first_token()
else:
nodelist_empty = None
return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
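# Illustrative usage sketch, not part of Django (never called at import
# time; assumes a configured Django environment): the {% empty %} branch
# parsed above renders when the sequence is empty.
def _for_empty_usage_sketch():
    from django.template import Context, Template
    t = Template("{% for x in items %}{{ x }}{% empty %}none{% endfor %}")
    return t.render(Context({'items': []}))  # -> 'none'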
def do_ifequal(parser, token, negate):
bits = list(token.split_contents())
if len(bits) != 3:
raise TemplateSyntaxError("%r takes two arguments" % bits[0])
end_tag = 'end' + bits[0]
nodelist_true = parser.parse(('else', end_tag))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse((end_tag,))
parser.delete_first_token()
else:
nodelist_false = NodeList()
val1 = parser.compile_filter(bits[1])
val2 = parser.compile_filter(bits[2])
return IfEqualNode(val1, val2, nodelist_true, nodelist_false, negate)
@register.tag
def ifequal(parser, token):
"""
Outputs the contents of the block if the two arguments equal each other.
Examples::
{% ifequal user.id comment.user_id %}
...
{% endifequal %}
{% ifnotequal user.id comment.user_id %}
...
{% else %}
...
{% endifnotequal %}
"""
return do_ifequal(parser, token, False)
@register.tag
def ifnotequal(parser, token):
"""
Outputs the contents of the block if the two arguments are not equal.
See ifequal.
"""
return do_ifequal(parser, token, True)
class TemplateLiteral(Literal):
def __init__(self, value, text):
self.value = value
self.text = text # for better error messages
def display(self):
return self.text
def eval(self, context):
return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
error_class = TemplateSyntaxError
def __init__(self, parser, *args, **kwargs):
self.template_parser = parser
super(TemplateIfParser, self).__init__(*args, **kwargs)
def create_var(self, value):
return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag('if')
def do_if(parser, token):
"""
The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
(i.e., exists, is not empty, and is not a false boolean value), the
contents of the block are output:
::
{% if athlete_list %}
Number of athletes: {{ athlete_list|count }}
{% elif athlete_in_locker_room_list %}
Athletes should be out of the locker room soon!
{% else %}
No athletes.
{% endif %}
In the above, if ``athlete_list`` is not empty, the number of athletes will
be displayed by the ``{{ athlete_list|count }}`` variable.
    As you can see, the ``if`` tag may take one or several ``{% elif %}``
clauses, as well as an ``{% else %}`` clause that will be displayed if all
previous conditions fail. These clauses are optional.
``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
variables or to negate a given variable::
{% if not athlete_list %}
There are no athletes.
{% endif %}
{% if athlete_list or coach_list %}
There are some athletes or some coaches.
{% endif %}
{% if athlete_list and coach_list %}
            Both athletes and coaches are available.
{% endif %}
{% if not athlete_list or coach_list %}
There are no athletes, or there are some coaches.
{% endif %}
{% if athlete_list and not coach_list %}
There are some athletes and absolutely no coaches.
{% endif %}
Comparison operators are also available, and the use of filters is also
allowed, for example::
{% if articles|length >= 5 %}...{% endif %}
Arguments and operators _must_ have a space between them, so
``{% if 1>2 %}`` is not a valid if tag.
    All supported operators are: ``or``, ``and``, ``in``, ``not in``,
``==`` (or ``=``), ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
Operator precedence follows Python.
"""
# {% if ... %}
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists = [(condition, nodelist)]
token = parser.next_token()
# {% elif ... %} (repeatable)
while token.contents.startswith('elif'):
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists.append((condition, nodelist))
token = parser.next_token()
# {% else %} (optional)
if token.contents == 'else':
nodelist = parser.parse(('endif',))
conditions_nodelists.append((None, nodelist))
token = parser.next_token()
# {% endif %}
assert token.contents == 'endif'
return IfNode(conditions_nodelists)
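# Illustrative usage sketch, not part of Django (never called at import
# time; assumes a configured Django environment): conditions are tested in
# order and the first true branch wins, mirroring the conditions_nodelists
# structure built above.
def _if_elif_usage_sketch():
    from django.template import Context, Template
    t = Template("{% if a %}A{% elif b %}B{% else %}C{% endif %}")
    return t.render(Context({'a': False, 'b': True}))  # -> 'B'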
@register.tag
def ifchanged(parser, token):
"""
Checks if a value has changed from the last iteration of a loop.
The ``{% ifchanged %}`` block tag is used within a loop. It has two
possible uses.
1. Checks its own rendered contents against its previous state and only
displays the content if it has changed. For example, this displays a
list of days, only displaying the month if it changes::
<h1>Archive for {{ year }}</h1>
{% for date in days %}
{% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
<a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
{% endfor %}
2. If given one or more variables, check whether any variable has changed.
For example, the following shows the date every time it changes, while
showing the hour if either the hour or the date has changed::
{% for date in days %}
{% ifchanged date.date %} {{ date.date }} {% endifchanged %}
{% ifchanged date.hour date.date %}
{{ date.hour }}
{% endifchanged %}
{% endfor %}
"""
bits = token.split_contents()
nodelist_true = parser.parse(('else', 'endifchanged'))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse(('endifchanged',))
parser.delete_first_token()
else:
nodelist_false = NodeList()
values = [parser.compile_filter(bit) for bit in bits[1:]]
return IfChangedNode(nodelist_true, nodelist_false, *values)
@register.tag
def ssi(parser, token):
"""
Outputs the contents of a given file into the page.
Like a simple "include" tag, the ``ssi`` tag includes the contents
of another file -- which must be specified using an absolute path --
in the current page::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" %}
If the optional "parsed" parameter is given, the contents of the included
file are evaluated as template code, with the current context::
{% ssi "/home/html/ljworld.com/includes/right_generic.html" parsed %}
"""
bits = token.split_contents()
parsed = False
if len(bits) not in (2, 3):
raise TemplateSyntaxError("'ssi' tag takes one argument: the path to"
" the file to be included")
if len(bits) == 3:
if bits[2] == 'parsed':
parsed = True
else:
raise TemplateSyntaxError("Second (optional) argument to %s tag"
" must be 'parsed'" % bits[0])
filepath = parser.compile_filter(bits[1])
return SsiNode(filepath, parsed)
@register.tag
def load(parser, token):
"""
Loads a custom template tag set.
For example, to load the template tags in
``django/templatetags/news/photos.py``::
{% load news.photos %}
Can also be used to load an individual tag/filter from
a library::
{% load byline from news %}
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments
bits = token.contents.split()
if len(bits) >= 4 and bits[-2] == "from":
try:
taglib = bits[-1]
lib = get_library(taglib)
except InvalidTemplateLibrary as e:
raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
(taglib, e))
else:
temp_lib = Library()
for name in bits[1:-2]:
if name in lib.tags:
temp_lib.tags[name] = lib.tags[name]
# a name could be a tag *and* a filter, so check for both
if name in lib.filters:
temp_lib.filters[name] = lib.filters[name]
elif name in lib.filters:
temp_lib.filters[name] = lib.filters[name]
else:
raise TemplateSyntaxError("'%s' is not a valid tag or filter in tag library '%s'" %
(name, taglib))
parser.add_library(temp_lib)
else:
for taglib in bits[1:]:
# add the library to the parser
try:
lib = get_library(taglib)
parser.add_library(lib)
except InvalidTemplateLibrary as e:
raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
(taglib, e))
return LoadNode()
@register.tag
def now(parser, token):
"""
Displays the date, formatted according to the given string.
Uses the same format as PHP's ``date()`` function; see http://php.net/date
for all the possible values.
Sample usage::
It is {% now "jS F Y H:i" %}
"""
bits = token.split_contents()
if len(bits) != 2:
raise TemplateSyntaxError("'now' statement takes one argument")
format_string = bits[1][1:-1]
return NowNode(format_string)
@register.tag
def regroup(parser, token):
"""
Regroups a list of alike objects by a common attribute.
This complex tag is best illustrated by use of an example: say that
``people`` is a list of ``Person`` objects that have ``first_name``,
``last_name``, and ``gender`` attributes, and you'd like to display a list
that looks like:
* Male:
* George Bush
* Bill Clinton
* Female:
* Margaret Thatcher
            * Condoleezza Rice
* Unknown:
* Pat Smith
The following snippet of template code would accomplish this dubious task::
{% regroup people by gender as grouped %}
<ul>
{% for group in grouped %}
<li>{{ group.grouper }}
<ul>
{% for item in group.list %}
<li>{{ item }}</li>
{% endfor %}
</ul>
{% endfor %}
</ul>
As you can see, ``{% regroup %}`` populates a variable with a list of
objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
item that was grouped by; ``list`` contains the list of objects that share
that ``grouper``. In this case, ``grouper`` would be ``Male``, ``Female``
and ``Unknown``, and ``list`` is the list of people with those genders.
Note that ``{% regroup %}`` does not work when the list to be grouped is not
sorted by the key you are grouping by! This means that if your list of
people was not sorted by gender, you'd need to make sure it is sorted
before using it, i.e.::
{% regroup people|dictsort:"gender" by gender as grouped %}
"""
bits = token.split_contents()
if len(bits) != 6:
raise TemplateSyntaxError("'regroup' tag takes five arguments")
target = parser.compile_filter(bits[1])
if bits[2] != 'by':
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != 'as':
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
" be 'as'")
var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and
# group by the resulting value. After all items are processed, it will
# save the final result in the context under 'var_name', thus clearing the
# temporary values. This hack is necessary because the template engine
# doesn't provide a context-aware equivalent of Python's getattr.
expression = parser.compile_filter(var_name +
VARIABLE_ATTRIBUTE_SEPARATOR +
bits[3])
return RegroupNode(target, expression, var_name)
@register.tag
def spaceless(parser, token):
"""
Removes whitespace between HTML tags, including tab and newline characters.
Example usage::
{% spaceless %}
<p>
<a href="foo/">Foo</a>
</p>
{% endspaceless %}
This example would return this HTML::
<p><a href="foo/">Foo</a></p>
Only space between *tags* is normalized -- not space between tags and text.
In this example, the space around ``Hello`` won't be stripped::
{% spaceless %}
<strong>
Hello
</strong>
{% endspaceless %}
"""
nodelist = parser.parse(('endspaceless',))
parser.delete_first_token()
return SpacelessNode(nodelist)
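# Illustrative usage sketch, not part of Django (never called at import
# time; assumes a configured Django environment): only whitespace
# *between* tags is removed, as documented above.
def _spaceless_usage_sketch():
    from django.template import Context, Template
    t = Template("{% spaceless %}<p>\n <a>Foo</a>\n</p>{% endspaceless %}")
    return t.render(Context())  # -> '<p><a>Foo</a></p>'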
@register.tag
def templatetag(parser, token):
"""
Outputs one of the bits used to compose template tags.
Since the template system has no concept of "escaping", to display one of
the bits used in template tags, you must use the ``{% templatetag %}`` tag.
The argument tells which template bit to output:
================== =======
Argument Outputs
================== =======
``openblock`` ``{%``
``closeblock`` ``%}``
``openvariable`` ``{{``
``closevariable`` ``}}``
``openbrace`` ``{``
``closebrace`` ``}``
``opencomment`` ``{#``
``closecomment`` ``#}``
================== =======
"""
# token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'templatetag' statement takes one argument")
tag = bits[1]
if tag not in TemplateTagNode.mapping:
raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
" Must be one of: %s" %
(tag, list(TemplateTagNode.mapping)))
return TemplateTagNode(tag)
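# Illustrative usage sketch, not part of Django (never called at import
# time; assumes a configured Django environment): composing a literal
# "{% url %}" via the mapping in TemplateTagNode.
def _templatetag_usage_sketch():
    from django.template import Context, Template
    t = Template("{% templatetag openblock %} url {% templatetag closeblock %}")
    return t.render(Context())  # -> '{% url %}'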
@register.tag
def url(parser, token):
"""
Returns an absolute URL matching given view with its parameters.
This is a way to define links that aren't tied to a particular URL
configuration::
{% url "path.to.some_view" arg1 arg2 %}
or
{% url "path.to.some_view" name1=value1 name2=value2 %}
The first argument is a path to a view. It can be an absolute Python path
or just ``app_name.view_name`` without the project name if the view is
located inside the project.
Other arguments are space-separated values that will be filled in place of
positional and keyword arguments in the URL. Don't mix positional and
keyword arguments.
All arguments for the URL should be present.
For example if you have a view ``app_name.client`` taking client's id and
the corresponding line in a URLconf looks like this::
('^client/(\d+)/$', 'app_name.client')
and this app's URLconf is included into the project's URLconf under some
path::
('^clients/', include('project_name.app_name.urls'))
then in a template you can create a link for a certain client like this::
{% url "app_name.client" client.id %}
The URL will look like ``/clients/client/123/``.
The first argument can also be a named URL instead of the Python path to
the view callable. For example if the URLconf entry looks like this::
url('^client/(\d+)/$', name='client-detail-view')
then in the template you can use::
{% url "client-detail-view" client.id %}
There is even another possible value type for the first argument. It can be
the name of a template variable that will be evaluated to obtain the view
name or the URL name, e.g.::
{% with view_path="app_name.client" %}
{% url view_path client.id %}
{% endwith %}
or,
{% with url_name="client-detail-view" %}
{% url url_name client.id %}
{% endwith %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("'%s' takes at least one argument"
" (path to a view)" % bits[0])
try:
viewname = parser.compile_filter(bits[1])
except TemplateSyntaxError as exc:
exc.args = (exc.args[0] + ". "
"The syntax of 'url' changed in Django 1.5, see the docs."),
raise
args = []
kwargs = {}
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
    if bits:
for bit in bits:
match = kwarg_re.match(bit)
if not match:
raise TemplateSyntaxError("Malformed arguments to url tag")
name, value = match.groups()
if name:
kwargs[name] = parser.compile_filter(value)
else:
args.append(parser.compile_filter(value))
return URLNode(viewname, args, kwargs, asvar)
@register.tag
def verbatim(parser, token):
"""
Stops the template engine from rendering the contents of this block tag.
Usage::
{% verbatim %}
{% don't process this %}
{% endverbatim %}
You can also designate a specific closing tag block (allowing the
unrendered use of ``{% endverbatim %}``)::
{% verbatim myblock %}
...
{% endverbatim myblock %}
"""
nodelist = parser.parse(('endverbatim',))
parser.delete_first_token()
return VerbatimNode(nodelist.render(Context()))
@register.tag
def widthratio(parser, token):
"""
For creating bar charts and such, this tag calculates the ratio of a given
value to a maximum value, and then applies that ratio to a constant.
For example::
<img src='bar.gif' height='10' width='{% widthratio this_value max_value max_width %}' />
If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
the image in the above example will be 88 pixels wide
(because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88).
"""
bits = token.split_contents()
if len(bits) != 4:
raise TemplateSyntaxError("widthratio takes three arguments")
tag, this_value_expr, max_value_expr, max_width = bits
return WidthRatioNode(parser.compile_filter(this_value_expr),
parser.compile_filter(max_value_expr),
parser.compile_filter(max_width))
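# Illustrative usage sketch, not part of Django (never called at import
# time; assumes a configured Django environment): checks the rounding
# behavior documented above (175/200 * 100 = 87.5, rounded to 88).
def _widthratio_usage_sketch():
    from django.template import Context, Template
    t = Template("{% widthratio this max width %}")
    return t.render(Context({'this': 175, 'max': 200, 'width': 100}))  # -> '88'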
@register.tag('with')
def do_with(parser, token):
"""
Adds one or more values to the context (inside of this block) for caching
and easy access.
For example::
{% with total=person.some_sql_method %}
{{ total }} object{{ total|pluralize }}
{% endwith %}
Multiple values can be added to the context::
{% with foo=1 bar=2 %}
...
{% endwith %}
The legacy format of ``{% with person.some_sql_method as total %}`` is
still accepted.
"""
bits = token.split_contents()
remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context:
raise TemplateSyntaxError("%r expected at least one variable "
"assignment" % bits[0])
if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0]))
nodelist = parser.parse(('endwith',))
parser.delete_first_token()
return WithNode(None, None, nodelist, extra_context=extra_context)
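# Illustrative usage sketch, not part of Django (never called at import
# time; assumes a configured Django environment): a value bound with
# {% with %} is only visible inside the block.
def _with_usage_sketch():
    from django.template import Context, Template
    t = Template("{% with total=3 %}{{ total }}{% endwith %}")
    return t.render(Context())  # -> '3'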
|
zhangpf/vbox | refs/heads/master | src/VBox/ValidationKit/testmanager/webui/wuiadminbuildsource.py | 4 | # -*- coding: utf-8 -*-
# $Id$
"""
Test Manager WUI - Build Sources.
"""
__copyright__ = \
"""
Copyright (C) 2012-2014 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision$"
# Validation Kit imports.
from common import utils, webutils;
from testmanager.webui.wuicontentbase import WuiFormContentBase, WuiListContentBase, WuiTmLink, WuiRawHtml;
from testmanager.core import coreconsts;
from testmanager.core.db import isDbTimestampInfinity;
from testmanager.core.buildsource import BuildSourceData;
class WuiAdminBuildSrc(WuiFormContentBase):
"""
WUI Build Sources HTML content generator.
"""
def __init__(self, oData, sMode, oDisp):
assert isinstance(oData, BuildSourceData);
if sMode == WuiFormContentBase.ksMode_Add:
sTitle = 'New Build Source';
elif sMode == WuiFormContentBase.ksMode_Edit:
sTitle = 'Edit Build Source - %s (#%s)' % (oData.sName, oData.idBuildSrc,);
else:
assert sMode == WuiFormContentBase.ksMode_Show;
sTitle = 'Build Source - %s (#%s)' % (oData.sName, oData.idBuildSrc,);
WuiFormContentBase.__init__(self, oData, sMode, 'BuildSrc', oDisp, sTitle);
def _populateForm(self, oForm, oData):
oForm.addIntRO (BuildSourceData.ksParam_idBuildSrc, oData.idBuildSrc, 'Build Source item ID')
oForm.addTimestampRO(BuildSourceData.ksParam_tsEffective, oData.tsEffective, 'Last changed')
oForm.addTimestampRO(BuildSourceData.ksParam_tsExpire, oData.tsExpire, 'Expires (excl)')
oForm.addIntRO (BuildSourceData.ksParam_uidAuthor, oData.uidAuthor, 'Changed by UID')
oForm.addText (BuildSourceData.ksParam_sName, oData.sName, 'Name')
oForm.addText (BuildSourceData.ksParam_sDescription, oData.sDescription, 'Description')
oForm.addText (BuildSourceData.ksParam_sProduct, oData.sProduct, 'Product')
oForm.addText (BuildSourceData.ksParam_sBranch, oData.sBranch, 'Branch')
asTypes = self.getListOfItems(coreconsts.g_kasBuildTypesAll, oData.asTypes);
oForm.addListOfTypes(BuildSourceData.ksParam_asTypes, asTypes, 'Build types')
asOsArches = self.getListOfItems(coreconsts.g_kasOsDotCpusAll, oData.asOsArches);
oForm.addListOfOsArches(BuildSourceData.ksParam_asOsArches, asOsArches, 'Target architectures')
oForm.addInt (BuildSourceData.ksParam_iFirstRevision, oData.iFirstRevision, 'Starting from revision')
oForm.addInt (BuildSourceData.ksParam_iLastRevision, oData.iLastRevision, 'Ending by revision')
oForm.addLong (BuildSourceData.ksParam_cSecMaxAge,
utils.formatIntervalSeconds2(oData.cSecMaxAge) if oData.cSecMaxAge not in [-1, '', None] else '',
'Max age in seconds');
oForm.addSubmit();
return True;
class WuiAdminBuildSrcList(WuiListContentBase):
"""
WUI Build Source content generator.
"""
def __init__(self, aoEntries, iPage, cItemsPerPage, tsEffective, fnDPrint, oDisp):
WuiListContentBase.__init__(self, aoEntries, iPage, cItemsPerPage, tsEffective,
sTitle = 'Registered Build Sources', sId = 'build sources',
fnDPrint = fnDPrint, oDisp = oDisp);
self._asColumnHeaders = ['ID', 'Name', 'Description', 'Product',
'Branch', 'Build Types', 'OS/ARCH', 'First Revision', 'Last Revision', 'Max Age',
'Actions' ];
self._asColumnAttribs = ['align="center"', 'align="center"', 'align="center"', 'align="center"', 'align="center"',
'align="left"', 'align="left"', 'align="center"', 'align="center"', 'align="center"',
'align="center"' ];
def _getSubList(self, aList):
"""
Convert pythonic list into HTML list
"""
if aList not in (None, []):
sHtml = ' <ul class="tmshowall">\n'
for sTmp in aList:
                sHtml += '    <li class="tmshowall">%s</li>\n' % (webutils.escapeElem(sTmp),);
sHtml += ' </ul>\n';
else:
sHtml = '<ul class="tmshowall"><li class="tmshowall">Any</li></ul>\n';
return WuiRawHtml(sHtml);
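    # Illustrative note (not in the original source): for
    # aList = ['linux.amd64', 'win.x86'] the method above produces a
    # <ul class="tmshowall"> containing one <li class="tmshowall"> per
    # entry, while None or [] yields a single "Any" list item.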
def _formatListEntry(self, iEntry):
"""
Format *show all* table entry
"""
from testmanager.webui.wuiadmin import WuiAdmin
oEntry = self._aoEntries[iEntry]
aoActions = [
WuiTmLink('Details', WuiAdmin.ksScriptName,
{ WuiAdmin.ksParamAction: WuiAdmin.ksActionBuildSrcDetails,
BuildSourceData.ksParam_idBuildSrc: oEntry.idBuildSrc,
WuiAdmin.ksParamEffectiveDate: self._tsEffectiveDate, }),
WuiTmLink('Clone', WuiAdmin.ksScriptName,
{ WuiAdmin.ksParamAction: WuiAdmin.ksActionBuildSrcClone,
BuildSourceData.ksParam_idBuildSrc: oEntry.idBuildSrc,
WuiAdmin.ksParamEffectiveDate: self._tsEffectiveDate, }),
];
if isDbTimestampInfinity(oEntry.tsExpire):
aoActions += [
WuiTmLink('Modify', WuiAdmin.ksScriptName,
{ WuiAdmin.ksParamAction: WuiAdmin.ksActionBuildSrcEdit,
BuildSourceData.ksParam_idBuildSrc: oEntry.idBuildSrc } ),
WuiTmLink('Remove', WuiAdmin.ksScriptName,
{ WuiAdmin.ksParamAction: WuiAdmin.ksActionBuildSrcDoRemove,
BuildSourceData.ksParam_idBuildSrc: oEntry.idBuildSrc },
sConfirm = 'Are you sure you want to remove build source #%d?' % (oEntry.idBuildSrc,) )
];
return [ oEntry.idBuildSrc,
oEntry.sName,
oEntry.sDescription,
oEntry.sProduct,
oEntry.sBranch,
self._getSubList(oEntry.asTypes),
self._getSubList(oEntry.asOsArches),
oEntry.iFirstRevision,
oEntry.iLastRevision,
utils.formatIntervalSeconds2(oEntry.cSecMaxAge) if oEntry.cSecMaxAge is not None else None,
aoActions,
]
|
neoareslinux/neutron | refs/heads/master | neutron/plugins/ml2/drivers/ibm/mechanism_sdnve.py | 42 | # Copyright 2015 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from networking_ibm.sdnve.ml2 import sdnve_driver
from oslo_log import log as logging
from neutron.common import constants as n_const
from neutron.extensions import portbindings
from neutron.plugins.ml2 import driver_api as api
LOG = logging.getLogger(__name__)
class SdnveMechanismDriver(api.MechanismDriver):
"""Ml2 Mechanism driver for IBM SDNVE Controller"""
def initialize(self):
self.vif_type = portbindings.VIF_TYPE_BRIDGE
self.vif_details = {portbindings.CAP_PORT_FILTER: False}
self.restrict_update_subnet = ['enable_dhcp',
'gateway_ip',
'allocation-pool']
self.restrict_update_network = ['router:external']
self.sdnve_drv = sdnve_driver.SdnveDriver()
# NETWORK
def create_network_precommit(self, context):
self.sdnve_drv._pre_create_network(context)
def create_network_postcommit(self, context):
self.sdnve_drv._create_network(context)
def update_network_precommit(self, context):
self.sdnve_drv._pre_update_network(context)
def update_network_postcommit(self, context):
self.sdnve_drv._update_network(context)
def delete_network_postcommit(self, context):
self.sdnve_drv._delete_network(context)
# SUBNET
def create_subnet_precommit(self, context):
self.sdnve_drv._pre_create_subnet(context)
def create_subnet_postcommit(self, context):
self.sdnve_drv._create_subnet(context)
def update_subnet_postcommit(self, context):
self.sdnve_drv._update_subnet(context)
def update_subnet_precommit(self, context):
self.sdnve_drv._pre_update_subnet(context)
def delete_subnet_postcommit(self, context):
self.sdnve_drv._delete_subnet(context)
# PORT
def create_port_postcommit(self, context):
self.sdnve_drv._create_port(context)
def create_port_precommit(self, context):
self.sdnve_drv._pre_create_port(context)
def delete_port_precommit(self, context):
self.sdnve_drv._pre_delete_port(context)
def update_port_postcommit(self, context):
self.sdnve_drv._update_port(context)
def delete_port_postcommit(self, context):
self.sdnve_drv._delete_port(context)
def bind_port(self, context):
LOG.debug("Attempting to bind port %(port)s on "
"network %(network)s",
{'port': context.current['id'],
'network': context.network.current['id']})
for segment in context.network.network_segments:
if self.sdnve_drv._check_segment(segment):
context.set_binding(segment[api.ID],
self.vif_type,
self.vif_details,
status=n_const.PORT_STATUS_ACTIVE)
LOG.debug("Bound using segment: %s", segment)
return
else:
LOG.debug("Refusing to bind port for segment ID %(id)s, "
"segment %(seg)s, phys net %(physnet)s, and "
"network type %(nettype)s",
{'id': segment[api.ID],
'seg': segment[api.SEGMENTATION_ID],
'physnet': segment[api.PHYSICAL_NETWORK],
'nettype': segment[api.NETWORK_TYPE]})
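# Illustrative configuration sketch (not part of this module; the driver
# alias below is an assumption -- check the package's entry points before
# relying on it): loading this mechanism driver through ML2 in
# ml2_conf.ini would look like:
#
#   [ml2]
#   mechanism_drivers = sdnve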
|
barbarubra/Don-t-know-What-i-m-doing. | refs/heads/master | python/src/Lib/plat-irix6/cdplayer.py | 33 | # This file implements a class which forms an interface to the .cdplayerrc
# file that is maintained by SGI's cdplayer program.
#
# Usage is as follows:
#
# import readcd
# r = readcd.Readcd()
# c = Cdplayer(r.gettrackinfo())
#
# Now you can use c.artist, c.title and c.track[trackno] (where trackno
# starts at 1). When the CD is not recognized, all values will be the empty
# string.
# It is also possible to set the above mentioned variables to new values.
# You can then use c.write() to write out the changed values to the
# .cdplayerrc file.
from warnings import warnpy3k
warnpy3k("the cdplayer module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
cdplayerrc = '.cdplayerrc'
class Cdplayer:
def __init__(self, tracklist):
import string
self.artist = ''
self.title = ''
if type(tracklist) == type(''):
t = []
for i in range(2, len(tracklist), 4):
t.append((None, \
(int(tracklist[i:i+2]), \
int(tracklist[i+2:i+4]))))
tracklist = t
self.track = [None] + [''] * len(tracklist)
self.id = 'd' + string.zfill(len(tracklist), 2)
for track in tracklist:
start, length = track
self.id = self.id + string.zfill(length[0], 2) + \
string.zfill(length[1], 2)
try:
import posix
f = open(posix.environ['HOME'] + '/' + cdplayerrc, 'r')
except IOError:
return
import re
reg = re.compile(r'^([^:]*):\t(.*)')
s = self.id + '.'
l = len(s)
while 1:
line = f.readline()
if line == '':
break
if line[:l] == s:
line = line[l:]
match = reg.match(line)
if not match:
print 'syntax error in ~/' + cdplayerrc
continue
name, value = match.group(1, 2)
if name == 'title':
self.title = value
elif name == 'artist':
self.artist = value
elif name[:5] == 'track':
trackno = int(name[6:])
self.track[trackno] = value
f.close()
def write(self):
import posix
filename = posix.environ['HOME'] + '/' + cdplayerrc
try:
old = open(filename, 'r')
except IOError:
old = open('/dev/null', 'r')
new = open(filename + '.new', 'w')
s = self.id + '.'
l = len(s)
while 1:
line = old.readline()
if line == '':
break
if line[:l] != s:
new.write(line)
new.write(self.id + '.title:\t' + self.title + '\n')
new.write(self.id + '.artist:\t' + self.artist + '\n')
for i in range(1, len(self.track)):
			new.write('%s.track.%r:\t%s\n' % (self.id, i, self.track[i]))
old.close()
new.close()
posix.rename(filename + '.new', filename)
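# Illustrative usage sketch mirroring the comment at the top of this
# module (never called at import time; requires SGI's readcd module and a
# recognized audio CD):
def _cdplayer_usage_sketch():
	import readcd
	r = readcd.Readcd()
	c = Cdplayer(r.gettrackinfo())
	c.title = 'Some title'
	c.track[1] = 'First track'
	c.write()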
|
dyoung418/tensorflow | refs/heads/master | tensorflow/contrib/eager/python/examples/resnet50/resnet50.py | 15 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ResNet50 model definition compatible with TensorFlow's eager execution.
Reference [Deep Residual Learning for Image
Recognition](https://arxiv.org/abs/1512.03385)
Adapted from tf.keras.applications.ResNet50. A notable difference is that the
model here outputs logits while the Keras model outputs probability.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import tensorflow as tf
import tensorflow.contrib.eager as tfe
class _IdentityBlock(tfe.Network):
"""_IdentityBlock is the block that has no conv layer at shortcut.
Args:
    kernel_size: the kernel size of the middle conv layer in the main path
    filters: list of integers, the filters of the 3 conv layers in the main path
stage: integer, current stage label, used for generating layer names
block: 'a','b'..., current block label, used for generating layer names
data_format: data_format for the input ('channels_first' or
'channels_last').
"""
def __init__(self, kernel_size, filters, stage, block, data_format):
super(_IdentityBlock, self).__init__(name='')
filters1, filters2, filters3 = filters
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
bn_axis = 1 if data_format == 'channels_first' else 3
self.conv2a = self.track_layer(
tf.layers.Conv2D(
filters1, (1, 1),
name=conv_name_base + '2a',
data_format=data_format))
self.bn2a = self.track_layer(
tf.layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2a'))
self.conv2b = self.track_layer(
tf.layers.Conv2D(
filters2,
kernel_size,
padding='same',
data_format=data_format,
name=conv_name_base + '2b'))
self.bn2b = self.track_layer(
tf.layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2b'))
self.conv2c = self.track_layer(
tf.layers.Conv2D(
filters3, (1, 1),
name=conv_name_base + '2c',
data_format=data_format))
self.bn2c = self.track_layer(
tf.layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2c'))
def call(self, input_tensor, training=False):
x = self.conv2a(input_tensor)
x = self.bn2a(x, training=training)
x = tf.nn.relu(x)
x = self.conv2b(x)
x = self.bn2b(x, training=training)
x = tf.nn.relu(x)
x = self.conv2c(x)
x = self.bn2c(x, training=training)
x += input_tensor
return tf.nn.relu(x)
class _ConvBlock(tfe.Network):
"""_ConvBlock is the block that has a conv layer at shortcut.
Args:
    kernel_size: the kernel size of the middle conv layer in the main path
    filters: list of integers, the filters of the 3 conv layers in the main path
stage: integer, current stage label, used for generating layer names
block: 'a','b'..., current block label, used for generating layer names
data_format: data_format for the input ('channels_first' or
'channels_last').
strides: strides for the convolution. Note that from stage 3, the first
conv layer at main path is with strides=(2,2), and the shortcut should
have strides=(2,2) as well.
"""
def __init__(self,
kernel_size,
filters,
stage,
block,
data_format,
strides=(2, 2)):
super(_ConvBlock, self).__init__(name='')
filters1, filters2, filters3 = filters
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
bn_axis = 1 if data_format == 'channels_first' else 3
self.conv2a = self.track_layer(
tf.layers.Conv2D(
filters1, (1, 1),
strides=strides,
name=conv_name_base + '2a',
data_format=data_format))
self.bn2a = self.track_layer(
tf.layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2a'))
self.conv2b = self.track_layer(
tf.layers.Conv2D(
filters2,
kernel_size,
padding='same',
name=conv_name_base + '2b',
data_format=data_format))
self.bn2b = self.track_layer(
tf.layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2b'))
self.conv2c = self.track_layer(
tf.layers.Conv2D(
filters3, (1, 1),
name=conv_name_base + '2c',
data_format=data_format))
self.bn2c = self.track_layer(
tf.layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2c'))
self.conv_shortcut = self.track_layer(
tf.layers.Conv2D(
filters3, (1, 1),
strides=strides,
name=conv_name_base + '1',
data_format=data_format))
self.bn_shortcut = self.track_layer(
tf.layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '1'))
def call(self, input_tensor, training=False):
x = self.conv2a(input_tensor)
x = self.bn2a(x, training=training)
x = tf.nn.relu(x)
x = self.conv2b(x)
x = self.bn2b(x, training=training)
x = tf.nn.relu(x)
x = self.conv2c(x)
x = self.bn2c(x, training=training)
shortcut = self.conv_shortcut(input_tensor)
shortcut = self.bn_shortcut(shortcut, training=training)
x += shortcut
return tf.nn.relu(x)
class ResNet50(tfe.Network):
"""Instantiates the ResNet50 architecture.
Args:
data_format: format for the image. Either 'channels_first' or
'channels_last'. 'channels_first' is typically faster on GPUs while
'channels_last' is typically faster on CPUs. See
https://www.tensorflow.org/performance/performance_guide#data_formats
name: Prefix applied to names of variables created in the model.
trainable: Is the model trainable? If true, performs backward
and optimization after call() method.
include_top: whether to include the fully-connected layer at the top of the
network.
pooling: Optional pooling mode for feature extraction when `include_top`
is `False`.
- `None` means that the output of the model will be the 4D tensor
output of the last convolutional layer.
- `avg` means that global average pooling will be applied to the output of
the last convolutional layer, and thus the output of the model will be
a 2D tensor.
- `max` means that global max pooling will be applied.
classes: optional number of classes to classify images into, only to be
specified if `include_top` is True.
Raises:
ValueError: in case of invalid argument for data_format.
"""
def __init__(self,
data_format,
name=None,
trainable=True,
include_top=True,
pooling=None,
classes=1000):
super(ResNet50, self).__init__(name='')
valid_channel_values = ('channels_first', 'channels_last')
if data_format not in valid_channel_values:
raise ValueError('Unknown data_format: %s. Valid values: %s' %
(data_format, valid_channel_values))
self.include_top = include_top
def conv_block(filters, stage, block, strides=(2, 2)):
l = _ConvBlock(
3,
filters,
stage=stage,
block=block,
data_format=data_format,
strides=strides)
return self.track_layer(l)
def id_block(filters, stage, block):
l = _IdentityBlock(
3, filters, stage=stage, block=block, data_format=data_format)
return self.track_layer(l)
self.conv1 = self.track_layer(
tf.layers.Conv2D(
64, (7, 7),
strides=(2, 2),
data_format=data_format,
padding='same',
name='conv1'))
bn_axis = 1 if data_format == 'channels_first' else 3
self.bn_conv1 = self.track_layer(
tf.layers.BatchNormalization(axis=bn_axis, name='bn_conv1'))
self.max_pool = self.track_layer(
tf.layers.MaxPooling2D((3, 3), strides=(2, 2), data_format=data_format))
self.l2a = conv_block([64, 64, 256], stage=2, block='a', strides=(1, 1))
self.l2b = id_block([64, 64, 256], stage=2, block='b')
self.l2c = id_block([64, 64, 256], stage=2, block='c')
self.l3a = conv_block([128, 128, 512], stage=3, block='a')
self.l3b = id_block([128, 128, 512], stage=3, block='b')
self.l3c = id_block([128, 128, 512], stage=3, block='c')
self.l3d = id_block([128, 128, 512], stage=3, block='d')
self.l4a = conv_block([256, 256, 1024], stage=4, block='a')
self.l4b = id_block([256, 256, 1024], stage=4, block='b')
self.l4c = id_block([256, 256, 1024], stage=4, block='c')
self.l4d = id_block([256, 256, 1024], stage=4, block='d')
self.l4e = id_block([256, 256, 1024], stage=4, block='e')
self.l4f = id_block([256, 256, 1024], stage=4, block='f')
self.l5a = conv_block([512, 512, 2048], stage=5, block='a')
self.l5b = id_block([512, 512, 2048], stage=5, block='b')
self.l5c = id_block([512, 512, 2048], stage=5, block='c')
self.avg_pool = self.track_layer(
tf.layers.AveragePooling2D(
(7, 7), strides=(7, 7), data_format=data_format))
if self.include_top:
self.fc1000 = self.track_layer(
tf.layers.Dense(classes, name='fc1000'))
else:
reduction_indices = [1, 2] if data_format == 'channels_last' else [2, 3]
reduction_indices = tf.constant(reduction_indices)
if pooling == 'avg':
self.global_pooling = functools.partial(
tf.reduce_mean,
reduction_indices=reduction_indices,
keep_dims=False)
elif pooling == 'max':
self.global_pooling = functools.partial(
tf.reduce_max, reduction_indices=reduction_indices, keep_dims=False)
else:
self.global_pooling = None
def call(self, input_tensor, training=False):
x = self.conv1(input_tensor)
x = self.bn_conv1(x, training=training)
x = tf.nn.relu(x)
x = self.max_pool(x)
x = self.l2a(x, training=training)
x = self.l2b(x, training=training)
x = self.l2c(x, training=training)
x = self.l3a(x, training=training)
x = self.l3b(x, training=training)
x = self.l3c(x, training=training)
x = self.l3d(x, training=training)
x = self.l4a(x, training=training)
x = self.l4b(x, training=training)
x = self.l4c(x, training=training)
x = self.l4d(x, training=training)
x = self.l4e(x, training=training)
x = self.l4f(x, training=training)
x = self.l5a(x, training=training)
x = self.l5b(x, training=training)
x = self.l5c(x, training=training)
x = self.avg_pool(x)
if self.include_top:
return self.fc1000(tf.layers.flatten(x))
elif self.global_pooling:
return self.global_pooling(x)
else:
return x
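# Illustrative usage sketch (not part of the model; never called at import
# time). Assumes eager execution was enabled first, e.g. via
# tfe.enable_eager_execution(); batch size and image shape are illustrative.
def _resnet50_usage_sketch():
  model = ResNet50(data_format='channels_last')
  images = tf.random_normal([2, 224, 224, 3])  # batch of 2 RGB images
  logits = model(images, training=False)       # shape [2, 1000]
  return logits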
|
AnishShah/tensorflow | refs/heads/master | tensorflow/contrib/estimator/python/estimator/multi_head.py | 19 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Abstractions for the head(s) of a model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.estimator import model_fn
from tensorflow.python.estimator.canned import head as head_lib
from tensorflow.python.estimator.canned import metric_keys
from tensorflow.python.estimator.export import export_output as export_output_lib
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import metrics as metrics_lib
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.summary import summary
from tensorflow.python.training import training_util
_DEFAULT_SERVING_KEY = signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
def multi_head(heads, head_weights=None):
"""Creates a `_Head` for multi-objective learning.
This class merges the output of multiple `_Head` objects.
Specifically:
* For training, sums losses of each head, calls `train_op_fn` with this
final loss.
* For eval, merges metrics by adding `head.name` suffix to the keys in eval
metrics, such as `precision/head1`, `precision/head2`.
* For prediction, merges predictions and updates keys in prediction dict to a
2-tuple, `(head.name, prediction_key)`. Merges `export_outputs` such that
by default the first head is served.
Usage:
```python
# In `input_fn` specify labels as a dict keyed by head name:
def input_fn():
features = ...
labels1 = ...
labels2 = ...
return features, {'head1': labels1, 'head2': labels2}
# In `model_fn`, specify logits as a dict keyed by head name:
def model_fn(features, labels, mode):
# Create simple heads and specify head name.
head1 = multi_class_head(n_classes=3, name='head1')
head2 = binary_classification_head(name='head2')
# Create multi-head from two simple heads.
head = multi_head([head1, head2])
# Create logits for each head, and combine them into a dict.
logits1, logits2 = logit_fn()
logits = {'head1': logits1, 'head2': logits2}
# Return the merged EstimatorSpec
return head.create_estimator_spec(..., logits=logits, ...)
# Create an estimator with this model_fn.
estimator = tf.estimator.Estimator(model_fn=model_fn)
estimator.train(input_fn=input_fn, steps=100)
```
Also supports `logits` as a `Tensor` of shape
`[D0, D1, ... DN, logits_dimension]`. It will split the `Tensor` along the
last dimension and distribute it appropriately among the heads. E.g.:
```python
def model_fn(features, labels, mode):
# Create simple heads and specify head name.
head1 = multi_class_head(n_classes=3, name='head1')
head2 = binary_classification_head(name='head2')
# Create multi-head from two simple heads.
head = multi_head([head1, head2])
# Create logits for the multihead.
logits = logit_fn(logits_dimension=head.logits_dimension)
# Return the merged EstimatorSpec
return head.create_estimator_spec(..., logits=logits, ...)
```
Args:
heads: List or tuple of `_Head` instances. All heads must have `name`
specified. The first head in the list is the default used at serving time.
head_weights: Optional list of weights, same length as `heads`. Used when
merging losses to calculate the weighted sum of losses from each head. If
`None`, all losses are weighted equally.
Returns:
    An instance of `_Head` that merges multiple heads.
Raises:
ValueError: If `heads` is empty.
ValueError: If any of the `heads` does not have `name` specified.
ValueError: If `heads` and `head_weights` have different size.
"""
if head_weights:
if len(head_weights) != len(heads):
raise ValueError(
'heads and head_weights must have the same size. '
'Given len(heads): {}. Given len(head_weights): {}.'.format(
len(heads), len(head_weights)))
if not heads:
raise ValueError('Must specify heads. Given: {}'.format(heads))
for head in heads:
if not head.name:
raise ValueError(
'All given heads must have name specified. '
'Given: {}'.format(head))
return _MultiHead(
heads=tuple(heads),
head_weights=tuple(head_weights) if head_weights else tuple())
def _no_op_train_fn(loss):
del loss
return control_flow_ops.no_op()
def _merge_losses(losses, head_weights=None):
"""Merges the given losses into one tensor."""
losses = tuple(losses)
with ops.name_scope(
'merge_losses', values=losses + (head_weights or tuple())):
if head_weights:
weighted_losses = []
for loss, weight in zip(losses, head_weights):
weighted_losses.append(math_ops.multiply(loss, weight))
else:
weighted_losses = losses
return math_ops.add_n(weighted_losses)
def _default_export_output(export_outputs, head_name):
"""Extracts the default export output from the given export_outputs dict."""
if len(export_outputs) == 1:
return next(six.itervalues(export_outputs))
for k, v in six.iteritems(export_outputs):
if k == _DEFAULT_SERVING_KEY:
return v
raise ValueError(
'{} did not specify default export_outputs. '
'Given: {} '
'Suggested fix: Use one of the heads in tf.contrib.estimator, or include '
'key {} in export_outputs.'.format(
head_name, export_outputs, _DEFAULT_SERVING_KEY))
class _MultiHead(head_lib._Head): # pylint:disable=protected-access
"""`_Head` for multi objective learning."""
def __init__(self, heads, head_weights):
self._logits_dimension = 0
for head in heads:
self._logits_dimension += head.logits_dimension
self._heads = heads
self._head_weights = head_weights
@property
def name(self):
return '_'.join([h.name for h in self._heads])
@property
def logits_dimension(self):
return self._logits_dimension
def create_loss(self, features, mode, logits, labels):
"""See `Head`."""
if isinstance(logits, dict):
logits_dict = logits
else:
logits_dict = self._split_logits(logits)
training_losses = []
labels_by_head = {}
unreduced_losses_by_head = {}
example_weights_by_head = {}
for i, head in enumerate(self._heads):
(training_loss, unreduced_loss,
weights, processed_labels) = head.create_loss(
features, mode, logits_dict[head.name], labels[head.name])
training_losses.append(training_loss)
labels_by_head[head.name] = processed_labels
if self._head_weights:
head_weight = self._head_weights[i]
unreduced_losses_by_head[head.name] = math_ops.multiply(
unreduced_loss, head_weight)
example_weights_by_head[head.name] = math_ops.multiply(
weights, head_weight)
else:
unreduced_losses_by_head[head.name] = unreduced_loss
example_weights_by_head[head.name] = weights
training_losses = tuple(training_losses)
with ops.name_scope(
'merge_losses',
values=training_losses + (self._head_weights or tuple())):
if self._head_weights:
head_weighted_training_losses = []
for training_loss, head_weight in zip(
training_losses, self._head_weights):
head_weighted_training_losses.append(
math_ops.multiply(training_loss, head_weight))
merged_training_loss = math_ops.add_n(head_weighted_training_losses)
else:
merged_training_loss = math_ops.add_n(training_losses)
return head_lib.LossSpec(
training_loss=merged_training_loss,
unreduced_loss=unreduced_losses_by_head,
weights=example_weights_by_head,
processed_labels=labels_by_head)
# TODO(b/65403806): Support regularization_losses arg.
def create_estimator_spec(
self, features, mode, logits, labels=None, optimizer=None,
train_op_fn=None):
"""See `_Head`."""
if isinstance(logits, dict):
logits_dict = logits
else:
logits_dict = self._split_logits(logits)
if labels and not isinstance(labels, dict):
raise ValueError('labels must be a dict. Given: {}'.format(labels))
all_estimator_spec = []
for head in self._heads:
head_name = head.name
all_estimator_spec.append(
head.create_estimator_spec(
features=features,
mode=mode,
logits=logits_dict[head_name],
labels=labels[head_name] if labels else None,
train_op_fn=_no_op_train_fn))
if mode == model_fn.ModeKeys.TRAIN:
spec = self._merge_train(
all_estimator_spec=all_estimator_spec,
optimizer=optimizer,
train_op_fn=train_op_fn)
with ops.name_scope(''):
summary.scalar(metric_keys.MetricKeys.LOSS, spec.loss)
return spec
if mode == model_fn.ModeKeys.PREDICT:
return self._merge_predict(all_estimator_spec)
if mode == model_fn.ModeKeys.EVAL:
return self._merge_eval(all_estimator_spec)
raise ValueError('mode={} unrecognized'.format(mode))
def _split_logits(self, logits):
"""Splits logits along the last dimension and returns a dict."""
logits_dict = {}
with ops.name_scope(None, 'split_logits', values=[logits]):
logits = ops.convert_to_tensor(logits)
batch_shape = array_ops.shape(logits)[:-1]
zeros_like_batch_shape = array_ops.zeros_like(batch_shape)
minus_ones_like_batch_shape = -1 * array_ops.ones_like(batch_shape)
begin_idx = 0
for head in self._heads:
begin_tensor = array_ops.concat(
[zeros_like_batch_shape, [begin_idx]], axis=0)
size_tensor = array_ops.concat(
[minus_ones_like_batch_shape, [head.logits_dimension]], axis=0)
logits_dict[head.name] = array_ops.slice(
logits, begin=begin_tensor, size=size_tensor)
begin_idx += head.logits_dimension
return logits_dict
def _merge_train(self, all_estimator_spec, optimizer, train_op_fn):
"""Merges list of `EstimatorSpec` for training.
Args:
all_estimator_spec: list of `EstimatorSpec` for the individual heads.
optimizer: `Optimizer` instance to create train op. See
`create_estimator_spec` documentation for more details.
train_op_fn: Function to create train op. Used if `optimizer` is `None`.
Returns:
`EstimatorSpec` that merges all heads for TRAIN.
Raises:
ValueError: If both `train_op_fn` and `optimizer` are `None` in TRAIN
mode.
"""
losses = []
metrics = {}
for spec in all_estimator_spec:
losses.append(spec.loss)
# Metric keys already contain head.name.
metrics.update(spec.eval_metric_ops or {})
loss = _merge_losses(losses, self._head_weights)
if optimizer is not None:
if train_op_fn is not None:
raise ValueError('train_op_fn and optimizer cannot both be set.')
train_op = optimizer.minimize(
loss, global_step=training_util.get_global_step())
elif train_op_fn is not None:
train_op = train_op_fn(loss)
else:
raise ValueError('train_op_fn and optimizer cannot both be None.')
return model_fn.EstimatorSpec(
mode=model_fn.ModeKeys.TRAIN,
loss=loss,
train_op=train_op,
eval_metric_ops=metrics)
def _merge_predict(self, all_estimator_spec):
"""Merges list of `EstimatorSpec` for prediction.
Args:
all_estimator_spec: list of `EstimatorSpec` for the individual heads.
Returns:
`EstimatorSpec` that merges all heads for PREDICT.
"""
predictions = {}
export_outputs = {
_DEFAULT_SERVING_KEY: _default_export_output(
all_estimator_spec[0].export_outputs,
self._heads[0].name),
}
merged_predict_outputs = {}
for head, spec in zip(self._heads, all_estimator_spec):
head_name = head.name
for k, v in six.iteritems(spec.export_outputs):
if k == _DEFAULT_SERVING_KEY:
key = head_name
else:
key = '%s/%s' % (head_name, k)
export_outputs[key] = v
if (k == head_lib._PREDICT_SERVING_KEY and # pylint:disable=protected-access
isinstance(v, export_output_lib.PredictOutput)):
for kp, vp in six.iteritems(v.outputs):
key = '%s/%s' % (head_name, kp)
merged_predict_outputs[key] = vp
for k, v in six.iteritems(spec.predictions):
predictions[(head_name, k)] = v
export_outputs[head_lib._PREDICT_SERVING_KEY] = ( # pylint:disable=protected-access
export_output_lib.PredictOutput(merged_predict_outputs))
return model_fn.EstimatorSpec(
mode=model_fn.ModeKeys.PREDICT,
predictions=predictions,
export_outputs=export_outputs)
def _merge_eval(self, all_estimator_spec):
"""Merges list of `EstimatorSpec` for eval.
Args:
all_estimator_spec: list of `EstimatorSpec` for the individual heads.
Returns:
`EstimatorSpec` that merges all heads for EVAL.
"""
predictions = {}
metrics = {}
losses = []
with ops.name_scope('merge_eval'):
for head, spec in zip(self._heads, all_estimator_spec):
losses.append(spec.loss)
head_name = head.name
# Loss metric is not added by default.
loss_name = head_lib._summary_key( # pylint:disable=protected-access
head_name, metric_keys.MetricKeys.LOSS)
metrics[loss_name] = metrics_lib.mean(spec.loss, name=loss_name)
# Metric keys already contain head.name.
metrics.update(spec.eval_metric_ops or {})
for k, v in six.iteritems(spec.predictions):
predictions[(head_name, k)] = v
loss = _merge_losses(losses, self._head_weights)
return model_fn.EstimatorSpec(
mode=model_fn.ModeKeys.EVAL,
predictions=predictions,
loss=loss,
eval_metric_ops=metrics)
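# Illustrative sketch (not part of the original module): how _split_logits
# partitions one row of concatenated logits across heads. Plain Python lists
# stand in for the tensor ops above; the head dimensions below are invented.
def _split_logits_sketch(logits_row, head_dims):
    """Split a flat logits row into consecutive per-head chunks."""
    chunks, begin = [], 0
    for dim in head_dims:
        chunks.append(logits_row[begin:begin + dim])
        begin += dim
    return chunks
# _split_logits_sketch([0.1, 0.9, 0.2, 0.3, 0.5], [2, 3])
# -> [[0.1, 0.9], [0.2, 0.3, 0.5]]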
|
hmpf/nav | refs/heads/master | python/nav/web/syslogger/forms.py | 2 | """
forms and functions used for syslogger in NAV
"""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms_foundation.layout import Layout, Fieldset, Row, Column, Field
from nav.models.logger import Priority, LoggerCategory, Origin, LogMessageType
DATEFORMAT = ("%Y-%m-%d %H:%M",)
def _choice_values(model, field_name):
"""
Generates a choice_values list to be used with ChoiceField etc.
:param model: django model
:param field_name: field to aggregate on
    :return: sorted list of (value, value) tuples, prefixed with ('', '(All)')
"""
choice_list = model.objects.values_list(field_name).distinct()
choices = [(choice[0], choice[0]) for choice in choice_list]
choices.sort()
choices.insert(0, ('', u'(All)'))
return choices
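# Illustrative sketch (not part of the original module): the list shape
# _choice_values builds, using invented values in place of a Django queryset.
def _choice_values_example():
    values = ['switch-2', 'switch-1']
    choices = [(v, v) for v in values]
    choices.sort()
    choices.insert(0, ('', u'(All)'))
    # -> [('', u'(All)'), ('switch-1', 'switch-1'), ('switch-2', 'switch-2')]
    return choices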
class LoggerGroupSearchForm(forms.Form):
"""LoggerSearchForm"""
facility = forms.ChoiceField(required=False)
priority = forms.ChoiceField(required=False)
mnemonic = forms.ChoiceField(required=False)
origin = forms.ChoiceField(required=False)
category = forms.ModelChoiceField(queryset=LoggerCategory.objects.all(),
required=False, empty_label=u'(All)')
timestamp_from = forms.DateTimeField(input_formats=DATEFORMAT)
timestamp_to = forms.DateTimeField(input_formats=DATEFORMAT)
show_log = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(LoggerGroupSearchForm, self).__init__(*args, **kwargs)
self.fields['facility'].choices = _choice_values(LogMessageType,
'facility')
self.fields['priority'].choices = _choice_values(Priority, 'keyword')
self.fields['mnemonic'].choices = _choice_values(LogMessageType,
'mnemonic')
self.fields['origin'].choices = _choice_values(Origin, 'name')
self.fields['timestamp_from'].widget.format = DATEFORMAT[0]
self.fields['timestamp_to'].widget.format = DATEFORMAT[0]
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.layout = Layout(
Row(
Column(
Fieldset(
'Filter <a href="http://www.cisco.com/en/US/docs/ios/system/messages/guide/sm_cnovr.html"><i class="fa fa-info-circle"></i></a>',
Row(
Column(Field('facility',
css_class='select2 medium-12'),
css_class='medium-12'),
Column(Field('priority', css_class='select2'),
css_class='medium-12'),
Column(Field('mnemonic', css_class='select2'),
css_class='medium-12'),
Column(Field('origin', css_class='select2'),
css_class='medium-12'),
Column(Field('category', css_class='select2'),
css_class='medium-12'),
Column('timestamp_from', css_class='medium-12'),
Column('timestamp_to', css_class='medium-12'),
Column('show_log', css_class='medium-12'),
),
),
css_class='medium-12'
),
),
)
|
eduNEXT/edx-platform | refs/heads/master | common/djangoapps/student/management/commands/manage_user.py | 4 | """
Management command `manage_user` is used to idempotently create or remove
Django users, set/unset permission bits, and associate groups by name.
"""
from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import is_password_usable, identify_hasher
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.translation import gettext as _
from openedx.core.djangoapps.user_authn.utils import generate_password
from common.djangoapps.student.models import UserProfile
def is_valid_django_hash(encoded):
"""
Starting with django 2.1, the function is_password_usable no longer checks whether encode
is a valid password created by a django hasher (hasher in PASSWORD_HASHERS setting)
Adding this function to create constant behavior as we upgrade django versions
"""
try:
identify_hasher(encoded)
except ValueError:
return False
return True
class Command(BaseCommand): # lint-amnesty, pylint: disable=missing-class-docstring
help = 'Creates the specified user, if it does not exist, and sets its groups.'
def add_arguments(self, parser):
parser.add_argument('username')
parser.add_argument('email')
parser.add_argument('--remove', dest='is_remove', action='store_true')
parser.add_argument('--superuser', dest='is_superuser', action='store_true')
parser.add_argument('--staff', dest='is_staff', action='store_true')
parser.add_argument('--unusable-password', dest='unusable_password', action='store_true')
parser.add_argument('--initial-password-hash', dest='initial_password_hash')
parser.add_argument('-g', '--groups', nargs='*', default=[])
def _maybe_update(self, user, attribute, new_value):
"""
DRY helper. If the specified attribute of the user differs from the
specified value, it will be updated.
"""
old_value = getattr(user, attribute)
if new_value != old_value:
self.stderr.write(
_('Setting {attribute} for user "{username}" to "{new_value}"').format(
attribute=attribute, username=user.username, new_value=new_value
)
)
setattr(user, attribute, new_value)
def _check_email_match(self, user, email):
"""
DRY helper.
Requiring the user to specify both username and email will help catch
certain issues, for example if the expected username has already been
taken by someone else.
"""
if user.email.lower() != email.lower():
# The passed email address doesn't match this username's email address.
# Assume a problem and fail.
raise CommandError(
_(
'Skipping user "{}" because the specified and existing email '
'addresses do not match.'
).format(user.username)
)
def _handle_remove(self, username, email): # lint-amnesty, pylint: disable=missing-function-docstring
try:
user = get_user_model().objects.get(username=username)
except get_user_model().DoesNotExist:
self.stderr.write(_('Did not find a user with username "{}" - skipping.').format(username))
return
self._check_email_match(user, email)
self.stderr.write(_('Removing user: "{}"').format(user))
user.delete()
@transaction.atomic
def handle(self, username, email, is_remove, is_staff, is_superuser, groups, # lint-amnesty, pylint: disable=arguments-differ
unusable_password, initial_password_hash, *args, **options):
if is_remove:
return self._handle_remove(username, email)
old_groups, new_groups = set(), set()
user, created = get_user_model().objects.get_or_create(
username=username,
defaults={'email': email}
)
if created:
if initial_password_hash:
if not (is_password_usable(initial_password_hash) and is_valid_django_hash(initial_password_hash)):
raise CommandError(f'The password hash provided for user {username} is invalid.')
user.password = initial_password_hash
else:
# Set the password to a random, unknown, but usable password
# allowing self-service password resetting. Cases where unusable
                # passwords are required should be explicit, and are handled below.
user.set_password(generate_password(length=25))
self.stderr.write(_('Created new user: "{}"').format(user))
else:
# NOTE, we will not update the email address of an existing user.
self.stderr.write(_('Found existing user: "{}"').format(user))
self._check_email_match(user, email)
old_groups = set(user.groups.all())
self._maybe_update(user, 'is_staff', is_staff)
self._maybe_update(user, 'is_superuser', is_superuser)
# Set unusable password if specified
if unusable_password and user.has_usable_password():
self.stderr.write(_('Setting unusable password for user "{}"').format(user))
user.set_unusable_password()
# Ensure the user has a profile
try:
__ = user.profile
except UserProfile.DoesNotExist:
UserProfile.objects.create(user=user)
self.stderr.write(_('Created new profile for user: "{}"').format(user))
# resolve the specified groups
for group_name in groups or set():
try:
group = Group.objects.get(name=group_name)
new_groups.add(group)
except Group.DoesNotExist:
# warn, but move on.
self.stderr.write(_('Could not find a group named "{}" - skipping.').format(group_name))
add_groups = new_groups - old_groups
remove_groups = old_groups - new_groups
self.stderr.write(
_(
'Adding user "{username}" to groups {group_names}'
).format(
username=user.username,
group_names=[g.name for g in add_groups]
)
)
self.stderr.write(
_(
'Removing user "{username}" from groups {group_names}'
).format(
username=user.username,
group_names=[g.name for g in remove_groups]
)
)
user.groups.set(new_groups)
user.save()
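# Illustrative usage sketch (not part of the original command): driving the
# command programmatically through Django's call_command API; the username,
# email and group name below are invented.
def _example_invocation():
    from django.core.management import call_command
    call_command('manage_user', 'jdoe', 'jdoe@example.com',
                 is_staff=True, groups=['course-staff'])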
|
openstack/cinder | refs/heads/master | cinder/volume/drivers/open_e/jovian_common/rest_proxy.py | 2 | # Copyright (c) 2020 Open-E, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Network connection handling class for JovianDSS driver."""
import json
from oslo_log import log as logging
from oslo_utils import netutils as o_netutils
import requests
import urllib3
from cinder import exception
from cinder.i18n import _
from cinder.utils import retry
from cinder.volume.drivers.open_e.jovian_common import exception as jexc
LOG = logging.getLogger(__name__)
class JovianRESTProxy(object):
"""Jovian REST API proxy."""
def __init__(self, config):
""":param config: list of config values."""
self.proto = 'http'
if config.get('driver_use_ssl', True):
self.proto = 'https'
self.hosts = config.get('san_hosts', [])
self.port = str(config.get('san_api_port', 82))
for host in self.hosts:
if o_netutils.is_valid_ip(host) is False:
err_msg = ('Invalid value of jovian_host property: '
'%(addr)s, IP address expected.' %
{'addr': host})
LOG.debug(err_msg)
raise exception.InvalidConfigurationValue(err_msg)
self.active_host = 0
self.delay = config.get('jovian_recovery_delay', 40)
self.pool = config.get('jovian_pool', 'Pool-0')
self.user = config.get('san_login', 'admin')
self.password = config.get('san_password', 'admin')
self.verify = config.get('driver_ssl_cert_verify', True)
self.cert = config.get('driver_ssl_cert_path')
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
self.session = self._get_session()
def _get_session(self):
"""Create and init new session object"""
session = requests.Session()
session.auth = (self.user, self.password)
session.headers.update({'Connection': 'keep-alive',
'Content-Type': 'application/json',
'Authorization': 'Basic'})
session.hooks['response'] = [JovianRESTProxy._handle_500]
session.verify = self.verify
if self.verify and self.cert:
session.verify = self.cert
return session
def _get_base_url(self):
"""Get url prefix with active host"""
url = ('%(proto)s://%(host)s:%(port)s/api/v3' % {
'proto': self.proto,
'host': self.hosts[self.active_host],
'port': self.port})
return url
def _next_host(self):
"""Set next host as active"""
self.active_host = (self.active_host + 1) % len(self.hosts)
def request(self, request_method, req, json_data=None):
"""Send request to the specific url.
:param request_method: GET, POST, DELETE
        :param req: request path appended to the base url
:param json_data: data
"""
out = None
for i in range(len(self.hosts)):
try:
addr = "{base}{req}".format(base=self._get_base_url(),
req=req)
LOG.debug("Sending %(t)s to %(addr)s",
{'t': request_method, 'addr': addr})
r = None
if json_data:
r = requests.Request(request_method,
addr,
data=json.dumps(json_data))
else:
r = requests.Request(request_method, addr)
pr = self.session.prepare_request(r)
out = self._send(pr)
except requests.exceptions.ConnectionError:
self._next_host()
continue
break
        LOG.debug("Getting %(data)s from %(t)s to %(addr)s",
{'data': out, 't': request_method, 'addr': addr})
return out
def pool_request(self, request_method, req, json_data=None):
"""Send request to the specific url.
:param request_method: GET, POST, DELETE
        :param req: request path, appended after the pool prefix
:param json_data: data
"""
req = "/pools/{pool}{req}".format(pool=self.pool, req=req)
addr = "{base}{req}".format(base=self._get_base_url(), req=req)
LOG.debug("Sending pool request %(t)s to %(addr)s",
{'t': request_method, 'addr': addr})
return self.request(request_method, req, json_data=json_data)
@retry((requests.exceptions.ConnectionError,
jexc.JDSSOSException),
interval=2,
backoff_rate=2,
retries=7)
def _send(self, pr):
"""Send prepared request
:param pr: prepared request
"""
ret = dict()
response_obj = self.session.send(pr)
ret['code'] = response_obj.status_code
try:
data = json.loads(response_obj.text)
ret["error"] = data.get("error")
ret["data"] = data.get("data")
except json.JSONDecodeError:
pass
return ret
@staticmethod
def _handle_500(resp, *args, **kwargs):
"""Handle OS error on a storage side"""
error = None
if resp.status_code == 500:
try:
data = json.loads(resp.text)
error = data.get("error")
except json.JSONDecodeError:
return
else:
return
if error:
if "class" in error:
if error["class"] == "opene.tools.scstadmin.ScstAdminError":
LOG.debug("ScstAdminError %(code)d %(msg)s",
{'code': error["errno"],
'msg': error["message"]})
raise jexc.JDSSOSException(_(error["message"]))
if error["class"] == "exceptions.OSError":
LOG.debug("OSError %(code)d %(msg)s",
{'code': error["errno"],
'msg': error["message"]})
raise jexc.JDSSOSException(_(error["message"]))
def get_active_host(self):
"""Return address of currently used host."""
return self.hosts[self.active_host]
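# Illustrative usage sketch (not part of the original module): build the proxy
# from a plain mapping mirroring the keys read in __init__ and list volumes in
# the configured pool. The address and credentials are invented and a
# reachable JovianDSS appliance is assumed.
def _example_pool_request():
    config = {'san_hosts': ['192.168.0.50'], 'san_api_port': 82,
              'san_login': 'admin', 'san_password': 'admin',
              'driver_use_ssl': True, 'jovian_pool': 'Pool-0'}
    proxy = JovianRESTProxy(config)
    # Issues GET https://192.168.0.50:82/api/v3/pools/Pool-0/volumes
    return proxy.pool_request('GET', '/volumes')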
|
victorbriz/rethinkdb | refs/heads/next | external/v8_3.30.33.16/build/gyp/test/mac/gyptest-bundle-resources.py | 193 | #!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies things related to bundle resources.
"""
import TestGyp
import os
import stat
import sys
def check_attribs(path, expected_exec_bit):
out_path = test.built_file_path(
os.path.join('resource.app/Contents/Resources', path), chdir=CHDIR)
in_stat = os.stat(os.path.join(CHDIR, path))
out_stat = os.stat(out_path)
if in_stat.st_mtime == out_stat.st_mtime:
test.fail_test()
if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
test.fail_test()
if sys.platform == 'darwin':
# set |match| to ignore build stderr output.
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
CHDIR = 'bundle-resources'
test.run_gyp('test.gyp', chdir=CHDIR)
test.build('test.gyp', test.ALL, chdir=CHDIR)
test.built_file_must_match('resource.app/Contents/Resources/secret.txt',
'abc\n', chdir=CHDIR)
test.built_file_must_match('source_rule.app/Contents/Resources/secret.txt',
'ABC\n', chdir=CHDIR)
test.built_file_must_match(
'resource.app/Contents/Resources/executable-file.sh',
'#!/bin/bash\n'
'\n'
'echo echo echo echo cho ho o o\n', chdir=CHDIR)
check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
check_attribs('secret.txt', expected_exec_bit=0)
# TODO(thakis): This currently fails with make.
if test.format != 'make':
test.built_file_must_match(
'resource_rule.app/Contents/Resources/secret.txt', 'ABC\n', chdir=CHDIR)
test.pass_test()
|
alon/servo | refs/heads/master | tests/wpt/web-platform-tests/fetch/api/resources/echo-content.py | 80 | def main(request, response):
headers = [("X-Request-Method", request.method),
("X-Request-Content-Length", request.headers.get("Content-Length", "NO")),
("X-Request-Content-Type", request.headers.get("Content-Type", "NO"))]
content = request.body
return headers, content
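# Illustrative client-side sketch (not part of the handler): the handler
# echoes the request body back and reflects the method and Content-* headers
# into X-Request-* response headers. `requests` is a third-party HTTP client
# and the URL below is invented; wptserve serves this handler in the tests.
def _example_client():
    import requests
    url = 'http://localhost:8000/fetch/api/resources/echo-content.py'
    resp = requests.post(url, data='hello')
    assert resp.text == 'hello'
    assert resp.headers['X-Request-Method'] == 'POST'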
|
kennjason/ursula | refs/heads/master | library/swift_ring.py | 16 | #!/usr/bin/python
#coding: utf-8 -*-
#
# (c) 2014, Craig Tracey <[email protected]>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
author: Craig Tracey
'''
import os
from subprocess import check_call, CalledProcessError
def _run_ring_command(module, command, builder_file, force, *args):
cmd = ['swift-ring-builder', builder_file, command] + list(args)
try:
        check_call(cmd)
    except Exception as e:
        module.fail_json(msg="Error running swift-ring-builder command "
                             "'%s': %s" % (" ".join(cmd), e))
return True
def swift_ring_create(module, builder_file, part_power, replicas,
min_part_hours, force=False):
return _run_ring_command(module, 'create', builder_file, force,
part_power, replicas, min_part_hours)
def swift_ring_add(module, builder_file, zone, ip, port, device_name, meta,
weight, force=False):
device_str = "z%(zone)s-%(ip)s:%(port)s/%(device_name)s_%(meta)s" % \
locals()
return _run_ring_command(module, 'add', builder_file, force,
device_str, weight)
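# Illustrative sketch (not part of the original module): the device string
# assembled above, shown for invented sample values.
def _example_device_str():
    zone, ip, port, device_name, meta = 1, '10.0.0.5', '6000', 'sdb', 'r1'
    # -> 'z1-10.0.0.5:6000/sdb_r1'
    return "z%(zone)s-%(ip)s:%(port)s/%(device_name)s_%(meta)s" % locals()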
def swift_ring_rebalance(module, builder_file, ring_type, force=False):
if not force and os.path.exists("/etc/swift/%s.ring.gz" % ring_type):
return False
return _run_ring_command(module, 'rebalance', builder_file, force)
def main():
module = AnsibleModule(
argument_spec=dict(
action=dict(required=True,
choices=['create', 'add', 'rebalance']),
ring_type=dict(required=True,
choices=['account', 'container', 'object']),
builder_file=dict(required=True),
part_power=dict(required=False),
replicas=dict(required=False),
min_part_hours=dict(required=False),
zone=dict(required=False),
ip=dict(required=False),
port=dict(required=False),
device_name=dict(required=False),
meta=dict(required=False),
weight=dict(required=False),
force=dict(required=False, default=False)
)
)
changed = False
params = module.params
if params['action'] == 'create':
changed = swift_ring_create(module,
params['builder_file'],
params['part_power'],
params['replicas'],
params['min_part_hours'],
params['force'])
elif params['action'] == 'add':
changed = swift_ring_add(module,
params['builder_file'],
params['zone'],
params['ip'],
params['port'],
params['device_name'],
params['meta'],
params['weight'],
params['force'])
elif params['action'] == 'rebalance':
changed = swift_ring_rebalance(module,
params['builder_file'],
params['ring_type'],
params['force'])
module.exit_json(changed=changed)
# this is magic, see lib/ansible/module_common.py
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
main()
|
gboudreau/CouchPotato | refs/heads/master | app/lib/provider/yarr/search.py | 6 | from app.config.cplog import CPLog
from app.lib.provider.yarr.sources.newznab import newznab
from app.lib.provider.yarr.sources.nzbmatrix import nzbMatrix
from app.lib.provider.yarr.sources.newzbin import newzbin
from app.lib.provider.yarr.sources.nzbs import nzbs
from app.lib.provider.yarr.sources.tpb import tpb
from app.lib.provider.yarr.sources.x264 import x264
from app.lib.provider.yarr.sources.nzbsrus import nzbsRus
from app.lib.qualities import Qualities
from urllib2 import URLError
import time
import urllib2
log = CPLog(__name__)
class Searcher():
sources = []
def __init__(self, config, debug):
self.config = config
self.debug = debug
for yarr in [newzbin, nzbMatrix, nzbs, newznab, tpb, x264, nzbsRus]:
m = yarr(config)
self.sources.append(m)
def find(self, movie, queue):
''' Find movie by name '''
log.debug('Searching for movie: %s' % movie.name)
qualities = Qualities()
for source in self.sources:
results = []
# find by main name
type = queue.qualityType
results.extend(source.find(movie, type, type))
# Search for alternative naming
for alt in qualities.getAlternatives(type):
results.extend(source.find(movie, alt, type))
            # search for highest score
highest = None
highestScore = -1000
if results:
for result in results:
if result.score > highestScore:
if not result.checkNZB or self.validNZB(result.url):
highest = result
highestScore = result.score
if highest:
return highest
return None
def validNZB(self, url):
if self.debug:
return True
try:
time.sleep(10)
log.info('Checking if %s is valid.' % url)
data = urllib2.urlopen(url, timeout = 10).info()
for check in ['nzb', 'download', 'torrent']:
if check in data.get('Content-Type'):
return True
return False
except (IOError, URLError):
return False
def findById(self, id):
''' Find movie by TheMovieDB ID '''
for source in self.sources:
result = source.findById(id)
if result:
return result
return []
def findByImdbId(self, id):
''' Find movie by IMDB ID '''
for source in self.sources:
result = source.findByImdbId(id)
if result:
return result
return []
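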
|
fitermay/intellij-community | refs/heads/master | python/testData/dotNet/expected.skeleton.Deep.py | 79 | # encoding: utf-8
# module SingleNameSpace.Some.Deep calls itself Deep
# from SingleNameSpace, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null
# by generatorXXX
# no doc
# no imports
# no functions
# classes
class WeHaveClass(object):
""" WeHaveClass() """
MyClass = None
|
40223246/2015cd_midterm | refs/heads/master | static/Brython3.1.1-20150328-091302/Lib/site.py | 805 | import sys
|
madisonhicks/blob | refs/heads/master | blob_server/database.py | 1 | from sqlalchemy import create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.orm import scoped_session, sessionmaker
from blob_server.config import (
SQL_USER,
SQL_DB_NAME,
SQL_DB_HOST,
SQL_DB_PORT,
SQL_PASSWORD
)
def db_uri():
"""
    Builds the database URI from the configured environment variables.
    :return: SQLAlchemy connection URI string
"""
return "postgresql://{}:{}@{}:{}/{}".format(
SQL_USER, SQL_PASSWORD, SQL_DB_HOST, SQL_DB_PORT, SQL_DB_NAME
)
def get_engine():
"""
    Creates the SQLAlchemy engine for the configured database.
    :return: SQLAlchemy Engine instance
"""
engine = create_engine(db_uri())
return engine
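# Illustrative sketch (not part of the original module): with SQL_USER='app',
# SQL_PASSWORD='secret', SQL_DB_HOST='localhost', SQL_DB_PORT='5432' and
# SQL_DB_NAME='blob', db_uri() evaluates to
# 'postgresql://app:secret@localhost:5432/blob'. A hedged end-to-end sketch
# (requires a reachable PostgreSQL server; get_session_registry is defined
# just below):
def _example_session():
    Session = get_session_registry(get_engine())
    return Session()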
def get_session_registry(engine: Engine):
"""
    :param engine: engine created by get_engine()
    :return: scoped Session registry
"""
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
return Session |
KenV99/service.kodi.callbacks | refs/heads/master | resources/lib/watchdog/utils/unicode_paths.py | 17 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Will Bond <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
from watchdog.utils import platform
try:
# Python 2
str_cls = unicode
bytes_cls = str
except NameError:
# Python 3
str_cls = str
bytes_cls = bytes
# This is used by Linux when the locale seems to be improperly set. UTF-8 tends
# to be the encoding used by all distros, so this is a good fallback.
fs_fallback_encoding = 'utf-8'
fs_encoding = sys.getfilesystemencoding() or fs_fallback_encoding
def encode(path):
if isinstance(path, str_cls):
try:
path = path.encode(fs_encoding, 'strict')
except UnicodeEncodeError:
if not platform.is_linux():
raise
path = path.encode(fs_fallback_encoding, 'strict')
return path
def decode(path):
if isinstance(path, bytes_cls):
try:
path = path.decode(fs_encoding, 'strict')
except UnicodeDecodeError:
if not platform.is_linux():
raise
path = path.decode(fs_fallback_encoding, 'strict')
return path
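# Illustrative sketch (not part of the original module): encode() and decode()
# round-trip any path representable in the filesystem encoding.
if __name__ == '__main__':
    sample = u'caf\u00e9/notes.txt'
    assert decode(encode(sample)) == sample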
|
abaldwin88/roamer | refs/heads/master | roamer/edit_directory.py | 1 | """
Represents a directory after the user has submitted edits
"""
from collections import Counter
from roamer.entry import Entry
class EditDirectory(object):
def __init__(self, path, content):
self.path = path
self.entries = {}
self.process_lines(content)
self.handle_duplicate_names()
def process_lines(self, content):
for line in content.splitlines():
name, digest = process_line(line)
if name is None:
continue
entry = Entry(name, self.path, digest)
if digest in self.entries:
self.entries[digest].append(entry)
else:
self.entries[digest] = [entry]
def handle_duplicate_names(self):
for entries in self.entries.values():
entry_names = [entry.name for entry in entries]
entry_counts = Counter(entry_names)
for entry_name, count in entry_counts.items():
for i in range(count - 1):
duplicate_entry = next(entry for entry in entries if entry.name == entry_name)
duplicate_entry.append_to_name('_copy_%s' % str(i + 1))
def find(self, digest):
return self.entries.get(digest)
def process_line(line):
columns = line.split('|')
name = columns[0]
if name.isspace() or name == '':
name = None
elif name[-1] == ' ':
name = name[:-1]
if name and name[0] == '"':
        # Lines beginning with a double quote are treated as comments
name = None
if len(columns) == 1:
digest = None
else:
digest = columns[1].replace(' ', '')
if digest == '':
digest = None
return name, digest
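# Illustrative sketch (not part of the original module): how process_line
# interprets a few edited-buffer lines; the sample lines are invented.
if __name__ == '__main__':
    assert process_line('notes.txt | a1b2') == ('notes.txt', 'a1b2')
    assert process_line('notes.txt') == ('notes.txt', None)
    assert process_line('" comment | a1b2') == (None, 'a1b2')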
|
daviskirk/climatecontrol | refs/heads/master | climatecontrol/cli_utils.py | 1 | """CLI utils for easy command line extras."""
import click
from climatecontrol import core
def click_settings_file_option(
settings_obj: core.Climate, click_obj=click, option_name="settings", **kw
):
"""Build a `click` option decorator.
Args:
settings_obj: settings object to load configuration into.
        click_obj: object providing the ``option`` decorator; defaults to
            the ``click`` module.
Example:
Given a command line script `cli.py`:
.. code-block:: python
import click
from climatecontrol import core, cli_utils
            settings_map = core.Climate(env_prefix='TEST_STUFF')
@click.command()
@cli_utils.click_settings_file_option(settings_map)
def tmp_cli():
pass
And running the script:
.. code-block:: bash
python cli.py --settings 'my_settings_file.yaml'
will load settings from `my_settings_file.yaml` into the `settings_map`
object which can then be used in the script.
"""
def validate(ctx, param, value):
if value:
settings_obj.settings_files = value
settings_obj.update()
option_kwargs = dict(
help="Settings file path for loading settings from file.",
callback=validate,
type=click.Path(exists=True, dir_okay=False, resolve_path=True),
expose_value=False,
is_eager=True,
multiple=True,
)
option_kwargs.update(kw)
option = click_obj.option(
"--{}".format(option_name), "-{}".format(option_name[0]), **option_kwargs
)
return option
|
unioslo/cerebrum | refs/heads/master | Cerebrum/group/__init__.py | 2 | # -*- coding: utf-8 -*-
# Copyright 2019 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
|
indashnet/InDashNet.Open.UN2000 | refs/heads/master | android/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/common/checkout/scm/scm_mock.py | 1 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.common.checkout.scm import CommitMessage
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.executive_mock import MockExecutive
class MockSCM(object):
executable_name = "MockSCM"
def __init__(self, filesystem=None, executive=None):
self.checkout_root = "/mock-checkout"
self.added_paths = set()
self._filesystem = filesystem or MockFileSystem()
self._executive = executive or MockExecutive()
def add(self, destination_path, return_exit_code=False):
self.add_list([destination_path], return_exit_code)
def add_list(self, destination_paths, return_exit_code=False):
self.added_paths.update(set(destination_paths))
if return_exit_code:
return 0
def has_working_directory_changes(self):
return False
def discard_working_directory_changes(self):
pass
def ensure_cleanly_tracking_remote_master(self):
pass
def current_branch(self):
return "mock-branch-name"
def checkout_branch(self, name):
pass
def create_clean_branch(self, name):
pass
def delete_branch(self, name):
pass
def supports_local_commits(self):
return True
def has_local_commits(self):
return False
def discard_local_commits(self):
pass
def discard_local_changes(self):
pass
def exists(self, path):
# TestRealMain.test_real_main (and several other rebaseline tests) are sensitive to this return value.
# We should make those tests more robust, but for now we just return True always (since no test needs otherwise).
return True
def absolute_path(self, *comps):
return self._filesystem.join(self.checkout_root, *comps)
def changed_files(self, git_commit=None):
return ["MockFile1"]
def changed_files_for_revision(self, revision):
return ["MockFile1"]
def head_svn_revision(self):
return '1234'
def svn_revision(self, path):
return '5678'
def timestamp_of_revision(self, path, revision):
return '2013-02-01 08:48:05 +0000'
def create_patch(self, git_commit, changed_files=None):
return "Patch1"
def commit_ids_from_commitish_arguments(self, args):
return ["Commitish1", "Commitish2"]
def committer_email_for_revision(self, revision):
return "[email protected]"
def commit_locally_with_message(self, message, commit_all_working_directory_changes=True):
pass
def commit_with_message(self, message, username=None, password=None, git_commit=None, force_squash=False, changed_files=None):
pass
def merge_base(self, git_commit):
return None
def commit_message_for_local_commit(self, commit_id):
if commit_id == "Commitish1":
return CommitMessage("CommitMessage1\n" \
"https://bugs.example.org/show_bug.cgi?id=50000\n")
if commit_id == "Commitish2":
return CommitMessage("CommitMessage2\n" \
"https://bugs.example.org/show_bug.cgi?id=50001\n")
raise Exception("Bogus commit_id in commit_message_for_local_commit.")
def diff_for_file(self, path, log=None):
return path + '-diff'
def diff_for_revision(self, revision):
return "DiffForRevision%s\nhttp://bugs.webkit.org/show_bug.cgi?id=12345" % revision
def show_head(self, path):
return path
def svn_revision_from_commit_text(self, commit_text):
return "49824"
def svn_revision_from_git_commit(self, git_commit):
if git_commit == '6469e754a1':
return 1234
if git_commit == '624c3081c0':
return 5678
if git_commit == '624caaaaaa':
return 10000
return None
def delete(self, path):
return self.delete_list([path])
def delete_list(self, paths):
if not self._filesystem:
return
for path in paths:
if self._filesystem.exists(path):
self._filesystem.remove(path)
def move(self, origin, destination):
if self._filesystem:
self._filesystem.move(self.absolute_path(origin), self.absolute_path(destination))
|
alrusdi/lettuce | refs/heads/master | tests/integration/lib/Django-1.2.5/tests/regressiontests/templates/views.py | 59 | # Fake views for testing url reverse lookup
def index(request):
pass
def client(request, id):
pass
def client_action(request, id, action):
pass
def client2(request, tag):
pass
|
tkingless/webtesting | refs/heads/master | venvs/dev/lib/python2.7/sre.py | 4 | /usr/lib/python2.7/sre.py |
bbengfort/inigo | refs/heads/master | tests/fs_tests.py | 1 | # tests.fs_tests
# Testing for the fs package in inigo
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Sun Jun 14 20:43:36 2015 -0400
#
# Copyright (C) 2015 Windsor View Corporation
# For license information, see LICENSE.txt
#
# ID: fs_tests.py [] [email protected] $
"""
Testing for the fs package in inigo.
"""
##########################################################################
## Imports
##########################################################################
import tempfile
import unittest
##########################################################################
## Test Cases
##########################################################################
|
JuliaSprenger/python-neo | refs/heads/master | neo/test/coretest/test_epoch.py | 2 | """
Tests of the neo.core.epoch.Epoch class
"""
import unittest
import warnings
from copy import deepcopy
import numpy as np
import quantities as pq
import pickle
import os
from numpy.testing import assert_array_equal
from neo.core.dataobject import ArrayDict
try:
from IPython.lib.pretty import pretty
except ImportError as err:
HAVE_IPYTHON = False
else:
HAVE_IPYTHON = True
from neo.core.epoch import Epoch
from neo.core import Segment
from neo.test.tools import (assert_neo_object_is_compliant, assert_arrays_equal,
assert_arrays_almost_equal, assert_same_sub_schema,
assert_same_attributes, assert_same_annotations,
assert_same_array_annotations)
from neo.test.generate_datasets import (get_fake_value, get_fake_values, fake_neo,
TEST_ANNOTATIONS)
class Test__generate_datasets(unittest.TestCase):
def setUp(self):
np.random.seed(0)
self.annotations = {
str(x): TEST_ANNOTATIONS[x] for x in range(len(TEST_ANNOTATIONS))}
def test__get_fake_values(self):
self.annotations['seed'] = 0
times = get_fake_value('times', pq.Quantity, seed=0, dim=1)
durations = get_fake_value('durations', pq.Quantity, seed=1, dim=1)
labels = get_fake_value('labels', np.ndarray, seed=2, dim=1, dtype='U')
name = get_fake_value('name', str, seed=3, obj=Epoch)
description = get_fake_value('description', str, seed=4, obj='Epoch')
file_origin = get_fake_value('file_origin', str)
arr_ann = get_fake_value('array_annotations', dict, seed=6, obj=Epoch, n=5)
attrs1 = {'name': name, 'description': description, 'file_origin': file_origin}
attrs2 = attrs1.copy()
attrs2.update(self.annotations)
attrs2['array_annotations'] = arr_ann
res11 = get_fake_values(Epoch, annotate=False, seed=0)
res12 = get_fake_values('Epoch', annotate=False, seed=0)
res21 = get_fake_values(Epoch, annotate=True, seed=0)
res22 = get_fake_values('Epoch', annotate=True, seed=0)
assert_arrays_equal(res11.pop('times'), times)
assert_arrays_equal(res12.pop('times'), times)
assert_arrays_equal(res21.pop('times'), times)
assert_arrays_equal(res22.pop('times'), times)
assert_arrays_equal(res11.pop('durations'), durations)
assert_arrays_equal(res12.pop('durations'), durations)
assert_arrays_equal(res21.pop('durations'), durations)
assert_arrays_equal(res22.pop('durations'), durations)
assert_arrays_equal(res11.pop('labels'), labels)
assert_arrays_equal(res12.pop('labels'), labels)
assert_arrays_equal(res21.pop('labels'), labels)
assert_arrays_equal(res22.pop('labels'), labels)
self.assertEqual(res11, attrs1)
self.assertEqual(res12, attrs1)
# Array annotations need to be compared separately
# because numpy arrays define equality differently
arr_ann_res21 = res21.pop('array_annotations')
arr_ann_attrs2 = attrs2.pop('array_annotations')
self.assertEqual(res21, attrs2)
assert_arrays_equal(arr_ann_res21['valid'], arr_ann_attrs2['valid'])
assert_arrays_equal(arr_ann_res21['number'], arr_ann_attrs2['number'])
arr_ann_res22 = res22.pop('array_annotations')
self.assertEqual(res22, attrs2)
assert_arrays_equal(arr_ann_res22['valid'], arr_ann_attrs2['valid'])
assert_arrays_equal(arr_ann_res22['number'], arr_ann_attrs2['number'])
def test__fake_neo__cascade(self):
self.annotations['seed'] = None
obj_type = Epoch
cascade = True
res = fake_neo(obj_type=obj_type, cascade=cascade)
self.assertTrue(isinstance(res, Epoch))
assert_neo_object_is_compliant(res)
self.assertEqual(res.annotations, self.annotations)
def test__fake_neo__nocascade(self):
self.annotations['seed'] = None
obj_type = 'Epoch'
cascade = False
res = fake_neo(obj_type=obj_type, cascade=cascade)
self.assertTrue(isinstance(res, Epoch))
assert_neo_object_is_compliant(res)
self.assertEqual(res.annotations, self.annotations)
class TestEpoch(unittest.TestCase):
def setUp(self):
self.params = {'test0': 'y1', 'test1': ['deeptest'], 'test2': True}
self.seg = Segment()
self.epc = Epoch(times=[10, 20, 30, 40, 50] * pq.s, durations=[10, 5, 7, 14, 9] * pq.ms,
labels=np.array(['btn0', 'btn1', 'btn2', 'btn0', 'btn3'], dtype='S'),
**self.params)
self.epc.segment = self.seg
def test_Epoch_creation(self):
params = {'test2': 'y1', 'test3': True}
arr_ann = {'names': ['a', 'b', 'c'], 'index': np.arange(10, 13)}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
assert_arrays_equal(epc.times, [1.1, 1.5, 1.7] * pq.ms)
assert_arrays_equal(epc.durations, [20, 40, 60] * pq.ns)
assert_arrays_equal(epc.labels,
np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'))
self.assertEqual(epc.name, 'test')
self.assertEqual(epc.description, 'tester')
self.assertEqual(epc.file_origin, 'test.file')
self.assertEqual(epc.annotations['test0'], [1, 2])
self.assertEqual(epc.annotations['test1'], 1.1)
self.assertEqual(epc.annotations['test2'], 'y1')
self.assertTrue(epc.annotations['test3'])
assert_arrays_equal(epc.array_annotations['names'], np.array(['a', 'b', 'c']))
assert_arrays_equal(epc.array_annotations['index'], np.arange(10, 13))
self.assertIsInstance(epc.array_annotations, ArrayDict)
def test_Epoch_invalid_times_dimension(self):
data2d = np.array([1, 2, 3, 4]).reshape((4, -1))
durations = np.array([1, 1, 1, 1])
self.assertRaises(ValueError, Epoch, times=data2d * pq.s, durations=durations)
def test_Epoch_creation_invalid_durations_labels(self):
self.assertRaises(ValueError, Epoch, [1.1, 1.5, 1.7] * pq.ms,
durations=[20, 40, 60, 80] * pq.ns)
self.assertRaises(ValueError, Epoch, [1.1, 1.5, 1.7] * pq.ms,
durations=[20, 40, 60] * pq.ns,
labels=["A", "B"])
def test_Epoch_creation_scalar_duration(self):
# test with scalar for durations
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=20 * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'))
assert_neo_object_is_compliant(epc)
assert_arrays_equal(epc.times, [1.1, 1.5, 1.7] * pq.ms)
assert_arrays_equal(epc.durations, [20, 20, 20] * pq.ns)
self.assertEqual(epc.durations.size, 3)
assert_arrays_equal(epc.labels,
np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'))
def test_Epoch_creation_from_lists(self):
epc = Epoch([1.1, 1.5, 1.7],
[20.0, 20.0, 20.0],
['test event 1', 'test event 2', 'test event 3'],
units=pq.ms)
assert_arrays_equal(epc.times, [1.1, 1.5, 1.7] * pq.ms)
assert_arrays_equal(epc.durations, [20.0, 20.0, 20.0] * pq.ms)
assert_arrays_equal(epc.labels,
np.array(['test event 1', 'test event 2', 'test event 3']))
def test_Epoch_repr(self):
params = {'test2': 'y1', 'test3': True}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
        targ = ('<Epoch: test epoch 1@1.1 ms for 20.0 ns, '
                + 'test epoch 2@1.5 ms for 40.0 ns, '
                + 'test epoch 3@1.7 ms for 60.0 ns>')
res = repr(epc)
self.assertEqual(targ, res)
def test_Epoch_merge(self):
params1 = {'test2': 'y1', 'test3': True}
params2 = {'test2': 'no', 'test4': False}
        paramstarg = {'test2': 'y1;no', 'test3': True, 'test4': False}
arr_ann1 = {'index': np.arange(10, 13)}
arr_ann2 = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epc1 = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.us,
labels=np.array(['test epoch 1 1', 'test epoch 1 2', 'test epoch 1 3'],
dtype='U'), name='test', description='tester 1',
file_origin='test.file', test1=1, array_annotations=arr_ann1, **params1)
epc2 = Epoch([2.1, 2.5, 2.7] * pq.us, durations=[3, 5, 7] * pq.ms,
labels=np.array(['test epoch 2 1', 'test epoch 2 2', 'test epoch 2 3'],
dtype='U'), name='test', description='tester 2',
file_origin='test.file', test1=1, array_annotations=arr_ann2, **params2)
epctarg = Epoch([1.1, 1.5, 1.7, .0021, .0025, .0027] * pq.ms,
durations=[20, 40, 60, 3000, 5000, 7000] * pq.us,
labels=np.array(['test epoch 1 1', 'test epoch 1 2', 'test epoch 1 3',
'test epoch 2 1', 'test epoch 2 2', 'test epoch 2 3'],
dtype='U'),
name='test',
description='merge(tester 1, tester 2)', file_origin='test.file',
array_annotations={'index': [10, 11, 12, 0, 1, 2]}, test1=1, **paramstarg)
assert_neo_object_is_compliant(epc1)
assert_neo_object_is_compliant(epc2)
assert_neo_object_is_compliant(epctarg)
with warnings.catch_warnings(record=True) as w:
epcres = epc1.merge(epc2)
self.assertTrue(len(w), 1)
self.assertEqual(w[0].category, UserWarning)
self.assertSequenceEqual(str(w[0].message), "The following array annotations were "
"omitted, because they were only present"
" in one of the merged objects: "
"[] from the one that was merged "
"into and ['test'] from the one that "
"was merged into the other")
assert_neo_object_is_compliant(epcres)
assert_same_sub_schema(epctarg, epcres)
# Remove this, when array_annotations are added to assert_same_sub_schema
assert_arrays_equal(epcres.array_annotations['index'], np.array([10, 11, 12, 0, 1, 2]))
self.assertTrue('test' not in epcres.array_annotations)
self.assertIsInstance(epcres.array_annotations, ArrayDict)
def test_set_labels_duration(self):
epc = Epoch([1.1, 1.5, 1.7] * pq.ms,
durations=20 * pq.ns,
labels=['A', 'B', 'C'])
assert_array_equal(epc.durations.magnitude, np.array([20, 20, 20]))
epc.durations = [20.0, 21.0, 22.0] * pq.ns
assert_array_equal(epc.durations.magnitude, np.array([20, 21, 22]))
self.assertRaises(ValueError, setattr, epc, "durations", [25.0, 26.0] * pq.ns)
assert_array_equal(epc.labels, np.array(['A', 'B', 'C']))
epc.labels = ['D', 'E', 'F']
assert_array_equal(epc.labels, np.array(['D', 'E', 'F']))
self.assertRaises(ValueError, setattr, epc, "labels", ['X', 'Y'])
def test__children(self):
params = {'test2': 'y1', 'test3': True}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
segment = Segment(name='seg1')
segment.epochs = [epc]
segment.create_many_to_one_relationship()
self.assertEqual(epc._single_parent_objects, ('Segment',))
self.assertEqual(epc._multi_parent_objects, ())
self.assertEqual(epc._single_parent_containers, ('segment',))
self.assertEqual(epc._multi_parent_containers, ())
self.assertEqual(epc._parent_objects, ('Segment',))
self.assertEqual(epc._parent_containers, ('segment',))
self.assertEqual(len(epc.parents), 1)
self.assertEqual(epc.parents[0].name, 'seg1')
assert_neo_object_is_compliant(epc)
@unittest.skipUnless(HAVE_IPYTHON, "requires IPython")
def test__pretty(self):
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file')
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
prepr = pretty(epc)
targ = ("Epoch\nname: '%s'\ndescription: '%s'\nannotations: %s"
"" % (epc.name, epc.description, pretty(epc.annotations)))
self.assertEqual(prepr, targ)
def test__time_slice(self):
arr_ann = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epc = Epoch(times=[10, 20, 30] * pq.s, durations=[10, 5, 7] * pq.ms,
labels=np.array(['btn0', 'btn1', 'btn2'], dtype='U'), foo='bar',
array_annotations=arr_ann)
epc2 = epc.time_slice(10 * pq.s, 20 * pq.s)
assert_arrays_equal(epc2.times, [10, 20] * pq.s)
assert_arrays_equal(epc2.durations, [10, 5] * pq.ms)
assert_arrays_equal(epc2.labels, np.array(['btn0', 'btn1'], dtype='U'))
self.assertEqual(epc.annotations, epc2.annotations)
assert_arrays_equal(epc2.array_annotations['index'], np.arange(2))
assert_arrays_equal(epc2.array_annotations['test'], np.array(['a', 'b']))
self.assertIsInstance(epc2.array_annotations, ArrayDict)
def test_time_slice2(self):
params = {'test2': 'y1', 'test3': True}
arr_ann = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
targ = Epoch([1.5] * pq.ms, durations=[40] * pq.ns,
labels=np.array(['test epoch 2'], dtype='U'), name='test',
description='tester', file_origin='test.file', test1=1,
array_annotations={'index': [1], 'test': ['b']}, **params)
targ.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(targ)
t_start = 1.2
t_stop = 1.6
result = epc.time_slice(t_start, t_stop)
assert_arrays_equal(result.times, targ.times)
assert_arrays_equal(result.durations, targ.durations)
assert_arrays_equal(result.labels, targ.labels)
self.assertEqual(result.name, targ.name)
self.assertEqual(result.description, targ.description)
self.assertEqual(result.file_origin, targ.file_origin)
self.assertEqual(result.annotations['test0'], targ.annotations['test0'])
self.assertEqual(result.annotations['test1'], targ.annotations['test1'])
self.assertEqual(result.annotations['test2'], targ.annotations['test2'])
assert_arrays_equal(result.array_annotations['index'], np.array([1]))
assert_arrays_equal(result.array_annotations['test'], np.array(['b']))
self.assertIsInstance(result.array_annotations, ArrayDict)
def test__time_slice_deepcopy_annotations(self):
params1 = {'test0': 'y1', 'test1': ['deeptest'], 'test2': True}
self.epc.annotate(**params1)
        # time_slice the epoch, keep the sliced times
t_start = 15 * pq.s
t_stop = 45 * pq.s
result = self.epc.time_slice(t_start, t_stop)
# Change annotations of original
params2 = {'test0': 'y2', 'test2': False}
self.epc.annotate(**params2)
self.epc.annotations['test1'][0] = 'shallowtest'
self.assertNotEqual(self.epc.annotations['test0'], result.annotations['test0'])
self.assertNotEqual(self.epc.annotations['test1'], result.annotations['test1'])
self.assertNotEqual(self.epc.annotations['test2'], result.annotations['test2'])
# Change annotations of result
params3 = {'test0': 'y3'}
result.annotate(**params3)
result.annotations['test1'][0] = 'shallowtest2'
self.assertNotEqual(self.epc.annotations['test0'], result.annotations['test0'])
self.assertNotEqual(self.epc.annotations['test1'], result.annotations['test1'])
self.assertNotEqual(self.epc.annotations['test2'], result.annotations['test2'])
def test__time_slice_deepcopy_array_annotations(self):
length = self.epc.shape[-1]
params1 = {'test0': ['y{}'.format(i) for i in range(length)],
'test1': ['deeptest' for i in range(length)],
'test2': [(-1)**i > 0 for i in range(length)]}
self.epc.array_annotate(**params1)
        # time_slice the epoch, keep the sliced times
t_start = 15 * pq.s
t_stop = 45 * pq.s
result = self.epc.time_slice(t_start, t_stop)
# Change annotations of original
params2 = {'test0': ['x{}'.format(i) for i in range(length)],
'test2': [(-1) ** (i + 1) > 0 for i in range(length)]}
self.epc.array_annotate(**params2)
self.epc.array_annotations['test1'][2] = 'shallowtest'
self.assertFalse(all(self.epc.array_annotations['test0'][1:4]
== result.array_annotations['test0']))
self.assertFalse(all(self.epc.array_annotations['test1'][1:4]
== result.array_annotations['test1']))
self.assertFalse(all(self.epc.array_annotations['test2'][1:4]
== result.array_annotations['test2']))
# Change annotations of result
params3 = {'test0': ['z{}'.format(i) for i in range(1, 4)]}
result.array_annotate(**params3)
result.array_annotations['test1'][1] = 'shallow2'
self.assertFalse(all(self.epc.array_annotations['test0'][1:4]
== result.array_annotations['test0']))
self.assertFalse(all(self.epc.array_annotations['test1'][1:4]
== result.array_annotations['test1']))
self.assertFalse(all(self.epc.array_annotations['test2'][1:4]
== result.array_annotations['test2']))
def test__time_slice_deepcopy_data(self):
result = self.epc.time_slice(None, None)
# Change values of original array
self.epc[2] = 7.3*self.epc.units
self.assertFalse(all(self.epc == result))
# Change values of sliced array
result[3] = 9.5*result.units
self.assertFalse(all(self.epc == result))
def test_time_slice_out_of_boundries(self):
params = {'test2': 'y1', 'test3': True}
arr_ann = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
targ = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
targ.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(targ)
t_start = 0.0001
t_stop = 30
result = epc.time_slice(t_start, t_stop)
assert_arrays_equal(result.times, targ.times)
assert_arrays_equal(result.durations, targ.durations)
assert_arrays_equal(result.labels, targ.labels)
self.assertEqual(result.name, targ.name)
self.assertEqual(result.description, targ.description)
self.assertEqual(result.file_origin, targ.file_origin)
self.assertEqual(result.annotations['test0'], targ.annotations['test0'])
self.assertEqual(result.annotations['test1'], targ.annotations['test1'])
self.assertEqual(result.annotations['test2'], targ.annotations['test2'])
assert_arrays_equal(result.array_annotations['index'], np.array(arr_ann['index']))
assert_arrays_equal(result.array_annotations['test'], np.array(arr_ann['test']))
self.assertIsInstance(result.array_annotations, ArrayDict)
def test_time_slice_empty(self):
params = {'test2': 'y1', 'test3': True}
epc = Epoch([] * pq.ms, durations=[] * pq.ns, labels=np.array([], dtype='U'), name='test',
description='tester', file_origin='test.file', test1=1, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
targ = Epoch([] * pq.ms, durations=[] * pq.ns, labels=np.array([], dtype='U'), name='test',
description='tester', file_origin='test.file', test1=1, **params)
targ.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(targ)
t_start = 1.2
t_stop = 1.6
result = epc.time_slice(t_start, t_stop)
assert_arrays_equal(result.times, targ.times)
assert_arrays_equal(result.durations, targ.durations)
assert_arrays_equal(result.labels, targ.labels)
self.assertEqual(result.name, targ.name)
self.assertEqual(result.description, targ.description)
self.assertEqual(result.file_origin, targ.file_origin)
self.assertEqual(result.annotations['test0'], targ.annotations['test0'])
self.assertEqual(result.annotations['test1'], targ.annotations['test1'])
self.assertEqual(result.annotations['test2'], targ.annotations['test2'])
self.assertIsInstance(result.array_annotations, ArrayDict)
def test_time_slice_none_stop(self):
params = {'test2': 'y1', 'test3': True}
arr_ann = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
targ = Epoch([1.5, 1.7] * pq.ms, durations=[40, 60] * pq.ns,
labels=np.array(['test epoch 2', 'test epoch 3'], dtype='U'), name='test',
description='tester', file_origin='test.file', test1=1,
array_annotations={'index': [1, 2], 'test': ['b', 'c']}, **params)
targ.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(targ)
t_start = 1.2
t_stop = None
result = epc.time_slice(t_start, t_stop)
assert_arrays_equal(result.times, targ.times)
assert_arrays_equal(result.durations, targ.durations)
assert_arrays_equal(result.labels, targ.labels)
self.assertEqual(result.name, targ.name)
self.assertEqual(result.description, targ.description)
self.assertEqual(result.file_origin, targ.file_origin)
self.assertEqual(result.annotations['test0'], targ.annotations['test0'])
self.assertEqual(result.annotations['test1'], targ.annotations['test1'])
self.assertEqual(result.annotations['test2'], targ.annotations['test2'])
assert_arrays_equal(result.array_annotations['index'], np.array([1, 2]))
assert_arrays_equal(result.array_annotations['test'], np.array(['b', 'c']))
self.assertIsInstance(result.array_annotations, ArrayDict)
def test_time_slice_none_start(self):
params = {'test2': 'y1', 'test3': True}
arr_ann = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
targ = Epoch([1.1, 1.5] * pq.ms, durations=[20, 40] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2'], dtype='U'), name='test',
description='tester', file_origin='test.file', test1=1,
array_annotations={'index': [0, 1], 'test': ['a', 'b']}, **params)
targ.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(targ)
t_start = None
t_stop = 1.6
result = epc.time_slice(t_start, t_stop)
assert_arrays_equal(result.times, targ.times)
assert_arrays_equal(result.durations, targ.durations)
assert_arrays_equal(result.labels, targ.labels)
self.assertEqual(result.name, targ.name)
self.assertEqual(result.description, targ.description)
self.assertEqual(result.file_origin, targ.file_origin)
self.assertEqual(result.annotations['test0'], targ.annotations['test0'])
self.assertEqual(result.annotations['test1'], targ.annotations['test1'])
self.assertEqual(result.annotations['test2'], targ.annotations['test2'])
assert_arrays_equal(result.array_annotations['index'], np.array([0, 1]))
assert_arrays_equal(result.array_annotations['test'], np.array(['a', 'b']))
self.assertIsInstance(result.array_annotations, ArrayDict)
def test_time_slice_none_both(self):
params = {'test2': 'y1', 'test3': True}
arr_ann = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
targ = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
targ.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(targ)
t_start = None
t_stop = None
result = epc.time_slice(t_start, t_stop)
assert_arrays_equal(result.times, targ.times)
assert_arrays_equal(result.durations, targ.durations)
assert_arrays_equal(result.labels, targ.labels)
self.assertEqual(result.name, targ.name)
self.assertEqual(result.description, targ.description)
self.assertEqual(result.file_origin, targ.file_origin)
self.assertEqual(result.annotations['test0'], targ.annotations['test0'])
self.assertEqual(result.annotations['test1'], targ.annotations['test1'])
self.assertEqual(result.annotations['test2'], targ.annotations['test2'])
assert_arrays_equal(result.array_annotations['index'], np.array([0, 1, 2]))
assert_arrays_equal(result.array_annotations['test'], np.array(['a', 'b', 'c']))
self.assertIsInstance(result.array_annotations, ArrayDict)
    def test_time_slice_different_units(self):
params = {'test2': 'y1', 'test3': True}
arr_ann = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epc = Epoch([1.1, 1.5, 1.7] * pq.ms, durations=[20, 40, 60] * pq.ns,
labels=np.array(['test epoch 1', 'test epoch 2', 'test epoch 3'], dtype='U'),
name='test', description='tester', file_origin='test.file', test1=1,
array_annotations=arr_ann, **params)
epc.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(epc)
targ = Epoch([1.5] * pq.ms, durations=[40] * pq.ns,
labels=np.array(['test epoch 2'], dtype='U'), name='test',
description='tester', file_origin='test.file', test1=1,
array_annotations={'index': [1], 'test': ['b']}, **params)
targ.annotate(test1=1.1, test0=[1, 2])
assert_neo_object_is_compliant(targ)
t_start = 0.0012 * pq.s
t_stop = 0.0016 * pq.s
result = epc.time_slice(t_start, t_stop)
assert_arrays_equal(result.times, targ.times)
assert_arrays_equal(result.durations, targ.durations)
assert_arrays_equal(result.labels, targ.labels)
self.assertEqual(result.name, targ.name)
self.assertEqual(result.description, targ.description)
self.assertEqual(result.file_origin, targ.file_origin)
self.assertEqual(result.annotations['test0'], targ.annotations['test0'])
self.assertEqual(result.annotations['test1'], targ.annotations['test1'])
self.assertEqual(result.annotations['test2'], targ.annotations['test2'])
assert_arrays_equal(result.array_annotations['index'], np.array([1]))
assert_arrays_equal(result.array_annotations['test'], np.array(['b']))
self.assertIsInstance(result.array_annotations, ArrayDict)
def test__time_slice_should_set_parents_to_None(self):
# When timeslicing, a deep copy is made,
# thus the reference to parent objects should be destroyed
result = self.epc.time_slice(1 * pq.ms, 3 * pq.ms)
self.assertEqual(result.segment, None)
def test__deepcopy_should_set_parents_objects_to_None(self):
# Deepcopy should destroy references to parents
result = deepcopy(self.epc)
self.assertEqual(result.segment, None)
def test__time_shift_same_attributes(self):
result = self.epc.time_shift(1 * pq.ms)
assert_same_attributes(result, self.epc, exclude=['times'])
def test__time_shift_same_annotations(self):
result = self.epc.time_shift(1 * pq.ms)
assert_same_annotations(result, self.epc)
def test__time_shift_same_array_annotations(self):
result = self.epc.time_shift(1 * pq.ms)
assert_same_array_annotations(result, self.epc)
def test__time_shift_should_set_parents_to_None(self):
# When time-shifting, a deep copy is made,
# thus the reference to parent objects should be destroyed
result = self.epc.time_shift(1 * pq.ms)
self.assertEqual(result.segment, None)
def test__time_shift_by_zero(self):
shifted = self.epc.time_shift(0 * pq.ms)
assert_arrays_equal(shifted.times, self.epc.times)
def test__time_shift_same_units(self):
shifted = self.epc.time_shift(10 * pq.ms)
assert_arrays_equal(shifted.times, self.epc.times + 10 * pq.ms)
def test__time_shift_different_units(self):
shifted = self.epc.time_shift(1 * pq.s)
assert_arrays_equal(shifted.times, self.epc.times + 1000 * pq.ms)
def test_as_array(self):
times = [2, 3, 4, 5]
durations = [0.1, 0.2, 0.3, 0.4]
epc = Epoch(times * pq.ms, durations=durations * pq.ms)
epc_as_arr = epc.as_array(units='ms')
self.assertIsInstance(epc_as_arr, np.ndarray)
assert_array_equal(times, epc_as_arr)
def test_as_quantity(self):
times = [2, 3, 4, 5]
durations = [0.1, 0.2, 0.3, 0.4]
epc = Epoch(times * pq.ms, durations=durations * pq.ms)
epc_as_q = epc.as_quantity()
self.assertIsInstance(epc_as_q, pq.Quantity)
assert_array_equal(times * pq.ms, epc_as_q)
def test_getitem_scalar(self):
times = [2, 3, 4, 5]
durations = [0.1, 0.2, 0.3, 0.4]
labels = ["A", "B", "C", "D"]
epc = Epoch(times * pq.ms, durations=durations * pq.ms, labels=labels)
single_epoch = epc[2]
self.assertIsInstance(single_epoch, pq.Quantity)
assert_array_equal(single_epoch.times, np.array([4.0]))
assert_array_equal(single_epoch.durations, np.array([0.3]))
assert_array_equal(single_epoch.labels, np.array(["C"]))
def test_slice(self):
times = [2, 3, 4, 5]
durations = [0.1, 0.2, 0.3, 0.4]
labels = ["A", "B", "C", "D"]
arr_ann = {'index': np.arange(4), 'test': ['a', 'b', 'c', 'd']}
epc = Epoch(times * pq.ms, durations=durations * pq.ms, labels=labels,
array_annotations=arr_ann)
single_epoch = epc[1:3]
self.assertIsInstance(single_epoch, Epoch)
assert_array_equal(single_epoch.times, np.array([3.0, 4.0]))
assert_array_equal(single_epoch.durations, np.array([0.2, 0.3]))
assert_array_equal(single_epoch.labels, np.array(["B", "C"]))
assert_arrays_equal(single_epoch.array_annotations['index'], np.arange(1, 3))
assert_arrays_equal(single_epoch.array_annotations['test'], np.array(['b', 'c']))
self.assertIsInstance(single_epoch.array_annotations, ArrayDict)
def test_rescale(self):
times = [2, 3, 4, 5]
durations = [0.1, 0.2, 0.3, 0.4]
labels = ["A", "B", "C", "D"]
arr_ann = {'index': np.arange(4), 'test': ['a', 'b', 'c', 'd']}
epc = Epoch(times * pq.ms, durations=durations * pq.ms, labels=labels,
array_annotations=arr_ann)
result = epc.rescale(pq.us)
self.assertIsInstance(result, Epoch)
assert_neo_object_is_compliant(result)
assert_arrays_equal(result.array_annotations['index'], np.arange(4))
assert_arrays_equal(result.array_annotations['test'],
np.array(['a', 'b', 'c', 'd']))
self.assertIsInstance(result.array_annotations, ArrayDict)
self.assertEqual(result.units, 1 * pq.us)
assert_array_equal(epc.labels, result.labels)
assert_arrays_almost_equal(result.times, [2000, 3000, 4000, 5000] * pq.us, 1e-9)
assert_arrays_almost_equal(result.times.magnitude,
np.array([2000, 3000, 4000, 5000]),
1e-9)
assert_arrays_almost_equal(result.durations.magnitude,
np.array([100, 200, 300, 400]),
1e-9)
class TestDuplicateWithNewData(unittest.TestCase):
def setUp(self):
self.data = np.array([0.1, 0.5, 1.2, 3.3, 6.4, 7])
self.durations = np.array([0.2, 0.4, 1.1, 2.4, 0.2, 2.0])
self.quant = pq.ms
self.arr_ann = {'index': np.arange(6), 'test': ['a', 'b', 'c', 'd', 'e', 'f']}
self.epoch = Epoch(self.data * self.quant, durations=self.durations * self.quant,
array_annotations=self.arr_ann)
def test_duplicate_with_new_data(self):
signal1 = self.epoch
new_times = np.sort(np.random.uniform(0, 100, self.epoch.size)) * pq.ms
new_durations = np.ones_like(new_times)
new_labels = np.array(list("zyxwvutsrqponmlkjihgfedcba"[:self.epoch.size]))
signal1b = signal1.duplicate_with_new_data(new_times, new_durations, new_labels)
# After duplicating, array annotations should always be empty,
        # because data of a different length would cause inconsistencies
assert_arrays_equal(signal1b.labels, new_labels)
assert_arrays_equal(signal1b.durations, new_durations)
self.assertTrue('index' not in signal1b.array_annotations)
self.assertTrue('test' not in signal1b.array_annotations)
self.assertIsInstance(signal1b.array_annotations, ArrayDict)
class TestEpochFunctions(unittest.TestCase):
def test__pickle(self):
arr_ann = {'index': np.arange(3), 'test': ['a', 'b', 'c']}
epoch1 = Epoch(np.arange(0, 30, 10) * pq.s, durations=[1, 2, 3] * pq.s,
labels=np.array(['t0', 't1', 't2'], dtype='U'), units='s',
annotation1="foo", annotation2="bar", array_annotations=arr_ann)
fobj = open('./pickle', 'wb')
pickle.dump(epoch1, fobj)
fobj.close()
fobj = open('./pickle', 'rb')
try:
epoch2 = pickle.load(fobj)
except ValueError:
epoch2 = None
fobj.close()
assert_array_equal(epoch1.times, epoch2.times)
self.assertEqual(epoch2.annotations, epoch1.annotations)
assert_arrays_equal(epoch2.array_annotations['index'], np.array(arr_ann['index']))
assert_arrays_equal(epoch2.array_annotations['test'], np.array(arr_ann['test']))
self.assertIsInstance(epoch2.array_annotations, ArrayDict)
# Make sure the dict can perform correct checks after unpickling
epoch2.array_annotations['anno3'] = list(range(3, 6))
with self.assertRaises(ValueError):
epoch2.array_annotations['anno4'] = [2, 1]
os.remove('./pickle')
if __name__ == "__main__":
unittest.main()
|
rsutormin/narrative | refs/heads/master | src/biokbase/narrative/mongonbmanager.py | 4 | """A notebook manager that uses mongodb for storage.
Based on Travis Harrison's shocknbmanager and the azurenbmanager
Authors:
* Steve Chan <[email protected]>
Copyright (C) 2013 The Regents of the University of California
Department of Energy contract-operators of the Lawrence Berkeley National Laboratory
1 Cyclotron Road, Berkeley, CA 94720
Copyright (C) 2013 The KBase Project
Distributed under an unspecified open source license as of 8/14/2013
"""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import datetime
import dateutil.parser
from bson.json_util import dumps
from unicodedata import normalize
from tornado import web
from pymongo import MongoClient
from pymongo.read_preferences import ReadPreference
from IPython.html.services.notebooks.nbmanager import NotebookManager
#from IPython.config.configurable import LoggingConfigurable
from IPython.nbformat import current
from IPython.utils.traitlets import Unicode, Dict, Bool, List, TraitError
#from IPython.utils import tz
# To log narrative itself
from biokbase.narrative.common import kblogging
g_log = kblogging.get_logger("narrative.base")
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class MongoNotebookManager(NotebookManager):
    # The MongoDB backend simply wraps the JSON notebook in an enclosing dict
# and pushes it into MongoDB. The dict has the following fields
# {
# '_id' : {mongodb UUID - we set it manually using notebook_id},
# 'owner' : {username of the owner of this notebook},
# 'doc_type' : (ipynb),
# 'ipynb' : { actual ipython notebook dict },
# 'ipynb_chkpt' : { dict for checkpointed notebook },
# 'created' : { creation/update timestamp }
# 'chkpt_created' : { timestamp for ipynb_chkpt }
# }
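    # For example, a stored document might look like this (all values below
    # are illustrative, not taken from a real database):
    # {
    #     '_id' : '4f3c9e2a-uuid-like-string',
    #     'owner' : 'kbasetest',
    #     'doc_type' : 'ipynb',
    #     'ipynb' : {'metadata' : {'name' : 'My Narrative'}, 'worksheets' : []},
    #     'created' : '2013-08-14T12:00:00'
    # }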
mongodb_uri = Unicode('mongodb://localhost/', config=True, help='MongoDB connection URI')
mongodb_database = Unicode('narrative', config=True, help='MongoDB database')
mongodb_collection = Unicode('notebooks', config=True, help='MongoDB collection')
ipynb_type = Unicode(u'ipynb')
allowed_formats = List([u'json'])
node_format = ipynb_type
def __init__(self, **kwargs):
"""Verify that we can connect to the MongoDB instance"""
super(MongoNotebookManager, self).__init__(**kwargs)
if not self.mongodb_uri:
raise web.HTTPError(412, u"Missing MongoDB connection URI.")
if not self.mongodb_database:
raise web.HTTPError(412, u"Missing MongoDB database.")
if not self.mongodb_collection:
raise web.HTTPError(412, u"Missing MongoDB collection.")
try:
self.mclient = MongoClient(self.mongodb_uri,
read_preference=ReadPreference.PRIMARY_PREFERRED)
self.db = self.mclient[self.mongodb_database]
self.collection = self.db[self.mongodb_collection]
except Exception as e:
raise web.HTTPError(500, u"Unable to connect to MongoDB service at %s: %s " % (self.mongodb_uri, e))
# setup a mapping dict for MongoDB/notebook_id <-> Notebook name
mapping = Dict()
# Map notebook names to notebook_ids
rev_mapping = Dict()
def list_notebooks(self):
"""List all notebooks in MongoDB.
The _id field used by MongoDB is a UUID like the notebook_id, so
we directly use the notebook_id for the MongoDB _id field
The name field is coming out of document.ipynb.metadata.name
"""
all_ipynb = self.collection.find( {'doc_type' : self.ipynb_type})
all2 = list( all_ipynb)
self.mapping = { doc['_id'] : doc['ipynb']['metadata']['name'] for doc in all2 }
self.rev_mapping = { doc['ipynb']['metadata']['name'] : doc['_id'] for doc in all2 }
data = [ dict(notebook_id = it[0], name = it[1]) for it in self.mapping.items()]
data = sorted(data, key=lambda item: item['name'])
return data
def new_notebook_id(self, name):
"""Generate a new notebook_id for a name and store its mappings."""
notebook_id = super(MongoNotebookManager, self).new_notebook_id(name)
self.rev_mapping[name] = notebook_id
return notebook_id
def delete_notebook_id(self, notebook_id):
"""Delete a notebook's id in the mapping."""
name = self.mapping[notebook_id]
super(MongoNotebookManager, self).delete_notebook_id(notebook_id)
del self.rev_mapping[name]
def notebook_exists(self, notebook_id):
"""Does a notebook exist?"""
exists = super(MongoNotebookManager, self).notebook_exists(notebook_id)
if not exists:
return False
return exists
def get_name(self, notebook_id):
"""get a notebook name, raising 404 if not found"""
try:
name = self.mapping[notebook_id]
except KeyError:
raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
return name
def read_notebook_object(self, notebook_id):
"""Get the Notebook representation of a notebook by notebook_id."""
doc = self.collection.find_one({'_id': notebook_id})
if doc is None:
            raise web.HTTPError(500, u'Notebook %s not found' % notebook_id)
        # Convert from MongoDB doc to plain JSON and then convert to notebook format
jsonnb = dumps(doc['ipynb'] )
nb = current.reads( jsonnb, u'json')
last_modified = dateutil.parser.parse(doc['created'])
return last_modified, nb
def write_notebook_object(self, nb, notebook_id=None):
"""Save an existing notebook object by notebook_id."""
try:
new_name = normalize('NFC', nb.metadata.name)
except AttributeError:
raise web.HTTPError(400, u'Missing notebook name')
# Carry over some of the metadata stuff from ShockNBManager
try:
if notebook_id is None:
notebook_id = self.new_notebook_id(new_name)
if not hasattr(nb.metadata, 'owner'):
nb.metadata.owner = 'public'
if not hasattr(nb.metadata, 'type'):
nb.metadata.type = 'generic'
if not hasattr(nb.metadata, 'description'):
nb.metadata.description = ''
nb.metadata.created = datetime.datetime.utcnow().isoformat()
nb.metadata.format = self.node_format
except Exception as e:
raise web.HTTPError(400, u'Unexpected error setting notebook attributes: %s' %e)
if notebook_id not in self.mapping:
raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
try:
doc = { '_id' : notebook_id,
'owner' : nb.metadata.owner,
'doc_type' : self.ipynb_type,
'created' : nb.metadata.created,
'ipynb' : nb
}
# Preserve the old checkpoint if it is there
old = self.collection.find_one( { '_id' : notebook_id })
if old and 'ipynb_chkpt' in old:
doc['ipynb_chkpt'] = old['ipynb_chkpt']
doc['chkpt_created'] = old['chkpt_created']
id = self.collection.save( doc, manipulate = True, safe=True)
except Exception as e:
raise web.HTTPError(500, u'%s saving notebook: %s' % (type(e),e))
self.mapping[id] = new_name
return id
def delete_notebook(self, notebook_id):
"""Delete notebook by notebook_id."""
if notebook_id is None:
            raise web.HTTPError(400, u'Missing notebook_id')
        doc = self.collection.find_one( { '_id' : notebook_id })
if doc is None:
raise web.HTTPError(404, u'Notebook not found')
self.log.debug("unlinking notebook %s", notebook_id)
self.collection.remove( { '_id' : notebook_id })
self.delete_notebook_id(notebook_id)
# public checkpoint API
# Checkpoints in the MongoDB manager are just another field in the
# overall MongoDB document. We copy the ipynb field into the ipynb_chkpt
# field (and vice versa for revert)
def create_checkpoint(self, notebook_id):
"""Create a checkpoint from the current state of a notebook"""
# only the one checkpoint ID:
checkpoint_id = u"checkpoint"
doc = self.collection.find_one( { '_id' : notebook_id })
if doc is None:
            raise web.HTTPError(500, u'Notebook %s not found' % notebook_id)
        chkpt_created = datetime.datetime.utcnow()
        self.collection.update( { '_id' : notebook_id } ,
                                { '$set' : { 'ipynb_chkpt' : doc['ipynb'],
                                             'chkpt_created' : chkpt_created.isoformat() } } )
# return the checkpoint info
return { 'checkpoint_id' : checkpoint_id , 'last_modified' : chkpt_created}
def list_checkpoints(self, notebook_id):
"""list the checkpoints for a given notebook
This notebook manager currently only supports one checkpoint per notebook.
"""
checkpoint_id = u"checkpoint"
doc = self.collection.find_one( { '_id' : notebook_id })
if 'ipynb_chkpt' in doc:
return [{'checkpoint_id' : checkpoint_id, 'last_modified' : dateutil.parser.parse(doc['chkpt_created']) } ]
else:
return []
def restore_checkpoint(self, notebook_id, checkpoint_id):
"""restore a notebook to a checkpointed state"""
doc = self.collection.find_one( { '_id' : notebook_id })
if doc:
if 'ipynb_chkpt' in doc:
doc['ipynb'] = doc['ipynb_chkpt']
doc['created'] = doc['chkpt_created']
id = self.collection.save( doc, manipulate = True, safe=True)
self.log.debug("copying ipynb_chkpt to ipynb for %s", notebook_id)
else:
self.log.debug("checkpoint for %s does not exist" % notebook_id)
raise web.HTTPError(404,
u'Notebook checkpoint does not exist: %s' % notebook_id)
else:
self.log( "notebook %s does not exist" % notebook_id)
raise web.HTTPError(404,
u'Notebook %s does not exist' % notebook_id)
def delete_checkpoint(self, notebook_id, checkpoint_id):
"""delete a notebook's checkpoint"""
doc = self.collection.find_one( { '_id' : notebook_id })
if doc:
if 'ipynb_chkpt' in doc:
self.collection.update( { '_id' : notebook_id },
{ '$unset' : { 'ipynb_chkpt' : 1,
'chkpt_created' : 1}})
else:
raise web.HTTPError(404,
u'Notebook checkpoint does not exist: %s' % notebook_id)
else:
raise web.HTTPError(404,
u'Notebook %s does not exist' % notebook_id)
def log_info(self):
self.log.info("Serving notebooks from MongoDB URI %s" %self.mongodb_uri)
self.log.info("Serving notebooks from MongoDB db %s" %self.mongodb_database)
self.log.info("Serving notebooks from MongoDB collection %s" %self.mongodb_collection)
def info_string(self):
return "Serving notebooks from mongodb database %s and collection %s" % (self.mongodb_database,
self.mongodb_collection)
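# A minimal usage sketch (assumptions: a reachable MongoDB instance at the
# configured URI; the URI/database/collection values below just restate the
# trait defaults). This is only illustrative and is never called on import.
def _example_list_notebooks():
    mgr = MongoNotebookManager(mongodb_uri='mongodb://localhost/',
                               mongodb_database='narrative',
                               mongodb_collection='notebooks')
    # list_notebooks() returns dicts with 'notebook_id' and 'name' keys
    for entry in mgr.list_notebooks():
        print('%s %s' % (entry['notebook_id'], entry['name']))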
|
arnavd96/Cinemiezer | refs/heads/master | myvenv/lib/python3.4/site-packages/music21/scale/scala/__init__.py | 1 | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# Name: scala/__init__.py
# Purpose: Interface and representation of Scala scale files
#
# Authors: Christopher Ariza
#
# Copyright: Copyright © 2010 Michael Scott Cuthbert and the music21 Project
# License: LGPL or BSD, see license.txt
#-------------------------------------------------------------------------------
'''
This module defines classes for representing Scala scale data,
including Scala pitch representations, storage, and files.
The Scala format is defined at the following URL:
http://www.huygens-fokker.org/scala/scl_format.html
We thank Manuel Op de Coul for allowing us to include
the repository (as of May 11, 2011) with music21.
Utility functions are also provided to search and find
scales in the Scala scale archive. File names can be found
with the :func:`~music21.scala.search` function.
To create a :class:`~music21.scale.ScalaScale` instance, simply
provide a root pitch and the name of the scale. Scale names are given as the scala .scl file name.
>>> mbiraScales = scale.scala.search('mbira')
>>> mbiraScales
['mbira_banda.scl', 'mbira_banda2.scl', 'mbira_gondo.scl', 'mbira_kunaka.scl', 'mbira_kunaka2.scl', 'mbira_mude.scl', 'mbira_mujuru.scl', 'mbira_zimb.scl']
For most people you'll want to do something like this:
>>> sc = scale.ScalaScale('a4', 'mbira_banda.scl')
>>> [str(p) for p in sc.pitches]
['A4', 'B4(-15c)', 'C#5(-11c)', 'D#5(-7c)', 'E~5(+6c)', 'F#5(+14c)', 'G~5(+1c)', 'B-5(+2c)']
'''
import os
import unittest
import math
import io
from music21 import common
from music21 import interval
# scl is the library of scala files
from music21.scale.scala import scl
from music21 import environment
_MOD = "pitch.py"
environLocal = environment.Environment(_MOD)
#-------------------------------------------------------------------------------
# global variable to cache the paths returned from getPaths()
SCALA_PATHS = None
def getPaths():
    '''Get all scala scale paths. This is called once for the module and cached as SCALA_PATHS, which should be used instead of calls to this function.
>>> a = scale.scala.getPaths()
>>> len(a) >= 3800
True
'''
# declare that the copy of SCALA_PATHS here is the same
# as the outer scope. See
# http://stackoverflow.com/questions/423379/using-global-variables-in-a-function-other-than-the-one-that-created-them
global SCALA_PATHS # pylint: disable=global-statement
if SCALA_PATHS is not None:
return SCALA_PATHS
moduleName = scl
if not hasattr(moduleName, '__path__'):
# when importing a package name (a directory) the moduleName
# may be a list of all paths contained within the package
# this seems to be dependent on the context of the call:
# from the command line is different than from the interpreter
dirListing = moduleName
else:
# returns a list with one or more paths
# the first is the path to the directory that contains xml files
directory = moduleName.__path__[0]
dirListing = [os.path.join(directory, x) for x in os.listdir(directory)]
paths = {} # return a dictionary with keys and list of alternate names
for fp in dirListing:
if fp.endswith('.scl'):
paths[fp] = []
# store alternative name representations
# store version with no extension
directory, fn = os.path.split(fp)
fn = fn.replace('.scl', '')
paths[fp].append(fn)
# store version with removed underscores
directory, fn = os.path.split(fp)
fn = fn.lower()
fn = fn.replace('.scl', '')
fn = fn.replace('_', '')
fn = fn.replace('-', '')
paths[fp].append(fn)
SCALA_PATHS = paths
return paths
#-------------------------------------------------------------------------------
class ScalaPitch(object):
'''Representation of a scala pitch notation
>>> sp = scale.scala.ScalaPitch(' 1066.667 cents')
>>> print(sp.parse())
1066.667
>>> sp = scale.scala.ScalaPitch(' 2/1')
>>> sp.parse()
1200.0
>>> sp.parse('100.0 C#')
100.0
>>> [sp.parse(x) for x in ['89/84', '55/49', '44/37', '63/50', '4/3', '99/70', '442/295', '27/17', '37/22', '98/55', '15/8', '2/1']]
[100.09920982..., 199.9798432913..., 299.973903610..., 400.108480470..., 498.044999134..., 600.08832376157..., 699.9976981706..., 800.90959309..., 900.02609638..., 1000.020156708..., 1088.268714730..., 1200.0]
'''
# pitch values; if has a period, is cents, otherwise a ratio
# above the implied base ratio
# integer values w/ no period or slash: 2 is 2/1
def __init__(self, sourceString=None):
self.src = None
if sourceString is not None:
self._setSrc(sourceString)
        # resolve all values into cents shifts
self.cents = None
def _setSrc(self, raw):
raw = raw.strip()
# get decimals and fractions
raw, junk = common.getNumFromStr(raw, numbers='0123456789./')
self.src = raw.strip()
def parse(self, sourceString=None):
'''Parse the source string and set self.cents.
'''
if sourceString is not None:
self._setSrc(sourceString)
if '.' in self.src: # cents
self.cents = float(self.src)
        else: # it's a ratio
if '/' in self.src:
n, d = self.src.split('/')
n, d = float(n), float(d)
else:
n = float(self.src)
d = 1.0
# http://www.sengpielaudio.com/calculator-centsratio.htm
self.cents = 1200.0 * math.log((n / d), 2)
return self.cents
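# A small self-check sketch for ScalaPitch (the inputs are illustrative): a
# frequency ratio n/d corresponds to 1200 * log2(n/d) cents, so 2/1 is
# exactly 1200 cents and 3/2 is roughly 701.955 cents. Not run on import.
def _scalaPitchExample():
    sp = ScalaPitch()
    assert abs(sp.parse('2/1') - 1200.0) < 1e-9  # the octave
    assert abs(sp.parse('3/2') - 701.955) < 1e-3  # just perfect fifth
    assert abs(sp.parse('100.0') - 100.0) < 1e-9  # explicit cents value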
class ScalaData(object):
    '''Object representation of data stored in a Scala scale file. This object is used to
access Scala information stored in a file. To create a music21 scale with a Scala file,
use :class:`~music21.scale.ScalaScale`.
This is not called ScalaScale, as this name clashes with the
:class:`~music21.scale.ScalaScale` that uses this object.
'''
def __init__(self, sourceString=None, fileName=None):
self.src = sourceString
        self.fileName = fileName # store source file name
# added in parsing:
self.description = None
# lower limit is 0, as degree 0, or the 1/1 ratio, is implied
# assumes octave equivalence?
self.pitchCount = None # number of lines w/ pitch values will follow
self.pitchValues = []
def parse(self):
'''Parse a scala file delivered as a long string with line breaks
'''
lines = self.src.split('\n')
count = 0 # count non-comment lines
for i, l in enumerate(lines):
l = l.strip()
#environLocal.printDebug(['l', l, self.fileName, i])
if l.startswith('!'):
if i == 0 and self.fileName is None: # try to get from first l
if '.scl' in l: # its got the file name
self.fileName = l[1:].strip() # remove leading !
continue # comment
else:
count += 1
            if count == 1:
if l != '': # may be empty
self.description = l
elif count == 2:
if l != '':
self.pitchCount = int(l)
else: # remaining counts are pitches
if l != '':
sp = ScalaPitch(l)
sp.parse()
self.pitchValues.append(sp)
def getCentsAboveTonic(self):
'''Return a list of cent values above the implied tonic.
'''
return [sp.cents for sp in self.pitchValues]
def getAdjacentCents(self):
'''Get cents values between adjacent intervals.
'''
post = []
location = 0
for c in self.getCentsAboveTonic():
dif = c - location
#environLocal.printDebug(['getAdjacentCents', 'c', c, 'location', location, 'dif', dif])
post.append(dif)
location = c # set new location
return post
def setAdjacentCents(self, centList):
        '''Given a list of adjacent cent values, create the necessary ScalaPitch objects and update the stored pitch values.
'''
self.pitchValues = []
location = 0
for c in centList:
sp = ScalaPitch()
sp.cents = location + c
location = sp.cents
self.pitchValues.append(sp)
self.pitchCount = len(self.pitchValues)
def getIntervalSequence(self):
'''Get the scale as a list of Interval objects.
'''
post = []
for c in self.getAdjacentCents():
# convert cent values to semitone values to create intervals
post.append(interval.Interval(c*.01))
return post
def setIntervalSequence(self, iList):
'''Set the scale from a list of Interval objects.
'''
self.pitchValues = []
location = 0
for i in iList:
# convert cent values to semitone values to create intervals
sp = ScalaPitch()
sp.cents = location + i.cents
location = sp.cents
self.pitchValues.append(sp)
self.pitchCount = len(self.pitchValues)
def getFileString(self):
        '''Return a string suitable for writing a Scala file
'''
msg = []
if self.fileName is not None:
msg.append('! %s' % self.fileName)
# conventional to add a comment space
msg.append('!')
if self.description is not None:
msg.append(self.description)
else: # must supply empty line
msg.append('')
if self.pitchCount is not None:
msg.append(str(self.pitchCount))
else: # must supply empty line
msg.append('')
# conventional to add a comment space
msg.append('!')
for sp in self.pitchValues:
msg.append(str(sp.cents))
# add space
msg.append('')
return '\n'.join(msg)
#-------------------------------------------------------------------------------
class ScalaFile(object):
'''
Interface for reading and writing scala files.
On reading, returns a :class:`~music21.scala.ScalaData` object.
>>> sf = scale.scala.ScalaFile()
'''
def __init__(self, data=None):
self.fileName = None
self.file = None
# store data source if provided
self.data = data
def open(self, fp, mode='r'):
'''Open a file for reading
'''
self.file = io.open(fp, mode, encoding='utf-8')
self.fileName = os.path.basename(fp)
def openFileLike(self, fileLike):
'''Assign a file-like object, such as those provided by StringIO, as an open file object.
'''
self.file = fileLike # already 'open'
def __repr__(self):
r = "<ScalaFile>"
return r
def close(self):
self.file.close()
def read(self):
        '''Read the open file and parse its contents via readstr().
        '''
return self.readstr(self.file.read())
def readstr(self, strSrc):
        '''Read a string and parse it as Scala data. Returns a ScalaData instance.
'''
ss = ScalaData(strSrc, self.fileName)
ss.parse()
self.data = ss
return ss
def write(self):
ws = self.writestr()
self.file.write(ws)
def writestr(self):
if isinstance(self.data, ScalaData):
return self.data.getFileString()
# handle Scale or other objects
#-------------------------------------------------------------------------------
def parse(target):
'''
Get a :class:`~music21.scala.ScalaData` object from
the bundled SCL archive or a file path.
>>> ss = scale.scala.parse('balafon6')
>>> ss.description
'Observed balafon tuning from Burma, Helmholtz/Ellis p. 518, nr.84'
>>> [str(i) for i in ss.getIntervalSequence()]
['<music21.interval.Interval m2 (+14c)>', '<music21.interval.Interval M2 (+36c)>', '<music21.interval.Interval M2>', '<music21.interval.Interval m2 (+37c)>', '<music21.interval.Interval M2 (-49c)>', '<music21.interval.Interval M2 (-6c)>', '<music21.interval.Interval M2 (-36c)>']
>>> scale.scala.parse('incorrectFileName.scl') == None
True
>>> ss = scale.scala.parse('barbourChrom1')
>>> print(ss.description)
Barbour's #1 Chromatic
>>> ss.fileName
'barbour_chrom1.scl'
>>> ss = scale.scala.parse('blackj_gws.scl')
>>> ss.description
'Detempered Blackjack in 1/4 kleismic marvel tuning'
'''
match = None
# this may be a file path to a scala file
if os.path.exists(target) and target.endswith('.scl'):
match = target
# try from stored collections
# remove any spaces
target = target.replace(' ', '')
if match is None:
for fp in getPaths():
unused_directory, fn = os.path.split(fp)
# try exact match
if target.lower() == fn.lower():
match = fp
break
# try again, from cached reduced expressions
if match is None:
for fp in getPaths():
# look at alternative names
for alt in getPaths()[fp]:
if target.lower() == alt:
match = fp
break
if match is None:
# accept partial matches
for fp in getPaths():
# look at alternative names
for alt in getPaths()[fp]:
if target.lower() in alt:
match = fp
break
# might put this in a try block
if match is not None:
sf = ScalaFile()
sf.open(match)
ss = sf.read()
sf.close()
return ss
def search(target):
'''Search the scala archive for matches based on a string
>>> mbiraScales = scale.scala.search('mbira')
>>> mbiraScales
['mbira_banda.scl', 'mbira_banda2.scl', 'mbira_gondo.scl', 'mbira_kunaka.scl', 'mbira_kunaka2.scl', 'mbira_mude.scl', 'mbira_mujuru.scl', 'mbira_zimb.scl']
'''
match = []
# try from stored collections
# remove any spaces
target = target.replace(' ', '')
for fp in getPaths():
unused_directory, fn = os.path.split(fp)
# try exact match
if target.lower() == fn.lower():
if fp not in match:
match.append(fp)
# accept partial matches
for fp in getPaths():
# look at alternative names
for alt in getPaths()[fp]:
if target.lower() in alt:
if fp not in match:
match.append(fp)
names = []
for fp in match:
names.append(os.path.basename(fp))
names.sort()
return names
#-------------------------------------------------------------------------------
class TestExternal(unittest.TestCase):
def runTest(self):
pass
class Test(unittest.TestCase):
def runTest(self):
pass
def testScalaScaleA(self):
msg = '''! slendro5_2.scl
!
A slendro type pentatonic which is based on intervals of 7, no. 2
5
!
7/6
4/3
3/2
7/4
2/1
'''
ss = ScalaData(msg)
ss.parse()
self.assertEqual(ss.pitchCount, 5)
self.assertEqual(ss.fileName, 'slendro5_2.scl')
self.assertEqual(len(ss.pitchValues), 5)
self.assertEqual(["%.9f" % x.cents for x in ss.pitchValues], ['266.870905604', '498.044999135', '701.955000865', '968.825906469', '1200.000000000'])
self.assertEqual(["%.9f" % x for x in ss.getCentsAboveTonic()], ['266.870905604', '498.044999135', '701.955000865', '968.825906469', '1200.000000000'])
        # cent values between scale degrees
self.assertEqual(["%.9f" % x for x in ss.getAdjacentCents()], ['266.870905604', '231.174093531', '203.910001731', '266.870905604', '231.174093531'] )
self.assertEqual([str(x) for x in ss.getIntervalSequence()], ['<music21.interval.Interval m3 (-33c)>', '<music21.interval.Interval M2 (+31c)>', '<music21.interval.Interval M2 (+4c)>', '<music21.interval.Interval m3 (-33c)>', '<music21.interval.Interval M2 (+31c)>'])
def testScalaScaleB(self):
msg = '''! fj-12tet.scl
!
Franck Jedrzejewski continued fractions approx. of 12-tet
12
!
89/84
55/49
44/37
63/50
4/3
99/70
442/295
27/17
37/22
98/55
15/8
2/1
'''
ss = ScalaData(msg)
ss.parse()
self.assertEqual(ss.pitchCount, 12)
self.assertEqual(ss.fileName, 'fj-12tet.scl')
self.assertEqual(ss.description, 'Franck Jedrzejewski continued fractions approx. of 12-tet')
self.assertEqual(["%.9f" % x for x in ss.getCentsAboveTonic()], ['100.099209825',
'199.979843291',
'299.973903610',
'400.108480470',
'498.044999135',
'600.088323762',
'699.997698171',
'800.909593096',
'900.026096390',
'1000.020156709',
'1088.268714730',
'1200.000000000'])
self.assertEqual(["%.9f" % x for x in ss.getAdjacentCents()], ['100.099209825',
'99.880633466',
'99.994060319',
'100.134576860',
'97.936518664',
'102.043324627',
'99.909374409',
'100.911894925',
'99.116503294',
'99.994060319',
'88.248558022',
'111.731285270'])
self.assertEqual([str(x) for x in ss.getIntervalSequence()], ['<music21.interval.Interval m2 (+0c)>', '<music21.interval.Interval m2 (0c)>', '<music21.interval.Interval m2 (0c)>', '<music21.interval.Interval m2 (+0c)>', '<music21.interval.Interval m2 (-2c)>', '<music21.interval.Interval m2 (+2c)>', '<music21.interval.Interval m2 (0c)>', '<music21.interval.Interval m2 (+1c)>', '<music21.interval.Interval m2 (-1c)>', '<music21.interval.Interval m2 (0c)>', '<music21.interval.Interval m2 (-12c)>', '<music21.interval.Interval m2 (+12c)>'])
# test loading a new scala object from adjacent sets
ss2 = ScalaData()
ss2.setAdjacentCents(ss.getAdjacentCents())
self.assertEqual(["%.9f" % x for x in ss2.getCentsAboveTonic()], ['100.099209825',
'199.979843291',
'299.973903610',
'400.108480470',
'498.044999135',
'600.088323762',
'699.997698171',
'800.909593096',
'900.026096390',
'1000.020156709',
'1088.268714730',
'1200.000000000'])
def testScalaFileA(self):
msg = '''! arist_chromenh.scl
!
Aristoxenos' Chromatic/Enharmonic, 3 + 9 + 18 parts
7
!
50.00000
200.00000
500.00000
700.00000
750.00000
900.00000
2/1
'''
sf = ScalaFile()
ss = sf.readstr(msg)
self.assertEqual(ss.pitchCount, 7)
# all but last will be the same
#print ss.getFileString()
self.assertEqual(ss.getFileString()[:1], msg[:1])
self.assertEqual([str(x) for x in ss.getIntervalSequence()], ['<music21.interval.Interval P1 (+50c)>', '<music21.interval.Interval m2 (+50c)>', '<music21.interval.Interval m3>', '<music21.interval.Interval M2>', '<music21.interval.Interval P1 (+50c)>', '<music21.interval.Interval m2 (+50c)>', '<music21.interval.Interval m3>'])
#-------------------------------------------------------------------------------
# define presented order in documentation
_DOC_ORDER = []
if __name__ == "__main__":
# sys.arg test options will be used in mainTest()
import music21
music21.mainTest(Test)
#------------------------------------------------------------------------------
# eof
#------------------------------------------------------------------------------
# eof
|
probstj/pyMPB | refs/heads/master | pympb/geometry.py | 1 | #Copyright 2009-2016 Seyed Hessam Moosavi Mehr, Juergen Probst
#This program is free software; you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation; either version 3 of the License, or
#(at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import objects
import log
from objects import *
class Geometry(object):
def __init__(
self, width, height, objects, depth=None, triangular=False):
self.width = width
self.height = height
self.objects = objects
if depth is None:
self.depth = 'no-size'
else:
self.depth = depth
self.triangular = triangular
self.substrate_index = 1
def add_substrate(self, material, start_at):
if self.substrate_index != 1:
log.error('Geometry: a substrate was added before, will ignore '
'this call')
return
if self.depth == 'no-size':
log.error('Geometry: can only add a substrate if depth is '
'given (third dimension). Substrate not added.')
return
self.substrate_index = material.index
self.objects.append(
Block(
x=0, y=0, z=start_at / 2.0 - self.depth / 4.0,
material=material,
size=(
#make it bigger than computational cell, just in case:
2 * self.width, 2 * self.height,
self.depth / 2.0 + start_at)
))
def get_is3D(self):
return 'no-size' not in [self.width, self.height, self.depth]
is3D = property(get_is3D)
def get_area(self):
return self.width*self.height
cell_area = property(get_area)
def get_lattice(self):
if self.triangular:
return ('(make lattice (size %s %s %s)'
'\n (basis1 (/ (sqrt 3) 2) 0.5)'
'\n (basis2 (/ (sqrt 3) 2) -0.5))') % \
(self.width, self.height, self.depth)
else:
return '(make lattice (size %s %s %s))' % \
(self.width, self.height, self.depth)
lattice = property(get_lattice)
## def get_max_epsilon(self):
## return max(a.material.epsilon for a in self.objects)
## max_epsilon = property(get_max_epsilon)
## def __repr__(self):
## object_template = {
## objects.Rod : ("EdgeForm[Directive[Dashed,Darker["
## "Green,0.6]],Disk[{%(x)s,%(y)s},%(radius)s")}
#maxeps = self.max_epsilon
#hue_function = lambda epsilon:(maxeps-epsilon)/epsilon
## return('Graphics[{'+','.join(object_template[a.__class__]\
## %a.__dict__ for a in self.objects)+'}]')
def __str__(self):
return '(list' + ''.join(str(a) for a in self.objects) + ')'
def __repr__(self):
s = '; '.join(
[
'lattice = {0!r}'.format(self.lattice),
'geometry = {0!r}'.format(self.__str__())])
return '<geometry.Geometry object: {0}>'.format(s)
def __iter__(self):
return self
#~ def geom_objects(self):
#~ for obj in self.objects:
#~ yield
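# Illustrative sketch (not part of the original module): build a minimal
# triangular-lattice geometry with no objects and print the generated MPB
# lattice s-expression. The cell sizes are arbitrary example values; this
# function is never called on import.
def _geometry_example():
    g = Geometry(width=5, height=5, objects=[], triangular=True)
    # prints the (make lattice ...) string with the triangular basis vectors
    print(g.lattice)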
|
ryancoleman/autodock-vina | refs/heads/master | boost_1_54_0/tools/build/v2/test/core_option_l.py | 51 | #!/usr/bin/python
# Copyright 2007 Rene Rivera.
# Copyright 2011 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
t = BoostBuild.Tester(pass_toolset=0)
t.write("sleep.bat", """\
::@timeout /T %1 /NOBREAK >nul
@ping 127.0.0.1 -n 2 -w 1000 >nul
@ping 127.0.0.1 -n %1 -w 1000 >nul
@exit /B 0
""")
t.write("file.jam", """\
if $(NT)
{
SLEEP = @call sleep.bat ;
}
else
{
SLEEP = sleep ;
}
actions .a. {
echo 001
$(SLEEP) 4
echo 002
}
.a. sleeper ;
DEPENDS all : sleeper ;
""")
t.run_build_system(["-ffile.jam", "-d1", "-l2"], status=1)
t.expect_output_lines("2 second time limit exceeded")
t.cleanup()
|
JBonsink/GSOC-2013 | refs/heads/master | tools/ns-allinone-3.14.1/ns-3.14.1/doc/tutorial-pt-br/pickle-to-xml.py | 392 | #!/usr/bin/python
# output xml format:
# <pages>
# <page url="xx"><prev url="yyy">zzz</prev><next url="hhh">lll</next><fragment>file.frag</fragment></page>
# ...
# </pages>
import pickle
import os
import codecs
def dump_pickles(out, dirname, filename, path):
f = open(os.path.join(dirname, filename), 'r')
data = pickle.load(f)
fragment_file = codecs.open(data['current_page_name'] + '.frag', mode='w', encoding='utf-8')
fragment_file.write(data['body'])
fragment_file.close()
out.write(' <page url="%s">\n' % path)
out.write(' <fragment>%s.frag</fragment>\n' % data['current_page_name'])
if data['prev'] is not None:
out.write(' <prev url="%s">%s</prev>\n' %
(os.path.normpath(os.path.join(path, data['prev']['link'])),
data['prev']['title']))
if data['next'] is not None:
out.write(' <next url="%s">%s</next>\n' %
(os.path.normpath(os.path.join(path, data['next']['link'])),
data['next']['title']))
out.write(' </page>\n')
f.close()
if data['next'] is not None:
next_path = os.path.normpath(os.path.join(path, data['next']['link']))
next_filename = os.path.basename(next_path) + '.fpickle'
dump_pickles(out, dirname, next_filename, next_path)
return
import sys
sys.stdout.write('<pages>\n')
dump_pickles(sys.stdout, os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1]), '/')
sys.stdout.write('</pages>')
|
cristianquaglio/odoo | refs/heads/master | addons/document/__openerp__.py | 260 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Document Management System',
'version': '2.1',
'category': 'Knowledge Management',
'description': """
This is a complete document management system.
==============================================
* User Authentication
* Document Indexation: .pptx and .docx files are not supported on the Windows platform.
* Dashboard for Document that includes:
* New Files (list)
* Files by Resource Type (graph)
* Files by Partner (graph)
* Files Size by Month (graph)
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com',
'depends': ['knowledge', 'mail'],
'data': [
'security/document_security.xml',
'document_view.xml',
'document_data.xml',
'wizard/document_configuration_view.xml',
'security/ir.model.access.csv',
'report/document_report_view.xml',
'views/document.xml',
],
'demo': [ 'document_demo.xml' ],
'test': ['test/document_test2.yml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mariansoban/ardupilot | refs/heads/Copter-4.0.x-sobi-phl-8m | libraries/AP_InertialSensor/examples/coning.py | 6 | #!/usr/bin/python
from math import *
from pymavlink.rotmat import Vector3, Matrix3
from numpy import linspace
from visual import *
class Quat:
def __init__(self,w=1.0,x=0.0,y=0.0,z=0.0):
self.w = w
self.x = x
self.y = y
self.z = z
def to_euler(self):
roll = (atan2(2.0*(self.w*self.x + self.y*self.z), 1 - 2.0*(self.x*self.x + self.y*self.y)))
pitch = asin(2.0*(self.w*self.y - self.z*self.x))
yaw = atan2(2.0*(self.w*self.z + self.x*self.y), 1 - 2.0*(self.y*self.y + self.z*self.z))
return Vector3(roll,pitch,yaw)
def from_euler(self,euler):
#(roll,pitch,yaw)
cr2 = cos(euler[0]*0.5)
cp2 = cos(euler[1]*0.5)
cy2 = cos(euler[2]*0.5)
sr2 = sin(euler[0]*0.5)
sp2 = sin(euler[1]*0.5)
sy2 = sin(euler[2]*0.5)
self.w = cr2*cp2*cy2 + sr2*sp2*sy2
self.x = sr2*cp2*cy2 - cr2*sp2*sy2
self.y = cr2*sp2*cy2 + sr2*cp2*sy2
self.z = cr2*cp2*sy2 - sr2*sp2*cy2
return self
def from_axis_angle(self, vec):
theta = vec.length()
if theta == 0:
self.w = 1.0
self.x = 0.0
self.y = 0.0
self.z = 0.0
return
vec_normalized = vec.normalized()
st2 = sin(theta/2.0)
self.w = cos(theta/2.0)
self.x = vec_normalized.x * st2
self.y = vec_normalized.y * st2
self.z = vec_normalized.z * st2
def rotate(self, vec):
r = Quat()
r.from_axis_angle(vec)
q = self * r
self.w = q.w
self.x = q.x
self.y = q.y
self.z = q.z
def to_axis_angle(self):
l = sqrt(self.x**2+self.y**2+self.z**2)
(x,y,z) = (self.x,self.y,self.z)
if l != 0:
temp = 2.0*atan2(l,self.w)
if temp > pi:
temp -= 2*pi
elif temp < -pi:
temp += 2*pi
(x,y,z) = (temp*x/l,temp*y/l,temp*z/l)
return Vector3(x,y,z)
def to_rotation_matrix(self):
m = Matrix3()
yy = self.y**2
yz = self.y * self.z
xx = self.x**2
xy = self.x * self.y
xz = self.x * self.z
wx = self.w * self.x
wy = self.w * self.y
wz = self.w * self.z
zz = self.z**2
m.a.x = 1.0-2.0*(yy + zz)
m.a.y = 2.0*(xy - wz)
m.a.z = 2.0*(xz + wy)
m.b.x = 2.0*(xy + wz)
m.b.y = 1.0-2.0*(xx + zz)
m.b.z = 2.0*(yz - wx)
m.c.x = 2.0*(xz - wy)
m.c.y = 2.0*(yz + wx)
m.c.z = 1.0-2.0*(xx + yy)
return m
def inverse(self):
return Quat(self.w,-self.x,-self.y,-self.z)
def __mul__(self,operand):
ret = Quat()
w1=self.w
x1=self.x
y1=self.y
z1=self.z
w2=operand.w
x2=operand.x
y2=operand.y
z2=operand.z
ret.w = w1*w2 - x1*x2 - y1*y2 - z1*z2
ret.x = w1*x2 + x1*w2 + y1*z2 - z1*y2
ret.y = w1*y2 - x1*z2 + y1*w2 + z1*x2
ret.z = w1*z2 + x1*y2 - y1*x2 + z1*w2
return ret
def __str__(self):
return "Quat(%f, %f, %f, %f)" % (self.w,self.x,self.y,self.z)
def vpy_vec(vec):
return vector(vec.y, -vec.z, -vec.x)
def update_arrows(q,x,y,z):
m = q.to_rotation_matrix().transposed()
x.axis = vpy_vec(m*Vector3(1,0,0))
x.up = vpy_vec(m*Vector3(0,1,0))
y.axis = vpy_vec(m*Vector3(0,1,0))
y.up = vpy_vec(m*Vector3(1,0,0))
z.axis = vpy_vec(m*Vector3(0,0,1))
z.up = vpy_vec(m*Vector3(1,0,0))
class Attitude:
def __init__(self,reference=False):
self.labels = []
self.xarrows = []
self.yarrows = []
self.zarrows = []
self.q = Quat()
self.reference = reference
self.update_arrows()
def add_arrows(self, arrowpos = Vector3(0,0,0), labeltext=None):
if labeltext is not None:
self.labels.append(label(pos = vpy_vec(arrowpos), text=labeltext))
sw = .005 if self.reference else .05
self.xarrows.append(arrow(pos=vpy_vec(arrowpos),color=color.red,opacity=1,shaftwidth=sw))
self.yarrows.append(arrow(pos=vpy_vec(arrowpos),color=color.green,opacity=1,shaftwidth=sw))
self.zarrows.append(arrow(pos=vpy_vec(arrowpos),color=color.blue,opacity=1,shaftwidth=sw))
self.update_arrows()
def rotate(self, vec):
self.q.rotate(vec)
def update_arrows(self):
m = self.q.to_rotation_matrix().transposed()
sl = 1.1 if self.reference else 1.0
for i in self.xarrows:
i.axis = vpy_vec(m*Vector3(sl,0,0))
i.up = vpy_vec(m*Vector3(0,1,0))
for i in self.yarrows:
i.axis = vpy_vec(m*Vector3(0,sl,0))
i.up = vpy_vec(m*Vector3(1,0,0))
for i in self.zarrows:
i.axis = vpy_vec(m*Vector3(0,0,sl))
i.up = vpy_vec(m*Vector3(1,0,0))
for i in self.labels:
i.xoffset = scene.width*0.07
i.yoffset = scene.width*0.1
class Tian_integrator:
def __init__(self, integrate_separately=True):
self.alpha = Vector3(0,0,0)
self.beta = Vector3(0,0,0)
self.last_alpha = Vector3(0,0,0)
self.last_delta_alpha = Vector3(0,0,0)
self.last_sample = Vector3(0,0,0)
self.integrate_separately = integrate_separately
def add_sample(self, sample, dt):
delta_alpha = (self.last_sample+sample)*0.5*dt
self.alpha += delta_alpha
delta_beta = 0.5 * (self.last_alpha + (1.0/6.0)*self.last_delta_alpha)%delta_alpha
if self.integrate_separately:
self.beta += delta_beta
else:
self.alpha += delta_beta
self.last_alpha = self.alpha
self.last_delta_alpha = delta_alpha
self.last_sample = sample
def pop_delta_angles(self):
ret = self.alpha + self.beta
self.alpha.zero()
self.beta.zero()
return ret
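# Sketch of the integrator above under a constant body rate (values are
# arbitrary): with a constant rate there is no coning motion, so the
# correction term beta (built from cross products of collinear vectors)
# stays zero and the popped delta angle is simply rate * elapsed time, up
# to trapezoidal integration error. Not called on import.
def _tian_integrator_example():
    integ = Tian_integrator()
    body_rate = Vector3(0.1, 0.0, 0.0)  # rad/s about x, constant
    dt = 0.001
    for _ in range(1000):               # integrate one second of samples
        integ.add_sample(body_rate, dt)
    delta = integ.pop_delta_angles()
    assert abs(delta.x - 0.1) < 1e-3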
filter2p_1khz_30hz_data = {}
def filter2p_1khz_30hz(sample, key):
global filter2p_1khz_30hz_data
if not key in filter2p_1khz_30hz_data:
filter2p_1khz_30hz_data[key] = (0.0,0.0)
(delay_element_1, delay_element_2) = filter2p_1khz_30hz_data[key]
sample_freq = 1000
cutoff_freq = 30
fr = sample_freq // cutoff_freq
ohm = tan(pi/fr)
c = 1.0+2.0*cos(pi/4.0)*ohm + ohm**2
b0 = ohm**2/c
b1 = 2.0*b0
b2 = b0
a1 = 2.0*(ohm**2-1.0)/c
a2 = (1.0-2.0*cos(pi/4.0)*ohm+ohm**2)/c
delay_element_0 = sample - delay_element_1 * a1 - delay_element_2 * a2
output = delay_element_0 * b0 + delay_element_1 * b1 + delay_element_2 * b2
filter2p_1khz_30hz_data[key] = (delay_element_0, delay_element_1)
return output
def filter2p_1khz_30hz_vector3(sample, key):
ret = Vector3()
ret.x = filter2p_1khz_30hz(sample.x, "vec3f"+key+"x")
ret.y = filter2p_1khz_30hz(sample.y, "vec3f"+key+"y")
ret.z = filter2p_1khz_30hz(sample.z, "vec3f"+key+"z")
return ret
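# Quick sanity sketch for the filter above (the key name is arbitrary): the
# biquad coefficients give unity DC gain, so a constant input settles to the
# same value after the transient dies out. Not called on import.
def _filter_dc_gain_example():
    y = 0.0
    for _ in range(2000):  # 2 s of samples at the 1 kHz design rate
        y = filter2p_1khz_30hz(1.0, "dc_example")
    assert abs(y - 1.0) < 1e-6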
reference_attitude = Attitude(True)
uncorrected_attitude_low = Attitude()
uncorrected_attitude_high = Attitude()
corrected_attitude = Attitude()
corrected_attitude_combined = Attitude()
corrected_attitude_integrator = Tian_integrator()
corrected_attitude_integrator_combined = Tian_integrator(integrate_separately = False)
reference_attitude.add_arrows(Vector3(0,-3,0))
uncorrected_attitude_low.add_arrows(Vector3(0,-3,0), "no correction\nlow rate integration\n30hz software LPF @ 1khz\n(ardupilot 2015-02-18)")
reference_attitude.add_arrows(Vector3(0,-1,0))
uncorrected_attitude_high.add_arrows(Vector3(0,-1,0), "no correction\nhigh rate integration")
reference_attitude.add_arrows(Vector3(0,1,0))
corrected_attitude.add_arrows(Vector3(0,1,0), "Tian et al\nseparate integration")
reference_attitude.add_arrows(Vector3(0,3,0))
corrected_attitude_combined.add_arrows(Vector3(0,3,0), "Tian et al\ncombined_integration\n(proposed patch)")
#scene.scale = (0.3,0.3,0.3)
scene.fov = 0.001
scene.forward = (-0.5, -0.5, -1)
coning_frequency_hz = 50
coning_magnitude_rad_s = 2
label_text = (
"coning motion frequency %f hz\n"
"coning motion peak amplitude %f deg/s\n"
"thin arrows are reference attitude"
) % (coning_frequency_hz, degrees(coning_magnitude_rad_s))
label(pos = vpy_vec(Vector3(0,0,2)), text=label_text)
t = 0.0
dt_10000 = 0.0001
dt_1000 = 0.001
dt_333 = 0.003
accumulated_delta_angle = Vector3(0,0,0)
last_gyro_10000 = Vector3(0,0,0)
last_gyro_1000 = Vector3(0,0,0)
last_filtered_gyro_333 = Vector3(0,0,0)
filtered_gyro = Vector3(0,0,0)
while True:
rate(66)
for i in range(5):
for j in range(3):
for k in range(10):
#vvvvvvvvvv 10 kHz vvvvvvvvvv#
#compute angular rate at current time
gyro = Vector3(sin(t*coning_frequency_hz*2*pi), cos(t*coning_frequency_hz*2*pi),0)*coning_magnitude_rad_s
#integrate reference attitude
reference_attitude.rotate((gyro+last_gyro_10000) * dt_10000 * 0.5)
#increment time
t += dt_10000
last_gyro_10000 = gyro
#vvvvvvvvvv 1 kHz vvvvvvvvvv#
#update filter for sim 1
filtered_gyro = filter2p_1khz_30hz_vector3(gyro, "1")
#update integrator for sim 2
accumulated_delta_angle += (gyro+last_gyro_1000) * dt_1000 * 0.5
#update integrator for sim 3
corrected_attitude_integrator.add_sample(gyro, dt_1000)
#update integrator for sim 4
corrected_attitude_integrator_combined.add_sample(gyro, dt_1000)
last_gyro_1000 = gyro
#vvvvvvvvvv 333 Hz vvvvvvvvvv#
#update sim 1 (leftmost)
uncorrected_attitude_low.rotate((filtered_gyro+last_filtered_gyro_333) * dt_333 * 0.5)
#update sim 2
uncorrected_attitude_high.rotate(accumulated_delta_angle)
accumulated_delta_angle.zero()
#update sim 3
corrected_attitude.rotate(corrected_attitude_integrator.pop_delta_angles())
#update sim 4 (rightmost)
corrected_attitude_combined.rotate(corrected_attitude_integrator_combined.pop_delta_angles())
last_filtered_gyro_333 = filtered_gyro
#vvvvvvvvvv 66 Hz vvvvvvvvvv#
reference_attitude.update_arrows()
corrected_attitude.update_arrows()
corrected_attitude_combined.update_arrows()
uncorrected_attitude_low.update_arrows()
uncorrected_attitude_high.update_arrows()
|
rwillmer/django | refs/heads/master | tests/model_meta/test_legacy.py | 199 | import warnings
from django import test
from django.contrib.contenttypes.fields import GenericRelation
from django.core.exceptions import FieldDoesNotExist
from django.db.models.fields import CharField, related
from django.utils.deprecation import RemovedInDjango110Warning
from .models import BasePerson, Person
from .results import TEST_RESULTS
class OptionsBaseTests(test.SimpleTestCase):
def _map_related_query_names(self, res):
return tuple((o.field.related_query_name(), m) for o, m in res)
def _map_names(self, res):
return tuple((f.name, m) for f, m in res)
class M2MTests(OptionsBaseTests):
def test_many_to_many_with_model(self):
for model, expected_result in TEST_RESULTS['many_to_many_with_model'].items():
with warnings.catch_warnings(record=True) as warning:
warnings.simplefilter("always")
models = [model for field, model in model._meta.get_m2m_with_model()]
self.assertEqual([RemovedInDjango110Warning], [w.message.__class__ for w in warning])
self.assertEqual(models, expected_result)
@test.ignore_warnings(category=RemovedInDjango110Warning)
class RelatedObjectsTests(OptionsBaseTests):
key_name = lambda self, r: r[0]
def test_related_objects(self):
result_key = 'get_all_related_objects_with_model_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model()
self.assertEqual(self._map_related_query_names(objects), expected)
def test_related_objects_local(self):
result_key = 'get_all_related_objects_with_model_local_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(local_only=True)
self.assertEqual(self._map_related_query_names(objects), expected)
def test_related_objects_include_hidden(self):
result_key = 'get_all_related_objects_with_model_hidden_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(include_hidden=True)
self.assertEqual(
sorted(self._map_names(objects), key=self.key_name),
sorted(expected, key=self.key_name)
)
def test_related_objects_include_hidden_local_only(self):
result_key = 'get_all_related_objects_with_model_hidden_local_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(
include_hidden=True, local_only=True)
self.assertEqual(
sorted(self._map_names(objects), key=self.key_name),
sorted(expected, key=self.key_name)
)
def test_related_objects_proxy(self):
result_key = 'get_all_related_objects_with_model_proxy_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(
include_proxy_eq=True)
self.assertEqual(self._map_related_query_names(objects), expected)
def test_related_objects_proxy_hidden(self):
result_key = 'get_all_related_objects_with_model_proxy_hidden_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_objects_with_model(
include_proxy_eq=True, include_hidden=True)
self.assertEqual(
sorted(self._map_names(objects), key=self.key_name),
sorted(expected, key=self.key_name)
)
@test.ignore_warnings(category=RemovedInDjango110Warning)
class RelatedM2MTests(OptionsBaseTests):
def test_related_m2m_with_model(self):
result_key = 'get_all_related_many_to_many_with_model_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_m2m_objects_with_model()
self.assertEqual(self._map_related_query_names(objects), expected)
def test_related_m2m_local_only(self):
result_key = 'get_all_related_many_to_many_local_legacy'
for model, expected in TEST_RESULTS[result_key].items():
objects = model._meta.get_all_related_many_to_many_objects(local_only=True)
self.assertEqual([o.field.related_query_name() for o in objects], expected)
def test_related_m2m_asymmetrical(self):
m2m = Person._meta.many_to_many
self.assertTrue('following_base' in [f.attname for f in m2m])
related_m2m = Person._meta.get_all_related_many_to_many_objects()
self.assertTrue('followers_base' in [o.field.related_query_name() for o in related_m2m])
def test_related_m2m_symmetrical(self):
m2m = Person._meta.many_to_many
self.assertTrue('friends_base' in [f.attname for f in m2m])
related_m2m = Person._meta.get_all_related_many_to_many_objects()
self.assertIn('friends_inherited_rel_+', [o.field.related_query_name() for o in related_m2m])
@test.ignore_warnings(category=RemovedInDjango110Warning)
class GetFieldByNameTests(OptionsBaseTests):
def test_get_data_field(self):
field_info = Person._meta.get_field_by_name('data_abstract')
self.assertEqual(field_info[1:], (BasePerson, True, False))
self.assertIsInstance(field_info[0], CharField)
def test_get_m2m_field(self):
field_info = Person._meta.get_field_by_name('m2m_base')
self.assertEqual(field_info[1:], (BasePerson, True, True))
self.assertIsInstance(field_info[0], related.ManyToManyField)
def test_get_related_object(self):
field_info = Person._meta.get_field_by_name('relating_baseperson')
self.assertEqual(field_info[1:], (BasePerson, False, False))
self.assertTrue(field_info[0].auto_created)
def test_get_related_m2m(self):
field_info = Person._meta.get_field_by_name('relating_people')
self.assertEqual(field_info[1:], (None, False, True))
self.assertTrue(field_info[0].auto_created)
def test_get_generic_relation(self):
field_info = Person._meta.get_field_by_name('generic_relation_base')
self.assertEqual(field_info[1:], (None, True, False))
self.assertIsInstance(field_info[0], GenericRelation)
def test_get_m2m_field_invalid(self):
with warnings.catch_warnings(record=True) as warning:
warnings.simplefilter("always")
self.assertRaises(
FieldDoesNotExist,
Person._meta.get_field,
**{'field_name': 'm2m_base', 'many_to_many': False}
)
self.assertEqual(Person._meta.get_field('m2m_base', many_to_many=True).name, 'm2m_base')
# 2 RemovedInDjango110Warning messages should be raised, one for each call of get_field()
# with the 'many_to_many' argument.
self.assertEqual(
[RemovedInDjango110Warning, RemovedInDjango110Warning],
[w.message.__class__ for w in warning]
)
@test.ignore_warnings(category=RemovedInDjango110Warning)
class GetAllFieldNamesTestCase(OptionsBaseTests):
def test_get_all_field_names(self):
for model, expected_names in TEST_RESULTS['get_all_field_names'].items():
objects = model._meta.get_all_field_names()
self.assertEqual(sorted(map(str, objects)), sorted(expected_names))
|
deepinsight/Deformable-ConvNets | refs/heads/master | carvn/fcn-xs/image_segmentaion.py | 15 | # pylint: skip-file
import numpy as np
import mxnet as mx
from PIL import Image
def getpallete(num_cls):
    # returns the colormap used to visualize the segmentation mask
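    # (this appears to be the standard PASCAL VOC colour map: bit i of the
    # label index is spread into bit (7-i) of the R, G and B channels)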
n = num_cls
pallete = [0]*(n*3)
for j in xrange(0,n):
lab = j
pallete[j*3+0] = 0
pallete[j*3+1] = 0
pallete[j*3+2] = 0
i = 0
while (lab > 0):
pallete[j*3+0] |= (((lab >> 0) & 1) << (7-i))
pallete[j*3+1] |= (((lab >> 1) & 1) << (7-i))
pallete[j*3+2] |= (((lab >> 2) & 1) << (7-i))
i = i + 1
lab >>= 3
return pallete
pallete = getpallete(256)
img = "./person_bicycle.jpg"
seg = img.replace("jpg", "png")
model_previx = "FCN8s_VGG16"
epoch = 19
ctx = mx.gpu(0)
def get_data(img_path):
"""get the (1, 3, h, w) np.array data for the img_path"""
mean = np.array([123.68, 116.779, 103.939]) # (R,G,B)
img = Image.open(img_path)
img = np.array(img, dtype=np.float32)
reshaped_mean = mean.reshape(1, 1, 3)
img = img - reshaped_mean
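    # reorder from (h, w, 3) to (3, h, w), then add a leading batch dimension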
img = np.swapaxes(img, 0, 2)
img = np.swapaxes(img, 1, 2)
img = np.expand_dims(img, axis=0)
return img
def main():
    fcnxs, fcnxs_args, fcnxs_auxs = mx.model.load_checkpoint(model_prefix, epoch)
fcnxs_args["data"] = mx.nd.array(get_data(img), ctx)
data_shape = fcnxs_args["data"].shape
label_shape = (1, data_shape[2]*data_shape[3])
fcnxs_args["softmax_label"] = mx.nd.empty(label_shape, ctx)
    executor = fcnxs.bind(ctx, fcnxs_args, args_grad=None, grad_req="null", aux_states=fcnxs_auxs)
    executor.forward(is_train=False)
    output = executor.outputs[0]
out_img = np.uint8(np.squeeze(output.asnumpy().argmax(axis=1)))
out_img = Image.fromarray(out_img)
out_img.putpalette(pallete)
out_img.save(seg)
if __name__ == "__main__":
main()
|
eepalms/gem5-newcache | refs/heads/master | ext/ply/test/lex_optimize3.py | 164 | # -----------------------------------------------------------------------------
# lex_optimize3.py
#
# Writes table in a subdirectory structure.
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
tokens = (
'NAME','NUMBER',
'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
'LPAREN','RPAREN',
)
# Tokens
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_EQUALS = r'='
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
def t_NUMBER(t):
r'\d+'
try:
t.value = int(t.value)
except ValueError:
print("Integer value too large %s" % t.value)
t.value = 0
return t
t_ignore = " \t"
def t_newline(t):
r'\n+'
t.lineno += t.value.count("\n")
def t_error(t):
print("Illegal character '%s'" % t.value[0])
t.lexer.skip(1)
# Build the lexer
lex.lex(optimize=1,lextab="lexdir.sub.calctab",outputdir="lexdir/sub")
lex.runmain(data="3+4")
|
Jonnymcc/ansible | refs/heads/devel | test/units/mock/generator.py | 97 | # Copyright 2016 Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from collections import Mapping
def make_method(func, args, kwargs):
def test_method(self):
func(self, *args, **kwargs)
# Format the argument string
arg_string = ', '.join(repr(a) for a in args)
kwarg_string = ', '.join('{0}={1}'.format(item[0], repr(item[1])) for item in kwargs.items())
arg_list = []
if arg_string:
arg_list.append(arg_string)
if kwarg_string:
arg_list.append(kwarg_string)
test_method.__name__ = 'test_{0}({1})'.format(func.__name__, ', '.join(arg_list))
return test_method
def add_method(func, *combined_args):
"""
Add a test case via a class decorator.
nose uses generators for this but doesn't work with unittest.TestCase
subclasses. So we have to write our own.
The first argument to this decorator is a test function. All subsequent
arguments are the arguments to create each generated test function with in
the following format:
Each set of arguments is a two-tuple. The first element is an iterable of
positional arguments. the second is a dict representing the kwargs.
"""
def wrapper(cls):
for combined_arg in combined_args:
if len(combined_arg) == 2:
args = combined_arg[0]
kwargs = combined_arg[1]
elif isinstance(combined_arg[0], Mapping):
args = []
kwargs = combined_arg[0]
else:
args = combined_arg[0]
kwargs = {}
test_method = make_method(func, args, kwargs)
setattr(cls, test_method.__name__, test_method)
return cls
return wrapper
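# Hypothetical usage sketch (`_probe` and `ProbeCase` are illustrative names,
# not part of this module): each two-tuple passed to add_method expands into
# one generated test_* method on the decorated TestCase subclass.
import unittest
def _probe(self, value, expected):
    self.assertEqual(value, expected)
@add_method(_probe, ((1, 1), {}), ((), {'value': 2, 'expected': 2}))
class ProbeCase(unittest.TestCase):
    pass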
|
home-assistant/home-assistant | refs/heads/dev | tests/components/airly/test_system_health.py | 6 | """Test Airly system health."""
import asyncio
from unittest.mock import Mock
from aiohttp import ClientError
from homeassistant.components.airly.const import DOMAIN
from homeassistant.setup import async_setup_component
from tests.common import get_system_health_info
async def test_airly_system_health(hass, aioclient_mock):
"""Test Airly system health."""
aioclient_mock.get("https://airapi.airly.eu/v2/", text="")
hass.config.components.add(DOMAIN)
assert await async_setup_component(hass, "system_health", {})
hass.data[DOMAIN] = {}
hass.data[DOMAIN]["0123xyz"] = Mock(
airly=Mock(
AIRLY_API_URL="https://airapi.airly.eu/v2/",
requests_remaining=42,
requests_per_day=100,
)
)
info = await get_system_health_info(hass, DOMAIN)
for key, val in info.items():
if asyncio.iscoroutine(val):
info[key] = await val
assert info["can_reach_server"] == "ok"
assert info["requests_remaining"] == 42
assert info["requests_per_day"] == 100
async def test_airly_system_health_fail(hass, aioclient_mock):
"""Test Airly system health."""
aioclient_mock.get("https://airapi.airly.eu/v2/", exc=ClientError)
hass.config.components.add(DOMAIN)
assert await async_setup_component(hass, "system_health", {})
hass.data[DOMAIN] = {}
hass.data[DOMAIN]["0123xyz"] = Mock(
airly=Mock(
AIRLY_API_URL="https://airapi.airly.eu/v2/",
requests_remaining=0,
requests_per_day=1000,
)
)
info = await get_system_health_info(hass, DOMAIN)
for key, val in info.items():
if asyncio.iscoroutine(val):
info[key] = await val
assert info["can_reach_server"] == {"type": "failed", "error": "unreachable"}
assert info["requests_remaining"] == 0
assert info["requests_per_day"] == 1000
|
alexryndin/ambari | refs/heads/branch-adh-1.5 | ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py | 1 | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import base64
import getpass
import os
import string
import subprocess
import sys
import tempfile
from tempfile import gettempdir
from resource_management import *
from utils import get_property_value
from ambari_commons.os_utils import remove_file
from ambari_agent import Constants
class KerberosScript(Script):
KRB5_REALM_PROPERTIES = [
'kdc',
'admin_server',
'default_domain',
'master_kdc'
]
KRB5_SECTION_NAMES = [
'libdefaults',
'logging',
'realms',
'domain_realm',
'capaths',
'ca_paths',
'appdefaults',
'plugins'
]
@staticmethod
def create_random_password():
import random
chars = string.digits + string.ascii_letters
return ''.join(random.choice(chars) for x in range(13))
@staticmethod
def write_conf_section(output_file, section_name, section_data):
if section_name is not None:
output_file.write('[%s]\n' % section_name)
if section_data is not None:
for key, value in section_data.iteritems():
output_file.write(" %s = %s\n" % (key, value))
@staticmethod
def _write_conf_realm(output_file, realm_name, realm_data):
""" Writes out realm details
Example:
EXAMPLE.COM = {
kdc = kerberos.example.com
admin_server = kerberos.example.com
}
"""
if realm_name is not None:
output_file.write(" %s = {\n" % realm_name)
if realm_data is not None:
for key, value in realm_data.iteritems():
if key in KerberosScript.KRB5_REALM_PROPERTIES:
output_file.write(" %s = %s\n" % (key, value))
output_file.write(" }\n")
@staticmethod
def write_conf_realms_section(output_file, section_name, realms_data):
if section_name is not None:
output_file.write('[%s]\n' % section_name)
if realms_data is not None:
for realm, realm_data in realms_data.iteritems():
KerberosScript._write_conf_realm(output_file, realm, realm_data)
output_file.write('\n')
@staticmethod
def write_krb5_conf():
import params
Directory(params.krb5_conf_dir,
owner='root',
create_parents = True,
group='root',
mode=0755
)
content = InlineTemplate(params.krb5_conf_template)
File(params.krb5_conf_path,
content=content,
owner='root',
group='root',
mode=0644
)
@staticmethod
def invoke_kadmin(query, admin_identity=None, default_realm=None):
"""
    Executes the kadmin or kadmin.local command (depending on whether admin_identity is set or not)
    and returns the command result code and standard out data.
:param query: the kadmin query to execute
:param admin_identity: the identity for the administrative user (optional)
:param default_realm: the default realm to assume
:return: return_code, out
"""
if (query is not None) and (len(query) > 0):
auth_principal = None
auth_keytab_file = None
if admin_identity is not None:
auth_principal = get_property_value(admin_identity, 'principal')
if auth_principal is None:
kadmin = 'kadmin.local'
credential = ''
else:
kadmin = 'kadmin -p "%s"' % auth_principal
auth_password = get_property_value(admin_identity, 'password')
if auth_password is None:
auth_keytab = get_property_value(admin_identity, 'keytab')
if auth_keytab is not None:
(fd, auth_keytab_file) = tempfile.mkstemp()
os.write(fd, base64.b64decode(auth_keytab))
os.close(fd)
credential = '-k -t %s' % auth_keytab_file
else:
credential = '-w "%s"' % auth_password
if (default_realm is not None) and (len(default_realm) > 0):
realm = '-r %s' % default_realm
else:
realm = ''
try:
command = '%s %s %s -q "%s"' % (kadmin, credential, realm, query.replace('"', '\\"'))
return shell.checked_call(command)
except:
raise
finally:
if auth_keytab_file is not None:
os.remove(auth_keytab_file)
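  # Hypothetical call sketch (the identity dict shape mirrors the
  # get_property_value lookups above):
  #   rc, out = KerberosScript.invoke_kadmin(
  #       'listprincs', {'principal': 'admin/admin', 'password': 'secret'},
  #       'EXAMPLE.COM')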
@staticmethod
def create_keytab_file(principal, path, auth_identity=None):
success = False
if (principal is not None) and (len(principal) > 0):
if (auth_identity is None) or (len(auth_identity) == 0):
norandkey = '-norandkey'
else:
norandkey = ''
if (path is not None) and (len(path) > 0):
keytab_file = '-k %s' % path
else:
keytab_file = ''
try:
result_code, output = KerberosScript.invoke_kadmin(
'ktadd %s %s %s' % (keytab_file, norandkey, principal),
auth_identity)
success = (result_code == 0)
except:
raise Fail("Failed to create keytab for principal: %s (in %s)" % (principal, path))
return success
@staticmethod
def create_keytab(principal, auth_identity=None):
keytab = None
(fd, temp_path) = tempfile.mkstemp()
os.remove(temp_path)
try:
if KerberosScript.create_keytab_file(principal, temp_path, auth_identity):
with open(temp_path, 'r') as f:
keytab = base64.b64encode(f.read())
finally:
if os.path.isfile(temp_path):
os.remove(temp_path)
return keytab
@staticmethod
def principal_exists(identity, auth_identity=None):
exists = False
if identity is not None:
principal = get_property_value(identity, 'principal')
if (principal is not None) and (len(principal) > 0):
try:
result_code, output = KerberosScript.invoke_kadmin('getprinc %s' % principal,
auth_identity)
exists = (output is not None) and (("Principal: %s" % principal) in output)
except:
raise Fail("Failed to determine if principal exists: %s" % principal)
return exists
@staticmethod
def change_principal_password(identity, auth_identity=None):
success = False
if identity is not None:
principal = get_property_value(identity, 'principal')
if (principal is not None) and (len(principal) > 0):
password = get_property_value(identity, 'password')
if password is None:
credentials = '-randkey'
else:
credentials = '-pw "%s"' % password
try:
result_code, output = KerberosScript.invoke_kadmin(
'change_password %s %s' % (credentials, principal),
auth_identity)
success = (result_code == 0)
except:
raise Fail("Failed to create principal: %s" % principal)
return success
@staticmethod
def create_principal(identity, auth_identity=None):
success = False
if identity is not None:
principal = get_property_value(identity, 'principal')
if (principal is not None) and (len(principal) > 0):
password = get_property_value(identity, 'password')
if password is None:
credentials = '-randkey'
else:
credentials = '-pw "%s"' % password
try:
result_code, out = KerberosScript.invoke_kadmin(
'addprinc %s %s' % (credentials, principal),
auth_identity)
success = (result_code == 0)
except:
raise Fail("Failed to create principal: %s" % principal)
return success
@staticmethod
def clear_tmp_cache():
tmp_dir = Constants.AGENT_TMP_DIR
if tmp_dir is None:
tmp_dir = gettempdir()
curl_krb_cache_path = os.path.join(tmp_dir, "curl_krb_cache")
Directory(curl_krb_cache_path, action="delete")
@staticmethod
def create_principals(identities, auth_identity=None):
if identities is not None:
for identity in identities:
KerberosScript.create_principal(identity, auth_identity)
@staticmethod
def create_or_update_administrator_identity():
import params
if params.realm is not None:
admin_identity = params.get_property_value(params.realm, 'admin_identity')
if KerberosScript.principal_exists(admin_identity):
KerberosScript.change_principal_password(admin_identity)
else:
KerberosScript.create_principal(admin_identity)
@staticmethod
def test_kinit(identity, user=None):
principal = get_property_value(identity, 'principal')
kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
kdestroy_path_local = functions.get_kdestroy_path(default('/configurations/kerberos-env/executable_search_paths', None))
if principal is not None:
keytab_file = get_property_value(identity, 'keytab_file')
keytab = get_property_value(identity, 'keytab')
password = get_property_value(identity, 'password')
# If a test keytab file is available, simply use it
if (keytab_file is not None) and (os.path.isfile(keytab_file)):
command = '%s -k -t %s %s' % (kinit_path_local, keytab_file, principal)
Execute(command,
user = user,
)
return shell.checked_call(kdestroy_path_local)
# If base64-encoded test keytab data is available; then decode it, write it to a temporary file
# use it, and then remove the temporary file
elif keytab is not None:
(fd, test_keytab_file) = tempfile.mkstemp()
os.write(fd, base64.b64decode(keytab))
os.close(fd)
try:
command = '%s -k -t %s %s' % (kinit_path_local, test_keytab_file, principal)
Execute(command,
user = user,
)
return shell.checked_call(kdestroy_path_local)
except:
raise
finally:
if test_keytab_file is not None:
os.remove(test_keytab_file)
# If no keytab data is available and a password was supplied, simply use it.
elif password is not None:
process = subprocess.Popen([kinit_path_local, principal], stdin=subprocess.PIPE)
stdout, stderr = process.communicate(password)
if process.returncode:
err_msg = Logger.filter_text("Execution of kinit returned %d. %s" % (process.returncode, stderr))
raise Fail(err_msg)
else:
return shell.checked_call(kdestroy_path_local)
else:
return 0, ''
else:
return 0, ''
def write_keytab_file(self):
import params
import stat
if params.kerberos_command_params is not None:
for item in params.kerberos_command_params:
keytab_content_base64 = get_property_value(item, 'keytab_content_base64')
if (keytab_content_base64 is not None) and (len(keytab_content_base64) > 0):
keytab_file_path = get_property_value(item, 'keytab_file_path')
if (keytab_file_path is not None) and (len(keytab_file_path) > 0):
head, tail = os.path.split(keytab_file_path)
if head:
Directory(head, create_parents = True, mode=0755, owner="root", group="root")
owner = get_property_value(item, 'keytab_file_owner_name')
if not owner:
owner = getpass.getuser()
owner_access = get_property_value(item, 'keytab_file_owner_access')
group = get_property_value(item, 'keytab_file_group_name')
group_access = get_property_value(item, 'keytab_file_group_access')
mode = 0
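          # map the requested owner/group access strings ('r'/'rw') onto
          # POSIX mode bits; the owner always gets at least read access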
if owner_access == 'rw':
mode |= stat.S_IREAD | stat.S_IWRITE
else:
mode |= stat.S_IREAD
if group_access == 'rw':
mode |= stat.S_IRGRP | stat.S_IWGRP
elif group_access == 'r':
mode |= stat.S_IRGRP
keytab_content = base64.b64decode(keytab_content_base64)
          # wrap the keytab bytes in a lambda so the File resource is logged
          # without echoing the raw keytab content in command output
def make_lambda(data):
return lambda: data
File(keytab_file_path,
content=make_lambda(keytab_content),
mode=mode,
owner=owner,
group=group)
principal = get_property_value(item, 'principal')
if principal is not None:
curr_content = Script.structuredOut
if "keytabs" not in curr_content:
curr_content['keytabs'] = {}
curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = keytab_file_path
self.put_structured_out(curr_content)
def delete_keytab_file(self):
import params
if params.kerberos_command_params is not None:
for item in params.kerberos_command_params:
keytab_file_path = get_property_value(item, 'keytab_file_path')
if (keytab_file_path is not None) and (len(keytab_file_path) > 0):
# Delete the keytab file
File(keytab_file_path, action="delete")
principal = get_property_value(item, 'principal')
if principal is not None:
curr_content = Script.structuredOut
if "keytabs" not in curr_content:
curr_content['keytabs'] = {}
curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = '_REMOVED_'
self.put_structured_out(curr_content) |
ostree/thefuck | refs/heads/master | tests/rules/test_python_command.py | 16 | from thefuck.rules.python_command import match, get_new_command
from tests.utils import Command
def test_match():
assert match(Command('temp.py', stderr='Permission denied'))
assert not match(Command())
def test_get_new_command():
assert get_new_command(Command('./test_sudo.py'))\
== 'python ./test_sudo.py'
|
RyanDJLee/pyta | refs/heads/master | nodes/Dict.py | 1 | """
Dict astroid node
This node represents the Python dictionary objects.
Attributes:
- items (List[Tuple[Node, Node]])
- Contains the tuple of key and value pair, where key and value are
nodes.
Example:
- items -> [('b', 1)]
Type-checking:
Type is Dict[K, V], where K is the most specific class that every key
of the dictionary is an instance of, and V is the most specific class that
every value of the dictionary is an instance of.
"""
a = {'b': 1}
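# Hypothetical illustration of the rule above: the keys are all str, but the
# values 1 and 'x' share only `object`, so this infers as Dict[str, object].
b = {'key1': 1, 'key2': 'x'}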
|
tafaRU/account-financial-tools | refs/heads/8.0 | __unported__/account_invoice_currency/__openerp__.py | 8 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP - Account invoice currency
# Copyright (C) 2004-2011 Zikzakmedia S.L. (http://zikzakmedia.com)
# Jordi Esteve <[email protected]>
# Copyright (c) 2013 Joaquin Gutierrez (http://www.gutierrezweb.es)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Company currency in invoices",
'version': "1.0",
'author': "Zikzakmedia SL",
'website': "http://www.zikzakmedia.com",
'category': "Localisation / Accounting",
'contributors': ['Joaquín Gutierrez'],
"description": """
This module adds functional fields to show invoices in the company currency
===========================================================================
Adds the Amount Untaxed, Amount Tax and Amount Total invoice
fields expressed in the company currency.
These fields are shown in the "Other information" tab of the invoice form.
""",
'license': "AGPL-3",
'depends': ["account"],
'data': [
"account_invoice_view.xml"
],
'installable': False,
'active': False,
}
|
75651/kbengine_cloud | refs/heads/master | kbe/src/lib/python/Lib/test/test_poplib.py | 72 | """Test script for poplib module."""
# Modified by Giampaolo Rodola' to give poplib.POP3 and poplib.POP3_SSL
# a real test suite
import poplib
import asyncore
import asynchat
import socket
import os
import time
import errno
from unittest import TestCase, skipUnless
from test import support as test_support
threading = test_support.import_module('threading')
HOST = test_support.HOST
PORT = 0
SUPPORTS_SSL = False
if hasattr(poplib, 'POP3_SSL'):
import ssl
from ssl import HAS_SNI
SUPPORTS_SSL = True
CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "keycert3.pem")
CAFILE = os.path.join(os.path.dirname(__file__) or os.curdir, "pycacert.pem")
else:
HAS_SNI = False
requires_ssl = skipUnless(SUPPORTS_SSL, 'SSL not supported')
# the dummy data returned by server when LIST and RETR commands are issued
LIST_RESP = b'1 1\r\n2 2\r\n3 3\r\n4 4\r\n5 5\r\n.\r\n'
RETR_RESP = b"""From: [email protected]\
\r\nContent-Type: text/plain\r\n\
MIME-Version: 1.0\r\n\
Subject: Dummy\r\n\
\r\n\
line1\r\n\
line2\r\n\
line3\r\n\
.\r\n"""
class DummyPOP3Handler(asynchat.async_chat):
CAPAS = {'UIDL': [], 'IMPLEMENTATION': ['python-testlib-pop-server']}
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
self.tls_active = False
self.tls_starting = False
def collect_incoming_data(self, data):
self.in_buffer.append(data)
def found_terminator(self):
line = b''.join(self.in_buffer)
line = str(line, 'ISO-8859-1')
self.in_buffer = []
cmd = line.split(' ')[0].lower()
space = line.find(' ')
if space != -1:
arg = line[space + 1:]
else:
arg = ""
if hasattr(self, 'cmd_' + cmd):
method = getattr(self, 'cmd_' + cmd)
method(arg)
else:
self.push('-ERR unrecognized POP3 command "%s".' %cmd)
def handle_error(self):
raise
def push(self, data):
asynchat.async_chat.push(self, data.encode("ISO-8859-1") + b'\r\n')
def cmd_echo(self, arg):
# sends back the received string (used by the test suite)
self.push(arg)
def cmd_user(self, arg):
if arg != "guido":
self.push("-ERR no such user")
self.push('+OK password required')
def cmd_pass(self, arg):
if arg != "python":
self.push("-ERR wrong password")
self.push('+OK 10 messages')
def cmd_stat(self, arg):
self.push('+OK 10 100')
def cmd_list(self, arg):
if arg:
self.push('+OK %s %s' % (arg, arg))
else:
self.push('+OK')
asynchat.async_chat.push(self, LIST_RESP)
cmd_uidl = cmd_list
def cmd_retr(self, arg):
self.push('+OK %s bytes' %len(RETR_RESP))
asynchat.async_chat.push(self, RETR_RESP)
cmd_top = cmd_retr
def cmd_dele(self, arg):
self.push('+OK message marked for deletion.')
def cmd_noop(self, arg):
self.push('+OK done nothing.')
def cmd_rpop(self, arg):
self.push('+OK done nothing.')
def cmd_apop(self, arg):
self.push('+OK done nothing.')
def cmd_quit(self, arg):
self.push('+OK closing.')
self.close_when_done()
def _get_capas(self):
_capas = dict(self.CAPAS)
if not self.tls_active and SUPPORTS_SSL:
_capas['STLS'] = []
return _capas
def cmd_capa(self, arg):
self.push('+OK Capability list follows')
if self._get_capas():
for cap, params in self._get_capas().items():
_ln = [cap]
if params:
_ln.extend(params)
self.push(' '.join(_ln))
self.push('.')
if SUPPORTS_SSL:
def cmd_stls(self, arg):
if self.tls_active is False:
self.push('+OK Begin TLS negotiation')
tls_sock = ssl.wrap_socket(self.socket, certfile=CERTFILE,
server_side=True,
do_handshake_on_connect=False,
suppress_ragged_eofs=False)
self.del_channel()
self.set_socket(tls_sock)
self.tls_active = True
self.tls_starting = True
self.in_buffer = []
self._do_tls_handshake()
else:
self.push('-ERR Command not permitted when TLS active')
def _do_tls_handshake(self):
try:
self.socket.do_handshake()
except ssl.SSLError as err:
if err.args[0] in (ssl.SSL_ERROR_WANT_READ,
ssl.SSL_ERROR_WANT_WRITE):
return
elif err.args[0] == ssl.SSL_ERROR_EOF:
return self.handle_close()
raise
except OSError as err:
if err.args[0] == errno.ECONNABORTED:
return self.handle_close()
else:
self.tls_active = True
self.tls_starting = False
def handle_read(self):
if self.tls_starting:
self._do_tls_handshake()
else:
try:
asynchat.async_chat.handle_read(self)
except ssl.SSLEOFError:
self.handle_close()
class DummyPOP3Server(asyncore.dispatcher, threading.Thread):
handler = DummyPOP3Handler
def __init__(self, address, af=socket.AF_INET):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(af, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.active = False
self.active_lock = threading.Lock()
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
self.active_lock.acquire()
asyncore.loop(timeout=0.1, count=1)
self.active_lock.release()
asyncore.close_all(ignore_all=True)
def stop(self):
assert self.active
self.active = False
self.join()
def handle_accepted(self, conn, addr):
self.handler_instance = self.handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
class TestPOP3Class(TestCase):
def assertOK(self, resp):
self.assertTrue(resp.startswith(b"+OK"))
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
def tearDown(self):
self.client.close()
self.server.stop()
def test_getwelcome(self):
self.assertEqual(self.client.getwelcome(),
b'+OK dummy pop3 server ready. <timestamp>')
def test_exceptions(self):
self.assertRaises(poplib.error_proto, self.client._shortcmd, 'echo -err')
def test_user(self):
self.assertOK(self.client.user('guido'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_pass_(self):
self.assertOK(self.client.pass_('python'))
self.assertRaises(poplib.error_proto, self.client.user, 'invalid')
def test_stat(self):
self.assertEqual(self.client.stat(), (10, 100))
def test_list(self):
self.assertEqual(self.client.list()[1:],
([b'1 1', b'2 2', b'3 3', b'4 4', b'5 5'],
25))
self.assertTrue(self.client.list('1').endswith(b"OK 1 1"))
def test_retr(self):
expected = (b'+OK 116 bytes',
[b'From: [email protected]', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy',
b'', b'line1', b'line2', b'line3'],
113)
foo = self.client.retr('foo')
self.assertEqual(foo, expected)
def test_too_long_lines(self):
self.assertRaises(poplib.error_proto, self.client._shortcmd,
'echo +%s' % ((poplib._MAXLINE + 10) * 'a'))
def test_dele(self):
self.assertOK(self.client.dele('foo'))
def test_noop(self):
self.assertOK(self.client.noop())
def test_rpop(self):
self.assertOK(self.client.rpop('foo'))
def test_apop(self):
self.assertOK(self.client.apop('foo', 'dummypassword'))
def test_top(self):
expected = (b'+OK 116 bytes',
[b'From: [email protected]', b'Content-Type: text/plain',
b'MIME-Version: 1.0', b'Subject: Dummy', b'',
b'line1', b'line2', b'line3'],
113)
self.assertEqual(self.client.top(1, 1), expected)
def test_uidl(self):
self.client.uidl()
self.client.uidl('foo')
def test_capa(self):
capa = self.client.capa()
self.assertTrue('IMPLEMENTATION' in capa.keys())
def test_quit(self):
resp = self.client.quit()
self.assertTrue(resp)
self.assertIsNone(self.client.sock)
self.assertIsNone(self.client.file)
@requires_ssl
def test_stls_capa(self):
capa = self.client.capa()
self.assertTrue('STLS' in capa.keys())
@requires_ssl
def test_stls(self):
expected = b'+OK Begin TLS negotiation'
resp = self.client.stls()
self.assertEqual(resp, expected)
@requires_ssl
@skipUnless(HAS_SNI, 'No SNI support in ssl module')
def test_stls_context(self):
expected = b'+OK Begin TLS negotiation'
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_verify_locations(CAFILE)
ctx.verify_mode = ssl.CERT_REQUIRED
ctx.check_hostname = True
with self.assertRaises(ssl.CertificateError):
resp = self.client.stls(context=ctx)
self.client = poplib.POP3("localhost", self.server.port, timeout=3)
resp = self.client.stls(context=ctx)
self.assertEqual(resp, expected)
if SUPPORTS_SSL:
class DummyPOP3_SSLHandler(DummyPOP3Handler):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
ssl_socket = ssl.wrap_socket(self.socket, certfile=CERTFILE,
server_side=True,
do_handshake_on_connect=False)
self.del_channel()
self.set_socket(ssl_socket)
# Must try handshake before calling push()
self.tls_active = True
self.tls_starting = True
self._do_tls_handshake()
self.set_terminator(b"\r\n")
self.in_buffer = []
self.push('+OK dummy pop3 server ready. <timestamp>')
@requires_ssl
class TestPOP3_SSLClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3_SSL
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.handler = DummyPOP3_SSLHandler
self.server.start()
self.client = poplib.POP3_SSL(self.server.host, self.server.port)
def test__all__(self):
self.assertIn('POP3_SSL', poplib.__all__)
def test_context(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, certfile=CERTFILE, context=ctx)
self.assertRaises(ValueError, poplib.POP3_SSL, self.server.host,
self.server.port, keyfile=CERTFILE,
certfile=CERTFILE, context=ctx)
self.client.quit()
self.client = poplib.POP3_SSL(self.server.host, self.server.port,
context=ctx)
self.assertIsInstance(self.client.sock, ssl.SSLSocket)
self.assertIs(self.client.sock.context, ctx)
self.assertTrue(self.client.noop().startswith(b'+OK'))
def test_stls(self):
self.assertRaises(poplib.error_proto, self.client.stls)
test_stls_context = test_stls
def test_stls_capa(self):
capa = self.client.capa()
self.assertFalse('STLS' in capa.keys())
@requires_ssl
class TestPOP3_TLSClass(TestPOP3Class):
# repeat previous tests by using poplib.POP3.stls()
def setUp(self):
self.server = DummyPOP3Server((HOST, PORT))
self.server.start()
self.client = poplib.POP3(self.server.host, self.server.port, timeout=3)
self.client.stls()
def tearDown(self):
if self.client.file is not None and self.client.sock is not None:
try:
self.client.quit()
except poplib.error_proto:
# happens in the test_too_long_lines case; the overlong
# response will be treated as response to QUIT and raise
# this exception
self.client.close()
self.server.stop()
def test_stls(self):
self.assertRaises(poplib.error_proto, self.client.stls)
test_stls_context = test_stls
def test_stls_capa(self):
capa = self.client.capa()
self.assertFalse(b'STLS' in capa.keys())
class TestTimeouts(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(60) # Safety net. Look issue 11812
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=self.server, args=(self.evt,self.sock))
self.thread.setDaemon(True)
self.thread.start()
self.evt.wait()
def tearDown(self):
self.thread.join()
del self.thread # Clear out any dangling Thread objects.
def server(self, evt, serv):
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
conn.send(b"+ Hola mundo\n")
conn.close()
except socket.timeout:
pass
finally:
serv.close()
def testTimeoutDefault(self):
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def testTimeoutNone(self):
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
pop = poplib.POP3(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(pop.sock.gettimeout())
pop.sock.close()
def testTimeoutValue(self):
pop = poplib.POP3(HOST, self.port, timeout=30)
self.assertEqual(pop.sock.gettimeout(), 30)
pop.sock.close()
def test_main():
tests = [TestPOP3Class, TestTimeouts,
TestPOP3_SSLClass, TestPOP3_TLSClass]
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
test_support.threading_cleanup(*thread_info)
if __name__ == '__main__':
test_main()
|
imply/chuu | refs/heads/master | tools/metrics/histograms/validate_format.py | 33 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Verifies that the histograms XML file is well-formatted."""
import extract_histograms
def main():
# This will raise an exception if the file is not well-formatted.
histograms = extract_histograms.ExtractHistograms('histograms.xml')
if __name__ == '__main__':
main()
|