repo_name (stringlengths 5 to 100) | path (stringlengths 4 to 299) | copies (stringclasses, 990 values) | size (stringlengths 4 to 7) | content (stringlengths 666 to 1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17 to 100) | line_max (int64, 7 to 1k) | alpha_frac (float64, 0.25 to 0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
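Each row below holds one source file plus the metadata columns above. A minimal sketch of how such rows could be loaded and filtered with pandas; the Parquet file name and the loading method are assumptions for illustration, not part of the dump itself:
import pandas as pd
# Hypothetical export of this table; column names match the schema above.
df = pd.read_parquet("code_files.parquet")
# Keep only files that are not auto-generated and inspect the first repo/path pair.
manual = df[~df["autogenerated"]]
print(manual.iloc[0]["repo_name"], manual.iloc[0]["path"])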
ml-lab/neon | neon/diagnostics/visualize_rnn.py | 4 | 6174 |
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Visualization for recurrent neural networks
"""
import numpy as np
from neon.util.compat import range
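# Illustrative usage (not part of the original file); the method below is the
# one defined in this module and would typically be called after each epoch:
#   viz = VisualizeRNN()
#   viz.plot_weights(weights_in, weights_rec, weights_out)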
class VisualizeRNN(object):
"""
    Visualizing weight matrices during training
"""
def __init__(self):
import matplotlib.pyplot
self.plt = matplotlib.pyplot
self.plt.interactive(1)
def plot_weights(self, weights_in, weights_rec, weights_out):
"""
        Visualize the three weight matrices after every epoch. Serves to
        check that weights are structured, not exploding, and get updated.
"""
self.plt.figure(2)
self.plt.clf()
self.plt.subplot(1, 3, 1)
self.plt.imshow(weights_in.T, vmin=-1, vmax=1, interpolation='nearest')
self.plt.title('input.T')
self.plt.subplot(1, 3, 2)
self.plt.imshow(weights_rec, vmin=-1, vmax=1, interpolation='nearest')
self.plt.title('recurrent')
self.plt.subplot(1, 3, 3)
self.plt.imshow(weights_out, vmin=-1, vmax=1, interpolation='nearest')
self.plt.title('output')
self.plt.colorbar()
self.plt.draw()
self.plt.show()
def plot_lstm_wts(self, lstm_layer, scale=1, fig=4):
"""
        Visualize the LSTM weight matrices after every epoch. Serves to
        check that weights are structured, not exploding, and get updated.
"""
self.plt.figure(fig)
self.plt.clf()
pltidx = 1
for lbl, wts in zip(lstm_layer.param_names, lstm_layer.params[:4]):
self.plt.subplot(2, 4, pltidx)
self.plt.imshow(wts.asnumpyarray().T, vmin=-scale, vmax=scale,
interpolation='nearest')
self.plt.title(lbl + ' Wx.T')
pltidx += 1
for lbl, wts, bs in zip(lstm_layer.param_names,
lstm_layer.params[4:8],
lstm_layer.params[8:12]):
self.plt.subplot(2, 4, pltidx)
self.plt.imshow(np.hstack((wts.asnumpyarray(),
bs.asnumpyarray(),
bs.asnumpyarray())).T,
vmin=-scale, vmax=scale, interpolation='nearest')
self.plt.title(lbl + ' Wh.T')
pltidx += 1
self.plt.draw()
self.plt.show()
def plot_lstm_acts(self, lstm_layer, scale=1, fig=4):
acts_lbl = ['i_t', 'f_t', 'o_t', 'g_t', 'net_i', 'c_t', 'c_t', 'c_phi']
acts_stp = [0, 0, 0, 1, 0, 0, 1, 1]
self.plt.figure(fig)
self.plt.clf()
for idx, lbl in enumerate(acts_lbl):
act_tsr = getattr(lstm_layer, lbl)[acts_stp[idx]]
self.plt.subplot(2, 4, idx+1)
self.plt.imshow(act_tsr.asnumpyarray().T,
vmin=-scale, vmax=scale, interpolation='nearest')
self.plt.title(lbl + '[' + str(acts_stp[idx]) + '].T')
self.plt.draw()
self.plt.show()
def plot_error(self, suberror_list, error_list):
self.plt.figure(1)
self.plt.clf()
self.plt.plot(np.arange(len(suberror_list)) /
np.float(len(suberror_list)) *
len(error_list), suberror_list)
self.plt.plot(error_list, linewidth=2)
self.plt.ylim((min(suberror_list), max(error_list)))
self.plt.draw()
self.plt.show()
def plot_activations(self, pre1, out1, pre2, out2, targets):
"""
        Loop over tau unrolling steps; at each time step, show the pre-acts
and outputs of the recurrent layer and output layer. Note that the
pre-acts are actually the g', so if the activation is linear it will
be one.
"""
self.plt.figure(3)
self.plt.clf()
for i in range(len(pre1)): # loop over unrolling
self.plt.subplot(len(pre1), 5, 5 * i + 1)
self.plt.imshow(pre1[i].asnumpyarray(), vmin=-1, vmax=1,
interpolation='nearest')
if i == 0:
self.plt.title('pre1 or g\'1')
self.plt.subplot(len(pre1), 5, 5 * i + 2)
self.plt.imshow(out1[i].asnumpyarray(), vmin=-1, vmax=1,
interpolation='nearest')
if i == 0:
self.plt.title('out1')
self.plt.subplot(len(pre1), 5, 5 * i + 3)
self.plt.imshow(pre2[i].asnumpyarray(), vmin=-1, vmax=1,
interpolation='nearest')
if i == 0:
self.plt.title('pre2 or g\'2')
self.plt.subplot(len(pre1), 5, 5 * i + 4)
self.plt.imshow(out2[i].asnumpyarray(), vmin=-1, vmax=1,
interpolation='nearest')
if i == 0:
self.plt.title('out2')
self.plt.subplot(len(pre1), 5, 5 * i + 5)
self.plt.imshow(targets[i].asnumpyarray(),
vmin=-1, vmax=1, interpolation='nearest')
if i == 0:
self.plt.title('target')
self.plt.draw()
self.plt.show()
def print_text(self, inputs, outputs):
"""
Moved this here so it's legal to use numpy.
"""
print("Prediction inputs")
print(np.argmax(inputs, 0).asnumpyarray().astype(np.int8).view('c'))
print("Prediction outputs")
print(np.argmax(outputs, 0).asnumpyarray().astype(np.int8).view('c'))
| apache-2.0 | 4,156,699,950,637,713,000 | 39.090909 | 79 | 0.535471 | false |
takis/django | tests/resolve_url/tests.py | 199 | 3167 |
from __future__ import unicode_literals
from django.contrib.auth.views import logout
from django.core.urlresolvers import NoReverseMatch, reverse_lazy
from django.shortcuts import resolve_url
from django.test import SimpleTestCase, ignore_warnings, override_settings
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
from .models import UnimportantThing
@override_settings(ROOT_URLCONF='resolve_url.urls')
class ResolveUrlTests(SimpleTestCase):
"""
Tests for the ``resolve_url`` function.
"""
def test_url_path(self):
"""
Tests that passing a URL path to ``resolve_url`` will result in the
same url.
"""
self.assertEqual('/something/', resolve_url('/something/'))
def test_relative_path(self):
"""
Tests that passing a relative URL path to ``resolve_url`` will result
in the same url.
"""
self.assertEqual('../', resolve_url('../'))
self.assertEqual('../relative/', resolve_url('../relative/'))
self.assertEqual('./', resolve_url('./'))
self.assertEqual('./relative/', resolve_url('./relative/'))
def test_full_url(self):
"""
Tests that passing a full URL to ``resolve_url`` will result in the
same url.
"""
url = 'http://example.com/'
self.assertEqual(url, resolve_url(url))
def test_model(self):
"""
Tests that passing a model to ``resolve_url`` will result in
``get_absolute_url`` being called on that model instance.
"""
m = UnimportantThing(importance=1)
self.assertEqual(m.get_absolute_url(), resolve_url(m))
def test_view_function(self):
"""
Tests that passing a view name to ``resolve_url`` will result in the
URL path mapping to that view name.
"""
resolved_url = resolve_url(logout)
self.assertEqual('/accounts/logout/', resolved_url)
def test_lazy_reverse(self):
"""
Tests that passing the result of reverse_lazy is resolved to a real URL
string.
"""
resolved_url = resolve_url(reverse_lazy('logout'))
self.assertIsInstance(resolved_url, six.text_type)
self.assertEqual('/accounts/logout/', resolved_url)
@ignore_warnings(category=RemovedInDjango110Warning)
def test_valid_view_name(self):
"""
Tests that passing a view function to ``resolve_url`` will result in
the URL path mapping to that view.
"""
resolved_url = resolve_url('django.contrib.auth.views.logout')
self.assertEqual('/accounts/logout/', resolved_url)
def test_domain(self):
"""
Tests that passing a domain to ``resolve_url`` returns the same domain.
"""
self.assertEqual(resolve_url('example.com'), 'example.com')
def test_non_view_callable_raises_no_reverse_match(self):
"""
Tests that passing a non-view callable into ``resolve_url`` raises a
``NoReverseMatch`` exception.
"""
with self.assertRaises(NoReverseMatch):
resolve_url(lambda: 'asdf')
| bsd-3-clause | -2,160,599,311,070,732,500 | 34.188889 | 79 | 0.628355 | false |
stimpsonsg/moose | gui/gui/CommentEditor.py | 8 | 1534 |
#!/usr/bin/python
try:
from PyQt4 import QtCore, QtGui
QtCore.Signal = QtCore.pyqtSignal
QtCore.Slot = QtCore.pyqtSlot
except ImportError:
try:
from PySide import QtCore, QtGui
QtCore.QString = str
except ImportError:
raise ImportError("Cannot load either PyQt or PySide")
from GenSyntax import *
from ParamTable import *
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class CommentEditor(QtGui.QDialog):
def __init__(self, item, win_parent=None):
QtGui.QDialog.__init__(self, win_parent)
self.item = item
self.layout = QtGui.QVBoxLayout()
self.setLayout(self.layout)
self.edit_box = QtGui.QTextEdit()
try:
self.edit_box.insertPlainText(item.comment)
except:
pass
self.layout.addWidget(self.edit_box)
self.button_layout = QHBoxLayout()
self.apply_button = QPushButton('Apply')
self.cancel_button = QPushButton('Cancel')
self.button_layout.addWidget(self.apply_button)
self.button_layout.addWidget(self.cancel_button)
QtCore.QObject.connect(self.apply_button, QtCore.SIGNAL("clicked()"), self.accept_text)
QtCore.QObject.connect(self.cancel_button, QtCore.SIGNAL("clicked()"), self.reject)
self.layout.addLayout(self.button_layout)
self.resize(700,500)
def accept_text(self):
self.item.comment = str(self.edit_box.toPlainText())
self.accept()
| lgpl-2.1 | 2,651,596,455,769,878,000 | 26.392857 | 95 | 0.65189 | false |
ratschlab/RGAN | eICU_tstr_evaluation.py | 1 | 8268 |
import data_utils
import pandas as pd
import numpy as np
import tensorflow as tf
import math, random, itertools
import pickle
import time
import json
import os
import math
import data_utils
import pickle
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score, precision_score, recall_score, roc_curve, auc, precision_recall_curve
import copy
from scipy.stats import sem
print ("Starting TSTR experiment.")
print ("loading data...")
samples, labels = data_utils.eICU_task()
train_seqs = samples['train'].reshape(-1,16,4)
vali_seqs = samples['vali'].reshape(-1,16,4)
test_seqs = samples['test'].reshape(-1,16,4)
train_targets = labels['train']
vali_targets = labels['vali']
test_targets = labels['test']
train_seqs, vali_seqs, test_seqs = data_utils.scale_data(train_seqs, vali_seqs, test_seqs)
print ("data loaded.")
# iterate over all dataset versions generated by the 5 GAN training runs
aurocs_all_runs = []
auprcs_all_runs = []
for oo in range(5):
print (oo)
# find the best "dataset epoch", meaning the GAN epoch that generated the dataset
# validation is only done in some of the tasks, and the others are considered unknown
# (use validation set to pick best GAN epoch, then get result on test set)
vali_seqs_r = vali_seqs.reshape((vali_seqs.shape[0], -1))
test_seqs_r = test_seqs.reshape((test_seqs.shape[0], -1))
all_aurocs_exp = []
all_auprcs_exp = []
for nn in np.arange(50,1050,50):
with open('./synthetic_eICU_datasets/samples_eICU_cdgan_synthetic_dataset_r' + str(oo) + '_' + str(nn) + '.pk', 'rb') as f:
synth_data = pickle.load(file=f)
with open('./synthetic_eICU_datasets/labels_eICU_cdgan_synthetic_dataset_r' + str(oo) + '_' + str(nn) + '.pk', 'rb') as f:
synth_labels = pickle.load(file=f)
train_seqs = synth_data
train_targets = synth_labels
train_seqs_r = train_seqs.reshape((train_seqs.shape[0], -1))
all_aurocs = []
all_auprcs = []
# in case we want to train each random forest multiple times with each dataset
for exp_num in range(1):
accuracies = []
precisions = []
recalls = []
aurocs = []
auprcs = []
for col_num in range(train_targets.shape[1]):
estimator = RandomForestClassifier(n_estimators=100)
estimator.fit(train_seqs_r, train_targets[:,col_num])
accuracies.append(estimator.score(vali_seqs_r, vali_targets[:,col_num]))
preds = estimator.predict(vali_seqs_r)
precisions.append(precision_score(y_pred=preds, y_true=vali_targets[:,col_num]))
recalls.append(recall_score(y_pred=preds, y_true=vali_targets[:,col_num]))
preds = estimator.predict_proba(vali_seqs_r)
fpr, tpr, thresholds = roc_curve(vali_targets[:,col_num], preds[:,1])
aurocs.append(auc(fpr, tpr))
precision, recall, thresholds = precision_recall_curve(vali_targets[:,col_num], preds[:,1])
auprcs.append(auc(recall, precision))
all_aurocs.append(aurocs)
all_auprcs.append(auprcs)
all_aurocs_exp.append(all_aurocs)
all_auprcs_exp.append(all_auprcs)
#with open('all_aurocs_exp_r' + str(oo) + '.pk', 'wb') as f:
# pickle.dump(file=f, obj=all_aurocs_exp)
#with open('all_auprcs_exp_r' + str(oo) + '.pk', 'wb') as f:
# pickle.dump(file=f, obj=all_auprcs_exp)
best_idx = np.argmax(np.array(all_aurocs_exp).sum(axis=1)[:,[0,2,4]].sum(axis=1) + np.array(all_auprcs_exp).sum(axis=1)[:,[0,2,4]].sum(axis=1))
best = np.arange(50,1050,50)[best_idx]
with open('./synthetic_eICU_datasets/samples_eICU_cdgan_synthetic_dataset_r' + str(oo) + '_' + str(best) + '.pk', 'rb') as f:
synth_data = pickle.load(file=f)
with open('./synthetic_eICU_datasets/labels_eICU_cdgan_synthetic_dataset_r' + str(oo) + '_' + str(best) + '.pk', 'rb') as f:
synth_labels = pickle.load(file=f)
train_seqs = synth_data
train_targets = synth_labels
train_seqs_r = train_seqs.reshape((train_seqs.shape[0], -1))
accuracies = []
precisions = []
recalls = []
aurocs = []
auprcs = []
for col_num in range(train_targets.shape[1]):
estimator = RandomForestClassifier(n_estimators=100)
estimator.fit(train_seqs_r, train_targets[:,col_num])
accuracies.append(estimator.score(test_seqs_r, test_targets[:,col_num]))
preds = estimator.predict(test_seqs_r)
precisions.append(precision_score(y_pred=preds, y_true=test_targets[:,col_num]))
recalls.append(recall_score(y_pred=preds, y_true=test_targets[:,col_num]))
preds = estimator.predict_proba(test_seqs_r)
fpr, tpr, thresholds = roc_curve(test_targets[:,col_num], preds[:,1])
aurocs.append(auc(fpr, tpr))
precision, recall, thresholds = precision_recall_curve(test_targets[:,col_num], preds[:,1])
auprcs.append(auc(recall, precision))
print(accuracies)
print(precisions)
print(recalls)
print(aurocs)
print(auprcs)
print ("----------------------------")
aurocs_all_runs.append(aurocs)
auprcs_all_runs.append(auprcs)
allr = np.vstack(aurocs_all_runs)
allp = np.vstack(auprcs_all_runs)
tstr_aurocs_mean = allr.mean(axis=0)
tstr_aurocs_sem = sem(allr, axis=0)
tstr_auprcs_mean = allp.mean(axis=0)
tstr_auprcs_sem = sem(allp, axis=0)
# get AUROC/AUPRC for real, random data
print ("Experiment with real data.")
print ("loading data...")
samples, labels = data_utils.eICU_task()
train_seqs = samples['train'].reshape(-1,16,4)
vali_seqs = samples['vali'].reshape(-1,16,4)
test_seqs = samples['test'].reshape(-1,16,4)
train_targets = labels['train']
vali_targets = labels['vali']
test_targets = labels['test']
train_seqs, vali_seqs, test_seqs = data_utils.scale_data(train_seqs, vali_seqs, test_seqs)
print ("data loaded.")
train_seqs_r = train_seqs.reshape((train_seqs.shape[0], -1))
vali_seqs_r = vali_seqs.reshape((vali_seqs.shape[0], -1))
test_seqs_r = test_seqs.reshape((test_seqs.shape[0], -1))
aurocs_all = []
auprcs_all = []
for i in range(5):
accuracies = []
precisions = []
recalls = []
aurocs = []
auprcs = []
for col_num in range(train_targets.shape[1]):
estimator = RandomForestClassifier(n_estimators=100)
estimator.fit(train_seqs_r, train_targets[:,col_num])
accuracies.append(estimator.score(test_seqs_r, test_targets[:,col_num]))
preds = estimator.predict(test_seqs_r)
precisions.append(precision_score(y_pred=preds, y_true=test_targets[:,col_num]))
recalls.append(recall_score(y_pred=preds, y_true=test_targets[:,col_num]))
preds = estimator.predict_proba(test_seqs_r)
fpr, tpr, thresholds = roc_curve(test_targets[:,col_num], preds[:,1])
aurocs.append(auc(fpr, tpr))
precision, recall, thresholds = precision_recall_curve(test_targets[:,col_num], preds[:,1])
auprcs.append(auc(recall, precision))
print(accuracies)
print(precisions)
print(recalls)
print(aurocs)
print(auprcs)
aurocs_all.append(aurocs)
auprcs_all.append(auprcs)
real_aurocs_mean = np.array(aurocs_all).mean(axis=0)
real_aurocs_sem = sem(aurocs_all, axis=0)
real_auprcs_mean = np.array(auprcs_all).mean(axis=0)
real_auprcs_sem = sem(auprcs_all, axis=0)
print ("Experiment with random predictions.")
#random score
test_targets_random = copy.deepcopy(test_targets)
random.shuffle(test_targets_random)
accuracies = []
precisions = []
recalls = []
aurocs = []
auprcs = []
for col_num in range(train_targets.shape[1]):
accuracies.append(accuracy_score(y_pred=test_targets_random[:,col_num], y_true=test_targets[:,col_num]))
precisions.append(precision_score(y_pred=test_targets_random[:,col_num], y_true=test_targets[:,col_num]))
recalls.append(recall_score(y_pred=test_targets_random[:,col_num], y_true=test_targets[:,col_num]))
preds = np.random.rand(len(test_targets[:,col_num]))
fpr, tpr, thresholds = roc_curve(test_targets[:,col_num], preds)
aurocs.append(auc(fpr, tpr))
precision, recall, thresholds = precision_recall_curve(test_targets[:,col_num], preds)
auprcs.append(auc(recall, precision))
print(accuracies)
print(precisions)
print(recalls)
print(aurocs)
print(auprcs)
random_aurocs = aurocs
random_auprcs = auprcs
print("Results")
print("------------")
print("------------")
print("TSTR")
print(tstr_aurocs_mean)
print(tstr_aurocs_sem)
print(tstr_auprcs_mean)
print(tstr_auprcs_sem)
print("------------")
print("Real")
print(real_aurocs_mean)
print(real_aurocs_sem)
print(real_auprcs_mean)
print(real_auprcs_sem)
print("------------")
print("Random")
print(random_aurocs)
print(random_auprcs)
| mit | 184,687,565,268,853,280 | 32.75102 | 144 | 0.697388 | false |
cyisfor/Python-Markdown | markdown/extensions/extra.py | 122 | 5547 |
"""
Python-Markdown Extra Extension
===============================
A compilation of various Python-Markdown extensions that imitates
[PHP Markdown Extra](http://michelf.com/projects/php-markdown/extra/).
Note that each of the individual extensions still needs to be available
on your PYTHONPATH. This extension simply wraps them all up as a
convenience so that only one extension needs to be listed when
initiating Markdown. See the documentation for each individual
extension for specifics about that extension.
There may be additional extensions that are distributed with
Python-Markdown that are not included here in Extra. Those extensions
are not part of PHP Markdown Extra, and therefore, not part of
Python-Markdown Extra. If you really would like Extra to include
additional extensions, we suggest creating your own clone of Extra
under a different name. You could also edit the `extensions` global
variable defined below, but be aware that such changes may be lost
when you upgrade to any future version of Python-Markdown.
See <https://pythonhosted.org/Markdown/extensions/extra.html>
for documentation.
Copyright The Python Markdown Project
License: [BSD](http://www.opensource.org/licenses/bsd-license.php)
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..blockprocessors import BlockProcessor
from .. import util
import re
extensions = [
'markdown.extensions.smart_strong',
'markdown.extensions.fenced_code',
'markdown.extensions.footnotes',
'markdown.extensions.attr_list',
'markdown.extensions.def_list',
'markdown.extensions.tables',
'markdown.extensions.abbr'
]
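# Illustrative usage of this wrapper (not part of the original file); the call
# below relies on the standard markdown.markdown() API:
#   import markdown
#   html = markdown.markdown(text, extensions=['markdown.extensions.extra'])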
class ExtraExtension(Extension):
""" Add various extensions to Markdown class."""
def __init__(self, *args, **kwargs):
""" config is a dumb holder which gets passed to actual ext later. """
self.config = kwargs.pop('configs', {})
self.config.update(kwargs)
def extendMarkdown(self, md, md_globals):
""" Register extension instances. """
md.registerExtensions(extensions, self.config)
if not md.safeMode:
# Turn on processing of markdown text within raw html
md.preprocessors['html_block'].markdown_in_raw = True
md.parser.blockprocessors.add('markdown_block',
MarkdownInHtmlProcessor(md.parser),
'_begin')
md.parser.blockprocessors.tag_counter = -1
md.parser.blockprocessors.contain_span_tags = re.compile(
r'^(p|h[1-6]|li|dd|dt|td|th|legend|address)$', re.IGNORECASE)
def makeExtension(*args, **kwargs):
return ExtraExtension(*args, **kwargs)
class MarkdownInHtmlProcessor(BlockProcessor):
"""Process Markdown Inside HTML Blocks."""
def test(self, parent, block):
return block == util.TAG_PLACEHOLDER % \
str(self.parser.blockprocessors.tag_counter + 1)
def _process_nests(self, element, block):
"""Process the element's child elements in self.run."""
# Build list of indexes of each nest within the parent element.
nest_index = [] # a list of tuples: (left index, right index)
i = self.parser.blockprocessors.tag_counter + 1
while len(self._tag_data) > i and self._tag_data[i]['left_index']:
left_child_index = self._tag_data[i]['left_index']
right_child_index = self._tag_data[i]['right_index']
nest_index.append((left_child_index - 1, right_child_index))
i += 1
# Create each nest subelement.
for i, (left_index, right_index) in enumerate(nest_index[:-1]):
self.run(element, block[left_index:right_index],
block[right_index:nest_index[i + 1][0]], True)
self.run(element, block[nest_index[-1][0]:nest_index[-1][1]], # last
block[nest_index[-1][1]:], True) # nest
def run(self, parent, blocks, tail=None, nest=False):
self._tag_data = self.parser.markdown.htmlStash.tag_data
self.parser.blockprocessors.tag_counter += 1
tag = self._tag_data[self.parser.blockprocessors.tag_counter]
# Create Element
markdown_value = tag['attrs'].pop('markdown')
element = util.etree.SubElement(parent, tag['tag'], tag['attrs'])
# Slice Off Block
if nest:
self.parser.parseBlocks(parent, tail) # Process Tail
block = blocks[1:]
else: # includes nests since a third level of nesting isn't supported
block = blocks[tag['left_index'] + 1: tag['right_index']]
del blocks[:tag['right_index']]
# Process Text
if (self.parser.blockprocessors.contain_span_tags.match( # Span Mode
tag['tag']) and markdown_value != 'block') or \
markdown_value == 'span':
element.text = '\n'.join(block)
else: # Block Mode
i = self.parser.blockprocessors.tag_counter + 1
if len(self._tag_data) > i and self._tag_data[i]['left_index']:
first_subelement_index = self._tag_data[i]['left_index'] - 1
self.parser.parseBlocks(
element, block[:first_subelement_index])
if not nest:
block = self._process_nests(element, block)
else:
self.parser.parseBlocks(element, block)
| bsd-3-clause | -4,391,257,087,335,375,400 | 41.022727 | 78 | 0.632414 | false |
360youlun/cmsplugin-bootstrap-carousel | cmsplugin_bootstrap_carousel/migrations/0002_auto__add_field_carouselitem_button_title__add_field_carouselitem_butt.py | 1 | 4403 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'CarouselItem.button_title'
db.add_column(u'cmsplugin_bootstrap_carousel_carouselitem', 'button_title',
self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
keep_default=False)
# Adding field 'CarouselItem.button_url'
db.add_column(u'cmsplugin_bootstrap_carousel_carouselitem', 'button_url',
self.gf('django.db.models.fields.URLField')(default='', max_length=200, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'CarouselItem.button_title'
db.delete_column(u'cmsplugin_bootstrap_carousel_carouselitem', 'button_title')
# Deleting field 'CarouselItem.button_url'
db.delete_column(u'cmsplugin_bootstrap_carousel_carouselitem', 'button_url')
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'cmsplugin_bootstrap_carousel.carousel': {
'Meta': {'object_name': 'Carousel', 'db_table': "u'cmsplugin_carousel'", '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'domid': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'interval': ('django.db.models.fields.IntegerField', [], {'default': '5000'})
},
u'cmsplugin_bootstrap_carousel.carouselitem': {
'Meta': {'object_name': 'CarouselItem'},
'button_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'button_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'caption_content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'caption_title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'carousel': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cmsplugin_bootstrap_carousel.Carousel']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['cmsplugin_bootstrap_carousel']
| bsd-3-clause | 4,345,188,735,798,274,000 | 61.914286 | 157 | 0.583239 | false |
aperigault/ansible | lib/ansible/module_utils/network/f5/urls.py | 60 | 4623 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import re
try:
from library.module_utils.network.f5.common import F5ModuleError
except ImportError:
from ansible.module_utils.network.f5.common import F5ModuleError
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')
def check_header_validity(header):
"""Verifies that header value is a string which doesn't contain
leading whitespace or return characters.
NOTE: This is a slightly modified version of the original function
taken from the requests library:
http://docs.python-requests.org/en/master/_modules/requests/utils/
:param header: string containing ':'.
"""
try:
name, value = header.split(':')
except ValueError:
raise F5ModuleError('Invalid header format: {0}'.format(header))
if name == '':
raise F5ModuleError('Invalid header format: {0}'.format(header))
if isinstance(value, bytes):
pat = _CLEAN_HEADER_REGEX_BYTE
else:
pat = _CLEAN_HEADER_REGEX_STR
try:
if not pat.match(value):
raise F5ModuleError("Invalid return character or leading space in header: %s" % name)
except TypeError:
raise F5ModuleError("Value for header {%s: %s} must be of type str or "
"bytes, not %s" % (name, value, type(value)))
def build_service_uri(base_uri, partition, name):
"""Build the proper uri for a service resource.
This follows the scheme:
<base_uri>/~<partition>~<<name>.app>~<name>
:param base_uri: str -- base uri of the REST endpoint
:param partition: str -- partition for the service
:param name: str -- name of the service
:returns: str -- uri to access the service
"""
name = name.replace('/', '~')
return '%s~%s~%s.app~%s' % (base_uri, partition, name, name)
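# Illustrative example (values are made up, not from the original module):
#   build_service_uri('https://localhost/mgmt/tm/sys/application/service',
#                     'Common', 'my_service')
#   returns 'https://localhost/mgmt/tm/sys/application/service~Common~my_service.app~my_service'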
def parseStats(entry):
if 'description' in entry:
return entry['description']
elif 'value' in entry:
return entry['value']
elif 'entries' in entry or 'nestedStats' in entry and 'entries' in entry['nestedStats']:
if 'entries' in entry:
entries = entry['entries']
else:
entries = entry['nestedStats']['entries']
result = None
for name in entries:
entry = entries[name]
if 'https://localhost' in name:
name = name.split('/')
name = name[-1]
if result and isinstance(result, list):
result.append(parseStats(entry))
elif result and isinstance(result, dict):
result[name] = parseStats(entry)
else:
try:
int(name)
result = list()
result.append(parseStats(entry))
except ValueError:
result = dict()
result[name] = parseStats(entry)
else:
if '.' in name:
names = name.split('.')
key = names[0]
value = names[1]
if result is None:
# result can be None if this branch is reached first
#
# For example, the mgmt/tm/net/trunk/NAME/stats API
# returns counters.bitsIn before anything else.
result = dict()
result[key] = dict()
elif key not in result:
result[key] = dict()
elif result[key] is None:
result[key] = dict()
result[key][value] = parseStats(entry)
else:
if result and isinstance(result, list):
result.append(parseStats(entry))
elif result and isinstance(result, dict):
result[name] = parseStats(entry)
else:
try:
int(name)
result = list()
result.append(parseStats(entry))
except ValueError:
result = dict()
result[name] = parseStats(entry)
return result
| gpl-3.0 | 2,141,585,248,126,525,400 | 36.893443 | 97 | 0.522388 | false |
geminateCoder/Character-Archive-Website | Lib/site-packages/pip/_vendor/requests/packages/chardet/cp949prober.py | 2801 | 1782 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
from .mbcssm import CP949SMModel
class CP949Prober(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(CP949SMModel)
        # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
        # be different.
self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
self.reset()
def get_charset_name(self):
return "CP949"
| cc0-1.0 | 7,990,987,862,373,303,000 | 39.5 | 76 | 0.709877 | false |
markflorisson/blaze-core | blaze/compute/air/frontend/ckernel_impls.py | 6 | 1562 |
# -*- coding: utf-8 -*-
"""
Lift ckernels to their appropriate rank so they always consume the full array
arguments.
"""
from __future__ import absolute_import, division, print_function
from pykit.ir import transform, Op
#------------------------------------------------------------------------
# Run
#------------------------------------------------------------------------
def run(func, env):
transform(CKernelImplementations(), func)
#------------------------------------------------------------------------
# Extract CKernel Implementations
#------------------------------------------------------------------------
class CKernelImplementations(object):
"""
For kernels that are implemented via ckernels, this
grabs the ckernel_deferred and turns it into a ckernel
op.
"""
def op_kernel(self, op):
function = op.metadata['kernel']
overload = op.metadata['overload']
func = overload.func
polysig = overload.sig
monosig = overload.resolved_sig
argtypes = monosig.argtypes
if function.matches('ckernel', argtypes):
overload = function.best_match('ckernel', argtypes)
impl = overload.func
assert monosig == overload.resolved_sig, (monosig,
overload.resolved_sig)
new_op = Op('ckernel', op.type, [impl, op.args[1:]], op.result)
new_op.add_metadata({'rank': 0,
'parallel': True})
return new_op
return op
| bsd-3-clause | -5,970,998,405,474,794,000 | 31.541667 | 77 | 0.482074 | false |
zzicewind/nova | nova/tests/unit/pci/test_utils.py | 44 | 7119 |
# Copyright (c) 2013 Intel, Inc.
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import os
import mock
from nova import exception
from nova.pci import utils
from nova import test
class PciDeviceMatchTestCase(test.NoDBTestCase):
def setUp(self):
super(PciDeviceMatchTestCase, self).setUp()
self.fake_pci_1 = {'vendor_id': 'v1',
'device_id': 'd1'}
def test_single_spec_match(self):
self.assertTrue(utils.pci_device_prop_match(
self.fake_pci_1, [{'vendor_id': 'v1', 'device_id': 'd1'}]))
def test_multiple_spec_match(self):
self.assertTrue(utils.pci_device_prop_match(
self.fake_pci_1,
[{'vendor_id': 'v1', 'device_id': 'd1'},
{'vendor_id': 'v3', 'device_id': 'd3'}]))
def test_spec_dismatch(self):
self.assertFalse(utils.pci_device_prop_match(
self.fake_pci_1,
[{'vendor_id': 'v4', 'device_id': 'd4'},
{'vendor_id': 'v3', 'device_id': 'd3'}]))
def test_spec_extra_key(self):
self.assertFalse(utils.pci_device_prop_match(
self.fake_pci_1,
[{'vendor_id': 'v1', 'device_id': 'd1', 'wrong_key': 'k1'}]))
class PciDeviceAddressParserTestCase(test.NoDBTestCase):
def test_parse_address(self):
self.parse_result = utils.parse_address("0000:04:12.6")
self.assertEqual(self.parse_result, ('0000', '04', '12', '6'))
def test_parse_address_wrong(self):
self.assertRaises(exception.PciDeviceWrongAddressFormat,
utils.parse_address, "0000:04.12:6")
def test_parse_address_invalid_character(self):
self.assertRaises(exception.PciDeviceWrongAddressFormat,
utils.parse_address, "0000:h4.12:6")
class GetFunctionByIfnameTestCase(test.NoDBTestCase):
@mock.patch.object(os, 'readlink')
@mock.patch.object(os, 'listdir')
def test_virtual_function(self, mock_listdir, mock_readlink):
mock_listdir.return_value = ['foo', 'bar']
mock_readlink.return_value = '../../../0000.00.00.1'
address, physical_function = utils.get_function_by_ifname('eth0')
self.assertEqual(address, '0000.00.00.1')
self.assertFalse(physical_function)
@mock.patch.object(os, 'readlink')
@mock.patch.object(os, 'listdir')
def test_physical_function(self, mock_listdir, mock_readlink):
mock_listdir.return_value = ['foo', 'virtfn1', 'bar']
mock_readlink.return_value = '../../../0000:00:00.1'
address, physical_function = utils.get_function_by_ifname('eth0')
self.assertEqual(address, '0000:00:00.1')
self.assertTrue(physical_function)
@mock.patch.object(os, 'listdir')
def test_exception(self, mock_listdir):
mock_listdir.side_effect = OSError('No such file or directory')
address, physical_function = utils.get_function_by_ifname('lo')
self.assertIsNone(address)
self.assertFalse(physical_function)
class IsPhysicalFunctionTestCase(test.NoDBTestCase):
class FakePciAddress(object):
def __init__(self):
self.domain = 0
self.bus = 0
self.slot = 0
self.func = 0
def setUp(self):
super(IsPhysicalFunctionTestCase, self).setUp()
self.pci_address = self.FakePciAddress()
@mock.patch.object(os, 'listdir')
def test_virtual_function(self, mock_listdir):
mock_listdir.return_value = ['foo', 'bar']
self.assertFalse(utils.is_physical_function(self.pci_address))
@mock.patch.object(os, 'listdir')
def test_physical_function(self, mock_listdir):
mock_listdir.return_value = ['foo', 'virtfn1', 'bar']
self.assertTrue(utils.is_physical_function(self.pci_address))
@mock.patch.object(os, 'listdir')
def test_exception(self, mock_listdir):
mock_listdir.side_effect = OSError('No such file or directory')
self.assertFalse(utils.is_physical_function(self.pci_address))
class GetIfnameByPciAddressTestCase(test.NoDBTestCase):
def setUp(self):
super(GetIfnameByPciAddressTestCase, self).setUp()
self.pci_address = '0000:00:00.1'
@mock.patch.object(os, 'listdir')
    def test_physical_function_interface_name(self, mock_listdir):
mock_listdir.return_value = ['foo', 'bar']
ifname = utils.get_ifname_by_pci_address(
self.pci_address, pf_interface=True)
self.assertEqual(ifname, 'bar')
@mock.patch.object(os, 'listdir')
    def test_virtual_function_interface_name(self, mock_listdir):
mock_listdir.return_value = ['foo', 'bar']
ifname = utils.get_ifname_by_pci_address(
self.pci_address, pf_interface=False)
self.assertEqual(ifname, 'bar')
@mock.patch.object(os, 'listdir')
def test_exception(self, mock_listdir):
mock_listdir.side_effect = OSError('No such file or directory')
self.assertRaises(
exception.PciDeviceNotFoundById,
utils.get_ifname_by_pci_address,
self.pci_address
)
class GetVfNumByPciAddressTestCase(test.NoDBTestCase):
def setUp(self):
super(GetVfNumByPciAddressTestCase, self).setUp()
self.pci_address = '0000:00:00.1'
self.paths = [
'/sys/bus/pci/devices/0000:00:00.1/physfn/virtfn3',
]
@mock.patch.object(os, 'readlink')
@mock.patch.object(glob, 'iglob')
def test_vf_number_found(self, mock_iglob, mock_readlink):
mock_iglob.return_value = self.paths
mock_readlink.return_value = '../../0000:00:00.1'
vf_num = utils.get_vf_num_by_pci_address(self.pci_address)
self.assertEqual(vf_num, '3')
@mock.patch.object(os, 'readlink')
@mock.patch.object(glob, 'iglob')
def test_vf_number_not_found(self, mock_iglob, mock_readlink):
mock_iglob.return_value = self.paths
mock_readlink.return_value = '../../0000:00:00.2'
self.assertRaises(
exception.PciDeviceNotFoundById,
utils.get_vf_num_by_pci_address,
self.pci_address
)
@mock.patch.object(os, 'readlink')
@mock.patch.object(glob, 'iglob')
def test_exception(self, mock_iglob, mock_readlink):
mock_iglob.return_value = self.paths
mock_readlink.side_effect = OSError('No such file or directory')
self.assertRaises(
exception.PciDeviceNotFoundById,
utils.get_vf_num_by_pci_address,
self.pci_address
)
| apache-2.0 | -1,701,311,397,965,207,300 | 35.88601 | 78 | 0.638292 | false |
komsas/OpenUpgrade | addons/stock_landed_costs/product.py | 364 | 1611 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
SPLIT_METHOD = [
('equal', 'Equal'),
('by_quantity', 'By Quantity'),
('by_current_cost_price', 'By Current Cost Price'),
('by_weight', 'By Weight'),
('by_volume', 'By Volume'),
]
class product_template(osv.osv):
_inherit = "product.template"
_columns = {
'landed_cost_ok': fields.boolean('Can constitute a landed cost'),
'split_method': fields.selection(SPLIT_METHOD, 'Split Method'),
}
_defaults = {
'landed_cost_ok': False,
'split_method': 'equal',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -2,926,539,192,814,421,000 | 34.8 | 78 | 0.60211 | false |
jhcepas/ete | ete3/tools/ete_build_lib/task/trimal.py | 4 | 4158 |
# -*- coding: utf-8 -*-
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: [email protected]
#
#
# #END_LICENSE#############################################################
from __future__ import absolute_import
import os
import sys
import logging
from six.moves import map
log = logging.getLogger("main")
from ..master_task import AlgCleanerTask
from ..master_job import Job
from ..utils import SeqGroup, GLOBALS, hascontent, DATATYPES, pjoin
from .. import db
__all__ = ["Trimal"]
class Trimal(AlgCleanerTask):
def __init__(self, nodeid, seqtype, alg_fasta_file, alg_phylip_file,
conf, confname):
GLOBALS["citator"].add('trimal')
self.confname = confname
self.conf = conf
self.seqtype = seqtype
self.alg_fasta_file = alg_fasta_file
self.alg_phylip_file = alg_phylip_file
base_args = {
'-in': None,
'-out': None,
'-fasta': "",
'-colnumbering': "",
}
# Initialize task
AlgCleanerTask.__init__(self, nodeid, "acleaner", "Trimal",
base_args,
self.conf[confname])
self.init()
def load_jobs(self):
appname = self.conf[self.confname]["_app"]
args = self.args.copy()
args["-in"] = pjoin(GLOBALS["input_dir"], self.alg_fasta_file)
args["-out"] = "clean.alg.fasta"
job = Job(self.conf["app"][appname], args, parent_ids=[self.nodeid])
job.add_input_file(self.alg_fasta_file)
self.jobs.append(job)
def finish(self):
# Once executed, alignment is converted into relaxed
# interleaved phylip format. Both files, fasta and phylip,
# remain accessible.
# Set Task specific attributes
main_job = self.jobs[0]
fasta_path = pjoin(main_job.jobdir, "clean.alg.fasta")
alg = SeqGroup(fasta_path)
if len(alg) != self.size:
            log.warning("Trimming was too aggressive and it tried"
" to remove one or more sequences."
" Alignment trimming will be disabled for this dataset."
)
self.clean_alg_fasta_file = db.register_task_data(self.taskid, DATATYPES.clean_alg_fasta, self.alg_fasta_file)
self.clean_alg_phylip_file = db.register_task_data(self.taskid, DATATYPES.clean_alg_phylip, self.alg_phylip_file)
else:
for line in open(self.jobs[0].stdout_file):
line = line.strip()
if line.startswith("#ColumnsMap"):
kept_columns = list(map(int, line.split("\t")[1].split(",")))
fasta = alg.write(format="fasta")
phylip = alg.write(format="iphylip_relaxed")
AlgCleanerTask.store_data(self, fasta, phylip, kept_columns)
| gpl-3.0 | 2,918,170,234,741,428,700 | 36.459459 | 125 | 0.600289 | false |
kangkot/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32/Demos/security/set_file_owner.py | 34 | 2045 |
fname=r'h:\tmp.txt'
import win32security,win32file,win32api,ntsecuritycon,win32con
new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ?
)
ph = win32api.GetCurrentProcess()
th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES)
win32security.AdjustTokenPrivileges(th,0,new_privs)
all_security_info = \
win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
sd=win32security.GetFileSecurity(fname,all_security_info)
old_dacl=sd.GetSecurityDescriptorDacl()
old_sacl=sd.GetSecurityDescriptorSacl()
old_group=sd.GetSecurityDescriptorGroup()
new_sd=win32security.SECURITY_DESCRIPTOR()
print "relative, valid, size: ",new_sd.IsSelfRelative(), new_sd.IsValid(), new_sd.GetLength()
my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0]
tmp_sid = win32security.LookupAccountName('','tmp')[0]
new_sd.SetSecurityDescriptorSacl(1,old_sacl,1)
new_sd.SetSecurityDescriptorDacl(1,old_dacl,1)
new_sd.SetSecurityDescriptorOwner(tmp_sid,0)
new_sd.SetSecurityDescriptorGroup(old_group,0)
win32security.SetFileSecurity(fname,all_security_info,new_sd)
| apache-2.0 | 8,461,893,031,772,759,000 | 52.815789 | 156 | 0.785819 | false |
ltilve/chromium | media/PRESUBMIT.py | 21 | 6977 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for Chromium media component.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def _FilterFile(affected_file):
"""Return true if the file could contain code requiring a presubmit check."""
return affected_file.LocalPath().endswith(
('.h', '.cc', '.cpp', '.cxx', '.mm'))
def _CheckForUseOfWrongClock(input_api, output_api):
"""Make sure new lines of media code don't use a clock susceptible to skew."""
# Regular expression that should detect any explicit references to the
# base::Time type (or base::Clock/DefaultClock), whether in using decls,
# typedefs, or to call static methods.
base_time_type_pattern = r'(^|\W)base::(Time|Clock|DefaultClock)(\W|$)'
# Regular expression that should detect references to the base::Time class
# members, such as a call to base::Time::Now.
base_time_member_pattern = r'(^|\W)(Time|Clock|DefaultClock)::'
# Regular expression to detect "using base::Time" declarations. We want to
# prevent these from triggerring a warning. For example, it's perfectly
# reasonable for code to be written like this:
#
# using base::Time;
# ...
# int64 foo_us = foo_s * Time::kMicrosecondsPerSecond;
using_base_time_decl_pattern = r'^\s*using\s+(::)?base::Time\s*;'
# Regular expression to detect references to the kXXX constants in the
  # base::Time class. We want to prevent these from triggering a warning.
base_time_konstant_pattern = r'(^|\W)Time::k\w+'
problem_re = input_api.re.compile(
r'(' + base_time_type_pattern + r')|(' + base_time_member_pattern + r')')
exception_re = input_api.re.compile(
r'(' + using_base_time_decl_pattern + r')|(' +
base_time_konstant_pattern + r')')
problems = []
for f in input_api.AffectedSourceFiles(_FilterFile):
for line_number, line in f.ChangedContents():
if problem_re.search(line):
if not exception_re.search(line):
problems.append(
' %s:%d\n %s' % (f.LocalPath(), line_number, line.strip()))
if problems:
return [output_api.PresubmitPromptOrNotify(
'You added one or more references to the base::Time class and/or one\n'
'of its member functions (or base::Clock/DefaultClock). In media\n'
'code, it is rarely correct to use a clock susceptible to time skew!\n'
'Instead, could you use base::TimeTicks to track the passage of\n'
'real-world time?\n\n' +
'\n'.join(problems))]
else:
return []
def _CheckForMessageLoopProxy(input_api, output_api):
"""Make sure media code only uses MessageLoopProxy for accessing the current
loop."""
message_loop_proxy_re = input_api.re.compile(
r'\bMessageLoopProxy(?!::current\(\))')
problems = []
for f in input_api.AffectedSourceFiles(_FilterFile):
for line_number, line in f.ChangedContents():
if message_loop_proxy_re.search(line):
problems.append('%s:%d' % (f.LocalPath(), line_number))
if problems:
return [output_api.PresubmitError(
'MessageLoopProxy should only be used for accessing the current loop.\n'
'Use the TaskRunner interfaces instead as they are more explicit about\n'
'the run-time characteristics. In most cases, SingleThreadTaskRunner\n'
'is a drop-in replacement for MessageLoopProxy.', problems)]
return []
def _CheckForHistogramOffByOne(input_api, output_api):
"""Make sure histogram enum maxes are used properly"""
# A general-purpose chunk of regex to match whitespace and/or comments
# that may be interspersed with the code we're interested in:
comment = r'/\*.*?\*/|//[^\n]*'
whitespace = r'(?:[\n\t ]|(?:' + comment + r'))*'
# The name is assumed to be a literal string.
histogram_name = r'"[^"]*"'
# This can be an arbitrary expression, so just ensure it isn't a ; to prevent
# matching past the end of this statement.
histogram_value = r'[^;]*'
# In parens so we can retrieve it for further checks.
histogram_max = r'([^;,]*)'
# This should match a uma histogram enumeration macro expression.
uma_macro_re = input_api.re.compile(
r'\bUMA_HISTOGRAM_ENUMERATION\(' + whitespace + histogram_name + r',' +
whitespace + histogram_value + r',' + whitespace + histogram_max +
whitespace + r'\)' + whitespace + r';(?:' + whitespace +
r'\/\/ (PRESUBMIT_IGNORE_UMA_MAX))?')
uma_max_re = input_api.re.compile(r'.*(?:Max|MAX).* \+ 1')
problems = []
for f in input_api.AffectedSourceFiles(_FilterFile):
contents = input_api.ReadFile(f)
# We want to match across lines, but still report a line number, so we keep
# track of the line we're on as we search through the file.
line_number = 1
    # We search the entire file, then check if any violations are in the changed
    # areas; this is inefficient but simple. A UMA_HISTOGRAM_ENUMERATION call
# will often span multiple lines, so finding a match looking just at the
# deltas line-by-line won't catch problems.
match = uma_macro_re.search(contents)
while match:
line_number += contents.count('\n', 0, match.start())
max_arg = match.group(1) # The third argument.
if (not uma_max_re.match(max_arg) and match.group(2) !=
'PRESUBMIT_IGNORE_UMA_MAX'):
uma_range = range(match.start(), match.end() + 1)
# Check if any part of the match is in the changed lines:
for num, line in f.ChangedContents():
if line_number <= num <= line_number + match.group().count('\n'):
problems.append('%s:%d' % (f, line_number))
break
# Strip off the file contents up to the end of the match and update the
# line number.
contents = contents[match.end():]
line_number += match.group().count('\n')
match = uma_macro_re.search(contents)
if problems:
return [output_api.PresubmitError(
'UMA_HISTOGRAM_ENUMERATION reports in src/media/ are expected to adhere\n'
'to the following guidelines:\n'
' - The max value (3rd argument) should be an enum value equal to the\n'
' last valid value, e.g. FOO_MAX = LAST_VALID_FOO.\n'
' - 1 must be added to that max value.\n'
'Contact [email protected] if you have questions.' , problems)]
return []
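# Illustrative example of a call that satisfies the check above (the histogram
# and enum names are made up, not taken from Chromium code):
#   UMA_HISTOGRAM_ENUMERATION("Media.MyEnum", value, MY_ENUM_MAX + 1);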
def _CheckChange(input_api, output_api):
results = []
results.extend(_CheckForUseOfWrongClock(input_api, output_api))
results.extend(_CheckForMessageLoopProxy(input_api, output_api))
results.extend(_CheckForHistogramOffByOne(input_api, output_api))
return results
def CheckChangeOnUpload(input_api, output_api):
return _CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _CheckChange(input_api, output_api)
| bsd-3-clause | -5,019,829,396,670,538,000 | 38.868571 | 80 | 0.670632 | false |
jim-thisplace/exercises-in-programming-style | 29-dataspaces/tf-29.py | 17 | 1558 |
#!/usr/bin/env python
import re, sys, operator, Queue, threading
# Two data spaces
word_space = Queue.Queue()
freq_space = Queue.Queue()
stopwords = set(open('../stop_words.txt').read().split(','))
# Worker function that consumes words from the word space
# and sends partial results to the frequency space
def process_words():
word_freqs = {}
while True:
try:
word = word_space.get(timeout=1)
except Queue.Empty:
break
if not word in stopwords:
if word in word_freqs:
word_freqs[word] += 1
else:
word_freqs[word] = 1
freq_space.put(word_freqs)
# Let's have this thread populate the word space
for word in re.findall('[a-z]{2,}', open(sys.argv[1]).read().lower()):
word_space.put(word)
# Let's create the workers and launch them at their jobs
workers = []
for i in range(5):
workers.append(threading.Thread(target = process_words))
[t.start() for t in workers]
# Let's wait for the workers to finish
[t.join() for t in workers]
# Let's merge the partial frequency results by consuming
# frequency data from the frequency space
word_freqs = {}
while not freq_space.empty():
freqs = freq_space.get()
for (k, v) in freqs.iteritems():
if k in word_freqs:
count = sum(item[k] for item in [freqs, word_freqs])
else:
count = freqs[k]
word_freqs[k] = count
for (w, c) in sorted(word_freqs.iteritems(), key=operator.itemgetter(1), reverse=True)[:25]:
print w, '-', c
| mit | 8,532,222,993,943,636,000 | 28.961538 | 92 | 0.621309 | false |
miyosuda/intro-to-dl-android | ImageClassification/jni-build/jni/include/tensorflow/python/kernel_tests/fifo_queue_test.py | 3 | 39552 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.data_flow_ops.FIFOQueue."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import re
import time
import tensorflow.python.platform
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
class FIFOQueueTest(tf.test.TestCase):
def testConstructor(self):
with tf.Graph().as_default():
q = tf.FIFOQueue(10, tf.float32, name="Q")
self.assertTrue(isinstance(q.queue_ref, tf.Tensor))
self.assertEquals(tf.string_ref, q.queue_ref.dtype)
self.assertProtoEquals("""
name:'Q' op:'FIFOQueue'
attr { key: 'component_types' value { list { type: DT_FLOAT } } }
attr { key: 'shapes' value { list {} } }
attr { key: 'capacity' value { i: 10 } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: '' } }
""", q.queue_ref.op.node_def)
def testMultiQueueConstructor(self):
with tf.Graph().as_default():
q = tf.FIFOQueue(5, (tf.int32, tf.float32),
shared_name="foo", name="Q")
self.assertTrue(isinstance(q.queue_ref, tf.Tensor))
self.assertEquals(tf.string_ref, q.queue_ref.dtype)
self.assertProtoEquals("""
name:'Q' op:'FIFOQueue'
attr { key: 'component_types' value { list {
type: DT_INT32 type : DT_FLOAT
} } }
attr { key: 'shapes' value { list {} } }
attr { key: 'capacity' value { i: 5 } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: 'foo' } }
""", q.queue_ref.op.node_def)
def testConstructorWithShapes(self):
with tf.Graph().as_default():
q = tf.FIFOQueue(5, (tf.int32, tf.float32),
shapes=(tf.TensorShape([1, 1, 2, 3]),
tf.TensorShape([5, 8])), name="Q")
self.assertTrue(isinstance(q.queue_ref, tf.Tensor))
self.assertEquals(tf.string_ref, q.queue_ref.dtype)
self.assertProtoEquals("""
name:'Q' op:'FIFOQueue'
attr { key: 'component_types' value { list {
type: DT_INT32 type : DT_FLOAT
} } }
attr { key: 'shapes' value { list {
shape { dim { size: 1 }
dim { size: 1 }
dim { size: 2 }
dim { size: 3 } }
shape { dim { size: 5 }
dim { size: 8 } }
} } }
attr { key: 'capacity' value { i: 5 } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: '' } }
""", q.queue_ref.op.node_def)
def testEnqueue(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
enqueue_op = q.enqueue((10.0,))
enqueue_op.run()
def testEnqueueWithShape(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32, shapes=(3, 2))
enqueue_correct_op = q.enqueue(([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]],))
enqueue_correct_op.run()
with self.assertRaises(ValueError):
q.enqueue(([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],))
self.assertEqual(1, q.size().eval())
def testEnqueueManyWithShape(self):
with self.test_session():
q = tf.FIFOQueue(10, [tf.int32, tf.int32],
shapes=[(), (2,)])
q.enqueue_many([[1, 2, 3, 4], [[1, 1], [2, 2], [3, 3], [4, 4]]]).run()
self.assertEqual(4, q.size().eval())
def testParallelEnqueue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
enqueue_ops = [q.enqueue((x,)) for x in elems]
dequeued_t = q.dequeue()
# Run one producer thread for each element in elems.
def enqueue(enqueue_op):
sess.run(enqueue_op)
threads = [self.checkedThread(target=enqueue, args=(e,))
for e in enqueue_ops]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
# Dequeue every element using a single thread.
results = []
for _ in xrange(len(elems)):
results.append(dequeued_t.eval())
self.assertItemsEqual(elems, results)
def testParallelDequeue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
enqueue_ops = [q.enqueue((x,)) for x in elems]
dequeued_t = q.dequeue()
# Enqueue every element using a single thread.
for enqueue_op in enqueue_ops:
enqueue_op.run()
# Run one consumer thread for each element in elems.
results = []
def dequeue():
results.append(sess.run(dequeued_t))
threads = [self.checkedThread(target=dequeue) for _ in enqueue_ops]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertItemsEqual(elems, results)
def testDequeue(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
elems = [10.0, 20.0, 30.0]
enqueue_ops = [q.enqueue((x,)) for x in elems]
dequeued_t = q.dequeue()
for enqueue_op in enqueue_ops:
enqueue_op.run()
for i in xrange(len(elems)):
vals = dequeued_t.eval()
self.assertEqual([elems[i]], vals)
def testEnqueueAndBlockingDequeue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(3, tf.float32)
elems = [10.0, 20.0, 30.0]
enqueue_ops = [q.enqueue((x,)) for x in elems]
dequeued_t = q.dequeue()
def enqueue():
# The enqueue_ops should run after the dequeue op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
for enqueue_op in enqueue_ops:
sess.run(enqueue_op)
results = []
def dequeue():
for _ in xrange(len(elems)):
results.append(sess.run(dequeued_t))
enqueue_thread = self.checkedThread(target=enqueue)
dequeue_thread = self.checkedThread(target=dequeue)
enqueue_thread.start()
dequeue_thread.start()
enqueue_thread.join()
dequeue_thread.join()
for elem, result in zip(elems, results):
self.assertEqual([elem], result)
def testMultiEnqueueAndDequeue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, (tf.int32, tf.float32))
elems = [(5, 10.0), (10, 20.0), (15, 30.0)]
enqueue_ops = [q.enqueue((x, y)) for x, y in elems]
dequeued_t = q.dequeue()
for enqueue_op in enqueue_ops:
enqueue_op.run()
for i in xrange(len(elems)):
x_val, y_val = sess.run(dequeued_t)
x, y = elems[i]
self.assertEqual([x], x_val)
self.assertEqual([y], y_val)
def testQueueSizeEmpty(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
self.assertEqual([0], q.size().eval())
def testQueueSizeAfterEnqueueAndDequeue(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
enqueue_op = q.enqueue((10.0,))
dequeued_t = q.dequeue()
size = q.size()
self.assertEqual([], size.get_shape())
enqueue_op.run()
self.assertEqual(1, size.eval())
dequeued_t.op.run()
self.assertEqual(0, size.eval())
def testEnqueueMany(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue()
enqueue_op.run()
enqueue_op.run()
for i in range(8):
vals = dequeued_t.eval()
self.assertEqual([elems[i % 4]], vals)
def testEmptyEnqueueMany(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
empty_t = tf.constant([], dtype=tf.float32,
shape=[0, 2, 3])
enqueue_op = q.enqueue_many((empty_t,))
size_t = q.size()
self.assertEqual([0], size_t.eval())
enqueue_op.run()
self.assertEqual([0], size_t.eval())
def testEmptyDequeueMany(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32, shapes=())
enqueue_op = q.enqueue((10.0,))
dequeued_t = q.dequeue_many(0)
self.assertEqual([], dequeued_t.eval().tolist())
enqueue_op.run()
self.assertEqual([], dequeued_t.eval().tolist())
def testEmptyDequeueManyWithNoShape(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
# Expect the operation to fail due to the shape not being constrained.
with self.assertRaisesOpError("specified shapes"):
q.dequeue_many(0).eval()
def testMultiEnqueueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, (tf.float32, tf.int32))
float_elems = [10.0, 20.0, 30.0, 40.0]
int_elems = [[1, 2], [3, 4], [5, 6], [7, 8]]
enqueue_op = q.enqueue_many((float_elems, int_elems))
dequeued_t = q.dequeue()
enqueue_op.run()
enqueue_op.run()
for i in range(8):
float_val, int_val = sess.run(dequeued_t)
self.assertEqual(float_elems[i % 4], float_val)
self.assertAllEqual(int_elems[i % 4], int_val)
def testDequeueMany(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32, ())
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(4)
enqueue_op.run()
self.assertAllEqual(elems[0:4], dequeued_t.eval())
self.assertAllEqual(elems[4:8], dequeued_t.eval())
def testMultiDequeueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, (tf.float32, tf.int32),
shapes=((), (2,)))
float_elems = [
10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
int_elems = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10],
[11, 12], [13, 14], [15, 16], [17, 18], [19, 20]]
enqueue_op = q.enqueue_many((float_elems, int_elems))
dequeued_t = q.dequeue_many(4)
dequeued_single_t = q.dequeue()
enqueue_op.run()
float_val, int_val = sess.run(dequeued_t)
self.assertAllEqual(float_elems[0:4], float_val)
self.assertAllEqual(int_elems[0:4], int_val)
self.assertEqual(float_val.shape, dequeued_t[0].get_shape())
self.assertEqual(int_val.shape, dequeued_t[1].get_shape())
float_val, int_val = sess.run(dequeued_t)
self.assertAllEqual(float_elems[4:8], float_val)
self.assertAllEqual(int_elems[4:8], int_val)
float_val, int_val = sess.run(dequeued_single_t)
self.assertAllEqual(float_elems[8], float_val)
self.assertAllEqual(int_elems[8], int_val)
self.assertEqual(float_val.shape, dequeued_single_t[0].get_shape())
self.assertEqual(int_val.shape, dequeued_single_t[1].get_shape())
def testHighDimension(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.int32, (4, 4, 4, 4))
elems = np.array([[[[[x] * 4] * 4] * 4] * 4 for x in range(10)], np.int32)
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(10)
enqueue_op.run()
self.assertAllEqual(dequeued_t.eval(), elems)
def testEnqueueWrongShape(self):
q = tf.FIFOQueue(10, (tf.int32, tf.int32), ((), (2)))
with self.assertRaises(ValueError):
q.enqueue(([1, 2], [2, 2]))
with self.assertRaises(ValueError):
q.enqueue_many((7, [[1, 2], [3, 4], [5, 6]]))
def testBatchSizeMismatch(self):
q = tf.FIFOQueue(10, (tf.int32, tf.int32, tf.int32), ((), (), ()))
with self.assertRaises(ValueError):
q.enqueue_many(([1, 2, 3], [1, 2], [1, 2, 3]))
with self.assertRaises(ValueError):
q.enqueue_many(([1, 2, 3], [1, 2], tf.placeholder(tf.int32)))
with self.assertRaises(ValueError):
q.enqueue_many((tf.placeholder(tf.int32), [1, 2], [1, 2, 3]))
def testEnqueueManyEmptyTypeConversion(self):
q = tf.FIFOQueue(10, (tf.int32, tf.float32), ((), ()))
enq = q.enqueue_many(([], []))
self.assertEqual(tf.int32, enq.inputs[1].dtype)
self.assertEqual(tf.float32, enq.inputs[2].dtype)
def testEnqueueWrongType(self):
q = tf.FIFOQueue(10, (tf.int32, tf.float32), ((), ()))
with self.assertRaises(ValueError):
q.enqueue((tf.placeholder(tf.int32), tf.placeholder(tf.int32)))
with self.assertRaises(ValueError):
q.enqueue_many((tf.placeholder(tf.int32), tf.placeholder(tf.int32)))
def testEnqueueWrongShapeAtRuntime(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, (tf.int32, tf.int32), ((2, 2), (3, 3)))
elems_ok = np.array([1] * 4).reshape((2, 2)).astype(np.int32)
elems_bad = tf.placeholder(tf.int32)
enqueue_op = q.enqueue((elems_ok, elems_bad))
with self.assertRaisesRegexp(
tf.errors.InvalidArgumentError, r"Expected \[3,3\], got \[3,4\]"):
sess.run([enqueue_op],
feed_dict={elems_bad: np.array([1] * 12).reshape((3, 4))})
def testEnqueueDequeueManyWrongShape(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, (tf.int32, tf.int32), ((2, 2), (3, 3)))
elems_ok = np.array([1] * 8).reshape((2, 2, 2)).astype(np.int32)
elems_bad = tf.placeholder(tf.int32)
enqueue_op = q.enqueue_many((elems_ok, elems_bad))
dequeued_t = q.dequeue_many(2)
with self.assertRaisesRegexp(
tf.errors.InvalidArgumentError,
"Shape mismatch in tuple component 1. "
r"Expected \[2,3,3\], got \[2,3,4\]"):
sess.run([enqueue_op],
feed_dict={elems_bad: np.array([1] * 24).reshape((2, 3, 4))})
dequeued_t.eval()
def testParallelEnqueueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(1000, tf.float32, shapes=())
elems = [10.0 * x for x in range(100)]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(1000)
# Enqueue 100 items in parallel on 10 threads.
def enqueue():
sess.run(enqueue_op)
threads = [self.checkedThread(target=enqueue) for _ in range(10)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertItemsEqual(dequeued_t.eval(), elems * 10)
def testParallelDequeueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(1000, tf.float32, shapes=())
elems = [10.0 * x for x in range(1000)]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(100)
enqueue_op.run()
# Dequeue 100 items in parallel on 10 threads.
dequeued_elems = []
def dequeue():
dequeued_elems.extend(sess.run(dequeued_t))
threads = [self.checkedThread(target=dequeue) for _ in range(10)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertItemsEqual(elems, dequeued_elems)
def testParallelEnqueueAndDequeue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(50, tf.float32, shapes=())
initial_elements = [10.0] * 49
q.enqueue_many((initial_elements,)).run()
enqueue_op = q.enqueue((20.0,))
dequeued_t = q.dequeue()
def enqueue():
for _ in xrange(100):
sess.run(enqueue_op)
def dequeue():
for _ in xrange(100):
self.assertTrue(sess.run(dequeued_t) in (10.0, 20.0))
enqueue_threads = [self.checkedThread(target=enqueue) for _ in range(10)]
dequeue_threads = [self.checkedThread(target=dequeue) for _ in range(10)]
for enqueue_thread in enqueue_threads:
enqueue_thread.start()
for dequeue_thread in dequeue_threads:
dequeue_thread.start()
for enqueue_thread in enqueue_threads:
enqueue_thread.join()
for dequeue_thread in dequeue_threads:
dequeue_thread.join()
# Dequeue the initial count of elements to clean up.
cleanup_elems = q.dequeue_many(49).eval()
for elem in cleanup_elems:
self.assertTrue(elem in (10.0, 20.0))
def testMixtureOfEnqueueAndEnqueueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.int32, shapes=())
enqueue_placeholder = tf.placeholder(tf.int32, shape=())
enqueue_op = q.enqueue((enqueue_placeholder,))
enqueuemany_placeholder = tf.placeholder(
tf.int32, shape=(None,))
enqueuemany_op = q.enqueue_many((enqueuemany_placeholder,))
dequeued_t = q.dequeue()
close_op = q.close()
def dequeue():
for i in xrange(250):
self.assertEqual(i, sess.run(dequeued_t))
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
elements_enqueued = 0
while elements_enqueued < 250:
# With equal probability, run Enqueue or enqueue_many.
if random.random() > 0.5:
enqueue_op.run({enqueue_placeholder: elements_enqueued})
elements_enqueued += 1
else:
count = random.randint(0, min(20, 250 - elements_enqueued))
range_to_enqueue = np.arange(elements_enqueued,
elements_enqueued + count,
dtype=np.int32)
enqueuemany_op.run({enqueuemany_placeholder: range_to_enqueue})
elements_enqueued += count
close_op.run()
dequeue_thread.join()
self.assertEqual(0, q.size().eval())
def testMixtureOfDequeueAndDequeueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.int32, shapes=())
enqueue_op = q.enqueue_many((np.arange(250, dtype=np.int32),))
dequeued_t = q.dequeue()
count_placeholder = tf.placeholder(tf.int32, shape=())
dequeuemany_t = q.dequeue_many(count_placeholder)
def enqueue():
sess.run(enqueue_op)
enqueue_thread = self.checkedThread(target=enqueue)
enqueue_thread.start()
elements_dequeued = 0
while elements_dequeued < 250:
# With equal probability, run Dequeue or dequeue_many.
if random.random() > 0.5:
self.assertEqual(elements_dequeued, dequeued_t.eval())
elements_dequeued += 1
else:
count = random.randint(0, min(20, 250 - elements_dequeued))
expected_range = np.arange(elements_dequeued,
elements_dequeued + count,
dtype=np.int32)
self.assertAllEqual(
expected_range, dequeuemany_t.eval({count_placeholder: count}))
elements_dequeued += count
q.close().run()
enqueue_thread.join()
self.assertEqual(0, q.size().eval())
def testBlockingDequeueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.float32, ())
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
dequeued_t = q.dequeue_many(4)
dequeued_elems = []
def enqueue():
# The enqueue_op should run after the dequeue op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
sess.run(enqueue_op)
def dequeue():
dequeued_elems.extend(sess.run(dequeued_t).tolist())
enqueue_thread = self.checkedThread(target=enqueue)
dequeue_thread = self.checkedThread(target=dequeue)
enqueue_thread.start()
dequeue_thread.start()
enqueue_thread.join()
dequeue_thread.join()
self.assertAllEqual(elems, dequeued_elems)
def testDequeueManyWithTensorParameter(self):
with self.test_session():
# Define a first queue that contains integer counts.
dequeue_counts = [random.randint(1, 10) for _ in range(100)]
count_q = tf.FIFOQueue(100, tf.int32, ())
enqueue_counts_op = count_q.enqueue_many((dequeue_counts,))
total_count = sum(dequeue_counts)
# Define a second queue that contains total_count elements.
elems = [random.randint(0, 100) for _ in range(total_count)]
q = tf.FIFOQueue(total_count, tf.int32, ())
enqueue_elems_op = q.enqueue_many((elems,))
# Define a subgraph that first dequeues a count, then DequeuesMany
# that number of elements.
dequeued_t = q.dequeue_many(count_q.dequeue())
enqueue_counts_op.run()
enqueue_elems_op.run()
dequeued_elems = []
for _ in dequeue_counts:
dequeued_elems.extend(dequeued_t.eval())
self.assertEqual(elems, dequeued_elems)
def testDequeueFromClosedQueue(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue()
enqueue_op.run()
close_op.run()
for elem in elems:
self.assertEqual([elem], dequeued_t.eval())
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(tf.errors.OutOfRangeError,
"is closed and has insufficient"):
dequeued_t.eval()
def testBlockingDequeueFromClosedQueue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue()
enqueue_op.run()
def dequeue():
for elem in elems:
self.assertEqual([elem], sess.run(dequeued_t))
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(tf.errors.OutOfRangeError,
"is closed and has insufficient"):
sess.run(dequeued_t)
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
close_op.run()
dequeue_thread.join()
def testBlockingDequeueFromClosedEmptyQueue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.float32)
close_op = q.close()
dequeued_t = q.dequeue()
def dequeue():
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(tf.errors.OutOfRangeError,
"is closed and has insufficient"):
sess.run(dequeued_t)
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
close_op.run()
dequeue_thread.join()
def testBlockingDequeueManyFromClosedQueue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.float32, ())
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue_many(4)
enqueue_op.run()
def dequeue():
self.assertAllEqual(elems, sess.run(dequeued_t))
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(tf.errors.OutOfRangeError,
"is closed and has insufficient"):
sess.run(dequeued_t)
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
close_op.run()
dequeue_thread.join()
def testEnqueueManyLargerThanCapacityWithConcurrentDequeueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(4, tf.float32, ())
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
dequeued_t = q.dequeue_many(3)
cleanup_dequeue_t = q.dequeue()
def enqueue():
sess.run(enqueue_op)
def dequeue():
self.assertAllEqual(elems[0:3], sess.run(dequeued_t))
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run(dequeued_t)
self.assertEqual(elems[3], sess.run(cleanup_dequeue_t))
def close():
sess.run(close_op)
enqueue_thread = self.checkedThread(target=enqueue)
enqueue_thread.start()
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
close_thread = self.checkedThread(target=close)
close_thread.start()
enqueue_thread.join()
dequeue_thread.join()
close_thread.join()
def testClosedBlockingDequeueManyRestoresPartialBatch(self):
with self.test_session() as sess:
q = tf.FIFOQueue(4, (tf.float32, tf.float32), ((), ()))
elems_a = [1.0, 2.0, 3.0]
elems_b = [10.0, 20.0, 30.0]
enqueue_op = q.enqueue_many((elems_a, elems_b))
dequeued_a_t, dequeued_b_t = q.dequeue_many(4)
cleanup_dequeue_a_t, cleanup_dequeue_b_t = q.dequeue()
close_op = q.close()
enqueue_op.run()
def dequeue():
with self.assertRaises(tf.errors.OutOfRangeError):
sess.run([dequeued_a_t, dequeued_b_t])
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
close_op.run()
dequeue_thread.join()
# Test that the elements in the partially-dequeued batch are
# restored in the correct order.
for elem_a, elem_b in zip(elems_a, elems_b):
val_a, val_b = sess.run([cleanup_dequeue_a_t, cleanup_dequeue_b_t])
self.assertEqual(elem_a, val_a)
self.assertEqual(elem_b, val_b)
self.assertEqual(0, q.size().eval())
def testBlockingDequeueManyFromClosedEmptyQueue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(10, tf.float32, ())
close_op = q.close()
dequeued_t = q.dequeue_many(4)
def dequeue():
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(tf.errors.OutOfRangeError,
"is closed and has insufficient"):
sess.run(dequeued_t)
dequeue_thread = self.checkedThread(target=dequeue)
dequeue_thread.start()
# The close_op should run after the dequeue_thread has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
close_op.run()
dequeue_thread.join()
def testEnqueueToClosedQueue(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
enqueue_op = q.enqueue((10.0,))
close_op = q.close()
enqueue_op.run()
close_op.run()
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(tf.errors.AbortedError, "is closed"):
enqueue_op.run()
def testEnqueueManyToClosedQueue(self):
with self.test_session():
q = tf.FIFOQueue(10, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
close_op = q.close()
enqueue_op.run()
close_op.run()
# Expect the operation to fail due to the queue being closed.
with self.assertRaisesRegexp(tf.errors.AbortedError, "is closed"):
enqueue_op.run()
def testBlockingEnqueueToFullQueue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(4, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
blocking_enqueue_op = q.enqueue((50.0,))
dequeued_t = q.dequeue()
enqueue_op.run()
def blocking_enqueue():
sess.run(blocking_enqueue_op)
thread = self.checkedThread(target=blocking_enqueue)
thread.start()
# The dequeue ops should run after the blocking_enqueue_op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
for elem in elems:
self.assertEqual([elem], dequeued_t.eval())
self.assertEqual([50.0], dequeued_t.eval())
thread.join()
def testBlockingEnqueueManyToFullQueue(self):
with self.test_session() as sess:
q = tf.FIFOQueue(4, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
blocking_enqueue_op = q.enqueue_many(([50.0, 60.0],))
dequeued_t = q.dequeue()
enqueue_op.run()
def blocking_enqueue():
sess.run(blocking_enqueue_op)
thread = self.checkedThread(target=blocking_enqueue)
thread.start()
# The dequeue ops should run after the blocking_enqueue_op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
for elem in elems:
self.assertEqual([elem], dequeued_t.eval())
time.sleep(0.01)
self.assertEqual([50.0], dequeued_t.eval())
self.assertEqual([60.0], dequeued_t.eval())
def testBlockingEnqueueBeforeClose(self):
with self.test_session() as sess:
q = tf.FIFOQueue(4, tf.float32)
elems = [10.0, 20.0, 30.0, 40.0]
enqueue_op = q.enqueue_many((elems,))
blocking_enqueue_op = q.enqueue((50.0,))
close_op = q.close()
dequeued_t = q.dequeue()
enqueue_op.run()
def blocking_enqueue():
# Expect the operation to succeed once the dequeue op runs.
sess.run(blocking_enqueue_op)
enqueue_thread = self.checkedThread(target=blocking_enqueue)
enqueue_thread.start()
# The close_op should run after the blocking_enqueue_op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
def close():
sess.run(close_op)
close_thread = self.checkedThread(target=close)
close_thread.start()
# The dequeue will unblock both threads.
self.assertEqual(10.0, dequeued_t.eval())
enqueue_thread.join()
close_thread.join()
for elem in [20.0, 30.0, 40.0, 50.0]:
self.assertEqual(elem, dequeued_t.eval())
self.assertEqual(0, q.size().eval())
def testBlockingEnqueueManyBeforeClose(self):
with self.test_session() as sess:
q = tf.FIFOQueue(4, tf.float32)
elems = [10.0, 20.0, 30.0]
enqueue_op = q.enqueue_many((elems,))
blocking_enqueue_op = q.enqueue_many(([50.0, 60.0],))
close_op = q.close()
dequeued_t = q.dequeue()
enqueue_op.run()
def blocking_enqueue():
sess.run(blocking_enqueue_op)
enqueue_thread = self.checkedThread(target=blocking_enqueue)
enqueue_thread.start()
# The close_op should run after the blocking_enqueue_op has blocked.
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
def close():
sess.run(close_op)
close_thread = self.checkedThread(target=close)
close_thread.start()
# The dequeue will unblock both threads.
self.assertEqual(10.0, dequeued_t.eval())
enqueue_thread.join()
close_thread.join()
for elem in [20.0, 30.0, 50.0, 60.0]:
self.assertEqual(elem, dequeued_t.eval())
def testDoesNotLoseValue(self):
with self.test_session():
q = tf.FIFOQueue(1, tf.float32)
enqueue_op = q.enqueue((10.0,))
size_t = q.size()
enqueue_op.run()
for _ in range(500):
self.assertEqual(size_t.eval(), [1])
def testSharedQueueSameSession(self):
with self.test_session():
q1 = tf.FIFOQueue(
1, tf.float32, shared_name="shared_queue")
q1.enqueue((10.0,)).run()
q2 = tf.FIFOQueue(
1, tf.float32, shared_name="shared_queue")
q1_size_t = q1.size()
q2_size_t = q2.size()
self.assertEqual(q1_size_t.eval(), [1])
self.assertEqual(q2_size_t.eval(), [1])
self.assertEqual(q2.dequeue().eval(), [10.0])
self.assertEqual(q1_size_t.eval(), [0])
self.assertEqual(q2_size_t.eval(), [0])
q2.enqueue((20.0,)).run()
self.assertEqual(q1_size_t.eval(), [1])
self.assertEqual(q2_size_t.eval(), [1])
self.assertEqual(q1.dequeue().eval(), [20.0])
self.assertEqual(q1_size_t.eval(), [0])
self.assertEqual(q2_size_t.eval(), [0])
def testIncompatibleSharedQueueErrors(self):
with self.test_session():
q_a_1 = tf.FIFOQueue(10, tf.float32, shared_name="q_a")
q_a_2 = tf.FIFOQueue(15, tf.float32, shared_name="q_a")
q_a_1.queue_ref.eval()
with self.assertRaisesOpError("capacity"):
q_a_2.queue_ref.eval()
q_b_1 = tf.FIFOQueue(10, tf.float32, shared_name="q_b")
q_b_2 = tf.FIFOQueue(10, tf.int32, shared_name="q_b")
q_b_1.queue_ref.eval()
with self.assertRaisesOpError("component types"):
q_b_2.queue_ref.eval()
q_c_1 = tf.FIFOQueue(10, tf.float32, shared_name="q_c")
q_c_2 = tf.FIFOQueue(
10, tf.float32, shapes=[(1, 1, 2, 3)], shared_name="q_c")
q_c_1.queue_ref.eval()
with self.assertRaisesOpError("component shapes"):
q_c_2.queue_ref.eval()
q_d_1 = tf.FIFOQueue(
10, tf.float32, shapes=[(1, 1, 2, 3)], shared_name="q_d")
q_d_2 = tf.FIFOQueue(10, tf.float32, shared_name="q_d")
q_d_1.queue_ref.eval()
with self.assertRaisesOpError("component shapes"):
q_d_2.queue_ref.eval()
q_e_1 = tf.FIFOQueue(
10, tf.float32, shapes=[(1, 1, 2, 3)], shared_name="q_e")
q_e_2 = tf.FIFOQueue(
10, tf.float32, shapes=[(1, 1, 2, 4)], shared_name="q_e")
q_e_1.queue_ref.eval()
with self.assertRaisesOpError("component shapes"):
q_e_2.queue_ref.eval()
q_f_1 = tf.FIFOQueue(10, tf.float32, shared_name="q_f")
q_f_2 = tf.FIFOQueue(
10, (tf.float32, tf.int32), shared_name="q_f")
q_f_1.queue_ref.eval()
with self.assertRaisesOpError("component types"):
q_f_2.queue_ref.eval()
def testSelectQueue(self):
with self.test_session():
num_queues = 10
qlist = list()
for _ in xrange(num_queues):
qlist.append(tf.FIFOQueue(10, tf.float32))
# Enqueue/Dequeue into a dynamically selected queue
for _ in xrange(20):
index = np.random.randint(num_queues)
q = tf.FIFOQueue.from_list(index, qlist)
q.enqueue((10.,)).run()
self.assertEqual(q.dequeue().eval(), 10.0)
def testSelectQueueOutOfRange(self):
with self.test_session():
q1 = tf.FIFOQueue(10, tf.float32)
q2 = tf.FIFOQueue(15, tf.float32)
enq_q = tf.FIFOQueue.from_list(3, [q1, q2])
with self.assertRaisesOpError("Index must be in the range"):
enq_q.dequeue().eval()
def _blockingDequeue(self, sess, dequeue_op):
with self.assertRaisesOpError("Dequeue operation was cancelled"):
sess.run(dequeue_op)
def _blockingDequeueMany(self, sess, dequeue_many_op):
with self.assertRaisesOpError("Dequeue operation was cancelled"):
sess.run(dequeue_many_op)
def _blockingEnqueue(self, sess, enqueue_op):
with self.assertRaisesOpError("Enqueue operation was cancelled"):
sess.run(enqueue_op)
def _blockingEnqueueMany(self, sess, enqueue_many_op):
with self.assertRaisesOpError("Enqueue operation was cancelled"):
sess.run(enqueue_many_op)
def testResetOfBlockingOperation(self):
with self.test_session() as sess:
q_empty = tf.FIFOQueue(5, tf.float32, ())
dequeue_op = q_empty.dequeue()
dequeue_many_op = q_empty.dequeue_many(1)
q_full = tf.FIFOQueue(5, tf.float32)
sess.run(q_full.enqueue_many(([1.0, 2.0, 3.0, 4.0, 5.0],)))
enqueue_op = q_full.enqueue((6.0,))
enqueue_many_op = q_full.enqueue_many(([6.0],))
threads = [
self.checkedThread(self._blockingDequeue, args=(sess, dequeue_op)),
self.checkedThread(self._blockingDequeueMany, args=(sess,
dequeue_many_op)),
self.checkedThread(self._blockingEnqueue, args=(sess, enqueue_op)),
self.checkedThread(self._blockingEnqueueMany, args=(sess,
enqueue_many_op))]
for t in threads:
t.start()
time.sleep(0.1)
sess.close() # Will cancel the blocked operations.
for t in threads:
t.join()
def testBigEnqueueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(5, tf.int32, ((),))
elem = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
enq = q.enqueue_many((elem,))
deq = q.dequeue()
size_op = q.size()
enq_done = []
def blocking_enqueue():
enq_done.append(False)
# This will fill the queue and then block until enough dequeues happen.
sess.run(enq)
enq_done.append(True)
thread = self.checkedThread(target=blocking_enqueue)
thread.start()
# The enqueue should start and then block.
results = []
results.append(deq.eval()) # Will only complete after the enqueue starts.
self.assertEqual(len(enq_done), 1)
self.assertEqual(sess.run(size_op), 5)
for _ in range(3):
results.append(deq.eval())
time.sleep(0.1)
self.assertEqual(len(enq_done), 1)
self.assertEqual(sess.run(size_op), 5)
# This dequeue will unblock the thread.
results.append(deq.eval())
time.sleep(0.1)
self.assertEqual(len(enq_done), 2)
thread.join()
for i in range(5):
self.assertEqual(size_op.eval(), 5 - i)
results.append(deq.eval())
self.assertEqual(size_op.eval(), 5 - i - 1)
self.assertAllEqual(elem, results)
def testBigDequeueMany(self):
with self.test_session() as sess:
q = tf.FIFOQueue(2, tf.int32, ((),))
elem = np.arange(4, dtype=np.int32)
enq_list = [q.enqueue((e,)) for e in elem]
deq = q.dequeue_many(4)
results = []
def blocking_dequeue():
# Will only complete after 4 enqueues complete.
results.extend(sess.run(deq))
thread = self.checkedThread(target=blocking_dequeue)
thread.start()
# The dequeue should start and then block.
for enq in enq_list:
# TODO(mrry): Figure out how to do this without sleeping.
time.sleep(0.1)
self.assertEqual(len(results), 0)
sess.run(enq)
# Enough enqueued to unblock the dequeue
thread.join()
self.assertAllEqual(elem, results)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | 8,018,380,447,867,938,000 | 34.032772 | 80 | 0.603181 | false |
kemaswill/keras | keras/regularizers.py | 5 | 4694 | from __future__ import absolute_import
from . import backend as K
class Regularizer(object):
def set_param(self, p):
self.p = p
def set_layer(self, layer):
self.layer = layer
def __call__(self, loss):
return loss
def get_config(self):
return {'name': self.__class__.__name__}
class EigenvalueRegularizer(Regularizer):
    '''Eigenvalue Decay regularizer.
    Takes a constant that controls the strength of the Eigenvalue Decay
    penalty on the current layer's weights; the call returns the regularized
    loss during the training phase and the original, unpenalized loss
    otherwise (e.g. when evaluating on validation data).
    '''
def __init__(self, k):
self.k = k
self.uses_learning_phase = True
def set_param(self, p):
self.p = p
def __call__(self, loss):
power = 9 # number of iterations of the power method
W = self.p
if K.ndim(W) > 2:
raise Exception('Eigenvalue Decay regularizer '
'is only available for dense '
'and embedding layers.')
WW = K.dot(K.transpose(W), W)
dim1, dim2 = K.eval(K.shape(WW)) # number of neurons in the layer
# power method for approximating the dominant eigenvector:
o = K.ones([dim1, 1]) # initial values for the dominant eigenvector
main_eigenvect = K.dot(WW, o)
for n in range(power - 1):
main_eigenvect = K.dot(WW, main_eigenvect)
WWd = K.dot(WW, main_eigenvect)
# the corresponding dominant eigenvalue:
main_eigenval = K.dot(K.transpose(WWd), main_eigenvect) / K.dot(K.transpose(main_eigenvect), main_eigenvect)
regularized_loss = loss + (main_eigenval ** 0.5) * self.k # multiplied by the given regularization gain
return K.in_train_phase(regularized_loss[0, 0], loss)
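# Illustrative usage sketch (added for clarity; the layer, gain value and model
# below are hypothetical, not part of this module): in the Keras 1.x-style API
# this file belongs to, a weight regularizer is attached to a layer through its
# `W_regularizer` argument, e.g.
#
#   from keras.layers import Dense
#   model.add(Dense(64, W_regularizer=EigenvalueRegularizer(0.0005)))
#
# The layer then calls `set_param` with its weight tensor, and the penalty is
# folded into the training loss via `__call__`.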
class WeightRegularizer(Regularizer):
def __init__(self, l1=0., l2=0.):
self.l1 = K.cast_to_floatx(l1)
self.l2 = K.cast_to_floatx(l2)
self.uses_learning_phase = True
def set_param(self, p):
self.p = p
def __call__(self, loss):
if not hasattr(self, 'p'):
raise Exception('Need to call `set_param` on '
'WeightRegularizer instance '
'before calling the instance. '
'Check that you are not passing '
'a WeightRegularizer instead of an '
'ActivityRegularizer '
'(i.e. activity_regularizer="l2" instead '
                            'of activity_regularizer="activity_l2").')
regularized_loss = loss
if self.l1:
regularized_loss += K.sum(self.l1 * K.abs(self.p))
if self.l2:
regularized_loss += K.sum(self.l2 * K.square(self.p))
return K.in_train_phase(regularized_loss, loss)
def get_config(self):
return {'name': self.__class__.__name__,
'l1': float(self.l1),
'l2': float(self.l2)}
class ActivityRegularizer(Regularizer):
def __init__(self, l1=0., l2=0.):
self.l1 = K.cast_to_floatx(l1)
self.l2 = K.cast_to_floatx(l2)
self.uses_learning_phase = True
def set_layer(self, layer):
self.layer = layer
def __call__(self, loss):
if not hasattr(self, 'layer'):
raise Exception('Need to call `set_layer` on '
'ActivityRegularizer instance '
'before calling the instance.')
regularized_loss = loss
for i in range(len(self.layer.inbound_nodes)):
output = self.layer.get_output_at(i)
if self.l1:
regularized_loss += K.sum(self.l1 * K.abs(output))
if self.l2:
regularized_loss += K.sum(self.l2 * K.square(output))
return K.in_train_phase(regularized_loss, loss)
def get_config(self):
return {'name': self.__class__.__name__,
'l1': float(self.l1),
'l2': float(self.l2)}
def l1(l=0.01):
return WeightRegularizer(l1=l)
def l2(l=0.01):
return WeightRegularizer(l2=l)
def l1l2(l1=0.01, l2=0.01):
return WeightRegularizer(l1=l1, l2=l2)
def activity_l1(l=0.01):
return ActivityRegularizer(l1=l)
def activity_l2(l=0.01):
return ActivityRegularizer(l2=l)
def activity_l1l2(l1=0.01, l2=0.01):
return ActivityRegularizer(l1=l1, l2=l2)
from .utils.generic_utils import get_from_module
def get(identifier, kwargs=None):
return get_from_module(identifier, globals(), 'regularizer',
instantiate=True, kwargs=kwargs)
| mit | -5,540,997,221,737,474,000 | 30.931973 | 116 | 0.566894 | false |
Batterfii/django | tests/auth_tests/test_templates.py | 328 | 2785 | from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.contrib.auth.views import (
password_change, password_change_done, password_reset,
password_reset_complete, password_reset_confirm, password_reset_done,
)
from django.test import RequestFactory, TestCase, override_settings
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode
@override_settings(
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF='auth_tests.urls',
)
class AuthTemplateTests(TestCase):
def test_titles(self):
rf = RequestFactory()
user = User.objects.create_user('jsmith', '[email protected]', 'pass')
user = authenticate(username=user.username, password='pass')
request = rf.get('/somepath/')
request.user = user
response = password_reset(request, post_reset_redirect='dummy/')
self.assertContains(response, '<title>Password reset</title>')
self.assertContains(response, '<h1>Password reset</h1>')
response = password_reset_done(request)
self.assertContains(response, '<title>Password reset sent</title>')
self.assertContains(response, '<h1>Password reset sent</h1>')
# password_reset_confirm invalid token
response = password_reset_confirm(request, uidb64='Bad', token='Bad', post_reset_redirect='dummy/')
self.assertContains(response, '<title>Password reset unsuccessful</title>')
self.assertContains(response, '<h1>Password reset unsuccessful</h1>')
# password_reset_confirm valid token
default_token_generator = PasswordResetTokenGenerator()
token = default_token_generator.make_token(user)
uidb64 = force_text(urlsafe_base64_encode(force_bytes(user.pk)))
response = password_reset_confirm(request, uidb64, token, post_reset_redirect='dummy/')
self.assertContains(response, '<title>Enter new password</title>')
self.assertContains(response, '<h1>Enter new password</h1>')
response = password_reset_complete(request)
self.assertContains(response, '<title>Password reset complete</title>')
self.assertContains(response, '<h1>Password reset complete</h1>')
response = password_change(request, post_change_redirect='dummy/')
self.assertContains(response, '<title>Password change</title>')
self.assertContains(response, '<h1>Password change</h1>')
response = password_change_done(request)
self.assertContains(response, '<title>Password change successful</title>')
self.assertContains(response, '<h1>Password change successful</h1>')
| bsd-3-clause | 6,447,871,748,114,101,000 | 47.859649 | 107 | 0.711311 | false |
jondo/shogun | examples/undocumented/python_modular/graphical/converter_jade_bss.py | 26 | 1089 | """
Blind Source Separation using the Jade Algorithm with Shogun
Based on the example from scikit-learn
http://scikit-learn.org/
Kevin Hughes 2013
"""
import numpy as np
import pylab as pl
from modshogun import RealFeatures
from modshogun import Jade
# Generate sample data
np.random.seed(0)
n_samples = 2000
time = np.linspace(0, 10, n_samples)
# Source Signals
s1 = np.sin(2 * time) # sin wave
s2 = np.sign(np.sin(3 * time)) # square wave
S = np.c_[s1, s2]
S += 0.2 * np.random.normal(size=S.shape) # add noise
# Standardize data
S /= S.std(axis=0)
S = S.T
# Mixing Matrix
A = np.array([[1, 0.5], [0.5, 1]])
# Mix Signals
X = np.dot(A,S)
mixed_signals = RealFeatures(X)
# Separating
jade = Jade()
signals = jade.apply(mixed_signals)
S_ = signals.get_feature_matrix()
A_ = jade.get_mixing_matrix()
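# Note (added for clarity): like any ICA method, JADE recovers the sources and
# mixing matrix only up to permutation and scaling, so A_ and S_ are expected
# to match A and S up to those ambiguities rather than exactly.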
# Plot results
pl.figure()
pl.subplot(3, 1, 1)
pl.plot(S.T)
pl.title('True Sources')
pl.subplot(3, 1, 2)
pl.plot(X.T)
pl.title('Mixed Sources')
pl.subplot(3, 1, 3)
pl.plot(S_.T)
pl.title('Estimated Sources')
pl.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.36)
pl.show()
| gpl-3.0 | -7,365,081,487,775,130,000 | 18.105263 | 60 | 0.677686 | false |
LarsFronius/ansible | lib/ansible/plugins/callback/foreman.py | 20 | 7129 | # -*- coding: utf-8 -*-
# (C) 2015, 2016 Daniel Lobato <[email protected]>
# 2016 Guido Günther <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from datetime import datetime
from collections import defaultdict
import json
import time
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
"""
This callback will report facts and reports to Foreman https://theforeman.org/
It makes use of the following environment variables:
FOREMAN_URL: URL to the Foreman server
FOREMAN_SSL_CERT: X509 certificate to authenticate to Foreman if
https is used
FOREMAN_SSL_KEY: the corresponding private key
FOREMAN_SSL_VERIFY: whether to verify the Foreman certificate
It can be set to '1' to verify SSL certificates using the
installed CAs or to a path pointing to a CA bundle. Set to '0'
to disable certificate checking.
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification'
CALLBACK_NAME = 'foreman'
CALLBACK_NEEDS_WHITELIST = True
FOREMAN_URL = os.getenv('FOREMAN_URL', "http://localhost:3000")
FOREMAN_SSL_CERT = (os.getenv('FOREMAN_SSL_CERT',
"/etc/foreman/client_cert.pem"),
os.getenv('FOREMAN_SSL_KEY',
"/etc/foreman/client_key.pem"))
FOREMAN_SSL_VERIFY = os.getenv('FOREMAN_SSL_VERIFY', "1")
FOREMAN_HEADERS = {
"Content-Type": "application/json",
"Accept": "application/json"
}
TIME_FORMAT = "%Y-%m-%d %H:%M:%S %f"
def __init__(self):
super(CallbackModule, self).__init__()
self.items = defaultdict(list)
self.start_time = int(time.time())
if HAS_REQUESTS:
requests_major = int(requests.__version__.split('.')[0])
if requests_major >= 2:
self.ssl_verify = self._ssl_verify()
else:
self._disable_plugin('The `requests` python module is too old.')
else:
self._disable_plugin('The `requests` python module is not installed.')
def _disable_plugin(self, msg):
self.disabled = True
self._display.warning(msg + ' Disabling the Foreman callback plugin.')
def _ssl_verify(self):
if self.FOREMAN_SSL_VERIFY.lower() in ["1", "true", "on"]:
verify = True
elif self.FOREMAN_SSL_VERIFY.lower() in ["0", "false", "off"]:
requests.packages.urllib3.disable_warnings()
self._display.warning("SSL verification of %s disabled" %
self.FOREMAN_URL)
verify = False
        else:  # Set to a CA bundle
verify = self.FOREMAN_SSL_VERIFY
return verify
def send_facts(self, host, data):
"""
Sends facts to Foreman, to be parsed by foreman_ansible fact
parser. The default fact importer should import these facts
properly.
"""
data["_type"] = "ansible"
data["_timestamp"] = datetime.now().strftime(self.TIME_FORMAT)
facts = {"name": host,
"facts": data,
}
requests.post(url=self.FOREMAN_URL + '/api/v2/hosts/facts',
data=json.dumps(facts),
headers=self.FOREMAN_HEADERS,
cert=self.FOREMAN_SSL_CERT,
verify=self.ssl_verify)
def _build_log(self, data):
logs = []
for entry in data:
source, msg = entry
if 'failed' in msg:
level = 'err'
else:
level = 'notice' if 'changed' in msg and msg['changed'] else 'info'
logs.append({"log": {
'sources': {'source': source},
'messages': {'message': json.dumps(msg)},
'level': level
}})
return logs
def send_reports(self, stats):
"""
Send reports to Foreman to be parsed by its config report
        importer. The data is in a format that Foreman can handle
without writing another report importer.
"""
status = defaultdict(lambda: 0)
metrics = {}
for host in stats.processed.keys():
sum = stats.summarize(host)
status["applied"] = sum['changed']
status["failed"] = sum['failures'] + sum['unreachable']
status["skipped"] = sum['skipped']
log = self._build_log(self.items[host])
metrics["time"] = {"total": int(time.time()) - self.start_time}
now = datetime.now().strftime(self.TIME_FORMAT)
report = {
"report": {
"host": host,
"reported_at": now,
"metrics": metrics,
"status": status,
"logs": log,
}
}
# To be changed to /api/v2/config_reports in 1.11. Maybe we
# could make a GET request to get the Foreman version & do
# this automatically.
requests.post(url=self.FOREMAN_URL + '/api/v2/reports',
data=json.dumps(report),
headers=self.FOREMAN_HEADERS,
cert=self.FOREMAN_SSL_CERT,
verify=self.ssl_verify)
self.items[host] = []
def append_result(self, result):
name = result._task.get_name()
host = result._host.get_name()
self.items[host].append((name, result._result))
# Ansible callback API
def v2_runner_on_failed(self, result, ignore_errors=False):
self.append_result(result)
def v2_runner_on_unreachable(self, result):
self.append_result(result)
def v2_runner_on_async_ok(self, result, jid):
self.append_result(result)
def v2_runner_on_async_failed(self, result, jid):
self.append_result(result)
def v2_playbook_on_stats(self, stats):
self.send_reports(stats)
def v2_runner_on_ok(self, result):
res = result._result
module = result._task.action
if module == 'setup':
host = result._host.get_name()
self.send_facts(host, res)
else:
self.append_result(result)
| gpl-3.0 | 2,600,363,873,120,758,000 | 35 | 83 | 0.574214 | false |
fanghuaqi/mbed | tools/flash_algo/extract.py | 31 | 3546 | #!/usr/bin/env python
"""
mbed
Copyright (c) 2017-2017 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import sys
import os
import argparse
from os.path import join, abspath, dirname
from flash_algo import PackFlashAlgo
# Be sure that the tools directory is in the search path
ROOT = abspath(join(dirname(__file__), "..", ".."))
sys.path.insert(0, ROOT)
from tools.targets import TARGETS
from tools.arm_pack_manager import Cache
TEMPLATE_PATH = "c_blob_mbed.tmpl"
def main():
"""Generate flash algorithms"""
parser = argparse.ArgumentParser(description='Flash generator')
parser.add_argument("--rebuild_all", action="store_true",
help="Rebuild entire cache")
parser.add_argument("--rebuild_descriptors", action="store_true",
help="Rebuild descriptors")
parser.add_argument("--target", default=None,
help="Name of target to generate algo for")
parser.add_argument("--all", action="store_true",
help="Build all flash algos for devcies")
args = parser.parse_args()
cache = Cache(True, True)
if args.rebuild_all:
cache.cache_everything()
print("Cache rebuilt")
return
if args.rebuild_descriptors:
cache.cache_descriptors()
print("Descriptors rebuilt")
return
if args.target is None:
device_and_filenames = [(target.device_name, target.name) for target
in TARGETS if hasattr(target, "device_name")]
else:
device_and_filenames = [(args.target, args.target.replace("/", "-"))]
try:
os.mkdir("output")
except OSError:
# Directory already exists
pass
for device, filename in device_and_filenames:
dev = cache.index[device]
binaries = cache.get_flash_algorthim_binary(device, all=True)
algos = [PackFlashAlgo(binary.read()) for binary in binaries]
filtered_algos = algos if args.all else filter_algos(dev, algos)
for idx, algo in enumerate(filtered_algos):
file_name = ("%s_%i.c" % (filename, idx)
if args.all or len(filtered_algos) != 1
else "%s.c" % filename)
output_path = join("output", file_name)
algo.process_template(TEMPLATE_PATH, output_path)
print("%s: %s \r" % (device, filename))
def filter_algos(dev, algos):
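    """Pick the flash algorithm matching the device's primary ROM region.
    (Docstring added for clarity.) When the pack description exposes a single
    IROM1 region and no IROM2 region, only an algorithm whose flash start and
    size match IROM1 exactly is kept; in every other case, or when that match
    is not unique, the full list is returned unchanged.
    """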
if "memory" not in dev:
return algos
if "IROM1" not in dev["memory"]:
return algos
if "IROM2" in dev["memory"]:
return algos
rom_rgn = dev["memory"]["IROM1"]
try:
start = int(rom_rgn["start"], 0)
size = int(rom_rgn["size"], 0)
except ValueError:
return algos
matching_algos = [algo for algo in algos if
algo.flash_start == start and algo.flash_size == size]
return matching_algos if len(matching_algos) == 1 else algos
if __name__ == '__main__':
main()
| apache-2.0 | 2,842,470,524,662,760,000 | 32.140187 | 77 | 0.625776 | false |
ralphtheninja/compose | tests/integration/cli_test.py | 9 | 20973 | from __future__ import absolute_import
from operator import attrgetter
import sys
import os
import shlex
from six import StringIO
from mock import patch
from .testcases import DockerClientTestCase
from compose.cli.main import TopLevelCommand
from compose.project import NoSuchService
class CLITestCase(DockerClientTestCase):
def setUp(self):
super(CLITestCase, self).setUp()
self.old_sys_exit = sys.exit
sys.exit = lambda code=0: None
self.command = TopLevelCommand()
self.command.base_dir = 'tests/fixtures/simple-composefile'
def tearDown(self):
sys.exit = self.old_sys_exit
self.project.kill()
self.project.remove_stopped()
for container in self.project.containers(stopped=True, one_off=True):
container.remove(force=True)
super(CLITestCase, self).tearDown()
@property
def project(self):
# Hack: allow project to be overridden. This needs refactoring so that
# the project object is built exactly once, by the command object, and
# accessed by the test case object.
if hasattr(self, '_project'):
return self._project
return self.command.get_project()
def test_help(self):
old_base_dir = self.command.base_dir
self.command.base_dir = 'tests/fixtures/no-composefile'
with self.assertRaises(SystemExit) as exc_context:
self.command.dispatch(['help', 'up'], None)
self.assertIn('Usage: up [options] [SERVICE...]', str(exc_context.exception))
# self.project.kill() fails during teardown
# unless there is a composefile.
self.command.base_dir = old_base_dir
# TODO: address the "Inappropriate ioctl for device" warnings in test output
@patch('sys.stdout', new_callable=StringIO)
def test_ps(self, mock_stdout):
self.project.get_service('simple').create_container()
self.command.dispatch(['ps'], None)
self.assertIn('simplecomposefile_simple_1', mock_stdout.getvalue())
@patch('sys.stdout', new_callable=StringIO)
def test_ps_default_composefile(self, mock_stdout):
self.command.base_dir = 'tests/fixtures/multiple-composefiles'
self.command.dispatch(['up', '-d'], None)
self.command.dispatch(['ps'], None)
output = mock_stdout.getvalue()
self.assertIn('multiplecomposefiles_simple_1', output)
self.assertIn('multiplecomposefiles_another_1', output)
self.assertNotIn('multiplecomposefiles_yetanother_1', output)
@patch('sys.stdout', new_callable=StringIO)
def test_ps_alternate_composefile(self, mock_stdout):
config_path = os.path.abspath(
'tests/fixtures/multiple-composefiles/compose2.yml')
self._project = self.command.get_project(config_path)
self.command.base_dir = 'tests/fixtures/multiple-composefiles'
self.command.dispatch(['-f', 'compose2.yml', 'up', '-d'], None)
self.command.dispatch(['-f', 'compose2.yml', 'ps'], None)
output = mock_stdout.getvalue()
self.assertNotIn('multiplecomposefiles_simple_1', output)
self.assertNotIn('multiplecomposefiles_another_1', output)
self.assertIn('multiplecomposefiles_yetanother_1', output)
@patch('compose.service.log')
def test_pull(self, mock_logging):
self.command.dispatch(['pull'], None)
mock_logging.info.assert_any_call('Pulling simple (busybox:latest)...')
mock_logging.info.assert_any_call('Pulling another (busybox:latest)...')
@patch('sys.stdout', new_callable=StringIO)
def test_build_no_cache(self, mock_stdout):
self.command.base_dir = 'tests/fixtures/simple-dockerfile'
self.command.dispatch(['build', 'simple'], None)
mock_stdout.truncate(0)
cache_indicator = 'Using cache'
self.command.dispatch(['build', 'simple'], None)
output = mock_stdout.getvalue()
self.assertIn(cache_indicator, output)
mock_stdout.truncate(0)
self.command.dispatch(['build', '--no-cache', 'simple'], None)
output = mock_stdout.getvalue()
self.assertNotIn(cache_indicator, output)
def test_up(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
another = self.project.get_service('another')
self.assertEqual(len(service.containers()), 1)
self.assertEqual(len(another.containers()), 1)
# Ensure containers don't have stdin and stdout connected in -d mode
config = service.containers()[0].inspect()['Config']
self.assertFalse(config['AttachStderr'])
self.assertFalse(config['AttachStdout'])
self.assertFalse(config['AttachStdin'])
def test_up_with_links(self):
self.command.base_dir = 'tests/fixtures/links-composefile'
self.command.dispatch(['up', '-d', 'web'], None)
web = self.project.get_service('web')
db = self.project.get_service('db')
console = self.project.get_service('console')
self.assertEqual(len(web.containers()), 1)
self.assertEqual(len(db.containers()), 1)
self.assertEqual(len(console.containers()), 0)
def test_up_with_no_deps(self):
self.command.base_dir = 'tests/fixtures/links-composefile'
self.command.dispatch(['up', '-d', '--no-deps', 'web'], None)
web = self.project.get_service('web')
db = self.project.get_service('db')
console = self.project.get_service('console')
self.assertEqual(len(web.containers()), 1)
self.assertEqual(len(db.containers()), 0)
self.assertEqual(len(console.containers()), 0)
def test_up_with_recreate(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.assertEqual(len(service.containers()), 1)
old_ids = [c.id for c in service.containers()]
self.command.dispatch(['up', '-d'], None)
self.assertEqual(len(service.containers()), 1)
new_ids = [c.id for c in service.containers()]
self.assertNotEqual(old_ids, new_ids)
def test_up_with_keep_old(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.assertEqual(len(service.containers()), 1)
old_ids = [c.id for c in service.containers()]
self.command.dispatch(['up', '-d', '--no-recreate'], None)
self.assertEqual(len(service.containers()), 1)
new_ids = [c.id for c in service.containers()]
self.assertEqual(old_ids, new_ids)
def test_up_with_timeout(self):
self.command.dispatch(['up', '-d', '-t', '1'], None)
service = self.project.get_service('simple')
another = self.project.get_service('another')
self.assertEqual(len(service.containers()), 1)
self.assertEqual(len(another.containers()), 1)
# Ensure containers don't have stdin and stdout connected in -d mode
config = service.containers()[0].inspect()['Config']
self.assertFalse(config['AttachStderr'])
self.assertFalse(config['AttachStdout'])
self.assertFalse(config['AttachStdin'])
@patch('dockerpty.start')
def test_run_service_without_links(self, mock_stdout):
self.command.base_dir = 'tests/fixtures/links-composefile'
self.command.dispatch(['run', 'console', '/bin/true'], None)
self.assertEqual(len(self.project.containers()), 0)
# Ensure stdin/out was open
container = self.project.containers(stopped=True, one_off=True)[0]
config = container.inspect()['Config']
self.assertTrue(config['AttachStderr'])
self.assertTrue(config['AttachStdout'])
self.assertTrue(config['AttachStdin'])
@patch('dockerpty.start')
def test_run_service_with_links(self, __):
self.command.base_dir = 'tests/fixtures/links-composefile'
self.command.dispatch(['run', 'web', '/bin/true'], None)
db = self.project.get_service('db')
console = self.project.get_service('console')
self.assertEqual(len(db.containers()), 1)
self.assertEqual(len(console.containers()), 0)
@patch('dockerpty.start')
def test_run_with_no_deps(self, __):
self.command.base_dir = 'tests/fixtures/links-composefile'
self.command.dispatch(['run', '--no-deps', 'web', '/bin/true'], None)
db = self.project.get_service('db')
self.assertEqual(len(db.containers()), 0)
@patch('dockerpty.start')
def test_run_does_not_recreate_linked_containers(self, __):
self.command.base_dir = 'tests/fixtures/links-composefile'
self.command.dispatch(['up', '-d', 'db'], None)
db = self.project.get_service('db')
self.assertEqual(len(db.containers()), 1)
old_ids = [c.id for c in db.containers()]
self.command.dispatch(['run', 'web', '/bin/true'], None)
self.assertEqual(len(db.containers()), 1)
new_ids = [c.id for c in db.containers()]
self.assertEqual(old_ids, new_ids)
@patch('dockerpty.start')
def test_run_without_command(self, _):
self.command.base_dir = 'tests/fixtures/commands-composefile'
self.check_build('tests/fixtures/simple-dockerfile', tag='composetest_test')
self.command.dispatch(['run', 'implicit'], None)
service = self.project.get_service('implicit')
containers = service.containers(stopped=True, one_off=True)
self.assertEqual(
[c.human_readable_command for c in containers],
[u'/bin/sh -c echo "success"'],
)
self.command.dispatch(['run', 'explicit'], None)
service = self.project.get_service('explicit')
containers = service.containers(stopped=True, one_off=True)
self.assertEqual(
[c.human_readable_command for c in containers],
[u'/bin/true'],
)
@patch('dockerpty.start')
def test_run_service_with_entrypoint_overridden(self, _):
self.command.base_dir = 'tests/fixtures/dockerfile_with_entrypoint'
name = 'service'
self.command.dispatch(
['run', '--entrypoint', '/bin/echo', name, 'helloworld'],
None
)
service = self.project.get_service(name)
container = service.containers(stopped=True, one_off=True)[0]
self.assertEqual(
shlex.split(container.human_readable_command),
[u'/bin/echo', u'helloworld'],
)
@patch('dockerpty.start')
def test_run_service_with_user_overridden(self, _):
self.command.base_dir = 'tests/fixtures/user-composefile'
name = 'service'
user = 'sshd'
args = ['run', '--user={}'.format(user), name]
self.command.dispatch(args, None)
service = self.project.get_service(name)
container = service.containers(stopped=True, one_off=True)[0]
self.assertEqual(user, container.get('Config.User'))
@patch('dockerpty.start')
def test_run_service_with_user_overridden_short_form(self, _):
self.command.base_dir = 'tests/fixtures/user-composefile'
name = 'service'
user = 'sshd'
args = ['run', '-u', user, name]
self.command.dispatch(args, None)
service = self.project.get_service(name)
container = service.containers(stopped=True, one_off=True)[0]
self.assertEqual(user, container.get('Config.User'))
@patch('dockerpty.start')
    def test_run_service_with_environment_overridden(self, _):
name = 'service'
self.command.base_dir = 'tests/fixtures/environment-composefile'
self.command.dispatch(
['run', '-e', 'foo=notbar', '-e', 'allo=moto=bobo',
'-e', 'alpha=beta', name],
None
)
service = self.project.get_service(name)
container = service.containers(stopped=True, one_off=True)[0]
        # env overridden
self.assertEqual('notbar', container.environment['foo'])
        # keep environment from yaml
self.assertEqual('world', container.environment['hello'])
# added option from command line
self.assertEqual('beta', container.environment['alpha'])
        # make sure a value containing an "=" doesn't crash
self.assertEqual('moto=bobo', container.environment['allo'])
@patch('dockerpty.start')
def test_run_service_without_map_ports(self, __):
# create one off container
self.command.base_dir = 'tests/fixtures/ports-composefile'
self.command.dispatch(['run', '-d', 'simple'], None)
container = self.project.get_service('simple').containers(one_off=True)[0]
# get port information
port_random = container.get_local_port(3000)
port_assigned = container.get_local_port(3001)
# close all one off containers we just created
container.stop()
# check the ports
self.assertEqual(port_random, None)
self.assertEqual(port_assigned, None)
@patch('dockerpty.start')
def test_run_service_with_map_ports(self, __):
# create one off container
self.command.base_dir = 'tests/fixtures/ports-composefile'
self.command.dispatch(['run', '-d', '--service-ports', 'simple'], None)
container = self.project.get_service('simple').containers(one_off=True)[0]
# get port information
port_random = container.get_local_port(3000)
port_assigned = container.get_local_port(3001)
# close all one off containers we just created
container.stop()
# check the ports
self.assertNotEqual(port_random, None)
self.assertIn("0.0.0.0", port_random)
self.assertEqual(port_assigned, "0.0.0.0:49152")
def test_rm(self):
service = self.project.get_service('simple')
service.create_container()
service.kill()
self.assertEqual(len(service.containers(stopped=True)), 1)
self.command.dispatch(['rm', '--force'], None)
self.assertEqual(len(service.containers(stopped=True)), 0)
service = self.project.get_service('simple')
service.create_container()
service.kill()
self.assertEqual(len(service.containers(stopped=True)), 1)
self.command.dispatch(['rm', '-f'], None)
self.assertEqual(len(service.containers(stopped=True)), 0)
def test_stop(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.assertEqual(len(service.containers()), 1)
self.assertTrue(service.containers()[0].is_running)
self.command.dispatch(['stop', '-t', '1'], None)
self.assertEqual(len(service.containers(stopped=True)), 1)
self.assertFalse(service.containers(stopped=True)[0].is_running)
def test_logs_invalid_service_name(self):
with self.assertRaises(NoSuchService):
self.command.dispatch(['logs', 'madeupname'], None)
def test_kill(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.assertEqual(len(service.containers()), 1)
self.assertTrue(service.containers()[0].is_running)
self.command.dispatch(['kill'], None)
self.assertEqual(len(service.containers(stopped=True)), 1)
self.assertFalse(service.containers(stopped=True)[0].is_running)
def test_kill_signal_sigstop(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.assertEqual(len(service.containers()), 1)
self.assertTrue(service.containers()[0].is_running)
self.command.dispatch(['kill', '-s', 'SIGSTOP'], None)
self.assertEqual(len(service.containers()), 1)
# The container is still running. It has only been paused
self.assertTrue(service.containers()[0].is_running)
def test_kill_stopped_service(self):
self.command.dispatch(['up', '-d'], None)
service = self.project.get_service('simple')
self.command.dispatch(['kill', '-s', 'SIGSTOP'], None)
self.assertTrue(service.containers()[0].is_running)
self.command.dispatch(['kill', '-s', 'SIGKILL'], None)
self.assertEqual(len(service.containers(stopped=True)), 1)
self.assertFalse(service.containers(stopped=True)[0].is_running)
def test_restart(self):
service = self.project.get_service('simple')
container = service.create_container()
service.start_container(container)
started_at = container.dictionary['State']['StartedAt']
self.command.dispatch(['restart', '-t', '1'], None)
container.inspect()
self.assertNotEqual(
container.dictionary['State']['FinishedAt'],
'0001-01-01T00:00:00Z',
)
self.assertNotEqual(
container.dictionary['State']['StartedAt'],
started_at,
)
def test_scale(self):
project = self.project
self.command.scale(project, {'SERVICE=NUM': ['simple=1']})
self.assertEqual(len(project.get_service('simple').containers()), 1)
self.command.scale(project, {'SERVICE=NUM': ['simple=3', 'another=2']})
self.assertEqual(len(project.get_service('simple').containers()), 3)
self.assertEqual(len(project.get_service('another').containers()), 2)
self.command.scale(project, {'SERVICE=NUM': ['simple=1', 'another=1']})
self.assertEqual(len(project.get_service('simple').containers()), 1)
self.assertEqual(len(project.get_service('another').containers()), 1)
self.command.scale(project, {'SERVICE=NUM': ['simple=1', 'another=1']})
self.assertEqual(len(project.get_service('simple').containers()), 1)
self.assertEqual(len(project.get_service('another').containers()), 1)
self.command.scale(project, {'SERVICE=NUM': ['simple=0', 'another=0']})
self.assertEqual(len(project.get_service('simple').containers()), 0)
self.assertEqual(len(project.get_service('another').containers()), 0)
def test_port(self):
self.command.base_dir = 'tests/fixtures/ports-composefile'
self.command.dispatch(['up', '-d'], None)
container = self.project.get_service('simple').get_container()
@patch('sys.stdout', new_callable=StringIO)
def get_port(number, mock_stdout):
self.command.dispatch(['port', 'simple', str(number)], None)
return mock_stdout.getvalue().rstrip()
self.assertEqual(get_port(3000), container.get_local_port(3000))
self.assertEqual(get_port(3001), "0.0.0.0:49152")
self.assertEqual(get_port(3002), "")
def test_port_with_scale(self):
self.command.base_dir = 'tests/fixtures/ports-composefile-scale'
self.command.dispatch(['scale', 'simple=2'], None)
containers = sorted(
self.project.containers(service_names=['simple']),
key=attrgetter('name'))
@patch('sys.stdout', new_callable=StringIO)
def get_port(number, mock_stdout, index=None):
if index is None:
self.command.dispatch(['port', 'simple', str(number)], None)
else:
self.command.dispatch(['port', '--index=' + str(index), 'simple', str(number)], None)
return mock_stdout.getvalue().rstrip()
self.assertEqual(get_port(3000), containers[0].get_local_port(3000))
self.assertEqual(get_port(3000, index=1), containers[0].get_local_port(3000))
self.assertEqual(get_port(3000, index=2), containers[1].get_local_port(3000))
self.assertEqual(get_port(3002), "")
def test_env_file_relative_to_compose_file(self):
config_path = os.path.abspath('tests/fixtures/env-file/docker-compose.yml')
self.command.dispatch(['-f', config_path, 'up', '-d'], None)
self._project = self.command.get_project(config_path)
containers = self.project.containers(stopped=True)
self.assertEqual(len(containers), 1)
self.assertIn("FOO=1", containers[0].get('Config.Env'))
def test_up_with_extends(self):
self.command.base_dir = 'tests/fixtures/extends'
self.command.dispatch(['up', '-d'], None)
self.assertEqual(
set([s.name for s in self.project.services]),
set(['mydb', 'myweb']),
)
# Sort by name so we get [db, web]
containers = sorted(
self.project.containers(stopped=True),
key=lambda c: c.name,
)
self.assertEqual(len(containers), 2)
web = containers[1]
self.assertEqual(set(web.links()), set(['db', 'mydb_1', 'extends_mydb_1']))
expected_env = set([
"FOO=1",
"BAR=2",
"BAZ=2",
])
self.assertTrue(expected_env <= set(web.get('Config.Env')))
| apache-2.0 | 1,343,260,207,027,971,800 | 40.043053 | 101 | 0.626377 | false |
HydrelioxGitHub/home-assistant | homeassistant/components/meteo_france/__init__.py | 3 | 4108 | """Support for Meteo-France weather data."""
import datetime
import logging
import voluptuous as vol
from homeassistant.const import CONF_MONITORED_CONDITIONS, TEMP_CELSIUS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.util import Throttle
REQUIREMENTS = ['meteofrance==0.3.4']
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Météo-France"
CONF_CITY = 'city'
DATA_METEO_FRANCE = 'data_meteo_france'
DEFAULT_WEATHER_CARD = True
DOMAIN = 'meteo_france'
SCAN_INTERVAL = datetime.timedelta(minutes=5)
SENSOR_TYPES = {
'rain_chance': ['Rain chance', '%'],
'freeze_chance': ['Freeze chance', '%'],
'thunder_chance': ['Thunder chance', '%'],
'snow_chance': ['Snow chance', '%'],
'weather': ['Weather', None],
'wind_speed': ['Wind Speed', 'km/h'],
'next_rain': ['Next rain', 'min'],
'temperature': ['Temperature', TEMP_CELSIUS],
'uv': ['UV', None],
}
CONDITION_CLASSES = {
'clear-night': ['Nuit Claire'],
'cloudy': ['Très nuageux'],
'fog': ['Brume ou bancs de brouillard',
'Brouillard', 'Brouillard givrant'],
'hail': ['Risque de grêle'],
'lightning': ["Risque d'orages", 'Orages'],
'lightning-rainy': ['Pluie orageuses', 'Pluies orageuses',
'Averses orageuses'],
'partlycloudy': ['Ciel voilé', 'Ciel voilé nuit', 'Éclaircies'],
'pouring': ['Pluie forte'],
'rainy': ['Bruine / Pluie faible', 'Bruine', 'Pluie faible',
'Pluies éparses / Rares averses', 'Pluies éparses',
'Rares averses', 'Pluie / Averses', 'Averses', 'Pluie'],
'snowy': ['Neige / Averses de neige', 'Neige', 'Averses de neige',
'Neige forte', 'Quelques flocons'],
'snowy-rainy': ['Pluie et neige', 'Pluie verglaçante'],
'sunny': ['Ensoleillé'],
'windy': [],
'windy-variant': [],
'exceptional': [],
}
def has_all_unique_cities(value):
"""Validate that all cities are unique."""
cities = [location[CONF_CITY] for location in value]
vol.Schema(vol.Unique())(cities)
return value
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.All(cv.ensure_list, [vol.Schema({
vol.Required(CONF_CITY): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})], has_all_unique_cities)
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Set up the Meteo-France component."""
hass.data[DATA_METEO_FRANCE] = {}
for location in config[DOMAIN]:
city = location[CONF_CITY]
from meteofrance.client import meteofranceClient, meteofranceError
try:
client = meteofranceClient(city)
except meteofranceError as exp:
_LOGGER.error(exp)
return
client.need_rain_forecast = bool(
CONF_MONITORED_CONDITIONS in location and 'next_rain' in
location[CONF_MONITORED_CONDITIONS])
hass.data[DATA_METEO_FRANCE][city] = MeteoFranceUpdater(client)
hass.data[DATA_METEO_FRANCE][city].update()
if CONF_MONITORED_CONDITIONS in location:
monitored_conditions = location[CONF_MONITORED_CONDITIONS]
load_platform(
hass, 'sensor', DOMAIN, {
CONF_CITY: city,
CONF_MONITORED_CONDITIONS: monitored_conditions}, config)
load_platform(hass, 'weather', DOMAIN, {CONF_CITY: city}, config)
return True
class MeteoFranceUpdater:
"""Update data from Meteo-France."""
def __init__(self, client):
"""Initialize the data object."""
self._client = client
def get_data(self):
"""Get the latest data from Meteo-France."""
return self._client.get_data()
@Throttle(SCAN_INTERVAL)
def update(self):
"""Get the latest data from Meteo-France."""
from meteofrance.client import meteofranceError
try:
self._client.update()
except meteofranceError as exp:
_LOGGER.error(exp)
| apache-2.0 | 505,188,131,546,163,800 | 30.274809 | 77 | 0.619234 | false |
lashwang/pyspider | tests/test_database.py | 5 | 23410 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2014-02-08 22:37:13
from __future__ import unicode_literals, division
import os
import six
import time
import unittest2 as unittest
from pyspider import database
from pyspider.database.base.taskdb import TaskDB
class TaskDBCase(object):
sample_task = {
'taskid': 'taskid',
'project': 'project',
'url': 'www.baidu.com/',
'status': TaskDB.FAILED,
'schedule': {
'priority': 1,
'retries': 3,
'exetime': 0,
'age': 3600,
'itag': 'itag',
'recrawl': 5,
},
'fetch': {
'method': 'GET',
'headers': {
'Cookie': 'a=b',
},
'data': 'a=b&c=d',
'timeout': 60,
},
'process': {
'callback': 'callback',
'save': [1, 2, 3],
},
'track': {
'fetch': {
'ok': True,
'time': 300,
'status_code': 200,
'headers': {
'Content-Type': 'plain/html',
},
'encoding': 'utf8',
# 'content': 'asdfasdfasdfasdf',
},
'process': {
'ok': False,
'time': 10,
'follows': 3,
'outputs': 5,
'exception': u"中文",
},
},
'lastcrawltime': time.time(),
'updatetime': time.time(),
}
@classmethod
def setUpClass(self):
raise NotImplementedError
# this test not works for mongodb
# def test_10_create_project(self):
# with self.assertRaises(AssertionError):
# self.taskdb._create_project('abc.abc')
# self.taskdb._create_project('abc')
# self.taskdb._list_project()
# self.assertEqual(len(self.taskdb.projects), 1)
# self.assertIn('abc', self.taskdb.projects)
def test_20_insert(self):
self.taskdb.insert('project', 'taskid', self.sample_task)
self.taskdb.insert('project', 'taskid2', self.sample_task)
def test_25_get_task(self):
task = self.taskdb.get_task('project', 'taskid2')
self.assertEqual(task['taskid'], 'taskid2')
self.assertEqual(task['project'], self.sample_task['project'])
self.assertEqual(task['url'], self.sample_task['url'])
self.assertEqual(task['status'], self.taskdb.FAILED)
self.assertEqual(task['schedule'], self.sample_task['schedule'])
self.assertEqual(task['fetch'], self.sample_task['fetch'])
self.assertEqual(task['process'], self.sample_task['process'])
self.assertEqual(task['track'], self.sample_task['track'])
task = self.taskdb.get_task('project', 'taskid1', fields=['status'])
self.assertIsNone(task)
task = self.taskdb.get_task('project', 'taskid', fields=['taskid', 'track', ])
self.assertIn('track', task)
self.assertNotIn('project', task)
def test_30_status_count(self):
status = self.taskdb.status_count('abc')
self.assertEqual(status, {})
status = self.taskdb.status_count('project')
self.assertEqual(status, {self.taskdb.FAILED: 2})
def test_40_update_and_status_count(self):
self.taskdb.update('project', 'taskid', status=self.taskdb.ACTIVE)
status = self.taskdb.status_count('project')
self.assertEqual(status, {self.taskdb.ACTIVE: 1, self.taskdb.FAILED: 1})
self.taskdb.update('project', 'taskid', track={})
task = self.taskdb.get_task('project', 'taskid', fields=['taskid', 'track', ])
self.assertIn('track', task)
self.assertEqual(task['track'], {})
def test_50_load_tasks(self):
tasks = list(self.taskdb.load_tasks(self.taskdb.ACTIVE))
self.assertEqual(len(tasks), 1)
task = tasks[0]
self.assertIn('taskid', task, task)
self.assertEqual(task['taskid'], 'taskid', task)
self.assertEqual(task['schedule'], self.sample_task['schedule'])
self.assertEqual(task['fetch'], self.sample_task['fetch'])
self.assertEqual(task['process'], self.sample_task['process'])
self.assertEqual(task['track'], {})
tasks = list(self.taskdb.load_tasks(self.taskdb.ACTIVE, project='project',
fields=['taskid']))
self.assertEqual(len(tasks), 1)
self.assertEqual(tasks[0]['taskid'], 'taskid')
self.assertNotIn('project', tasks[0])
def test_60_relist_projects(self):
if hasattr(self.taskdb, '_list_project'):
self.taskdb._list_project()
self.assertNotIn('system.indexes', self.taskdb.projects)
def test_z10_drop(self):
self.taskdb.insert('drop_project2', 'taskid', self.sample_task)
self.taskdb.insert('drop_project3', 'taskid', self.sample_task)
self.taskdb.drop('drop_project3')
self.assertIsNotNone(self.taskdb.get_task('drop_project2', 'taskid'), None)
self.assertIsNone(self.taskdb.get_task('drop_project3', 'taskid'), None)
def test_z20_update_projects(self):
saved = getattr(self.taskdb, 'UPDATE_PROJECTS_TIME', None)
self.taskdb.UPDATE_PROJECTS_TIME = 0.1
time.sleep(0.2)
self.assertIn('drop_project2', self.taskdb.projects)
self.assertNotIn('drop_project3', self.taskdb.projects)
self.taskdb.UPDATE_PROJECTS_TIME = saved
class ProjectDBCase(object):
sample_project = {
'name': 'name',
'script': 'import time\nprint(time.time(), "!@#$%^&*()\';:<>?/|")',
'status': 'TODO',
'rate': 1.0,
'burst': 10.0,
}
@classmethod
def setUpClass(self):
        raise NotImplementedError
def test_10_insert(self):
self.projectdb.insert('abc', self.sample_project)
self.projectdb.insert(u'name中文', self.sample_project)
project = self.projectdb.get('abc')
self.assertIsNotNone(project)
def test_20_get_all(self):
projects = list(self.projectdb.get_all())
self.assertEqual(len(projects), 2)
for project in projects:
if project['name'] == 'abc':
break
for key in ('name', 'group', 'status', 'script', 'comments', 'rate', 'burst', 'updatetime'):
self.assertIn(key, project)
self.assertEqual(project['name'], u'abc')
self.assertEqual(project['status'], self.sample_project['status'])
self.assertEqual(project['script'], self.sample_project['script'])
self.assertEqual(project['rate'], self.sample_project['rate'])
self.assertEqual(type(project['rate']), float)
self.assertEqual(project['burst'], self.sample_project['burst'])
self.assertEqual(type(project['burst']), float)
projects = list(self.projectdb.get_all(fields=['name', 'script']))
self.assertEqual(len(projects), 2)
project = projects[1]
self.assertIn('name', project)
        self.assertNotIn('group', project)
def test_30_update(self):
self.projectdb.update('not_found', status='RUNNING')
project = self.projectdb.get('not_found')
self.assertIsNone(project)
def test_40_check_update(self):
time.sleep(0.1)
now = time.time()
time.sleep(0.1)
self.projectdb.update('abc', status='RUNNING')
projects = list(self.projectdb.check_update(
now,
fields=['name', 'status', 'group', 'updatetime', ]
))
self.assertEqual(len(projects), 1, repr(projects))
project = projects[0]
self.assertEqual(project['name'], 'abc')
self.assertEqual(project['status'], 'RUNNING')
def test_45_check_update_when_bootup(self):
projects = list(self.projectdb.check_update(0))
project = projects[0]
for key in ('name', 'group', 'status', 'script', 'comments', 'rate', 'burst', 'updatetime'):
self.assertIn(key, project)
def test_50_get(self):
project = self.projectdb.get('not_found')
self.assertIsNone(project)
project = self.projectdb.get('abc')
self.assertEqual(project['name'], 'abc')
self.assertEqual(project['status'], 'RUNNING')
project = self.projectdb.get(u'name中文', ['group', 'status', 'name'])
self.assertEqual(project['name'], u'name中文')
self.assertIn('status', project)
self.assertNotIn('gourp', project)
def test_z10_drop(self):
self.projectdb.insert(u'drop_project2', self.sample_project)
self.projectdb.insert(u'drop_project3', self.sample_project)
self.projectdb.drop('drop_project3')
self.assertIsNotNone(self.projectdb.get('drop_project2'))
self.assertIsNone(self.projectdb.get('drop_project3'))
class ResultDBCase(object):
@classmethod
def setUpClass(self):
        raise NotImplementedError
def test_10_save(self):
self.resultdb.save('test_project', 'test_taskid', 'test_url', 'result')
result = self.resultdb.get('test_project', 'test_taskid')
self.assertEqual(result['result'], 'result')
self.resultdb.save('test_project', 'test_taskid', 'test_url_updated', 'result_updated')
result = self.resultdb.get('test_project', 'test_taskid')
self.assertEqual(result['result'], 'result_updated')
self.assertEqual(result['url'], 'test_url_updated')
def test_20_get(self):
result = self.resultdb.get('test_project', 'not_exists')
self.assertIsNone(result)
result = self.resultdb.get('not_exists', 'test_taskid')
self.assertIsNone(result)
result = self.resultdb.get('test_project', 'test_taskid', fields=('url', ))
self.assertIn('url', result)
self.assertNotIn('result', result)
result = self.resultdb.get('test_project', 'test_taskid')
self.assertEqual(result['taskid'], 'test_taskid')
self.assertEqual(result['url'], 'test_url_updated')
self.assertEqual(result['result'], 'result_updated')
self.assertIn('updatetime', result)
def test_30_select(self):
for i in range(5):
self.resultdb.save('test_project', 'test_taskid-%d' % i,
'test_url', 'result-%d' % i)
ret = list(self.resultdb.select('test_project'))
self.assertEqual(len(ret), 6)
ret = list(self.resultdb.select('test_project', limit=4))
self.assertEqual(len(ret), 4)
for ret in self.resultdb.select('test_project', fields=('url', ), limit=1):
self.assertIn('url', ret)
self.assertNotIn('result', ret)
def test_35_select_limit(self):
ret = list(self.resultdb.select('test_project', limit=None, offset=None))
self.assertEqual(len(ret), 6)
ret = list(self.resultdb.select('test_project', limit=None, offset=2))
self.assertEqual(len(ret), 4, ret)
def test_40_count(self):
self.assertEqual(self.resultdb.count('test_project'), 6)
def test_50_select_not_finished(self):
for i in self.resultdb.select('test_project'):
break
self.assertEqual(self.resultdb.count('test_project'), 6)
def test_60_relist_projects(self):
if hasattr(self.resultdb, '_list_project'):
self.resultdb._list_project()
self.assertNotIn('system.indexes', self.resultdb.projects)
def test_z10_drop(self):
self.resultdb.save('drop_project2', 'test_taskid', 'test_url', 'result')
self.resultdb.save('drop_project3', 'test_taskid', 'test_url', 'result')
self.resultdb.drop('drop_project3')
self.assertIsNotNone(self.resultdb.get('drop_project2', 'test_taskid'))
self.assertIsNone(self.resultdb.get('drop_project3', 'test_taskid'))
def test_z20_update_projects(self):
saved = self.resultdb.UPDATE_PROJECTS_TIME
self.resultdb.UPDATE_PROJECTS_TIME = 0.1
time.sleep(0.2)
self.assertIn('drop_project2', self.resultdb.projects)
self.assertNotIn('drop_project3', self.resultdb.projects)
self.resultdb.UPDATE_PROJECTS_TIME = saved
class TestSqliteTaskDB(TaskDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.taskdb = database.connect_database('sqlite+taskdb://')
@classmethod
def tearDownClass(self):
del self.taskdb
class TestSqliteProjectDB(ProjectDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.projectdb = database.connect_database('sqlite+projectdb://')
@classmethod
def tearDownClass(self):
del self.projectdb
class TestSqliteResultDB(ResultDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.resultdb = database.connect_database('sqlite+resultdb://')
@classmethod
def tearDownClass(self):
del self.resultdb
@unittest.skipIf(os.environ.get('IGNORE_MYSQL') or os.environ.get('IGNORE_ALL'), 'no mysql server for test.')
class TestMysqlTaskDB(TaskDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.taskdb = database.connect_database('mysql+taskdb://localhost/pyspider_test_taskdb')
@classmethod
def tearDownClass(self):
self.taskdb._execute('DROP DATABASE pyspider_test_taskdb')
@unittest.skipIf(os.environ.get('IGNORE_MYSQL') or os.environ.get('IGNORE_ALL'), 'no mysql server for test.')
class TestMysqlProjectDB(ProjectDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.projectdb = database.connect_database(
'mysql+projectdb://localhost/pyspider_test_projectdb'
)
@classmethod
def tearDownClass(self):
self.projectdb._execute('DROP DATABASE pyspider_test_projectdb')
@unittest.skipIf(os.environ.get('IGNORE_MYSQL') or os.environ.get('IGNORE_ALL'), 'no mysql server for test.')
class TestMysqlResultDB(ResultDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.resultdb = database.connect_database(
'mysql+resultdb://localhost/pyspider_test_resultdb'
)
@classmethod
def tearDownClass(self):
self.resultdb._execute('DROP DATABASE pyspider_test_resultdb')
@unittest.skipIf(os.environ.get('IGNORE_MONGODB') or os.environ.get('IGNORE_ALL'), 'no mongodb server for test.')
class TestMongoDBTaskDB(TaskDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.taskdb = database.connect_database(
'mongodb+taskdb://localhost:27017/pyspider_test_taskdb'
)
@classmethod
def tearDownClass(self):
self.taskdb.conn.drop_database(self.taskdb.database.name)
def test_create_project(self):
self.assertNotIn('test_create_project', self.taskdb.projects)
self.taskdb._create_project('test_create_project')
self.assertIn('test_create_project', self.taskdb.projects)
@unittest.skipIf(os.environ.get('IGNORE_MONGODB') or os.environ.get('IGNORE_ALL'), 'no mongodb server for test.')
class TestMongoDBProjectDB(ProjectDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.projectdb = database.connect_database(
'mongodb+projectdb://localhost/pyspider_test_projectdb'
)
@classmethod
def tearDownClass(self):
self.projectdb.conn.drop_database(self.projectdb.database.name)
@unittest.skipIf(os.environ.get('IGNORE_MONGODB') or os.environ.get('IGNORE_ALL'), 'no mongodb server for test.')
class TestMongoDBResultDB(ResultDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.resultdb = database.connect_database(
'mongodb+resultdb://localhost/pyspider_test_resultdb'
)
@classmethod
def tearDownClass(self):
self.resultdb.conn.drop_database(self.resultdb.database.name)
def test_create_project(self):
self.assertNotIn('test_create_project', self.resultdb.projects)
self.resultdb._create_project('test_create_project')
self.assertIn('test_create_project', self.resultdb.projects)
@unittest.skipIf(os.environ.get('IGNORE_MYSQL') or os.environ.get('IGNORE_ALL'), 'no mysql server for test.')
class TestSQLAlchemyMySQLTaskDB(TaskDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.taskdb = database.connect_database(
'sqlalchemy+mysql+mysqlconnector+taskdb://root@localhost/pyspider_test_taskdb'
)
@classmethod
def tearDownClass(self):
self.taskdb.engine.execute('DROP DATABASE pyspider_test_taskdb')
@unittest.skipIf(os.environ.get('IGNORE_MYSQL') or os.environ.get('IGNORE_ALL'), 'no mysql server for test.')
class TestSQLAlchemyMySQLProjectDB(ProjectDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.projectdb = database.connect_database(
'sqlalchemy+mysql+mysqlconnector+projectdb://root@localhost/pyspider_test_projectdb'
)
@classmethod
def tearDownClass(self):
self.projectdb.engine.execute('DROP DATABASE pyspider_test_projectdb')
@unittest.skipIf(os.environ.get('IGNORE_MYSQL') or os.environ.get('IGNORE_ALL'), 'no mysql server for test.')
class TestSQLAlchemyMySQLResultDB(ResultDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.resultdb = database.connect_database(
'sqlalchemy+mysql+mysqlconnector+resultdb://root@localhost/pyspider_test_resultdb'
)
@classmethod
def tearDownClass(self):
self.resultdb.engine.execute('DROP DATABASE pyspider_test_resultdb')
class TestSQLAlchemyTaskDB(TaskDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.taskdb = database.connect_database(
'sqlalchemy+sqlite+taskdb://'
)
@classmethod
def tearDownClass(self):
del self.taskdb
class TestSQLAlchemyProjectDB(ProjectDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.projectdb = database.connect_database(
'sqlalchemy+sqlite+projectdb://'
)
@classmethod
def tearDownClass(self):
del self.projectdb
class TestSQLAlchemyResultDB(ResultDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.resultdb = database.connect_database(
'sqlalchemy+sqlite+resultdb://'
)
@classmethod
def tearDownClass(self):
del self.resultdb
@unittest.skipIf(os.environ.get('IGNORE_POSTGRESQL') or os.environ.get('IGNORE_ALL'), 'no postgresql server for test.')
class TestPGTaskDB(TaskDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.taskdb = database.connect_database(
'sqlalchemy+postgresql+taskdb://[email protected]:5432/pyspider_test_taskdb'
)
self.tearDownClass()
@classmethod
def tearDownClass(self):
for project in self.taskdb.projects:
self.taskdb.drop(project)
@unittest.skipIf(os.environ.get('IGNORE_POSTGRESQL') or os.environ.get('IGNORE_ALL'), 'no postgresql server for test.')
class TestPGProjectDB(ProjectDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.projectdb = database.connect_database(
'sqlalchemy+postgresql+projectdb://[email protected]:5432/pyspider_test_projectdb'
)
self.tearDownClass()
@classmethod
def tearDownClass(self):
for project in self.projectdb.get_all(fields=['name']):
self.projectdb.drop(project['name'])
@unittest.skipIf(os.environ.get('IGNORE_POSTGRESQL') or os.environ.get('IGNORE_ALL'), 'no postgresql server for test.')
class TestPGResultDB(ResultDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.resultdb = database.connect_database(
'sqlalchemy+postgresql+resultdb://[email protected]/pyspider_test_resultdb'
)
self.tearDownClass()
@classmethod
def tearDownClass(self):
for project in self.resultdb.projects:
self.resultdb.drop(project)
@unittest.skipIf(os.environ.get('IGNORE_REDIS') or os.environ.get('IGNORE_ALL'), 'no redis server for test.')
class TestRedisTaskDB(TaskDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.taskdb = database.connect_database('redis+taskdb://localhost:6379/15')
self.taskdb.__prefix__ = 'testtaskdb_'
@classmethod
def tearDownClass(self):
for project in self.taskdb.projects:
self.taskdb.drop(project)
@unittest.skipIf(os.environ.get('IGNORE_ELASTICSEARCH') or os.environ.get('IGNORE_ALL'), 'no elasticsearch server for test.')
class TestESProjectDB(ProjectDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.projectdb = database.connect_database(
'elasticsearch+projectdb://127.0.0.1:9200/?index=test_pyspider_projectdb'
)
assert self.projectdb.index == 'test_pyspider_projectdb'
@classmethod
def tearDownClass(self):
self.projectdb.es.indices.delete(index='test_pyspider_projectdb', ignore=[400, 404])
@unittest.skipIf(os.environ.get('IGNORE_ELASTICSEARCH') or os.environ.get('IGNORE_ALL'), 'no elasticsearch server for test.')
class TestESResultDB(ResultDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.resultdb = database.connect_database(
'elasticsearch+resultdb://127.0.0.1:9200/?index=test_pyspider_resultdb'
)
assert self.resultdb.index == 'test_pyspider_resultdb'
@classmethod
def tearDownClass(self):
self.resultdb.es.indices.delete(index='test_pyspider_resultdb', ignore=[400, 404])
def test_15_save(self):
self.resultdb.refresh()
def test_30_select(self):
for i in range(5):
self.resultdb.save('test_project', 'test_taskid-%d' % i,
'test_url', 'result-%d' % i)
self.resultdb.refresh()
ret = list(self.resultdb.select('test_project'))
self.assertEqual(len(ret), 6)
ret = list(self.resultdb.select('test_project', limit=4))
self.assertEqual(len(ret), 4)
for ret in self.resultdb.select('test_project', fields=('url', ), limit=1):
self.assertIn('url', ret)
self.assertNotIn('result', ret)
def test_35_select_limit(self):
pass
def test_z20_update_projects(self):
self.resultdb.refresh()
self.assertIn('drop_project2', self.resultdb.projects)
self.assertNotIn('drop_project3', self.resultdb.projects)
@unittest.skipIf(os.environ.get('IGNORE_ELASTICSEARCH') or os.environ.get('IGNORE_ALL'), 'no elasticsearch server for test.')
class TestESTaskDB(TaskDBCase, unittest.TestCase):
@classmethod
def setUpClass(self):
self.taskdb = database.connect_database(
'elasticsearch+taskdb://127.0.0.1:9200/?index=test_pyspider_taskdb'
)
assert self.taskdb.index == 'test_pyspider_taskdb'
@classmethod
def tearDownClass(self):
self.taskdb.es.indices.delete(index='test_pyspider_taskdb', ignore=[400, 404])
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 517,360,827,569,604,400 | 33.96861 | 125 | 0.634564 | false |
directorlive/oppia | core/controllers/recent_commits.py | 33 | 2031 | # Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for queries relating to recent commits."""
__author__ = '[email protected] (Sean Lip)'
from core.controllers import base
from core.domain import exp_services
class RecentCommitsHandler(base.BaseHandler):
"""Returns a list of recent commits."""
# TODO(sll): Accept additional URL parameters that filter by user_id and
# exploration_id. For the former, do a check to ensure that the user is
# allowed to see this data (as it may include private explorations).
def get(self):
"""Handles GET requests."""
urlsafe_start_cursor = self.request.get('cursor')
query_type = self.request.get('query_type')
if query_type == 'all_non_private_commits':
all_commits, new_urlsafe_start_cursor, more = (
exp_services.get_next_page_of_all_non_private_commits(
urlsafe_start_cursor=urlsafe_start_cursor))
else:
raise self.PageNotFoundException
exp_ids = set([commit.exploration_id for commit in all_commits])
exp_ids_to_exp_data = (
exp_services.get_exploration_titles_and_categories(exp_ids))
all_commit_dicts = [commit.to_dict() for commit in all_commits]
self.render_json({
'results': all_commit_dicts,
'cursor': new_urlsafe_start_cursor,
'more': more,
'exp_ids_to_exp_data': exp_ids_to_exp_data,
})
| apache-2.0 | -5,916,595,727,925,072,000 | 37.320755 | 76 | 0.669621 | false |
sandipbgt/nodeshot | setup.py | 8 | 1542 | #!/usr/bin/env python
from setuptools import setup, find_packages
from nodeshot import get_version
def get_install_requires():
"""
parse requirements.txt, ignore links, exclude comments
"""
requirements = []
for line in open('requirements.txt').readlines():
        # skip comments, empty lines and direct URL/VCS links
if line.startswith('#') or line == '' or line.startswith('http') or line.startswith('git'):
continue
# add line to requirements
requirements.append(line)
return requirements
setup(
name='nodeshot',
version=get_version(),
description="Extensible Django web application for management of community-led georeferenced data.",
long_description=open('README.rst').read(),
author='Federico Capoano',
author_email='[email protected]',
license='GPL3',
url='https://github.com/ninuxorg/nodeshot',
download_url='https://github.com/ninuxorg/nodeshot/releases',
packages=find_packages(exclude=['docs', 'docs.*']),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Framework :: Django',
],
install_requires=get_install_requires(),
scripts=['nodeshot/bin/nodeshot'],
)
| gpl-3.0 | -6,299,887,426,704,017,000 | 33.266667 | 104 | 0.648508 | false |
aznashwan/heat2arm | heat2arm/translators/instances/nova_server.py | 1 | 2931 | # Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Defines the translator and utilities for mapping
a Nova server to an Azure VM.
"""
import logging
from heat2arm.translators.instances.base_instance import (
BaseInstanceARMTranslator
)
from heat2arm.translators.instances import nova_utils as utils
LOG = logging.getLogger(__name__)
class NovaServerARMTranslator(BaseInstanceARMTranslator):
    """ NovaServerARMTranslator is the translator associated with a Nova server.
It processes the fields and parameters of a Nova server defined in Heat.
"""
heat_resource_type = "OS::Nova::Server"
def get_variables(self):
""" get_variables returns the dict of ARM template variables
associated with the Heat template's resource translation.
"""
base_vars = self._get_base_variables()
(publisher, offer, sku) = utils.get_azure_image_info(
self._heat_resource.properties['image'])
base_vars.update({
"vmSize_%s" % self._name: utils.get_azure_flavor(
self._heat_resource.properties['flavor']),
"imgPublisher_%s" % self._name: publisher,
"imgOffer_%s" % self._name: offer,
"imgSku_%s" % self._name: sku,
})
return base_vars
    # NOTE: the following methods are inherited from BaseInstanceARMTranslator:
# - get_parameters.
# - get_dependencies.
# - get_resource_data.
# - update_context.
def _get_ref_port_resource_names(self):
""" _get_ref_port_resource_names is a helper method which returns a
list containing the names of all port resources which are referenced
by this Nova server.
"""
port_resource_names = []
if 'networks' in self._heat_resource.properties.data:
for port_data in self._heat_resource.properties.data["networks"]:
if 'port' in port_data:
port_resource_names.append(port_data['port'].args)
return port_resource_names
def _get_userdata(self):
""" _get_userdata is a helper method which returns the userdata
from the instance's definition, if any.
"""
if 'user_data' in self._heat_resource.properties:
return self._heat_resource.properties['user_data']
return ""
| apache-2.0 | 1,305,231,403,946,624,800 | 33.892857 | 78 | 0.655067 | false |
AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/express_route_circuit_stats.py | 8 | 1682 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ExpressRouteCircuitStats(Model):
"""Contains stats associated with the peering.
:param primarybytes_in: Gets BytesIn of the peering.
:type primarybytes_in: long
:param primarybytes_out: Gets BytesOut of the peering.
:type primarybytes_out: long
:param secondarybytes_in: Gets BytesIn of the peering.
:type secondarybytes_in: long
:param secondarybytes_out: Gets BytesOut of the peering.
:type secondarybytes_out: long
"""
_attribute_map = {
'primarybytes_in': {'key': 'primarybytesIn', 'type': 'long'},
'primarybytes_out': {'key': 'primarybytesOut', 'type': 'long'},
'secondarybytes_in': {'key': 'secondarybytesIn', 'type': 'long'},
'secondarybytes_out': {'key': 'secondarybytesOut', 'type': 'long'},
}
def __init__(self, primarybytes_in=None, primarybytes_out=None, secondarybytes_in=None, secondarybytes_out=None):
super(ExpressRouteCircuitStats, self).__init__()
self.primarybytes_in = primarybytes_in
self.primarybytes_out = primarybytes_out
self.secondarybytes_in = secondarybytes_in
self.secondarybytes_out = secondarybytes_out
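# A minimal construction sketch (the byte counts are made-up example values):
#
#     stats = ExpressRouteCircuitStats(
#         primarybytes_in=1024, primarybytes_out=2048,
#         secondarybytes_in=512, secondarybytes_out=256)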
| mit | -6,749,520,567,871,143,000 | 41.05 | 117 | 0.634364 | false |
dulems/hue | desktop/core/ext-py/tablib-develop/tablib/packages/odf3/number.py | 56 | 3137 | # -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from .namespaces import NUMBERNS
from .element import Element
from .style import StyleElement
# Autogenerated
def AmPm(**args):
return Element(qname = (NUMBERNS,'am-pm'), **args)
def Boolean(**args):
return Element(qname = (NUMBERNS,'boolean'), **args)
def BooleanStyle(**args):
return StyleElement(qname = (NUMBERNS,'boolean-style'), **args)
def CurrencyStyle(**args):
return StyleElement(qname = (NUMBERNS,'currency-style'), **args)
def CurrencySymbol(**args):
return Element(qname = (NUMBERNS,'currency-symbol'), **args)
def DateStyle(**args):
return StyleElement(qname = (NUMBERNS,'date-style'), **args)
def Day(**args):
return Element(qname = (NUMBERNS,'day'), **args)
def DayOfWeek(**args):
return Element(qname = (NUMBERNS,'day-of-week'), **args)
def EmbeddedText(**args):
return Element(qname = (NUMBERNS,'embedded-text'), **args)
def Era(**args):
return Element(qname = (NUMBERNS,'era'), **args)
def Fraction(**args):
return Element(qname = (NUMBERNS,'fraction'), **args)
def Hours(**args):
return Element(qname = (NUMBERNS,'hours'), **args)
def Minutes(**args):
return Element(qname = (NUMBERNS,'minutes'), **args)
def Month(**args):
return Element(qname = (NUMBERNS,'month'), **args)
def Number(**args):
return Element(qname = (NUMBERNS,'number'), **args)
def NumberStyle(**args):
return StyleElement(qname = (NUMBERNS,'number-style'), **args)
def PercentageStyle(**args):
return StyleElement(qname = (NUMBERNS,'percentage-style'), **args)
def Quarter(**args):
return Element(qname = (NUMBERNS,'quarter'), **args)
def ScientificNumber(**args):
return Element(qname = (NUMBERNS,'scientific-number'), **args)
def Seconds(**args):
return Element(qname = (NUMBERNS,'seconds'), **args)
def Text(**args):
return Element(qname = (NUMBERNS,'text'), **args)
def TextContent(**args):
return Element(qname = (NUMBERNS,'text-content'), **args)
def TextStyle(**args):
return StyleElement(qname = (NUMBERNS,'text-style'), **args)
def TimeStyle(**args):
return StyleElement(qname = (NUMBERNS,'time-style'), **args)
def WeekOfYear(**args):
return Element(qname = (NUMBERNS,'week-of-year'), **args)
def Year(**args):
return Element(qname = (NUMBERNS,'year'), **args)
| apache-2.0 | 5,794,709,919,981,279,000 | 29.153846 | 80 | 0.69037 | false |
JFriel/honours_project | networkx/networkx/algorithms/approximation/vertex_cover.py | 63 | 2088 | # -*- coding: utf-8 -*-
"""
************
Vertex Cover
************
Given an undirected graph `G = (V, E)` and a function w assigning nonnegative
weights to its vertices, find a minimum weight subset of V such that each edge
in E is incident to at least one vertex in the subset.
http://en.wikipedia.org/wiki/Vertex_cover
"""
# Copyright (C) 2011-2012 by
# Nicholas Mancuso <[email protected]>
# All rights reserved.
# BSD license.
from networkx.utils import *
__all__ = ["min_weighted_vertex_cover"]
__author__ = """Nicholas Mancuso ([email protected])"""
@not_implemented_for('directed')
def min_weighted_vertex_cover(G, weight=None):
r"""2-OPT Local Ratio for Minimum Weighted Vertex Cover
Find an approximate minimum weighted vertex cover of a graph.
Parameters
----------
G : NetworkX graph
Undirected graph
weight : None or string, optional (default = None)
If None, every edge has weight/distance/cost 1. If a string, use this
edge attribute as the edge weight. Any edge attribute not present
defaults to 1.
Returns
-------
min_weighted_cover : set
Returns a set of vertices whose weight sum is no more than 2 * OPT.
Notes
-----
Local-Ratio algorithm for computing an approximate vertex cover.
Algorithm greedily reduces the costs over edges and iteratively
builds a cover. Worst-case runtime is `O(|E|)`.
References
----------
.. [1] Bar-Yehuda, R., & Even, S. (1985). A local-ratio theorem for
approximating the weighted vertex cover problem.
Annals of Discrete Mathematics, 25, 27–46
http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf
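    Examples
    --------
    A minimal usage sketch on a small unweighted graph (node weights default
    to 1, so the cover found weighs at most twice the optimum):
    >>> import networkx as nx
    >>> G = nx.path_graph(4)
    >>> cover = min_weighted_vertex_cover(G)
    >>> all(u in cover or v in cover for u, v in G.edges())
    True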
"""
weight_func = lambda nd: nd.get(weight, 1)
cost = dict((n, weight_func(nd)) for n, nd in G.nodes(data=True))
# while there are edges uncovered, continue
for u,v in G.edges_iter():
# select some uncovered edge
min_cost = min([cost[u], cost[v]])
cost[u] -= min_cost
cost[v] -= min_cost
return set(u for u in cost if cost[u] == 0)
| gpl-3.0 | 7,610,989,381,500,838,000 | 31.092308 | 78 | 0.643816 | false |
home-assistant/home-assistant | tests/testing_config/custom_components/test/light.py | 2 | 1589 | """
Provide a mock light platform.
Call init before using it in your tests to ensure clean test data.
"""
from homeassistant.components.light import LightEntity
from homeassistant.const import STATE_OFF, STATE_ON
from tests.common import MockToggleEntity
ENTITIES = []
def init(empty=False):
"""Initialize the platform with entities."""
global ENTITIES
ENTITIES = (
[]
if empty
else [
MockLight("Ceiling", STATE_ON),
MockLight("Ceiling", STATE_OFF),
MockLight(None, STATE_OFF),
]
)
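# Illustrative usage from a test (the import path for this module is whatever
# the test suite exposes it as):
#
#     init()                      # reset ENTITIES to the three default mocks
#     assert len(ENTITIES) == 3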
async def async_setup_platform(
hass, config, async_add_entities_callback, discovery_info=None
):
"""Return mock entities."""
async_add_entities_callback(ENTITIES)
class MockLight(MockToggleEntity, LightEntity):
"""Mock light class."""
color_mode = None
max_mireds = 500
min_mireds = 153
supported_color_modes = None
supported_features = 0
brightness = None
color_temp = None
hs_color = None
rgb_color = None
rgbw_color = None
rgbww_color = None
xy_color = None
white_value = None
def turn_on(self, **kwargs):
"""Turn the entity on."""
super().turn_on(**kwargs)
for key, value in kwargs.items():
if key in [
"brightness",
"hs_color",
"xy_color",
"rgb_color",
"rgbw_color",
"rgbww_color",
"color_temp",
"white_value",
]:
setattr(self, key, value)
| apache-2.0 | 417,064,470,412,643,000 | 22.367647 | 66 | 0.565765 | false |
corpnewt/CorpBot.py | Cogs/Errors.py | 1 | 1559 | import asyncio
import discord
import sys
import traceback
from discord.ext import commands
from discord import errors
from Cogs import Message
def setup(bot):
bot.add_cog(Errors())
class Errors(commands.Cog):
def __init__(self):
pass
@commands.Cog.listener()
async def on_command_error(self, context, exception):
if type(exception) is commands.CommandInvokeError:
if type(exception.original) is discord.Forbidden:
return await Message.EmbedText(
title="⚠ Forbidden Error",
color=context.author,
description="Looks like I tried to do something I don't have permissions for!\nIf `{}{}` is role-based - make sure that my role is above the affected role(s).".format(context.prefix, context.command.name)
).send(context)
elif type(exception.original) is discord.errors.HTTPException and "Must be 2000 or fewer in length" in str(exception.original):
return await Message.EmbedText(
title="⚠ I Got Too Wordy",
color=context.author,
description="Looks like I tried to send a response that was greater than 2,000 characters!"
).send(context)
if str(exception).startswith("Music Cog: "):
# Generic music exception - ignore
return
cog = context.cog
if cog:
attr = '_{0.__class__.__name__}__error'.format(cog)
if hasattr(cog, attr):
return
print('Ignoring exception in command {}:'.format(context.command), file=sys.stderr)
traceback.print_exception(type(exception), exception, exception.__traceback__, file=sys.stderr)
| mit | -8,464,887,185,722,327,000 | 35.02381 | 210 | 0.695177 | false |
the-gigi/project-euler | 3/003_largest_prime_factor.py | 1 | 1075 | """Largest Prime Factor
The prime factors of 13195 are 5, 7, 13 and 29.
What is the largest prime factor of the number 600851475143 ?
"""
from collections import OrderedDict
def find_next_prime(primes):
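    """Return the first prime greater than primes[-1].
    Assumes primes contains every prime up to primes[-1] (as the caller builds
    it). Candidates are drawn from (last, 2*last], a range guaranteed to hold
    a prime by Bertrand's postulate, and sieved with the known primes.
    """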
last = primes[-1]
candidates = OrderedDict((k, 1) for k in range(last + 1, last * 2 + 1))
for p in primes:
for i in xrange(p * 2, last * 2 + 1, p):
candidates[i] = 0
for k, v in candidates.iteritems():
if v == 1:
return k
def find_largest_prime_factor(number):
largest = 1
primes = [2]
while True:
last = primes[-1]
if number % last == 0:
largest = last
#print largest, primes
number /= last
# Keep dividing
while number % last == 0:
number /= last
if number == 1:
return largest
primes.append(find_next_prime(primes))
def main():
number = 600851475143
largest = find_largest_prime_factor(number)
print largest
if __name__ == '__main__':
main()
| mit | -3,654,798,669,728,008,700 | 20.5 | 75 | 0.547907 | false |
badock/nova | nova/tests/api/openstack/compute/contrib/test_deferred_delete.py | 4 | 5504 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import webob
from nova.api.openstack.compute.contrib import deferred_delete
from nova.api.openstack.compute.plugins.v3 import deferred_delete as dd_v21
from nova.compute import api as compute_api
from nova import context
from nova import exception
from nova import test
from nova.tests.api.openstack import fakes
class FakeRequest(object):
def __init__(self, context):
self.environ = {'nova.context': context}
class DeferredDeleteExtensionTestV21(test.NoDBTestCase):
ext_ver = dd_v21.DeferredDeleteController
def setUp(self):
super(DeferredDeleteExtensionTestV21, self).setUp()
self.fake_input_dict = {}
self.fake_uuid = 'fake_uuid'
self.fake_context = context.RequestContext('fake', 'fake')
self.fake_req = FakeRequest(self.fake_context)
self.extension = self.ext_ver()
def test_force_delete(self):
self.mox.StubOutWithMock(compute_api.API, 'get')
self.mox.StubOutWithMock(compute_api.API, 'force_delete')
fake_instance = 'fake_instance'
compute_api.API.get(self.fake_context, self.fake_uuid,
expected_attrs=None,
want_objects=True).AndReturn(fake_instance)
compute_api.API.force_delete(self.fake_context, fake_instance)
self.mox.ReplayAll()
res = self.extension._force_delete(self.fake_req, self.fake_uuid,
self.fake_input_dict)
self.assertEqual(res.status_int, 202)
def test_force_delete_instance_not_found(self):
self.mox.StubOutWithMock(compute_api.API, 'get')
compute_api.API.get(self.fake_context, self.fake_uuid,
expected_attrs=None,
want_objects=True).AndRaise(
exception.InstanceNotFound(instance_id='instance-0000'))
self.mox.ReplayAll()
self.assertRaises(webob.exc.HTTPNotFound,
self.extension._force_delete,
self.fake_req,
self.fake_uuid,
self.fake_input_dict)
@mock.patch.object(compute_api.API, 'get')
@mock.patch.object(compute_api.API, 'force_delete',
side_effect=exception.InstanceIsLocked(
instance_uuid='fake_uuid'))
def test_force_delete_instance_locked(self, mock_force_delete, mock_get):
req = fakes.HTTPRequest.blank('/v2/fake/servers/fake_uuid/action')
ex = self.assertRaises(webob.exc.HTTPConflict,
self.extension._force_delete,
req, 'fake_uuid', '')
self.assertIn('Instance fake_uuid is locked', ex.explanation)
def test_restore(self):
self.mox.StubOutWithMock(compute_api.API, 'get')
self.mox.StubOutWithMock(compute_api.API, 'restore')
fake_instance = 'fake_instance'
compute_api.API.get(self.fake_context, self.fake_uuid,
expected_attrs=None,
want_objects=True).AndReturn(fake_instance)
compute_api.API.restore(self.fake_context, fake_instance)
self.mox.ReplayAll()
res = self.extension._restore(self.fake_req, self.fake_uuid,
self.fake_input_dict)
self.assertEqual(res.status_int, 202)
def test_restore_instance_not_found(self):
self.mox.StubOutWithMock(compute_api.API, 'get')
compute_api.API.get(self.fake_context, self.fake_uuid,
expected_attrs=None, want_objects=True).AndRaise(
exception.InstanceNotFound(instance_id='instance-0000'))
self.mox.ReplayAll()
self.assertRaises(webob.exc.HTTPNotFound, self.extension._restore,
self.fake_req, self.fake_uuid,
self.fake_input_dict)
def test_restore_raises_conflict_on_invalid_state(self):
self.mox.StubOutWithMock(compute_api.API, 'get')
self.mox.StubOutWithMock(compute_api.API, 'restore')
fake_instance = 'fake_instance'
exc = exception.InstanceInvalidState(attr='fake_attr',
state='fake_state', method='fake_method',
instance_uuid='fake')
compute_api.API.get(self.fake_context, self.fake_uuid,
expected_attrs=None,
want_objects=True).AndReturn(fake_instance)
compute_api.API.restore(self.fake_context, fake_instance).AndRaise(
exc)
self.mox.ReplayAll()
self.assertRaises(webob.exc.HTTPConflict, self.extension._restore,
self.fake_req, self.fake_uuid, self.fake_input_dict)
class DeferredDeleteExtensionTestV2(DeferredDeleteExtensionTestV21):
ext_ver = deferred_delete.DeferredDeleteController
| apache-2.0 | -4,010,344,135,201,649,000 | 39.77037 | 78 | 0.628452 | false |
verilylifesciences/classifaedes | classifaedes/export.py | 1 | 3902 | # Copyright 2019 Verily Life Sciences LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Build the inference graph, load a checkpoint, and export."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from classifaedes import hparams_lib
from classifaedes import inputs_lib
from classifaedes import metadata
from classifaedes import model_lib
import classifaedes.simple_image_lib as sil
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.contrib import learn as contrib_learn
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_string('export_dir_base', None,
'(optional) Export destination; defaults to '
'[model_dir]/export_/.')
flags.DEFINE_string('model_dir', None, 'Training output_dir.')
def make_input_fn(image_shape):
"""Returns an Estimator input_fn for the exported serving graph."""
def _prep_input(png):
with tf.name_scope('prep_input'):
img = tf.image.decode_png(png, channels=1)
return inputs_lib.prep_image(img, image_shape)
def input_fn():
"""Estimator input_fn for serving.
Returns:
InputFnOps - a namedtuple of features, labels, and default_inputs.
"""
pngs = tf.placeholder(tf.string, shape=[None])
imgs = tf.map_fn(_prep_input, pngs, dtype=tf.float32)
inputs = {
'encoded_pngs': pngs,
'images': imgs, # For model_fn.
}
return contrib_learn.utils.input_fn_utils.InputFnOps(
inputs, None, {'inputs': pngs})
return input_fn
def run_test_inference(image_shape, export_dir):
"""Run a dummy prediction using the exported model under export_dir."""
with tf.Session(graph=tf.Graph()) as sess:
meta_graph_def = tf.saved_model.loader.load(
sess, [tf.saved_model.tag_constants.SERVING], export_dir)
sig = meta_graph_def.signature_def[
tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
inputs_name = sig.inputs[
tf.saved_model.signature_constants.PREDICT_INPUTS].name
outputs_name = sig.outputs[
tf.saved_model.signature_constants.PREDICT_OUTPUTS].name
dummy_image = sil.encode_png(np.ones(image_shape, dtype='uint8'))
return sess.run(outputs_name, {inputs_name: [dummy_image]})
def run_export(hps, image_shape, model_dir, export_dir_base):
"""Export model checkpoint under `model_dir` to `export_dir_base`."""
estimator = contrib_learn.Estimator(
model_fn=model_lib.build_model_fn(hps, {}),
model_dir=model_dir,
)
export_dir = estimator.export_savedmodel(
export_dir_base=export_dir_base,
serving_input_fn=make_input_fn(image_shape),
)
tf.logging.info('Exported SavedModel to %s', export_dir)
print('Exported SavedModel to', export_dir)
return export_dir
def main(unused_argv):
hps = hparams_lib.load_from_file(FLAGS.model_dir)
input_md = metadata.load_metadata(model_dir=FLAGS.model_dir)
image_shape = metadata.shape_from_metadata(input_md)
export_dir_base = FLAGS.export_dir_base or (FLAGS.model_dir + '/export_')
export_dir = run_export(hps, image_shape, FLAGS.model_dir, export_dir_base)
# Run in-process inference on a dummy image to sanity check our export.
dummy_prediction = run_test_inference(image_shape, export_dir)
tf.logging.info('Dummy prediction: %s', dummy_prediction)
if __name__ == '__main__':
tf.app.run()
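# A hedged usage sketch (the paths below are hypothetical, not part of this module):
# running the script with the flags defined above might look like
#   python -m classifaedes.export --model_dir=/tmp/train_out --export_dir_base=/tmp/export
# which loads hparams/metadata from model_dir, exports a SavedModel, and runs the
# dummy-image sanity check.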
| apache-2.0 | -154,210,633,739,482,460 | 34.798165 | 77 | 0.705023 | false |
data-exp-lab/girder | plugins/oauth/server/rest.py | 4 | 5537 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import cherrypy
import datetime
import six
from girder import events
from girder.constants import AccessType
from girder.exceptions import RestException
from girder.api.describe import Description, autoDescribeRoute
from girder.api.rest import Resource
from girder.api import access
from girder.models.setting import Setting
from girder.models.token import Token
from . import constants, providers
class OAuth(Resource):
def __init__(self):
super(OAuth, self).__init__()
self.resourceName = 'oauth'
self.route('GET', ('provider',), self.listProviders)
self.route('GET', (':provider', 'callback'), self.callback)
def _createStateToken(self, redirect):
csrfToken = Token().createToken(days=0.25)
# The delimiter is arbitrary, but a dot doesn't need to be URL-encoded
state = '%s.%s' % (csrfToken['_id'], redirect)
return state
def _validateCsrfToken(self, state):
"""
        Tests the CSRF token value embedded in the OAuth2 state parameter to
        authenticate the user as the originator of the login. Raises a
        RestException if the token is invalid or expired.
"""
csrfTokenId, _, redirect = state.partition('.')
token = Token().load(csrfTokenId, objectId=False, level=AccessType.READ)
if token is None:
raise RestException('Invalid CSRF token (state="%s").' % state, code=403)
Token().remove(token)
if token['expires'] < datetime.datetime.utcnow():
raise RestException('Expired CSRF token (state="%s").' % state,
code=403)
if not redirect:
raise RestException('No redirect location (state="%s").' % state)
return redirect
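    # Illustrative only (hypothetical values): the state created by
    # _createStateToken has the form "<csrfTokenId>.<redirect>", e.g.
    # "5ae1f0c93.https://girder.example.com/#dialog"; _validateCsrfToken splits it
    # on the first '.' and validates the token half before returning the redirect.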
@access.public
@autoDescribeRoute(
Description('Get the list of enabled OAuth2 providers and their URLs.')
.notes('By default, returns an object mapping names of providers to '
'the appropriate URL.')
.param('redirect', 'Where the user should be redirected upon completion'
' of the OAuth2 flow.')
.param('list', 'Whether to return the providers as an ordered list.',
required=False, dataType='boolean', default=False)
)
def listProviders(self, redirect, list):
enabledNames = Setting().get(constants.PluginSettings.PROVIDERS_ENABLED)
enabledProviders = [
provider
for providerName, provider in six.viewitems(providers.idMap)
if providerName in enabledNames
]
if enabledProviders:
state = self._createStateToken(redirect)
else:
state = None
if list:
return [
{
'id': provider.getProviderName(external=False),
'name': provider.getProviderName(external=True),
'url': provider.getUrl(state)
}
for provider in enabledProviders
]
else:
return {
provider.getProviderName(external=True): provider.getUrl(state)
for provider in enabledProviders
}
@access.public
@autoDescribeRoute(
Description('Callback called by OAuth providers.')
.param('provider', 'The provider name.', paramType='path')
.param('state', 'Opaque state string.', required=False)
.param('code', 'Authorization code from provider.', required=False)
.param('error', 'Error message from provider.', required=False),
hide=True
)
def callback(self, provider, state, code, error):
if error is not None:
raise RestException("Provider returned error: '%s'." % error, code=502)
self.requireParams({'state': state, 'code': code})
providerName = provider
provider = providers.idMap.get(providerName)
if not provider:
raise RestException('Unknown provider "%s".' % providerName)
redirect = self._validateCsrfToken(state)
providerObj = provider(cherrypy.url())
token = providerObj.getToken(code)
events.trigger('oauth.auth_callback.before', {
'provider': provider,
'token': token
})
user = providerObj.getUser(token)
events.trigger('oauth.auth_callback.after', {
'provider': provider,
'token': token,
'user': user
})
girderToken = self.sendAuthTokenCookie(user)
try:
redirect = redirect.format(girderToken=str(girderToken['_id']))
except KeyError:
pass # in case there's another {} that's not handled by format
raise cherrypy.HTTPRedirect(redirect)
| apache-2.0 | 5,318,905,734,232,105,000 | 34.722581 | 85 | 0.603937 | false |
yoelk/kivy | kivy/tests/test_audio.py | 51 | 1744 | '''
Audio tests
===========
'''
import unittest
import os
SAMPLE_FILE = os.path.join(os.path.dirname(__file__), 'sample1.ogg')
SAMPLE_LENGTH = 1.402
DELTA = SAMPLE_LENGTH * 0.01
DELAY = 0.2
class AudioTestCase(unittest.TestCase):
def get_sound(self):
import os
assert os.path.exists(SAMPLE_FILE)
from kivy.core import audio
return audio.SoundLoader.load(SAMPLE_FILE)
def test_length_simple(self):
sound = self.get_sound()
volume = sound.volume = 0.75
length = sound.length
self.assertAlmostEqual(SAMPLE_LENGTH, length, delta=DELTA)
# ensure that the gstreamer play/stop doesn't mess up the volume
assert volume == sound.volume
def test_length_playing(self):
import time
sound = self.get_sound()
sound.play()
try:
time.sleep(DELAY)
length = sound.length
self.assertAlmostEqual(SAMPLE_LENGTH, length, delta=DELTA)
finally:
sound.stop()
self.assertAlmostEqual(SAMPLE_LENGTH, length, delta=DELTA)
def test_length_stopped(self):
import time
sound = self.get_sound()
sound.play()
try:
time.sleep(DELAY)
finally:
sound.stop()
length = sound.length
self.assertAlmostEqual(SAMPLE_LENGTH, length, delta=DELTA)
class AudioGstreamerTestCase(AudioTestCase):
def make_sound(self, source):
from kivy.core.audio import audio_gstreamer
return audio_gstreamer.SoundGstreamer(source)
class AudioPygameTestCase(AudioTestCase):
def make_sound(self, source):
from kivy.core.audio import audio_pygame
return audio_pygame.SoundPygame(source)
| mit | 1,381,781,630,533,834,800 | 25.424242 | 72 | 0.630161 | false |
clk8908/pymc3 | pymc3/distributions/special.py | 13 | 3420 | '''
Created on Mar 17, 2011
@author: jsalvatier
'''
from theano import scalar, tensor
import numpy
from scipy import special, misc
from .dist_math import *
__all__ = ['gammaln', 'multigammaln', 'psi', 'trigamma', 'factln']
class GammaLn(scalar.UnaryScalarOp):
"""
Compute gammaln(x)
"""
@staticmethod
def st_impl(x):
return special.gammaln(x)
def impl(self, x):
return GammaLn.st_impl(x)
def grad(self, inp, grads):
x, = inp
gz, = grads
return [gz * scalar_psi(x)]
def c_code(self, node, name, inp, out, sub):
x, = inp
z, = out
return """%(z)s = lgamma(%(x)s);""" % locals()
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
scalar_gammaln = GammaLn(scalar.upgrade_to_float, name='scalar_gammaln')
gammaln = tensor.Elemwise(scalar_gammaln, name='gammaln')
def multigammaln(a, p):
"""Multivariate Log Gamma
:Parameters:
a : tensor like
p : int degrees of freedom
p > 0
"""
i = arange(1, p + 1)
return p * (p - 1) * log(pi) / 4. + t.sum(gammaln(a + (1. - i) / 2.), axis=0)
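# A couple of illustrative identities implied by the formula above (not extra API):
#   multigammaln(a, 1) == gammaln(a)
#   multigammaln(a, 2) == log(pi) / 2 + gammaln(a) + gammaln(a - 0.5)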
cpsifunc = """
#ifndef _PSIFUNCDEFINED
#define _PSIFUNCDEFINED
double _psi(double x){
/*taken from
Bernardo, J. M. (1976). Algorithm AS 103: Psi (Digamma) Function. Applied Statistics. 25 (3), 315-317.
http://www.uv.es/~bernardo/1976AppStatist.pdf */
double y, R, psi_ = 0;
double S = 1.0e-5;
double C = 8.5;
double S3 = 8.333333333e-2;
double S4 = 8.333333333e-3;
double S5 = 3.968253968e-3;
double D1 = -0.5772156649 ;
y = x;
if (y <= 0.0)
return psi_;
if (y <= S )
return D1 - 1.0/y;
while (y < C){
psi_ = psi_ - 1.0 / y;
y = y + 1;}
R = 1.0 / y;
psi_ = psi_ + log(y) - .5 * R ;
R= R*R;
psi_ = psi_ - R * (S3 - R * (S4 - R * S5));
return psi_;}
#endif
"""
class Psi(scalar.UnaryScalarOp):
"""
Compute derivative of gammaln(x)
"""
@staticmethod
def st_impl(x):
return special.psi(x)
def impl(self, x):
return Psi.st_impl(x)
def grad(self, inp, grads):
x, = inp
gz, = grads
return [gz * scalar_trigamma(x)]
def c_support_code(self):
return cpsifunc
def c_code(self, node, name, inp, out, sub):
x, = inp
z, = out
if node.inputs[0].type in scalar.complex_types:
raise NotImplementedError('type not supported', node.inputs[0].type)
return """%(z)s = _psi(%(x)s);""" % locals()
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
scalar_psi = Psi(scalar.upgrade_to_float, name='scalar_psi')
psi = tensor.Elemwise(scalar_psi, name='psi')
class Trigamma(scalar.UnaryScalarOp):
"""
Compute 2nd derivative of gammaln(x)
"""
@staticmethod
def st_impl(x):
return special.polygamma(1, x)
    def impl(self, x):
        # The Python fallback should compute trigamma, not psi.
        return Trigamma.st_impl(x)
# def grad() no gradient now
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return hash(type(self))
scalar_trigamma = Trigamma(scalar.upgrade_to_float, name='scalar_trigamma')
trigamma = tensor.Elemwise(scalar_trigamma, name='trigamma')
| apache-2.0 | 8,468,932,575,345,364,000 | 21.207792 | 106 | 0.554386 | false |
experiencecoin/experiencecoin | contrib/devtools/check-doc.py | 53 | 1820 | #!/usr/bin/env python
# Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
import re
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
CMD_ROOT_DIR = '`git rev-parse --show-toplevel`/%s' % FOLDER_GREP
CMD_GREP_ARGS = r"egrep -r -I '(map(Multi)?Args(\.count\(|\[)|Get(Bool)?Arg\()\"\-[^\"]+?\"' %s | grep -v '%s'" % (CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_DOCS = r"egrep -r -I 'HelpMessageOpt\(\"\-[^\"=]+?(=|\")' %s" % (CMD_ROOT_DIR)
REGEX_ARG = re.compile(r'(?:map(?:Multi)?Args(?:\.count\(|\[)|Get(?:Bool)?Arg\()\"(\-[^\"]+?)\"')
REGEX_DOC = re.compile(r'HelpMessageOpt\(\"(\-[^\"=]+?)(?:=|\")')
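# For illustration (hypothetical source snippet): REGEX_ARG captures "-datadir" from
# GetArg("-datadir", "") or mapArgs.count("-datadir"), while REGEX_DOC captures
# "-datadir" from HelpMessageOpt("-datadir=<dir>", ...).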
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-rpcssl', '-benchmark', '-h', '-help', '-socks', '-tor', '-debugnet', '-whitelistalwaysrelay', '-prematurewitness', '-walletprematurewitness', '-promiscuousmempoolflags', '-blockminsize'])
def main():
used = check_output(CMD_GREP_ARGS, shell=True)
docd = check_output(CMD_GREP_DOCS, shell=True)
args_used = set(re.findall(REGEX_ARG,used))
args_docd = set(re.findall(REGEX_DOC,docd)).union(SET_DOC_OPTIONAL)
args_need_doc = args_used.difference(args_docd)
args_unknown = args_docd.difference(args_used)
print "Args used : %s" % len(args_used)
print "Args documented : %s" % len(args_docd)
print "Args undocumented: %s" % len(args_need_doc)
print args_need_doc
print "Args unknown : %s" % len(args_unknown)
print args_unknown
exit(len(args_need_doc))
if __name__ == "__main__":
main()
| mit | -7,233,065,859,771,911,000 | 39.444444 | 213 | 0.654945 | false |
stackforge/cloudbase-init | cloudbaseinit/utils/windows/ws2_32.py | 12 | 1708 | # Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ctypes
from ctypes import windll
from ctypes import wintypes
AF_UNSPEC = 0
AF_INET = 2
AF_INET6 = 23
VERSION_2_2 = (2 << 8) + 2
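# (2 << 8) + 2 == 0x0202, i.e. the MAKEWORD(2, 2) value requesting Winsock 2.2.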
class SOCKADDR(ctypes.Structure):
_fields_ = [
('sa_family', wintypes.USHORT),
('sa_data', ctypes.c_char * 14),
]
class WSADATA(ctypes.Structure):
_fields_ = [
('opaque_data', wintypes.BYTE * 400),
]
WSAGetLastError = windll.Ws2_32.WSAGetLastError
WSAGetLastError.argtypes = []
WSAGetLastError.restype = wintypes.INT
WSAStartup = windll.Ws2_32.WSAStartup
WSAStartup.argtypes = [wintypes.WORD, ctypes.POINTER(WSADATA)]
WSAStartup.restype = wintypes.INT
WSACleanup = windll.Ws2_32.WSACleanup
WSACleanup.argtypes = []
WSACleanup.restype = wintypes.INT
WSAAddressToStringW = windll.Ws2_32.WSAAddressToStringW
WSAAddressToStringW.argtypes = [
ctypes.POINTER(SOCKADDR), wintypes.DWORD, wintypes.LPVOID,
wintypes.LPWSTR, ctypes.POINTER(wintypes.DWORD)]
WSAAddressToStringW.restype = wintypes.INT
def init_wsa(version=VERSION_2_2):
wsadata = WSADATA()
WSAStartup(version, ctypes.byref(wsadata))
| apache-2.0 | -5,368,393,975,414,180,000 | 27 | 78 | 0.724239 | false |
samarthmed/emacs-config | .python-environments/default/lib/python2.7/site.py | 784 | 27543 | """Append module search paths for third-party packages to sys.path.
****************************************************************
* This module is automatically imported during initialization. *
****************************************************************
In earlier versions of Python (up to 1.5a3), scripts or modules that
needed to use site-specific modules would place ``import site''
somewhere near the top of their code. Because of the automatic
import, this is no longer necessary (but code that does it still
works).
This will append site-specific paths to the module search path. On
Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
appends lib/python<version>/site-packages as well as lib/site-python.
It also supports the Debian convention of
lib/python<version>/dist-packages. On other platforms (mainly Mac and
Windows), it uses just sys.prefix (and sys.exec_prefix, if different,
but this is unlikely). The resulting directories, if they exist, are
appended to sys.path, and also inspected for path configuration files.
FOR DEBIAN, this sys.path is augmented with directories in /usr/local.
Local addons go into /usr/local/lib/python<version>/site-packages
(resp. /usr/local/lib/site-python), Debian addons install into
/usr/{lib,share}/python<version>/dist-packages.
A path configuration file is a file whose name has the form
<package>.pth; its contents are additional directories (one per line)
to be added to sys.path. Non-existing directories (or
non-directories) are never added to sys.path; no directory is added to
sys.path more than once. Blank lines and lines beginning with
'#' are skipped. Lines starting with 'import' are executed.
For example, suppose sys.prefix and sys.exec_prefix are set to
/usr/local and there is a directory /usr/local/lib/python2.X/site-packages
with three subdirectories, foo, bar and spam, and two path
configuration files, foo.pth and bar.pth. Assume foo.pth contains the
following:
# foo package configuration
foo
bar
bletch
and bar.pth contains:
# bar package configuration
bar
Then the following directories are added to sys.path, in this order:
/usr/local/lib/python2.X/site-packages/bar
/usr/local/lib/python2.X/site-packages/foo
Note that bletch is omitted because it doesn't exist; bar precedes foo
because bar.pth comes alphabetically before foo.pth; and spam is
omitted because it is not mentioned in either path configuration file.
After these path manipulations, an attempt is made to import a module
named sitecustomize, which can perform arbitrary additional
site-specific customizations. If this import fails with an
ImportError exception, it is silently ignored.
"""
import sys
import os
try:
import __builtin__ as builtins
except ImportError:
import builtins
try:
set
except NameError:
from sets import Set as set
# Prefixes for site-packages; add additional prefixes like /usr/local here
PREFIXES = [sys.prefix, sys.exec_prefix]
# Enable per user site-packages directory
# set it to False to disable the feature or True to force the feature
ENABLE_USER_SITE = None
# for distutils.commands.install
USER_SITE = None
USER_BASE = None
_is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32
_is_pypy = hasattr(sys, 'pypy_version_info')
_is_jython = sys.platform[:4] == 'java'
if _is_jython:
ModuleType = type(os)
def makepath(*paths):
dir = os.path.join(*paths)
if _is_jython and (dir == '__classpath__' or
dir.startswith('__pyclasspath__')):
return dir, dir
dir = os.path.abspath(dir)
return dir, os.path.normcase(dir)
def abs__file__():
"""Set all module' __file__ attribute to an absolute path"""
for m in sys.modules.values():
if ((_is_jython and not isinstance(m, ModuleType)) or
hasattr(m, '__loader__')):
# only modules need the abspath in Jython. and don't mess
# with a PEP 302-supplied __file__
continue
f = getattr(m, '__file__', None)
if f is None:
continue
m.__file__ = os.path.abspath(f)
def removeduppaths():
""" Remove duplicate entries from sys.path along with making them
absolute"""
# This ensures that the initial path provided by the interpreter contains
# only absolute pathnames, even if we're running from the build directory.
L = []
known_paths = set()
for dir in sys.path:
# Filter out duplicate paths (on case-insensitive file systems also
# if they only differ in case); turn relative paths into absolute
# paths.
dir, dircase = makepath(dir)
if not dircase in known_paths:
L.append(dir)
known_paths.add(dircase)
sys.path[:] = L
return known_paths
# XXX This should not be part of site.py, since it is needed even when
# using the -S option for Python. See http://www.python.org/sf/586680
def addbuilddir():
"""Append ./build/lib.<platform> in case we're running in the build dir
(especially for Guido :-)"""
from distutils.util import get_platform
s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
if hasattr(sys, 'gettotalrefcount'):
s += '-pydebug'
s = os.path.join(os.path.dirname(sys.path[-1]), s)
sys.path.append(s)
def _init_pathinfo():
"""Return a set containing all existing directory entries from sys.path"""
d = set()
for dir in sys.path:
try:
if os.path.isdir(dir):
dir, dircase = makepath(dir)
d.add(dircase)
except TypeError:
continue
return d
def addpackage(sitedir, name, known_paths):
"""Add a new path to known_paths by combining sitedir and 'name' or execute
sitedir if it starts with 'import'"""
if known_paths is None:
_init_pathinfo()
reset = 1
else:
reset = 0
fullname = os.path.join(sitedir, name)
try:
f = open(fullname, "rU")
except IOError:
return
try:
for line in f:
if line.startswith("#"):
continue
if line.startswith("import"):
exec(line)
continue
line = line.rstrip()
dir, dircase = makepath(sitedir, line)
if not dircase in known_paths and os.path.exists(dir):
sys.path.append(dir)
known_paths.add(dircase)
finally:
f.close()
if reset:
known_paths = None
return known_paths
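# Illustrative .pth behaviour (hypothetical file contents): a line starting with
# "import", e.g. "import foo; foo.bar()", is executed verbatim, while a plain line
# such as "mypkg" is joined with the site dir and appended to sys.path only if the
# resulting directory exists.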
def addsitedir(sitedir, known_paths=None):
"""Add 'sitedir' argument to sys.path if missing and handle .pth files in
'sitedir'"""
if known_paths is None:
known_paths = _init_pathinfo()
reset = 1
else:
reset = 0
sitedir, sitedircase = makepath(sitedir)
if not sitedircase in known_paths:
sys.path.append(sitedir) # Add path component
try:
names = os.listdir(sitedir)
except os.error:
return
names.sort()
for name in names:
if name.endswith(os.extsep + "pth"):
addpackage(sitedir, name, known_paths)
if reset:
known_paths = None
return known_paths
def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix):
"""Add site-packages (and possibly site-python) to sys.path"""
prefixes = [os.path.join(sys_prefix, "local"), sys_prefix]
if exec_prefix != sys_prefix:
prefixes.append(os.path.join(exec_prefix, "local"))
for prefix in prefixes:
if prefix:
if sys.platform in ('os2emx', 'riscos') or _is_jython:
sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
elif _is_pypy:
sitedirs = [os.path.join(prefix, 'site-packages')]
elif sys.platform == 'darwin' and prefix == sys_prefix:
if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python
sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"),
os.path.join(prefix, "Extras", "lib", "python")]
else: # any other Python distros on OSX work this way
sitedirs = [os.path.join(prefix, "lib",
"python" + sys.version[:3], "site-packages")]
elif os.sep == '/':
sitedirs = [os.path.join(prefix,
"lib",
"python" + sys.version[:3],
"site-packages"),
os.path.join(prefix, "lib", "site-python"),
os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")]
lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
if (os.path.exists(lib64_dir) and
os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
if _is_64bit:
sitedirs.insert(0, lib64_dir)
else:
sitedirs.append(lib64_dir)
try:
# sys.getobjects only available in --with-pydebug build
sys.getobjects
sitedirs.insert(0, os.path.join(sitedirs[0], 'debug'))
except AttributeError:
pass
# Debian-specific dist-packages directories:
sitedirs.append(os.path.join(prefix, "local/lib",
"python" + sys.version[:3],
"dist-packages"))
if sys.version[0] == '2':
sitedirs.append(os.path.join(prefix, "lib",
"python" + sys.version[:3],
"dist-packages"))
else:
sitedirs.append(os.path.join(prefix, "lib",
"python" + sys.version[0],
"dist-packages"))
sitedirs.append(os.path.join(prefix, "lib", "dist-python"))
else:
sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
if sys.platform == 'darwin':
# for framework builds *only* we add the standard Apple
# locations. Currently only per-user, but /Library and
# /Network/Library could be added too
if 'Python.framework' in prefix:
home = os.environ.get('HOME')
if home:
sitedirs.append(
os.path.join(home,
'Library',
'Python',
sys.version[:3],
'site-packages'))
for sitedir in sitedirs:
if os.path.isdir(sitedir):
addsitedir(sitedir, known_paths)
return None
def check_enableusersite():
"""Check if user site directory is safe for inclusion
The function tests for the command line flag (including environment var),
process uid/gid equal to effective uid/gid.
None: Disabled for security reasons
False: Disabled by user (command line option)
True: Safe and enabled
"""
if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False):
return False
if hasattr(os, "getuid") and hasattr(os, "geteuid"):
# check process uid == effective uid
if os.geteuid() != os.getuid():
return None
if hasattr(os, "getgid") and hasattr(os, "getegid"):
# check process gid == effective gid
if os.getegid() != os.getgid():
return None
return True
def addusersitepackages(known_paths):
"""Add a per user site-package to sys.path
Each user has its own python directory with site-packages in the
home directory.
USER_BASE is the root directory for all Python versions
USER_SITE is the user specific site-packages directory
USER_SITE/.. can be used for data.
"""
global USER_BASE, USER_SITE, ENABLE_USER_SITE
env_base = os.environ.get("PYTHONUSERBASE", None)
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
#if sys.platform in ('os2emx', 'riscos'):
# # Don't know what to put here
# USER_BASE = ''
# USER_SITE = ''
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
if env_base:
USER_BASE = env_base
else:
USER_BASE = joinuser(base, "Python")
USER_SITE = os.path.join(USER_BASE,
"Python" + sys.version[0] + sys.version[2],
"site-packages")
else:
if env_base:
USER_BASE = env_base
else:
USER_BASE = joinuser("~", ".local")
USER_SITE = os.path.join(USER_BASE, "lib",
"python" + sys.version[:3],
"site-packages")
if ENABLE_USER_SITE and os.path.isdir(USER_SITE):
addsitedir(USER_SITE, known_paths)
if ENABLE_USER_SITE:
for dist_libdir in ("lib", "local/lib"):
user_site = os.path.join(USER_BASE, dist_libdir,
"python" + sys.version[:3],
"dist-packages")
if os.path.isdir(user_site):
addsitedir(user_site, known_paths)
return known_paths
def setBEGINLIBPATH():
"""The OS/2 EMX port has optional extension modules that do double duty
as DLLs (and must use the .DLL file extension) for other extensions.
The library search path needs to be amended so these will be found
during module import. Use BEGINLIBPATH so that these are at the start
of the library search path.
"""
dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
libpath = os.environ['BEGINLIBPATH'].split(';')
if libpath[-1]:
libpath.append(dllpath)
else:
libpath[-1] = dllpath
os.environ['BEGINLIBPATH'] = ';'.join(libpath)
def setquit():
"""Define new built-ins 'quit' and 'exit'.
    These are objects whose repr shows a hint on how to exit and which raise
    SystemExit when called.
"""
if os.sep == ':':
eof = 'Cmd-Q'
elif os.sep == '\\':
eof = 'Ctrl-Z plus Return'
else:
eof = 'Ctrl-D (i.e. EOF)'
class Quitter(object):
def __init__(self, name):
self.name = name
def __repr__(self):
return 'Use %s() or %s to exit' % (self.name, eof)
def __call__(self, code=None):
# Shells like IDLE catch the SystemExit, but listen when their
# stdin wrapper is closed.
try:
sys.stdin.close()
except:
pass
raise SystemExit(code)
builtins.quit = Quitter('quit')
builtins.exit = Quitter('exit')
class _Printer(object):
"""interactive prompt objects for printing the license text, a list of
contributors and the copyright notice."""
MAXLINES = 23
def __init__(self, name, data, files=(), dirs=()):
self.__name = name
self.__data = data
self.__files = files
self.__dirs = dirs
self.__lines = None
def __setup(self):
if self.__lines:
return
data = None
for dir in self.__dirs:
for filename in self.__files:
filename = os.path.join(dir, filename)
try:
fp = open(filename, "rU")
data = fp.read()
fp.close()
break
except IOError:
pass
if data:
break
if not data:
data = self.__data
self.__lines = data.split('\n')
self.__linecnt = len(self.__lines)
def __repr__(self):
self.__setup()
if len(self.__lines) <= self.MAXLINES:
return "\n".join(self.__lines)
else:
return "Type %s() to see the full %s text" % ((self.__name,)*2)
def __call__(self):
self.__setup()
prompt = 'Hit Return for more, or q (and Return) to quit: '
lineno = 0
while 1:
try:
for i in range(lineno, lineno + self.MAXLINES):
print(self.__lines[i])
except IndexError:
break
else:
lineno += self.MAXLINES
key = None
while key is None:
try:
key = raw_input(prompt)
except NameError:
key = input(prompt)
if key not in ('', 'q'):
key = None
if key == 'q':
break
def setcopyright():
"""Set 'copyright' and 'credits' in __builtin__"""
builtins.copyright = _Printer("copyright", sys.copyright)
if _is_jython:
builtins.credits = _Printer(
"credits",
"Jython is maintained by the Jython developers (www.jython.org).")
elif _is_pypy:
builtins.credits = _Printer(
"credits",
"PyPy is maintained by the PyPy developers: http://pypy.org/")
else:
builtins.credits = _Printer("credits", """\
Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
for supporting Python development. See www.python.org for more information.""")
here = os.path.dirname(os.__file__)
builtins.license = _Printer(
"license", "See http://www.python.org/%.3s/license.html" % sys.version,
["LICENSE.txt", "LICENSE"],
[os.path.join(here, os.pardir), here, os.curdir])
class _Helper(object):
"""Define the built-in 'help'.
This is a wrapper around pydoc.help (with a twist).
"""
def __repr__(self):
return "Type help() for interactive help, " \
"or help(object) for help about object."
def __call__(self, *args, **kwds):
import pydoc
return pydoc.help(*args, **kwds)
def sethelper():
builtins.help = _Helper()
def aliasmbcs():
"""On Windows, some default encodings are not provided by Python,
while they are always available as "mbcs" in each locale. Make
them usable by aliasing to "mbcs" in such a case."""
if sys.platform == 'win32':
import locale, codecs
enc = locale.getdefaultlocale()[1]
if enc.startswith('cp'): # "cp***" ?
try:
codecs.lookup(enc)
except LookupError:
import encodings
encodings._cache[enc] = encodings._unknown
encodings.aliases.aliases[enc] = 'mbcs'
def setencoding():
"""Set the string encoding used by the Unicode implementation. The
default is 'ascii', but if you're willing to experiment, you can
change this."""
encoding = "ascii" # Default value set by _PyUnicode_Init()
if 0:
# Enable to support locale aware default string encodings.
import locale
loc = locale.getdefaultlocale()
if loc[1]:
encoding = loc[1]
if 0:
# Enable to switch off string to Unicode coercion and implicit
# Unicode to string conversion.
encoding = "undefined"
if encoding != "ascii":
# On Non-Unicode builds this will raise an AttributeError...
sys.setdefaultencoding(encoding) # Needs Python Unicode build !
def execsitecustomize():
"""Run custom site specific code, if available."""
try:
import sitecustomize
except ImportError:
pass
def virtual_install_main_packages():
f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt'))
sys.real_prefix = f.read().strip()
f.close()
pos = 2
hardcoded_relative_dirs = []
if sys.path[0] == '':
pos += 1
if _is_jython:
paths = [os.path.join(sys.real_prefix, 'Lib')]
elif _is_pypy:
if sys.version_info > (3, 2):
cpyver = '%d' % sys.version_info[0]
elif sys.pypy_version_info >= (1, 5):
cpyver = '%d.%d' % sys.version_info[:2]
else:
cpyver = '%d.%d.%d' % sys.version_info[:3]
paths = [os.path.join(sys.real_prefix, 'lib_pypy'),
os.path.join(sys.real_prefix, 'lib-python', cpyver)]
if sys.pypy_version_info < (1, 9):
paths.insert(1, os.path.join(sys.real_prefix,
'lib-python', 'modified-%s' % cpyver))
hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
#
# This is hardcoded in the Python executable, but relative to sys.prefix:
for path in paths[:]:
plat_path = os.path.join(path, 'plat-%s' % sys.platform)
if os.path.exists(plat_path):
paths.append(plat_path)
elif sys.platform == 'win32':
paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')]
else:
paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])]
hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
if os.path.exists(lib64_path):
if _is_64bit:
paths.insert(0, lib64_path)
else:
paths.append(lib64_path)
# This is hardcoded in the Python executable, but relative to
# sys.prefix. Debian change: we need to add the multiarch triplet
# here, which is where the real stuff lives. As per PEP 421, in
# Python 3.3+, this lives in sys.implementation, while in Python 2.7
# it lives in sys.
try:
arch = getattr(sys, 'implementation', sys)._multiarch
except AttributeError:
# This is a non-multiarch aware Python. Fallback to the old way.
arch = sys.platform
plat_path = os.path.join(sys.real_prefix, 'lib',
'python'+sys.version[:3],
'plat-%s' % arch)
if os.path.exists(plat_path):
paths.append(plat_path)
# This is hardcoded in the Python executable, but
# relative to sys.prefix, so we have to fix up:
for path in list(paths):
tk_dir = os.path.join(path, 'lib-tk')
if os.path.exists(tk_dir):
paths.append(tk_dir)
# These are hardcoded in the Apple's Python executable,
# but relative to sys.prefix, so we have to fix them up:
if sys.platform == 'darwin':
hardcoded_paths = [os.path.join(relative_dir, module)
for relative_dir in hardcoded_relative_dirs
for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')]
for path in hardcoded_paths:
if os.path.exists(path):
paths.append(path)
sys.path.extend(paths)
def force_global_eggs_after_local_site_packages():
"""
Force easy_installed eggs in the global environment to get placed
in sys.path after all packages inside the virtualenv. This
maintains the "least surprise" result that packages in the
virtualenv always mask global packages, never the other way
around.
"""
egginsert = getattr(sys, '__egginsert', 0)
for i, path in enumerate(sys.path):
if i > egginsert and path.startswith(sys.prefix):
egginsert = i
sys.__egginsert = egginsert + 1
def virtual_addsitepackages(known_paths):
force_global_eggs_after_local_site_packages()
return addsitepackages(known_paths, sys_prefix=sys.real_prefix)
def fixclasspath():
"""Adjust the special classpath sys.path entries for Jython. These
entries should follow the base virtualenv lib directories.
"""
paths = []
classpaths = []
for path in sys.path:
if path == '__classpath__' or path.startswith('__pyclasspath__'):
classpaths.append(path)
else:
paths.append(path)
sys.path = paths
sys.path.extend(classpaths)
def execusercustomize():
"""Run custom user specific code, if available."""
try:
import usercustomize
except ImportError:
pass
def main():
global ENABLE_USER_SITE
virtual_install_main_packages()
abs__file__()
paths_in_sys = removeduppaths()
if (os.name == "posix" and sys.path and
os.path.basename(sys.path[-1]) == "Modules"):
addbuilddir()
if _is_jython:
fixclasspath()
GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt'))
if not GLOBAL_SITE_PACKAGES:
ENABLE_USER_SITE = False
if ENABLE_USER_SITE is None:
ENABLE_USER_SITE = check_enableusersite()
paths_in_sys = addsitepackages(paths_in_sys)
paths_in_sys = addusersitepackages(paths_in_sys)
if GLOBAL_SITE_PACKAGES:
paths_in_sys = virtual_addsitepackages(paths_in_sys)
if sys.platform == 'os2emx':
setBEGINLIBPATH()
setquit()
setcopyright()
sethelper()
aliasmbcs()
setencoding()
execsitecustomize()
if ENABLE_USER_SITE:
execusercustomize()
# Remove sys.setdefaultencoding() so that users cannot change the
# encoding after initialization. The test for presence is needed when
# this module is run as a script, because this code is executed twice.
if hasattr(sys, "setdefaultencoding"):
del sys.setdefaultencoding
main()
def _script():
help = """\
%s [--user-base] [--user-site]
Without arguments print some useful information
With arguments print the value of USER_BASE and/or USER_SITE separated
by '%s'.
Exit codes with --user-base or --user-site:
0 - user site directory is enabled
1 - user site directory is disabled by user
2 - uses site directory is disabled by super user
or for security reasons
>2 - unknown error
"""
args = sys.argv[1:]
if not args:
print("sys.path = [")
for dir in sys.path:
print(" %r," % (dir,))
print("]")
def exists(path):
if os.path.isdir(path):
return "exists"
else:
return "doesn't exist"
print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE)))
print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE)))
print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE)
sys.exit(0)
buffer = []
if '--user-base' in args:
buffer.append(USER_BASE)
if '--user-site' in args:
buffer.append(USER_SITE)
if buffer:
print(os.pathsep.join(buffer))
if ENABLE_USER_SITE:
sys.exit(0)
elif ENABLE_USER_SITE is False:
sys.exit(1)
elif ENABLE_USER_SITE is None:
sys.exit(2)
else:
sys.exit(3)
else:
import textwrap
print(textwrap.dedent(help % (sys.argv[0], os.pathsep)))
sys.exit(10)
if __name__ == '__main__':
_script()
| gpl-2.0 | 7,201,976,940,374,427,000 | 35.336412 | 117 | 0.570163 | false |
dwagon/pymoo | moo/game/models.py | 1 | 3173 | from django.db import models
import random
import math
from race.models import Race
class GameSize(models.Model):
name = models.CharField(max_length=50)
numsystems = models.IntegerField(default=10)
max_x = models.IntegerField(default=20)
max_y = models.IntegerField(default=20)
class Game(models.Model):
size = models.ForeignKey(GameSize)
turn = models.IntegerField(default=0)
name = models.CharField(max_length=50)
numplayers = models.IntegerField(default=5)
def __unicode__(self):
return "Game %s" % self.name
def processTurn(self):
from system.models import System
from player.models import Player
for plr in Player.objects.filter(game=self):
plr.read_old_messages()
for systm in System.objects.filter(game=self):
systm.turn()
for plr in Player.objects.filter(game=self):
plr.turn()
self.turn += 1
self.save()
def makeGalaxy(self):
locs = self.getSystemLocations()
for a, b in locs:
self.makeSystem(a, b)
self.addRaces()
self.addPlayers()
def addRaces(self):
for i in range(10):
if not Race.objects.filter(name="race_%d" % i):
Race(name="race_%d" % i).save()
def addPlayers(self):
from player.models import Player
for i in range(self.numplayers):
p = Player()
p.game = self
p.race = Race.objects.get(name='race_%d' % i)
p.name = "Player %d" % i
p.save()
self.selectHome()
def allSystems(self):
from system.models import System
return [s for s in System.objects.filter(game=self)]
def selectHome(self):
from player.models import Player
for plr in Player.objects.filter(game=self):
plan = plr.selectHome()
plan.owner = plr
plan.save()
plr.addInitialShips(plan)
def pickCategory(self):
from system.models import SystemCategory
sc = SystemCategory.objects.all()
totprob = sum([s.prob_existing for s in sc])
x = random.randrange(totprob)
p = 0
for s in sc:
p += s.prob_existing
if x < p:
return s
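    # Illustrative example (hypothetical weights): with three categories whose
    # prob_existing values are 1, 3 and 6, pickCategory returns them with
    # probability 0.1, 0.3 and 0.6 respectively (a roulette-wheel selection).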
def makeSystem(self, x, y):
from system.models import System
s = System()
s.x = x
s.y = y
s.game = self
s.category = self.pickCategory()
s.name = ""
s.save()
s.assignName()
s.makeOrbits()
def getSystemLocations(self):
locs = []
while len(locs) < self.size.numsystems:
x, y = self.randpos()
dists = []
for a, b in locs:
dists.append(self.dist(x, y, a, b))
if not dists or (dists and min(dists)) >= 3:
locs.append((x, y))
return locs
def dist(self, x, y, a, b):
d = math.sqrt((x - a) ** 2 + (y - b) ** 2)
return d
def randpos(self):
x = random.randrange(self.size.max_x)
y = random.randrange(self.size.max_y)
return x, y
# EOF
| gpl-2.0 | 447,984,862,592,508,740 | 27.330357 | 60 | 0.552474 | false |
kiyoto/statsmodels | statsmodels/examples/t_est_rlm.py | 33 | 1127 | # -*- coding: utf-8 -*-
"""
Example from robust test_rlm, fails on Mac
Created on Sun Mar 27 14:36:40 2011
"""
from __future__ import print_function
import numpy as np
import statsmodels.api as sm
RLM = sm.RLM
DECIMAL_4 = 4
DECIMAL_3 = 3
DECIMAL_2 = 2
DECIMAL_1 = 1
from statsmodels.datasets.stackloss import load
data = load() # class attributes for subclasses
data.exog = sm.add_constant(data.exog, prepend=False)
decimal_standarderrors = DECIMAL_1
decimal_scale = DECIMAL_3
results = RLM(data.endog, data.exog,\
M=sm.robust.norms.HuberT()).fit() # default M
h2 = RLM(data.endog, data.exog,\
M=sm.robust.norms.HuberT()).fit(cov="H2").bcov_scaled
h3 = RLM(data.endog, data.exog,\
M=sm.robust.norms.HuberT()).fit(cov="H3").bcov_scaled
from statsmodels.robust.tests.results.results_rlm import Huber
res2 = Huber()
print("res2.h1")
print(res2.h1)
print("results.bcov_scaled")
print(results.bcov_scaled)
print("res2.h1 - results.bcov_scaled")
print(res2.h1 - results.bcov_scaled)
from numpy.testing import assert_almost_equal
assert_almost_equal(res2.h1, results.bcov_scaled, 4)
| bsd-3-clause | 4,487,683,363,733,108,700 | 24.044444 | 65 | 0.704525 | false |
finfish/scrapy | tests/test_downloadermiddleware_httpproxy.py | 2 | 6474 | import os
import sys
from functools import partial
from twisted.trial.unittest import TestCase, SkipTest
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from scrapy.exceptions import NotConfigured
from scrapy.http import Response, Request
from scrapy.spiders import Spider
from scrapy.crawler import Crawler
from scrapy.settings import Settings
spider = Spider('foo')
class TestHttpProxyMiddleware(TestCase):
failureException = AssertionError
def setUp(self):
self._oldenv = os.environ.copy()
def tearDown(self):
os.environ = self._oldenv
def test_not_enabled(self):
settings = Settings({'HTTPPROXY_ENABLED': False})
crawler = Crawler(Spider, settings)
self.assertRaises(NotConfigured, partial(HttpProxyMiddleware.from_crawler, crawler))
def test_no_environment_proxies(self):
os.environ = {'dummy_proxy': 'reset_env_and_do_not_raise'}
mw = HttpProxyMiddleware()
for url in ('http://e.com', 'https://e.com', 'file:///tmp/a'):
req = Request(url)
assert mw.process_request(req, spider) is None
self.assertEqual(req.url, url)
self.assertEqual(req.meta, {})
def test_environment_proxies(self):
os.environ['http_proxy'] = http_proxy = 'https://proxy.for.http:3128'
os.environ['https_proxy'] = https_proxy = 'http://proxy.for.https:8080'
os.environ.pop('file_proxy', None)
mw = HttpProxyMiddleware()
for url, proxy in [('http://e.com', http_proxy),
('https://e.com', https_proxy), ('file://tmp/a', None)]:
req = Request(url)
assert mw.process_request(req, spider) is None
self.assertEqual(req.url, url)
self.assertEqual(req.meta.get('proxy'), proxy)
def test_proxy_precedence_meta(self):
os.environ['http_proxy'] = 'https://proxy.com'
mw = HttpProxyMiddleware()
req = Request('http://scrapytest.org', meta={'proxy': 'https://new.proxy:3128'})
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://new.proxy:3128'})
def test_proxy_auth(self):
os.environ['http_proxy'] = 'https://user:pass@proxy:3128'
mw = HttpProxyMiddleware()
req = Request('http://scrapytest.org')
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcjpwYXNz')
# proxy from request.meta
req = Request('http://scrapytest.org', meta={'proxy': 'https://username:password@proxy:3128'})
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcm5hbWU6cGFzc3dvcmQ=')
def test_proxy_auth_empty_passwd(self):
os.environ['http_proxy'] = 'https://user:@proxy:3128'
mw = HttpProxyMiddleware()
req = Request('http://scrapytest.org')
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcjo=')
# proxy from request.meta
req = Request('http://scrapytest.org', meta={'proxy': 'https://username:@proxy:3128'})
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic dXNlcm5hbWU6')
def test_proxy_auth_encoding(self):
# utf-8 encoding
os.environ['http_proxy'] = u'https://m\u00E1n:pass@proxy:3128'
mw = HttpProxyMiddleware(auth_encoding='utf-8')
req = Request('http://scrapytest.org')
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic bcOhbjpwYXNz')
# proxy from request.meta
req = Request('http://scrapytest.org', meta={'proxy': u'https://\u00FCser:pass@proxy:3128'})
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic w7xzZXI6cGFzcw==')
# default latin-1 encoding
mw = HttpProxyMiddleware(auth_encoding='latin-1')
req = Request('http://scrapytest.org')
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic beFuOnBhc3M=')
# proxy from request.meta, latin-1 encoding
req = Request('http://scrapytest.org', meta={'proxy': u'https://\u00FCser:pass@proxy:3128'})
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'https://proxy:3128'})
self.assertEqual(req.headers.get('Proxy-Authorization'), b'Basic /HNlcjpwYXNz')
def test_proxy_already_seted(self):
os.environ['http_proxy'] = 'https://proxy.for.http:3128'
mw = HttpProxyMiddleware()
req = Request('http://noproxy.com', meta={'proxy': None})
assert mw.process_request(req, spider) is None
assert 'proxy' in req.meta and req.meta['proxy'] is None
def test_no_proxy(self):
os.environ['http_proxy'] = 'https://proxy.for.http:3128'
mw = HttpProxyMiddleware()
os.environ['no_proxy'] = '*'
req = Request('http://noproxy.com')
assert mw.process_request(req, spider) is None
assert 'proxy' not in req.meta
os.environ['no_proxy'] = 'other.com'
req = Request('http://noproxy.com')
assert mw.process_request(req, spider) is None
assert 'proxy' in req.meta
os.environ['no_proxy'] = 'other.com,noproxy.com'
req = Request('http://noproxy.com')
assert mw.process_request(req, spider) is None
assert 'proxy' not in req.meta
# proxy from meta['proxy'] takes precedence
os.environ['no_proxy'] = '*'
req = Request('http://noproxy.com', meta={'proxy': 'http://proxy.com'})
assert mw.process_request(req, spider) is None
self.assertEqual(req.meta, {'proxy': 'http://proxy.com'})
| bsd-3-clause | -2,924,879,498,070,886,000 | 43.648276 | 102 | 0.637319 | false |
sebfung/yellowpillowcase | vendor/cache/gems/pygments.rb-0.6.3/vendor/simplejson/simplejson/tests/test_tuple.py | 71 | 1842 | import unittest
from StringIO import StringIO
import simplejson as json
class TestTuples(unittest.TestCase):
def test_tuple_array_dumps(self):
t = (1, 2, 3)
expect = json.dumps(list(t))
# Default is True
self.assertEqual(expect, json.dumps(t))
self.assertEqual(expect, json.dumps(t, tuple_as_array=True))
self.assertRaises(TypeError, json.dumps, t, tuple_as_array=False)
# Ensure that the "default" does not get called
self.assertEqual(expect, json.dumps(t, default=repr))
self.assertEqual(expect, json.dumps(t, tuple_as_array=True, default=repr))
# Ensure that the "default" gets called
self.assertEqual(
json.dumps(repr(t)),
json.dumps(t, tuple_as_array=False, default=repr))
def test_tuple_array_dump(self):
t = (1, 2, 3)
expect = json.dumps(list(t))
# Default is True
sio = StringIO()
json.dump(t, sio)
self.assertEqual(expect, sio.getvalue())
sio = StringIO()
json.dump(t, sio, tuple_as_array=True)
self.assertEqual(expect, sio.getvalue())
self.assertRaises(TypeError, json.dump, t, StringIO(), tuple_as_array=False)
# Ensure that the "default" does not get called
sio = StringIO()
json.dump(t, sio, default=repr)
self.assertEqual(expect, sio.getvalue())
sio = StringIO()
json.dump(t, sio, tuple_as_array=True, default=repr)
self.assertEqual(expect, sio.getvalue())
# Ensure that the "default" gets called
sio = StringIO()
json.dump(t, sio, tuple_as_array=False, default=repr)
self.assertEqual(
json.dumps(repr(t)),
sio.getvalue())
class TestNamedTuple(unittest.TestCase):
def test_namedtuple_dump(self):
pass | mit | 4,666,317,458,899,060,000 | 36.612245 | 84 | 0.616178 | false |
howepeng/cat | cat-agent/src/main/webapp/paas-monitor.py | 22 | 7135 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import subprocess
import json
import re
import urllib2
from datetime import datetime
COMMAND_PATTERN = "nsenter --target %s --mount --uts --ipc --net --pid -- %s"
def execute(command):
p = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
return p.returncode, p.stdout.read(), p.stderr.read()
def docker_command(pid, command):
_, result, _ = execute(COMMAND_PATTERN % (pid, command))
return result.strip()
def get_instance_ids():
return [ele['Id'] for ele in json.loads(urllib2.urlopen('http://0.0.0.0:8090/containers/ps').read())]
def instance_inspect(instance_id):
result = urllib2.urlopen("http://0.0.0.0:8090/containers/%s/json" % instance_id).read()
return json.loads(result)
def instance_metric(instance_id):
result = urllib2.urlopen("http://0.0.0.0:8090/containers/%s/metric" % instance_id).read()
return json.loads(result)
def get_hostname(inspect_info):
return inspect_info['Config']['Hostname']
def get_ip(inspect_info):
return inspect_info['NetworkSettings']['IPAddress']
def get_created(inspect_info):
created_time = datetime.strptime(re.sub(r'\..*Z', '', inspect_info['Created']), '%Y-%m-%dT%H:%M:%S')
current_time = datetime.now()
return int((current_time - created_time).total_seconds() - 8 * 3600)
def get_name(inspect_info):
return re.sub(r'(^/|_.+$)', '', inspect_info['Name'])
def get_cpu_usage(metric_info):
if 'current_usage' in metric_info['cpu_stats']['cpu_usage']:
return metric_info['cpu_stats']['cpu_usage']['current_usage']
return 0
def get_network_info(pid, network_name):
flow = docker_command(pid, 'ifconfig %s' % network_name)
total_error, total_dropped, total_collision = [0] * 3
m = re.search(r'RX bytes:\s*(\d+).*?TX bytes:\s*(\d+)', flow, re.IGNORECASE)
errors = re.findall(r'errors:\s*(\d+)', flow)
drops = re.findall(r'dropped:\s*(\d+)', flow)
collisions = re.findall(r'collisions:\s*(\d+)', flow)
for error in errors:
total_error += int(error)
for drop in drops:
total_dropped += int(drop)
for collision in collisions:
total_collision += int(collision)
rx, tx = (m.group(1), m.group(2)) if m else (0, 0)
return rx, tx, total_error, total_dropped, total_collision
def get_container_info(pid):
disk_usage = docker_command(pid, 'df -h | grep "rootfs"')
disk_usage = re.split(r'\s+', disk_usage.strip())[-2][:-1]
disk_usage = int(disk_usage) * 1.0 / 100
eth0_rx, eth0_tx, eth0_errors, eth0_dropped, eth0_collision = get_network_info(pid, 'eth0')
lo_rx, lo_tx, lo_errors, lo_dropped, lo_collision = get_network_info(pid, 'lo')
return disk_usage, eth0_rx, eth0_tx, eth0_errors, eth0_dropped, eth0_collision, \
lo_rx, lo_tx, lo_errors, lo_dropped, lo_collision
def get_swap_usage(metric_info):
return metric_info['memory_stats']['stats']['swap']
def get_memory_info(metric_info, instance_id):
used, cached = metric_info['memory_stats']['stats']['rss'], metric_info['memory_stats']['stats']['cache']
shared, buffered = [0, 0]
total = -1
try:
result = json.loads(post('http://localhost:8090/containers/%s/cgroup' % instance_id,
json.dumps({"ReadSubsystem": ["memory.limit_in_bytes"]}),
{'Content-Type': 'application/json'}))
if result[0]['Status'] == 0:
total = result[0]['Out']
except Exception, e:
pass
free = int(total) - int(used) - int(cached)
return total, used, cached, free, shared, buffered
def get_process_info(pid):
result = docker_command(pid, "top -b -n1 | grep Tasks | awk '{print $2,$4}'")
return re.split(r'\s+', result.strip())
def get_number_of_user_connected(pid):
result = docker_command(pid, "who | awk '{ print $1 }' | sort | uniq | wc -l")
return result.strip()
def get_tcp_established(pid):
result = docker_command(pid, 'netstat -anot | grep ESTABLISHED | wc -l')
return result.strip()
def get_inodes_info(pid):
result = docker_command(pid, "df -i | grep rootfs | awk '{print $2,$4}'")
return re.split(r'\s+', result.strip())
def post(url, data, headers={}):
req = urllib2.Request(url, headers=headers)
opener = urllib2.build_opener()
response = opener.open(req, data)
return response.read()
def get_all_info(current_instance=None):
instance_ids = get_instance_ids()
for instance_id in instance_ids:
if not instance_id:
continue
if current_instance and not instance_id.startswith(current_instance):
continue
inspect_info = instance_inspect(instance_id)
metric_info = instance_metric(instance_id)
pid = inspect_info['State']['Pid']
disk_usage, eth0_rx, eth0_tx, eth0_errors, eth0_dropped, eth0_collision, lo_rx, lo_tx, \
lo_errors, lo_dropped, lo_collision = get_container_info(pid)
ip = get_ip(inspect_info)
mem_total, mem_used, mem_cached, mem_free, mem_shared, mem_buffered = get_memory_info(metric_info, instance_id)
process_total, process_running = get_process_info(pid)
number_of_user_connected = get_number_of_user_connected(pid)
tcp_established_num = get_tcp_established(pid)
inodes_total, inodes_free = get_inodes_info(pid)
m = [
('domain', '', get_name(inspect_info)),
('system_cpuUsage', 'avg', '%.3f' % (float(get_cpu_usage(metric_info)))),
('system_cachedMem', 'avg', mem_cached),
('system_totalMem', 'avg', mem_total),
('system_usedMem', 'avg', mem_used),
('system_freeMem', 'avg', mem_free),
('system_sharedMem', 'avg', mem_shared),
('system_buffersMem', 'avg', mem_buffered),
('system_/-usage', 'avg', disk_usage),
('system_swapUsage', 'avg', get_swap_usage(metric_info)),
('system_eth0-outFlow', 'sum', eth0_tx),
('system_eth0-inFlow', 'sum', eth0_rx),
('system_eth0-dropped', 'sum', eth0_dropped),
('system_eth0-errors', 'sum', eth0_errors),
('system_eth0-collisions', 'sum', eth0_collision),
('system_lo-outFlow', 'sum', lo_tx),
('system_lo-inFlow', 'sum', lo_rx),
('system_totalProcess', 'avg', process_total),
('system_runningProcess', 'avg', process_running),
('system_establishedTcp', 'avg', tcp_established_num),
('system_loginUsers', 'avg', number_of_user_connected),
('system_/-freeInodes', 'avg', "%.3f" % (float(inodes_free) / int(inodes_total))),
]
print '\n'.join(['%s_%s%s=%s' % (k, ip, t and ':' + t, v) for k, t, v in m])
if __name__ == '__main__':
import sys
instance_id = None
if len(sys.argv) > 1:
instance_id = sys.argv[1]
if instance_id == 'instance_ids':
print '\n'.join(get_instance_ids())
exit(0)
get_all_info(instance_id)
| apache-2.0 | -6,752,788,449,244,952,000 | 33.138756 | 119 | 0.603083 | false |
wolfram74/numerical_methods_iserles_notes | venv/lib/python2.7/site-packages/IPython/html/terminal/api_handlers.py | 6 | 1298 | import json
from tornado import web, gen
from ..base.handlers import APIHandler, json_errors
from ..utils import url_path_join
class TerminalRootHandler(APIHandler):
@web.authenticated
@json_errors
def get(self):
tm = self.terminal_manager
terms = [{'name': name} for name in tm.terminals]
self.finish(json.dumps(terms))
@web.authenticated
@json_errors
def post(self):
"""POST /terminals creates a new terminal and redirects to it"""
name, _ = self.terminal_manager.new_named_terminal()
self.finish(json.dumps({'name': name}))
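# Illustrative exchange (the URL prefix and terminal name are assumptions, not
# defined in this file): POST /api/terminals -> {"name": "1"};
# GET /api/terminals -> [{"name": "1"}].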
class TerminalHandler(APIHandler):
SUPPORTED_METHODS = ('GET', 'DELETE')
@web.authenticated
@json_errors
def get(self, name):
tm = self.terminal_manager
if name in tm.terminals:
self.finish(json.dumps({'name': name}))
else:
raise web.HTTPError(404, "Terminal not found: %r" % name)
@web.authenticated
@json_errors
@gen.coroutine
def delete(self, name):
tm = self.terminal_manager
if name in tm.terminals:
yield tm.terminate(name, force=True)
self.set_status(204)
self.finish()
else:
raise web.HTTPError(404, "Terminal not found: %r" % name)
| mit | 3,813,568,690,844,094,500 | 28.5 | 72 | 0.617874 | false |
munyirik/python | cpython/Lib/email/message.py | 9 | 45765 | # Copyright (C) 2001-2007 Python Software Foundation
# Author: Barry Warsaw
# Contact: [email protected]
"""Basic message object for the email package object model."""
__all__ = ['Message']
import re
import uu
import quopri
import warnings
from io import BytesIO, StringIO
# Intrapackage imports
from email import utils
from email import errors
from email._policybase import compat32
from email import charset as _charset
from email._encoded_words import decode_b
Charset = _charset.Charset
SEMISPACE = '; '
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
def _splitparam(param):
# Split header parameters. BAW: this may be too simple. It isn't
# strictly RFC 2045 (section 5.1) compliant, but it catches most headers
# found in the wild. We may eventually need a full fledged parser.
# RDM: we might have a Header here; for now just stringify it.
a, sep, b = str(param).partition(';')
if not sep:
return a.strip(), None
return a.strip(), b.strip()
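# For illustration: _splitparam('text/plain; charset="utf-8"') returns
# ('text/plain', 'charset="utf-8"').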
def _formatparam(param, value=None, quote=True):
"""Convenience function to format and return a key=value pair.
This will quote the value if needed or if quote is true. If value is a
three tuple (charset, language, value), it will be encoded according
to RFC2231 rules. If it contains non-ascii characters it will likewise
be encoded according to RFC2231 rules, using the utf-8 charset and
a null language.
"""
if value is not None and len(value) > 0:
# A tuple is used for RFC 2231 encoded parameter values where items
# are (charset, language, value). charset is a string, not a Charset
# instance. RFC 2231 encoded values are never quoted, per RFC.
if isinstance(value, tuple):
# Encode as per RFC 2231
param += '*'
value = utils.encode_rfc2231(value[2], value[0], value[1])
return '%s=%s' % (param, value)
else:
try:
value.encode('ascii')
except UnicodeEncodeError:
param += '*'
value = utils.encode_rfc2231(value, 'utf-8', '')
return '%s=%s' % (param, value)
# BAW: Please check this. I think that if quote is set it should
# force quoting even if not necessary.
if quote or tspecials.search(value):
return '%s="%s"' % (param, utils.quote(value))
else:
return '%s=%s' % (param, value)
else:
return param
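def _example_formatparam_usage():
    # Illustrative only (not part of the original module): the three shapes
    # of output _formatparam can produce. The exact RFC 2231 encoding of the
    # last value depends on utils.encode_rfc2231, so it is only described.
    plain = _formatparam('filename', 'report.txt', quote=False)  # filename=report.txt
    quoted = _formatparam('filename', 'a;b.txt')                 # filename="a;b.txt"
    encoded = _formatparam('filename', 'Fußballer.ppt')          # filename*=utf-8''... (RFC 2231)
    return plain, quoted, encoded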
def _parseparam(s):
# RDM This might be a Header, so for now stringify it.
s = ';' + str(s)
plist = []
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
if '=' in f:
i = f.index('=')
f = f[:i].strip().lower() + '=' + f[i+1:].strip()
plist.append(f.strip())
s = s[end:]
return plist
def _unquotevalue(value):
# This is different than utils.collapse_rfc2231_value() because it doesn't
# try to convert the value to a unicode. Message.get_param() and
# Message.get_params() are both currently defined to return the tuple in
# the face of RFC 2231 parameters.
if isinstance(value, tuple):
return value[0], value[1], utils.unquote(value[2])
else:
return utils.unquote(value)
class Message:
"""Basic message object.
A message object is defined as something that has a bunch of RFC 2822
headers and a payload. It may optionally have an envelope header
(a.k.a. Unix-From or From_ header). If the message is a container (i.e. a
multipart or a message/rfc822), then the payload is a list of Message
objects, otherwise it is a string.
Message objects implement part of the `mapping' interface, which assumes
there is exactly one occurrence of the header per message. Some headers
do in fact appear multiple times (e.g. Received) and for those headers,
you must use the explicit API to set or get all the headers. Not all of
the mapping methods are implemented.
"""
def __init__(self, policy=compat32):
self.policy = policy
self._headers = []
self._unixfrom = None
self._payload = None
self._charset = None
# Defaults for multipart messages
self.preamble = self.epilogue = None
self.defects = []
# Default content type
self._default_type = 'text/plain'
def __str__(self):
"""Return the entire formatted message as a string.
"""
return self.as_string()
def as_string(self, unixfrom=False, maxheaderlen=0, policy=None):
"""Return the entire formatted message as a string.
Optional 'unixfrom', when true, means include the Unix From_ envelope
header. For backward compatibility reasons, if maxheaderlen is
not specified it defaults to 0, so you must override it explicitly
if you want a different maxheaderlen. 'policy' is passed to the
Generator instance used to serialize the message; if it is not
specified the policy associated with the message instance is used.
If the message object contains binary data that is not encoded
according to RFC standards, the non-compliant data will be replaced by
unicode "unknown character" code points.
"""
from email.generator import Generator
policy = self.policy if policy is None else policy
fp = StringIO()
g = Generator(fp,
mangle_from_=False,
maxheaderlen=maxheaderlen,
policy=policy)
g.flatten(self, unixfrom=unixfrom)
return fp.getvalue()
def __bytes__(self):
"""Return the entire formatted message as a bytes object.
"""
return self.as_bytes()
def as_bytes(self, unixfrom=False, policy=None):
"""Return the entire formatted message as a bytes object.
Optional 'unixfrom', when true, means include the Unix From_ envelope
header. 'policy' is passed to the BytesGenerator instance used to
serialize the message; if not specified the policy associated with
the message instance is used.
"""
from email.generator import BytesGenerator
policy = self.policy if policy is None else policy
fp = BytesIO()
g = BytesGenerator(fp, mangle_from_=False, policy=policy)
g.flatten(self, unixfrom=unixfrom)
return fp.getvalue()
def is_multipart(self):
"""Return True if the message consists of multiple parts."""
return isinstance(self._payload, list)
#
# Unix From_ line
#
def set_unixfrom(self, unixfrom):
self._unixfrom = unixfrom
def get_unixfrom(self):
return self._unixfrom
#
# Payload manipulation.
#
def attach(self, payload):
"""Add the given payload to the current payload.
The current payload will always be a list of objects after this method
is called. If you want to set the payload to a scalar object, use
set_payload() instead.
"""
if self._payload is None:
self._payload = [payload]
else:
try:
self._payload.append(payload)
except AttributeError:
raise TypeError("Attach is not valid on a message with a"
" non-multipart payload")
def get_payload(self, i=None, decode=False):
"""Return a reference to the payload.
The payload will either be a list object or a string. If you mutate
the list object, you modify the message's payload in place. Optional
i returns that index into the payload.
Optional decode is a flag indicating whether the payload should be
decoded or not, according to the Content-Transfer-Encoding header
(default is False).
When True and the message is not a multipart, the payload will be
decoded if this header's value is `quoted-printable' or `base64'. If
some other encoding is used, or the header is missing, or if the
payload has bogus data (i.e. bogus base64 or uuencoded data), the
payload is returned as-is.
If the message is a multipart and the decode flag is True, then None
is returned.
"""
# Here is the logic table for this code, based on the email5.0.0 code:
# i decode is_multipart result
# ------ ------ ------------ ------------------------------
# None True True None
# i True True None
# None False True _payload (a list)
# i False True _payload element i (a Message)
# i False False error (not a list)
# i True False error (not a list)
# None False False _payload
# None True False _payload decoded (bytes)
# Note that Barry planned to factor out the 'decode' case, but that
# isn't so easy now that we handle the 8 bit data, which needs to be
# converted in both the decode and non-decode path.
if self.is_multipart():
if decode:
return None
if i is None:
return self._payload
else:
return self._payload[i]
# For backward compatibility, Use isinstance and this error message
# instead of the more logical is_multipart test.
if i is not None and not isinstance(self._payload, list):
raise TypeError('Expected list, got %s' % type(self._payload))
payload = self._payload
# cte might be a Header, so for now stringify it.
cte = str(self.get('content-transfer-encoding', '')).lower()
# payload may be bytes here.
if isinstance(payload, str):
if utils._has_surrogates(payload):
bpayload = payload.encode('ascii', 'surrogateescape')
if not decode:
try:
payload = bpayload.decode(self.get_param('charset', 'ascii'), 'replace')
except LookupError:
payload = bpayload.decode('ascii', 'replace')
elif decode:
try:
bpayload = payload.encode('ascii')
except UnicodeError:
# This won't happen for RFC compliant messages (messages
# containing only ASCII code points in the unicode input).
# If it does happen, turn the string into bytes in a way
# guaranteed not to fail.
bpayload = payload.encode('raw-unicode-escape')
if not decode:
return payload
if cte == 'quoted-printable':
return quopri.decodestring(bpayload)
elif cte == 'base64':
# XXX: this is a bit of a hack; decode_b should probably be factored
# out somewhere, but I haven't figured out where yet.
value, defects = decode_b(b''.join(bpayload.splitlines()))
for defect in defects:
self.policy.handle_defect(self, defect)
return value
elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
in_file = BytesIO(bpayload)
out_file = BytesIO()
try:
uu.decode(in_file, out_file, quiet=True)
return out_file.getvalue()
except uu.Error:
# Some decoding problem
return bpayload
if isinstance(payload, str):
return bpayload
return payload
def set_payload(self, payload, charset=None):
"""Set the payload to the given value.
Optional charset sets the message's default character set. See
set_charset() for details.
"""
if hasattr(payload, 'encode'):
if charset is None:
self._payload = payload
return
if not isinstance(charset, Charset):
charset = Charset(charset)
payload = payload.encode(charset.output_charset)
if hasattr(payload, 'decode'):
self._payload = payload.decode('ascii', 'surrogateescape')
else:
self._payload = payload
if charset is not None:
self.set_charset(charset)
def set_charset(self, charset):
"""Set the charset of the payload to a given character set.
charset can be a Charset instance, a string naming a character set, or
None. If it is a string it will be converted to a Charset instance.
If charset is None, the charset parameter will be removed from the
Content-Type field. Anything else will generate a TypeError.
The message will be assumed to be of type text/* encoded with
charset.input_charset. It will be converted to charset.output_charset
and encoded properly, if needed, when generating the plain text
representation of the message. MIME headers (MIME-Version,
Content-Type, Content-Transfer-Encoding) will be added as needed.
"""
if charset is None:
self.del_param('charset')
self._charset = None
return
if not isinstance(charset, Charset):
charset = Charset(charset)
self._charset = charset
if 'MIME-Version' not in self:
self.add_header('MIME-Version', '1.0')
if 'Content-Type' not in self:
self.add_header('Content-Type', 'text/plain',
charset=charset.get_output_charset())
else:
self.set_param('charset', charset.get_output_charset())
if charset != charset.get_output_charset():
self._payload = charset.body_encode(self._payload)
if 'Content-Transfer-Encoding' not in self:
cte = charset.get_body_encoding()
try:
cte(self)
except TypeError:
# This 'if' is for backward compatibility, it allows unicode
# through even though that won't work correctly if the
# message is serialized.
payload = self._payload
if payload:
try:
payload = payload.encode('ascii', 'surrogateescape')
except UnicodeError:
payload = payload.encode(charset.output_charset)
self._payload = charset.body_encode(payload)
self.add_header('Content-Transfer-Encoding', cte)
def get_charset(self):
"""Return the Charset instance associated with the message's payload.
"""
return self._charset
#
# MAPPING INTERFACE (partial)
#
def __len__(self):
"""Return the total number of headers, including duplicates."""
return len(self._headers)
def __getitem__(self, name):
"""Get a header value.
Return None if the header is missing instead of raising an exception.
Note that if the header appeared multiple times, exactly which
occurrence gets returned is undefined. Use get_all() to get all
the values matching a header field name.
"""
return self.get(name)
def __setitem__(self, name, val):
"""Set the value of a header.
Note: this does not overwrite an existing header with the same field
name. Use __delitem__() first to delete any existing headers.
"""
max_count = self.policy.header_max_count(name)
if max_count:
lname = name.lower()
found = 0
for k, v in self._headers:
if k.lower() == lname:
found += 1
if found >= max_count:
raise ValueError("There may be at most {} {} headers "
"in a message".format(max_count, name))
self._headers.append(self.policy.header_store_parse(name, val))
def __delitem__(self, name):
"""Delete all occurrences of a header, if present.
Does not raise an exception if the header is missing.
"""
name = name.lower()
newheaders = []
for k, v in self._headers:
if k.lower() != name:
newheaders.append((k, v))
self._headers = newheaders
def __contains__(self, name):
return name.lower() in [k.lower() for k, v in self._headers]
def __iter__(self):
for field, value in self._headers:
yield field
def keys(self):
"""Return a list of all the message's header field names.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [k for k, v in self._headers]
def values(self):
"""Return a list of all the message's header values.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [self.policy.header_fetch_parse(k, v)
for k, v in self._headers]
def items(self):
"""Get all the message's header fields and values.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [(k, self.policy.header_fetch_parse(k, v))
for k, v in self._headers]
def get(self, name, failobj=None):
"""Get a header value.
Like __getitem__() but return failobj instead of None when the field
is missing.
"""
name = name.lower()
for k, v in self._headers:
if k.lower() == name:
return self.policy.header_fetch_parse(k, v)
return failobj
#
# "Internal" methods (public API, but only intended for use by a parser
# or generator, not normal application code.)
#
def set_raw(self, name, value):
"""Store name and value in the model without modification.
This is an "internal" API, intended only for use by a parser.
"""
self._headers.append((name, value))
def raw_items(self):
"""Return the (name, value) header pairs without modification.
This is an "internal" API, intended only for use by a generator.
"""
return iter(self._headers.copy())
#
# Additional useful stuff
#
def get_all(self, name, failobj=None):
"""Return a list of all the values for the named field.
These will be sorted in the order they appeared in the original
message, and may contain duplicates. Any fields deleted and
re-inserted are always appended to the header list.
If no such fields exist, failobj is returned (defaults to None).
"""
values = []
name = name.lower()
for k, v in self._headers:
if k.lower() == name:
values.append(self.policy.header_fetch_parse(k, v))
if not values:
return failobj
return values
def add_header(self, _name, _value, **_params):
"""Extended header setting.
name is the header field to add. keyword arguments can be used to set
additional parameters for the header field, with underscores converted
to dashes. Normally the parameter will be added as key="value" unless
value is None, in which case only the key will be added. If a
parameter value contains non-ASCII characters it can be specified as a
three-tuple of (charset, language, value), in which case it will be
encoded according to RFC2231 rules. Otherwise it will be encoded using
the utf-8 charset and a language of ''.
Examples:
msg.add_header('content-disposition', 'attachment', filename='bud.gif')
msg.add_header('content-disposition', 'attachment',
filename=('utf-8', '', 'Fußballer.ppt'))
msg.add_header('content-disposition', 'attachment',
filename='Fußballer.ppt')
"""
parts = []
for k, v in _params.items():
if v is None:
parts.append(k.replace('_', '-'))
else:
parts.append(_formatparam(k.replace('_', '-'), v))
if _value is not None:
parts.insert(0, _value)
self[_name] = SEMISPACE.join(parts)
def replace_header(self, _name, _value):
"""Replace a header.
Replace the first matching header found in the message, retaining
header order and case. If no matching header was found, a KeyError is
raised.
"""
_name = _name.lower()
for i, (k, v) in zip(range(len(self._headers)), self._headers):
if k.lower() == _name:
self._headers[i] = self.policy.header_store_parse(k, _value)
break
else:
raise KeyError(_name)
#
# Use these three methods instead of the three above.
#
def get_content_type(self):
"""Return the message's content type.
The returned string is coerced to lower case of the form
`maintype/subtype'. If there was no Content-Type header in the
message, the default type as given by get_default_type() will be
returned. Since according to RFC 2045, messages always have a default
type this will always return a value.
RFC 2045 defines a message's default type to be text/plain unless it
appears inside a multipart/digest container, in which case it would be
message/rfc822.
"""
missing = object()
value = self.get('content-type', missing)
if value is missing:
# This should have no parameters
return self.get_default_type()
ctype = _splitparam(value)[0].lower()
# RFC 2045, section 5.2 says if its invalid, use text/plain
if ctype.count('/') != 1:
return 'text/plain'
return ctype
def get_content_maintype(self):
"""Return the message's main content type.
This is the `maintype' part of the string returned by
get_content_type().
"""
ctype = self.get_content_type()
return ctype.split('/')[0]
def get_content_subtype(self):
"""Returns the message's sub-content type.
This is the `subtype' part of the string returned by
get_content_type().
"""
ctype = self.get_content_type()
return ctype.split('/')[1]
def get_default_type(self):
"""Return the `default' content type.
Most messages have a default content type of text/plain, except for
messages that are subparts of multipart/digest containers. Such
subparts have a default content type of message/rfc822.
"""
return self._default_type
def set_default_type(self, ctype):
"""Set the `default' content type.
ctype should be either "text/plain" or "message/rfc822", although this
is not enforced. The default content type is not stored in the
Content-Type header.
"""
self._default_type = ctype
def _get_params_preserve(self, failobj, header):
# Like get_params() but preserves the quoting of values. BAW:
# should this be part of the public interface?
missing = object()
value = self.get(header, missing)
if value is missing:
return failobj
params = []
for p in _parseparam(value):
try:
name, val = p.split('=', 1)
name = name.strip()
val = val.strip()
except ValueError:
# Must have been a bare attribute
name = p.strip()
val = ''
params.append((name, val))
params = utils.decode_params(params)
return params
def get_params(self, failobj=None, header='content-type', unquote=True):
"""Return the message's Content-Type parameters, as a list.
The elements of the returned list are 2-tuples of key/value pairs, as
split on the `=' sign. The left hand side of the `=' is the key,
while the right hand side is the value. If there is no `=' sign in
the parameter the value is the empty string. The value is as
described in the get_param() method.
Optional failobj is the object to return if there is no Content-Type
header. Optional header is the header to search instead of
Content-Type. If unquote is True, the value is unquoted.
"""
missing = object()
params = self._get_params_preserve(missing, header)
if params is missing:
return failobj
if unquote:
return [(k, _unquotevalue(v)) for k, v in params]
else:
return params
def get_param(self, param, failobj=None, header='content-type',
unquote=True):
"""Return the parameter value if found in the Content-Type header.
Optional failobj is the object to return if there is no Content-Type
header, or the Content-Type header has no such parameter. Optional
header is the header to search instead of Content-Type.
Parameter keys are always compared case insensitively. The return
value can either be a string, or a 3-tuple if the parameter was RFC
2231 encoded. When it's a 3-tuple, the elements of the value are of
the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and
LANGUAGE can be None, in which case you should consider VALUE to be
encoded in the us-ascii charset. You can usually ignore LANGUAGE.
The parameter value (either the returned string, or the VALUE item in
the 3-tuple) is always unquoted, unless unquote is set to False.
If your application doesn't care whether the parameter was RFC 2231
encoded, it can turn the return value into a string as follows:
rawparam = msg.get_param('foo')
param = email.utils.collapse_rfc2231_value(rawparam)
"""
if header not in self:
return failobj
for k, v in self._get_params_preserve(failobj, header):
if k.lower() == param.lower():
if unquote:
return _unquotevalue(v)
else:
return v
return failobj
def set_param(self, param, value, header='Content-Type', requote=True,
charset=None, language='', replace=False):
"""Set a parameter in the Content-Type header.
If the parameter already exists in the header, its value will be
replaced with the new value.
If header is Content-Type and has not yet been defined for this
message, it will be set to "text/plain" and the new parameter and
value will be appended as per RFC 2045.
An alternate header can be specified in the header argument, and all
parameters will be quoted as necessary unless requote is False.
If charset is specified, the parameter will be encoded according to RFC
2231. Optional language specifies the RFC 2231 language, defaulting
to the empty string. Both charset and language should be strings.
"""
if not isinstance(value, tuple) and charset:
value = (charset, language, value)
if header not in self and header.lower() == 'content-type':
ctype = 'text/plain'
else:
ctype = self.get(header)
if not self.get_param(param, header=header):
if not ctype:
ctype = _formatparam(param, value, requote)
else:
ctype = SEMISPACE.join(
[ctype, _formatparam(param, value, requote)])
else:
ctype = ''
for old_param, old_value in self.get_params(header=header,
unquote=requote):
append_param = ''
if old_param.lower() == param.lower():
append_param = _formatparam(param, value, requote)
else:
append_param = _formatparam(old_param, old_value, requote)
if not ctype:
ctype = append_param
else:
ctype = SEMISPACE.join([ctype, append_param])
if ctype != self.get(header):
if replace:
self.replace_header(header, ctype)
else:
del self[header]
self[header] = ctype
def del_param(self, param, header='content-type', requote=True):
"""Remove the given parameter completely from the Content-Type header.
The header will be re-written in place without the parameter or its
value. All values will be quoted as necessary unless requote is
False. Optional header specifies an alternative to the Content-Type
header.
"""
if header not in self:
return
new_ctype = ''
for p, v in self.get_params(header=header, unquote=requote):
if p.lower() != param.lower():
if not new_ctype:
new_ctype = _formatparam(p, v, requote)
else:
new_ctype = SEMISPACE.join([new_ctype,
_formatparam(p, v, requote)])
if new_ctype != self.get(header):
del self[header]
self[header] = new_ctype
def set_type(self, type, header='Content-Type', requote=True):
"""Set the main type and subtype for the Content-Type header.
type must be a string in the form "maintype/subtype", otherwise a
ValueError is raised.
This method replaces the Content-Type header, keeping all the
parameters in place. If requote is False, this leaves the existing
header's quoting as is. Otherwise, the parameters will be quoted (the
default).
An alternative header can be specified in the header argument. When
the Content-Type header is set, we'll always also add a MIME-Version
header.
"""
# BAW: should we be strict?
if not type.count('/') == 1:
raise ValueError
# Set the Content-Type, you get a MIME-Version
if header.lower() == 'content-type':
del self['mime-version']
self['MIME-Version'] = '1.0'
if header not in self:
self[header] = type
return
params = self.get_params(header=header, unquote=requote)
del self[header]
self[header] = type
# Skip the first param; it's the old type.
for p, v in params[1:]:
self.set_param(p, v, header, requote)
def get_filename(self, failobj=None):
"""Return the filename associated with the payload if present.
The filename is extracted from the Content-Disposition header's
`filename' parameter, and it is unquoted. If that header is missing
the `filename' parameter, this method falls back to looking for the
`name' parameter.
"""
missing = object()
filename = self.get_param('filename', missing, 'content-disposition')
if filename is missing:
filename = self.get_param('name', missing, 'content-type')
if filename is missing:
return failobj
return utils.collapse_rfc2231_value(filename).strip()
def get_boundary(self, failobj=None):
"""Return the boundary associated with the payload if present.
The boundary is extracted from the Content-Type header's `boundary'
parameter, and it is unquoted.
"""
missing = object()
boundary = self.get_param('boundary', missing)
if boundary is missing:
return failobj
# RFC 2046 says that boundaries may begin but not end in w/s
return utils.collapse_rfc2231_value(boundary).rstrip()
def set_boundary(self, boundary):
"""Set the boundary parameter in Content-Type to 'boundary'.
This is subtly different than deleting the Content-Type header and
adding a new one with a new boundary parameter via add_header(). The
main difference is that using the set_boundary() method preserves the
order of the Content-Type header in the original message.
HeaderParseError is raised if the message has no Content-Type header.
"""
missing = object()
params = self._get_params_preserve(missing, 'content-type')
if params is missing:
# There was no Content-Type header, and we don't know what type
# to set it to, so raise an exception.
raise errors.HeaderParseError('No Content-Type header found')
newparams = []
foundp = False
for pk, pv in params:
if pk.lower() == 'boundary':
newparams.append(('boundary', '"%s"' % boundary))
foundp = True
else:
newparams.append((pk, pv))
if not foundp:
# The original Content-Type header had no boundary attribute.
# Tack one on the end. BAW: should we raise an exception
# instead???
newparams.append(('boundary', '"%s"' % boundary))
# Replace the existing Content-Type header with the new value
newheaders = []
for h, v in self._headers:
if h.lower() == 'content-type':
parts = []
for k, v in newparams:
if v == '':
parts.append(k)
else:
parts.append('%s=%s' % (k, v))
val = SEMISPACE.join(parts)
newheaders.append(self.policy.header_store_parse(h, val))
else:
newheaders.append((h, v))
self._headers = newheaders
def get_content_charset(self, failobj=None):
"""Return the charset parameter of the Content-Type header.
The returned string is always coerced to lower case. If there is no
Content-Type header, or if that header has no charset parameter,
failobj is returned.
"""
missing = object()
charset = self.get_param('charset', missing)
if charset is missing:
return failobj
if isinstance(charset, tuple):
# RFC 2231 encoded, so decode it, and it better end up as ascii.
pcharset = charset[0] or 'us-ascii'
try:
# LookupError will be raised if the charset isn't known to
# Python. UnicodeError will be raised if the encoded text
# contains a character not in the charset.
as_bytes = charset[2].encode('raw-unicode-escape')
charset = str(as_bytes, pcharset)
except (LookupError, UnicodeError):
charset = charset[2]
# charset characters must be in us-ascii range
try:
charset.encode('us-ascii')
except UnicodeError:
return failobj
# RFC 2046, $4.1.2 says charsets are not case sensitive
return charset.lower()
def get_charsets(self, failobj=None):
"""Return a list containing the charset(s) used in this message.
The returned list of items describes the Content-Type headers'
charset parameter for this message and all the subparts in its
payload.
Each item will either be a string (the value of the charset parameter
in the Content-Type header of that part) or the value of the
'failobj' parameter (defaults to None), if the part does not have a
main MIME type of "text", or the charset is not defined.
The list will contain one string for each part of the message, plus
one for the container message (i.e. self), so that a non-multipart
message will still return a list of length 1.
"""
return [part.get_content_charset(failobj) for part in self.walk()]
def get_content_disposition(self):
"""Return the message's content-disposition if it exists, or None.
The return values can be either 'inline', 'attachment' or None
according to the rfc2183.
"""
value = self.get('content-disposition')
if value is None:
return None
c_d = _splitparam(value)[0].lower()
return c_d
# I.e. def walk(self): ...
from email.iterators import walk
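def _example_message_usage():
    # Illustrative sketch (not part of the original module): exercises the
    # payload and Content-Type parameter APIs defined on Message above.
    msg = Message()
    msg['Subject'] = 'demo'
    msg.add_header('Content-Disposition', 'attachment', filename='report.txt')
    msg.set_payload('aGVsbG8=\n')
    msg['Content-Transfer-Encoding'] = 'base64'
    raw = msg.get_payload()                  # the stored string, undecoded
    decoded = msg.get_payload(decode=True)   # b'hello', decoded per the CTE header
    msg.set_param('charset', 'utf-8')        # creates a text/plain Content-Type with the parameter
    charset = msg.get_param('charset')       # 'utf-8'
    return raw, decoded, charset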
class MIMEPart(Message):
def __init__(self, policy=None):
if policy is None:
from email.policy import default
policy = default
Message.__init__(self, policy)
def is_attachment(self):
c_d = self.get('content-disposition')
return False if c_d is None else c_d.content_disposition == 'attachment'
def _find_body(self, part, preferencelist):
if part.is_attachment():
return
maintype, subtype = part.get_content_type().split('/')
if maintype == 'text':
if subtype in preferencelist:
yield (preferencelist.index(subtype), part)
return
if maintype != 'multipart':
return
if subtype != 'related':
for subpart in part.iter_parts():
yield from self._find_body(subpart, preferencelist)
return
if 'related' in preferencelist:
yield (preferencelist.index('related'), part)
candidate = None
start = part.get_param('start')
if start:
for subpart in part.iter_parts():
if subpart['content-id'] == start:
candidate = subpart
break
if candidate is None:
subparts = part.get_payload()
candidate = subparts[0] if subparts else None
if candidate is not None:
yield from self._find_body(candidate, preferencelist)
def get_body(self, preferencelist=('related', 'html', 'plain')):
"""Return best candidate mime part for display as 'body' of message.
Do a depth first search, starting with self, looking for the first part
matching each of the items in preferencelist, and return the part
corresponding to the first item that has a match, or None if no items
have a match. If 'related' is not included in preferencelist, consider
the root part of any multipart/related encountered as a candidate
match. Ignore parts with 'Content-Disposition: attachment'.
"""
best_prio = len(preferencelist)
body = None
for prio, part in self._find_body(self, preferencelist):
if prio < best_prio:
best_prio = prio
body = part
if prio == 0:
break
return body
_body_types = {('text', 'plain'),
('text', 'html'),
('multipart', 'related'),
('multipart', 'alternative')}
def iter_attachments(self):
"""Return an iterator over the non-main parts of a multipart.
Skip the first of each occurrence of text/plain, text/html,
multipart/related, or multipart/alternative in the multipart (unless
they have a 'Content-Disposition: attachment' header) and include all
remaining subparts in the returned iterator. When applied to a
multipart/related, return all parts except the root part. Return an
empty iterator when applied to a multipart/alternative or a
non-multipart.
"""
maintype, subtype = self.get_content_type().split('/')
if maintype != 'multipart' or subtype == 'alternative':
return
parts = self.get_payload()
if maintype == 'multipart' and subtype == 'related':
# For related, we treat everything but the root as an attachment.
# The root may be indicated by 'start'; if there's no start or we
# can't find the named start, treat the first subpart as the root.
start = self.get_param('start')
if start:
found = False
attachments = []
for part in parts:
if part.get('content-id') == start:
found = True
else:
attachments.append(part)
if found:
yield from attachments
return
parts.pop(0)
yield from parts
return
# Otherwise we more or less invert the remaining logic in get_body.
# This only really works in edge cases (ex: non-text relateds or
# alternatives) if the sending agent sets content-disposition.
seen = [] # Only skip the first example of each candidate type.
for part in parts:
maintype, subtype = part.get_content_type().split('/')
if ((maintype, subtype) in self._body_types and
not part.is_attachment() and subtype not in seen):
seen.append(subtype)
continue
yield part
def iter_parts(self):
"""Return an iterator over all immediate subparts of a multipart.
Return an empty iterator for a non-multipart.
"""
if self.get_content_maintype() == 'multipart':
yield from self.get_payload()
def get_content(self, *args, content_manager=None, **kw):
if content_manager is None:
content_manager = self.policy.content_manager
return content_manager.get_content(self, *args, **kw)
def set_content(self, *args, content_manager=None, **kw):
if content_manager is None:
content_manager = self.policy.content_manager
content_manager.set_content(self, *args, **kw)
def _make_multipart(self, subtype, disallowed_subtypes, boundary):
if self.get_content_maintype() == 'multipart':
existing_subtype = self.get_content_subtype()
disallowed_subtypes = disallowed_subtypes + (subtype,)
if existing_subtype in disallowed_subtypes:
raise ValueError("Cannot convert {} to {}".format(
existing_subtype, subtype))
keep_headers = []
part_headers = []
for name, value in self._headers:
if name.lower().startswith('content-'):
part_headers.append((name, value))
else:
keep_headers.append((name, value))
if part_headers:
# There is existing content, move it to the first subpart.
part = type(self)(policy=self.policy)
part._headers = part_headers
part._payload = self._payload
self._payload = [part]
else:
self._payload = []
self._headers = keep_headers
self['Content-Type'] = 'multipart/' + subtype
if boundary is not None:
self.set_param('boundary', boundary)
def make_related(self, boundary=None):
self._make_multipart('related', ('alternative', 'mixed'), boundary)
def make_alternative(self, boundary=None):
self._make_multipart('alternative', ('mixed',), boundary)
def make_mixed(self, boundary=None):
self._make_multipart('mixed', (), boundary)
def _add_multipart(self, _subtype, *args, _disp=None, **kw):
if (self.get_content_maintype() != 'multipart' or
self.get_content_subtype() != _subtype):
getattr(self, 'make_' + _subtype)()
part = type(self)(policy=self.policy)
part.set_content(*args, **kw)
if _disp and 'content-disposition' not in part:
part['Content-Disposition'] = _disp
self.attach(part)
def add_related(self, *args, **kw):
self._add_multipart('related', *args, _disp='inline', **kw)
def add_alternative(self, *args, **kw):
self._add_multipart('alternative', *args, **kw)
def add_attachment(self, *args, **kw):
self._add_multipart('mixed', *args, _disp='attachment', **kw)
def clear(self):
self._headers = []
self._payload = None
def clear_content(self):
self._headers = [(n, v) for n, v in self._headers
if not n.lower().startswith('content-')]
self._payload = None
class EmailMessage(MIMEPart):
def set_content(self, *args, **kw):
super().set_content(*args, **kw)
if 'MIME-Version' not in self:
self['MIME-Version'] = '1.0'
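def _example_emailmessage_usage():
    # Illustrative sketch (not part of the original module): builds a small
    # multipart message with the high-level EmailMessage API defined above,
    # then picks the preferred body and the attachments back out of it.
    msg = EmailMessage()
    msg['From'] = '[email protected]'
    msg['To'] = '[email protected]'
    msg['Subject'] = 'demo'
    msg.set_content('plain text body')
    msg.add_alternative('<p>html body</p>', subtype='html')
    msg.add_attachment(b'%PDF-...', maintype='application',
                       subtype='pdf', filename='report.pdf')
    body = msg.get_body(preferencelist=('html', 'plain'))
    attachments = list(msg.iter_attachments())
    return body.get_content_type(), [a.get_filename() for a in attachments]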
| bsd-3-clause | -1,807,882,591,881,800,200 | 38.897995 | 96 | 0.585233 | false |
SwagColoredKitteh/servo | tests/wpt/css-tests/css-text-decor-3_dev/xhtml1print/reference/support/generate-text-emphasis-ruby-tests.py | 829 | 3042 | #!/usr/bin/env python
# - * - coding: UTF-8 - * -
"""
This script generates tests text-emphasis-ruby-001 ~ 004, which test
emphasis marks with ruby in four directions. It writes a list of all
generated tests to stdout in the Mozilla reftest.list format.
"""
from __future__ import unicode_literals
TEST_FILE = 'text-emphasis-ruby-{:03}{}.html'
TEST_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Test: text-emphasis and ruby, {wm}, {pos}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<link rel="help" href="https://drafts.csswg.org/css-text-decor-3/#text-emphasis-position-property">
<meta name="assert" content="emphasis marks are drawn outside the ruby">
<link rel="match" href="text-emphasis-ruby-{index:03}-ref.html">
<p>Pass if the emphasis marks are outside the ruby:</p>
<div style="line-height: 5; writing-mode: {wm}; ruby-position: {ruby_pos}; text-emphasis-position: {posval}">ルビ<span style="text-emphasis: circle">と<ruby>圏<rt>けん</rt>点<rt>てん</rt></ruby>を</span>同時</div>
'''
REF_FILE = 'text-emphasis-ruby-{:03}-ref.html'
REF_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Reference: text-emphasis and ruby, {wm}, {pos}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<style> rtc {{ font-variant-east-asian: inherit; }} </style>
<p>Pass if the emphasis marks are outside the ruby:</p>
<div style="line-height: 5; writing-mode: {wm}; ruby-position: {posval}">ルビ<ruby>と<rtc>●</rtc>圏<rt>けん</rt><rtc>●</rtc>点<rt>てん</rt><rtc>●</rtc>を<rtc>●</rtc></ruby>同時</div>
'''
TEST_CASES = [
('top', 'horizontal-tb', 'over', [
('horizontal-tb', 'over right')]),
('bottom', 'horizontal-tb', 'under', [
('horizontal-tb', 'under right')]),
('right', 'vertical-rl', 'over', [
('vertical-rl', 'over right'),
('vertical-lr', 'over right')]),
('left', 'vertical-rl', 'under', [
('vertical-rl', 'over left'),
('vertical-lr', 'over left')]),
]
SUFFIXES = ['', 'a']
def write_file(filename, content):
with open(filename, 'wb') as f:
f.write(content.encode('UTF-8'))
print("# START tests from {}".format(__file__))
idx = 0
for pos, ref_wm, ruby_pos, subtests in TEST_CASES:
idx += 1
ref_file = REF_FILE.format(idx)
ref_content = REF_TEMPLATE.format(pos=pos, wm=ref_wm, posval=ruby_pos)
write_file(ref_file, ref_content)
suffix = iter(SUFFIXES)
for wm, posval in subtests:
test_file = TEST_FILE.format(idx, next(suffix))
test_content = TEST_TEMPLATE.format(
wm=wm, pos=pos, index=idx, ruby_pos=ruby_pos, posval=posval)
write_file(test_file, test_content)
print("== {} {}".format(test_file, ref_file))
print("# END tests from {}".format(__file__))
| mpl-2.0 | 7,033,308,974,888,286,000 | 42.391304 | 201 | 0.633601 | false |
zmike/servo | components/script/dom/bindings/codegen/parser/tests/test_putForwards.py | 142 | 2341 | def WebIDLTest(parser, harness):
threw = False
try:
parser.parse("""
interface I {
[PutForwards=B] readonly attribute long A;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
parser = parser.reset();
threw = False
try:
parser.parse("""
interface I {
[PutForwards=B] readonly attribute J A;
};
interface J {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
parser = parser.reset();
threw = False
try:
parser.parse("""
interface I {
[PutForwards=B] attribute J A;
};
interface J {
attribute long B;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
parser = parser.reset();
threw = False
try:
parser.parse("""
interface I {
[PutForwards=B] static readonly attribute J A;
};
interface J {
attribute long B;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
parser = parser.reset();
threw = False
try:
parser.parse("""
callback interface I {
[PutForwards=B] readonly attribute J A;
};
interface J {
attribute long B;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
parser = parser.reset();
threw = False
try:
parser.parse("""
interface I {
[PutForwards=C] readonly attribute J A;
[PutForwards=C] readonly attribute J B;
};
interface J {
[PutForwards=D] readonly attribute K C;
};
interface K {
[PutForwards=A] readonly attribute I D;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Should have thrown.")
| mpl-2.0 | 8,253,913,350,364,618,000 | 20.878505 | 60 | 0.465186 | false |
web30s/odoo-9.0c-20160402 | hello/templates/openerp/addons/account/wizard/account_report_aged_partner_balance.py | 24 | 1763 | # -*- coding: utf-8 -*-
import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
from openerp import api, fields, models, _
from openerp.exceptions import UserError
class AccountAgedTrialBalance(models.TransientModel):
_name = 'account.aged.trial.balance'
_inherit = 'account.common.partner.report'
_description = 'Account Aged Trial balance Report'
period_length = fields.Integer(string='Period Length (days)', required=True, default=30)
journal_ids = fields.Many2many('account.journal', string='Journals', required=True)
date_from = fields.Date(default=lambda *a: time.strftime('%Y-%m-%d'))
def _print_report(self, data):
res = {}
data = self.pre_print_report(data)
data['form'].update(self.read(['period_length'])[0])
period_length = data['form']['period_length']
if period_length<=0:
raise UserError(_('You must set a period length greater than 0.'))
if not data['form']['date_from']:
raise UserError(_('You must set a start date.'))
start = datetime.strptime(data['form']['date_from'], "%Y-%m-%d")
for i in range(5)[::-1]:
stop = start - relativedelta(days=period_length - 1)
res[str(i)] = {
'name': (i!=0 and (str((5-(i+1)) * period_length) + '-' + str((5-i) * period_length)) or ('+'+str(4 * period_length))),
'stop': start.strftime('%Y-%m-%d'),
'start': (i!=0 and stop.strftime('%Y-%m-%d') or False),
}
start = stop - relativedelta(days=1)
data['form'].update(res)
return self.env['report'].with_context(landscape=True).get_action(self, 'account.report_agedpartnerbalance', data=data)
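# Illustrative sketch (not part of the original module): the five aging
# buckets that _print_report builds, shown standalone for a period_length
# of 30 days. Bucket '4' covers the most recent 30 days ending at
# date_from; bucket '0' collects everything older than 4 * period_length.
def _example_period_buckets(date_from='2016-04-02', period_length=30):
    start = datetime.strptime(date_from, "%Y-%m-%d")
    buckets = {}
    for i in range(5)[::-1]:
        stop = start - relativedelta(days=period_length - 1)
        buckets[str(i)] = {
            'stop': start.strftime('%Y-%m-%d'),
            'start': stop.strftime('%Y-%m-%d') if i != 0 else False,
        }
        start = stop - relativedelta(days=1)
    return buckets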
| gpl-3.0 | 8,760,537,449,349,430,000 | 42 | 135 | 0.609756 | false |
impromptuartist/impromptuartist.github.io | node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/dotnet.py | 94 | 26751 | # -*- coding: utf-8 -*-
"""
pygments.lexers.dotnet
~~~~~~~~~~~~~~~~~~~~~~
Lexers for .net languages.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
using, this
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
from pygments.util import get_choice_opt
from pygments import unistring as uni
from pygments.lexers.web import XmlLexer
__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
class CSharpLexer(RegexLexer):
"""
For `C# <http://msdn2.microsoft.com/en-us/vcsharp/default.aspx>`_
source code.
Additional options accepted:
`unicodelevel`
Determines which Unicode characters this lexer allows for identifiers.
The possible values are:
* ``none`` -- only the ASCII letters and numbers are allowed. This
is the fastest selection.
* ``basic`` -- all Unicode characters from the specification except
category ``Lo`` are allowed.
* ``full`` -- all Unicode characters as specified in the C# specs
are allowed. Note that this means a considerable slowdown since the
``Lo`` category has more than 40,000 characters in it!
The default value is ``basic``.
*New in Pygments 0.8.*
"""
name = 'C#'
aliases = ['csharp', 'c#']
filenames = ['*.cs']
mimetypes = ['text/x-csharp'] # inferred
flags = re.MULTILINE | re.DOTALL | re.UNICODE
# for the range of allowed unicode characters in identifiers,
# see http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
levels = {
'none': '@?[_a-zA-Z][a-zA-Z0-9_]*',
'basic': ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
'[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
'full': ('@?(?:_|[^' +
uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl') + '])'
+ '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
}
tokens = {}
token_variants = True
for levelname, cs_ident in list(levels.items()):
tokens[levelname] = {
'root': [
# method names
(r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
r'(' + cs_ident + ')' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
(r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
(r'@"(""|[^"])*"', String),
(r'"(\\\\|\\"|[^"\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?"
r"[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'#[ \t]*(if|endif|else|elif|define|undef|'
r'line|error|warning|region|endregion|pragma)\b.*?\n',
Comment.Preproc),
(r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
Keyword)),
(r'(abstract|as|async|await|base|break|case|catch|'
r'checked|const|continue|default|delegate|'
r'do|else|enum|event|explicit|extern|false|finally|'
r'fixed|for|foreach|goto|if|implicit|in|interface|'
r'internal|is|lock|new|null|operator|'
r'out|override|params|private|protected|public|readonly|'
r'ref|return|sealed|sizeof|stackalloc|static|'
r'switch|this|throw|true|try|typeof|'
r'unchecked|unsafe|virtual|void|while|'
r'get|set|new|partial|yield|add|remove|value|alias|ascending|'
r'descending|from|group|into|orderby|select|where|'
r'join|equals)\b', Keyword),
(r'(global)(::)', bygroups(Keyword, Punctuation)),
(r'(bool|byte|char|decimal|double|dynamic|float|int|long|object|'
r'sbyte|short|string|uint|ulong|ushort|var)\b\??', Keyword.Type),
(r'(class|struct)(\s+)', bygroups(Keyword, Text), 'class'),
(r'(namespace|using)(\s+)', bygroups(Keyword, Text), 'namespace'),
(cs_ident, Name),
],
'class': [
(cs_ident, Name.Class, '#pop')
],
'namespace': [
(r'(?=\()', Text, '#pop'), # using (resource)
('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
]
}
def __init__(self, **options):
level = get_choice_opt(options, 'unicodelevel', list(self.tokens.keys()), 'basic')
if level not in self._all_tokens:
# compile the regexes now
self._tokens = self.__class__.process_tokendef(level)
else:
self._tokens = self._all_tokens[level]
RegexLexer.__init__(self, **options)
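def _example_csharp_lexer_usage():
    # Illustrative sketch (not part of the original module): highlighting a
    # C# snippet with the `unicodelevel` option described in the docstring.
    # `highlight` and `HtmlFormatter` are part of the public Pygments API.
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    code = 'class Größe { public int Wert { get; set; } }'
    return highlight(code, CSharpLexer(unicodelevel='full'), HtmlFormatter())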
class NemerleLexer(RegexLexer):
"""
For `Nemerle <http://nemerle.org>`_ source code.
Additional options accepted:
`unicodelevel`
Determines which Unicode characters this lexer allows for identifiers.
The possible values are:
* ``none`` -- only the ASCII letters and numbers are allowed. This
is the fastest selection.
* ``basic`` -- all Unicode characters from the specification except
category ``Lo`` are allowed.
* ``full`` -- all Unicode characters as specified in the C# specs
are allowed. Note that this means a considerable slowdown since the
``Lo`` category has more than 40,000 characters in it!
The default value is ``basic``.
*New in Pygments 1.5.*
"""
name = 'Nemerle'
aliases = ['nemerle']
filenames = ['*.n']
mimetypes = ['text/x-nemerle'] # inferred
flags = re.MULTILINE | re.DOTALL | re.UNICODE
# for the range of allowed unicode characters in identifiers, see
# http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
levels = dict(
none = '@?[_a-zA-Z][a-zA-Z0-9_]*',
basic = ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
'[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
full = ('@?(?:_|[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
'Nl') + '])'
+ '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
)
tokens = {}
token_variants = True
for levelname, cs_ident in list(levels.items()):
tokens[levelname] = {
'root': [
# method names
(r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
r'(' + cs_ident + ')' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
(r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
(r'\$\s*"', String, 'splice-string'),
(r'\$\s*<#', String, 'splice-string2'),
(r'<#', String, 'recursive-string'),
(r'(<\[)\s*(' + cs_ident + ':)?', Keyword),
(r'\]\>', Keyword),
# quasiquotation only
(r'\$' + cs_ident, Name),
(r'(\$)(\()', bygroups(Name, Punctuation),
'splice-string-content'),
(r'[~!%^&*()+=|\[\]:;,.<>/?-]', Punctuation),
(r'[{}]', Punctuation),
(r'@"(""|[^"])*"', String),
(r'"(\\\\|\\"|[^"\n])*["\n]', String),
(r"'\\.'|'[^\\]'", String.Char),
(r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?", Number),
(r'#[ \t]*(if|endif|else|elif|define|undef|'
r'line|error|warning|region|endregion|pragma)\b.*?\n',
Comment.Preproc),
(r'\b(extern)(\s+)(alias)\b', bygroups(Keyword, Text,
Keyword)),
(r'(abstract|and|as|base|catch|def|delegate|'
r'enum|event|extern|false|finally|'
r'fun|implements|interface|internal|'
r'is|macro|match|matches|module|mutable|new|'
r'null|out|override|params|partial|private|'
r'protected|public|ref|sealed|static|'
r'syntax|this|throw|true|try|type|typeof|'
r'virtual|volatile|when|where|with|'
r'assert|assert2|async|break|checked|continue|do|else|'
r'ensures|for|foreach|if|late|lock|new|nolate|'
r'otherwise|regexp|repeat|requires|return|surroundwith|'
r'unchecked|unless|using|while|yield)\b', Keyword),
(r'(global)(::)', bygroups(Keyword, Punctuation)),
(r'(bool|byte|char|decimal|double|float|int|long|object|sbyte|'
r'short|string|uint|ulong|ushort|void|array|list)\b\??',
Keyword.Type),
(r'(:>?)\s*(' + cs_ident + r'\??)',
bygroups(Punctuation, Keyword.Type)),
(r'(class|struct|variant|module)(\s+)',
bygroups(Keyword, Text), 'class'),
(r'(namespace|using)(\s+)', bygroups(Keyword, Text),
'namespace'),
(cs_ident, Name),
],
'class': [
(cs_ident, Name.Class, '#pop')
],
'namespace': [
(r'(?=\()', Text, '#pop'), # using (resource)
('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
],
'splice-string': [
(r'[^"$]', String),
(r'\$' + cs_ident, Name),
(r'(\$)(\()', bygroups(Name, Punctuation),
'splice-string-content'),
(r'\\"', String),
(r'"', String, '#pop')
],
'splice-string2': [
(r'[^#<>$]', String),
(r'\$' + cs_ident, Name),
(r'(\$)(\()', bygroups(Name, Punctuation),
'splice-string-content'),
(r'<#', String, '#push'),
(r'#>', String, '#pop')
],
'recursive-string': [
(r'[^#<>]', String),
(r'<#', String, '#push'),
(r'#>', String, '#pop')
],
'splice-string-content': [
(r'if|match', Keyword),
(r'[~!%^&*+=|\[\]:;,.<>/?-\\"$ ]', Punctuation),
(cs_ident, Name),
(r'\d+', Number),
(r'\(', Punctuation, '#push'),
(r'\)', Punctuation, '#pop')
]
}
def __init__(self, **options):
level = get_choice_opt(options, 'unicodelevel', list(self.tokens.keys()),
'basic')
if level not in self._all_tokens:
# compile the regexes now
self._tokens = self.__class__.process_tokendef(level)
else:
self._tokens = self._all_tokens[level]
RegexLexer.__init__(self, **options)
class BooLexer(RegexLexer):
"""
For `Boo <http://boo.codehaus.org/>`_ source code.
"""
name = 'Boo'
aliases = ['boo']
filenames = ['*.boo']
mimetypes = ['text/x-boo']
tokens = {
'root': [
(r'\s+', Text),
(r'(#|//).*$', Comment.Single),
(r'/[*]', Comment.Multiline, 'comment'),
(r'[]{}:(),.;[]', Punctuation),
(r'\\\n', Text),
(r'\\', Text),
(r'(in|is|and|or|not)\b', Operator.Word),
(r'/(\\\\|\\/|[^/\s])/', String.Regex),
(r'@/(\\\\|\\/|[^/])*/', String.Regex),
(r'=~|!=|==|<<|>>|[-+/*%=<>&^|]', Operator),
(r'(as|abstract|callable|constructor|destructor|do|import|'
r'enum|event|final|get|interface|internal|of|override|'
r'partial|private|protected|public|return|set|static|'
r'struct|transient|virtual|yield|super|and|break|cast|'
r'continue|elif|else|ensure|except|for|given|goto|if|in|'
r'is|isa|not|or|otherwise|pass|raise|ref|try|unless|when|'
r'while|from|as)\b', Keyword),
(r'def(?=\s+\(.*?\))', Keyword),
(r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
(r'(namespace)(\s+)', bygroups(Keyword, Text), 'namespace'),
(r'(?<!\.)(true|false|null|self|__eval__|__switch__|array|'
r'assert|checked|enumerate|filter|getter|len|lock|map|'
r'matrix|max|min|normalArrayIndexing|print|property|range|'
r'rawArrayIndexing|required|typeof|unchecked|using|'
r'yieldAll|zip)\b', Name.Builtin),
(r'"""(\\\\|\\"|.*?)"""', String.Double),
(r'"(\\\\|\\"|[^"]*?)"', String.Double),
(r"'(\\\\|\\'|[^']*?)'", String.Single),
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
(r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
(r'[0-9][0-9\.]*(ms?|d|h|s)', Number),
(r'0\d+', Number.Oct),
(r'0x[a-fA-F0-9]+', Number.Hex),
(r'\d+L', Number.Integer.Long),
(r'\d+', Number.Integer),
],
'comment': [
('/[*]', Comment.Multiline, '#push'),
('[*]/', Comment.Multiline, '#pop'),
('[^/*]', Comment.Multiline),
('[*/]', Comment.Multiline)
],
'funcname': [
('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
],
'classname': [
('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'namespace': [
('[a-zA-Z_][a-zA-Z0-9_.]*', Name.Namespace, '#pop')
]
}
class VbNetLexer(RegexLexer):
"""
For
`Visual Basic.NET <http://msdn2.microsoft.com/en-us/vbasic/default.aspx>`_
source code.
"""
name = 'VB.net'
aliases = ['vb.net', 'vbnet']
filenames = ['*.vb', '*.bas']
mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
(r'^\s*<.*?>', Name.Attribute),
(r'\s+', Text),
(r'\n', Text),
(r'rem\b.*?\n', Comment),
(r"'.*?\n", Comment),
(r'#If\s.*?\sThen|#ElseIf\s.*?\sThen|#End\s+If|#Const|'
r'#ExternalSource.*?\n|#End\s+ExternalSource|'
r'#Region.*?\n|#End\s+Region|#ExternalChecksum',
Comment.Preproc),
(r'[\(\){}!#,.:]', Punctuation),
(r'Option\s+(Strict|Explicit|Compare)\s+'
r'(On|Off|Binary|Text)', Keyword.Declaration),
(r'(?<!\.)(AddHandler|Alias|'
r'ByRef|ByVal|Call|Case|Catch|CBool|CByte|CChar|CDate|'
r'CDec|CDbl|CInt|CLng|CObj|Continue|CSByte|CShort|'
r'CSng|CStr|CType|CUInt|CULng|CUShort|Declare|'
r'Default|Delegate|DirectCast|Do|Each|Else|ElseIf|'
r'EndIf|Erase|Error|Event|Exit|False|Finally|For|'
r'Friend|Get|Global|GoSub|GoTo|Handles|If|'
r'Implements|Inherits|Interface|'
r'Let|Lib|Loop|Me|MustInherit|'
r'MustOverride|MyBase|MyClass|Narrowing|New|Next|'
r'Not|Nothing|NotInheritable|NotOverridable|Of|On|'
r'Operator|Option|Optional|Overloads|Overridable|'
r'Overrides|ParamArray|Partial|Private|Protected|'
r'Public|RaiseEvent|ReadOnly|ReDim|RemoveHandler|Resume|'
r'Return|Select|Set|Shadows|Shared|Single|'
r'Static|Step|Stop|SyncLock|Then|'
r'Throw|To|True|Try|TryCast|Wend|'
r'Using|When|While|Widening|With|WithEvents|'
r'WriteOnly)\b', Keyword),
(r'(?<!\.)End\b', Keyword, 'end'),
(r'(?<!\.)(Dim|Const)\b', Keyword, 'dim'),
(r'(?<!\.)(Function|Sub|Property)(\s+)',
bygroups(Keyword, Text), 'funcname'),
(r'(?<!\.)(Class|Structure|Enum)(\s+)',
bygroups(Keyword, Text), 'classname'),
(r'(?<!\.)(Module|Namespace|Imports)(\s+)',
bygroups(Keyword, Text), 'namespace'),
(r'(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|'
r'Object|SByte|Short|Single|String|Variant|UInteger|ULong|'
r'UShort)\b', Keyword.Type),
(r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
(r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
r'<=|>=|<>|[-&*/\\^+=<>]',
Operator),
('"', String, 'string'),
('[a-zA-Z_][a-zA-Z0-9_]*[%&@!#$]?', Name),
('#.*?#', Literal.Date),
(r'(\d+\.\d*|\d*\.\d+)([fF][+-]?[0-9]+)?', Number.Float),
(r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
(r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
(r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
(r'_\n', Text), # Line continuation
],
'string': [
(r'""', String),
(r'"C?', String, '#pop'),
(r'[^"]+', String),
],
'dim': [
(r'[a-z_][a-z0-9_]*', Name.Variable, '#pop'),
(r'', Text, '#pop'), # any other syntax
],
'funcname': [
(r'[a-z_][a-z0-9_]*', Name.Function, '#pop'),
],
'classname': [
(r'[a-z_][a-z0-9_]*', Name.Class, '#pop'),
],
'namespace': [
(r'[a-z_][a-z0-9_.]*', Name.Namespace, '#pop'),
],
'end': [
(r'\s+', Text),
(r'(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b',
Keyword, '#pop'),
(r'', Text, '#pop'),
]
}
class GenericAspxLexer(RegexLexer):
"""
Lexer for ASP.NET pages.
"""
name = 'aspx-gen'
filenames = []
mimetypes = []
flags = re.DOTALL
tokens = {
'root': [
(r'(<%[@=#]?)(.*?)(%>)', bygroups(Name.Tag, Other, Name.Tag)),
(r'(<script.*?>)(.*?)(</script>)', bygroups(using(XmlLexer),
Other,
using(XmlLexer))),
(r'(.+?)(?=<)', using(XmlLexer)),
(r'.+', using(XmlLexer)),
],
}
#TODO support multiple languages within the same source file
class CSharpAspxLexer(DelegatingLexer):
"""
Lexer for highlighting C# within ASP.NET pages.
"""
name = 'aspx-cs'
aliases = ['aspx-cs']
filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
mimetypes = []
def __init__(self, **options):
super(CSharpAspxLexer, self).__init__(CSharpLexer,GenericAspxLexer,
**options)
def analyse_text(text):
if re.search(r'Page\s*Language="C#"', text, re.I) is not None:
return 0.2
elif re.search(r'script[^>]+language=["\']C#', text, re.I) is not None:
return 0.15
class VbNetAspxLexer(DelegatingLexer):
"""
Lexer for highlighting Visual Basic.net within ASP.NET pages.
"""
name = 'aspx-vb'
aliases = ['aspx-vb']
filenames = ['*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd']
mimetypes = []
def __init__(self, **options):
super(VbNetAspxLexer, self).__init__(VbNetLexer,GenericAspxLexer,
**options)
def analyse_text(text):
if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
return 0.2
elif re.search(r'script[^>]+language=["\']vb', text, re.I) is not None:
return 0.15
# Very close to functional.OcamlLexer
class FSharpLexer(RegexLexer):
"""
For the F# language (version 3.0).
*New in Pygments 1.5.*
"""
name = 'FSharp'
aliases = ['fsharp']
filenames = ['*.fs', '*.fsi']
mimetypes = ['text/x-fsharp']
keywords = [
'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
'while', 'with', 'yield!', 'yield',
]
# Reserved words; cannot hurt to color them as keywords too.
keywords += [
'atomic', 'break', 'checked', 'component', 'const', 'constraint',
'constructor', 'continue', 'eager', 'event', 'external', 'fixed',
'functor', 'include', 'method', 'mixin', 'object', 'parallel',
'process', 'protected', 'pure', 'sealed', 'tailcall', 'trait',
'virtual', 'volatile',
]
keyopts = [
'!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.',
'->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-',
'<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]',
'_', '`', '{', '\|\]', '\|', '}', '~', '<@@', '<@', '=', '@>', '@@>',
]
operators = r'[!$%&*+\./:<=>?@^|~-]'
word_operators = ['and', 'or', 'not']
prefix_syms = r'[!?~]'
infix_syms = r'[=<>@^|&+\*/$%-]'
primitives = [
'sbyte', 'byte', 'char', 'nativeint', 'unativeint', 'float32', 'single',
'float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32',
'uint32', 'int64', 'uint64', 'decimal', 'unit', 'bool', 'string',
'list', 'exn', 'obj', 'enum',
]
# See http://msdn.microsoft.com/en-us/library/dd233181.aspx and/or
# http://fsharp.org/about/files/spec.pdf for reference. Good luck.
tokens = {
'escape-sequence': [
(r'\\[\\\"\'ntbrafv]', String.Escape),
(r'\\[0-9]{3}', String.Escape),
(r'\\u[0-9a-fA-F]{4}', String.Escape),
(r'\\U[0-9a-fA-F]{8}', String.Escape),
],
'root': [
(r'\s+', Text),
(r'\(\)|\[\]', Name.Builtin.Pseudo),
(r'\b(?<!\.)([A-Z][A-Za-z0-9_\']*)(?=\s*\.)',
Name.Namespace, 'dotted'),
(r'\b([A-Z][A-Za-z0-9_\']*)', Name),
(r'///.*?\n', String.Doc),
(r'//.*?\n', Comment.Single),
(r'\(\*(?!\))', Comment, 'comment'),
(r'@"', String, 'lstring'),
(r'"""', String, 'tqs'),
(r'"', String, 'string'),
(r'\b(open|module)(\s+)([a-zA-Z0-9_.]+)',
bygroups(Keyword, Text, Name.Namespace)),
(r'\b(let!?)(\s+)([a-zA-Z0-9_]+)',
bygroups(Keyword, Text, Name.Variable)),
(r'\b(type)(\s+)([a-zA-Z0-9_]+)',
bygroups(Keyword, Text, Name.Class)),
(r'\b(member|override)(\s+)([a-zA-Z0-9_]+)(\.)([a-zA-Z0-9_]+)',
bygroups(Keyword, Text, Name, Punctuation, Name.Function)),
(r'\b(%s)\b' % '|'.join(keywords), Keyword),
(r'(%s)' % '|'.join(keyopts), Operator),
(r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
(r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
(r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
(r'#[ \t]*(if|endif|else|line|nowarn|light|\d+)\b.*?\n',
Comment.Preproc),
(r"[^\W\d][\w']*", Name),
(r'\d[\d_]*[uU]?[yslLnQRZINGmM]?', Number.Integer),
(r'0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?', Number.Hex),
(r'0[oO][0-7][0-7_]*[uU]?[yslLn]?', Number.Oct),
(r'0[bB][01][01_]*[uU]?[yslLn]?', Number.Binary),
(r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?',
Number.Float),
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
String.Char),
(r"'.'", String.Char),
(r"'", Keyword), # a stray quote is another syntax element
(r'[~?][a-z][\w\']*:', Name.Variable),
],
'dotted': [
(r'\s+', Text),
(r'\.', Punctuation),
(r'[A-Z][A-Za-z0-9_\']*(?=\s*\.)', Name.Namespace),
(r'[A-Z][A-Za-z0-9_\']*', Name, '#pop'),
(r'[a-z_][A-Za-z0-9_\']*', Name, '#pop'),
],
'comment': [
(r'[^(*)@"]+', Comment),
(r'\(\*', Comment, '#push'),
(r'\*\)', Comment, '#pop'),
# comments cannot be closed within strings in comments
(r'@"', String, 'lstring'),
(r'"""', String, 'tqs'),
(r'"', String, 'string'),
(r'[(*)@]', Comment),
],
'string': [
(r'[^\\"]+', String),
include('escape-sequence'),
(r'\\\n', String),
(r'\n', String), # newlines are allowed in any string
(r'"B?', String, '#pop'),
],
'lstring': [
(r'[^"]+', String),
(r'\n', String),
(r'""', String),
(r'"B?', String, '#pop'),
],
'tqs': [
(r'[^"]+', String),
(r'\n', String),
(r'"""B?', String, '#pop'),
(r'"', String),
],
}
| mit | 1,984,150,438,384,094,200 | 38.867362 | 90 | 0.447572 | false |
cortedeltimo/SickRage | lib/mako/pygen.py | 61 | 9952 | # mako/pygen.py
# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""utilities for generating and formatting literal Python code."""
import re
from mako import exceptions
class PythonPrinter(object):
def __init__(self, stream):
# indentation counter
self.indent = 0
# a stack storing information about why we incremented
# the indentation counter, to help us determine if we
# should decrement it
self.indent_detail = []
# the string of whitespace multiplied by the indent
# counter to produce a line
self.indentstring = " "
# the stream we are writing to
self.stream = stream
# current line number
self.lineno = 1
# a list of lines that represents a buffered "block" of code,
# which can be later printed relative to an indent level
self.line_buffer = []
self.in_indent_lines = False
self._reset_multi_line_flags()
# mapping of generated python lines to template
# source lines
self.source_map = {}
def _update_lineno(self, num):
self.lineno += num
def start_source(self, lineno):
if self.lineno not in self.source_map:
self.source_map[self.lineno] = lineno
def write_blanks(self, num):
self.stream.write("\n" * num)
self._update_lineno(num)
def write_indented_block(self, block):
"""print a line or lines of python which already contain indentation.
The indentation of the total block of lines will be adjusted to that of
the current indent level."""
self.in_indent_lines = False
for l in re.split(r'\r?\n', block):
self.line_buffer.append(l)
self._update_lineno(1)
def writelines(self, *lines):
"""print a series of lines of python."""
for line in lines:
self.writeline(line)
def writeline(self, line):
"""print a line of python, indenting it according to the current
indent level.
this also adjusts the indentation counter according to the
content of the line.
"""
if not self.in_indent_lines:
self._flush_adjusted_lines()
self.in_indent_lines = True
if (
line is None or
re.match(r"^\s*#", line) or
re.match(r"^\s*$", line)
):
hastext = False
else:
hastext = True
is_comment = line and len(line) and line[0] == '#'
# see if this line should decrease the indentation level
if (
not is_comment and
(not hastext or self._is_unindentor(line))
):
if self.indent > 0:
self.indent -= 1
# if the indent_detail stack is empty, the user
# probably put extra closures - the resulting
                # module won't compile.
if len(self.indent_detail) == 0:
raise exceptions.SyntaxException(
"Too many whitespace closures")
self.indent_detail.pop()
if line is None:
return
# write the line
self.stream.write(self._indent_line(line) + "\n")
self._update_lineno(len(line.split("\n")))
# see if this line should increase the indentation level.
        # note that a line can both decrease (before printing) and
# then increase (after printing) the indentation level.
if re.search(r":[ \t]*(?:#.*)?$", line):
# increment indentation count, and also
# keep track of what the keyword was that indented us,
# if it is a python compound statement keyword
# where we might have to look for an "unindent" keyword
match = re.match(r"^\s*(if|try|elif|while|for|with)", line)
if match:
                # it's a "compound" keyword, so we will check for "unindentors"
indentor = match.group(1)
self.indent += 1
self.indent_detail.append(indentor)
else:
indentor = None
                # it's not a "compound" keyword, but let's also
# test for valid Python keywords that might be indenting us,
# else assume its a non-indenting line
m2 = re.match(r"^\s*(def|class|else|elif|except|finally)",
line)
if m2:
self.indent += 1
self.indent_detail.append(indentor)
def close(self):
"""close this printer, flushing any remaining lines."""
self._flush_adjusted_lines()
def _is_unindentor(self, line):
"""return true if the given line is an 'unindentor',
relative to the last 'indent' event received.
"""
# no indentation detail has been pushed on; return False
if len(self.indent_detail) == 0:
return False
indentor = self.indent_detail[-1]
# the last indent keyword we grabbed is not a
# compound statement keyword; return False
if indentor is None:
return False
        # if the current line doesn't have one of the "unindentor" keywords,
# return False
match = re.match(r"^\s*(else|elif|except|finally).*\:", line)
if not match:
return False
# whitespace matches up, we have a compound indentor,
# and this line has an unindentor, this
# is probably good enough
return True
        # should we decide that it's not good enough, here's
# more stuff to check.
# keyword = match.group(1)
# match the original indent keyword
# for crit in [
# (r'if|elif', r'else|elif'),
# (r'try', r'except|finally|else'),
# (r'while|for', r'else'),
# ]:
# if re.match(crit[0], indentor) and re.match(crit[1], keyword):
# return True
# return False
def _indent_line(self, line, stripspace=''):
"""indent the given line according to the current indent level.
stripspace is a string of space that will be truncated from the
start of the line before indenting."""
return re.sub(r"^%s" % stripspace, self.indentstring
* self.indent, line)
def _reset_multi_line_flags(self):
"""reset the flags which would indicate we are in a backslashed
or triple-quoted section."""
self.backslashed, self.triplequoted = False, False
def _in_multi_line(self, line):
"""return true if the given line is part of a multi-line block,
via backslash or triple-quote."""
# we are only looking for explicitly joined lines here, not
# implicit ones (i.e. brackets, braces etc.). this is just to
# guard against the possibility of modifying the space inside of
# a literal multiline string with unfortunately placed
# whitespace
current_state = (self.backslashed or self.triplequoted)
if re.search(r"\\$", line):
self.backslashed = True
else:
self.backslashed = False
triples = len(re.findall(r"\"\"\"|\'\'\'", line))
if triples == 1 or triples % 2 != 0:
self.triplequoted = not self.triplequoted
return current_state
def _flush_adjusted_lines(self):
stripspace = None
self._reset_multi_line_flags()
for entry in self.line_buffer:
if self._in_multi_line(entry):
self.stream.write(entry + "\n")
else:
entry = entry.expandtabs()
if stripspace is None and re.search(r"^[ \t]*[^# \t]", entry):
stripspace = re.match(r"^([ \t]*)", entry).group(1)
self.stream.write(self._indent_line(entry, stripspace) + "\n")
self.line_buffer = []
self._reset_multi_line_flags()
def adjust_whitespace(text):
"""remove the left-whitespace margin of a block of Python code."""
state = [False, False]
(backslashed, triplequoted) = (0, 1)
def in_multi_line(line):
start_state = (state[backslashed] or state[triplequoted])
if re.search(r"\\$", line):
state[backslashed] = True
else:
state[backslashed] = False
def match(reg, t):
m = re.match(reg, t)
if m:
return m, t[len(m.group(0)):]
else:
return None, t
while line:
if state[triplequoted]:
m, line = match(r"%s" % state[triplequoted], line)
if m:
state[triplequoted] = False
else:
m, line = match(r".*?(?=%s|$)" % state[triplequoted], line)
else:
m, line = match(r'#', line)
if m:
return start_state
m, line = match(r"\"\"\"|\'\'\'", line)
if m:
state[triplequoted] = m.group(0)
continue
m, line = match(r".*?(?=\"\"\"|\'\'\'|#|$)", line)
return start_state
def _indent_line(line, stripspace=''):
return re.sub(r"^%s" % stripspace, '', line)
lines = []
stripspace = None
for line in re.split(r'\r?\n', text):
if in_multi_line(line):
lines.append(line)
else:
line = line.expandtabs()
if stripspace is None and re.search(r"^[ \t]*[^# \t]", line):
stripspace = re.match(r"^([ \t]*)", line).group(1)
lines.append(_indent_line(line, stripspace))
return "\n".join(lines)
| gpl-3.0 | 4,617,808,127,890,937,000 | 31.844884 | 79 | 0.548432 | false |
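A minimal sketch of driving the PythonPrinter defined above (editor's illustration; the buffer and the emitted lines are invented, and the import path assumes the usual mako.pygen location):

from io import StringIO
from mako.pygen import PythonPrinter, adjust_whitespace   # assumed module path

buf = StringIO()
printer = PythonPrinter(buf)
printer.writeline("def greet(name):")
printer.writeline("return 'hello, %s' % name")  # indented automatically after the ':' line
printer.close()
print(buf.getvalue())

# adjust_whitespace() strips a common left margin from an embedded Python block:
print(adjust_whitespace("    if flag:\n        x = 1\n"))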
sushantgoel/Flask | Work/TriviaMVA/TriviaMVA/env/Lib/site-packages/pip/_vendor/requests/packages/chardet/compat.py | 2943 | 1157 | ######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
# Ian Cordasco - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
if sys.version_info < (3, 0):
base_str = (str, unicode)
else:
base_str = (bytes, str)
def wrap_ord(a):
if sys.version_info < (3, 0) and isinstance(a, base_str):
return ord(a)
else:
return a
| apache-2.0 | -3,122,168,218,244,358,700 | 33.029412 | 69 | 0.656007 | false |
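The helper above exists so byte-oriented loops behave the same on both Python lines; a tiny sketch of the intended use (editor's illustration; the import location is an assumption):

from chardet.compat import wrap_ord   # assumed import location

data = b'\xc0\xa1'
# Iterating bytes yields ints on Python 3 and 1-char strings on Python 2;
# wrap_ord normalizes both cases to integer code points.
print([wrap_ord(byte) for byte in data])   # [192, 161]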
ping/youtube-dl | youtube_dl/extractor/ynet.py | 64 | 1807 | # coding: utf-8
from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote_plus
class YnetIE(InfoExtractor):
_VALID_URL = r'https?://(?:.+?\.)?ynet\.co\.il/(?:.+?/)?0,7340,(?P<id>L(?:-[0-9]+)+),00\.html'
_TESTS = [
{
'url': 'http://hot.ynet.co.il/home/0,7340,L-11659-99244,00.html',
'info_dict': {
'id': 'L-11659-99244',
'ext': 'flv',
'title': 'איש לא יודע מאיפה באנו',
'thumbnail': r're:^https?://.*\.jpg',
}
}, {
'url': 'http://hot.ynet.co.il/home/0,7340,L-8859-84418,00.html',
'info_dict': {
'id': 'L-8859-84418',
'ext': 'flv',
'title': "צפו: הנשיקה הלוהטת של תורגי' ויוליה פלוטקין",
'thumbnail': r're:^https?://.*\.jpg',
}
}
]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
content = compat_urllib_parse_unquote_plus(self._og_search_video_url(webpage))
config = json.loads(self._search_regex(r'config=({.+?})$', content, 'video config'))
f4m_url = config['clip']['url']
title = self._og_search_title(webpage)
m = re.search(r'ynet - HOT -- (["\']+)(?P<title>.+?)\1', title)
if m:
title = m.group('title')
formats = self._extract_f4m_formats(f4m_url, video_id)
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'formats': formats,
'thumbnail': self._og_search_thumbnail(webpage),
}
| unlicense | 6,232,628,829,191,414,000 | 32.730769 | 98 | 0.497719 | false |
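A quick, editor-written check of the _VALID_URL pattern above against one of the class's own test URLs (illustrative only; the import path is an assumption):

import re
from youtube_dl.extractor.ynet import YnetIE   # assumed module path

url = 'http://hot.ynet.co.il/home/0,7340,L-11659-99244,00.html'
match = re.match(YnetIE._VALID_URL, url)
assert match is not None and match.group('id') == 'L-11659-99244'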
ossdemura/django-miniblog | Lib/site-packages/django/contrib/postgres/fields/ranges.py | 109 | 6308 | import json
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange, Range
from django.contrib.postgres import forms, lookups
from django.db import models
from django.utils import six
from .utils import AttributeSetter
__all__ = [
'RangeField', 'IntegerRangeField', 'BigIntegerRangeField',
'FloatRangeField', 'DateTimeRangeField', 'DateRangeField',
]
class RangeField(models.Field):
empty_strings_allowed = False
def __init__(self, *args, **kwargs):
# Initializing base_field here ensures that its model matches the model for self.
if hasattr(self, 'base_field'):
self.base_field = self.base_field()
super(RangeField, self).__init__(*args, **kwargs)
@property
def model(self):
try:
return self.__dict__['model']
except KeyError:
raise AttributeError("'%s' object has no attribute 'model'" % self.__class__.__name__)
@model.setter
def model(self, model):
self.__dict__['model'] = model
self.base_field.model = model
def get_prep_value(self, value):
if value is None:
return None
elif isinstance(value, Range):
return value
elif isinstance(value, (list, tuple)):
return self.range_type(value[0], value[1])
return value
def to_python(self, value):
if isinstance(value, six.string_types):
# Assume we're deserializing
vals = json.loads(value)
for end in ('lower', 'upper'):
if end in vals:
vals[end] = self.base_field.to_python(vals[end])
value = self.range_type(**vals)
elif isinstance(value, (list, tuple)):
value = self.range_type(value[0], value[1])
return value
def set_attributes_from_name(self, name):
super(RangeField, self).set_attributes_from_name(name)
self.base_field.set_attributes_from_name(name)
def value_to_string(self, obj):
value = self.value_from_object(obj)
if value is None:
return None
if value.isempty:
return json.dumps({"empty": True})
base_field = self.base_field
result = {"bounds": value._bounds}
for end in ('lower', 'upper'):
val = getattr(value, end)
if val is None:
result[end] = None
else:
obj = AttributeSetter(base_field.attname, val)
result[end] = base_field.value_to_string(obj)
return json.dumps(result)
def formfield(self, **kwargs):
kwargs.setdefault('form_class', self.form_field)
return super(RangeField, self).formfield(**kwargs)
class IntegerRangeField(RangeField):
base_field = models.IntegerField
range_type = NumericRange
form_field = forms.IntegerRangeField
def db_type(self, connection):
return 'int4range'
class BigIntegerRangeField(RangeField):
base_field = models.BigIntegerField
range_type = NumericRange
form_field = forms.IntegerRangeField
def db_type(self, connection):
return 'int8range'
class FloatRangeField(RangeField):
base_field = models.FloatField
range_type = NumericRange
form_field = forms.FloatRangeField
def db_type(self, connection):
return 'numrange'
class DateTimeRangeField(RangeField):
base_field = models.DateTimeField
range_type = DateTimeTZRange
form_field = forms.DateTimeRangeField
def db_type(self, connection):
return 'tstzrange'
class DateRangeField(RangeField):
base_field = models.DateField
range_type = DateRange
form_field = forms.DateRangeField
def db_type(self, connection):
return 'daterange'
RangeField.register_lookup(lookups.DataContains)
RangeField.register_lookup(lookups.ContainedBy)
RangeField.register_lookup(lookups.Overlap)
class RangeContainedBy(models.Lookup):
lookup_name = 'contained_by'
type_mapping = {
'integer': 'int4range',
'bigint': 'int8range',
'double precision': 'numrange',
'date': 'daterange',
'timestamp with time zone': 'tstzrange',
}
def as_sql(self, qn, connection):
field = self.lhs.output_field
if isinstance(field, models.FloatField):
sql = '%s::numeric <@ %s::{}'.format(self.type_mapping[field.db_type(connection)])
else:
sql = '%s <@ %s::{}'.format(self.type_mapping[field.db_type(connection)])
lhs, lhs_params = self.process_lhs(qn, connection)
rhs, rhs_params = self.process_rhs(qn, connection)
params = lhs_params + rhs_params
return sql % (lhs, rhs), params
def get_prep_lookup(self):
return RangeField().get_prep_value(self.rhs)
models.DateField.register_lookup(RangeContainedBy)
models.DateTimeField.register_lookup(RangeContainedBy)
models.IntegerField.register_lookup(RangeContainedBy)
models.BigIntegerField.register_lookup(RangeContainedBy)
models.FloatField.register_lookup(RangeContainedBy)
@RangeField.register_lookup
class FullyLessThan(lookups.PostgresSimpleLookup):
lookup_name = 'fully_lt'
operator = '<<'
@RangeField.register_lookup
class FullGreaterThan(lookups.PostgresSimpleLookup):
lookup_name = 'fully_gt'
operator = '>>'
@RangeField.register_lookup
class NotLessThan(lookups.PostgresSimpleLookup):
lookup_name = 'not_lt'
operator = '&>'
@RangeField.register_lookup
class NotGreaterThan(lookups.PostgresSimpleLookup):
lookup_name = 'not_gt'
operator = '&<'
@RangeField.register_lookup
class AdjacentToLookup(lookups.PostgresSimpleLookup):
lookup_name = 'adjacent_to'
operator = '-|-'
@RangeField.register_lookup
class RangeStartsWith(models.Transform):
lookup_name = 'startswith'
function = 'lower'
@property
def output_field(self):
return self.lhs.output_field.base_field
@RangeField.register_lookup
class RangeEndsWith(models.Transform):
lookup_name = 'endswith'
function = 'upper'
@property
def output_field(self):
return self.lhs.output_field.base_field
@RangeField.register_lookup
class IsEmpty(models.Transform):
lookup_name = 'isempty'
function = 'isempty'
output_field = models.BooleanField()
| mit | 5,687,126,680,121,333,000 | 27.542986 | 98 | 0.653932 | false |
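A sketch of how the range fields and lookups registered above are typically used from a model (editor's illustration; the Event model, app label and queries are invented and assume a configured PostgreSQL database):

from django.contrib.postgres.fields import IntegerRangeField
from django.db import models
from psycopg2.extras import NumericRange

class Event(models.Model):
    ages = IntegerRangeField()

    class Meta:
        app_label = 'events'   # assumed app label for the sketch

# "contains" checks membership of a single value, "overlap" compares ranges,
# and the "startswith"/"endswith" transforms expose the range bounds.
Event.objects.filter(ages__contains=25)
Event.objects.filter(ages__overlap=NumericRange(20, 30))
Event.objects.filter(ages__startswith__gte=18)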
aptivate/ihpresultsweb | ihp/submissions/migrations/0018_auto__add_agencyprofile__add_unique_agencyprofile_agency_language.py | 1 | 20882 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'AgencyProfile'
db.create_table('submissions_agencyprofile', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('agency', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['submissions.Agency'])),
('language', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['submissions.Language'])),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('submissions', ['AgencyProfile'])
# Adding unique constraint on 'AgencyProfile', fields ['agency', 'language']
db.create_unique('submissions_agencyprofile', ['agency_id', 'language_id'])
for agency in orm.Agency.objects.all():
for language in orm.Language.objects.all():
profile, _ = orm.AgencyProfile.objects.get_or_create(agency=agency, language=language)
profile.description = agency.description
profile.save()
def backwards(self, orm):
# Removing unique constraint on 'AgencyProfile', fields ['agency', 'language']
db.delete_unique('submissions_agencyprofile', ['agency_id', 'language_id'])
# Deleting model 'AgencyProfile'
db.delete_table('submissions_agencyprofile')
models = {
'submissions.agency': {
'Meta': {'object_name': 'Agency'},
'agency': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '15'})
},
'submissions.agencycountries': {
'Meta': {'object_name': 'AgencyCountries'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Agency']"}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'submissions.agencyprofile': {
'Meta': {'unique_together': "(['agency', 'language'],)", 'object_name': 'AgencyProfile'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Agency']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Language']"})
},
'submissions.agencytargets': {
'Meta': {'object_name': 'AgencyTargets'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Agency']", 'null': 'True', 'blank': 'True'}),
'arrow_criterion_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'arrow_criterion_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'tick_criterion_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'tick_criterion_value': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'submissions.agencyworkingdraft': {
'Meta': {'object_name': 'AgencyWorkingDraft'},
'agency': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['submissions.Agency']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'submissions.country': {
'Meta': {'object_name': 'Country'},
'country': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'submissions.country8dpfix': {
'Meta': {'unique_together': "(['agency', 'country'],)", 'object_name': 'Country8DPFix'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Agency']"}),
'baseline_progress': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_progress': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
'submissions.countryexclusion': {
'Meta': {'unique_together': "(['country', 'question_number'],)", 'object_name': 'CountryExclusion'},
'baseline_applicable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_applicable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'question_number': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'submissions.countryscorecardoverridecomments': {
'Meta': {'unique_together': "(['country', 'language'],)", 'object_name': 'CountryScorecardOverrideComments'},
'cd2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']"}),
'dbr2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'hmis2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jar4': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Language']"}),
'pf2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'pfm2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'pr2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rf2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rf3': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ta2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'submissions.countrytargets': {
'Meta': {'object_name': 'CountryTargets'},
'arrow_criterion_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'arrow_criterion_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'tick_criterion_type': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'tick_criterion_value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
'submissions.countryworkingdraft': {
'Meta': {'object_name': 'CountryWorkingDraft'},
'country': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['submissions.Country']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'submissions.dpquestion': {
'Meta': {'object_name': 'DPQuestion'},
'baseline_value': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'baseline_year': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'comments': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_value': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'latest_year': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'question_number': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Submission']"})
},
'submissions.dpscorecardcomments': {
'Meta': {'unique_together': "(['agency', 'language'],)", 'object_name': 'DPScorecardComments'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Agency']"}),
'er1': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er2a': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er2b': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er2c': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er3': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er4': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er5a': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er5b': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er5c': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er6': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er7': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er8': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Language']"})
},
'submissions.dpscorecardratings': {
'Meta': {'object_name': 'DPScorecardRatings'},
'agency': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['submissions.Agency']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'r1': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r2a': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r2b': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r2c': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r3': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r4': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r5a': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r5b': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r5c': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r6': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r7': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r8': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
'submissions.dpscorecardsummary': {
'Meta': {'unique_together': "(['agency', 'language'],)", 'object_name': 'DPScorecardSummary'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Agency']"}),
'erb1': ('django.db.models.fields.TextField', [], {}),
'erb2': ('django.db.models.fields.TextField', [], {}),
'erb3': ('django.db.models.fields.TextField', [], {}),
'erb4': ('django.db.models.fields.TextField', [], {}),
'erb5': ('django.db.models.fields.TextField', [], {}),
'erb6': ('django.db.models.fields.TextField', [], {}),
'erb7': ('django.db.models.fields.TextField', [], {}),
'erb8': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Language']"})
},
'submissions.govquestion': {
'Meta': {'object_name': 'GovQuestion'},
'baseline_value': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'baseline_year': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'comments': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_value': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'latest_year': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'question_number': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Submission']"})
},
'submissions.govscorecardcomments': {
'Meta': {'unique_together': "(['country', 'language'],)", 'object_name': 'GovScorecardComments'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']"}),
'er1': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er2a': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er2b': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er3': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er4': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er5a': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er5b': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er6': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er7': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'er8': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Language']"})
},
'submissions.govscorecardratings': {
'Meta': {'object_name': 'GovScorecardRatings'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']"}),
'hmis1': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'hsm1': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'hsm4': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'hsp1': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'hsp2': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jar1': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r1': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r2a': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r2b': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r3': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r4': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r5a': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r5b': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r6': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r7': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'r8': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
'submissions.language': {
'Meta': {'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'submissions.mdgdata': {
'Meta': {'object_name': 'MDGData'},
'arrow': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'baseline_value': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'baseline_year': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_value': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'latest_year': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True'}),
'mdg_target': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'submissions.notapplicable': {
'Meta': {'object_name': 'NotApplicable'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'variation': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'submissions.submission': {
'Meta': {'object_name': 'Submission'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Agency']", 'null': 'True'}),
'completed_by': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Country']"}),
'date_submitted': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'docversion': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job_title': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'})
}
}
complete_apps = ['submissions']
| gpl-3.0 | -7,729,627,572,642,085,000 | 75.772059 | 145 | 0.543387 | false |
agaldona/odoomrp-wip-1 | mrp_project_link_mto/__openerp__.py | 10 | 1266 | # -*- coding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "MRP Project Link - MTO",
"summary": "Link production with projects for MTO",
"version": "8.0.1.0.0",
"depends": [
"mrp_project",
],
"author": "OdooMRP team,"
"AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
"category": "Manufacturing",
'data': [
"views/procurement_order_view.xml",
],
'installable': True,
}
| agpl-3.0 | -7,804,172,982,593,342,000 | 36.235294 | 78 | 0.563981 | false |
wenxer/peewee | playhouse/tests/test_apis.py | 19 | 1170 | from peewee import Node
from peewee import *
from playhouse.tests.base import PeeweeTestCase
class TestNodeAPI(PeeweeTestCase):
def test_extend(self):
@Node.extend()
def add(self, lhs, rhs):
return lhs + rhs
n = Node()
self.assertEqual(n.add(4, 2), 6)
delattr(Node, 'add')
self.assertRaises(AttributeError, lambda: n.add(2, 4))
def test_clone(self):
@Node.extend(clone=True)
def hack(self, alias):
self._negated = True
self._alias = alias
n = Node()
c = n.hack('magic!')
self.assertFalse(n._negated)
self.assertEqual(n._alias, None)
self.assertTrue(c._negated)
self.assertEqual(c._alias, 'magic!')
class TestModel(Model):
data = CharField()
hacked = TestModel.data.hack('nugget')
self.assertFalse(TestModel.data._negated)
self.assertEqual(TestModel.data._alias, None)
self.assertTrue(hacked._negated)
self.assertEqual(hacked._alias, 'nugget')
delattr(Node, 'hack')
self.assertRaises(AttributeError, lambda: TestModel.data.hack())
| mit | -7,561,303,767,391,358,000 | 28.25 | 72 | 0.596581 | false |
Towerthousand/tvhack | server.py | 1 | 2475 | #!/usr/bin/env python3
from flask import Flask
from flask import jsonify
from flask import render_template
from flask.ext.cors import CORS
from firebase import firebase
app = Flask(__name__)
CORS(app)
firebase = firebase.FirebaseApplication('https://tvhack.firebaseio.com', None)
_calling = False
@app.route('/')
def render_index():
""" Returns the DirectTV UI page """
return render_template('index.html')
@app.route('/call')
def render_call():
    """ Returns the call page """
    return render_template('call.html')
@app.route('/message')
def render_message():
""" Returns the message page """
return render_template('message.html')
@app.route('/api/isCaredBy/<uid>/', methods=['GET'])
def usersCaredBy(uid):
""" Returns the user objects taking care of a given user """
users = firebase.get('/users-tv/' + uid + '/isCaredBy', None)
users_info = []
for user in users:
users_info.append(firebase.get('/users-carer', user))
return jsonify({'users': users_info})
@app.route('/api/user-tv/<uid>/', methods=['GET'])
def user_tv(uid):
""" Returns the requested tv user object """
user = firebase.get('/users-tv', uid)
return jsonify(user)
@app.route('/api/user-carer/<uid>/', methods=['GET'])
def user_carer(uid):
""" Returns the requested carer user object """
user = firebase.get('/users-carer', uid)
return jsonify(user)
@app.route('/api/stayOnline/<uid>/', methods=['GET'])
def stay_alive(uid):
""" Notifies the server that uid is still online """
global _calling
return 'OK'
@app.route('/api/call/<uid>/', methods=['GET'])
def call(uid):
""" calls a given uid """
global _calling
_calling = True
return 'OK'
@app.route('/api/uncall/<uid>/', methods=['GET'])
def uncall(uid):
""" uncalls a given uid """
global _calling
_calling = False
return 'OK'
@app.route('/api/isCalling/<uid>/', methods=['GET'])
def is_calling(uid):
""" Returns true if uid is being called """
return jsonify({"isCalling": _calling})
@app.route('/api/remind/<uid>/', methods=['GET'])
def remid(uid):
return 'OK'
@app.route('/api/notify/<id>/', methods=['GET'])
def notify(id):
firebase.patch('/push-notifications', {0: True})
return 'OK'
@app.route('/api/denotify/<id>/', methods=['GET'])
def denotify(id):
firebase.patch('/push-notifications', {0: False})
return 'OK'
if __name__ == "__main__":
app.run(debug=True)
| mit | -7,619,877,272,132,671,000 | 22.130841 | 78 | 0.632727 | false |
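Illustrative client calls against the routes defined above (editor's sketch; the host, port and uid value are assumptions, and the Flask server must be running):

import requests

base = 'http://localhost:5000'
requests.get(base + '/api/call/42/')                      # mark uid 42 as being called
print(requests.get(base + '/api/isCalling/42/').json())   # {'isCalling': True}
requests.get(base + '/api/uncall/42/')                    # clear the call flag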
foone/7gen | code/basictexture.py | 1 | 1894 | #!/usr/bin/env python
from error import SaveError,LoadError,CompressedError
from zipfs import fsopen,isZip,GetFileNameInZip
import os
import pygame
class TextureConverter:
def __init__(self,palette=None):
if palette is not None:
self.palette_surf=palette
else:
self.palette_surf=pygame.image.load('code/palette.bmp')
def palettesMatch(self,othersurf):
return othersurf.get_palette()==self.palette_surf.get_palette()
def quantizeImage(self,infile,outfile,dither):
import quantizer2.quantizer
return quantizer2.quantizer.quantize(infile,outfile,'palette.bmp',dither)
def fixPalette(self,surf):
newsurf=pygame.Surface(surf.get_size(),0,surf)
newsurf.set_palette(self.palette_surf.get_palette())
newsurf.blit(surf,(0,0)) #palette mapping should save us.
return newsurf
def getTexture(self,filename,dither=False,changedir=True):
gdither=dither
if filename[0] in ('-','+'):
gdither=filename[0]=='+'
filename=filename[1:]
if not os.path.exists(filename):
raise SaveError('Image %s does not exist!' % (filename))
try:
texsurf=pygame.image.load(fsopen(filename,'rb'))
if texsurf.get_bitsize()==8 and self.palettesMatch(texsurf):
return texsurf
except pygame.error:
pass
# Try to quantize
if changedir:
olddir=os.getcwd()
os.chdir('..')
try:
qfilename='quant_temp_in.bmp'
pygame.image.save(texsurf,qfilename)
if not self.quantizeImage(qfilename,'quant_temp.tga',gdither):
os.unlink(qfilename)
raise SaveError('Quantizing image failed!')
else:
texsurf=pygame.image.load('quant_temp.tga')
texsurf=self.fixPalette(texsurf)
os.unlink('quant_temp.tga')
os.unlink(qfilename)
if changedir:
os.chdir(olddir)
return texsurf
except ImportError:
if isZip(filename):
os.unlink(qfilename)
if changedir:
os.chdir(olddir)
raise# SaveError('Bad palette, and missing quantizer!')
| gpl-2.0 | 5,942,606,552,221,485,000 | 30.566667 | 75 | 0.722281 | false |
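A minimal sketch of using the TextureConverter above (editor's illustration; it assumes pygame, the project-local error/zipfs modules, code/palette.bmp, a sample input.bmp and, for non-palettized images, the quantizer2 package are all available):

import pygame
from basictexture import TextureConverter   # assumed local module name

pygame.init()
converter = TextureConverter()
# changedir=False keeps the working directory untouched while converting.
surface = converter.getTexture('input.bmp', dither=True, changedir=False)
print(surface.get_size(), surface.get_bitsize())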
cytec/SickRage | lib/chardet/euckrfreq.py | 342 | 13546 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials include literature and computer technology
# 128 --> 0.79
# 256 --> 0.92
# 512 --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ratio = 512 / (2350-512) = 0.279.
#
# Typical Distribution Ratio
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
EUCKR_TABLE_SIZE = 2352
# Char to FreqOrder table ,
EUCKR_CHAR_TO_FREQ_ORDER = (
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
)
| gpl-3.0 | 416,433,358,791,484,700 | 68.466667 | 92 | 0.696663 | false |
haihabi/simpy | simpy/core/tests_configuration.py | 1 | 3303 | from .base_function import read_enum, build_function_input_dict
from simpy.core.result.container import ResultContainer, Result
class TestConfiguration(object):
def __init__(self, name, enable, config_name, overwrite):
if not isinstance(enable, bool):
raise Exception('enable must be a boolean')
self.name = name
self.enable = enable
self.config_name = config_name
self.overwrite = overwrite
def enable_test(self):
self.enable = True
class Test(object):
def __init__(self, test_config, param_config):
self.test_config = test_config
# overwrite params
param_config = param_config.copy() # create a clean copy of params
param_config.update(test_config.overwrite) # perform overwrite
self.param_config = param_config
def get_name(self):
return self.test_config.name
def get_param(self, name):
return self.param_config.get(name)
def get_params(self):
return self.param_config
def read_enum(self, input_enum):
return read_enum(input_enum, self.param_config)
def run_function_from_test(self, input_function):
function_inputs = build_function_input_dict(input_function, self.param_config)
return input_function(**function_inputs)
def generate_test_result(self, *args):
return Result(self, *args)
def has_same_param(self, input_test):
if input_test.param_config.__len__() != self.param_config.__len__():
return False
for k in input_test.param_config.keys():
if self.param_config.get(k) is None:
return False
return True
class TestsRunner(object):
def __init__(self, test_list, global_param, param_configurations):
self.test_list = test_list
self.global_param = global_param
self.param_configurations = param_configurations
self.current = 0
def __iter__(self):
return self
def __next__(self): # Python 3: def __next__(self)
if self.current >= self.test_list.__len__():
raise StopIteration
else:
while True:
if self.test_list[self.current].enable:
tc = self.test_list[self.current]
if self.param_configurations.get(tc.config_name) is None:
raise Exception('cant find param config')
t = Test(tc, self.param_configurations.get(tc.config_name))
print("Running Test:" + t.get_name())
self.current += 1
return t
self.current += 1
if self.current >= self.test_list.__len__():
raise StopIteration
next = __next__ # Python 2
def get_global_param(self, param_name):
return self.global_param.get(param_name)
def generate_result_container(self):
return ResultContainer(self.global_param)
def run_function_from_global(self, input_function):
function_inputs = build_function_input_dict(input_function, self.global_param)
return input_function(**function_inputs)
def generate_test_iterator(self):
return TestsRunner(self.test_list, self.global_param, self.param_configurations)
| mit | -3,384,892,642,855,937,500 | 34.138298 | 88 | 0.611262 | false |
pataquets/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/net/web_mock.py | 184 | 2241 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import StringIO
class MockWeb(object):
def __init__(self, urls=None):
self.urls = urls or {}
self.urls_fetched = []
def get_binary(self, url, convert_404_to_None=False):
self.urls_fetched.append(url)
if url in self.urls:
return self.urls[url]
return "MOCK Web result, convert 404 to None=%s" % convert_404_to_None
# FIXME: Classes which are using Browser probably want to use Web instead.
class MockBrowser(object):
params = {}
def open(self, url):
pass
def select_form(self, name):
pass
def __setitem__(self, key, value):
self.params[key] = value
def submit(self):
return StringIO.StringIO()
| bsd-3-clause | -4,405,033,378,034,579,500 | 37.637931 | 78 | 0.727354 | false |
fawazn/Resume-Viewer | ResumeViewer/settings.py | 1 | 2237 | """
Django settings for ResumeViewer project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# import django
# django.setup()
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__))
TEMPLATE_DIRS = (PROJECT_PATH,)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '1i6+^md=xhgar&&tas%$@xc9jn4rh%0tbb$r_oy-&d2u4&jtg('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'ResumeViewer'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ResumeViewer.urls'
WSGI_APPLICATION = 'ResumeViewer.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
| mit | 5,723,165,481,105,619,000 | 23.712644 | 71 | 0.691104 | false |
okfn/ofs | setup.py | 1 | 1171 | from setuptools import setup, find_packages
try:
    fo = open('README.rst')
    long_description = fo.read()
    fo.close()
except IOError:
    long_description = "OFS - provides plugin-orientated low-level blobstore."
setup(
name="ofs",
version="0.4.3",
description="OFS - provides plugin-orientated low-level blobstore.",
long_description=long_description,
author="Ben O'Steen, Friedrich Lindenberg, Rufus Pollock",
author_email="[email protected]",
license="http://www.apache.org/licenses/LICENSE-2.0",
url="http://github.com/okfn/ofs",
packages=find_packages(),
test_suite = "test.test.TestPairtreeOFS",
install_requires = ["argparse", "six", "boto"],
entry_points="""
[ofs.backend]
pairtree = ofs.local.pairtreestore:PTOFS
mdpairtree= ofs.local.metadatastore:MDOFS
s3 = ofs.remote.botostore:S3OFS
google = ofs.remote.botostore:GSOFS
s3bounce = ofs.remote.proxystore:S3Bounce
archive.org = ofs.remote.botostore:ArchiveOrgOFS
reststore = ofs.remote.reststore:RESTOFS
swift = ofs.remote.swiftstore:SwiftOFS
[console_scripts]
ofs_upload = ofs.command:ofs
"""
)
| mit | 53,485,295,088,569,420 | 30.648649 | 82 | 0.684885 | false |
goyalankit/po-compiler | object_files/networkx-1.8.1/networkx/algorithms/connectivity/connectivity.py | 21 | 20381 | # -*- coding: utf-8 -*-
"""
Flow based connectivity algorithms
"""
import itertools
import networkx as nx
__author__ = '\n'.join(['Jordi Torrents <[email protected]>'])
__all__ = [ 'average_node_connectivity',
'local_node_connectivity',
'node_connectivity',
'local_edge_connectivity',
'edge_connectivity',
'all_pairs_node_connectivity_matrix',
'dominating_set',
]
def average_node_connectivity(G):
r"""Returns the average connectivity of a graph G.
The average connectivity `\bar{\kappa}` of a graph G is the average
of local node connectivity over all pairs of nodes of G [1]_ .
.. math::
\bar{\kappa}(G) = \frac{\sum_{u,v} \kappa_{G}(u,v)}{{n \choose 2}}
Parameters
----------
G : NetworkX graph
Undirected graph
Returns
-------
K : float
Average node connectivity
See also
--------
local_node_connectivity
node_connectivity
local_edge_connectivity
edge_connectivity
max_flow
ford_fulkerson
References
----------
.. [1] Beineke, L., O. Oellermann, and R. Pippert (2002). The average
connectivity of a graph. Discrete mathematics 252(1-3), 31-45.
http://www.sciencedirect.com/science/article/pii/S0012365X01001807
"""
if G.is_directed():
iter_func = itertools.permutations
else:
iter_func = itertools.combinations
H, mapping = _aux_digraph_node_connectivity(G)
num = 0.
den = 0.
for u,v in iter_func(G, 2):
den += 1
num += local_node_connectivity(G, u, v, aux_digraph=H, mapping=mapping)
if den == 0: # Null Graph
return 0
return num/den
def _aux_digraph_node_connectivity(G):
r""" Creates a directed graph D from an undirected graph G to compute flow
based node connectivity.
For an undirected graph G having `n` nodes and `m` edges we derive a
directed graph D with 2n nodes and 2m+n arcs by replacing each
original node `v` with two nodes `vA`,`vB` linked by an (internal)
arc in D. Then for each edge (u,v) in G we add two arcs (uB,vA)
and (vB,uA) in D. Finally we set the attribute capacity = 1 for each
arc in D [1].
For a directed graph having `n` nodes and `m` arcs we derive a
directed graph D with 2n nodes and m+n arcs by replacing each
original node `v` with two nodes `vA`,`vB` linked by an (internal)
arc `(vA,vB)` in D. Then for each arc (u,v) in G we add one arc (uB,vA)
in D. Finally we set the attribute capacity = 1 for each arc in D.
References
----------
.. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. in Brandes and
Erlebach, 'Network Analysis: Methodological Foundations', Lecture
Notes in Computer Science, Volume 3418, Springer-Verlag, 2005.
http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
"""
directed = G.is_directed()
mapping = {}
D = nx.DiGraph()
for i,node in enumerate(G):
mapping[node] = i
D.add_node('%dA' % i,id=node)
D.add_node('%dB' % i,id=node)
D.add_edge('%dA' % i, '%dB' % i, capacity=1)
edges = []
for (source, target) in G.edges():
edges.append(('%sB' % mapping[source], '%sA' % mapping[target]))
if not directed:
edges.append(('%sB' % mapping[target], '%sA' % mapping[source]))
D.add_edges_from(edges, capacity=1)
return D, mapping
def local_node_connectivity(G, s, t, aux_digraph=None, mapping=None):
r"""Computes local node connectivity for nodes s and t.
Local node connectivity for two non adjacent nodes s and t is the
minimum number of nodes that must be removed (along with their incident
edges) to disconnect them.
This is a flow based implementation of node connectivity. We compute the
    maximum flow on an auxiliary digraph built from the original input
graph (see below for details). This is equal to the local node
connectivity because the value of a maximum s-t-flow is equal to the
capacity of a minimum s-t-cut (Ford and Fulkerson theorem) [1]_ .
Parameters
----------
G : NetworkX graph
Undirected graph
s : node
Source node
t : node
Target node
aux_digraph : NetworkX DiGraph (default=None)
Auxiliary digraph to compute flow based node connectivity. If None
        the auxiliary digraph is built.
mapping : dict (default=None)
Dictionary with a mapping of node names in G and in the auxiliary digraph.
Returns
-------
K : integer
local node connectivity for nodes s and t
Examples
--------
>>> # Platonic icosahedral graph has node connectivity 5
>>> # for each non adjacent node pair
>>> G = nx.icosahedral_graph()
>>> nx.local_node_connectivity(G,0,6)
5
Notes
-----
This is a flow based implementation of node connectivity. We compute the
maximum flow using the Ford and Fulkerson algorithm on an auxiliary digraph
    built from the original input graph:
For an undirected graph G having `n` nodes and `m` edges we derive a
directed graph D with 2n nodes and 2m+n arcs by replacing each
original node `v` with two nodes `v_A`, `v_B` linked by an (internal)
arc in `D`. Then for each edge (`u`, `v`) in G we add two arcs
(`u_B`, `v_A`) and (`v_B`, `u_A`) in `D`. Finally we set the attribute
capacity = 1 for each arc in `D` [1]_ .
For a directed graph G having `n` nodes and `m` arcs we derive a
directed graph `D` with `2n` nodes and `m+n` arcs by replacing each
original node `v` with two nodes `v_A`, `v_B` linked by an (internal)
arc `(v_A, v_B)` in D. Then for each arc `(u,v)` in G we add one arc
`(u_B,v_A)` in `D`. Finally we set the attribute capacity = 1 for
each arc in `D`.
This is equal to the local node connectivity because the value of
a maximum s-t-flow is equal to the capacity of a minimum s-t-cut (Ford
and Fulkerson theorem).
See also
--------
node_connectivity
all_pairs_node_connectivity_matrix
local_edge_connectivity
edge_connectivity
max_flow
ford_fulkerson
References
----------
.. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. in Brandes and
Erlebach, 'Network Analysis: Methodological Foundations', Lecture
Notes in Computer Science, Volume 3418, Springer-Verlag, 2005.
http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf
"""
if aux_digraph is None or mapping is None:
H, mapping = _aux_digraph_node_connectivity(G)
else:
H = aux_digraph
return nx.max_flow(H,'%sB' % mapping[s], '%sA' % mapping[t])
def node_connectivity(G, s=None, t=None):
r"""Returns node connectivity for a graph or digraph G.
Node connectivity is equal to the minimum number of nodes that
must be removed to disconnect G or render it trivial. If source
and target nodes are provided, this function returns the local node
connectivity: the minimum number of nodes that must be removed to break
all paths from source to target in G.
    This is a flow based implementation. The algorithm is based on solving
    a number of max-flow problems (i.e. local st-node connectivity,
see local_node_connectivity) to determine the capacity of the
minimum cut on an auxiliary directed network that corresponds to the
minimum node cut of G. It handles both directed and undirected graphs.
Parameters
----------
G : NetworkX graph
Undirected graph
s : node
Source node. Optional (default=None)
t : node
Target node. Optional (default=None)
Returns
-------
K : integer
Node connectivity of G, or local node connectivity if source
and target were provided
Examples
--------
>>> # Platonic icosahedral graph is 5-node-connected
>>> G = nx.icosahedral_graph()
>>> nx.node_connectivity(G)
5
>>> nx.node_connectivity(G, 3, 7)
5
Notes
-----
This is a flow based implementation of node connectivity. The
    algorithm works by solving `O((n-\delta-1+\delta(\delta-1)/2))` max-flow
    problems on an auxiliary digraph, where `\delta` is the minimum degree
of G. For details about the auxiliary digraph and the computation of
local node connectivity see local_node_connectivity.
This implementation is based on algorithm 11 in [1]_. We use the Ford
and Fulkerson algorithm to compute max flow (see ford_fulkerson).
See also
--------
local_node_connectivity
all_pairs_node_connectivity_matrix
local_edge_connectivity
edge_connectivity
max_flow
ford_fulkerson
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
# Local node connectivity
if s is not None and t is not None:
if s not in G:
raise nx.NetworkXError('node %s not in graph' % s)
if t not in G:
raise nx.NetworkXError('node %s not in graph' % t)
return local_node_connectivity(G, s, t)
# Global node connectivity
if G.is_directed():
if not nx.is_weakly_connected(G):
return 0
iter_func = itertools.permutations
# I think that it is necessary to consider both predecessors
# and successors for directed graphs
def neighbors(v):
return itertools.chain.from_iterable([G.predecessors_iter(v),
G.successors_iter(v)])
else:
if not nx.is_connected(G):
return 0
iter_func = itertools.combinations
neighbors = G.neighbors_iter
# Initial guess \kappa = n - 1
K = G.order()-1
deg = G.degree()
min_deg = min(deg.values())
v = next(n for n,d in deg.items() if d==min_deg)
# Reuse the auxiliary digraph
H, mapping = _aux_digraph_node_connectivity(G)
# compute local node connectivity with all non-neighbors nodes
for w in set(G) - set(neighbors(v)) - set([v]):
K = min(K, local_node_connectivity(G, v, w,
aux_digraph=H, mapping=mapping))
# Same for non adjacent pairs of neighbors of v
for x,y in iter_func(neighbors(v), 2):
if y in G[x]: continue
K = min(K, local_node_connectivity(G, x, y,
aux_digraph=H, mapping=mapping))
return K
def all_pairs_node_connectivity_matrix(G):
"""Return a numpy 2d ndarray with node connectivity between all pairs
of nodes.
Parameters
----------
G : NetworkX graph
Undirected graph
Returns
-------
K : 2d numpy ndarray
node connectivity between all pairs of nodes.
See also
--------
local_node_connectivity
node_connectivity
local_edge_connectivity
edge_connectivity
max_flow
ford_fulkerson
"""
try:
import numpy
except ImportError:
raise ImportError(\
"all_pairs_node_connectivity_matrix() requires NumPy")
n = G.order()
M = numpy.zeros((n, n), dtype=int)
# Create auxiliary Digraph
D, mapping = _aux_digraph_node_connectivity(G)
if G.is_directed():
for u, v in itertools.permutations(G, 2):
K = local_node_connectivity(G, u, v, aux_digraph=D, mapping=mapping)
M[mapping[u],mapping[v]] = K
else:
for u, v in itertools.combinations(G, 2):
K = local_node_connectivity(G, u, v, aux_digraph=D, mapping=mapping)
M[mapping[u],mapping[v]] = M[mapping[v],mapping[u]] = K
return M
def _aux_digraph_edge_connectivity(G):
"""Auxiliary digraph for computing flow based edge connectivity
If the input graph is undirected, we replace each edge (u,v) with
two reciprocal arcs (u,v) and (v,u) and then we set the attribute
'capacity' for each arc to 1. If the input graph is directed we simply
add the 'capacity' attribute. Part of algorithm 1 in [1]_ .
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. (this is a
chapter, look for the reference of the book).
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
if G.is_directed():
if nx.get_edge_attributes(G, 'capacity'):
return G
D = G.copy()
capacity = dict((e,1) for e in D.edges())
nx.set_edge_attributes(D, 'capacity', capacity)
return D
else:
D = G.to_directed()
capacity = dict((e,1) for e in D.edges())
nx.set_edge_attributes(D, 'capacity', capacity)
return D
def local_edge_connectivity(G, u, v, aux_digraph=None):
r"""Returns local edge connectivity for nodes s and t in G.
Local edge connectivity for two nodes s and t is the minimum number
of edges that must be removed to disconnect them.
This is a flow based implementation of edge connectivity. We compute the
    maximum flow on an auxiliary digraph built from the original
network (see below for details). This is equal to the local edge
connectivity because the value of a maximum s-t-flow is equal to the
capacity of a minimum s-t-cut (Ford and Fulkerson theorem) [1]_ .
Parameters
----------
G : NetworkX graph
Undirected or directed graph
s : node
Source node
t : node
Target node
aux_digraph : NetworkX DiGraph (default=None)
Auxiliary digraph to compute flow based edge connectivity. If None
        the auxiliary digraph is built.
Returns
-------
K : integer
local edge connectivity for nodes s and t
Examples
--------
>>> # Platonic icosahedral graph has edge connectivity 5
>>> # for each non adjacent node pair
>>> G = nx.icosahedral_graph()
>>> nx.local_edge_connectivity(G,0,6)
5
Notes
-----
This is a flow based implementation of edge connectivity. We compute the
maximum flow using the Ford and Fulkerson algorithm on an auxiliary digraph
    built from the original graph:
If the input graph is undirected, we replace each edge (u,v) with
two reciprocal arcs `(u,v)` and `(v,u)` and then we set the attribute
'capacity' for each arc to 1. If the input graph is directed we simply
add the 'capacity' attribute. This is an implementation of algorithm 1
in [1]_.
The maximum flow in the auxiliary network is equal to the local edge
connectivity because the value of a maximum s-t-flow is equal to the
capacity of a minimum s-t-cut (Ford and Fulkerson theorem).
See also
--------
local_node_connectivity
node_connectivity
edge_connectivity
max_flow
ford_fulkerson
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
if aux_digraph is None:
H = _aux_digraph_edge_connectivity(G)
else:
H = aux_digraph
return nx.max_flow(H, u, v)
def edge_connectivity(G, s=None, t=None):
r"""Returns the edge connectivity of the graph or digraph G.
The edge connectivity is equal to the minimum number of edges that
must be removed to disconnect G or render it trivial. If source
and target nodes are provided, this function returns the local edge
connectivity: the minimum number of edges that must be removed to
break all paths from source to target in G.
    This is a flow based implementation. The algorithm is based on solving
    a number of max-flow problems (i.e. local st-edge connectivity, see
local_edge_connectivity) to determine the capacity of the minimum
cut on an auxiliary directed network that corresponds to the minimum
edge cut of G. It handles both directed and undirected graphs.
Parameters
----------
G : NetworkX graph
Undirected or directed graph
s : node
Source node. Optional (default=None)
t : node
Target node. Optional (default=None)
Returns
-------
K : integer
Edge connectivity for G, or local edge connectivity if source
and target were provided
Examples
--------
>>> # Platonic icosahedral graph is 5-edge-connected
>>> G = nx.icosahedral_graph()
>>> nx.edge_connectivity(G)
5
Notes
-----
This is a flow based implementation of global edge connectivity. For
undirected graphs the algorithm works by finding a 'small' dominating
set of nodes of G (see algorithm 7 in [1]_ ) and computing local max flow
(see local_edge_connectivity) between an arbitrary node in the dominating
set and the rest of nodes in it. This is an implementation of
algorithm 6 in [1]_ .
For directed graphs, the algorithm does n calls to the max flow function.
This is an implementation of algorithm 8 in [1]_ . We use the Ford and
Fulkerson algorithm to compute max flow (see ford_fulkerson).
See also
--------
local_node_connectivity
node_connectivity
local_edge_connectivity
max_flow
ford_fulkerson
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
# Local edge connectivity
if s is not None and t is not None:
if s not in G:
raise nx.NetworkXError('node %s not in graph' % s)
if t not in G:
raise nx.NetworkXError('node %s not in graph' % t)
return local_edge_connectivity(G, s, t)
# Global edge connectivity
if G.is_directed():
# Algorithm 8 in [1]
if not nx.is_weakly_connected(G):
return 0
# initial value for lambda is min degree (\delta(G))
L = min(G.degree().values())
# reuse auxiliary digraph
H = _aux_digraph_edge_connectivity(G)
nodes = G.nodes()
n = len(nodes)
for i in range(n):
try:
L = min(L, local_edge_connectivity(G, nodes[i],
nodes[i+1], aux_digraph=H))
except IndexError: # last node!
L = min(L, local_edge_connectivity(G, nodes[i],
nodes[0], aux_digraph=H))
return L
else: # undirected
# Algorithm 6 in [1]
if not nx.is_connected(G):
return 0
# initial value for lambda is min degree (\delta(G))
L = min(G.degree().values())
# reuse auxiliary digraph
H = _aux_digraph_edge_connectivity(G)
# A dominating set is \lambda-covering
# We need a dominating set with at least two nodes
for node in G:
D = dominating_set(G, start_with=node)
v = D.pop()
if D: break
else:
# in complete graphs the dominating sets will always be of one node
# thus we return min degree
return L
for w in D:
L = min(L, local_edge_connectivity(G, v, w, aux_digraph=H))
return L
def dominating_set(G, start_with=None):
# Algorithm 7 in [1]
all_nodes = set(G)
if start_with is None:
v = set(G).pop() # pick a node
else:
if start_with not in G:
raise nx.NetworkXError('node %s not in G' % start_with)
v = start_with
D = set([v])
ND = set([nbr for nbr in G[v]])
other = all_nodes - ND - D
while other:
w = other.pop()
D.add(w)
ND.update([nbr for nbr in G[w] if nbr not in D])
other = all_nodes - ND - D
return D
def is_dominating_set(G, nbunch):
# Proposed by Dan on the mailing list
allnodes=set(G)
testset=set(n for n in nbunch if n in G)
nbrs=set()
for n in testset:
nbrs.update(G[n])
if nbrs - allnodes: # some nodes left--not dominating
return False
else:
return True
| apache-2.0 | -8,427,922,534,663,341,000 | 32.576606 | 88 | 0.618959 | false |
RJVB/audacity | lib-src/lv2/sord/waflib/Logs.py | 134 | 4714 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,re,traceback,sys
try:
import threading
except ImportError:
pass
else:
wlock=threading.Lock()
class sync_stream(object):
def __init__(self,stream):
self.stream=stream
self.encoding=self.stream.encoding
def write(self,txt):
try:
wlock.acquire()
self.stream.write(txt)
self.stream.flush()
finally:
wlock.release()
def fileno(self):
return self.stream.fileno()
def flush(self):
self.stream.flush()
def isatty(self):
return self.stream.isatty()
_nocolor=os.environ.get('NOCOLOR','no')not in('no','0','false')
try:
if not _nocolor:
import waflib.ansiterm
except ImportError:
pass
if not os.environ.get('NOSYNC',False):
if id(sys.stdout)==id(sys.__stdout__):
sys.stdout=sync_stream(sys.stdout)
sys.stderr=sync_stream(sys.stderr)
import logging
LOG_FORMAT="%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT="%H:%M:%S"
zones=''
verbose=0
colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',}
got_tty=not os.environ.get('TERM','dumb')in['dumb','emacs']
if got_tty:
try:
got_tty=sys.stderr.isatty()and sys.stdout.isatty()
except AttributeError:
got_tty=False
if(not got_tty and os.environ.get('TERM','dumb')!='msys')or _nocolor:
colors_lst['USE']=False
def get_term_cols():
return 80
try:
import struct,fcntl,termios
except ImportError:
pass
else:
if got_tty:
def get_term_cols_real():
dummy_lines,cols=struct.unpack("HHHH",fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[:2]
return cols
try:
get_term_cols_real()
except Exception:
pass
else:
get_term_cols=get_term_cols_real
get_term_cols.__doc__="""
Get the console width in characters.
:return: the number of characters per line
:rtype: int
"""
def get_color(cl):
if not colors_lst['USE']:return''
return colors_lst.get(cl,'')
class color_dict(object):
def __getattr__(self,a):
return get_color(a)
def __call__(self,a):
return get_color(a)
colors=color_dict()
re_log=re.compile(r'(\w+): (.*)',re.M)
class log_filter(logging.Filter):
def __init__(self,name=None):
pass
def filter(self,rec):
rec.c1=colors.PINK
rec.c2=colors.NORMAL
rec.zone=rec.module
if rec.levelno>=logging.INFO:
if rec.levelno>=logging.ERROR:
rec.c1=colors.RED
elif rec.levelno>=logging.WARNING:
rec.c1=colors.YELLOW
else:
rec.c1=colors.GREEN
return True
m=re_log.match(rec.msg)
if m:
rec.zone=m.group(1)
rec.msg=m.group(2)
if zones:
return getattr(rec,'zone','')in zones or'*'in zones
elif not verbose>2:
return False
return True
class formatter(logging.Formatter):
def __init__(self):
logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT)
def format(self,rec):
if rec.levelno>=logging.WARNING or rec.levelno==logging.INFO:
try:
msg=rec.msg.decode('utf-8')
except Exception:
msg=rec.msg
return'%s%s%s'%(rec.c1,msg,rec.c2)
return logging.Formatter.format(self,rec)
log=None
def debug(*k,**kw):
if verbose:
k=list(k)
k[0]=k[0].replace('\n',' ')
global log
log.debug(*k,**kw)
def error(*k,**kw):
global log
log.error(*k,**kw)
if verbose>2:
st=traceback.extract_stack()
if st:
st=st[:-1]
buf=[]
for filename,lineno,name,line in st:
buf.append(' File "%s", line %d, in %s'%(filename,lineno,name))
if line:
buf.append(' %s'%line.strip())
if buf:log.error("\n".join(buf))
def warn(*k,**kw):
global log
log.warn(*k,**kw)
def info(*k,**kw):
global log
log.info(*k,**kw)
def init_log():
global log
log=logging.getLogger('waflib')
log.handlers=[]
log.filters=[]
hdlr=logging.StreamHandler()
hdlr.setFormatter(formatter())
log.addHandler(hdlr)
log.addFilter(log_filter())
log.setLevel(logging.DEBUG)
def make_logger(path,name):
logger=logging.getLogger(name)
hdlr=logging.FileHandler(path,'w')
formatter=logging.Formatter('%(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)
return logger
def make_mem_logger(name,to_log,size=10000):
from logging.handlers import MemoryHandler
logger=logging.getLogger(name)
hdlr=MemoryHandler(size,target=to_log)
formatter=logging.Formatter('%(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.memhandler=hdlr
logger.setLevel(logging.DEBUG)
return logger
def pprint(col,str,label='',sep='\n'):
sys.stderr.write("%s%s%s %s%s"%(colors(col),str,colors.NORMAL,label,sep))
| gpl-2.0 | -7,297,165,476,239,302,000 | 25.784091 | 228 | 0.68286 | false |
xyyangkun/git-repo | subcmds/help.py | 23 | 5249 | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import re
import sys
from formatter import AbstractFormatter, DumbWriter
from color import Coloring
from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
import gitc_utils
class Help(PagedCommand, MirrorSafeCommand):
common = False
helpSummary = "Display detailed help on a command"
helpUsage = """
%prog [--all|command]
"""
helpDescription = """
Displays detailed usage information about a command.
"""
def _PrintAllCommands(self):
print('usage: repo COMMAND [ARGS]')
print('The complete list of recognized repo commands are:')
commandNames = list(sorted(self.commands))
maxlen = 0
for name in commandNames:
maxlen = max(maxlen, len(name))
fmt = ' %%-%ds %%s' % maxlen
for name in commandNames:
command = self.commands[name]
try:
summary = command.helpSummary.strip()
except AttributeError:
summary = ''
print(fmt % (name, summary))
print("See 'repo help <command>' for more information on a "
'specific command.')
def _PrintCommonCommands(self):
print('usage: repo COMMAND [ARGS]')
print('The most commonly used repo commands are:')
def gitc_supported(cmd):
if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand):
return True
if self.manifest.isGitcClient:
return True
if isinstance(cmd, GitcClientCommand):
return False
if gitc_utils.get_gitc_manifest_dir():
return True
return False
commandNames = list(sorted([name
for name, command in self.commands.items()
if command.common and gitc_supported(command)]))
maxlen = 0
for name in commandNames:
maxlen = max(maxlen, len(name))
fmt = ' %%-%ds %%s' % maxlen
for name in commandNames:
command = self.commands[name]
try:
summary = command.helpSummary.strip()
except AttributeError:
summary = ''
print(fmt % (name, summary))
print(
"See 'repo help <command>' for more information on a specific command.\n"
"See 'repo help --all' for a complete list of recognized commands.")
def _PrintCommandHelp(self, cmd):
class _Out(Coloring):
def __init__(self, gc):
Coloring.__init__(self, gc, 'help')
self.heading = self.printer('heading', attr='bold')
self.wrap = AbstractFormatter(DumbWriter())
def _PrintSection(self, heading, bodyAttr):
try:
body = getattr(cmd, bodyAttr)
except AttributeError:
return
if body == '' or body is None:
return
self.nl()
self.heading('%s', heading)
self.nl()
self.heading('%s', ''.ljust(len(heading), '-'))
self.nl()
me = 'repo %s' % cmd.NAME
body = body.strip()
body = body.replace('%prog', me)
asciidoc_hdr = re.compile(r'^\n?([^\n]{1,})\n([=~-]{2,})$')
for para in body.split("\n\n"):
if para.startswith(' '):
self.write('%s', para)
self.nl()
self.nl()
continue
m = asciidoc_hdr.match(para)
if m:
title = m.group(1)
section_type = m.group(2)
if section_type[0] in ('=', '-'):
p = self.heading
else:
def _p(fmt, *args):
self.write(' ')
self.heading(fmt, *args)
p = _p
p('%s', title)
self.nl()
p('%s', ''.ljust(len(title), section_type[0]))
self.nl()
continue
self.wrap.add_flowing_data(para)
self.wrap.end_paragraph(1)
self.wrap.end_paragraph(0)
out = _Out(self.manifest.globalConfig)
out._PrintSection('Summary', 'helpSummary')
cmd.OptionParser.print_help()
out._PrintSection('Description', 'helpDescription')
def _Options(self, p):
p.add_option('-a', '--all',
dest='show_all', action='store_true',
help='show the complete list of commands')
def Execute(self, opt, args):
if len(args) == 0:
if opt.show_all:
self._PrintAllCommands()
else:
self._PrintCommonCommands()
elif len(args) == 1:
name = args[0]
try:
cmd = self.commands[name]
except KeyError:
print("repo: '%s' is not a repo command." % name, file=sys.stderr)
sys.exit(1)
cmd.manifest = self.manifest
self._PrintCommandHelp(cmd)
else:
self._PrintCommandHelp(self)
| apache-2.0 | 8,828,714,842,065,362,000 | 28.488764 | 94 | 0.596685 | false |
keedio/hue | desktop/core/ext-py/python-openid-2.2.5/openid/test/test_association.py | 85 | 6439 | from openid.test import datadriven
import unittest
from openid.message import Message, BARE_NS, OPENID_NS, OPENID2_NS
from openid import association
import time
from openid import cryptutil
import warnings
class AssociationSerializationTest(unittest.TestCase):
def test_roundTrip(self):
issued = int(time.time())
lifetime = 600
assoc = association.Association(
'handle', 'secret', issued, lifetime, 'HMAC-SHA1')
s = assoc.serialize()
assoc2 = association.Association.deserialize(s)
self.failUnlessEqual(assoc.handle, assoc2.handle)
self.failUnlessEqual(assoc.issued, assoc2.issued)
self.failUnlessEqual(assoc.secret, assoc2.secret)
self.failUnlessEqual(assoc.lifetime, assoc2.lifetime)
self.failUnlessEqual(assoc.assoc_type, assoc2.assoc_type)
from openid.server.server import \
DiffieHellmanSHA1ServerSession, \
DiffieHellmanSHA256ServerSession, \
PlainTextServerSession
from openid.consumer.consumer import \
DiffieHellmanSHA1ConsumerSession, \
DiffieHellmanSHA256ConsumerSession, \
PlainTextConsumerSession
from openid.dh import DiffieHellman
def createNonstandardConsumerDH():
nonstandard_dh = DiffieHellman(1315291, 2)
return DiffieHellmanSHA1ConsumerSession(nonstandard_dh)
class DiffieHellmanSessionTest(datadriven.DataDrivenTestCase):
secrets = [
'\x00' * 20,
'\xff' * 20,
' ' * 20,
'This is a secret....',
]
session_factories = [
(DiffieHellmanSHA1ConsumerSession, DiffieHellmanSHA1ServerSession),
(createNonstandardConsumerDH, DiffieHellmanSHA1ServerSession),
(PlainTextConsumerSession, PlainTextServerSession),
]
def generateCases(cls):
return [(c, s, sec)
for c, s in cls.session_factories
for sec in cls.secrets]
generateCases = classmethod(generateCases)
def __init__(self, csess_fact, ssess_fact, secret):
datadriven.DataDrivenTestCase.__init__(self, csess_fact.__name__)
self.secret = secret
self.csess_fact = csess_fact
self.ssess_fact = ssess_fact
def runOneTest(self):
csess = self.csess_fact()
msg = Message.fromOpenIDArgs(csess.getRequest())
ssess = self.ssess_fact.fromMessage(msg)
check_secret = csess.extractSecret(
Message.fromOpenIDArgs(ssess.answer(self.secret)))
self.failUnlessEqual(self.secret, check_secret)
class TestMakePairs(unittest.TestCase):
"""Check the key-value formatting methods of associations.
"""
def setUp(self):
self.message = m = Message(OPENID2_NS)
m.updateArgs(OPENID2_NS, {
'mode': 'id_res',
'identifier': '=example',
'signed': 'identifier,mode',
'sig': 'cephalopod',
})
m.updateArgs(BARE_NS, {'xey': 'value'})
self.assoc = association.Association.fromExpiresIn(
3600, '{sha1}', 'very_secret', "HMAC-SHA1")
def testMakePairs(self):
"""Make pairs using the OpenID 1.x type signed list."""
pairs = self.assoc._makePairs(self.message)
expected = [
('identifier', '=example'),
('mode', 'id_res'),
]
self.failUnlessEqual(pairs, expected)
class TestMac(unittest.TestCase):
def setUp(self):
self.pairs = [('key1', 'value1'),
('key2', 'value2')]
def test_sha1(self):
assoc = association.Association.fromExpiresIn(
3600, '{sha1}', 'very_secret', "HMAC-SHA1")
expected = ('\xe0\x1bv\x04\xf1G\xc0\xbb\x7f\x9a\x8b'
'\xe9\xbc\xee}\\\xe5\xbb7*')
sig = assoc.sign(self.pairs)
self.failUnlessEqual(sig, expected)
if cryptutil.SHA256_AVAILABLE:
def test_sha256(self):
assoc = association.Association.fromExpiresIn(
3600, '{sha256SA}', 'very_secret', "HMAC-SHA256")
expected = ('\xfd\xaa\xfe;\xac\xfc*\x988\xad\x05d6-\xeaVy'
'\xd5\xa5Z.<\xa9\xed\x18\x82\\$\x95x\x1c&')
sig = assoc.sign(self.pairs)
self.failUnlessEqual(sig, expected)
class TestMessageSigning(unittest.TestCase):
def setUp(self):
self.message = m = Message(OPENID2_NS)
m.updateArgs(OPENID2_NS, {'mode': 'id_res',
'identifier': '=example'})
m.updateArgs(BARE_NS, {'xey': 'value'})
self.args = {'openid.mode': 'id_res',
'openid.identifier': '=example',
'xey': 'value'}
def test_signSHA1(self):
assoc = association.Association.fromExpiresIn(
3600, '{sha1}', 'very_secret', "HMAC-SHA1")
signed = assoc.signMessage(self.message)
self.failUnless(signed.getArg(OPENID_NS, "sig"))
self.failUnlessEqual(signed.getArg(OPENID_NS, "signed"),
"assoc_handle,identifier,mode,ns,signed")
self.failUnlessEqual(signed.getArg(BARE_NS, "xey"), "value",
signed)
if cryptutil.SHA256_AVAILABLE:
def test_signSHA256(self):
assoc = association.Association.fromExpiresIn(
3600, '{sha1}', 'very_secret', "HMAC-SHA256")
signed = assoc.signMessage(self.message)
self.failUnless(signed.getArg(OPENID_NS, "sig"))
self.failUnlessEqual(signed.getArg(OPENID_NS, "signed"),
"assoc_handle,identifier,mode,ns,signed")
self.failUnlessEqual(signed.getArg(BARE_NS, "xey"), "value",
signed)
class TestCheckMessageSignature(unittest.TestCase):
def test_aintGotSignedList(self):
m = Message(OPENID2_NS)
m.updateArgs(OPENID2_NS, {'mode': 'id_res',
'identifier': '=example',
'sig': 'coyote',
})
m.updateArgs(BARE_NS, {'xey': 'value'})
assoc = association.Association.fromExpiresIn(
3600, '{sha1}', 'very_secret', "HMAC-SHA1")
self.failUnlessRaises(ValueError, assoc.checkMessageSignature, m)
def pyUnitTests():
return datadriven.loadTests(__name__)
if __name__ == '__main__':
suite = pyUnitTests()
runner = unittest.TextTestRunner()
runner.run(suite)
| apache-2.0 | -2,020,375,773,662,696,000 | 34.185792 | 75 | 0.601802 | false |
jonparrott/gcloud-python | dlp/docs/conf.py | 3 | 10194 | # -*- coding: utf-8 -*-
#
# google-cloud-dlp documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
__version__ = '0.1.0'
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.napoleon',
'sphinx.ext.viewcode',
]
# autodoc/autosummary flags
autoclass_content = 'both'
autodoc_default_flags = ['members']
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'google-cloud-dlp'
copyright = u'2017, Google'
author = u'Google APIs'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = '.'.join(release.split('.')[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'google-cloud-dlp-doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'google-cloud-dlp.tex', u'google-cloud-dlp Documentation',
author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'google-cloud-dlp',
u'google-cloud-dlp Documentation', [author], 1)]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'google-cloud-dlp', u'google-cloud-dlp Documentation', author,
'google-cloud-dlp',
'GAPIC library for the {metadata.shortName} v2 service', 'APIs'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('http://python.readthedocs.org/en/latest/', None),
'gax': ('https://gax-python.readthedocs.org/en/latest/', None),
}
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| apache-2.0 | -548,096,411,997,857,200 | 31.883871 | 79 | 0.706494 | false |
korrosivesec/crits | crits/core/management/commands/get_duplicate_targets.py | 23 | 2145 | from bson import Code
from django.core.management.base import BaseCommand
from optparse import make_option
from crits.emails.email import Email
from crits.targets.target import Target
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--delete',
'-d',
dest='is_delete',
default=False,
action='store_true',
help='Delete duplicate targets based on the email_address field.'),
)
help = 'Prints out duplicate target emails due to case sensitivity.'
is_delete = False
def handle(self, *args, **options):
"""
Script Execution.
"""
self.is_delete = options.get('is_delete')
mapcode = """
function () {
try {
this.to.forEach(function(z) {
emit(z.toLowerCase(), {count: 1});
});
} catch(err) {}
}
"""
reducecode = """
function(k,v) {
var count = 0;
v.forEach(function(v) {
count += v["count"];
});
return {count: count};
}
"""
m = Code(mapcode)
r = Code(reducecode)
results = Email.objects(to__exists=True).map_reduce(m, r, 'inline')
for result in results:
try:
targets = Target.objects(email_address__iexact=result.key)
targ_dup_count = targets.count()
if targ_dup_count > 1:
print str(result.key) + " [" + str(targ_dup_count) + "]"
for target in targets:
print target.to_json()
if self.is_delete:
delete_up_to = targets.count() - 1
for target in targets[:delete_up_to]:
print "Deleting target: " + str(target.id)
target.delete()
except Exception, e:
print e
pass
| mit | -734,066,303,281,574,500 | 29.211268 | 87 | 0.456876 | false |
dims/heat | heat/engine/constraint/common_constraints.py | 3 | 2856 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import croniter
import eventlet
import netaddr
import pytz
import six
from oslo_utils import netutils
from oslo_utils import timeutils
from heat.common.i18n import _
from heat.engine import constraints
class TestConstraintDelay(constraints.BaseCustomConstraint):
def validate_with_client(self, client, value):
eventlet.sleep(value)
class IPConstraint(constraints.BaseCustomConstraint):
def validate(self, value, context):
self._error_message = 'Invalid IP address'
return netutils.is_valid_ip(value)
class MACConstraint(constraints.BaseCustomConstraint):
def validate(self, value, context):
self._error_message = 'Invalid MAC address.'
return netaddr.valid_mac(value)
class CIDRConstraint(constraints.BaseCustomConstraint):
def _validate_whitespace(self, data):
self._error_message = ("Invalid net cidr '%s' contains "
"whitespace" % data)
if len(data.split()) > 1:
return False
return True
def validate(self, value, context):
try:
netaddr.IPNetwork(value)
return self._validate_whitespace(value)
except Exception as ex:
self._error_message = 'Invalid net cidr %s ' % six.text_type(ex)
return False
class ISO8601Constraint(object):
def validate(self, value, context):
try:
timeutils.parse_isotime(value)
except Exception:
return False
else:
return True
class CRONExpressionConstraint(constraints.BaseCustomConstraint):
def validate(self, value, context):
if not value:
return True
try:
croniter.croniter(value)
return True
except Exception as ex:
self._error_message = _(
'Invalid CRON expression: %s') % six.text_type(ex)
return False
class TimezoneConstraint(constraints.BaseCustomConstraint):
def validate(self, value, context):
if not value:
return True
try:
pytz.timezone(value)
return True
except Exception as ex:
self._error_message = _(
'Invalid timezone: %s') % six.text_type(ex)
return False
| apache-2.0 | 2,391,320,653,362,171,000 | 27.277228 | 78 | 0.645308 | false |
midori1/midorinoblog | site-packages/django/contrib/gis/measure.py | 93 | 12310 | # Copyright (c) 2007, Robert Coup <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Distance nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
Distance and Area objects to allow for sensible and convenient calculation
and conversions.
Authors: Robert Coup, Justin Bronn, Riccardo Di Virgilio
Inspired by GeoPy (http://exogen.case.edu/projects/geopy/)
and Geoff Biggs' PhD work on dimensioned units for robotics.
"""
__all__ = ['A', 'Area', 'D', 'Distance']
from decimal import Decimal
from django.utils.functional import total_ordering
from django.utils import six
NUMERIC_TYPES = six.integer_types + (float, Decimal)
AREA_PREFIX = "sq_"
def pretty_name(obj):
return obj.__name__ if obj.__class__ == type else obj.__class__.__name__
@total_ordering
class MeasureBase(object):
STANDARD_UNIT = None
ALIAS = {}
UNITS = {}
LALIAS = {}
def __init__(self, default_unit=None, **kwargs):
value, self._default_unit = self.default_units(kwargs)
setattr(self, self.STANDARD_UNIT, value)
if default_unit and isinstance(default_unit, six.string_types):
self._default_unit = default_unit
def _get_standard(self):
return getattr(self, self.STANDARD_UNIT)
def _set_standard(self, value):
setattr(self, self.STANDARD_UNIT, value)
standard = property(_get_standard, _set_standard)
def __getattr__(self, name):
if name in self.UNITS:
return self.standard / self.UNITS[name]
else:
raise AttributeError('Unknown unit type: %s' % name)
def __repr__(self):
return '%s(%s=%s)' % (pretty_name(self), self._default_unit,
getattr(self, self._default_unit))
def __str__(self):
return '%s %s' % (getattr(self, self._default_unit), self._default_unit)
# **** Comparison methods ****
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.standard == other.standard
else:
return NotImplemented
def __lt__(self, other):
if isinstance(other, self.__class__):
return self.standard < other.standard
else:
return NotImplemented
# **** Operators methods ****
def __add__(self, other):
if isinstance(other, self.__class__):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard + other.standard)})
else:
raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)})
def __iadd__(self, other):
if isinstance(other, self.__class__):
self.standard += other.standard
return self
else:
raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)})
def __sub__(self, other):
if isinstance(other, self.__class__):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard - other.standard)})
else:
raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)})
def __isub__(self, other):
if isinstance(other, self.__class__):
self.standard -= other.standard
return self
else:
raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)})
def __mul__(self, other):
if isinstance(other, NUMERIC_TYPES):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard * other)})
else:
raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)})
def __imul__(self, other):
if isinstance(other, NUMERIC_TYPES):
self.standard *= float(other)
return self
else:
raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)})
def __rmul__(self, other):
return self * other
def __truediv__(self, other):
if isinstance(other, self.__class__):
return self.standard / other.standard
if isinstance(other, NUMERIC_TYPES):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard / other)})
else:
raise TypeError('%(class)s must be divided with number or %(class)s' % {"class": pretty_name(self)})
def __div__(self, other): # Python 2 compatibility
return type(self).__truediv__(self, other)
def __itruediv__(self, other):
if isinstance(other, NUMERIC_TYPES):
self.standard /= float(other)
return self
else:
raise TypeError('%(class)s must be divided with number' % {"class": pretty_name(self)})
def __idiv__(self, other): # Python 2 compatibility
return type(self).__itruediv__(self, other)
def __bool__(self):
return bool(self.standard)
def __nonzero__(self): # Python 2 compatibility
return type(self).__bool__(self)
def default_units(self, kwargs):
"""
Return the unit value and the default units specified
from the given keyword arguments dictionary.
"""
val = 0.0
default_unit = self.STANDARD_UNIT
for unit, value in six.iteritems(kwargs):
if not isinstance(value, float):
value = float(value)
if unit in self.UNITS:
val += self.UNITS[unit] * value
default_unit = unit
elif unit in self.ALIAS:
u = self.ALIAS[unit]
val += self.UNITS[u] * value
default_unit = u
else:
lower = unit.lower()
if lower in self.UNITS:
val += self.UNITS[lower] * value
default_unit = lower
elif lower in self.LALIAS:
u = self.LALIAS[lower]
val += self.UNITS[u] * value
default_unit = u
else:
raise AttributeError('Unknown unit type: %s' % unit)
return val, default_unit
@classmethod
def unit_attname(cls, unit_str):
"""
Retrieves the unit attribute name for the given unit string.
For example, if the given unit string is 'metre', 'm' would be returned.
An exception is raised if an attribute cannot be found.
"""
lower = unit_str.lower()
if unit_str in cls.UNITS:
return unit_str
elif lower in cls.UNITS:
return lower
elif lower in cls.LALIAS:
return cls.LALIAS[lower]
else:
raise Exception('Could not find a unit keyword associated with "%s"' % unit_str)
class Distance(MeasureBase):
STANDARD_UNIT = "m"
UNITS = {
'chain': 20.1168,
'chain_benoit': 20.116782,
'chain_sears': 20.1167645,
'british_chain_benoit': 20.1167824944,
'british_chain_sears': 20.1167651216,
'british_chain_sears_truncated': 20.116756,
'cm': 0.01,
'british_ft': 0.304799471539,
'british_yd': 0.914398414616,
'clarke_ft': 0.3047972654,
'clarke_link': 0.201166195164,
'fathom': 1.8288,
'ft': 0.3048,
'german_m': 1.0000135965,
'gold_coast_ft': 0.304799710181508,
'indian_yd': 0.914398530744,
'inch': 0.0254,
'km': 1000.0,
'link': 0.201168,
'link_benoit': 0.20116782,
'link_sears': 0.20116765,
'm': 1.0,
'mi': 1609.344,
'mm': 0.001,
'nm': 1852.0,
'nm_uk': 1853.184,
'rod': 5.0292,
'sears_yd': 0.91439841,
'survey_ft': 0.304800609601,
'um': 0.000001,
'yd': 0.9144,
}
# Unit aliases for `UNIT` terms encountered in Spatial Reference WKT.
ALIAS = {
'centimeter': 'cm',
'foot': 'ft',
'inches': 'inch',
'kilometer': 'km',
'kilometre': 'km',
'meter': 'm',
'metre': 'm',
'micrometer': 'um',
'micrometre': 'um',
'millimeter': 'mm',
'millimetre': 'mm',
'mile': 'mi',
'yard': 'yd',
'British chain (Benoit 1895 B)': 'british_chain_benoit',
'British chain (Sears 1922)': 'british_chain_sears',
'British chain (Sears 1922 truncated)': 'british_chain_sears_truncated',
'British foot (Sears 1922)': 'british_ft',
'British foot': 'british_ft',
'British yard (Sears 1922)': 'british_yd',
'British yard': 'british_yd',
"Clarke's Foot": 'clarke_ft',
"Clarke's link": 'clarke_link',
'Chain (Benoit)': 'chain_benoit',
'Chain (Sears)': 'chain_sears',
'Foot (International)': 'ft',
'German legal metre': 'german_m',
'Gold Coast foot': 'gold_coast_ft',
'Indian yard': 'indian_yd',
'Link (Benoit)': 'link_benoit',
'Link (Sears)': 'link_sears',
'Nautical Mile': 'nm',
'Nautical Mile (UK)': 'nm_uk',
'US survey foot': 'survey_ft',
'U.S. Foot': 'survey_ft',
'Yard (Indian)': 'indian_yd',
'Yard (Sears)': 'sears_yd'
}
LALIAS = dict((k.lower(), v) for k, v in ALIAS.items())
def __mul__(self, other):
if isinstance(other, self.__class__):
return Area(default_unit=AREA_PREFIX + self._default_unit,
**{AREA_PREFIX + self.STANDARD_UNIT: (self.standard * other.standard)})
elif isinstance(other, NUMERIC_TYPES):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard * other)})
else:
raise TypeError('%(distance)s must be multiplied with number or %(distance)s' % {
"distance": pretty_name(self.__class__),
})
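# A minimal sketch of how the Distance class above resolves unit keywords via
# MeasureBase.default_units() and unit_attname(); the values shown assume the
# UNITS/ALIAS tables defined above:
#
#   >>> Distance(km=1).m                      # keyword converted to the standard unit
#   1000.0
#   >>> Distance.unit_attname('Kilometre')    # aliases are matched case-insensitively
#   'km'
#   >>> Distance(furlong=1)                   # unknown units raise AttributeError
#   Traceback (most recent call last):
#       ...
#   AttributeError: Unknown unit type: furlong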
class Area(MeasureBase):
STANDARD_UNIT = AREA_PREFIX + Distance.STANDARD_UNIT
# Getting the square units values and the alias dictionary.
UNITS = dict(('%s%s' % (AREA_PREFIX, k), v ** 2) for k, v in Distance.UNITS.items())
ALIAS = dict((k, '%s%s' % (AREA_PREFIX, v)) for k, v in Distance.ALIAS.items())
LALIAS = dict((k.lower(), v) for k, v in ALIAS.items())
def __truediv__(self, other):
if isinstance(other, NUMERIC_TYPES):
return self.__class__(default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard / other)})
else:
raise TypeError('%(class)s must be divided by a number' % {"class": pretty_name(self)})
def __div__(self, other): # Python 2 compatibility
return type(self).__truediv__(self, other)
# Shortcuts
D = Distance
A = Area
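# A minimal sketch of the D and A shortcuts in use; multiplying two distances
# yields an Area expressed in the squared unit:
#
#   >>> D(mi=1).m
#   1609.344
#   >>> D(m=3) * D(m=4)
#   Area(sq_m=12.0)
#   >>> A(sq_km=1) / 2
#   Area(sq_km=0.5)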
| apache-2.0 | -5,187,531,898,122,543,000 | 35.966967 | 112 | 0.584484 | false |
tensorflow/models | official/recommendation/ncf_common.py | 1 | 12278 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common functionalities used by both Keras and Estimator implementations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from absl import flags
from absl import logging
import numpy as np
import tensorflow as tf
from official.common import distribute_utils
from official.recommendation import constants as rconst
from official.recommendation import data_pipeline
from official.recommendation import data_preprocessing
from official.recommendation import movielens
from official.utils.flags import core as flags_core
FLAGS = flags.FLAGS
def get_inputs(params):
"""Returns some parameters used by the model."""
if FLAGS.download_if_missing and not FLAGS.use_synthetic_data:
movielens.download(FLAGS.dataset, FLAGS.data_dir)
if FLAGS.seed is not None:
np.random.seed(FLAGS.seed)
if FLAGS.use_synthetic_data:
producer = data_pipeline.DummyConstructor()
num_users, num_items = movielens.DATASET_TO_NUM_USERS_AND_ITEMS[
FLAGS.dataset]
num_train_steps = rconst.SYNTHETIC_BATCHES_PER_EPOCH
num_eval_steps = rconst.SYNTHETIC_BATCHES_PER_EPOCH
else:
num_users, num_items, producer = data_preprocessing.instantiate_pipeline(
dataset=FLAGS.dataset,
data_dir=FLAGS.data_dir,
params=params,
constructor_type=FLAGS.constructor_type,
deterministic=FLAGS.seed is not None)
num_train_steps = producer.train_batches_per_epoch
num_eval_steps = producer.eval_batches_per_epoch
return num_users, num_items, num_train_steps, num_eval_steps, producer
def parse_flags(flags_obj):
"""Convenience function to turn flags into params."""
num_gpus = flags_core.get_num_gpus(flags_obj)
batch_size = flags_obj.batch_size
eval_batch_size = flags_obj.eval_batch_size or flags_obj.batch_size
return {
"train_epochs": flags_obj.train_epochs,
"batches_per_step": 1,
"use_seed": flags_obj.seed is not None,
"batch_size": batch_size,
"eval_batch_size": eval_batch_size,
"learning_rate": flags_obj.learning_rate,
"mf_dim": flags_obj.num_factors,
"model_layers": [int(layer) for layer in flags_obj.layers],
"mf_regularization": flags_obj.mf_regularization,
"mlp_reg_layers": [float(reg) for reg in flags_obj.mlp_regularization],
"num_neg": flags_obj.num_neg,
"distribution_strategy": flags_obj.distribution_strategy,
"num_gpus": num_gpus,
"use_tpu": flags_obj.tpu is not None,
"tpu": flags_obj.tpu,
"tpu_zone": flags_obj.tpu_zone,
"tpu_gcp_project": flags_obj.tpu_gcp_project,
"beta1": flags_obj.beta1,
"beta2": flags_obj.beta2,
"epsilon": flags_obj.epsilon,
"match_mlperf": flags_obj.ml_perf,
"epochs_between_evals": flags_obj.epochs_between_evals,
"keras_use_ctl": flags_obj.keras_use_ctl,
"hr_threshold": flags_obj.hr_threshold,
"stream_files": flags_obj.tpu is not None,
"train_dataset_path": flags_obj.train_dataset_path,
"eval_dataset_path": flags_obj.eval_dataset_path,
"input_meta_data_path": flags_obj.input_meta_data_path,
}
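# A minimal sketch of how the returned params dict is consumed downstream,
# assuming the flags defined in define_ncf_flags() have already been parsed:
#
#   params = parse_flags(FLAGS)
#   batch_size = params["batch_size"]            # training batch size
#   eval_batch_size = params["eval_batch_size"]  # falls back to batch_size if unset
#   use_ctl = params["keras_use_ctl"]            # custom training loop toggle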
def get_v1_distribution_strategy(params):
"""Returns the distribution strategy to use."""
if params["use_tpu"]:
# Some of the networking libraries are quite chatty.
for name in [
"googleapiclient.discovery", "googleapiclient.discovery_cache",
"oauth2client.transport"
]:
logging.getLogger(name).setLevel(logging.ERROR)
tpu_cluster_resolver = tf.distribute.cluster_resolver.TPUClusterResolver(
tpu=params["tpu"],
zone=params["tpu_zone"],
project=params["tpu_gcp_project"],
coordinator_name="coordinator")
logging.info("Issuing reset command to TPU to ensure a clean state.")
tf.Session.reset(tpu_cluster_resolver.get_master())
# Estimator looks at the master it connects to for MonitoredTrainingSession
# by reading the `TF_CONFIG` environment variable, and the coordinator
# is used by StreamingFilesDataset.
tf_config_env = {
"session_master":
tpu_cluster_resolver.get_master(),
"eval_session_master":
tpu_cluster_resolver.get_master(),
"coordinator":
tpu_cluster_resolver.cluster_spec().as_dict()["coordinator"]
}
os.environ["TF_CONFIG"] = json.dumps(tf_config_env)
distribution = tf.distribute.TPUStrategy(
tpu_cluster_resolver, steps_per_run=100)
else:
distribution = distribute_utils.get_distribution_strategy(
num_gpus=params["num_gpus"])
return distribution
def define_ncf_flags():
"""Add flags for running ncf_main."""
# Add common flags
flags_core.define_base(
model_dir=True,
clean=True,
train_epochs=True,
epochs_between_evals=True,
export_dir=False,
run_eagerly=True,
stop_threshold=True,
num_gpu=True,
distribution_strategy=True)
flags_core.define_performance(
synthetic_data=True,
dtype=True,
fp16_implementation=True,
loss_scale=True,
enable_xla=True,
)
flags_core.define_device(tpu=True)
flags_core.define_benchmark()
flags.adopt_module_key_flags(flags_core)
movielens.define_flags()
flags_core.set_defaults(
model_dir="/tmp/ncf/",
data_dir="/tmp/movielens-data/",
dataset=movielens.ML_1M,
train_epochs=2,
batch_size=99000,
tpu=None)
# Add ncf-specific flags
flags.DEFINE_boolean(
name="download_if_missing",
default=True,
help=flags_core.help_wrap(
"Download data to data_dir if it is not already present."))
flags.DEFINE_integer(
name="eval_batch_size",
default=None,
help=flags_core.help_wrap(
"The batch size used for evaluation. This should generally be larger"
"than the training batch size as the lack of back propagation during"
"evaluation can allow for larger batch sizes to fit in memory. If not"
"specified, the training batch size (--batch_size) will be used."))
flags.DEFINE_integer(
name="num_factors",
default=8,
help=flags_core.help_wrap("The Embedding size of MF model."))
# Set the default as a list of strings to be consistent with input arguments
flags.DEFINE_list(
name="layers",
default=["64", "32", "16", "8"],
help=flags_core.help_wrap(
"The sizes of hidden layers for MLP. Example "
"to specify different sizes of MLP layers: --layers=32,16,8,4"))
flags.DEFINE_float(
name="mf_regularization",
default=0.,
help=flags_core.help_wrap(
"The regularization factor for MF embeddings. The factor is used by "
"regularizer which allows to apply penalties on layer parameters or "
"layer activity during optimization."))
flags.DEFINE_list(
name="mlp_regularization",
default=["0.", "0.", "0.", "0."],
help=flags_core.help_wrap(
"The regularization factor for each MLP layer. See mf_regularization "
"help for more info about regularization factor."))
flags.DEFINE_integer(
name="num_neg",
default=4,
help=flags_core.help_wrap(
"The Number of negative instances to pair with a positive instance."))
flags.DEFINE_float(
name="learning_rate",
default=0.001,
help=flags_core.help_wrap("The learning rate."))
flags.DEFINE_float(
name="beta1",
default=0.9,
help=flags_core.help_wrap("beta1 hyperparameter for the Adam optimizer."))
flags.DEFINE_float(
name="beta2",
default=0.999,
help=flags_core.help_wrap("beta2 hyperparameter for the Adam optimizer."))
flags.DEFINE_float(
name="epsilon",
default=1e-8,
help=flags_core.help_wrap("epsilon hyperparameter for the Adam "
"optimizer."))
flags.DEFINE_float(
name="hr_threshold",
default=1.0,
help=flags_core.help_wrap(
"If passed, training will stop when the evaluation metric HR is "
"greater than or equal to hr_threshold. For dataset ml-1m, the "
"desired hr_threshold is 0.68 which is the result from the paper; "
"For dataset ml-20m, the threshold can be set as 0.95 which is "
"achieved by MLPerf implementation."))
flags.DEFINE_enum(
name="constructor_type",
default="bisection",
enum_values=["bisection", "materialized"],
case_sensitive=False,
help=flags_core.help_wrap(
"Strategy to use for generating false negatives. materialized has a"
"precompute that scales badly, but a faster per-epoch construction"
"time and can be faster on very large systems."))
flags.DEFINE_string(
name="train_dataset_path",
default=None,
help=flags_core.help_wrap("Path to training data."))
flags.DEFINE_string(
name="eval_dataset_path",
default=None,
help=flags_core.help_wrap("Path to evaluation data."))
flags.DEFINE_string(
name="input_meta_data_path",
default=None,
help=flags_core.help_wrap("Path to input meta data file."))
flags.DEFINE_bool(
name="ml_perf",
default=False,
help=flags_core.help_wrap(
"If set, changes the behavior of the model slightly to match the "
"MLPerf reference implementations here: \n"
"https://github.com/mlperf/reference/tree/master/recommendation/"
"pytorch\n"
"The two changes are:\n"
"1. When computing the HR and NDCG during evaluation, remove "
"duplicate user-item pairs before the computation. This results in "
"better HRs and NDCGs.\n"
"2. Use a different soring algorithm when sorting the input data, "
"which performs better due to the fact the sorting algorithms are "
"not stable."))
flags.DEFINE_bool(
name="output_ml_perf_compliance_logging",
default=False,
help=flags_core.help_wrap(
"If set, output the MLPerf compliance logging. This is only useful "
"if one is running the model for MLPerf. See "
"https://github.com/mlperf/policies/blob/master/training_rules.adoc"
"#submission-compliance-logs for details. This uses sudo and so may "
"ask for your password, as root access is needed to clear the system "
"caches, which is required for MLPerf compliance."))
flags.DEFINE_integer(
name="seed",
default=None,
help=flags_core.help_wrap(
"This value will be used to seed both NumPy and TensorFlow."))
@flags.validator(
"eval_batch_size",
"eval_batch_size must be at least {}".format(rconst.NUM_EVAL_NEGATIVES +
1))
def eval_size_check(eval_batch_size):
return (eval_batch_size is None or
int(eval_batch_size) > rconst.NUM_EVAL_NEGATIVES)
flags.DEFINE_bool(
name="early_stopping",
default=False,
help=flags_core.help_wrap(
"If True, we stop the training when it reaches hr_threshold"))
flags.DEFINE_bool(
name="keras_use_ctl",
default=False,
help=flags_core.help_wrap(
"If True, we use a custom training loop for keras."))
def convert_to_softmax_logits(logits):
"""Convert the logits returned by the base model to softmax logits.
Args:
logits: used to create softmax.
Returns:
    Softmax logits with a leading column of zeros; applying softmax to these is
    equivalent to applying sigmoid to the original logits.
"""
softmax_logits = tf.concat([logits * 0, logits], axis=1)
return softmax_logits
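# A minimal sketch of the conversion above; softmax over [0, x] reproduces
# sigmoid(x) in the second column, so a sigmoid model can reuse softmax losses:
#
#   >>> logits = tf.constant([[0.5], [1.2]])
#   >>> convert_to_softmax_logits(logits)        # -> [[0.0, 0.5], [0.0, 1.2]]
#   >>> tf.nn.softmax(convert_to_softmax_logits(logits))[:, 1]
#   # numerically equal to the sigmoid of the original logits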
| apache-2.0 | -6,756,908,630,323,899,000 | 33.880682 | 80 | 0.662812 | false |
Salandora/OctoPrint | tests/server/util/test_webassets.py | 3 | 1112 | # coding=utf-8
"""
Unit tests for ``octoprint.server.util.webassets``.
"""
from __future__ import absolute_import
__author__ = "Gina Häußge <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2016 The OctoPrint Project - Released under terms of the AGPLv3 License"
import unittest
import ddt
from octoprint.server.util.webassets import replace_url
@ddt.ddt
class UrlReplaceTest(unittest.TestCase):
@ddt.data(
("mytest/some/path/", "mytest/another/longer/path/", "http://example.com/foo.html", "http://example.com/foo.html"),
("mytest/some/path/", "mytest/another/longer/path/", "/path/foo.html", "/path/foo.html"),
("http://example.com/mytest/some/path/", "mytest/another/longer/path/", "../foo.html", "http://example.com/mytest/some/foo.html"),
("mytest/some/path/", "mytest/another/longer/path/", "../foo.html", "../../../some/foo.html")
)
@ddt.unpack
def test_replace_url(self, source_url, output_url, url, expected):
actual = replace_url(source_url, output_url, url)
self.assertEqual(actual, expected)
| agpl-3.0 | -3,710,204,959,848,737,300 | 36 | 132 | 0.696396 | false |
Vvucinic/Wander | venv_2_7/lib/python2.7/site-packages/pandas/core/indexing.py | 9 | 64500 | # pylint: disable=W0223
from pandas.core.index import Index, MultiIndex
from pandas.compat import range, zip
import pandas.compat as compat
import pandas.core.common as com
from pandas.core.common import (is_bool_indexer, is_integer_dtype,
_asarray_tuplesafe, is_list_like, isnull,
is_null_slice, is_full_slice,
ABCSeries, ABCDataFrame, ABCPanel, is_float,
_values_from_object, _infer_fill_value, is_integer)
import numpy as np
# the supported indexers
def get_indexers_list():
return [
('ix', _IXIndexer),
('iloc', _iLocIndexer),
('loc', _LocIndexer),
('at', _AtIndexer),
('iat', _iAtIndexer),
]
# "null slice"
_NS = slice(None, None)
# the public IndexSlicerMaker
class _IndexSlice(object):
def __getitem__(self, arg):
return arg
IndexSlice = _IndexSlice()
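# A minimal sketch of IndexSlice in use (it simply returns whatever it is
# indexed with, which .loc then interprets for MultiIndex slicing); the frame
# ``df`` below is a hypothetical MultiIndexed DataFrame:
#
#   >>> idx = IndexSlice
#   >>> idx[:, 'b']
#   (slice(None, None, None), 'b')
#   >>> # typical use: df.loc[idx[:, 'b'], :] selects rows whose second index
#   >>> # level equals 'b'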
class IndexingError(Exception):
pass
class _NDFrameIndexer(object):
_valid_types = None
_exception = KeyError
def __init__(self, obj, name):
self.obj = obj
self.ndim = obj.ndim
self.name = name
self.axis = None
def __call__(self, *args, **kwargs):
# we need to return a copy of ourselves
self = self.__class__(self.obj, self.name)
# set the passed in values
for k, v in compat.iteritems(kwargs):
setattr(self,k,v)
return self
def __iter__(self):
raise NotImplementedError('ix is not iterable')
def __getitem__(self, key):
if type(key) is tuple:
try:
values = self.obj.get_value(*key)
if np.isscalar(values):
return values
except Exception:
pass
return self._getitem_tuple(key)
else:
return self._getitem_axis(key, axis=0)
def _get_label(self, label, axis=0):
if self.ndim == 1:
# for perf reasons we want to try _xs first
# as its basically direct indexing
# but will fail when the index is not present
# see GH5667
try:
return self.obj._xs(label, axis=axis)
except:
return self.obj[label]
elif (isinstance(label, tuple) and
isinstance(label[axis], slice)):
raise IndexingError('no slices here, handle elsewhere')
return self.obj._xs(label, axis=axis)
def _get_loc(self, key, axis=0):
return self.obj._ixs(key, axis=axis)
def _slice(self, obj, axis=0, kind=None):
return self.obj._slice(obj, axis=axis, kind=kind)
def _get_setitem_indexer(self, key):
if self.axis is not None:
return self._convert_tuple(key, is_setter=True)
axis = self.obj._get_axis(0)
if isinstance(axis, MultiIndex):
try:
return axis.get_loc(key)
except Exception:
pass
if isinstance(key, tuple) and not self.ndim < len(key):
return self._convert_tuple(key, is_setter=True)
if isinstance(key, range):
return self._convert_range(key, is_setter=True)
try:
return self._convert_to_indexer(key, is_setter=True)
except TypeError:
raise IndexingError(key)
def __setitem__(self, key, value):
indexer = self._get_setitem_indexer(key)
self._setitem_with_indexer(indexer, value)
def _has_valid_type(self, k, axis):
raise NotImplementedError()
def _has_valid_tuple(self, key):
""" check the key for valid keys across my indexer """
for i, k in enumerate(key):
if i >= self.obj.ndim:
raise IndexingError('Too many indexers')
if not self._has_valid_type(k, i):
raise ValueError("Location based indexing can only have [%s] "
"types" % self._valid_types)
def _should_validate_iterable(self, axis=0):
""" return a boolean whether this axes needs validation for a passed iterable """
ax = self.obj._get_axis(axis)
if isinstance(ax, MultiIndex):
return False
elif ax.is_floating():
return False
return True
def _is_nested_tuple_indexer(self, tup):
if any([ isinstance(ax, MultiIndex) for ax in self.obj.axes ]):
return any([ is_nested_tuple(tup,ax) for ax in self.obj.axes ])
return False
def _convert_tuple(self, key, is_setter=False):
keyidx = []
if self.axis is not None:
axis = self.obj._get_axis_number(self.axis)
for i in range(self.ndim):
if i == axis:
keyidx.append(self._convert_to_indexer(key, axis=axis, is_setter=is_setter))
else:
keyidx.append(slice(None))
else:
for i, k in enumerate(key):
idx = self._convert_to_indexer(k, axis=i, is_setter=is_setter)
keyidx.append(idx)
return tuple(keyidx)
def _convert_range(self, key, is_setter=False):
""" convert a range argument """
return list(key)
def _convert_scalar_indexer(self, key, axis):
# if we are accessing via lowered dim, use the last dim
ax = self.obj._get_axis(min(axis, self.ndim - 1))
# a scalar
return ax._convert_scalar_indexer(key, kind=self.name)
def _convert_slice_indexer(self, key, axis):
# if we are accessing via lowered dim, use the last dim
ax = self.obj._get_axis(min(axis, self.ndim - 1))
return ax._convert_slice_indexer(key, kind=self.name)
def _has_valid_setitem_indexer(self, indexer):
return True
def _has_valid_positional_setitem_indexer(self, indexer):
""" validate that an positional indexer cannot enlarge its target
will raise if needed, does not modify the indexer externally """
if isinstance(indexer, dict):
raise IndexError("{0} cannot enlarge its target object"
.format(self.name))
else:
if not isinstance(indexer, tuple):
indexer = self._tuplify(indexer)
for ax, i in zip(self.obj.axes, indexer):
if isinstance(i, slice):
# should check the stop slice?
pass
elif is_list_like_indexer(i):
# should check the elements?
pass
elif is_integer(i):
if i >= len(ax):
raise IndexError("{0} cannot enlarge its target object"
.format(self.name))
elif isinstance(i, dict):
raise IndexError("{0} cannot enlarge its target object"
.format(self.name))
return True
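    # Illustrative note, assuming a small Series s = Series(range(3)): the
    # checks above are what reject s.iloc[5] = 1 with
    # "IndexError: iloc cannot enlarge its target object".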
def _setitem_with_indexer(self, indexer, value):
self._has_valid_setitem_indexer(indexer)
# also has the side effect of consolidating in-place
from pandas import Panel, DataFrame, Series
info_axis = self.obj._info_axis_number
# maybe partial set
take_split_path = self.obj._is_mixed_type
# if there is only one block/type, still have to take split path
# unless the block is one-dimensional or it can hold the value
if not take_split_path and self.obj._data.blocks:
blk, = self.obj._data.blocks
if 1 < blk.ndim: # in case of dict, keys are indices
val = list(value.values()) if isinstance(value,dict) else value
take_split_path = not blk._can_hold_element(val)
if isinstance(indexer, tuple) and len(indexer) == len(self.obj.axes):
for i, ax in zip(indexer, self.obj.axes):
# if we have any multi-indexes that have non-trivial slices (not null slices)
# then we must take the split path, xref GH 10360
if isinstance(ax, MultiIndex) and not (is_integer(i) or is_null_slice(i)):
take_split_path = True
break
if isinstance(indexer, tuple):
nindexer = []
for i, idx in enumerate(indexer):
if isinstance(idx, dict):
# reindex the axis to the new value
# and set inplace
key, _ = convert_missing_indexer(idx)
# if this is the items axes, then take the main missing
# path first
# this correctly sets the dtype and avoids cache issues
# essentially this separates out the block that is needed
# to possibly be modified
if self.ndim > 1 and i == self.obj._info_axis_number:
# add the new item, and set the value
# must have all defined axes if we have a scalar
# or a list-like on the non-info axes if we have a
# list-like
len_non_info_axes = [
len(_ax) for _i, _ax in enumerate(self.obj.axes)
if _i != i
]
if any([not l for l in len_non_info_axes]):
if not is_list_like_indexer(value):
raise ValueError("cannot set a frame with no "
"defined index and a scalar")
self.obj[key] = value
return self.obj
# add a new item with the dtype setup
self.obj[key] = _infer_fill_value(value)
new_indexer = convert_from_missing_indexer_tuple(
indexer, self.obj.axes)
self._setitem_with_indexer(new_indexer, value)
return self.obj
# reindex the axis
# make sure to clear the cache because we are
# just replacing the block manager here
# so the object is the same
index = self.obj._get_axis(i)
labels = index.insert(len(index),key)
self.obj._data = self.obj.reindex_axis(labels, i)._data
self.obj._maybe_update_cacher(clear=True)
self.obj.is_copy=None
nindexer.append(labels.get_loc(key))
else:
nindexer.append(idx)
indexer = tuple(nindexer)
else:
indexer, missing = convert_missing_indexer(indexer)
if missing:
# reindex the axis to the new value
# and set inplace
if self.ndim == 1:
index = self.obj.index
new_index = index.insert(len(index),indexer)
# this preserves dtype of the value
new_values = Series([value])._values
if len(self.obj._values):
new_values = np.concatenate([self.obj._values,
new_values])
self.obj._data = self.obj._constructor(
new_values, index=new_index, name=self.obj.name)._data
self.obj._maybe_update_cacher(clear=True)
return self.obj
elif self.ndim == 2:
# no columns and scalar
if not len(self.obj.columns):
raise ValueError(
"cannot set a frame with no defined columns"
)
# append a Series
if isinstance(value, Series):
value = value.reindex(index=self.obj.columns,copy=True)
value.name = indexer
# a list-list
else:
# must have conforming columns
if is_list_like_indexer(value):
if len(value) != len(self.obj.columns):
raise ValueError(
"cannot set a row with mismatched columns"
)
value = Series(value,index=self.obj.columns,name=indexer)
self.obj._data = self.obj.append(value)._data
self.obj._maybe_update_cacher(clear=True)
return self.obj
# set using setitem (Panel and > dims)
elif self.ndim >= 3:
return self.obj.__setitem__(indexer, value)
# set
item_labels = self.obj._get_axis(info_axis)
# align and set the values
if take_split_path:
if not isinstance(indexer, tuple):
indexer = self._tuplify(indexer)
if isinstance(value, ABCSeries):
value = self._align_series(indexer, value)
info_idx = indexer[info_axis]
if is_integer(info_idx):
info_idx = [info_idx]
labels = item_labels[info_idx]
# if we have a partial multiindex, then need to adjust the plane
# indexer here
if (len(labels) == 1 and
isinstance(self.obj[labels[0]].axes[0], MultiIndex)):
item = labels[0]
obj = self.obj[item]
index = obj.index
idx = indexer[:info_axis][0]
plane_indexer = tuple([idx]) + indexer[info_axis + 1:]
lplane_indexer = length_of_indexer(plane_indexer[0], index)
# require that we are setting the right number of values that
# we are indexing
if is_list_like_indexer(value) and np.iterable(value) and lplane_indexer != len(value):
if len(obj[idx]) != len(value):
raise ValueError(
"cannot set using a multi-index selection indexer "
"with a different length than the value"
)
# make sure we have an ndarray
value = getattr(value,'values',value).ravel()
# we can directly set the series here
# as we select a slice indexer on the mi
idx = index._convert_slice_indexer(idx)
obj._consolidate_inplace()
obj = obj.copy()
obj._data = obj._data.setitem(indexer=tuple([idx]), value=value)
self.obj[item] = obj
return
# non-mi
else:
plane_indexer = indexer[:info_axis] + indexer[info_axis + 1:]
if info_axis > 0:
plane_axis = self.obj.axes[:info_axis][0]
lplane_indexer = length_of_indexer(plane_indexer[0],
plane_axis)
else:
lplane_indexer = 0
def setter(item, v):
s = self.obj[item]
pi = plane_indexer[0] if lplane_indexer == 1 else plane_indexer
# perform the equivalent of a setitem on the info axis
# as we have a null slice or a slice with full bounds
# which means essentially reassign to the columns of a multi-dim object
# GH6149 (null slice), GH10408 (full bounds)
if isinstance(pi, tuple) and all(is_null_slice(idx) or is_full_slice(idx, len(self.obj)) for idx in pi):
s = v
else:
# set the item, possibly having a dtype change
s._consolidate_inplace()
s = s.copy()
s._data = s._data.setitem(indexer=pi, value=v)
s._maybe_update_cacher(clear=True)
# reset the sliced object if unique
self.obj[item] = s
def can_do_equal_len():
""" return True if we have an equal len settable """
if not len(labels) == 1 or not np.iterable(value):
return False
l = len(value)
item = labels[0]
index = self.obj[item].index
# equal len list/ndarray
if len(index) == l:
return True
elif lplane_indexer == l:
return True
return False
# we need an iterable, with a ndim of at least 1
# eg. don't pass through np.array(0)
if is_list_like_indexer(value) and getattr(value,'ndim',1) > 0:
# we have an equal len Frame
if isinstance(value, ABCDataFrame) and value.ndim > 1:
sub_indexer = list(indexer)
multiindex_indexer = isinstance(labels, MultiIndex)
for item in labels:
if item in value:
sub_indexer[info_axis] = item
v = self._align_series(
tuple(sub_indexer), value[item], multiindex_indexer
)
else:
v = np.nan
setter(item, v)
# we have an equal len ndarray/convertible to our labels
elif np.array(value).ndim == 2:
# note that this coerces the dtype if we are mixed
# GH 7551
value = np.array(value,dtype=object)
if len(labels) != value.shape[1]:
raise ValueError('Must have equal len keys and value '
'when setting with an ndarray')
for i, item in enumerate(labels):
# setting with a list, recoerces
setter(item, value[:, i].tolist())
# we have an equal len list/ndarray
elif can_do_equal_len():
setter(labels[0], value)
# per label values
else:
if len(labels) != len(value):
raise ValueError('Must have equal len keys and value '
'when setting with an iterable')
for item, v in zip(labels, value):
setter(item, v)
else:
# scalar
for item in labels:
setter(item, value)
else:
if isinstance(indexer, tuple):
indexer = maybe_convert_ix(*indexer)
# if we are setting on the info axis ONLY
# set using those methods to avoid block-splitting
# logic here
if len(indexer) > info_axis and is_integer(indexer[info_axis]) and all(
is_null_slice(idx) for i, idx in enumerate(indexer) if i != info_axis):
self.obj[item_labels[indexer[info_axis]]] = value
return
if isinstance(value, (ABCSeries, dict)):
value = self._align_series(indexer, Series(value))
elif isinstance(value, ABCDataFrame):
value = self._align_frame(indexer, value)
if isinstance(value, ABCPanel):
value = self._align_panel(indexer, value)
# check for chained assignment
self.obj._check_is_chained_assignment_possible()
# actually do the set
self.obj._consolidate_inplace()
self.obj._data = self.obj._data.setitem(indexer=indexer, value=value)
self.obj._maybe_update_cacher(clear=True)
def _align_series(self, indexer, ser, multiindex_indexer=False):
"""
Parameters
----------
indexer : tuple, slice, scalar
The indexer used to get the locations that will be set to
`ser`
ser : pd.Series
The values to assign to the locations specified by `indexer`
multiindex_indexer : boolean, optional
Defaults to False. Should be set to True if `indexer` was from
a `pd.MultiIndex`, to avoid unnecessary broadcasting.
        Returns
        -------
`np.array` of `ser` broadcast to the appropriate shape for assignment
to the locations selected by `indexer`
"""
if isinstance(indexer, (slice, np.ndarray, list, Index)):
indexer = tuple([indexer])
if isinstance(indexer, tuple):
# flatten np.ndarray indexers
ravel = lambda i: i.ravel() if isinstance(i, np.ndarray) else i
indexer = tuple(map(ravel, indexer))
aligners = [not is_null_slice(idx) for idx in indexer]
sum_aligners = sum(aligners)
single_aligner = sum_aligners == 1
is_frame = self.obj.ndim == 2
is_panel = self.obj.ndim >= 3
obj = self.obj
# are we a single alignable value on a non-primary
# dim (e.g. panel: 1,2, or frame: 0) ?
# hence need to align to a single axis dimension
# rather that find all valid dims
# frame
if is_frame:
single_aligner = single_aligner and aligners[0]
# panel
elif is_panel:
single_aligner = (single_aligner and
(aligners[1] or aligners[2]))
# we have a frame, with multiple indexers on both axes; and a
# series, so need to broadcast (see GH5206)
if (sum_aligners == self.ndim and
all([com.is_sequence(_) for _ in indexer])):
ser = ser.reindex(obj.axes[0][indexer[0]], copy=True)._values
# single indexer
if len(indexer) > 1 and not multiindex_indexer:
l = len(indexer[1])
ser = np.tile(ser, l).reshape(l, -1).T
return ser
for i, idx in enumerate(indexer):
ax = obj.axes[i]
# multiple aligners (or null slices)
if com.is_sequence(idx) or isinstance(idx, slice):
if single_aligner and is_null_slice(idx):
continue
new_ix = ax[idx]
if not is_list_like_indexer(new_ix):
new_ix = Index([new_ix])
else:
new_ix = Index(new_ix)
if ser.index.equals(new_ix) or not len(new_ix):
return ser._values.copy()
return ser.reindex(new_ix)._values
# 2 dims
elif single_aligner and is_frame:
# reindex along index
ax = self.obj.axes[1]
if ser.index.equals(ax) or not len(ax):
return ser._values.copy()
return ser.reindex(ax)._values
# >2 dims
elif single_aligner:
broadcast = []
for n, labels in enumerate(self.obj._get_plane_axes(i)):
# reindex along the matching dimensions
if len(labels & ser.index):
ser = ser.reindex(labels)
else:
broadcast.append((n, len(labels)))
# broadcast along other dims
ser = ser._values.copy()
for (axis, l) in broadcast:
shape = [-1] * (len(broadcast) + 1)
shape[axis] = l
ser = np.tile(ser, l).reshape(shape)
if self.obj.ndim == 3:
ser = ser.T
return ser
elif np.isscalar(indexer):
ax = self.obj._get_axis(1)
if ser.index.equals(ax):
return ser._values.copy()
return ser.reindex(ax)._values
raise ValueError('Incompatible indexer with Series')
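    # Illustrative note, assuming df = DataFrame(index=['a', 'b'], columns=['x', 'y'])
    # and ser = Series([1, 2], index=['x', 'y']): an assignment like
    # df.loc['a', :] = ser reaches _align_series above, which reindexes ser
    # against the selected axis and returns its values in the shape needed for
    # the assignment.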
def _align_frame(self, indexer, df):
is_frame = self.obj.ndim == 2
is_panel = self.obj.ndim >= 3
if isinstance(indexer, tuple):
aligners = [not is_null_slice(idx) for idx in indexer]
sum_aligners = sum(aligners)
single_aligner = sum_aligners == 1
idx, cols = None, None
sindexers = []
for i, ix in enumerate(indexer):
ax = self.obj.axes[i]
if com.is_sequence(ix) or isinstance(ix, slice):
if idx is None:
idx = ax[ix].ravel()
elif cols is None:
cols = ax[ix].ravel()
else:
break
else:
sindexers.append(i)
# panel
if is_panel:
# need to conform to the convention
# as we are not selecting on the items axis
# and we have a single indexer
# GH 7763
if len(sindexers) == 1 and sindexers[0] != 0:
df = df.T
if idx is None:
idx = df.index
if cols is None:
cols = df.columns
if idx is not None and cols is not None:
if df.index.equals(idx) and df.columns.equals(cols):
val = df.copy()._values
else:
val = df.reindex(idx, columns=cols)._values
return val
elif ((isinstance(indexer, slice) or is_list_like_indexer(indexer))
and is_frame):
ax = self.obj.index[indexer]
if df.index.equals(ax):
val = df.copy()._values
else:
# we have a multi-index and are trying to align
# with a particular, level GH3738
if isinstance(ax, MultiIndex) and isinstance(
df.index, MultiIndex) and ax.nlevels != df.index.nlevels:
raise TypeError("cannot align on a multi-index with out specifying the join levels")
val = df.reindex(index=ax)._values
return val
elif np.isscalar(indexer) and is_panel:
idx = self.obj.axes[1]
cols = self.obj.axes[2]
# by definition we are indexing on the 0th axis
# a passed in dataframe which is actually a transpose
# of what is needed
if idx.equals(df.index) and cols.equals(df.columns):
return df.copy()._values
return df.reindex(idx, columns=cols)._values
raise ValueError('Incompatible indexer with DataFrame')
def _align_panel(self, indexer, df):
is_frame = self.obj.ndim == 2
is_panel = self.obj.ndim >= 3
raise NotImplementedError("cannot set using an indexer with a Panel "
"yet!")
def _getitem_tuple(self, tup):
try:
return self._getitem_lowerdim(tup)
except IndexingError:
pass
# no multi-index, so validate all of the indexers
self._has_valid_tuple(tup)
# ugly hack for GH #836
if self._multi_take_opportunity(tup):
return self._multi_take(tup)
# no shortcut needed
retval = self.obj
for i, key in enumerate(tup):
if i >= self.obj.ndim:
raise IndexingError('Too many indexers')
if is_null_slice(key):
continue
retval = getattr(retval, self.name)._getitem_axis(key, axis=i)
return retval
def _multi_take_opportunity(self, tup):
from pandas.core.generic import NDFrame
# ugly hack for GH #836
if not isinstance(self.obj, NDFrame):
return False
if not all(is_list_like_indexer(x) for x in tup):
return False
# just too complicated
for indexer, ax in zip(tup, self.obj._data.axes):
if isinstance(ax, MultiIndex):
return False
elif is_bool_indexer(indexer):
return False
elif not ax.is_unique:
return False
return True
def _multi_take(self, tup):
""" create the reindex map for our objects, raise the _exception if we
can't create the indexer
"""
try:
o = self.obj
d = dict([
(a, self._convert_for_reindex(t, axis=o._get_axis_number(a)))
for t, a in zip(tup, o._AXIS_ORDERS)
])
return o.reindex(**d)
except:
raise self._exception
def _convert_for_reindex(self, key, axis=0):
labels = self.obj._get_axis(axis)
if is_bool_indexer(key):
key = check_bool_indexer(labels, key)
return labels[key]
else:
if isinstance(key, Index):
# want Index objects to pass through untouched
keyarr = key
else:
# asarray can be unsafe, NumPy strings are weird
keyarr = _asarray_tuplesafe(key)
if is_integer_dtype(keyarr) and not labels.is_integer():
keyarr = com._ensure_platform_int(keyarr)
return labels.take(keyarr)
return keyarr
def _handle_lowerdim_multi_index_axis0(self, tup):
# we have an axis0 multi-index, handle or raise
try:
# fast path for series or for tup devoid of slices
return self._get_label(tup, axis=0)
except TypeError:
# slices are unhashable
pass
except Exception as e1:
if isinstance(tup[0], (slice, Index)):
raise IndexingError("Handle elsewhere")
# raise the error if we are not sorted
ax0 = self.obj._get_axis(0)
if not ax0.is_lexsorted_for_tuple(tup):
raise e1
return None
def _getitem_lowerdim(self, tup):
# we can directly get the axis result since the axis is specified
if self.axis is not None:
axis = self.obj._get_axis_number(self.axis)
return self._getitem_axis(tup, axis=axis)
# we may have a nested tuples indexer here
if self._is_nested_tuple_indexer(tup):
return self._getitem_nested_tuple(tup)
        # we may be using a tuple to represent multiple dimensions here
ax0 = self.obj._get_axis(0)
if isinstance(ax0, MultiIndex):
result = self._handle_lowerdim_multi_index_axis0(tup)
if result is not None:
return result
if len(tup) > self.obj.ndim:
raise IndexingError("Too many indexers. handle elsewhere")
# to avoid wasted computation
# df.ix[d1:d2, 0] -> columns first (True)
# df.ix[0, ['C', 'B', A']] -> rows first (False)
for i, key in enumerate(tup):
if is_label_like(key) or isinstance(key, tuple):
section = self._getitem_axis(key, axis=i)
# we have yielded a scalar ?
if not is_list_like_indexer(section):
return section
elif section.ndim == self.ndim:
# we're in the middle of slicing through a MultiIndex
# revise the key wrt to `section` by inserting an _NS
new_key = tup[:i] + (_NS,) + tup[i + 1:]
else:
new_key = tup[:i] + tup[i + 1:]
# unfortunately need an odious kludge here because of
# DataFrame transposing convention
if (isinstance(section, ABCDataFrame) and i > 0
and len(new_key) == 2):
a, b = new_key
new_key = b, a
if len(new_key) == 1:
new_key, = new_key
# This is an elided recursive call to iloc/loc/etc'
return getattr(section, self.name)[new_key]
raise IndexingError('not applicable')
def _getitem_nested_tuple(self, tup):
# we have a nested tuple so have at least 1 multi-index level
        # we should be able to match up the dimensionality here
# we have too many indexers for our dim, but have at least 1
# multi-index dimension, try to see if we have something like
# a tuple passed to a series with a multi-index
if len(tup) > self.ndim:
result = self._handle_lowerdim_multi_index_axis0(tup)
if result is not None:
return result
# this is a series with a multi-index specified a tuple of selectors
return self._getitem_axis(tup, axis=0)
# handle the multi-axis by taking sections and reducing
# this is iterative
obj = self.obj
axis = 0
for i, key in enumerate(tup):
if is_null_slice(key):
axis += 1
continue
current_ndim = obj.ndim
obj = getattr(obj, self.name)._getitem_axis(key, axis=axis)
axis += 1
# if we have a scalar, we are done
if np.isscalar(obj) or not hasattr(obj,'ndim'):
break
# has the dim of the obj changed?
# GH 7199
if obj.ndim < current_ndim:
# GH 7516
# if had a 3 dim and are going to a 2d
# axes are reversed on a DataFrame
if i >= 1 and current_ndim == 3 and obj.ndim == 2:
obj = obj.T
axis -= 1
return obj
def _getitem_axis(self, key, axis=0):
if self._should_validate_iterable(axis):
self._has_valid_type(key, axis)
labels = self.obj._get_axis(axis)
if isinstance(key, slice):
return self._get_slice_axis(key, axis=axis)
elif is_list_like_indexer(key) and not (isinstance(key, tuple) and
isinstance(labels, MultiIndex)):
if hasattr(key, 'ndim') and key.ndim > 1:
raise ValueError('Cannot index with multidimensional key')
return self._getitem_iterable(key, axis=axis)
else:
if is_integer(key):
if axis == 0 and isinstance(labels, MultiIndex):
try:
return self._get_label(key, axis=axis)
except (KeyError, TypeError):
if self.obj.index.levels[0].is_integer():
raise
# this is the fallback! (for a non-float, non-integer index)
if not labels.is_floating() and not labels.is_integer():
return self._get_loc(key, axis=axis)
return self._get_label(key, axis=axis)
def _getitem_iterable(self, key, axis=0):
if self._should_validate_iterable(axis):
self._has_valid_type(key, axis)
labels = self.obj._get_axis(axis)
if is_bool_indexer(key):
key = check_bool_indexer(labels, key)
inds, = key.nonzero()
return self.obj.take(inds, axis=axis, convert=False)
else:
if isinstance(key, Index):
# want Index objects to pass through untouched
keyarr = key
else:
# asarray can be unsafe, NumPy strings are weird
keyarr = _asarray_tuplesafe(key)
# have the index handle the indexer and possibly return
# an indexer or raising
indexer = labels._convert_list_indexer(keyarr, kind=self.name)
if indexer is not None:
return self.obj.take(indexer, axis=axis)
# this is not the most robust, but...
if (isinstance(labels, MultiIndex) and len(keyarr) and
not isinstance(keyarr[0], tuple)):
level = 0
else:
level = None
# existing labels are unique and indexer are unique
if labels.is_unique and Index(keyarr).is_unique:
try:
result = self.obj.reindex_axis(keyarr, axis=axis, level=level)
# this is an error as we are trying to find
# keys in a multi-index that don't exist
if isinstance(labels, MultiIndex) and level is not None:
if hasattr(result,'ndim') and not np.prod(result.shape) and len(keyarr):
raise KeyError("cannot index a multi-index axis with these keys")
return result
except AttributeError:
# Series
if axis != 0:
raise AssertionError('axis must be 0')
return self.obj.reindex(keyarr, level=level)
# existing labels are non-unique
else:
# reindex with the specified axis
if axis + 1 > self.obj.ndim:
raise AssertionError("invalid indexing error with "
"non-unique index")
new_target, indexer, new_indexer = labels._reindex_non_unique(keyarr)
if new_indexer is not None:
result = self.obj.take(indexer[indexer!=-1], axis=axis,
convert=False)
result = result._reindex_with_indexers({
axis: [new_target, new_indexer]
}, copy=True, allow_dups=True)
else:
result = self.obj.take(indexer, axis=axis,
convert=False)
return result
def _convert_to_indexer(self, obj, axis=0, is_setter=False):
"""
Convert indexing key into something we can use to do actual fancy
indexing on an ndarray
Examples
ix[:5] -> slice(0, 5)
ix[[1,2,3]] -> [1,2,3]
ix[['foo', 'bar', 'baz']] -> [i, j, k] (indices of foo, bar, baz)
Going by Zen of Python?
"In the face of ambiguity, refuse the temptation to guess."
raise AmbiguousIndexError with integer labels?
- No, prefer label-based indexing
"""
labels = self.obj._get_axis(axis)
# if we are a scalar indexer and not type correct raise
obj = self._convert_scalar_indexer(obj, axis)
# see if we are positional in nature
is_int_index = labels.is_integer()
is_int_positional = is_integer(obj) and not is_int_index
# if we are a label return me
try:
return labels.get_loc(obj)
except LookupError:
if isinstance(obj, tuple) and isinstance(labels, MultiIndex):
if is_setter and len(obj) == labels.nlevels:
return {'key': obj}
raise
except TypeError:
pass
except (ValueError):
if not is_int_positional:
raise
# a positional
if is_int_positional:
# if we are setting and its not a valid location
# its an insert which fails by definition
if is_setter:
# always valid
if self.name == 'loc':
return {'key': obj}
# a positional
if (obj >= self.obj.shape[axis] and
not isinstance(labels, MultiIndex)):
raise ValueError("cannot set by positional indexing with "
"enlargement")
return obj
if isinstance(obj, slice):
return self._convert_slice_indexer(obj, axis)
elif is_nested_tuple(obj, labels):
return labels.get_locs(obj)
elif is_list_like_indexer(obj):
if is_bool_indexer(obj):
obj = check_bool_indexer(labels, obj)
inds, = obj.nonzero()
return inds
else:
if isinstance(obj, Index):
objarr = obj.values
else:
objarr = _asarray_tuplesafe(obj)
# The index may want to handle a list indexer differently
# by returning an indexer or raising
indexer = labels._convert_list_indexer(objarr, kind=self.name)
if indexer is not None:
return indexer
# this is not the most robust, but...
if (isinstance(labels, MultiIndex) and
not isinstance(objarr[0], tuple)):
level = 0
_, indexer = labels.reindex(objarr, level=level)
# take all
if indexer is None:
indexer = np.arange(len(labels))
check = labels.levels[0].get_indexer(objarr)
else:
level = None
# unique index
if labels.is_unique:
indexer = check = labels.get_indexer(objarr)
# non-unique (dups)
else:
(indexer,
missing) = labels.get_indexer_non_unique(objarr)
check = indexer
mask = check == -1
if mask.any():
raise KeyError('%s not in index' % objarr[mask])
return _values_from_object(indexer)
else:
try:
return labels.get_loc(obj)
except LookupError:
# allow a not found key only if we are a setter
if not is_list_like_indexer(obj) and is_setter:
return {'key': obj}
raise
def _tuplify(self, loc):
tup = [slice(None, None) for _ in range(self.ndim)]
tup[0] = loc
return tuple(tup)
def _get_slice_axis(self, slice_obj, axis=0):
obj = self.obj
if not need_slice(slice_obj):
return obj
indexer = self._convert_slice_indexer(slice_obj, axis)
if isinstance(indexer, slice):
return self._slice(indexer, axis=axis, kind='iloc')
else:
return self.obj.take(indexer, axis=axis, convert=False)
class _IXIndexer(_NDFrameIndexer):
"""A primarily label-location based indexer, with integer position
fallback.
``.ix[]`` supports mixed integer and label based access. It is
primarily label based, but will fall back to integer positional
access unless the corresponding axis is of integer type.
``.ix`` is the most general indexer and will support any of the
inputs in ``.loc`` and ``.iloc``. ``.ix`` also supports floating
point label schemes. ``.ix`` is exceptionally useful when dealing
    with mixed positional and label based hierarchical indexes.
However, when an axis is integer based, ONLY label based access
and not positional access is supported. Thus, in such cases, it's
usually better to be explicit and use ``.iloc`` or ``.loc``.
See more at :ref:`Advanced Indexing <advanced>`.
"""
def _has_valid_type(self, key, axis):
if isinstance(key, slice):
return True
elif is_bool_indexer(key):
return True
elif is_list_like_indexer(key):
return True
else:
self._convert_scalar_indexer(key, axis)
return True
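# A minimal sketch of the fallback behaviour described above, assuming
# df = DataFrame({'A': [1, 2, 3]}, index=['a', 'b', 'c']):
#
#   >>> df.ix['b', 'A']     # label-based lookup
#   2
#   >>> df.ix[1, 'A']       # integers fall back to positional access here
#   2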
class _LocationIndexer(_NDFrameIndexer):
_exception = Exception
def __getitem__(self, key):
if type(key) is tuple:
return self._getitem_tuple(key)
else:
return self._getitem_axis(key, axis=0)
def _getitem_axis(self, key, axis=0):
raise NotImplementedError()
def _getbool_axis(self, key, axis=0):
labels = self.obj._get_axis(axis)
key = check_bool_indexer(labels, key)
inds, = key.nonzero()
try:
return self.obj.take(inds, axis=axis, convert=False)
except Exception as detail:
raise self._exception(detail)
def _get_slice_axis(self, slice_obj, axis=0):
""" this is pretty simple as we just have to deal with labels """
obj = self.obj
if not need_slice(slice_obj):
return obj
labels = obj._get_axis(axis)
indexer = labels.slice_indexer(slice_obj.start, slice_obj.stop,
slice_obj.step)
if isinstance(indexer, slice):
return self._slice(indexer, axis=axis, kind='iloc')
else:
return self.obj.take(indexer, axis=axis, convert=False)
class _LocIndexer(_LocationIndexer):
"""Purely label-location based indexer for selection by label.
``.loc[]`` is primarily label based, but may also be used with a
boolean array.
Allowed inputs are:
- A single label, e.g. ``5`` or ``'a'``, (note that ``5`` is
interpreted as a *label* of the index, and **never** as an
integer position along the index).
- A list or array of labels, e.g. ``['a', 'b', 'c']``.
- A slice object with labels, e.g. ``'a':'f'`` (note that contrary
to usual python slices, **both** the start and the stop are included!).
- A boolean array.
``.loc`` will raise a ``KeyError`` when the items are not found.
See more at :ref:`Selection by Label <indexing.label>`
"""
_valid_types = ("labels (MUST BE IN THE INDEX), slices of labels (BOTH "
"endpoints included! Can be slices of integers if the "
"index is integers), listlike of labels, boolean")
_exception = KeyError
def _has_valid_type(self, key, axis):
ax = self.obj._get_axis(axis)
# valid for a label where all labels are in the index
        # slice of labels (where start-end in labels)
        # slice of integers (only if in the labels)
# boolean
if isinstance(key, slice):
return True
elif is_bool_indexer(key):
return True
elif is_list_like_indexer(key):
# mi is just a passthru
if isinstance(key, tuple) and isinstance(ax, MultiIndex):
return True
# TODO: don't check the entire key unless necessary
if len(key) and np.all(ax.get_indexer_for(key) < 0):
raise KeyError("None of [%s] are in the [%s]" %
(key, self.obj._get_axis_name(axis)))
return True
else:
def error():
if isnull(key):
raise TypeError(
"cannot use label indexing with a null key")
raise KeyError("the label [%s] is not in the [%s]" %
(key, self.obj._get_axis_name(axis)))
try:
key = self._convert_scalar_indexer(key, axis)
                if key not in ax:
error()
except (TypeError) as e:
# python 3 type errors should be raised
if 'unorderable' in str(e): # pragma: no cover
error()
raise
except:
error()
return True
def _getitem_axis(self, key, axis=0):
labels = self.obj._get_axis(axis)
if isinstance(key, slice):
self._has_valid_type(key, axis)
return self._get_slice_axis(key, axis=axis)
elif is_bool_indexer(key):
return self._getbool_axis(key, axis=axis)
elif is_list_like_indexer(key):
# GH 7349
# possibly convert a list-like into a nested tuple
# but don't convert a list-like of tuples
if isinstance(labels, MultiIndex):
if not isinstance(key, tuple) and len(key) > 1 and not isinstance(key[0], tuple):
key = tuple([key])
# an iterable multi-selection
if not (isinstance(key, tuple) and
isinstance(labels, MultiIndex)):
if hasattr(key, 'ndim') and key.ndim > 1:
raise ValueError('Cannot index with multidimensional key')
return self._getitem_iterable(key, axis=axis)
# nested tuple slicing
if is_nested_tuple(key, labels):
locs = labels.get_locs(key)
indexer = [ slice(None) ] * self.ndim
indexer[axis] = locs
return self.obj.iloc[tuple(indexer)]
# fall thru to straight lookup
self._has_valid_type(key, axis)
return self._get_label(key, axis=axis)
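# A minimal sketch of label-based selection through .loc, assuming
# df = DataFrame({'A': [1, 2, 3]}, index=['a', 'b', 'c']):
#
#   >>> df.loc['a':'b', 'A']     # both slice endpoints are included
#   a    1
#   b    2
#   Name: A, dtype: int64
#   >>> df.loc['d', 'A']         # missing labels raise KeyError
#   Traceback (most recent call last):
#       ...
#   KeyError: ...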
class _iLocIndexer(_LocationIndexer):
"""Purely integer-location based indexing for selection by position.
``.iloc[]`` is primarily integer position based (from ``0`` to
``length-1`` of the axis), but may also be used with a boolean
array.
Allowed inputs are:
- An integer, e.g. ``5``.
- A list or array of integers, e.g. ``[4, 3, 0]``.
- A slice object with ints, e.g. ``1:7``.
- A boolean array.
``.iloc`` will raise ``IndexError`` if a requested indexer is
out-of-bounds, except *slice* indexers which allow out-of-bounds
indexing (this conforms with python/numpy *slice* semantics).
See more at :ref:`Selection by Position <indexing.integer>`
"""
_valid_types = ("integer, integer slice (START point is INCLUDED, END "
"point is EXCLUDED), listlike of integers, boolean array")
_exception = IndexError
def _has_valid_type(self, key, axis):
if is_bool_indexer(key):
if hasattr(key, 'index') and isinstance(key.index, Index):
if key.index.inferred_type == 'integer':
raise NotImplementedError(
"iLocation based boolean indexing on an integer type "
"is not available"
)
raise ValueError("iLocation based boolean indexing cannot use "
"an indexable as a mask")
return True
if isinstance(key, slice):
return True
elif is_integer(key):
return self._is_valid_integer(key, axis)
elif is_list_like_indexer(key):
return self._is_valid_list_like(key, axis)
return False
def _has_valid_setitem_indexer(self, indexer):
self._has_valid_positional_setitem_indexer(indexer)
def _is_valid_integer(self, key, axis):
# return a boolean if we have a valid integer indexer
ax = self.obj._get_axis(axis)
l = len(ax)
if key >= l or key < -l:
raise IndexError("single positional indexer is out-of-bounds")
return True
def _is_valid_list_like(self, key, axis):
        # return a boolean if we are a valid list-like (e.g. that we don't
        # have out-of-bounds values)
# coerce the key to not exceed the maximum size of the index
arr = np.array(key)
ax = self.obj._get_axis(axis)
l = len(ax)
if len(arr) and (arr.max() >= l or arr.min() < -l):
raise IndexError("positional indexers are out-of-bounds")
return True
def _getitem_tuple(self, tup):
self._has_valid_tuple(tup)
try:
return self._getitem_lowerdim(tup)
except:
pass
retval = self.obj
        axis = 0
for i, key in enumerate(tup):
if i >= self.obj.ndim:
raise IndexingError('Too many indexers')
if is_null_slice(key):
axis += 1
continue
retval = getattr(retval, self.name)._getitem_axis(key, axis=axis)
# if the dim was reduced, then pass a lower-dim the next time
            if retval.ndim < self.ndim:
axis -= 1
# try to get for the next axis
axis += 1
return retval
def _get_slice_axis(self, slice_obj, axis=0):
obj = self.obj
if not need_slice(slice_obj):
return obj
slice_obj = self._convert_slice_indexer(slice_obj, axis)
if isinstance(slice_obj, slice):
return self._slice(slice_obj, axis=axis, kind='iloc')
else:
return self.obj.take(slice_obj, axis=axis, convert=False)
def _getitem_axis(self, key, axis=0):
if isinstance(key, slice):
self._has_valid_type(key, axis)
return self._get_slice_axis(key, axis=axis)
elif is_bool_indexer(key):
self._has_valid_type(key, axis)
return self._getbool_axis(key, axis=axis)
# a single integer or a list of integers
else:
if is_list_like_indexer(key):
# validate list bounds
self._is_valid_list_like(key, axis)
# force an actual list
key = list(key)
else:
key = self._convert_scalar_indexer(key, axis)
if not is_integer(key):
raise TypeError("Cannot index by location index with a "
"non-integer key")
# validate the location
self._is_valid_integer(key, axis)
return self._get_loc(key, axis=axis)
def _convert_to_indexer(self, obj, axis=0, is_setter=False):
""" much simpler as we only have to deal with our valid types """
        # may need to convert a float key
if isinstance(obj, slice):
return self._convert_slice_indexer(obj, axis)
elif is_float(obj):
return self._convert_scalar_indexer(obj, axis)
elif self._has_valid_type(obj, axis):
return obj
raise ValueError("Can only index by location with a [%s]" %
self._valid_types)
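# Illustrative sketch (not part of the original module): the helper below
# walks the positional paths validated by _iLocIndexer above through the
# public .iloc API. Out-of-bounds scalars and lists raise IndexError, while
# out-of-bounds slices are simply clipped; the frame is a hypothetical
# example.
def _example_iloc_positional_paths():
    import pandas as pd
    df = pd.DataFrame({'a': [10, 20, 30]})
    scalar = df.iloc[0]        # single integer -> first row
    listed = df.iloc[[0, 2]]   # list of integers
    clipped = df.iloc[1:100]   # slice may run past the end of the axis
    try:
        df.iloc[100]           # scalar out-of-bounds is rejected
    except IndexError:
        pass
    return scalar, listed, clipped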
class _ScalarAccessIndexer(_NDFrameIndexer):
""" access scalars quickly """
def _convert_key(self, key, is_setter=False):
return list(key)
def __getitem__(self, key):
if not isinstance(key, tuple):
# we could have a convertible item here (e.g. Timestamp)
if not is_list_like_indexer(key):
key = tuple([key])
else:
raise ValueError('Invalid call for scalar access (getting)!')
key = self._convert_key(key)
return self.obj.get_value(*key, takeable=self._takeable)
def __setitem__(self, key, value):
if not isinstance(key, tuple):
key = self._tuplify(key)
if len(key) != self.obj.ndim:
raise ValueError('Not enough indexers for scalar access '
'(setting)!')
key = list(self._convert_key(key, is_setter=True))
key.append(value)
self.obj.set_value(*key, takeable=self._takeable)
class _AtIndexer(_ScalarAccessIndexer):
"""Fast label-based scalar accessor
Similarly to ``loc``, ``at`` provides **label** based scalar lookups.
You can also set using these indexers.
"""
_takeable = False
def _convert_key(self, key, is_setter=False):
""" require they keys to be the same type as the index (so we don't fallback) """
# allow arbitrary setting
if is_setter:
return list(key)
for ax, i in zip(self.obj.axes, key):
if ax.is_integer():
if not is_integer(i):
raise ValueError("At based indexing on an integer index can only have integer "
"indexers")
else:
if is_integer(i):
raise ValueError("At based indexing on an non-integer index can only have non-integer "
"indexers")
return key
class _iAtIndexer(_ScalarAccessIndexer):
"""Fast integer location scalar accessor.
Similarly to ``iloc``, ``iat`` provides **integer** based lookups.
You can also set using these indexers.
"""
_takeable = True
def _has_valid_setitem_indexer(self, indexer):
self._has_valid_positional_setitem_indexer(indexer)
def _convert_key(self, key, is_setter=False):
""" require integer args (and convert to label arguments) """
for a, i in zip(self.obj.axes, key):
if not is_integer(i):
raise ValueError("iAt based indexing can only have integer "
"indexers")
return key
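# Illustrative sketch (not part of the original module): fast scalar access
# through the public .at (label-based) and .iat (position-based) accessors
# implemented above; the frame, labels and values are hypothetical examples.
def _example_scalar_accessors():
    import pandas as pd
    df = pd.DataFrame({'a': [1, 2]}, index=['x', 'y'])
    label_value = df.at['y', 'a']   # label lookup -> 2
    pos_value = df.iat[0, 0]        # positional lookup -> 1
    df.at['x', 'a'] = 99            # scalar assignment by label
    return label_value, pos_value, df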
# 32-bit floating point machine epsilon
_eps = np.finfo('f4').eps
def length_of_indexer(indexer, target=None):
"""return the length of a single non-tuple indexer which could be a slice
"""
if target is not None and isinstance(indexer, slice):
l = len(target)
start = indexer.start
stop = indexer.stop
step = indexer.step
if start is None:
start = 0
elif start < 0:
start += l
if stop is None or stop > l:
stop = l
elif stop < 0:
stop += l
if step is None:
step = 1
elif step < 0:
step = -step
return (stop - start + step-1) // step
elif isinstance(indexer, (ABCSeries, Index, np.ndarray, list)):
return len(indexer)
elif not is_list_like_indexer(indexer):
return 1
raise AssertionError("cannot find the length of the indexer")
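# Illustrative sketch (not part of the original module): a quick check that
# the slice arithmetic above agrees with slicing a hypothetical ten-element
# target directly (positive step only).
def _example_length_of_indexer():
    target = list(range(10))
    idx = slice(2, None, 3)
    assert length_of_indexer(idx, target) == len(target[idx])
    return length_of_indexer(idx, target)   # 3 elements: positions 2, 5, 8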
def convert_to_index_sliceable(obj, key):
"""if we are index sliceable, then return my slicer, otherwise return None
"""
idx = obj.index
if isinstance(key, slice):
return idx._convert_slice_indexer(key, kind='getitem')
elif isinstance(key, compat.string_types):
# we are an actual column
if key in obj._data.items:
return None
# we need a timelike key here
if idx.is_all_dates:
try:
return idx._get_string_slice(key)
except:
return None
return None
def is_index_slice(obj):
def _is_valid_index(x):
return (is_integer(x) or is_float(x)
and np.allclose(x, int(x), rtol=_eps, atol=0))
def _crit(v):
return v is None or _is_valid_index(v)
both_none = obj.start is None and obj.stop is None
return not both_none and (_crit(obj.start) and _crit(obj.stop))
def check_bool_indexer(ax, key):
# boolean indexing, need to check that the data are aligned, otherwise
# disallowed
# this function assumes that is_bool_indexer(key) == True
result = key
if isinstance(key, ABCSeries) and not key.index.equals(ax):
result = result.reindex(ax)
mask = com.isnull(result._values)
if mask.any():
raise IndexingError('Unalignable boolean Series key provided')
result = result.astype(bool)._values
else:
# is_bool_indexer has already checked for nulls in the case of an
# object array key, so no check needed here
result = np.asarray(result, dtype=bool)
return result
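# Illustrative sketch (not part of the original module): boolean Series masks
# are aligned to the axis being indexed, which is what check_bool_indexer
# above enforces. The frame and mask below are hypothetical examples that use
# only the public API.
def _example_bool_series_alignment():
    import pandas as pd
    df = pd.DataFrame({'a': [1, 2, 3]}, index=['x', 'y', 'z'])
    mask = pd.Series([True, False, True], index=['x', 'y', 'z'])
    return df.loc[mask]   # rows 'x' and 'z'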
def convert_missing_indexer(indexer):
""" reverse convert a missing indexer, which is a dict
return the scalar indexer and a boolean indicating if we converted """
if isinstance(indexer, dict):
# a missing key (but not a tuple indexer)
indexer = indexer['key']
if isinstance(indexer, bool):
raise KeyError("cannot use a single bool to index into setitem")
return indexer, True
return indexer, False
def convert_from_missing_indexer_tuple(indexer, axes):
""" create a filtered indexer that doesn't have any missing indexers """
def get_indexer(_i, _idx):
return (axes[_i].get_loc(_idx['key'])
if isinstance(_idx, dict) else _idx)
return tuple([get_indexer(_i, _idx) for _i, _idx in enumerate(indexer)])
def maybe_convert_indices(indices, n):
""" if we have negative indicies, translate to postive here
if have indicies that are out-of-bounds, raise an IndexError
"""
if isinstance(indices, list):
indices = np.array(indices)
if len(indices) == 0:
# If list is empty, np.array will return float and cause indexing
# errors.
return np.empty(0, dtype=np.int_)
mask = indices < 0
if mask.any():
indices[mask] += n
mask = (indices >= n) | (indices < 0)
if mask.any():
raise IndexError("indices are out-of-bounds")
return indices
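# Illustrative sketch (not part of the original module): negative positions
# are translated against a hypothetical axis length of 5, and anything still
# out of range raises IndexError.
def _example_maybe_convert_indices():
    converted = maybe_convert_indices([0, -1, -5], 5)   # -> [0, 4, 0]
    try:
        maybe_convert_indices([5], 5)                   # out-of-bounds
    except IndexError:
        pass
    return converted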
def maybe_convert_ix(*args):
"""
We likely want to take the cross-product
"""
ixify = True
for arg in args:
if not isinstance(arg, (np.ndarray, list, ABCSeries, Index)):
ixify = False
if ixify:
return np.ix_(*args)
else:
return args
def is_nested_tuple(tup, labels):
    # check for a compatible nested tuple and MultiIndexes among the axes
if not isinstance(tup, tuple):
return False
    # are we a nested tuple of: tuple, list, slice
for i, k in enumerate(tup):
if isinstance(k, (tuple, list, slice)):
return isinstance(labels, MultiIndex)
return False
def is_list_like_indexer(key):
# allow a list_like, but exclude NamedTuples which can be indexers
return is_list_like(key) and not (isinstance(key, tuple) and type(key) is not tuple)
def is_label_like(key):
# select a label or row
return not isinstance(key, slice) and not is_list_like_indexer(key)
def need_slice(obj):
return (obj.start is not None or
obj.stop is not None or
(obj.step is not None and obj.step != 1))
def maybe_droplevels(index, key):
# drop levels
original_index = index
if isinstance(key, tuple):
for _ in key:
try:
index = index.droplevel(0)
except:
# we have dropped too much, so back out
return original_index
else:
try:
index = index.droplevel(0)
except:
pass
return index
def _non_reducing_slice(slice_):
"""
    Ensure that a slice doesn't reduce to a Series or Scalar.
    Any user-passed `subset` should have this called on it
to make sure we're always working with DataFrames.
"""
# default to column slice, like DataFrame
    # ['A', 'B'] -> IndexSlice[:, ['A', 'B']]
kinds = tuple(list(compat.string_types) +
[ABCSeries, np.ndarray, Index, list])
if isinstance(slice_, kinds):
slice_ = IndexSlice[:, slice_]
def pred(part):
# true when slice does *not* reduce
return isinstance(part, slice) or com.is_list_like(part)
if not com.is_list_like(slice_):
if not isinstance(slice_, slice):
# a 1-d slice, like df.loc[1]
slice_ = [[slice_]]
else:
# slice(a, b, c)
slice_ = [slice_] # to tuplize later
else:
slice_ = [part if pred(part) else [part] for part in slice_]
return tuple(slice_)
def _maybe_numeric_slice(df, slice_, include_bool=False):
"""
    We want nice defaults for background_gradient that don't break
    with non-numeric data. But if slice_ is passed, go with that.
"""
if slice_ is None:
dtypes = [np.number]
if include_bool:
dtypes.append(bool)
slice_ = IndexSlice[:, df.select_dtypes(include=dtypes).columns]
return slice_
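# Illustrative sketch (not part of the original module): with slice_=None the
# helper above narrows a Styler subset to numeric columns; the frame below is
# a hypothetical example and the returned subset can be fed straight to .loc.
def _example_maybe_numeric_slice():
    import pandas as pd
    df = pd.DataFrame({'num': [1.0, 2.0], 'txt': ['a', 'b']})
    subset = _maybe_numeric_slice(df, None)
    return df.loc[subset]   # only the 'num' column remains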
| artistic-2.0 | -3,756,945,072,343,831,000 | 33.733441 | 120 | 0.523101 | false |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/sympy/physics/vector/frame.py | 1 | 27712 | from sympy import (diff, trigsimp, expand, sin, cos, solve, Symbol, sympify,
eye, ImmutableMatrix as Matrix)
from sympy.core.compatibility import string_types, u
from sympy.physics.vector.vector import Vector, _check_vector
__all__ = ['CoordinateSym', 'ReferenceFrame']
class CoordinateSym(Symbol):
"""
    A coordinate symbol/base scalar associated with a ReferenceFrame.
Ideally, users should not instantiate this class. Instances of
this class must only be accessed through the corresponding frame
as 'frame[index]'.
CoordinateSyms having the same frame and index parameters are equal
(even though they may be instantiated separately).
Parameters
==========
name : string
The display name of the CoordinateSym
frame : ReferenceFrame
The reference frame this base scalar belongs to
index : 0, 1 or 2
The index of the dimension denoted by this coordinate variable
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, CoordinateSym
>>> A = ReferenceFrame('A')
>>> A[1]
A_y
>>> type(A[0])
<class 'sympy.physics.vector.frame.CoordinateSym'>
>>> a_y = CoordinateSym('a_y', A, 1)
>>> a_y == A[1]
True
"""
def __new__(cls, name, frame, index):
obj = super(CoordinateSym, cls).__new__(cls, name)
_check_frame(frame)
if index not in range(0, 3):
raise ValueError("Invalid index specified")
obj._id = (frame, index)
return obj
@property
def frame(self):
return self._id[0]
def __eq__(self, other):
#Check if the other object is a CoordinateSym of the same frame
#and same index
if isinstance(other, CoordinateSym):
if other._id == self._id:
return True
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return tuple((self._id[0].__hash__(), self._id[1])).__hash__()
class ReferenceFrame(object):
"""A reference frame in classical mechanics.
ReferenceFrame is a class used to represent a reference frame in classical
mechanics. It has a standard basis of three unit vectors in the frame's
x, y, and z directions.
It also can have a rotation relative to a parent frame; this rotation is
defined by a direction cosine matrix relating this frame's basis vectors to
the parent frame's basis vectors. It can also have an angular velocity
vector, defined in another frame.
"""
def __init__(self, name, indices=None, latexs=None, variables=None):
"""ReferenceFrame initialization method.
A ReferenceFrame has a set of orthonormal basis vectors, along with
orientations relative to other ReferenceFrames and angular velocities
relative to other ReferenceFrames.
Parameters
==========
indices : list (of strings)
If custom indices are desired for console, pretty, and LaTeX
printing, supply three as a list. The basis vectors can then be
accessed with the get_item method.
latexs : list (of strings)
If custom names are desired for LaTeX printing of each basis
vector, supply the names here in a list.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, vlatex
>>> N = ReferenceFrame('N')
>>> N.x
N.x
>>> O = ReferenceFrame('O', indices=('1', '2', '3'))
>>> O.x
O['1']
>>> O['1']
O['1']
>>> P = ReferenceFrame('P', latexs=('A1', 'A2', 'A3'))
>>> vlatex(P.x)
'A1'
"""
if not isinstance(name, string_types):
raise TypeError('Need to supply a valid name')
# The if statements below are for custom printing of basis-vectors for
# each frame.
# First case, when custom indices are supplied
if indices is not None:
if not isinstance(indices, (tuple, list)):
raise TypeError('Supply the indices as a list')
if len(indices) != 3:
raise ValueError('Supply 3 indices')
for i in indices:
if not isinstance(i, string_types):
raise TypeError('Indices must be strings')
self.str_vecs = [(name + '[\'' + indices[0] + '\']'),
(name + '[\'' + indices[1] + '\']'),
(name + '[\'' + indices[2] + '\']')]
self.pretty_vecs = [(u("\033[94m\033[1m") + name.lower() + u("_") +
indices[0] + u("\033[0;0m\x1b[0;0m")),
(u("\033[94m\033[1m") + name.lower() + u("_") +
indices[1] + u("\033[0;0m\x1b[0;0m")),
(u("\033[94m\033[1m") + name.lower() + u("_") +
indices[2] + u("\033[0;0m\x1b[0;0m"))]
self.latex_vecs = [(r"\mathbf{\hat{%s}_{%s}}" % (name.lower(),
indices[0])), (r"\mathbf{\hat{%s}_{%s}}" %
(name.lower(), indices[1])),
(r"\mathbf{\hat{%s}_{%s}}" % (name.lower(),
indices[2]))]
self.indices = indices
# Second case, when no custom indices are supplied
else:
self.str_vecs = [(name + '.x'), (name + '.y'), (name + '.z')]
self.pretty_vecs = [(u("\033[94m\033[1m") + name.lower() +
u("_x\033[0;0m\x1b[0;0m")),
(u("\033[94m\033[1m") + name.lower() +
u("_y\033[0;0m\x1b[0;0m")),
(u("\033[94m\033[1m") + name.lower() +
u("_z\033[0;0m\x1b[0;0m"))]
self.latex_vecs = [(r"\mathbf{\hat{%s}_x}" % name.lower()),
(r"\mathbf{\hat{%s}_y}" % name.lower()),
(r"\mathbf{\hat{%s}_z}" % name.lower())]
self.indices = ['x', 'y', 'z']
# Different step, for custom latex basis vectors
if latexs is not None:
if not isinstance(latexs, (tuple, list)):
raise TypeError('Supply the indices as a list')
if len(latexs) != 3:
raise ValueError('Supply 3 indices')
for i in latexs:
if not isinstance(i, string_types):
raise TypeError('Latex entries must be strings')
self.latex_vecs = latexs
self.name = name
self._var_dict = {}
#The _dcm_dict dictionary will only store the dcms of parent-child
#relationships. The _dcm_cache dictionary will work as the dcm
#cache.
self._dcm_dict = {}
self._dcm_cache = {}
self._ang_vel_dict = {}
self._ang_acc_dict = {}
self._dlist = [self._dcm_dict, self._ang_vel_dict, self._ang_acc_dict]
self._cur = 0
self._x = Vector([(Matrix([1, 0, 0]), self)])
self._y = Vector([(Matrix([0, 1, 0]), self)])
self._z = Vector([(Matrix([0, 0, 1]), self)])
#Associate coordinate symbols wrt this frame
if variables is not None:
if not isinstance(variables, (tuple, list)):
raise TypeError('Supply the variable names as a list/tuple')
if len(variables) != 3:
raise ValueError('Supply 3 variable names')
for i in variables:
if not isinstance(i, string_types):
raise TypeError('Variable names must be strings')
else:
variables = [name + '_x', name + '_y', name + '_z']
self.varlist = (CoordinateSym(variables[0], self, 0), \
CoordinateSym(variables[1], self, 1), \
CoordinateSym(variables[2], self, 2))
def __getitem__(self, ind):
"""
        Returns the basis vector for the provided index, if the index is a
        string. If the index is a number, returns the coordinate variable
        corresponding to that index.
"""
if not isinstance(ind, str):
if ind < 3:
return self.varlist[ind]
else:
raise ValueError("Invalid index provided")
if self.indices[0] == ind:
return self.x
if self.indices[1] == ind:
return self.y
if self.indices[2] == ind:
return self.z
else:
raise ValueError('Not a defined index')
def __iter__(self):
return iter([self.x, self.y, self.z])
def __str__(self):
"""Returns the name of the frame. """
return self.name
__repr__ = __str__
def _dict_list(self, other, num):
"""Creates a list from self to other using _dcm_dict. """
outlist = [[self]]
oldlist = [[]]
while outlist != oldlist:
oldlist = outlist[:]
for i, v in enumerate(outlist):
templist = v[-1]._dlist[num].keys()
for i2, v2 in enumerate(templist):
if not v.__contains__(v2):
littletemplist = v + [v2]
if not outlist.__contains__(littletemplist):
outlist.append(littletemplist)
for i, v in enumerate(oldlist):
if v[-1] != other:
outlist.remove(v)
outlist.sort(key=len)
if len(outlist) != 0:
return outlist[0]
raise ValueError('No Connecting Path found between ' + self.name +
' and ' + other.name)
def _w_diff_dcm(self, otherframe):
"""Angular velocity from time differentiating the DCM. """
from sympy.physics.vector.functions import dynamicsymbols
dcm2diff = self.dcm(otherframe)
diffed = dcm2diff.diff(dynamicsymbols._t)
angvelmat = diffed * dcm2diff.T
w1 = trigsimp(expand(angvelmat[7]), recursive=True)
w2 = trigsimp(expand(angvelmat[2]), recursive=True)
w3 = trigsimp(expand(angvelmat[3]), recursive=True)
return -Vector([(Matrix([w1, w2, w3]), self)])
def variable_map(self, otherframe):
"""
Returns a dictionary which expresses the coordinate variables
of this frame in terms of the variables of otherframe.
If Vector.simp is True, returns a simplified version of the mapped
values. Else, returns them without simplification.
Simplification of the expressions may take time.
Parameters
==========
otherframe : ReferenceFrame
The other frame to map the variables to
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, dynamicsymbols
>>> A = ReferenceFrame('A')
>>> q = dynamicsymbols('q')
>>> B = A.orientnew('B', 'Axis', [q, A.z])
>>> A.variable_map(B)
{A_x: B_x*cos(q(t)) - B_y*sin(q(t)), A_y: B_x*sin(q(t)) + B_y*cos(q(t)), A_z: B_z}
"""
_check_frame(otherframe)
if (otherframe, Vector.simp) in self._var_dict:
return self._var_dict[(otherframe, Vector.simp)]
else:
vars_matrix = self.dcm(otherframe) * Matrix(otherframe.varlist)
mapping = {}
for i, x in enumerate(self):
if Vector.simp:
mapping[self.varlist[i]] = trigsimp(vars_matrix[i], method='fu')
else:
mapping[self.varlist[i]] = vars_matrix[i]
self._var_dict[(otherframe, Vector.simp)] = mapping
return mapping
def ang_acc_in(self, otherframe):
"""Returns the angular acceleration Vector of the ReferenceFrame.
Effectively returns the Vector:
^N alpha ^B
        which represents the angular acceleration of B in N, where B is self, and
N is otherframe.
Parameters
==========
otherframe : ReferenceFrame
The ReferenceFrame which the angular acceleration is returned in.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector
>>> N = ReferenceFrame('N')
>>> A = ReferenceFrame('A')
>>> V = 10 * N.x
>>> A.set_ang_acc(N, V)
>>> A.ang_acc_in(N)
10*N.x
"""
_check_frame(otherframe)
if otherframe in self._ang_acc_dict:
return self._ang_acc_dict[otherframe]
else:
return self.ang_vel_in(otherframe).dt(otherframe)
def ang_vel_in(self, otherframe):
"""Returns the angular velocity Vector of the ReferenceFrame.
Effectively returns the Vector:
^N omega ^B
        which represents the angular velocity of B in N, where B is self, and
N is otherframe.
Parameters
==========
otherframe : ReferenceFrame
The ReferenceFrame which the angular velocity is returned in.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector
>>> N = ReferenceFrame('N')
>>> A = ReferenceFrame('A')
>>> V = 10 * N.x
>>> A.set_ang_vel(N, V)
>>> A.ang_vel_in(N)
10*N.x
"""
_check_frame(otherframe)
flist = self._dict_list(otherframe, 1)
outvec = Vector(0)
for i in range(len(flist) - 1):
outvec += flist[i]._ang_vel_dict[flist[i + 1]]
return outvec
def dcm(self, otherframe):
"""The direction cosine matrix between frames.
This gives the DCM between this frame and the otherframe.
The format is N.xyz = N.dcm(B) * B.xyz
A SymPy Matrix is returned.
Parameters
==========
otherframe : ReferenceFrame
The otherframe which the DCM is generated to.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector
>>> from sympy import symbols
>>> q1 = symbols('q1')
>>> N = ReferenceFrame('N')
>>> A = N.orientnew('A', 'Axis', [q1, N.x])
>>> N.dcm(A)
Matrix([
[1, 0, 0],
[0, cos(q1), -sin(q1)],
[0, sin(q1), cos(q1)]])
"""
_check_frame(otherframe)
#Check if the dcm wrt that frame has already been calculated
if otherframe in self._dcm_cache:
return self._dcm_cache[otherframe]
flist = self._dict_list(otherframe, 0)
outdcm = eye(3)
for i in range(len(flist) - 1):
outdcm = outdcm * flist[i]._dcm_dict[flist[i + 1]]
#After calculation, store the dcm in dcm cache for faster
#future retrieval
self._dcm_cache[otherframe] = outdcm
otherframe._dcm_cache[self] = outdcm.T
return outdcm
def orient(self, parent, rot_type, amounts, rot_order=''):
"""Defines the orientation of this frame relative to a parent frame.
Parameters
==========
parent : ReferenceFrame
The frame that this ReferenceFrame will have its orientation matrix
defined in relation to.
rot_type : str
The type of orientation matrix that is being created. Supported
types are 'Body', 'Space', 'Quaternion', and 'Axis'. See examples
for correct usage.
amounts : list OR value
The quantities that the orientation matrix will be defined by.
rot_order : str
If applicable, the order of a series of rotations.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector
>>> from sympy import symbols
>>> q0, q1, q2, q3, q4 = symbols('q0 q1 q2 q3 q4')
>>> N = ReferenceFrame('N')
>>> B = ReferenceFrame('B')
Now we have a choice of how to implement the orientation. First is
Body. Body orientation takes this reference frame through three
successive simple rotations. Acceptable rotation orders are of length
        3, expressed in XYZ or 123, and cannot have a rotation about an
axis twice in a row.
>>> B.orient(N, 'Body', [q1, q2, q3], '123')
>>> B.orient(N, 'Body', [q1, q2, 0], 'ZXZ')
>>> B.orient(N, 'Body', [0, 0, 0], 'XYX')
Next is Space. Space is like Body, but the rotations are applied in the
opposite order.
>>> B.orient(N, 'Space', [q1, q2, q3], '312')
Next is Quaternion. This orients the new ReferenceFrame with
Quaternions, defined as a finite rotation about lambda, a unit vector,
by some amount theta.
This orientation is described by four parameters:
q0 = cos(theta/2)
q1 = lambda_x sin(theta/2)
q2 = lambda_y sin(theta/2)
q3 = lambda_z sin(theta/2)
Quaternion does not take in a rotation order.
>>> B.orient(N, 'Quaternion', [q0, q1, q2, q3])
Last is Axis. This is a rotation about an arbitrary, non-time-varying
axis by some angle. The axis is supplied as a Vector. This is how
simple rotations are defined.
>>> B.orient(N, 'Axis', [q1, N.x + 2 * N.y])
"""
from sympy.physics.vector.functions import dynamicsymbols
_check_frame(parent)
amounts = list(amounts)
for i, v in enumerate(amounts):
if not isinstance(v, Vector):
amounts[i] = sympify(v)
def _rot(axis, angle):
"""DCM for simple axis 1,2,or 3 rotations. """
if axis == 1:
return Matrix([[1, 0, 0],
[0, cos(angle), -sin(angle)],
[0, sin(angle), cos(angle)]])
elif axis == 2:
return Matrix([[cos(angle), 0, sin(angle)],
[0, 1, 0],
[-sin(angle), 0, cos(angle)]])
elif axis == 3:
return Matrix([[cos(angle), -sin(angle), 0],
[sin(angle), cos(angle), 0],
[0, 0, 1]])
approved_orders = ('123', '231', '312', '132', '213', '321', '121',
'131', '212', '232', '313', '323', '')
rot_order = str(
rot_order).upper() # Now we need to make sure XYZ = 123
rot_type = rot_type.upper()
rot_order = [i.replace('X', '1') for i in rot_order]
rot_order = [i.replace('Y', '2') for i in rot_order]
rot_order = [i.replace('Z', '3') for i in rot_order]
rot_order = ''.join(rot_order)
        if rot_order not in approved_orders:
raise TypeError('The supplied order is not an approved type')
parent_orient = []
if rot_type == 'AXIS':
if not rot_order == '':
raise TypeError('Axis orientation takes no rotation order')
            if not (isinstance(amounts, (list, tuple)) and (len(amounts) == 2)):
raise TypeError('Amounts are a list or tuple of length 2')
theta = amounts[0]
axis = amounts[1]
axis = _check_vector(axis)
if not axis.dt(parent) == 0:
raise ValueError('Axis cannot be time-varying')
axis = axis.express(parent).normalize()
axis = axis.args[0][0]
parent_orient = ((eye(3) - axis * axis.T) * cos(theta) +
Matrix([[0, -axis[2], axis[1]], [axis[2], 0, -axis[0]],
[-axis[1], axis[0], 0]]) * sin(theta) + axis * axis.T)
elif rot_type == 'QUATERNION':
if not rot_order == '':
raise TypeError(
'Quaternion orientation takes no rotation order')
            if not (isinstance(amounts, (list, tuple)) and (len(amounts) == 4)):
raise TypeError('Amounts are a list or tuple of length 4')
q0, q1, q2, q3 = amounts
parent_orient = (Matrix([[q0 ** 2 + q1 ** 2 - q2 ** 2 - q3 **
2, 2 * (q1 * q2 - q0 * q3), 2 * (q0 * q2 + q1 * q3)],
[2 * (q1 * q2 + q0 * q3), q0 ** 2 - q1 ** 2 + q2 ** 2 - q3 ** 2,
2 * (q2 * q3 - q0 * q1)], [2 * (q1 * q3 - q0 * q2), 2 * (q0 *
q1 + q2 * q3), q0 ** 2 - q1 ** 2 - q2 ** 2 + q3 ** 2]]))
elif rot_type == 'BODY':
            if not (len(amounts) == 3 and len(rot_order) == 3):
raise TypeError('Body orientation takes 3 values & 3 orders')
a1 = int(rot_order[0])
a2 = int(rot_order[1])
a3 = int(rot_order[2])
parent_orient = (_rot(a1, amounts[0]) * _rot(a2, amounts[1])
* _rot(a3, amounts[2]))
elif rot_type == 'SPACE':
            if not (len(amounts) == 3 and len(rot_order) == 3):
raise TypeError('Space orientation takes 3 values & 3 orders')
a1 = int(rot_order[0])
a2 = int(rot_order[1])
a3 = int(rot_order[2])
parent_orient = (_rot(a3, amounts[2]) * _rot(a2, amounts[1])
* _rot(a1, amounts[0]))
else:
raise NotImplementedError('That is not an implemented rotation')
#Reset the _dcm_cache of this frame, and remove it from the _dcm_caches
#of the frames it is linked to. Also remove it from the _dcm_dict of
#its parent
frames = self._dcm_cache.keys()
for frame in frames:
if frame in self._dcm_dict:
del frame._dcm_dict[self]
del frame._dcm_cache[self]
#Add the dcm relationship to _dcm_dict
self._dcm_dict = self._dlist[0] = {}
self._dcm_dict.update({parent: parent_orient.T})
parent._dcm_dict.update({self: parent_orient})
#Also update the dcm cache after resetting it
self._dcm_cache = {}
self._dcm_cache.update({parent: parent_orient.T})
parent._dcm_cache.update({self: parent_orient})
if rot_type == 'QUATERNION':
t = dynamicsymbols._t
q0, q1, q2, q3 = amounts
q0d = diff(q0, t)
q1d = diff(q1, t)
q2d = diff(q2, t)
q3d = diff(q3, t)
w1 = 2 * (q1d * q0 + q2d * q3 - q3d * q2 - q0d * q1)
w2 = 2 * (q2d * q0 + q3d * q1 - q1d * q3 - q0d * q2)
w3 = 2 * (q3d * q0 + q1d * q2 - q2d * q1 - q0d * q3)
wvec = Vector([(Matrix([w1, w2, w3]), self)])
elif rot_type == 'AXIS':
thetad = (amounts[0]).diff(dynamicsymbols._t)
wvec = thetad * amounts[1].express(parent).normalize()
else:
try:
from sympy.polys.polyerrors import CoercionFailed
from sympy.physics.vector.functions import kinematic_equations
q1, q2, q3 = amounts
u1, u2, u3 = dynamicsymbols('u1, u2, u3')
templist = kinematic_equations([u1, u2, u3], [q1, q2, q3],
rot_type, rot_order)
templist = [expand(i) for i in templist]
td = solve(templist, [u1, u2, u3])
u1 = expand(td[u1])
u2 = expand(td[u2])
u3 = expand(td[u3])
wvec = u1 * self.x + u2 * self.y + u3 * self.z
except (CoercionFailed, AssertionError):
wvec = self._w_diff_dcm(parent)
self._ang_vel_dict.update({parent: wvec})
parent._ang_vel_dict.update({self: -wvec})
self._var_dict = {}
def orientnew(self, newname, rot_type, amounts, rot_order='', variables=None,
indices=None, latexs=None):
"""Creates a new ReferenceFrame oriented with respect to this Frame.
See ReferenceFrame.orient() for acceptable rotation types, amounts,
and orders. Parent is going to be self.
Parameters
==========
newname : str
The name for the new ReferenceFrame
rot_type : str
The type of orientation matrix that is being created.
amounts : list OR value
The quantities that the orientation matrix will be defined by.
rot_order : str
If applicable, the order of a series of rotations.
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector
>>> from sympy import symbols
>>> q1 = symbols('q1')
>>> N = ReferenceFrame('N')
>>> A = N.orientnew('A', 'Axis', [q1, N.x])
.orient() documentation:\n
========================
"""
        newframe = ReferenceFrame(newname, indices=indices, latexs=latexs,
                                  variables=variables)
newframe.orient(self, rot_type, amounts, rot_order)
return newframe
orientnew.__doc__ += orient.__doc__
def set_ang_acc(self, otherframe, value):
"""Define the angular acceleration Vector in a ReferenceFrame.
Defines the angular acceleration of this ReferenceFrame, in another.
Angular acceleration can be defined with respect to multiple different
ReferenceFrames. Care must be taken to not create loops which are
inconsistent.
Parameters
==========
otherframe : ReferenceFrame
A ReferenceFrame to define the angular acceleration in
value : Vector
The Vector representing angular acceleration
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector
>>> N = ReferenceFrame('N')
>>> A = ReferenceFrame('A')
>>> V = 10 * N.x
>>> A.set_ang_acc(N, V)
>>> A.ang_acc_in(N)
10*N.x
"""
if value == 0:
value = Vector(0)
value = _check_vector(value)
_check_frame(otherframe)
self._ang_acc_dict.update({otherframe: value})
otherframe._ang_acc_dict.update({self: -value})
def set_ang_vel(self, otherframe, value):
"""Define the angular velocity vector in a ReferenceFrame.
Defines the angular velocity of this ReferenceFrame, in another.
Angular velocity can be defined with respect to multiple different
ReferenceFrames. Care must be taken to not create loops which are
inconsistent.
Parameters
==========
otherframe : ReferenceFrame
A ReferenceFrame to define the angular velocity in
value : Vector
The Vector representing angular velocity
Examples
========
>>> from sympy.physics.vector import ReferenceFrame, Vector
>>> N = ReferenceFrame('N')
>>> A = ReferenceFrame('A')
>>> V = 10 * N.x
>>> A.set_ang_vel(N, V)
>>> A.ang_vel_in(N)
10*N.x
"""
if value == 0:
value = Vector(0)
value = _check_vector(value)
_check_frame(otherframe)
self._ang_vel_dict.update({otherframe: value})
otherframe._ang_vel_dict.update({self: -value})
@property
def x(self):
"""The basis Vector for the ReferenceFrame, in the x direction. """
return self._x
@property
def y(self):
"""The basis Vector for the ReferenceFrame, in the y direction. """
return self._y
@property
def z(self):
"""The basis Vector for the ReferenceFrame, in the z direction. """
return self._z
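# Illustrative sketch (not part of the original module): a quick check that a
# simple 'Axis' orientation produces the angular velocity q'(t) about the
# fixed axis, matching the wvec branch computed in ReferenceFrame.orient()
# above. The frame and symbol names are arbitrary examples.
def _example_axis_orientation_ang_vel():
    from sympy.physics.vector.functions import dynamicsymbols
    q = dynamicsymbols('q')
    N = ReferenceFrame('N')
    B = N.orientnew('B', 'Axis', [q, N.z])
    return B.ang_vel_in(N)   # expected: Derivative(q, t)*N.z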
def _check_frame(other):
from .vector import VectorTypeError
if not isinstance(other, ReferenceFrame):
raise VectorTypeError(other, ReferenceFrame('A'))
| gpl-3.0 | 1,754,418,600,326,618,600 | 36.347709 | 90 | 0.530456 | false |
yosshy/nova | nova/virt/driver.py | 29 | 58892 | # Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Driver base-classes:
(Beginning of) the contract that compute drivers must follow, and shared
types that support that contract
"""
import sys
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from nova.i18n import _, _LE, _LI
from nova import utils
from nova.virt import event as virtevent
driver_opts = [
cfg.StrOpt('compute_driver',
help='Driver to use for controlling virtualization. Options '
'include: libvirt.LibvirtDriver, xenapi.XenAPIDriver, '
'fake.FakeDriver, ironic.IronicDriver, '
'vmwareapi.VMwareVCDriver, hyperv.HyperVDriver'),
cfg.StrOpt('default_ephemeral_format',
help='The default format an ephemeral_volume will be '
'formatted with on creation.'),
cfg.StrOpt('preallocate_images',
default='none',
choices=('none', 'space'),
help='VM image preallocation mode: '
'"none" => no storage provisioning is done up front, '
'"space" => storage is fully allocated at instance start'),
cfg.BoolOpt('use_cow_images',
default=True,
help='Whether to use cow images'),
cfg.BoolOpt('vif_plugging_is_fatal',
default=True,
help="Fail instance boot if vif plugging fails"),
cfg.IntOpt('vif_plugging_timeout',
default=300,
help='Number of seconds to wait for neutron vif plugging '
'events to arrive before continuing or failing (see '
'vif_plugging_is_fatal). If this is set to zero and '
'vif_plugging_is_fatal is False, events should not '
'be expected to arrive at all.'),
]
CONF = cfg.CONF
CONF.register_opts(driver_opts)
LOG = logging.getLogger(__name__)
def driver_dict_from_config(named_driver_config, *args, **kwargs):
driver_registry = dict()
for driver_str in named_driver_config:
driver_type, _sep, driver = driver_str.partition('=')
driver_class = importutils.import_class(driver)
driver_registry[driver_type] = driver_class(*args, **kwargs)
return driver_registry
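# Illustrative sketch (not part of the original module): each "name=path"
# entry is split on '=' and the class at "path" is instantiated with the
# shared arguments. The stdlib class path below is an arbitrary stand-in for
# a real driver class.
def _example_driver_dict_from_config():
    registry = driver_dict_from_config(['plain=collections.OrderedDict'])
    return registry['plain']   # an OrderedDict built with no extra arguments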
def get_block_device_info(instance, block_device_mapping):
"""Converts block device mappings for an instance to driver format.
Virt drivers expect block device mapping to be presented in the format
of a dict containing the following keys:
- root_device_name: device name of the root disk
- ephemerals: a (potentially empty) list of DriverEphemeralBlockDevice
instances
- swap: An instance of DriverSwapBlockDevice or None
- block_device_mapping: a (potentially empty) list of
                            DriverVolumeBlockDevice or any of its more
specialized subclasses.
"""
from nova.virt import block_device as virt_block_device
block_device_info = {
'root_device_name': instance.root_device_name,
'ephemerals': virt_block_device.convert_ephemerals(
block_device_mapping),
'block_device_mapping':
virt_block_device.convert_all_volumes(*block_device_mapping)
}
swap_list = virt_block_device.convert_swap(block_device_mapping)
block_device_info['swap'] = virt_block_device.get_swap(swap_list)
return block_device_info
def block_device_info_get_root(block_device_info):
block_device_info = block_device_info or {}
return block_device_info.get('root_device_name')
def block_device_info_get_swap(block_device_info):
block_device_info = block_device_info or {}
return block_device_info.get('swap') or {'device_name': None,
'swap_size': 0}
def swap_is_usable(swap):
return swap and swap['device_name'] and swap['swap_size'] > 0
def block_device_info_get_ephemerals(block_device_info):
block_device_info = block_device_info or {}
ephemerals = block_device_info.get('ephemerals') or []
return ephemerals
def block_device_info_get_mapping(block_device_info):
block_device_info = block_device_info or {}
block_device_mapping = block_device_info.get('block_device_mapping') or []
return block_device_mapping
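# Illustrative sketch (not part of the original module): the accessors above
# tolerate a missing block_device_info and fall back to safe defaults. The
# dict below is a hypothetical, hand-built example rather than a real
# mapping produced by get_block_device_info().
def _example_block_device_info_accessors():
    info = {'root_device_name': '/dev/vda',
            'swap': {'device_name': '/dev/vdb', 'swap_size': 1024},
            'ephemerals': [],
            'block_device_mapping': []}
    assert block_device_info_get_root(info) == '/dev/vda'
    assert swap_is_usable(block_device_info_get_swap(info))
    assert block_device_info_get_mapping(None) == []
    return info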
class ComputeDriver(object):
"""Base class for compute drivers.
The interface to this class talks in terms of 'instances' (Amazon EC2 and
internal Nova terminology), by which we mean 'running virtual machine'
(XenAPI terminology) or domain (Xen or libvirt terminology).
An instance has an ID, which is the identifier chosen by Nova to represent
the instance further up the stack. This is unfortunately also called a
'name' elsewhere. As far as this layer is concerned, 'instance ID' and
'instance name' are synonyms.
Note that the instance ID or name is not human-readable or
customer-controlled -- it's an internal ID chosen by Nova. At the
nova.virt layer, instances do not have human-readable names at all -- such
things are only known higher up the stack.
Most virtualization platforms will also have their own identity schemes,
to uniquely identify a VM or domain. These IDs must stay internal to the
platform-specific layer, and never escape the connection interface. The
platform-specific layer is responsible for keeping track of which instance
ID maps to which platform-specific ID, and vice versa.
Some methods here take an instance of nova.compute.service.Instance. This
is the data structure used by nova.compute to store details regarding an
instance, and pass them into this layer. This layer is responsible for
translating that generic data structure into terms that are specific to the
virtualization platform.
"""
capabilities = {
"has_imagecache": False,
"supports_recreate": False,
"supports_migrate_to_same_host": False
}
def __init__(self, virtapi):
self.virtapi = virtapi
self._compute_event_callback = None
def init_host(self, host):
"""Initialize anything that is necessary for the driver to function,
including catching up with currently running VM's on the given host.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def cleanup_host(self, host):
"""Clean up anything that is necessary for the driver gracefully stop,
including ending remote sessions. This is optional.
"""
pass
def get_info(self, instance):
"""Get the current status of an instance, by name (not ID!)
:param instance: nova.objects.instance.Instance object
Returns a InstanceInfo object
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_num_instances(self):
"""Return the total number of virtual machines.
Return the number of virtual machines that the hypervisor knows
about.
.. note::
This implementation works for all drivers, but it is
not particularly efficient. Maintainers of the virt drivers are
encouraged to override this method with something more
efficient.
"""
return len(self.list_instances())
def instance_exists(self, instance):
"""Checks existence of an instance on the host.
:param instance: The instance to lookup
Returns True if an instance with the supplied ID exists on
the host, False otherwise.
.. note::
This implementation works for all drivers, but it is
not particularly efficient. Maintainers of the virt drivers are
encouraged to override this method with something more
efficient.
"""
try:
return instance.uuid in self.list_instance_uuids()
except NotImplementedError:
return instance.name in self.list_instances()
def estimate_instance_overhead(self, instance_info):
"""Estimate the virtualization overhead required to build an instance
of the given flavor.
Defaults to zero, drivers should override if per-instance overhead
calculations are desired.
:param instance_info: Instance/flavor to calculate overhead for.
:returns: Dict of estimated overhead values.
"""
return {'memory_mb': 0}
def list_instances(self):
"""Return the names of all the instances known to the virtualization
layer, as a list.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def list_instance_uuids(self):
"""Return the UUIDS of all the instances known to the virtualization
layer, as a list.
"""
raise NotImplementedError()
def rebuild(self, context, instance, image_meta, injected_files,
admin_password, bdms, detach_block_devices,
attach_block_devices, network_info=None,
recreate=False, block_device_info=None,
preserve_ephemeral=False):
"""Destroy and re-make this instance.
A 'rebuild' effectively purges all existing data from the system and
remakes the VM with given 'metadata' and 'personalities'.
This base class method shuts down the VM, detaches all block devices,
then spins up the new VM afterwards. It may be overridden by
hypervisors that need to - e.g. for optimisations, or when the 'VM'
is actually proxied and needs to be held across the shutdown + spin
up steps.
:param context: security context
:param instance: nova.objects.instance.Instance
This function should use the data there to guide
the creation of the new instance.
:param image_meta: image object returned by nova.image.glance that
defines the image from which to boot this instance
:param injected_files: User files to inject into instance.
:param admin_password: Administrator password to set in instance.
:param bdms: block-device-mappings to use for rebuild
:param detach_block_devices: function to detach block devices. See
nova.compute.manager.ComputeManager:_rebuild_default_impl for
usage.
:param attach_block_devices: function to attach block devices. See
nova.compute.manager.ComputeManager:_rebuild_default_impl for
usage.
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param recreate: True if the instance is being recreated on a new
hypervisor - all the cleanup of old state is skipped.
:param block_device_info: Information about block devices to be
attached to the instance.
:param preserve_ephemeral: True if the default ephemeral storage
partition must be preserved on rebuild
"""
raise NotImplementedError()
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
"""Create a new instance/VM/domain on the virtualization platform.
Once this successfully completes, the instance should be
running (power_state.RUNNING).
If this fails, any partial instance should be completely
cleaned up, and the virtualization platform should be in the state
that it was before this call began.
:param context: security context
:param instance: nova.objects.instance.Instance
This function should use the data there to guide
the creation of the new instance.
:param image_meta: image object returned by nova.image.glance that
defines the image from which to boot this instance
:param injected_files: User files to inject into instance.
:param admin_password: Administrator password to set in instance.
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param block_device_info: Information about block devices to be
attached to the instance.
"""
raise NotImplementedError()
def destroy(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None):
"""Destroy the specified instance from the Hypervisor.
If the instance is not found (for example if networking failed), this
function should still succeed. It's probably a good idea to log a
warning in that case.
:param context: security context
:param instance: Instance object as returned by DB layer.
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param block_device_info: Information about block devices that should
be detached from the instance.
:param destroy_disks: Indicates if disks should be destroyed
:param migrate_data: implementation specific params
"""
raise NotImplementedError()
def cleanup(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None, destroy_vifs=True):
"""Cleanup the instance resources .
Instance should have been destroyed from the Hypervisor before calling
this method.
:param context: security context
:param instance: Instance object as returned by DB layer.
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param block_device_info: Information about block devices that should
be detached from the instance.
:param destroy_disks: Indicates if disks should be destroyed
:param migrate_data: implementation specific params
"""
raise NotImplementedError()
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
"""Reboot the specified instance.
After this is called successfully, the instance's state
goes back to power_state.RUNNING. The virtualization
platform should ensure that the reboot action has completed
successfully even in cases in which the underlying domain/vm
is paused or halted/stopped.
:param instance: nova.objects.instance.Instance
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param reboot_type: Either a HARD or SOFT reboot
:param block_device_info: Info pertaining to attached volumes
:param bad_volumes_callback: Function to handle any bad volumes
encountered
"""
raise NotImplementedError()
def get_console_pool_info(self, console_type):
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_console_output(self, context, instance):
"""Get console output for an instance
:param context: security context
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def get_vnc_console(self, context, instance):
"""Get connection info for a vnc console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleVNC
"""
raise NotImplementedError()
def get_spice_console(self, context, instance):
"""Get connection info for a spice console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleSpice
"""
raise NotImplementedError()
def get_rdp_console(self, context, instance):
"""Get connection info for a rdp console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleRDP
"""
raise NotImplementedError()
def get_serial_console(self, context, instance):
"""Get connection info for a serial console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleSerial
"""
raise NotImplementedError()
def get_mks_console(self, context, instance):
"""Get connection info for a MKS console.
:param context: security context
:param instance: nova.objects.instance.Instance
        :returns: an instance of console.type.ConsoleMKS
"""
raise NotImplementedError()
def get_diagnostics(self, instance):
"""Return data about VM diagnostics.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_instance_diagnostics(self, instance):
"""Return data about VM diagnostics.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def get_all_bw_counters(self, instances):
"""Return bandwidth usage counters for each interface on each
running VM.
:param instances: nova.objects.instance.InstanceList
"""
raise NotImplementedError()
def get_all_volume_usage(self, context, compute_host_bdms):
"""Return usage info for volumes attached to vms on
           a given host.
"""
raise NotImplementedError()
def get_host_ip_addr(self):
"""Retrieves the IP address of the dom0
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def attach_volume(self, context, connection_info, instance, mountpoint,
disk_bus=None, device_type=None, encryption=None):
"""Attach the disk to the instance at mountpoint using info."""
raise NotImplementedError()
def detach_volume(self, connection_info, instance, mountpoint,
encryption=None):
"""Detach the disk attached to the instance."""
raise NotImplementedError()
def swap_volume(self, old_connection_info, new_connection_info,
instance, mountpoint, resize_to):
"""Replace the disk attached to the instance.
:param instance: nova.objects.instance.Instance
:param resize_to: This parameter is used to indicate the new volume
                          size when the new volume is larger than the old
                          volume. The unit is gigabytes.
"""
raise NotImplementedError()
def attach_interface(self, instance, image_meta, vif):
"""Attach an interface to the instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def detach_interface(self, instance, vif):
"""Detach an interface from the instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None,
timeout=0, retry_interval=0):
"""Transfers the disk of a running instance in multiple phases, turning
off the instance before the end.
:param instance: nova.objects.instance.Instance
:param timeout: time to wait for GuestOS to shutdown
:param retry_interval: How often to signal guest while
waiting for it to shutdown
"""
raise NotImplementedError()
def snapshot(self, context, instance, image_id, update_task_state):
"""Snapshots the specified instance.
:param context: security context
:param instance: nova.objects.instance.Instance
:param image_id: Reference to a pre-created image that will
hold the snapshot.
"""
raise NotImplementedError()
def post_interrupted_snapshot_cleanup(self, context, instance):
"""Cleans up any resources left after an interrupted snapshot.
:param context: security context
:param instance: nova.objects.instance.Instance
"""
pass
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info=None, power_on=True):
"""Completes a resize.
:param context: the context for the migration/resize
:param migration: the migrate/resize information
:param instance: nova.objects.instance.Instance being migrated/resized
:param disk_info: the newly transferred disk information
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param image_meta: image object returned by nova.image.glance that
defines the image from which this instance
was created
:param resize_instance: True if the instance is being resized,
False otherwise
:param block_device_info: instance volume block device info
:param power_on: True if the instance should be powered on, False
otherwise
"""
raise NotImplementedError()
def confirm_migration(self, migration, instance, network_info):
"""Confirms a resize, destroying the source VM.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
"""Finish reverting a resize.
:param context: the context for the finish_revert_migration
:param instance: nova.objects.instance.Instance being migrated/resized
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param block_device_info: instance volume block device info
:param power_on: True if the instance should be powered on, False
otherwise
"""
raise NotImplementedError()
def pause(self, instance):
"""Pause the specified instance.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def unpause(self, instance):
"""Unpause paused VM instance.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def suspend(self, context, instance):
"""suspend the specified instance.
:param context: the context for the suspend
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def resume(self, context, instance, network_info, block_device_info=None):
"""resume the specified instance.
:param context: the context for the resume
:param instance: nova.objects.instance.Instance being resumed
:param network_info:
:py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
:param block_device_info: instance volume block device info
"""
raise NotImplementedError()
def resume_state_on_host_boot(self, context, instance, network_info,
block_device_info=None):
"""resume guest state when a host is booted.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def rescue(self, context, instance, network_info, image_meta,
rescue_password):
"""Rescue the specified instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def set_bootable(self, instance, is_bootable):
"""Set the ability to power on/off an instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def unrescue(self, instance, network_info):
"""Unrescue the specified instance.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def power_off(self, instance, timeout=0, retry_interval=0):
"""Power off the specified instance.
:param instance: nova.objects.instance.Instance
:param timeout: time to wait for GuestOS to shutdown
:param retry_interval: How often to signal guest while
waiting for it to shutdown
"""
raise NotImplementedError()
def power_on(self, context, instance, network_info,
block_device_info=None):
"""Power on the specified instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def inject_nmi(self, instance):
"""Inject an NMI to the specified instance.
:param instance: nova objects.instance.Instance
"""
raise NotImplementedError()
def soft_delete(self, instance):
"""Soft delete the specified instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def restore(self, instance):
"""Restore the specified instance.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def get_available_resource(self, nodename):
"""Retrieve resource information.
This method is called when nova-compute launches, and
as part of a periodic task that records the results in the DB.
:param nodename:
node which the caller want to get resources from
a driver that manages only one node can safely ignore this
:returns: Dictionary describing resources
"""
raise NotImplementedError()
def pre_live_migration(self, context, instance, block_device_info,
network_info, disk_info, migrate_data=None):
"""Prepare an instance for live migration
:param context: security context
:param instance: nova.objects.instance.Instance object
:param block_device_info: instance block device information
:param network_info: instance network information
:param disk_info: instance disk information
:param migrate_data: implementation specific data dict.
"""
raise NotImplementedError()
def live_migration(self, context, instance, dest,
post_method, recover_method, block_migration=False,
migrate_data=None):
"""Live migration of an instance to another host.
:param context: security context
:param instance:
nova.db.sqlalchemy.models.Instance object
instance object that is migrated.
:param dest: destination host
:param post_method:
post operation method.
expected nova.compute.manager._post_live_migration.
:param recover_method:
recovery method when any exception occurs.
expected nova.compute.manager._rollback_live_migration.
:param block_migration: if true, migrate VM disk.
:param migrate_data: implementation specific params.
"""
raise NotImplementedError()
def rollback_live_migration_at_destination(self, context, instance,
network_info,
block_device_info,
destroy_disks=True,
migrate_data=None):
"""Clean up destination node after a failed live migration.
:param context: security context
:param instance: instance object that was being migrated
:param network_info: instance network information
:param block_device_info: instance block device information
:param destroy_disks:
if true, destroy disks at destination during cleanup
:param migrate_data: implementation specific params
"""
raise NotImplementedError()
def post_live_migration(self, context, instance, block_device_info,
migrate_data=None):
"""Post operation of live migration at source host.
:param context: security context
:instance: instance object that was migrated
:block_device_info: instance block device information
:param migrate_data: if not None, it is a dict which has data
"""
pass
def post_live_migration_at_source(self, context, instance, network_info):
"""Unplug VIFs from networks at source.
:param context: security context
:param instance: instance object reference
:param network_info: instance network information
"""
raise NotImplementedError(_("Hypervisor driver does not support "
"post_live_migration_at_source method"))
def post_live_migration_at_destination(self, context, instance,
network_info,
block_migration=False,
block_device_info=None):
"""Post operation of live migration at destination host.
:param context: security context
:param instance: instance object that is migrated
:param network_info: instance network information
:param block_migration: if true, post operation of block_migration.
"""
raise NotImplementedError()
def check_instance_shared_storage_local(self, context, instance):
"""Check if instance files located on shared storage.
This runs check on the destination host, and then calls
back to the source host to check the results.
:param context: security context
:param instance: nova.objects.instance.Instance object
"""
raise NotImplementedError()
def check_instance_shared_storage_remote(self, context, data):
"""Check if instance files located on shared storage.
:param context: security context
:param data: result of check_instance_shared_storage_local
"""
raise NotImplementedError()
def check_instance_shared_storage_cleanup(self, context, data):
"""Do cleanup on host after check_instance_shared_storage calls
:param context: security context
:param data: result of check_instance_shared_storage_local
"""
pass
def check_can_live_migrate_destination(self, context, instance,
src_compute_info, dst_compute_info,
block_migration=False,
disk_over_commit=False):
"""Check if it is possible to execute live migration.
This runs checks on the destination host, and then calls
back to the source host to check the results.
:param context: security context
:param instance: nova.db.sqlalchemy.models.Instance
:param src_compute_info: Info about the sending machine
:param dst_compute_info: Info about the receiving machine
:param block_migration: if true, prepare for block migration
:param disk_over_commit: if true, allow disk over commit
:returns: a dict containing migration info (hypervisor-dependent)
"""
raise NotImplementedError()
def check_can_live_migrate_destination_cleanup(self, context,
dest_check_data):
"""Do required cleanup on dest host after check_can_live_migrate calls
:param context: security context
:param dest_check_data: result of check_can_live_migrate_destination
"""
raise NotImplementedError()
def check_can_live_migrate_source(self, context, instance,
dest_check_data, block_device_info=None):
"""Check if it is possible to execute live migration.
This checks if the live migration can succeed, based on the
results from check_can_live_migrate_destination.
:param context: security context
:param instance: nova.db.sqlalchemy.models.Instance
:param dest_check_data: result of check_can_live_migrate_destination
:param block_device_info: result of _get_instance_block_device_info
:returns: a dict containing migration info (hypervisor-dependent)
"""
raise NotImplementedError()
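    # Illustrative note (not part of the driver interface): the three
    # check_can_live_migrate_* methods above are driven by the compute
    # manager roughly in this order (a sketch, not actual nova code):
    #
    #     dest_data = dest_driver.check_can_live_migrate_destination(...)
    #     src_data = src_driver.check_can_live_migrate_source(ctxt, instance,
    #                                                         dest_data)
    #     dest_driver.check_can_live_migrate_destination_cleanup(ctxt,
    #                                                            dest_data)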
def get_instance_disk_info(self, instance,
block_device_info=None):
"""Retrieve information about actual disk sizes of an instance.
:param instance: nova.objects.Instance
:param block_device_info:
Optional; Can be used to filter out devices which are
actually volumes.
:return:
json strings with below format::
"[{'path':'disk',
'type':'raw',
'virt_disk_size':'10737418240',
'backing_file':'backing_file',
'disk_size':'83886080'
'over_committed_disk_size':'10737418240'},
...]"
"""
raise NotImplementedError()
def refresh_security_group_rules(self, security_group_id):
"""This method is called after a change to security groups.
All security groups and their associated rules live in the datastore,
and calling this method should apply the updated rules to instances
running the specified security group.
An error should be raised if the operation cannot complete.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def refresh_security_group_members(self, security_group_id):
"""This method is called when a security group is added to an instance.
This message is sent to the virtualization drivers on hosts that are
running an instance that belongs to a security group that has a rule
that references the security group identified by `security_group_id`.
It is the responsibility of this method to make sure any rules
that authorize traffic flow with members of the security group are
updated and any new members can communicate, and any removed members
cannot.
Scenario:
* we are running on host 'H0' and we have an instance 'i-0'.
* instance 'i-0' is a member of security group 'speaks-b'
* group 'speaks-b' has an ingress rule that authorizes group 'b'
* another host 'H1' runs an instance 'i-1'
* instance 'i-1' is a member of security group 'b'
When 'i-1' launches or terminates we will receive the message
to update members of group 'b', at which time we will make
any changes needed to the rules for instance 'i-0' to allow
or deny traffic coming from 'i-1', depending on if it is being
added or removed from the group.
In this scenario, 'i-1' could just as easily have been running on our
        host 'H0' and this method would still have been called. The point is
that this method isn't called on the host where instances of that
group are running (as is the case with
:py:meth:`refresh_security_group_rules`) but is called where references
are made to authorizing those instances.
An error should be raised if the operation cannot complete.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def refresh_provider_fw_rules(self):
"""This triggers a firewall update based on database changes.
When this is called, rules have either been added or removed from the
datastore. You can retrieve rules with
:py:meth:`nova.db.provider_fw_rule_get_all`.
Provider rules take precedence over security group rules. If an IP
would be allowed by a security group ingress rule, but blocked by
a provider rule, then packets from the IP are dropped. This includes
intra-project traffic in the case of the allow_project_net_traffic
flag for the libvirt-derived classes.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def refresh_instance_security_rules(self, instance):
"""Refresh security group rules
Gets called when an instance gets added to or removed from
the security group the instance is a member of or if the
group gains or loses a rule.
"""
raise NotImplementedError()
def reset_network(self, instance):
"""reset networking for specified instance."""
# TODO(Vek): Need to pass context in for access to auth_token
pass
def ensure_filtering_rules_for_instance(self, instance, network_info):
"""Setting up filtering rules and waiting for its completion.
To migrate an instance, filtering rules to hypervisors
and firewalls are inevitable on destination host.
( Waiting only for filtering rules to hypervisor,
since filtering rules to firewall rules can be set faster).
Concretely, the below method must be called.
- setup_basic_filtering (for nova-basic, etc.)
- prepare_instance_filter(for nova-instance-instance-xxx, etc.)
to_xml may have to be called since it defines PROJNET, PROJMASK.
but libvirt migrates those value through migrateToURI(),
so , no need to be called.
Don't use thread for this method since migration should
not be started when setting-up filtering rules operations
are not completed.
:param instance: nova.objects.instance.Instance object
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def filter_defer_apply_on(self):
"""Defer application of IPTables rules."""
pass
def filter_defer_apply_off(self):
"""Turn off deferral of IPTables rules and apply the rules now."""
pass
def unfilter_instance(self, instance, network_info):
"""Stop filtering instance."""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def set_admin_password(self, instance, new_pass):
"""Set the root password on the specified instance.
:param instance: nova.objects.instance.Instance
:param new_pass: the new password
"""
raise NotImplementedError()
def inject_file(self, instance, b64_path, b64_contents):
"""Writes a file on the specified instance.
The first parameter is an instance of nova.compute.service.Instance,
and so the instance is being specified as instance.name. The second
parameter is the base64-encoded path to which the file is to be
written on the instance; the third is the contents of the file, also
base64-encoded.
NOTE(russellb) This method is deprecated and will be removed once it
can be removed from nova.compute.manager.
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def change_instance_metadata(self, context, instance, diff):
"""Applies a diff to the instance metadata.
This is an optional driver method which is used to publish
changes to the instance's metadata to the hypervisor. If the
hypervisor has no means of publishing the instance metadata to
the instance, then this method should not be implemented.
:param context: security context
:param instance: nova.objects.instance.Instance
"""
pass
def inject_network_info(self, instance, nw_info):
"""inject network info for specified instance."""
# TODO(Vek): Need to pass context in for access to auth_token
pass
def poll_rebooting_instances(self, timeout, instances):
"""Poll for rebooting instances
:param timeout: the currently configured timeout for considering
rebooting instances to be stuck
:param instances: instances that have been in rebooting state
longer than the configured timeout
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def host_power_action(self, action):
"""Reboots, shuts down or powers up the host."""
raise NotImplementedError()
def host_maintenance_mode(self, host, mode):
"""Start/Stop host maintenance window. On start, it triggers
guest VMs evacuation.
"""
raise NotImplementedError()
def set_host_enabled(self, enabled):
"""Sets the specified host's ability to accept new instances."""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def get_host_uptime(self):
"""Returns the result of calling "uptime" on the target host."""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks.
:param instance: nova.objects.instance.Instance
"""
# TODO(Vek): Need to pass context in for access to auth_token
raise NotImplementedError()
def unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks.
:param instance: nova.objects.instance.Instance
"""
raise NotImplementedError()
def get_host_cpu_stats(self):
"""Get the currently known host CPU stats.
:returns: a dict containing the CPU stat info, eg:
| {'kernel': kern,
| 'idle': idle,
| 'user': user,
| 'iowait': wait,
| 'frequency': freq},
where kern and user indicate the cumulative CPU time
(nanoseconds) spent by kernel and user processes
respectively, idle indicates the cumulative idle CPU time
(nanoseconds), wait indicates the cumulative I/O wait CPU
time (nanoseconds), since the host is booting up; freq
indicates the current CPU frequency (MHz). All values are
long integers.
"""
raise NotImplementedError()
def block_stats(self, instance, disk_id):
"""Return performance counters associated with the given disk_id on the
given instance. These are returned as [rd_req, rd_bytes, wr_req,
wr_bytes, errs], where rd indicates read, wr indicates write, req is
the total number of I/O requests made, bytes is the total number of
bytes transferred, and errs is the number of requests held up due to a
full pipeline.
All counters are long integers.
This method is optional. On some platforms (e.g. XenAPI) performance
statistics can be retrieved directly in aggregate form, without Nova
having to do the aggregation. On those platforms, this method is
unused.
Note that this function takes an instance ID.
"""
raise NotImplementedError()
def deallocate_networks_on_reschedule(self, instance):
"""Does the driver want networks deallocated on reschedule?"""
return False
def macs_for_instance(self, instance):
"""What MAC addresses must this instance have?
Some hypervisors (such as bare metal) cannot do freeform virtualisation
of MAC addresses. This method allows drivers to return a set of MAC
addresses that the instance is to have. allocate_for_instance will take
this into consideration when provisioning networking for the instance.
Mapping of MAC addresses to actual networks (or permitting them to be
freeform) is up to the network implementation layer. For instance,
with openflow switches, fixed MAC addresses can still be virtualised
onto any L2 domain, with arbitrary VLANs etc, but regular switches
require pre-configured MAC->network mappings that will match the
actual configuration.
Most hypervisors can use the default implementation which returns None.
Hypervisors with MAC limits should return a set of MAC addresses, which
will be supplied to the allocate_for_instance call by the compute
manager, and it is up to that call to ensure that all assigned network
details are compatible with the set of MAC addresses.
This is called during spawn_instance by the compute manager.
:return: None, or a set of MAC ids (e.g. set(['12:34:56:78:90:ab'])).
None means 'no constraints', a set means 'these and only these
MAC addresses'.
"""
return None
def dhcp_options_for_instance(self, instance):
"""Get DHCP options for this instance.
Some hypervisors (such as bare metal) require that instances boot from
the network, and manage their own TFTP service. This requires passing
the appropriate options out to the DHCP service. Most hypervisors can
use the default implementation which returns None.
This is called during spawn_instance by the compute manager.
Note that the format of the return value is specific to Quantum
client API.
:return: None, or a set of DHCP options, eg:
| [{'opt_name': 'bootfile-name',
| 'opt_value': '/tftpboot/path/to/config'},
| {'opt_name': 'server-ip-address',
| 'opt_value': '1.2.3.4'},
| {'opt_name': 'tftp-server',
| 'opt_value': '1.2.3.4'}
| ]
"""
return None
def manage_image_cache(self, context, all_instances):
"""Manage the driver's local image cache.
        Some drivers choose to cache images for instances on disk. This method
is an opportunity to do management of that cache which isn't directly
related to other calls into the driver. The prime example is to clean
the cache and remove images which are no longer of interest.
:param all_instances: nova.objects.instance.InstanceList
"""
pass
def add_to_aggregate(self, context, aggregate, host, **kwargs):
"""Add a compute host to an aggregate."""
# NOTE(jogo) Currently only used for XenAPI-Pool
raise NotImplementedError()
def remove_from_aggregate(self, context, aggregate, host, **kwargs):
"""Remove a compute host from an aggregate."""
raise NotImplementedError()
def undo_aggregate_operation(self, context, op, aggregate,
host, set_error=True):
"""Undo for Resource Pools."""
raise NotImplementedError()
def get_volume_connector(self, instance):
"""Get connector information for the instance for attaching to volumes.
Connector information is a dictionary representing the ip of the
machine that will be making the connection, the name of the iscsi
initiator and the hostname of the machine as follows::
{
'ip': ip,
'initiator': initiator,
'host': hostname
}
"""
raise NotImplementedError()
def get_available_nodes(self, refresh=False):
"""Returns nodenames of all nodes managed by the compute service.
This method is for multi compute-nodes support. If a driver supports
multi compute-nodes, this method returns a list of nodenames managed
by the service. Otherwise, this method should return
[hypervisor_hostname].
"""
raise NotImplementedError()
def node_is_available(self, nodename):
"""Return whether this compute service manages a particular node."""
if nodename in self.get_available_nodes():
return True
# Refresh and check again.
return nodename in self.get_available_nodes(refresh=True)
def get_per_instance_usage(self):
"""Get information about instance resource usage.
:returns: dict of nova uuid => dict of usage info
"""
return {}
def instance_on_disk(self, instance):
"""Checks access of instance files on the host.
:param instance: nova.objects.instance.Instance to lookup
        Returns True if the files of an instance with the supplied ID are
        accessible on the host, False otherwise.
.. note::
Used in rebuild for HA implementation and required for validation
of access to instance shared disk files
"""
return False
def register_event_listener(self, callback):
"""Register a callback to receive events.
Register a callback to receive asynchronous event
notifications from hypervisors. The callback will
be invoked with a single parameter, which will be
an instance of the nova.virt.event.Event class.
"""
self._compute_event_callback = callback
def emit_event(self, event):
"""Dispatches an event to the compute manager.
Invokes the event callback registered by the
compute manager to dispatch the event. This
must only be invoked from a green thread.
"""
if not self._compute_event_callback:
LOG.debug("Discarding event %s", str(event))
return
if not isinstance(event, virtevent.Event):
raise ValueError(
_("Event must be an instance of nova.virt.event.Event"))
try:
LOG.debug("Emitting event %s", str(event))
self._compute_event_callback(event)
except Exception as ex:
LOG.error(_LE("Exception dispatching event %(event)s: %(ex)s"),
{'event': event, 'ex': ex})
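    # Illustrative sketch (not part of the driver interface): a compute
    # manager typically registers a single handler and then receives every
    # event emitted by the driver, e.g.:
    #
    #     def handle_event(event):          # event: nova.virt.event.Event
    #         LOG.debug("Received event %s", event)
    #
    #     driver.register_event_listener(handle_event)
    #     driver.emit_event(virtevent.Event())  # dispatched to handle_event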
def delete_instance_files(self, instance):
"""Delete any lingering instance files for an instance.
:param instance: nova.objects.instance.Instance
:returns: True if the instance was deleted from disk, False otherwise.
"""
return True
@property
def need_legacy_block_device_info(self):
"""Tell the caller if the driver requires legacy block device info.
Tell the caller whether we expect the legacy format of block
device info to be passed in to methods that expect it.
"""
return True
def volume_snapshot_create(self, context, instance, volume_id,
create_info):
"""Snapshots volumes attached to a specified instance.
:param context: request context
:param instance: nova.objects.instance.Instance that has the volume
attached
:param volume_id: Volume to be snapshotted
:param create_info: The data needed for nova to be able to attach
to the volume. This is the same data format returned by
Cinder's initialize_connection() API call. In the case of
doing a snapshot, it is the image file Cinder expects to be
used as the active disk after the snapshot operation has
completed. There may be other data included as well that is
needed for creating the snapshot.
"""
raise NotImplementedError()
def volume_snapshot_delete(self, context, instance, volume_id,
snapshot_id, delete_info):
"""Snapshots volumes attached to a specified instance.
:param context: request context
:param instance: nova.objects.instance.Instance that has the volume
attached
:param volume_id: Attached volume associated with the snapshot
:param snapshot_id: The snapshot to delete.
:param delete_info: Volume backend technology specific data needed to
be able to complete the snapshot. For example, in the case of
qcow2 backed snapshots, this would include the file being
merged, and the file being merged into (if appropriate).
"""
raise NotImplementedError()
def default_root_device_name(self, instance, image_meta, root_bdm):
"""Provide a default root device name for the driver."""
raise NotImplementedError()
def default_device_names_for_instance(self, instance, root_device_name,
*block_device_lists):
"""Default the missing device names in the block device mapping."""
raise NotImplementedError()
def get_device_name_for_instance(self, instance,
bdms, block_device_obj):
"""Get the next device name based on the block device mapping.
:param instance: nova.objects.instance.Instance that volume is
requesting a device name
:param bdms: a nova.objects.BlockDeviceMappingList for the instance
:param block_device_obj: A nova.objects.BlockDeviceMapping instance
with all info about the requested block
device. device_name does not need to be set,
and should be decided by the driver
implementation if not set.
:returns: The chosen device name.
"""
raise NotImplementedError()
def is_supported_fs_format(self, fs_type):
"""Check whether the file format is supported by this driver
        :param fs_type: the file system type to be checked,
                        the valid values are defined in the disk API module.
"""
# NOTE(jichenjc): Return False here so that every hypervisor
# need to define their supported file system
# type and implement this function at their
# virt layer.
return False
def quiesce(self, context, instance, image_meta):
"""Quiesce the specified instance to prepare for snapshots.
If the specified instance doesn't support quiescing,
        InstanceQuiesceNotSupported is raised. When it fails to quiesce due
        to other errors (e.g. agent timeout), NovaException is raised.
:param context: request context
:param instance: nova.objects.instance.Instance to be quiesced
:param image_meta: image object returned by nova.image.glance that
defines the image from which this instance
was created
"""
raise NotImplementedError()
def unquiesce(self, context, instance, image_meta):
"""Unquiesce the specified instance after snapshots.
If the specified instance doesn't support quiescing,
        InstanceQuiesceNotSupported is raised. When it fails due to other
        errors (e.g. agent timeout), NovaException is raised.
:param context: request context
:param instance: nova.objects.instance.Instance to be unquiesced
:param image_meta: image object returned by nova.image.glance that
defines the image from which this instance
was created
"""
raise NotImplementedError()
def load_compute_driver(virtapi, compute_driver=None):
"""Load a compute driver module.
Load the compute driver module specified by the compute_driver
configuration option or, if supplied, the driver name supplied as an
argument.
    Compute driver constructors take a VirtAPI object as their first argument
    and this must be supplied.
:param virtapi: a VirtAPI instance
:param compute_driver: a compute driver name to override the config opt
:returns: a ComputeDriver instance
"""
if not compute_driver:
compute_driver = CONF.compute_driver
if not compute_driver:
LOG.error(_LE("Compute driver option required, but not specified"))
sys.exit(1)
LOG.info(_LI("Loading compute driver '%s'"), compute_driver)
try:
driver = importutils.import_object_ns('nova.virt',
compute_driver,
virtapi)
return utils.check_isinstance(driver, ComputeDriver)
except ImportError:
LOG.exception(_LE("Unable to load the virtualization driver"))
sys.exit(1)
def compute_driver_matches(match):
return CONF.compute_driver and CONF.compute_driver.endswith(match)
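# Illustrative usage sketch (assumptions: a VirtAPI instance is available and
# the driver name is importable under the 'nova.virt' namespace):
#
#     driver = load_compute_driver(virtapi, "libvirt.LibvirtDriver")
#     if compute_driver_matches("LibvirtDriver"):
#         ...  # libvirt-specific behaviour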
| apache-2.0 | 5,705,245,557,200,438,000 | 39.281806 | 79 | 0.636249 | false |
zsjohny/jumpserver | apps/perms/utils/database_app_permission.py | 1 | 2832 | # coding: utf-8
#
from django.utils.translation import ugettext as _
from django.db.models import Q
from orgs.utils import set_to_root_org
from ..models import DatabaseAppPermission
from common.tree import TreeNode
from applications.models import DatabaseApp
from assets.models import SystemUser
__all__ = [
'DatabaseAppPermissionUtil',
'construct_database_apps_tree_root',
'parse_database_app_to_tree_node'
]
def get_user_database_app_permissions(user, include_group=True):
if include_group:
groups = user.groups.all()
arg = Q(users=user) | Q(user_groups__in=groups)
else:
arg = Q(users=user)
return DatabaseAppPermission.objects.all().valid().filter(arg)
def get_user_group_database_app_permission(user_group):
return DatabaseAppPermission.objects.all().valid().filter(
user_group=user_group
)
class DatabaseAppPermissionUtil:
get_permissions_map = {
'User': get_user_database_app_permissions,
'UserGroup': get_user_group_database_app_permission
}
def __init__(self, obj):
self.object = obj
self.change_org_if_need()
@staticmethod
def change_org_if_need():
set_to_root_org()
@property
def permissions(self):
obj_class = self.object.__class__.__name__
func = self.get_permissions_map[obj_class]
_permissions = func(self.object)
return _permissions
def get_database_apps(self):
database_apps = DatabaseApp.objects.filter(
granted_by_permissions__in=self.permissions
).distinct()
return database_apps
def get_database_app_system_users(self, database_app):
queryset = self.permissions
kwargs = {'database_apps': database_app}
queryset = queryset.filter(**kwargs)
system_users_ids = queryset.values_list('system_users', flat=True)
system_users_ids = system_users_ids.distinct()
system_users = SystemUser.objects.filter(id__in=system_users_ids)
system_users = system_users.order_by('-priority')
return system_users
def construct_database_apps_tree_root():
tree_root = {
'id': 'ID_DATABASE_APP_ROOT',
'name': _('DatabaseApp'),
'title': 'DatabaseApp',
'pId': '',
'open': False,
'isParent': True,
'iconSkin': '',
'meta': {'type': 'database_app'}
}
return TreeNode(**tree_root)
def parse_database_app_to_tree_node(parent, database_app):
pid = parent.id if parent else ''
tree_node = {
'id': database_app.id,
'name': database_app.name,
'title': database_app.name,
'pId': pid,
'open': False,
'isParent': False,
'iconSkin': 'file',
'meta': {'type': 'database_app'}
}
return TreeNode(**tree_node)
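# Illustrative sketch (not part of the module): building the tree returned to
# the frontend for a user's granted DatabaseApps. The variable names here are
# assumptions.
#
#     util = DatabaseAppPermissionUtil(user)
#     root = construct_database_apps_tree_root()
#     nodes = [parse_database_app_to_tree_node(root, app)
#              for app in util.get_database_apps()]
#     tree = [root] + nodes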
| gpl-2.0 | -8,469,248,723,224,852,000 | 27.32 | 74 | 0.626059 | false |
dongjoon-hyun/spark | python/pyspark/tests/test_daemon.py | 23 | 2950 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import time
import unittest
from pyspark.serializers import read_int
class DaemonTests(unittest.TestCase):
def connect(self, port):
from socket import socket, AF_INET, SOCK_STREAM
sock = socket(AF_INET, SOCK_STREAM)
sock.connect(('127.0.0.1', port))
# send a split index of -1 to shutdown the worker
sock.send(b"\xFF\xFF\xFF\xFF")
sock.close()
return True
def do_termination_test(self, terminator):
from subprocess import Popen, PIPE
from errno import ECONNREFUSED
# start daemon
daemon_path = os.path.join(os.path.dirname(__file__), "..", "daemon.py")
python_exec = sys.executable or os.environ.get("PYSPARK_PYTHON")
daemon = Popen([python_exec, daemon_path], stdin=PIPE, stdout=PIPE)
# read the port number
port = read_int(daemon.stdout)
# daemon should accept connections
self.assertTrue(self.connect(port))
        # wait for the worker process spawned by the daemon to exit.
time.sleep(1)
# request shutdown
terminator(daemon)
time.sleep(1)
# daemon should no longer accept connections
try:
self.connect(port)
except EnvironmentError as exception:
self.assertEqual(exception.errno, ECONNREFUSED)
else:
self.fail("Expected EnvironmentError to be raised")
def test_termination_stdin(self):
"""Ensure that daemon and workers terminate when stdin is closed."""
self.do_termination_test(lambda daemon: daemon.stdin.close())
def test_termination_sigterm(self):
"""Ensure that daemon and workers terminate on SIGTERM."""
from signal import SIGTERM
self.do_termination_test(lambda daemon: os.kill(daemon.pid, SIGTERM))
if __name__ == "__main__":
from pyspark.tests.test_daemon import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 | 4,872,231,066,018,637,000 | 34.542169 | 87 | 0.679322 | false |
nirzari18/Query-Analysis-Application-on-Google-App-Engine_Extension | lib/simplejson/tests/test_bigint_as_string.py | 122 | 2238 | from unittest import TestCase
import simplejson as json
class TestBigintAsString(TestCase):
# Python 2.5, at least the one that ships on Mac OS X, calculates
# 2 ** 53 as 0! It manages to calculate 1 << 53 correctly.
values = [(200, 200),
((1 << 53) - 1, 9007199254740991),
((1 << 53), '9007199254740992'),
((1 << 53) + 1, '9007199254740993'),
(-100, -100),
((-1 << 53), '-9007199254740992'),
((-1 << 53) - 1, '-9007199254740993'),
((-1 << 53) + 1, -9007199254740991)]
options = (
{"bigint_as_string": True},
{"int_as_string_bitcount": 53}
)
def test_ints(self):
for opts in self.options:
for val, expect in self.values:
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, **opts)))
def test_lists(self):
for opts in self.options:
for val, expect in self.values:
val = [val, val]
expect = [expect, expect]
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, **opts)))
def test_dicts(self):
for opts in self.options:
for val, expect in self.values:
val = {'k': val}
expect = {'k': expect}
self.assertEqual(
val,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, **opts)))
def test_dict_keys(self):
for opts in self.options:
for val, _ in self.values:
expect = {str(val): 'value'}
val = {val: 'value'}
self.assertEqual(
expect,
json.loads(json.dumps(val)))
self.assertEqual(
expect,
json.loads(json.dumps(val, **opts)))
| apache-2.0 | -6,286,807,250,610,201,000 | 32.402985 | 69 | 0.447274 | false |
curiosityio/taiga-docker | taiga-back/taiga-back/taiga/projects/notifications/mixins.py | 2 | 10277 | # Copyright (C) 2014-2016 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2016 Jesús Espino <[email protected]>
# Copyright (C) 2014-2016 David Barragán <[email protected]>
# Copyright (C) 2014-2016 Alejandro Alonso <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from functools import partial
from operator import is_not
from django.contrib.auth import get_user_model
from django.core.exceptions import ObjectDoesNotExist
from taiga.base import response
from taiga.base.decorators import detail_route
from taiga.base.api import serializers
from taiga.base.api.utils import get_object_or_404
from taiga.base.fields import WatchersField
from taiga.projects.notifications import services
from taiga.projects.notifications.utils import (attach_watchers_to_queryset,
attach_is_watcher_to_queryset,
attach_total_watchers_to_queryset)
from . serializers import WatcherSerializer
class WatchedResourceMixin:
"""
    Rest Framework resource mixin for resources that can send
    notifications about their changes.
    NOTE:
    - this mixin has a hard dependency on HistoryMixin
    (defined in the history app) and must always be located
    after it in the inheritance definition.
    - the classes using this mixin must have a method:
def pre_conditions_on_save(self, obj)
"""
_not_notify = False
def attach_watchers_attrs_to_queryset(self, queryset):
queryset = attach_watchers_to_queryset(queryset)
queryset = attach_total_watchers_to_queryset(queryset)
if self.request.user.is_authenticated():
queryset = attach_is_watcher_to_queryset(queryset, self.request.user)
return queryset
@detail_route(methods=["POST"])
def watch(self, request, pk=None):
obj = self.get_object()
self.check_permissions(request, "watch", obj)
self.pre_conditions_on_save(obj)
services.add_watcher(obj, request.user)
return response.Ok()
@detail_route(methods=["POST"])
def unwatch(self, request, pk=None):
obj = self.get_object()
self.check_permissions(request, "unwatch", obj)
self.pre_conditions_on_save(obj)
services.remove_watcher(obj, request.user)
return response.Ok()
def send_notifications(self, obj, history=None):
"""
Shortcut method for resources with special save
cases on actions methods that not uses standard
`post_save` hook of drf resources.
"""
if history is None:
history = self.get_last_history()
        # If no history is found, or it is empty, do nothing.
if not history:
return
if self._not_notify:
return
obj = self.get_object_for_snapshot(obj)
        # Analyze the corresponding diff and some text fields to
        # extract mentions and add them to the watchers before
        # obtaining a complete list of notifiable users.
services.analize_object_for_watchers(obj, history.comment, history.owner)
# Get a complete list of notifiable users for current
# object and send the change notification to them.
services.send_notifications(obj, history=history)
def post_save(self, obj, created=False):
self.send_notifications(obj)
super().post_save(obj, created)
def pre_delete(self, obj):
self.send_notifications(obj)
super().pre_delete(obj)
class WatchedModelMixin(object):
"""
    Generic model mixin that makes a model compatible
    with the notification system.
    NOTE: it is mandatory to extend your model class with
    this mixin if you want to send notifications about
    your model class.
"""
def get_project(self) -> object:
"""
        Default implementation method for obtaining a project
        instance from the current object.
        It comes with a generic default implementation
        that should work in almost all cases.
"""
return self.project
def get_watchers(self) -> object:
"""
        Default implementation method for obtaining the list of
        watchers for the current instance.
"""
return services.get_watchers(self)
def get_related_people(self) -> object:
"""
        Default implementation for obtaining the related people of
        the current instance.
"""
return services.get_related_people(self)
def get_watched(self, user_or_id):
return services.get_watched(user_or_id, type(self))
def add_watcher(self, user):
services.add_watcher(self, user)
def remove_watcher(self, user):
services.remove_watcher(self, user)
def get_owner(self) -> object:
"""
        Default implementation for obtaining the owner of
        the current instance.
"""
return self.owner
def get_assigned_to(self) -> object:
"""
        Default implementation for obtaining the assigned
        user.
"""
if hasattr(self, "assigned_to"):
return self.assigned_to
return None
def get_participants(self) -> frozenset:
"""
        Default implementation for obtaining the list
        of participants. It is mainly the owner and
        the assigned user.
"""
participants = (self.get_assigned_to(),
self.get_owner(),)
is_not_none = partial(is_not, None)
return frozenset(filter(is_not_none, participants))
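# Illustrative sketch (not part of taiga): a model opts into notifications by
# adding WatchedModelMixin to its bases, e.g.:
#
#     class Task(WatchedModelMixin, models.Model):
#         project = models.ForeignKey("projects.Project")
#         owner = models.ForeignKey("users.User", null=True)
#
# The default get_project()/get_owner() implementations above then work
# without any extra code.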
class WatchedResourceModelSerializer(serializers.ModelSerializer):
is_watcher = serializers.SerializerMethodField("get_is_watcher")
total_watchers = serializers.SerializerMethodField("get_total_watchers")
def get_is_watcher(self, obj):
if "request" in self.context:
user = self.context["request"].user
return user.is_authenticated() and user.is_watcher(obj)
return False
def get_total_watchers(self, obj):
# The "total_watchers" attribute is attached in the get_queryset of the viewset.
return getattr(obj, "total_watchers", 0) or 0
class EditableWatchedResourceModelSerializer(WatchedResourceModelSerializer):
watchers = WatchersField(required=False)
def restore_object(self, attrs, instance=None):
#watchers is not a field from the model but can be attached in the get_queryset of the viewset.
#If that's the case we need to remove it before calling the super method
watcher_field = self.fields.pop("watchers", None)
self.validate_watchers(attrs, "watchers")
new_watcher_ids = attrs.pop("watchers", None)
obj = super(WatchedResourceModelSerializer, self).restore_object(attrs, instance)
        # A partial update can exclude the watchers field, or the new instance may not have been saved yet
if instance is None or new_watcher_ids is None:
return obj
new_watcher_ids = set(new_watcher_ids)
old_watcher_ids = set(obj.get_watchers().values_list("id", flat=True))
adding_watcher_ids = list(new_watcher_ids.difference(old_watcher_ids))
removing_watcher_ids = list(old_watcher_ids.difference(new_watcher_ids))
User = get_user_model()
adding_users = get_user_model().objects.filter(id__in=adding_watcher_ids)
removing_users = get_user_model().objects.filter(id__in=removing_watcher_ids)
for user in adding_users:
services.add_watcher(obj, user)
for user in removing_users:
services.remove_watcher(obj, user)
obj.watchers = obj.get_watchers()
return obj
def to_native(self, obj):
#if watchers wasn't attached via the get_queryset of the viewset we need to manually add it
if obj is not None:
if not hasattr(obj, "watchers"):
obj.watchers = [user.id for user in obj.get_watchers()]
request = self.context.get("request", None)
user = request.user if request else None
if user and user.is_authenticated():
obj.is_watcher = user.id in obj.watchers
return super(WatchedResourceModelSerializer, self).to_native(obj)
def save(self, **kwargs):
obj = super(EditableWatchedResourceModelSerializer, self).save(**kwargs)
self.fields["watchers"] = WatchersField(required=False)
obj.watchers = [user.id for user in obj.get_watchers()]
return obj
class WatchersViewSetMixin:
# Is a ModelListViewSet with two required params: permission_classes and resource_model
serializer_class = WatcherSerializer
list_serializer_class = WatcherSerializer
permission_classes = None
resource_model = None
def retrieve(self, request, *args, **kwargs):
pk = kwargs.get("pk", None)
resource_id = kwargs.get("resource_id", None)
resource = get_object_or_404(self.resource_model, pk=resource_id)
self.check_permissions(request, 'retrieve', resource)
try:
self.object = resource.get_watchers().get(pk=pk)
except ObjectDoesNotExist: # or User.DoesNotExist
return response.NotFound()
serializer = self.get_serializer(self.object)
return response.Ok(serializer.data)
def list(self, request, *args, **kwargs):
resource_id = kwargs.get("resource_id", None)
resource = get_object_or_404(self.resource_model, pk=resource_id)
self.check_permissions(request, 'list', resource)
return super().list(request, *args, **kwargs)
def get_queryset(self):
resource = self.resource_model.objects.get(pk=self.kwargs.get("resource_id"))
return resource.get_watchers()
| mit | -4,441,189,852,457,946,600 | 34.926573 | 103 | 0.667835 | false |
shi2wei3/virt-test | virttest/storage.py | 6 | 21105 | """
Classes and functions to handle storage devices.
This exports:
- two functions to get the image/blkdebug filename
- a class for image operations and basic parameters
"""
import logging
import os
import shutil
import re
from autotest.client import utils
try:
from virttest import iscsi
except ImportError:
from autotest.client.shared import iscsi
import utils_misc
import virt_vm
import gluster
import lvm
import ceph
def preprocess_images(bindir, params, env):
    # Clone the master image for the vms.
for vm_name in params.get("vms").split():
vm = env.get_vm(vm_name)
if vm:
vm.destroy(free_mac_addresses=False)
vm_params = params.object_params(vm_name)
for image in vm_params.get("master_images_clone").split():
image_obj = QemuImg(params, bindir, image)
image_obj.clone_image(params, vm_name, image, bindir)
def preprocess_image_backend(bindir, params, env):
enable_gluster = params.get("enable_gluster")
gluster_image = params.get("gluster_brick")
if enable_gluster and gluster_image:
return gluster.create_gluster_vol(params)
return True
def postprocess_images(bindir, params):
for vm in params.get("vms").split():
vm_params = params.object_params(vm)
for image in vm_params.get("master_images_clone").split():
image_obj = QemuImg(params, bindir, image)
image_obj.rm_cloned_image(params, vm, image, bindir)
def file_exists(params, filename_path):
"""
Check if image_filename exists.
:param params: Dictionary containing the test parameters.
:param filename_path: path to file
:type filename_path: str
:param root_dir: Base directory for relative filenames.
:type root_dir: str
:return: True if image file exists else False
"""
gluster_image = params.get("gluster_brick")
if gluster_image:
return gluster.file_exists(params, filename_path)
if params.get("enable_ceph") == "yes":
image_name = params.get("image_name")
image_format = params.get("image_format", "qcow2")
ceph_monitor = params["ceph_monitor"]
rbd_pool_name = params["rbd_pool_name"]
rbd_image_name = "%s.%s" % (image_name.split("/")[-1], image_format)
return ceph.rbd_image_exist(ceph_monitor, rbd_pool_name,
rbd_image_name)
return os.path.exists(filename_path)
def file_remove(params, filename_path):
"""
Remove the image
:param params: Dictionary containing the test parameters.
:param filename_path: path to file
"""
if params.get("enable_ceph") == "yes":
image_name = params.get("image_name")
image_format = params.get("image_format", "qcow2")
ceph_monitor = params["ceph_monitor"]
rbd_pool_name = params["rbd_pool_name"]
rbd_image_name = "%s.%s" % (image_name.split("/")[-1], image_format)
return ceph.rbd_image_rm(ceph_monitor, rbd_pool_name, rbd_image_name)
def get_image_blkdebug_filename(params, root_dir):
"""
Generate an blkdebug file path from params and root_dir.
blkdebug files allow error injection in the block subsystem.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:note: params should contain:
blkdebug -- the name of the debug file.
"""
blkdebug_name = params.get("drive_blkdebug", None)
if blkdebug_name is not None:
blkdebug_filename = utils_misc.get_path(root_dir, blkdebug_name)
else:
blkdebug_filename = None
return blkdebug_filename
def get_image_filename(params, root_dir):
"""
Generate an image path from params and root_dir.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param image_name: Force name of image.
:param image_format: Format for image.
:note: params should contain:
image_name -- the name of the image file, without extension
image_format -- the format of the image (qcow2, raw etc)
:raise VMDeviceError: When no matching disk found (in indirect method).
"""
enable_gluster = params.get("enable_gluster", "no") == "yes"
enable_ceph = params.get("enable_ceph", "no") == "yes"
image_name = params.get("image_name")
if image_name:
if enable_gluster:
image_name = params.get("image_name", "image")
image_format = params.get("image_format", "qcow2")
return gluster.get_image_filename(params, image_name, image_format)
if enable_ceph:
image_format = params.get("image_format", "qcow2")
ceph_monitor = params["ceph_monitor"]
rbd_pool_name = params["rbd_pool_name"]
rbd_image_name = "%s.%s" % (image_name.split("/")[-1],
image_format)
return ceph.get_image_filename(ceph_monitor, rbd_pool_name,
rbd_image_name)
return get_image_filename_filesytem(params, root_dir)
else:
logging.warn("image_name parameter not set.")
def get_image_filename_filesytem(params, root_dir):
"""
Generate an image path from params and root_dir.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:note: params should contain:
image_name -- the name of the image file, without extension
image_format -- the format of the image (qcow2, raw etc)
:raise VMDeviceError: When no matching disk found (in indirect method).
"""
def sort_cmp(first, second):
"""
        Comparison function used for sorting in this test: sort first by
        length, then by value.
"""
first_contains_digit = re.findall(r'[vhs]d[a-z]*[\d]+', first)
second_contains_digit = re.findall(r'[vhs]d[a-z]*[\d]+', second)
if not first_contains_digit and not second_contains_digit:
if len(first) > len(second):
return 1
elif len(first) < len(second):
return -1
if len(first) == len(second):
if first_contains_digit and second_contains_digit:
return cmp(first, second)
elif first_contains_digit:
return -1
elif second_contains_digit:
return 1
return cmp(first, second)
image_name = params.get("image_name", "image")
indirect_image_select = params.get("indirect_image_select")
if indirect_image_select:
re_name = image_name
indirect_image_select = int(indirect_image_select)
matching_images = utils.system_output("ls -1d %s" % re_name)
matching_images = sorted(matching_images.split('\n'), cmp=sort_cmp)
if matching_images[-1] == '':
matching_images = matching_images[:-1]
try:
image_name = matching_images[indirect_image_select]
except IndexError:
raise virt_vm.VMDeviceError("No matching disk found for "
"name = '%s', matching = '%s' and "
"selector = '%s'" %
(re_name, matching_images,
indirect_image_select))
for protected in params.get('indirect_image_blacklist', '').split(' '):
match_image = re.match(protected, image_name)
if match_image and match_image.group(0) == image_name:
# We just need raise an error if it is totally match, such as
# sda sda1 and so on, but sdaa should not raise an error.
raise virt_vm.VMDeviceError("Matching disk is in blacklist. "
"name = '%s', matching = '%s' and "
"selector = '%s'" %
(re_name, matching_images,
indirect_image_select))
image_format = params.get("image_format", "qcow2")
if params.get("image_raw_device") == "yes":
return image_name
if image_format:
image_filename = "%s.%s" % (image_name, image_format)
else:
image_filename = image_name
image_filename = utils_misc.get_path(root_dir, image_filename)
return image_filename
class OptionMissing(Exception):
"""
    Option not found in the object
"""
def __init__(self, option):
self.option = option
def __str__(self):
return "%s is missing. Please check your parameters" % self.option
class QemuImg(object):
"""
A basic class for handling operations of disk/block images.
"""
def __init__(self, params, root_dir, tag):
"""
Init the default value for image object.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param tag: Image tag defined in parameter images.
"""
self.image_filename = get_image_filename(params, root_dir)
self.image_format = params.get("image_format", "qcow2")
self.size = params.get("image_size", "10G")
self.storage_type = params.get("storage_type", "local fs")
self.check_output = params.get("check_output") == "yes"
self.image_blkdebug_filename = get_image_blkdebug_filename(params,
root_dir)
self.remote_keywords = params.get("remote_image",
"gluster iscsi ceph").split()
image_chain = params.get("image_chain")
self.root_dir = root_dir
self.base_tag = None
self.snapshot_tag = None
if image_chain:
image_chain = re.split(r"\s+", image_chain)
if tag in image_chain:
index = image_chain.index(tag)
if index < len(image_chain) - 1:
self.snapshot_tag = image_chain[index + 1]
if index > 0:
self.base_tag = image_chain[index - 1]
if self.base_tag:
base_params = params.object_params(self.base_tag)
self.base_image_filename = get_image_filename(base_params,
root_dir)
self.base_format = base_params.get("image_format")
if self.snapshot_tag:
ss_params = params.object_params(self.snapshot_tag)
self.snapshot_image_filename = get_image_filename(ss_params,
root_dir)
self.snapshot_format = ss_params.get("image_format")
def check_option(self, option):
"""
Check if object has the option required.
:param option: option should be checked
"""
if option not in self.__dict__:
raise OptionMissing(option)
def is_remote_image(self):
"""
Check if image is from a remote server or not
"""
for keyword in self.remote_keywords:
if keyword in self.image_filename:
return True
return False
def backup_image(self, params, root_dir, action, good=True,
skip_existing=False):
"""
Backup or restore a disk image, depending on the action chosen.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param action: Whether we want to backup or restore the image.
        :param good: If we are backing up a good image (we want to restore it)
        or a bad image (we are saving a bad image for later analysis).
:note: params should contain:
image_name -- the name of the image file, without extension
image_format -- the format of the image (qcow2, raw etc)
"""
def backup_raw_device(src, dst):
if os.path.exists(src):
utils.system("dd if=%s of=%s bs=4k conv=sync" % (src, dst))
else:
logging.info("No source %s, skipping dd...", src)
def backup_image_file(src, dst):
logging.debug("Copying %s -> %s", src, dst)
if os.path.isfile(dst) and os.path.isfile(src):
os.unlink(dst)
if os.path.isfile(src):
shutil.copy(src, dst)
else:
logging.info("No source file %s, skipping copy...", src)
def get_backup_set(filename, backup_dir, action, good):
"""
Get all sources and destinations required for each backup.
"""
if not os.path.isdir(backup_dir):
os.makedirs(backup_dir)
basename = os.path.basename(filename)
bkp_set = []
if good:
src = filename
dst = os.path.join(backup_dir, "%s.backup" % basename)
if action == 'backup':
bkp_set = [[src, dst]]
elif action == 'restore':
bkp_set = [[dst, src]]
else:
# We have to make 2 backups, one of the bad image, another one
# of the good image
src_bad = filename
src_good = os.path.join(backup_dir, "%s.backup" % basename)
hsh = utils_misc.generate_random_string(4)
dst_bad = (os.path.join(backup_dir, "%s.bad.%s" %
(basename, hsh)))
dst_good = (os.path.join(backup_dir, "%s.good.%s" %
(basename, hsh)))
if action == 'backup':
bkp_set = [[src_bad, dst_bad], [src_good, dst_good]]
elif action == 'restore':
bkp_set = [[src_good, src_bad]]
if not bkp_set:
logging.error("No backup sets for action: %s, state: %s",
action, good)
return bkp_set
image_filename = self.image_filename
backup_dir = params.get("backup_dir", "")
if not os.path.isabs(backup_dir):
backup_dir = os.path.join(root_dir, backup_dir)
if params.get('image_raw_device') == 'yes':
iname = "raw_device"
iformat = params.get("image_format", "qcow2")
ifilename = "%s.%s" % (iname, iformat)
ifilename = utils_misc.get_path(root_dir, ifilename)
backup_set = get_backup_set(ifilename, backup_dir, action, good)
backup_func = backup_raw_device
else:
backup_set = get_backup_set(image_filename, backup_dir, action,
good)
backup_func = backup_image_file
if action == 'backup':
image_dir = os.path.dirname(image_filename)
image_dir_disk_free = utils.freespace(image_dir)
backup_size = 0
for src, dst in backup_set:
if os.path.isfile(src):
backup_size += os.path.getsize(src)
minimum_disk_free = 1.2 * backup_size
if image_dir_disk_free < minimum_disk_free:
image_dir_disk_free_gb = float(image_dir_disk_free) / 10 ** 9
backup_size_gb = float(backup_size) / 10 ** 9
minimum_disk_free_gb = float(minimum_disk_free) / 10 ** 9
logging.error("Free space on %s: %.1f GB", image_dir,
image_dir_disk_free_gb)
logging.error("Backup size: %.1f GB", backup_size_gb)
logging.error("Minimum free space acceptable: %.1f GB",
minimum_disk_free_gb)
logging.error("Available disk space is not sufficient for a"
"full backup. Skipping backup...")
return
for src, dst in backup_set:
if action == 'backup' and skip_existing and os.path.exists(dst):
continue
backup_func(src, dst)
@staticmethod
def clone_image(params, vm_name, image_name, root_dir):
"""
Clone master image to vm specific file.
:param params: Dictionary containing the test parameters.
:param vm_name: Vm name.
:param image_name: Master image name.
:param root_dir: Base directory for relative filenames.
"""
if not params.get("image_name_%s_%s" % (image_name, vm_name)):
m_image_name = params.get("image_name", "image")
vm_image_name = "%s_%s" % (m_image_name, vm_name)
if params.get("clone_master", "yes") == "yes":
image_params = params.object_params(image_name)
image_params["image_name"] = vm_image_name
m_image_fn = get_image_filename(params, root_dir)
image_fn = get_image_filename(image_params, root_dir)
force_clone = params.get("force_image_clone", "no")
if not os.path.exists(image_fn) or force_clone == "yes":
logging.info("Clone master image for vms.")
utils.run(params.get("image_clone_command") % (m_image_fn,
image_fn))
params["image_name_%s_%s" % (image_name, vm_name)] = vm_image_name
@staticmethod
def rm_cloned_image(params, vm_name, image_name, root_dir):
"""
Remove vm specific file.
:param params: Dictionary containing the test parameters.
:param vm_name: Vm name.
:param image_name: Master image name.
:param root_dir: Base directory for relative filenames.
"""
if params.get("image_name_%s_%s" % (image_name, vm_name)):
m_image_name = params.get("image_name", "image")
vm_image_name = "%s_%s" % (m_image_name, vm_name)
if params.get("clone_master", "yes") == "yes":
image_params = params.object_params(image_name)
image_params["image_name"] = vm_image_name
image_fn = get_image_filename(image_params, root_dir)
logging.debug("Removing vm specific image file %s", image_fn)
if os.path.exists(image_fn):
utils.run(params.get("image_remove_command") % (image_fn))
else:
logging.debug("Image file %s not found", image_fn)
class Rawdev(object):
"""
Base class for raw storage devices such as iscsi and local disks
"""
def __init__(self, params, root_dir, tag):
"""
Init the default value for image object.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param tag: Image tag defined in parameter images
"""
host_set_flag = params.get("host_setup_flag")
if host_set_flag is not None:
self.exec_cleanup = int(host_set_flag) & 2 == 2
else:
self.exec_cleanup = False
if params.get("force_cleanup") == "yes":
self.exec_cleanup = True
self.image_name = tag
class Iscsidev(Rawdev):
"""
    Class for handling iscsi devices for a VM
"""
def __init__(self, params, root_dir, tag):
"""
Init the default value for image object.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param tag: Image tag defined in parameter images
"""
Rawdev.__init__(self, params, root_dir, tag)
self.emulated_file_remove = False
self.emulated_image = params.get("emulated_image")
if self.emulated_image:
self.emulated_image = os.path.join(root_dir, self.emulated_image)
if params.get("emulated_file_remove", "no") == "yes":
self.emulated_file_remove = True
params["iscsi_thread_id"] = self.image_name
self.iscsidevice = iscsi.Iscsi.create_iSCSI(params, root_dir=root_dir)
self.device_id = params.get("device_id")
self.iscsi_init_timeout = int(params.get("iscsi_init_timeout", 10))
class LVMdev(Rawdev):
"""
    Class for handling LVM devices for a VM
"""
def __init__(self, params, root_dir, tag):
"""
Init the default value for image object.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param tag: Image tag defined in parameter images
"""
super(LVMdev, self).__init__(params, root_dir, tag)
if params.get("emulational_device", "yes") == "yes":
self.lvmdevice = lvm.EmulatedLVM(params, root_dir=root_dir)
else:
self.lvmdevice = lvm.LVM(params)
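# Illustrative sketch (not part of the module): resolving an image path and
# backing it up before a destructive test. The tag "image1" and the params
# object are assumptions for illustration.
#
#     image_params = params.object_params("image1")
#     filename = get_image_filename(image_params, data_dir)
#     image = QemuImg(image_params, data_dir, "image1")
#     image.backup_image(image_params, data_dir, action="backup", good=True)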
| gpl-2.0 | -7,251,056,142,510,471,000 | 38.228625 | 79 | 0.56399 | false |
iMyon/imageFilter | includes/uiControl.py | 1 | 2301 | #!/usr/bin/python
#coding:utf-8
#Filename: test.py
#Created: 2014-01-02 02:45:15
#Author: Myon, [email protected]
from PyQt4 import QtGui,QtCore
from mainwindow import Ui_MainWindow
from PyQt4.QtCore import *
from filter import filterImages
class Widget(QtGui.QMainWindow, Ui_MainWindow):
"""QtGui.QWidget和界面设计时选择的类型一致"""
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
self.setupUi(self) # Ui_Form.setupUi
        # connect checkbox state-change signals
self.cbWidth.stateChanged.connect(self.toggleGrpWidth)
self.cbHeight.stateChanged.connect(self.toggleGrpHeight)
self.cbRate.stateChanged.connect(self.toggleGrpRate)
self.center()
        # remove the title bar, window frame, etc.
#self.setWindowFlags(Qt.FramelessWindowHint)
    # working-directory button click handler
@pyqtSignature("")
def on_btnFileChoose_clicked(self):
path=QtGui.QFileDialog.getExistingDirectory(None,u'选择要筛选的图片目录')
if path != "":
self.lbFilePath.setText(path)
    # output-directory button click handler
@pyqtSignature("")
def on_btnOutputPath_clicked(self):
outputPath=QtGui.QFileDialog.getExistingDirectory(None,u'选择输出目录')
if outputPath!= "":
self.lbOutputPath.setText(outputPath)
    # start button click handler
@pyqtSignature("")
def on_btnStart_clicked(self):
        if self.lbFilePath.text()==QtCore.QString(u'未选择筛选目录') or self.lbOutputPath.text()==QtCore.QString(u'未选择输出目录'):
self.statusBar.showMessage(u"目录未选择!")
return
self.btnFileChoose.setEnabled(False)
self.btnOutputPath.setEnabled(False)
self.btnStart.setEnabled(False)
        # call the image-filtering class to do the processing
ft=filterImages(self)
ft.doFilter(self)
    # checkbox state-change handlers
def toggleGrpWidth(self):
if self.cbWidth.isChecked():
self.grpWidth.setEnabled(True)
else:
self.grpWidth.setEnabled(False)
def toggleGrpHeight(self):
if self.cbHeight.isChecked():
self.grpHeight.setEnabled(True)
else:
self.grpHeight.setEnabled(False)
def toggleGrpRate(self):
if self.cbRate.isChecked():
self.grpRate.setEnabled(True)
else:
self.grpRate.setEnabled(False)
    # center the window on the screen
def center(self):
screen=QtGui.QDesktopWidget().screenGeometry()
size=self.geometry()
self.move((screen.width()-size.width())/2,(screen.height()-self.height())/2)
| gpl-2.0 | -3,002,598,958,445,188,000 | 28.785714 | 112 | 0.751559 | false |
home-assistant/home-assistant | homeassistant/components/hue/sensor.py | 2 | 4083 | """Hue sensor entities."""
from aiohue.sensors import (
TYPE_ZLL_LIGHTLEVEL,
TYPE_ZLL_ROTARY,
TYPE_ZLL_SWITCH,
TYPE_ZLL_TEMPERATURE,
)
from homeassistant.components.sensor import STATE_CLASS_MEASUREMENT, SensorEntity
from homeassistant.const import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_ILLUMINANCE,
DEVICE_CLASS_TEMPERATURE,
LIGHT_LUX,
PERCENTAGE,
TEMP_CELSIUS,
)
from .const import DOMAIN as HUE_DOMAIN
from .sensor_base import SENSOR_CONFIG_MAP, GenericHueSensor, GenericZLLSensor
LIGHT_LEVEL_NAME_FORMAT = "{} light level"
REMOTE_NAME_FORMAT = "{} battery level"
TEMPERATURE_NAME_FORMAT = "{} temperature"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Defer sensor setup to the shared sensor module."""
bridge = hass.data[HUE_DOMAIN][config_entry.entry_id]
if not bridge.sensor_manager:
return
await bridge.sensor_manager.async_register_component("sensor", async_add_entities)
class GenericHueGaugeSensorEntity(GenericZLLSensor, SensorEntity):
"""Parent class for all 'gauge' Hue device sensors."""
class HueLightLevel(GenericHueGaugeSensorEntity):
"""The light level sensor entity for a Hue motion sensor device."""
_attr_device_class = DEVICE_CLASS_ILLUMINANCE
_attr_unit_of_measurement = LIGHT_LUX
@property
def state(self):
"""Return the state of the device."""
if self.sensor.lightlevel is None:
return None
# https://developers.meethue.com/develop/hue-api/supported-devices/#clip_zll_lightlevel
# Light level in 10000 log10 (lux) +1 measured by sensor. Logarithm
# scale used because the human eye adjusts to light levels and small
# changes at low lux levels are more noticeable than at high lux
# levels.
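        # For example, a reported lightlevel of 25000 works out to 10 ** 2.4999,
        # i.e. roughly 316 lux.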
return round(float(10 ** ((self.sensor.lightlevel - 1) / 10000)), 2)
@property
def extra_state_attributes(self):
"""Return the device state attributes."""
attributes = super().extra_state_attributes
attributes.update(
{
"lightlevel": self.sensor.lightlevel,
"daylight": self.sensor.daylight,
"dark": self.sensor.dark,
"threshold_dark": self.sensor.tholddark,
"threshold_offset": self.sensor.tholdoffset,
}
)
return attributes
class HueTemperature(GenericHueGaugeSensorEntity):
"""The temperature sensor entity for a Hue motion sensor device."""
_attr_device_class = DEVICE_CLASS_TEMPERATURE
_attr_state_class = STATE_CLASS_MEASUREMENT
_attr_unit_of_measurement = TEMP_CELSIUS
@property
def state(self):
"""Return the state of the device."""
if self.sensor.temperature is None:
return None
return self.sensor.temperature / 100
class HueBattery(GenericHueSensor, SensorEntity):
"""Battery class for when a batt-powered device is only represented as an event."""
_attr_device_class = DEVICE_CLASS_BATTERY
_attr_state_class = STATE_CLASS_MEASUREMENT
_attr_unit_of_measurement = PERCENTAGE
@property
def unique_id(self):
"""Return a unique identifier for this device."""
return f"{self.sensor.uniqueid}-battery"
@property
def state(self):
"""Return the state of the battery."""
return self.sensor.battery
SENSOR_CONFIG_MAP.update(
{
TYPE_ZLL_LIGHTLEVEL: {
"platform": "sensor",
"name_format": LIGHT_LEVEL_NAME_FORMAT,
"class": HueLightLevel,
},
TYPE_ZLL_TEMPERATURE: {
"platform": "sensor",
"name_format": TEMPERATURE_NAME_FORMAT,
"class": HueTemperature,
},
TYPE_ZLL_SWITCH: {
"platform": "sensor",
"name_format": REMOTE_NAME_FORMAT,
"class": HueBattery,
},
TYPE_ZLL_ROTARY: {
"platform": "sensor",
"name_format": REMOTE_NAME_FORMAT,
"class": HueBattery,
},
}
)
| apache-2.0 | -2,351,362,010,555,407,000 | 29.699248 | 95 | 0.638256 | false |
dragon/dr_months_calendar | drCalendar.py | 1 | 4598 | import sublime, sublime_plugin, locale, datetime
import calendar,locale,sys
# view.run_command("dr_calendar_insert_month")
def add_month(y,m,x):
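    # Shift month m of year y by x months; only exercised with x = +1/-1 below,
    # e.g. add_month(2013, 12, 1) -> (2014, 1) and add_month(2013, 1, -1) -> (2012, 12).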
r_m = m + x
r_y = y
if (r_m < 1):
r_y -= 1
r_m = 12
if (r_m > 12):
r_m = 1
r_y += 1
return (r_y,r_m)
class TDrCalend():
def __init__(self,y,m):
self.y = y
self.m = m
self.mc = self.GetMonthCalendarAsText()
def GetMonthCalendarAsText(self):
c = calendar.TextCalendar()
mc = c.monthdatescalendar(self.y, self.m)
today = datetime.date.today()#+datetime.timedelta(days=10) #debug
self.dd = datetime.date(self.y, self.m, 1)
# s = '{: %B %Y}\n'.format(self.dd)
s = '{0: %B %Y}'.format(self.dd)
s = '{0: ^21}'.format(s)+"\n"
# s += '\n'
for w in mc:
for d in w:
s += ' ' + '{0:%a}'.format(d)[0:2]
s += '\n'
break
for w in mc:
for d in w:
if (d.month == self.m):
if d == today:
s += '{0:>3}'.format('*'+str(d.day))
else:
s += '{0:>3}'.format(d.day)
else:
s += " "
s += "\n"
return (s)
class DrMonthsCalendarOpen(sublime_plugin.TextCommand):
def is_visible(self):
return True
def is_enabled(self):
return True
def run(self, edit):
loc = locale.getlocale() # get current locale
if sys.version[0] == "2":
locale.setlocale(locale.LC_ALL, 'C')
else:
locale.setlocale(locale.LC_ALL, '')
c = calendar.TextCalendar()
# self.view.insert(edit, self.view.sel().begin(), c.formatmonth(2013,9))
today = datetime.date.today()#+datetime.timedelta(days=10) #debug
(pr_y, pr_m) = add_month(today.year,today.month,-1)
c1 = TDrCalend(pr_y,pr_m).GetMonthCalendarAsText()
c11 = c1.splitlines()
c2 = TDrCalend(today.year,today.month).GetMonthCalendarAsText()
c22 = c2.splitlines()
(n_y, n_m) = add_month(today.year,today.month,1)
c3 = TDrCalend(n_y,n_m).GetMonthCalendarAsText()
c33 = c3.splitlines()
c4 = []
for x in range(0,8):
if len(c11) <= x:
c4.append('{0: >23}'.format(""))
else:
                c4.append('{0: >23}'.format(c11[x])) # right-align within a 23-char field
if len(c22) <= x:
c4[x] = c4[x] + ' ' +'{0: <21}'.format("")
else:
c4[x] = c4[x] + ' ' +'{0: <21}'.format(c22[x])
if len(c33) <= x:
c4[x] = c4[x] + ' ' +'{0: <21}'.format("")
else:
c4[x] = c4[x] + ' ' +'{0: <21}'.format(c33[x])
# for pos in self.view.sel():
# b = pos.begin()
# rr = self.view.insert(edit, pos.begin(), "\n".join(c4)+'\n')
# r = sublime.Region(b, b+rr)
# print ("\n".join(c4)+'\n')
# self.view.add_regions('DrCalendar',[r],"keyword",'',sublime.HIDDEN)
self.output_view = self.view.window().get_output_panel("textarea_drcalendar")
self.view.window().run_command("show_panel", {"panel": "output.textarea_drcalendar"})
# self.output_view.run_command("set_file_type", {"Packages/DrCalendar/DrCalendar.sublime-syntax"})
if sys.version[0] == "2":
self.output_view.set_syntax_file("Packages/DrMonthsCalendar/DrCalendar.tmLanguage")
else:
self.output_view.set_syntax_file("Packages/DrMonthsCalendar/DrCalendar.sublime-syntax")
self.output_view.set_read_only(False)
# self.output_view.insert(edit, self.output_view.size(), today.strftime(' %A %x')+"\n\n")
self.output_view.insert(edit, self.output_view.size(),"\n".join(c4))
self.output_view.insert(edit, self.output_view.size(), "\n"+today.strftime(' %A %x'))
locale.setlocale(locale.LC_ALL, loc)
# self.output_view.insert(edit, self.output_view.size(),"\n".join(c22)+'\n')
# self.output_view.run_command("append", {"characters": "Hello, World!"})
# self.output_view.set_read_only(True)
# r = sublime.Region(1, 100)
# # DRAW_OUTLINED
# self.output_view.add_regions('DrCalendar',[r],"keyword",'',sublime.HIDDEN)
# self.view.window().focusView(self.output_view)
# self.view.window().show_quick_panel("123", None, sublime.MONOSPACE_FONT)
class DrCalendarInsertMonthCommand1(sublime_plugin.TextCommand):
def run(self, edit):
loc = locale.getlocale() # get current locale
locale.setlocale(locale.LC_ALL, '')
c = calendar.TextCalendar()
# self.view.insert(edit, self.view.sel().begin(), c.formatmonth(2013,9))
i = len(self.view.sel())
if i > 1 :
i = -1
for pos in self.view.sel():
self.view.insert(edit, pos.begin(), c.formatmonth(2013,9+i))
i = i + 1;
| mit | -655,033,843,156,563,300 | 32.318841 | 100 | 0.571988 | false |
sicily/qt4.8.4 | src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/MSVSSettings_test.py | 12 | 65283 | #!/usr/bin/python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the MSVSSettings.py file. """
import sys
import unittest
import MSVSSettings
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def _ExpectedWarnings(self, expected):
""" Compares recorded lines to expected warnings. """
self.stderr.seek(0)
actual = self.stderr.read().split('\n')
actual = [line for line in actual if line != '']
self.assertEqual(sorted(expected), sorted(actual))
def test_ValidateMSVSSettings_tool_names(self):
""" Tests that only MSVS tool names are allowed. """
MSVSSettings.ValidateMSVSSettings({
'VCCLCompilerTool': {},
'VCLinkerTool': {},
'VCMIDLTool': {},
'foo': {},
'VCResourceCompilerTool': {},
'VCLibrarianTool': {},
'VCManifestTool': {},
'ClCompile': {}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized tool foo',
'Warning: unrecognized tool ClCompile'])
def test_ValidateMSVSSettings_settings(self):
""" Tests that for invalid MSVS settings. """
MSVSSettings.ValidateMSVSSettings({
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': ['string1', 'string2'],
'AdditionalUsingDirectories': 'folder1;folder2',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': '0',
'BasicRuntimeChecks': '5',
'BrowseInformation': 'fdkslj',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': '-1',
'CompileAs': '1',
'DebugInformationFormat': '2',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'string1;string2',
'EnableEnhancedInstructionSet': '1',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'Enableprefast': 'bogus',
'ErrorReporting': '1',
'ExceptionHandling': '1',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '1',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2',
'ForcedUsingFiles': 'file1;file2',
'GeneratePreprocessedFile': '1',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '1',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '1',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderThrough': 'a_file_name',
'PreprocessorDefinitions': 'string1;string2',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': '1',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '1',
'UseUnicodeResponseFiles': 'true',
'WarnAsError': 'true',
'WarningLevel': '1',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name',
'ZZXYZ': 'bogus'},
'VCLinkerTool': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalManifestDependencies': 'file1;file2',
'AdditionalOptions': 'a string1',
'AddModuleNamesToAssembly': 'file1;file2',
'AllowIsolation': 'true',
'AssemblyDebug': '2',
'AssemblyLinkResource': 'file1;file2',
'BaseAddress': 'a string1',
'CLRImageType': '2',
'CLRThreadAttribute': '2',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '2',
'DelayLoadDLLs': 'file1;file2',
'DelaySign': 'true',
'Driver': '2',
'EmbedManagedResourceFile': 'file1;file2',
'EnableCOMDATFolding': '2',
'EnableUAC': 'true',
'EntryPointSymbol': 'a string1',
'ErrorReporting': '2',
'FixedBaseAddress': '2',
'ForceSymbolReferences': 'file1;file2',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a string1',
'HeapReserveSize': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'file1;file2',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': '2',
'LinkIncremental': '2',
'LinkLibraryDependencies': 'true',
'LinkTimeCodeGeneration': '2',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a string1',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'OptimizeForWindows98': '1',
'OptimizeReferences': '2',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': '2',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'true',
'ShowProgress': '2',
'StackCommitSize': 'a string1',
'StackReserveSize': 'a string1',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': '2',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '2',
'TerminalServerAware': '2',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': '2',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'true',
'UseUnicodeResponseFiles': 'true',
'Version': 'a string1'},
'VCMIDLTool': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'CPreprocessOptions': 'a string1',
'DefaultCharType': '1',
'DLLDataFileName': 'a_file_name',
'EnableErrorChecks': '1',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'notgood': 'bogus',
'OutputDirectory': 'a string1',
'PreprocessorDefinitions': 'string1;string2',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TargetEnvironment': '1',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'string1;string2',
'ValidateParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '1'},
'VCResourceCompilerTool': {
'AdditionalOptions': 'a string1',
'AdditionalIncludeDirectories': 'folder1;folder2',
'Culture': '1003',
'IgnoreStandardIncludePath': 'true',
'notgood2': 'bogus',
'PreprocessorDefinitions': 'string1;string2',
'ResourceOutputFileName': 'a string1',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2'},
'VCLibrarianTool': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'ExportNamedFunctions': 'string1;string2',
'ForceSymbolReferences': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2',
'LinkLibraryDependencies': 'true',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1',
'ComponentFileName': 'a_file_name',
'DependencyInformationFile': 'a_file_name',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a string1',
'ManifestResourceFile': 'a_file_name',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'truel',
'UpdateFileHashesSearchPath': 'a_file_name',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'true',
'VerboseOutput': 'true'}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized value "5" for VCCLCompilerTool/'
'BasicRuntimeChecks',
'Warning: unrecognized value "fdkslj" for VCCLCompilerTool/'
'BrowseInformation',
'Warning: unrecognized value "-1" for VCCLCompilerTool/'
'CallingConvention',
'Warning: unrecognized value "2" for VCCLCompilerTool/'
'DebugInformationFormat',
'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
'Warning: unrecognized value "2" for VCLinkerTool/TargetMachine',
'Warning: unrecognized setting VCMIDLTool/notgood',
'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
'Warning: unrecognized value "truel" for VCManifestTool/'
'UpdateFileHashes'])
def test_ValidateMSBuildSettings_settings(self):
""" Tests that for invalid MSBuild settings. """
MSVSSettings.ValidateMSBuildSettings({
'ClCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': ['string1', 'string2'],
'AdditionalUsingDirectories': 'folder1;folder2',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': 'NoListing',
'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
'BrowseInformation': 'false',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'BuildingInIDE': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
'CompileAsManaged': 'Pure',
'CreateHotpatchableImage': 'true',
'DebugInformationFormat': 'ProgramDatabase',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'string1;string2',
'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'Enableprefast': 'bogus',
'ErrorReporting': 'Prompt',
'ExceptionHandling': 'SyncCThrow',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Neither',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Precise',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2',
'ForcedUsingFiles': 'file1;file2',
'FunctionLevelLinking': 'false',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'OnlyExplicitInline',
'IntrinsicFunctions': 'false',
'MinimalRebuild': 'true',
'MultiProcessorCompilation': 'true',
'ObjectFileName': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Disabled',
'PrecompiledHeader': 'NotUsing',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderOutputFile': 'a_file_name',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'string1;string2',
'PreprocessOutputPath': 'a string1',
'PreprocessSuppressLineNumbers': 'false',
'PreprocessToFile': 'false',
'ProcessorNumber': '33',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': 'MultiThreaded',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1Byte',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'TreatSpecificWarningsAsErrors': 'string1;string2',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2',
'UseFullPaths': 'true',
'UseUnicodeForAssemblerListing': 'true',
'WarningLevel': 'TurnOffAllWarnings',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name',
'ZZXYZ': 'bogus'},
'Link': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalManifestDependencies': 'file1;file2',
'AdditionalOptions': 'a string1',
'AddModuleNamesToAssembly': 'file1;file2',
'AllowIsolation': 'true',
'AssemblyDebug': '',
'AssemblyLinkResource': 'file1;file2',
'BaseAddress': 'a string1',
'BuildingInIDE': 'true',
'CLRImageType': 'ForceIJWImage',
'CLRSupportLastError': 'Enabled',
'CLRThreadAttribute': 'MTAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'CreateHotPatchableImage': 'X86Image',
'DataExecutionPrevention': 'false',
'DelayLoadDLLs': 'file1;file2',
'DelaySign': 'true',
'Driver': 'NotSet',
'EmbedManagedResourceFile': 'file1;file2',
'EnableCOMDATFolding': 'false',
'EnableUAC': 'true',
'EntryPointSymbol': 'a string1',
'FixedBaseAddress': 'false',
'ForceFileOutput': 'Enabled',
'ForceSymbolReferences': 'file1;file2',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a string1',
'HeapReserveSize': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'a_file_list',
'ImageHasSafeExceptionHandlers': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': 'false',
'LinkDLL': 'true',
'LinkErrorReporting': 'SendErrorReport',
'LinkStatus': 'true',
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a string1',
'MidlCommandFile': 'a_file_name',
'MinimumRequiredVersion': 'a string1',
'ModuleDefinitionFile': 'a_file_name',
'MSDOSStubFileName': 'a_file_name',
'NoEntryPoint': 'true',
'OptimizeReferences': 'false',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'PreventDllBinding': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SectionAlignment': '33',
'SetChecksum': 'true',
'ShowProgress': 'LinkVerboseREF',
'SpecifySectionAttributes': 'a string1',
'StackCommitSize': 'a string1',
'StackReserveSize': 'a string1',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': 'Console',
'SupportNobindOfDelayLoadedDLL': 'true',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineX86',
'TerminalServerAware': 'false',
'TrackerLogDirectory': 'a_folder',
'TreatLinkerWarningAsErrors': 'true',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': 'AsInvoker',
'UACUIAccess': 'true',
'Version': 'a string1'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'Culture': '0x236',
'IgnoreStandardIncludePath': 'true',
'NullTerminateStrings': 'true',
'PreprocessorDefinitions': 'string1;string2',
'ResourceOutputFileName': 'a string1',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'UndefinePreprocessorDefinitions': 'string1;string2'},
'Midl': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'ApplicationConfigurationMode': 'true',
'ClientStubFile': 'a_file_name',
'CPreprocessOptions': 'a string1',
'DefaultCharType': 'Signed',
'DllDataFileName': 'a_file_name',
'EnableErrorChecks': 'EnableCustom',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateClientFiles': 'Stub',
'GenerateServerFiles': 'None',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'LocaleID': '33',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a string1',
'PreprocessorDefinitions': 'string1;string2',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'ServerStubFile': 'a_file_name',
'StructMemberAlignment': 'NotSet',
'SuppressCompilerWarnings': 'true',
'SuppressStartupBanner': 'true',
'TargetEnvironment': 'Itanium',
'TrackerLogDirectory': 'a_folder',
'TypeLibFormat': 'NewFormat',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'string1;string2',
'ValidateAllParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '1'},
'Lib': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'DisplayLibrary': 'a string1',
'ErrorReporting': 'PromptImmediately',
'ExportNamedFunctions': 'string1;string2',
'ForceSymbolReferences': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2',
'LinkTimeCodeGeneration': 'true',
'MinimumRequiredVersion': 'a string1',
'ModuleDefinitionFile': 'a_file_name',
'Name': 'a_file_name',
'OutputFile': 'a_file_name',
'RemoveObjects': 'file1;file2',
'SubSystem': 'Console',
'SuppressStartupBanner': 'true',
'TargetMachine': 'MachineX86i',
'TrackerLogDirectory': 'a_folder',
'TreatLibWarningAsErrors': 'true',
'UseUnicodeResponseFiles': 'true',
'Verbose': 'true'},
'Mt': {
'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1',
'ComponentFileName': 'a_file_name',
'EnableDPIAwareness': 'fal',
'GenerateCatalogFiles': 'truel',
'GenerateCategoryTags': 'true',
'InputResourceManifests': 'a string1',
'ManifestFromManagedAssembly': 'a_file_name',
'notgood3': 'bogus',
'OutputManifestFile': 'a_file_name',
'OutputResourceManifests': 'a string1',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressDependencyElement': 'true',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'VerboseOutput': 'true'},
'ProjectReference': {
'LinkLibraryDependencies': 'true',
'UseLibraryDependencyInputs': 'true'},
'ManifestResourceCompile': {
'ResourceOutputFileName': 'a_file_name'},
'': {
'EmbedManifest': 'true',
'GenerateManifest': 'true',
'IgnoreImportLibrary': 'true',
'LinkIncremental': 'false'}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized setting ClCompile/Enableprefast',
'Warning: unrecognized setting ClCompile/ZZXYZ',
'Warning: unrecognized setting Mt/notgood3',
'Warning: unrecognized value "truel" for Mt/GenerateCatalogFiles',
'Warning: unrecognized value "MachineX86i" for Lib/TargetMachine',
'Warning: unrecognized value "fal" for Mt/EnableDPIAwareness'])
def test_ConvertToMsBuildSettings_empty(self):
""" Tests an empty conversion. """
msvs_settings = {}
expected_msbuild_settings = {}
actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def test_ConvertToMsBuildSettings_minimal(self):
""" Tests a minimal conversion. """
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': '0',
},
'VCLinkerTool': {
'LinkTimeCodeGeneration': '1',
'ErrorReporting': '1',
'DataExecutionPrevention': '2',
},
}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': 'Default',
},
'Link': {
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'LinkErrorReporting': 'PromptImmediately',
'DataExecutionPrevention': 'true',
},
}
actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def test_ConvertToMsBuildSettings_warnings(self):
""" Tests conversion that generates warnings. """
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': '1',
'AdditionalOptions': '2',
# These are incorrect values:
'BasicRuntimeChecks': '12',
'BrowseInformation': '21',
'UsePrecompiledHeader': '13',
'GeneratePreprocessedFile': '14'},
'VCLinkerTool': {
# These are incorrect values:
'Driver': '10',
'LinkTimeCodeGeneration': '31',
'ErrorReporting': '21',
'FixedBaseAddress': '6'},
'VCResourceCompilerTool': {
# Custom
'Culture': '1003'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': '1',
'AdditionalOptions': '2'},
'Link': {},
'ResourceCompile': {
# Custom
'Culture': '0x03eb'}}
actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([
'Warning: unrecognized value "12" for VCCLCompilerTool/'
'BasicRuntimeChecks while converting to MSBuild.',
'Warning: unrecognized value "21" for VCCLCompilerTool/'
'BrowseInformation while converting to MSBuild.',
'Warning: unrecognized value "13" for VCCLCompilerTool/'
'UsePrecompiledHeader while converting to MSBuild.',
'Warning: unrecognized value "14" for VCCLCompilerTool/'
'GeneratePreprocessedFile while converting to MSBuild.',
'Warning: unrecognized value "10" for VCLinkerTool/'
'Driver while converting to MSBuild.',
'Warning: unrecognized value "31" for VCLinkerTool/'
'LinkTimeCodeGeneration while converting to MSBuild.',
'Warning: unrecognized value "21" for VCLinkerTool/'
'ErrorReporting while converting to MSBuild.',
'Warning: unrecognized value "6" for VCLinkerTool/'
'FixedBaseAddress while converting to MSBuild.',
])
def test_ConvertToMsBuildSettings_full_synthetic(self):
""" Tests conversion of all the MsBuild settings. """
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'AdditionalUsingDirectories': 'folder1;folder2;folder3',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': '0',
'BasicRuntimeChecks': '1',
'BrowseInformation': '2',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': '0',
'CompileAs': '1',
'DebugInformationFormat': '4',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'd1;d2;d3',
'EnableEnhancedInstructionSet': '0',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'ErrorReporting': '1',
'ExceptionHandling': '2',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '0',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2;file3',
'ForcedUsingFiles': 'file1;file2;file3',
'GeneratePreprocessedFile': '1',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '2',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '3',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderThrough': 'a_file_name',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': '0',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '1',
'UseUnicodeResponseFiles': 'true',
'WarnAsError': 'true',
'WarningLevel': '2',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name'},
'VCLinkerTool': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
'AdditionalManifestDependencies': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AddModuleNamesToAssembly': 'file1;file2;file3',
'AllowIsolation': 'true',
'AssemblyDebug': '0',
'AssemblyLinkResource': 'file1;file2;file3',
'BaseAddress': 'a_string',
'CLRImageType': '1',
'CLRThreadAttribute': '2',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '0',
'DelayLoadDLLs': 'file1;file2;file3',
'DelaySign': 'true',
'Driver': '1',
'EmbedManagedResourceFile': 'file1;file2;file3',
'EnableCOMDATFolding': '0',
'EnableUAC': 'true',
'EntryPointSymbol': 'a_string',
'ErrorReporting': '0',
'FixedBaseAddress': '1',
'ForceSymbolReferences': 'file1;file2;file3',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a_string',
'HeapReserveSize': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'file1;file2;file3',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': '2',
'LinkIncremental': '1',
'LinkLibraryDependencies': 'true',
'LinkTimeCodeGeneration': '2',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a_string',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'OptimizeForWindows98': '1',
'OptimizeReferences': '0',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': '1',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'true',
'ShowProgress': '0',
'StackCommitSize': 'a_string',
'StackReserveSize': 'a_string',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': '2',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '3',
'TerminalServerAware': '2',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': '1',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'false',
'UseUnicodeResponseFiles': 'true',
'Version': 'a_string'},
'VCResourceCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'Culture': '1003',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'ResourceOutputFileName': 'a_string',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
'VCMIDLTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'CPreprocessOptions': 'a_string',
'DefaultCharType': '0',
'DLLDataFileName': 'a_file_name',
'EnableErrorChecks': '2',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a_string',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '3',
'SuppressStartupBanner': 'true',
'TargetEnvironment': '1',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'ValidateParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '4'},
'VCLibrarianTool': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'ExportNamedFunctions': 'd1;d2;d3',
'ForceSymbolReferences': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'LinkLibraryDependencies': 'true',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string',
'ComponentFileName': 'a_file_name',
'DependencyInformationFile': 'a_file_name',
'EmbedManifest': 'true',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a_string',
'ManifestResourceFile': 'my_name',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'true',
'VerboseOutput': 'true'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string /J',
'AdditionalUsingDirectories': 'folder1;folder2;folder3',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': 'NoListing',
'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
'BrowseInformation': 'true',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
'DebugInformationFormat': 'EditAndContinue',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'd1;d2;d3',
'EnableEnhancedInstructionSet': 'NotSet',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'ErrorReporting': 'Prompt',
'ExceptionHandling': 'Async',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Neither',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Strict',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2;file3',
'ForcedUsingFiles': 'file1;file2;file3',
'FunctionLevelLinking': 'true',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'AnySuitable',
'IntrinsicFunctions': 'true',
'MinimalRebuild': 'true',
'ObjectFileName': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Full',
'PrecompiledHeader': 'Create',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderOutputFile': 'a_file_name',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'PreprocessSuppressLineNumbers': 'false',
'PreprocessToFile': 'true',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': 'MultiThreaded',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1Byte',
'SuppressStartupBanner': 'true',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'UseFullPaths': 'true',
'WarningLevel': 'Level2',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name'},
'Link': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalManifestDependencies': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AddModuleNamesToAssembly': 'file1;file2;file3',
'AllowIsolation': 'true',
'AssemblyDebug': '',
'AssemblyLinkResource': 'file1;file2;file3',
'BaseAddress': 'a_string',
'CLRImageType': 'ForceIJWImage',
'CLRThreadAttribute': 'STAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '',
'DelayLoadDLLs': 'file1;file2;file3',
'DelaySign': 'true',
'Driver': 'Driver',
'EmbedManagedResourceFile': 'file1;file2;file3',
'EnableCOMDATFolding': '',
'EnableUAC': 'true',
'EntryPointSymbol': 'a_string',
'FixedBaseAddress': 'false',
'ForceSymbolReferences': 'file1;file2;file3',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a_string',
'HeapReserveSize': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': 'true',
'LinkErrorReporting': 'NoErrorReport',
'LinkTimeCodeGeneration': 'PGInstrument',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a_string',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'NoEntryPoint': 'true',
'OptimizeReferences': '',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SetChecksum': 'true',
'ShowProgress': 'NotSet',
'StackCommitSize': 'a_string',
'StackReserveSize': 'a_string',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': 'Windows',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineARM',
'TerminalServerAware': 'true',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': 'HighestAvailable',
'UACUIAccess': 'true',
'Version': 'a_string'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'Culture': '0x03eb',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'ResourceOutputFileName': 'a_string',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
'Midl': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'CPreprocessOptions': 'a_string',
'DefaultCharType': 'Unsigned',
'DllDataFileName': 'a_file_name',
'EnableErrorChecks': 'All',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a_string',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '4',
'SuppressStartupBanner': 'true',
'TargetEnvironment': 'Win32',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'ValidateAllParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '4'},
'Lib': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'ExportNamedFunctions': 'd1;d2;d3',
'ForceSymbolReferences': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'Mt': {
'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string',
'ComponentFileName': 'a_file_name',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a_string',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'VerboseOutput': 'true'},
'ManifestResourceCompile': {
'ResourceOutputFileName': 'my_name'},
'ProjectReference': {
'LinkLibraryDependencies': 'true',
'UseLibraryDependencyInputs': 'false'},
'': {
'EmbedManifest': 'true',
'GenerateManifest': 'true',
'IgnoreImportLibrary': 'true',
'LinkIncremental': 'false'}}
actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def test_ConvertToMsBuildSettings_actual(self):
""" Tests the conversion of an actual project.
A VS2008 project with most of the options defined was created through the
VS2008 IDE. It was then converted to VS2010. The tool settings found in
the .vcproj and .vcxproj files were converted to the two dictionaries
msvs_settings and expected_msbuild_settings.
Note that for many settings, the VS2010 converter adds macros like
    %(AdditionalIncludeDirectories) to make sure that inherited values are
included. Since the Gyp projects we generate do not use inheritance,
we removed these macros. They were:
ClCompile:
AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
AdditionalOptions: ' %(AdditionalOptions)'
AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
ForcedUsingFiles: ';%(ForcedUsingFiles)',
PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
UndefinePreprocessorDefinitions:
';%(UndefinePreprocessorDefinitions)',
Link:
AdditionalDependencies: ';%(AdditionalDependencies)',
AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
AdditionalManifestDependencies:
';%(AdditionalManifestDependencies)',
AdditionalOptions: ' %(AdditionalOptions)',
AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
AssemblyLinkResource: ';%(AssemblyLinkResource)',
DelayLoadDLLs: ';%(DelayLoadDLLs)',
EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
ForceSymbolReferences: ';%(ForceSymbolReferences)',
IgnoreSpecificDefaultLibraries:
';%(IgnoreSpecificDefaultLibraries)',
ResourceCompile:
AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
AdditionalOptions: ' %(AdditionalOptions)',
PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
Mt:
AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
AdditionalOptions: ' %(AdditionalOptions)',
InputResourceManifests: ';%(InputResourceManifests)',
"""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/more',
'AdditionalUsingDirectories': 'test',
'AssemblerListingLocation': '$(IntDir)\\a',
'AssemblerOutput': '1',
'BasicRuntimeChecks': '3',
'BrowseInformation': '1',
'BrowseInformationFile': '$(IntDir)\\e',
'BufferSecurityCheck': 'false',
'CallingConvention': '1',
'CompileAs': '1',
'DebugInformationFormat': '4',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'abc',
'EnableEnhancedInstructionSet': '1',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'ErrorReporting': '2',
'ExceptionHandling': '2',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '2',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'false',
'ForcedIncludeFiles': 'def',
'ForcedUsingFiles': 'ge',
'GeneratePreprocessedFile': '2',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '1',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': '$(IntDir)\\b',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '3',
'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
'PrecompiledHeaderThrough': 'StdAfx.hd',
'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
'RuntimeLibrary': '3',
'RuntimeTypeInfo': 'false',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '3',
'SuppressStartupBanner': 'false',
'TreatWChar_tAsBuiltInType': 'false',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'wer',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '0',
'UseUnicodeResponseFiles': 'false',
'WarnAsError': 'true',
'WarningLevel': '3',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': '$(IntDir)\\c'},
'VCLinkerTool': {
'AdditionalDependencies': 'zx',
'AdditionalLibraryDirectories': 'asd',
'AdditionalManifestDependencies': 's2',
'AdditionalOptions': '/mor2',
'AddModuleNamesToAssembly': 'd1',
'AllowIsolation': 'false',
'AssemblyDebug': '1',
'AssemblyLinkResource': 'd5',
'BaseAddress': '23423',
'CLRImageType': '3',
'CLRThreadAttribute': '1',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '0',
'DelayLoadDLLs': 'd4',
'DelaySign': 'true',
'Driver': '2',
'EmbedManagedResourceFile': 'd2',
'EnableCOMDATFolding': '1',
'EnableUAC': 'false',
'EntryPointSymbol': 'f5',
'ErrorReporting': '2',
'FixedBaseAddress': '1',
'ForceSymbolReferences': 'd3',
'FunctionOrder': 'fssdfsd',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'false',
'GenerateMapFile': 'true',
'HeapCommitSize': '13',
'HeapReserveSize': '12',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'flob;flok',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'f4',
'KeyContainer': 'f7',
'KeyFile': 'f6',
'LargeAddressAware': '2',
'LinkIncremental': '0',
'LinkLibraryDependencies': 'false',
'LinkTimeCodeGeneration': '1',
'ManifestFile':
'$(IntDir)\\$(TargetFileName).2intermediate.manifest',
'MapExports': 'true',
'MapFileName': 'd5',
'MergedIDLBaseFileName': 'f2',
'MergeSections': 'f5',
'MidlCommandFile': 'f1',
'ModuleDefinitionFile': 'sdsd',
'OptimizeForWindows98': '2',
'OptimizeReferences': '2',
'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
'ProgramDatabaseFile': 'Flob.pdb',
'RandomizedBaseAddress': '1',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'false',
'ShowProgress': '1',
'StackCommitSize': '15',
'StackReserveSize': '14',
'StripPrivateSymbols': 'd3',
'SubSystem': '1',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'false',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '1',
'TerminalServerAware': '1',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'f3',
'TypeLibraryResourceID': '12',
'UACExecutionLevel': '2',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'true',
'UseUnicodeResponseFiles': 'false',
'Version': '333'},
'VCResourceCompilerTool': {
'AdditionalIncludeDirectories': 'f3',
'AdditionalOptions': '/more3',
'Culture': '3084',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': '_UNICODE;UNICODE2',
'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
'ShowProgress': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'sfsdfsd',
'AdditionalOptions': 'afdsdafsd',
'AssemblyIdentity': 'sddfdsadfsa',
'ComponentFileName': 'fsdfds',
'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
'EmbedManifest': 'false',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'asfsfdafs',
'ManifestResourceFile':
'$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
'OutputManifestFile': '$(TargetPath).manifestdfs',
'RegistrarScriptFile': 'sdfsfd',
'ReplacementsFile': 'sdffsd',
'SuppressStartupBanner': 'false',
'TypeLibraryFile': 'sfsd',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'sfsd',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'false',
'VerboseOutput': 'true'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/more /J',
'AdditionalUsingDirectories': 'test',
'AssemblerListingLocation': '$(IntDir)a',
'AssemblerOutput': 'AssemblyCode',
'BasicRuntimeChecks': 'EnableFastChecks',
'BrowseInformation': 'true',
'BrowseInformationFile': '$(IntDir)e',
'BufferSecurityCheck': 'false',
'CallingConvention': 'FastCall',
'CompileAs': 'CompileAsC',
'DebugInformationFormat': 'EditAndContinue',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'abc',
'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'ErrorReporting': 'Queue',
'ExceptionHandling': 'Async',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Size',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Strict',
'ForceConformanceInForLoopScope': 'false',
'ForcedIncludeFiles': 'def',
'ForcedUsingFiles': 'ge',
'FunctionLevelLinking': 'true',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'OnlyExplicitInline',
'IntrinsicFunctions': 'true',
'MinimalRebuild': 'true',
'ObjectFileName': '$(IntDir)b',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Full',
'PrecompiledHeader': 'NotUsing', # Actual conversion gives ''
'PrecompiledHeaderFile': 'StdAfx.hd',
'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
'PreprocessSuppressLineNumbers': 'true',
'PreprocessToFile': 'true',
'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
'RuntimeLibrary': 'MultiThreadedDebugDLL',
'RuntimeTypeInfo': 'false',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '4Bytes',
'SuppressStartupBanner': 'false',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'false',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'wer',
'UseFullPaths': 'true',
'WarningLevel': 'Level3',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': '$(IntDir)c'},
'Link': {
'AdditionalDependencies': 'zx',
'AdditionalLibraryDirectories': 'asd',
'AdditionalManifestDependencies': 's2',
'AdditionalOptions': '/mor2',
'AddModuleNamesToAssembly': 'd1',
'AllowIsolation': 'false',
'AssemblyDebug': 'true',
'AssemblyLinkResource': 'd5',
'BaseAddress': '23423',
'CLRImageType': 'ForceSafeILImage',
'CLRThreadAttribute': 'MTAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '',
'DelayLoadDLLs': 'd4',
'DelaySign': 'true',
'Driver': 'UpOnly',
'EmbedManagedResourceFile': 'd2',
'EnableCOMDATFolding': 'false',
'EnableUAC': 'false',
'EntryPointSymbol': 'f5',
'FixedBaseAddress': 'false',
'ForceSymbolReferences': 'd3',
'FunctionOrder': 'fssdfsd',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': '13',
'HeapReserveSize': '12',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'flob;flok',
'ImportLibrary': 'f4',
'KeyContainer': 'f7',
'KeyFile': 'f6',
'LargeAddressAware': 'true',
'LinkErrorReporting': 'QueueForNextLogin',
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
'MapExports': 'true',
'MapFileName': 'd5',
'MergedIDLBaseFileName': 'f2',
'MergeSections': 'f5',
'MidlCommandFile': 'f1',
'ModuleDefinitionFile': 'sdsd',
'NoEntryPoint': 'true',
'OptimizeReferences': 'true',
'OutputFile': '$(OutDir)$(ProjectName)2.exe',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
'ProgramDatabaseFile': 'Flob.pdb',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SetChecksum': 'false',
'ShowProgress': 'LinkVerbose',
'StackCommitSize': '15',
'StackReserveSize': '14',
'StripPrivateSymbols': 'd3',
'SubSystem': 'Console',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'false',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineX86',
'TerminalServerAware': 'false',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'f3',
'TypeLibraryResourceID': '12',
'UACExecutionLevel': 'RequireAdministrator',
'UACUIAccess': 'true',
'Version': '333'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'f3',
'AdditionalOptions': '/more3',
'Culture': '0x0c0c',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': '_UNICODE;UNICODE2',
'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
'ShowProgress': 'true'},
'Mt': {
'AdditionalManifestFiles': 'sfsdfsd',
'AdditionalOptions': 'afdsdafsd',
'AssemblyIdentity': 'sddfdsadfsa',
'ComponentFileName': 'fsdfds',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'asfsfdafs',
'OutputManifestFile': '$(TargetPath).manifestdfs',
'RegistrarScriptFile': 'sdfsfd',
'ReplacementsFile': 'sdffsd',
'SuppressStartupBanner': 'false',
'TypeLibraryFile': 'sfsd',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'sfsd',
'VerboseOutput': 'true'},
'ProjectReference': {
'LinkLibraryDependencies': 'false',
'UseLibraryDependencyInputs': 'true'},
'': {
'EmbedManifest': 'false',
'GenerateManifest': 'false',
'IgnoreImportLibrary': 'true',
'LinkIncremental': ''
},
'ManifestResourceCompile': {
'ResourceOutputFileName':
'$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
}
actual_msbuild_settings = MSVSSettings.ConvertToMsBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
if __name__ == '__main__':
unittest.main() | lgpl-2.1 | -7,910,920,124,600,184,000 | 43.169147 | 80 | 0.558095 | false |
buildbot/buildbot | worker/buildbot_worker/test/util/sourcecommand.py | 17 | 3389 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from buildbot_worker import runprocess
from buildbot_worker.test.util import command
class SourceCommandTestMixin(command.CommandTestMixin):
"""
Support for testing Source Commands; an extension of CommandTestMixin
"""
def make_command(self, cmdclass, args, makedirs=False, initial_sourcedata=''):
"""
Same as the parent class method, but this also adds some source-specific
patches:
* writeSourcedata - writes to self.sourcedata (self is the TestCase)
* readSourcedata - reads from self.sourcedata
* doClobber - invokes RunProcess(['clobber', DIRECTORY])
* doCopy - invokes RunProcess(['copy', cmd.srcdir, cmd.workdir])
"""
cmd = command.CommandTestMixin.make_command(self, cmdclass, args, makedirs)
# note that these patches are to an *instance*, not a class, so there
# is no need to use self.patch() to reverse them
self.sourcedata = initial_sourcedata
def readSourcedata():
if self.sourcedata is None:
raise IOError("File not found")
return self.sourcedata
cmd.readSourcedata = readSourcedata
def writeSourcedata(res):
self.sourcedata = cmd.sourcedata
return res
cmd.writeSourcedata = writeSourcedata
# patch out a bunch of actions with invocations of RunProcess that will
# end up being Expect-able by the tests.
def doClobber(_, dirname):
r = runprocess.RunProcess(self.builder,
['clobber', dirname],
self.builder.basedir)
return r.start()
cmd.doClobber = doClobber
def doCopy(_):
r = runprocess.RunProcess(self.builder,
['copy', cmd.srcdir, cmd.workdir],
self.builder.basedir)
return r.start()
cmd.doCopy = doCopy
def setFileContents(filename, contents):
r = runprocess.RunProcess(self.builder,
['setFileContents', filename, contents],
self.builder.basedir)
return r.start()
cmd.setFileContents = setFileContents
def check_sourcedata(self, _, expected_sourcedata):
"""
Assert that the sourcedata (from the patched functions - see
make_command) is correct. Use this as a deferred callback.
"""
self.assertEqual(self.sourcedata, expected_sourcedata)
return _
| gpl-2.0 | 5,717,326,681,975,649,000 | 37.511364 | 83 | 0.629979 | false |
schlueter/ansible | lib/ansible/modules/network/avi/avi_cluster.py | 15 | 3916 | #!/usr/bin/python
#
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
#
# Copyright: (c) 2017 Gaurav Rastogi, <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_cluster
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of Cluster Avi RESTful Object
description:
- This module is used to configure Cluster object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
name:
description:
- Name of the object.
required: true
nodes:
description:
- List of clusternode.
rejoin_nodes_automatically:
description:
- Re-join cluster nodes automatically in the event one of the node is reset to factory.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
tenant_ref:
description:
- It is a reference to an object of type tenant.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
virtual_ip:
description:
- A virtual ip address.
- This ip address will be dynamically reconfigured so that it always is the ip of the cluster leader.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create Cluster object
avi_cluster:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_cluster
"""
RETURN = '''
obj:
description: Cluster (api/cluster) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
name=dict(type='str', required=True),
nodes=dict(type='list',),
rejoin_nodes_automatically=dict(type='bool',),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
virtual_ip=dict(type='dict',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'cluster',
set([]))
if __name__ == '__main__':
main()
| gpl-3.0 | -30,702,878,865,125,536 | 30.837398 | 113 | 0.611083 | false |
jelly/calibre | src/calibre/gui2/device.py | 2 | 88912 | from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'
# Imports {{{
import os, traceback, Queue, time, cStringIO, re, sys, weakref
from threading import Thread, Event
from PyQt5.Qt import (
QMenu, QAction, QActionGroup, QIcon, Qt, pyqtSignal, QDialog,
QObject, QVBoxLayout, QDialogButtonBox, QCursor, QCoreApplication,
QApplication, QEventLoop)
from calibre.customize.ui import (available_input_formats, available_output_formats,
device_plugins, disabled_device_plugins)
from calibre.devices.interface import DevicePlugin, currently_connected_device
from calibre.devices.errors import (UserFeedback, OpenFeedback, OpenFailed, OpenActionNeeded,
InitialConnectionError)
from calibre.ebooks.covers import cprefs, override_prefs, scale_cover, generate_cover
from calibre.gui2.dialogs.choose_format_device import ChooseFormatDeviceDialog
from calibre.utils.ipc.job import BaseJob
from calibre.devices.scanner import DeviceScanner
from calibre.gui2 import (config, error_dialog, Dispatcher, dynamic,
warning_dialog, info_dialog, choose_dir, FunctionDispatcher,
show_restart_warning, gprefs, question_dialog)
from calibre.ebooks.metadata import authors_to_string
from calibre import preferred_encoding, prints, force_unicode, as_unicode, sanitize_file_name2
from calibre.utils.filenames import ascii_filename
from calibre.devices.errors import (FreeSpaceError, WrongDestinationError,
BlacklistedDevice)
from calibre.devices.folder_device.driver import FOLDER_DEVICE
from calibre.constants import DEBUG
from calibre.utils.config import tweaks, device_prefs
from calibre.utils.img import scale_image
from calibre.library.save_to_disk import find_plugboard
from calibre.ptempfile import PersistentTemporaryFile, force_unicode as filename_to_unicode
# }}}
class DeviceJob(BaseJob): # {{{
def __init__(self, func, done, job_manager, args=[], kwargs={},
description=''):
BaseJob.__init__(self, description)
self.func = func
self.callback_on_done = done
if not isinstance(self.callback_on_done, (Dispatcher,
FunctionDispatcher)):
self.callback_on_done = FunctionDispatcher(self.callback_on_done)
self.args, self.kwargs = args, kwargs
self.exception = None
self.job_manager = job_manager
self._details = _('No details available.')
self._aborted = False
def start_work(self):
if DEBUG:
prints('Job:', self.id, self.description, 'started',
safe_encode=True)
self.start_time = time.time()
self.job_manager.changed_queue.put(self)
def job_done(self):
self.duration = time.time() - self.start_time
self.percent = 1
if DEBUG:
prints('DeviceJob:', self.id, self.description,
'done, calling callback', safe_encode=True)
try:
self.callback_on_done(self)
except:
pass
if DEBUG:
prints('DeviceJob:', self.id, self.description,
'callback returned', safe_encode=True)
self.job_manager.changed_queue.put(self)
def report_progress(self, percent, msg=''):
self.notifications.put((percent, msg))
self.job_manager.changed_queue.put(self)
def run(self):
self.start_work()
try:
self.result = self.func(*self.args, **self.kwargs)
if self._aborted:
return
except (Exception, SystemExit) as err:
if self._aborted:
return
self.failed = True
ex = as_unicode(err)
self._details = ex + '\n\n' + \
force_unicode(traceback.format_exc())
self.exception = err
finally:
self.job_done()
def abort(self, err):
call_job_done = False
if self.run_state == self.WAITING:
self.start_work()
call_job_done = True
self._aborted = True
self.failed = True
self._details = unicode(err)
self.exception = err
if call_job_done:
self.job_done()
@property
def log_file(self):
return cStringIO.StringIO(self._details.encode('utf-8'))
# }}}
def device_name_for_plugboards(device_class):
if hasattr(device_class, 'DEVICE_PLUGBOARD_NAME'):
return device_class.DEVICE_PLUGBOARD_NAME
return device_class.__class__.__name__
class BusyCursor(object):
def __enter__(self):
QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
def __exit__(self, *args):
QApplication.restoreOverrideCursor()
class DeviceManager(Thread): # {{{
def __init__(self, connected_slot, job_manager, open_feedback_slot,
open_feedback_msg, allow_connect_slot,
after_callback_feedback_slot, sleep_time=2):
'''
:sleep_time: Time to sleep between device probes in secs
'''
Thread.__init__(self)
self.setDaemon(True)
# [Device driver, Showing in GUI, Ejected]
self.devices = list(device_plugins())
self.disabled_device_plugins = list(disabled_device_plugins())
self.managed_devices = [x for x in self.devices if
not x.MANAGES_DEVICE_PRESENCE]
self.unmanaged_devices = [x for x in self.devices if
x.MANAGES_DEVICE_PRESENCE]
self.sleep_time = sleep_time
self.connected_slot = connected_slot
self.allow_connect_slot = allow_connect_slot
self.jobs = Queue.Queue(0)
self.job_steps = Queue.Queue(0)
self.keep_going = True
self.job_manager = job_manager
self.reported_errors = set([])
self.current_job = None
self.scanner = DeviceScanner()
self.connected_device = None
self.connected_device_kind = None
self.ejected_devices = set([])
self.mount_connection_requests = Queue.Queue(0)
self.open_feedback_slot = open_feedback_slot
self.open_feedback_only_once_seen = set()
self.after_callback_feedback_slot = after_callback_feedback_slot
self.open_feedback_msg = open_feedback_msg
self._device_information = None
self.current_library_uuid = None
self.call_shutdown_on_disconnect = False
self.devices_initialized = Event()
self.dynamic_plugins = {}
def report_progress(self, *args):
pass
@property
def is_device_connected(self):
return self.connected_device is not None
@property
def is_device_present(self):
return self.connected_device is not None and self.connected_device not in self.ejected_devices
@property
def device(self):
return self.connected_device
def do_connect(self, connected_devices, device_kind):
for dev, detected_device in connected_devices:
if dev.OPEN_FEEDBACK_MESSAGE is not None:
self.open_feedback_slot(dev.OPEN_FEEDBACK_MESSAGE)
try:
dev.reset(detected_device=detected_device,
report_progress=self.report_progress)
dev.open(detected_device, self.current_library_uuid)
except OpenFeedback as e:
if dev not in self.ejected_devices:
self.open_feedback_msg(dev.get_gui_name(), e)
self.ejected_devices.add(dev)
continue
except OpenFailed:
raise
except:
tb = traceback.format_exc()
if DEBUG or tb not in self.reported_errors:
self.reported_errors.add(tb)
prints('Unable to open device', str(dev))
prints(tb)
continue
self.after_device_connect(dev, device_kind)
return True
return False
def after_device_connect(self, dev, device_kind):
allow_connect = True
try:
uid = dev.get_device_uid()
except NotImplementedError:
uid = None
asked = gprefs.get('ask_to_manage_device', [])
if (dev.ASK_TO_ALLOW_CONNECT and uid and uid not in asked):
if not self.allow_connect_slot(dev.get_gui_name(), dev.icon):
allow_connect = False
asked.append(uid)
gprefs.set('ask_to_manage_device', asked)
if not allow_connect:
dev.ignore_connected_device(uid)
return
self.connected_device = currently_connected_device._device = dev
self.connected_device.specialize_global_preferences(device_prefs)
self.connected_device_kind = device_kind
self.connected_slot(True, device_kind)
def connected_device_removed(self):
while True:
try:
job = self.jobs.get_nowait()
job.abort(Exception(_('Device no longer connected.')))
except Queue.Empty:
break
try:
self.connected_device.post_yank_cleanup()
except:
pass
if self.connected_device in self.ejected_devices:
self.ejected_devices.remove(self.connected_device)
else:
self.connected_slot(False, self.connected_device_kind)
if self.call_shutdown_on_disconnect:
# The current device is an instance of a plugin class instantiated
# to handle this connection, probably as a mounted device. We are
# now abandoning the instance that we created, so we tell it that it
# is being shut down.
self.connected_device.shutdown()
self.call_shutdown_on_disconnect = False
device_prefs.set_overrides()
self.connected_device = currently_connected_device._device = None
self._device_information = None
def detect_device(self):
self.scanner.scan()
if self.is_device_connected:
if self.connected_device.MANAGES_DEVICE_PRESENCE:
cd = self.connected_device.detect_managed_devices(self.scanner.devices)
if cd is None:
self.connected_device_removed()
else:
connected, detected_device = \
self.scanner.is_device_connected(self.connected_device,
only_presence=True)
if not connected:
if DEBUG:
# Allow the device subsystem to output debugging info about
                        # why it thinks the device is not connected. Used, e.g.,
# in the can_handle() method of the T1 driver
self.scanner.is_device_connected(self.connected_device,
only_presence=True, debug=True)
self.connected_device_removed()
else:
for dev in self.unmanaged_devices:
try:
cd = dev.detect_managed_devices(self.scanner.devices)
except:
prints('Error during device detection for %s:'%dev)
traceback.print_exc()
else:
if cd is not None:
try:
dev.open(cd, self.current_library_uuid)
except BlacklistedDevice as e:
prints('Ignoring blacklisted device: %s'%
as_unicode(e))
except OpenActionNeeded as e:
if e.only_once_id not in self.open_feedback_only_once_seen:
self.open_feedback_only_once_seen.add(e.only_once_id)
self.open_feedback_msg(e.device_name, e)
except:
prints('Error while trying to open %s (Driver: %s)'%
(cd, dev))
traceback.print_exc()
else:
self.after_device_connect(dev, 'unmanaged-device')
return
try:
possibly_connected_devices = []
for device in self.managed_devices:
if device in self.ejected_devices:
continue
try:
possibly_connected, detected_device = \
self.scanner.is_device_connected(device)
except InitialConnectionError as e:
self.open_feedback_msg(device.get_gui_name(), e)
continue
if possibly_connected:
possibly_connected_devices.append((device, detected_device))
if possibly_connected_devices:
if not self.do_connect(possibly_connected_devices,
device_kind='device'):
if DEBUG:
prints('Connect to device failed, retrying in 5 seconds...')
time.sleep(5)
if not self.do_connect(possibly_connected_devices,
device_kind='device'):
if DEBUG:
prints('Device connect failed again, giving up')
except OpenFailed as e:
if e.show_me:
traceback.print_exc()
# Mount devices that don't use USB, such as the folder device
# This will be called on the GUI thread. Because of this, we must store
# information that the scanner thread will use to do the real work.
def mount_device(self, kls, kind, path):
self.mount_connection_requests.put((kls, kind, path))
# disconnect a device
def umount_device(self, *args):
if self.is_device_connected and not self.job_manager.has_device_jobs():
if self.connected_device_kind in {'unmanaged-device', 'device'}:
self.connected_device.eject()
if self.connected_device_kind != 'unmanaged-device':
self.ejected_devices.add(self.connected_device)
self.connected_slot(False, self.connected_device_kind)
elif hasattr(self.connected_device, 'unmount_device'):
# As we are on the wrong thread, this call must *not* do
# anything besides set a flag that the right thread will see.
self.connected_device.unmount_device()
def next(self):
if not self.job_steps.empty():
try:
return self.job_steps.get_nowait()
except Queue.Empty:
pass
if not self.jobs.empty():
try:
return self.jobs.get_nowait()
except Queue.Empty:
pass
def run_startup(self, dev):
name = 'unknown'
try:
name = dev.__class__.__name__
dev.startup()
except:
prints('Startup method for device %s threw exception'%name)
traceback.print_exc()
def run(self):
# Do any device-specific startup processing.
for d in self.devices:
self.run_startup(d)
n = d.is_dynamically_controllable()
if n:
self.dynamic_plugins[n] = d
self.devices_initialized.set()
while self.keep_going:
kls = None
while True:
try:
                    (kls, device_kind, folder_path) = \
self.mount_connection_requests.get_nowait()
except Queue.Empty:
break
if kls is not None:
try:
dev = kls(folder_path)
# We just created a new device instance. Call its startup
# method and set the flag to call the shutdown method when
# it disconnects.
self.run_startup(dev)
self.call_shutdown_on_disconnect = True
self.do_connect([[dev, None],], device_kind=device_kind)
except:
prints('Unable to open %s as device (%s)'%(device_kind, folder_path))
traceback.print_exc()
else:
self.detect_device()
do_sleep = True
while True:
job = self.next()
if job is not None:
do_sleep = False
self.current_job = job
if self.device is not None:
self.device.set_progress_reporter(job.report_progress)
self.current_job.run()
self.current_job = None
feedback = getattr(self.device, 'user_feedback_after_callback', None)
if feedback is not None:
self.device.user_feedback_after_callback = None
self.after_callback_feedback_slot(feedback)
else:
break
if do_sleep:
time.sleep(self.sleep_time)
# We are exiting. Call the shutdown method for each plugin
for p in self.devices:
try:
p.shutdown()
except:
pass
def create_job_step(self, func, done, description, to_job, args=[], kwargs={}):
job = DeviceJob(func, done, self.job_manager,
args=args, kwargs=kwargs, description=description)
self.job_manager.add_job(job)
if (done is None or isinstance(done, FunctionDispatcher)) and \
(to_job is not None and to_job == self.current_job):
self.job_steps.put(job)
else:
self.jobs.put(job)
return job
def create_job(self, func, done, description, args=[], kwargs={}):
return self.create_job_step(func, done, description, None, args, kwargs)
def has_card(self):
try:
return bool(self.device.card_prefix())
except:
return False
def _debug_detection(self):
from calibre.devices import debug
raw = debug(plugins=self.devices,
disabled_plugins=self.disabled_device_plugins)
return raw
def debug_detection(self, done):
if self.is_device_connected:
raise ValueError('Device is currently detected in calibre, cannot'
' debug device detection')
self.create_job(self._debug_detection, done,
_('Debug device detection'))
def _get_device_information(self):
info = self.device.get_device_information(end_session=False)
if len(info) < 5:
info = tuple(list(info) + [{}])
info = [i.replace('\x00', '').replace('\x01', '') if isinstance(i, basestring) else i
for i in info]
cp = self.device.card_prefix(end_session=False)
fs = self.device.free_space()
self._device_information = {'info': info, 'prefixes': cp, 'freespace': fs}
return info, cp, fs
def get_device_information(self, done, add_as_step_to_job=None):
'''Get device information and free space on device'''
return self.create_job_step(self._get_device_information, done,
description=_('Get device information'), to_job=add_as_step_to_job)
def _set_library_information(self, library_name, library_uuid, field_metadata):
'''Give the device the current library information'''
self.device.set_library_info(library_name, library_uuid, field_metadata)
def set_library_information(self, done, library_name, library_uuid,
field_metadata, add_as_step_to_job=None):
'''Give the device the current library information'''
return self.create_job_step(self._set_library_information, done,
args=[library_name, library_uuid, field_metadata],
description=_('Set library information'), to_job=add_as_step_to_job)
def slow_driveinfo(self):
''' Update the stored device information with the driveinfo if the
device indicates that getting driveinfo is slow '''
info = self._device_information['info']
if (not info[4] and self.device.SLOW_DRIVEINFO):
info = list(info)
info[4] = self.device.get_driveinfo()
self._device_information['info'] = tuple(info)
def get_current_device_information(self):
return self._device_information
def _books(self):
'''Get metadata from device'''
mainlist = self.device.books(oncard=None, end_session=False)
cardalist = self.device.books(oncard='carda')
cardblist = self.device.books(oncard='cardb')
return (mainlist, cardalist, cardblist)
def books(self, done, add_as_step_to_job=None):
'''Return callable that returns the list of books on device as two booklists'''
return self.create_job_step(self._books, done,
description=_('Get list of books on device'), to_job=add_as_step_to_job)
def _prepare_addable_books(self, paths):
return self.device.prepare_addable_books(paths)
def prepare_addable_books(self, done, paths, add_as_step_to_job=None):
return self.create_job_step(self._prepare_addable_books, done, args=[paths],
description=_('Prepare files for transfer from device'),
to_job=add_as_step_to_job)
def _annotations(self, path_map):
return self.device.get_annotations(path_map)
def annotations(self, done, path_map, add_as_step_to_job=None):
'''Return mapping of ids to annotations. Each annotation is of the
form (type, location_info, content). path_map is a mapping of
ids to paths on the device.'''
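        # Illustrative path_map shape (values are made up):
        #   {book_id: '/path/on/device/to/book.epub', ...}
        # Each annotation in the returned mapping is a
        # (type, location_info, content) tuple, as described above.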
return self.create_job_step(self._annotations, done, args=[path_map],
description=_('Get annotations from device'), to_job=add_as_step_to_job)
def _sync_booklists(self, booklists):
'''Sync metadata to device'''
self.device.sync_booklists(booklists, end_session=False)
return self.device.card_prefix(end_session=False), self.device.free_space()
def sync_booklists(self, done, booklists, plugboards, add_as_step_to_job=None):
if hasattr(self.connected_device, 'set_plugboards') and \
callable(self.connected_device.set_plugboards):
self.connected_device.set_plugboards(plugboards, find_plugboard)
return self.create_job_step(self._sync_booklists, done, args=[booklists],
description=_('Send metadata to device'), to_job=add_as_step_to_job)
def upload_collections(self, done, booklist, on_card, add_as_step_to_job=None):
return self.create_job_step(booklist.rebuild_collections, done,
args=[booklist, on_card],
description=_('Send collections to device'),
to_job=add_as_step_to_job)
def _upload_books(self, files, names, on_card=None, metadata=None, plugboards=None):
'''Upload books to device: '''
from calibre.ebooks.metadata.meta import set_metadata
if hasattr(self.connected_device, 'set_plugboards') and \
callable(self.connected_device.set_plugboards):
self.connected_device.set_plugboards(plugboards, find_plugboard)
if metadata and files and len(metadata) == len(files):
for f, mi in zip(files, metadata):
if isinstance(f, unicode):
ext = f.rpartition('.')[-1].lower()
cpb = find_plugboard(
device_name_for_plugboards(self.connected_device),
ext, plugboards)
if ext:
try:
if DEBUG:
prints('Setting metadata in:', mi.title, 'at:',
f, file=sys.__stdout__)
with lopen(f, 'r+b') as stream:
if cpb:
newmi = mi.deepcopy_metadata()
newmi.template_to_attribute(mi, cpb)
else:
newmi = mi
nuke_comments = getattr(self.connected_device,
'NUKE_COMMENTS', None)
if nuke_comments is not None:
mi.comments = nuke_comments
set_metadata(stream, newmi, stream_type=ext)
except:
if DEBUG:
prints(traceback.format_exc(), file=sys.__stdout__)
try:
return self.device.upload_books(files, names, on_card,
metadata=metadata, end_session=False)
finally:
if metadata:
for mi in metadata:
try:
if mi.cover:
os.remove(mi.cover)
except:
pass
def upload_books(self, done, files, names, on_card=None, titles=None,
metadata=None, plugboards=None, add_as_step_to_job=None):
desc = ngettext('Upload one book to the device', 'Upload {} books to the device', len(names)).format(len(names))
if titles:
desc += u': ' + u', '.join(titles)
return self.create_job_step(self._upload_books, done, to_job=add_as_step_to_job,
args=[files, names],
kwargs={'on_card':on_card,'metadata':metadata,'plugboards':plugboards}, description=desc)
def add_books_to_metadata(self, locations, metadata, booklists):
self.device.add_books_to_metadata(locations, metadata, booklists)
def _delete_books(self, paths):
'''Remove books from device'''
self.device.delete_books(paths, end_session=True)
def delete_books(self, done, paths, add_as_step_to_job=None):
return self.create_job_step(self._delete_books, done, args=[paths],
description=_('Delete books from device'),
to_job=add_as_step_to_job)
def remove_books_from_metadata(self, paths, booklists):
self.device.remove_books_from_metadata(paths, booklists)
def _save_books(self, paths, target):
'''Copy books from device to disk'''
for path in paths:
name = sanitize_file_name2(os.path.basename(path))
dest = os.path.join(target, name)
if os.path.abspath(dest) != os.path.abspath(path):
with lopen(dest, 'wb') as f:
self.device.get_file(path, f)
def save_books(self, done, paths, target, add_as_step_to_job=None):
return self.create_job_step(self._save_books, done, args=[paths, target],
description=_('Download books from device'),
to_job=add_as_step_to_job)
def _view_book(self, path, target):
with lopen(target, 'wb') as f:
self.device.get_file(path, f)
return target
def view_book(self, done, path, target, add_as_step_to_job=None):
return self.create_job_step(self._view_book, done, args=[path, target],
description=_('View book on device'), to_job=add_as_step_to_job)
def set_current_library_uuid(self, uuid):
self.current_library_uuid = uuid
def set_driveinfo_name(self, location_code, name):
if self.connected_device:
self.connected_device.set_driveinfo_name(location_code, name)
# dynamic plugin interface
# This is a helper function that handles queueing with the device manager
def _call_request(self, name, method, *args, **kwargs):
d = self.dynamic_plugins.get(name, None)
if d:
return getattr(d, method)(*args, **kwargs)
return kwargs.get('default', None)
# The dynamic plugin methods below must be called on the GUI thread. They
# will switch to the device thread before calling the plugin.
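    # For example, GUI code might query a dynamically controllable plugin like
    # this (a sketch; the plugin name and option string are hypothetical):
    #   if device_manager.is_running('some-plugin'):
    #       port = device_manager.get_option('some-plugin', 'port_number',
    #                                        default=0)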
def start_plugin(self, name):
return self._call_request(name, 'start_plugin')
def stop_plugin(self, name):
self._call_request(name, 'stop_plugin')
def get_option(self, name, opt_string, default=None):
return self._call_request(name, 'get_option', opt_string, default=default)
def set_option(self, name, opt_string, opt_value):
self._call_request(name, 'set_option', opt_string, opt_value)
def is_running(self, name):
if self._call_request(name, 'is_running'):
return True
return False
def is_enabled(self, name):
try:
d = self.dynamic_plugins.get(name, None)
if d:
return True
except:
pass
return False
# }}}
class DeviceAction(QAction): # {{{
a_s = pyqtSignal(object)
def __init__(self, dest, delete, specific, icon_path, text, parent=None):
QAction.__init__(self, QIcon(icon_path), text, parent)
self.dest = dest
self.delete = delete
self.specific = specific
self.triggered.connect(self.emit_triggered)
def emit_triggered(self, *args):
self.a_s.emit(self)
def __repr__(self):
return self.__class__.__name__ + ':%s:%s:%s'%(self.dest, self.delete,
self.specific)
# }}}
class DeviceMenu(QMenu): # {{{
fetch_annotations = pyqtSignal()
disconnect_mounted_device = pyqtSignal()
sync = pyqtSignal(object, object, object)
def __init__(self, parent=None):
QMenu.__init__(self, parent)
self.group = QActionGroup(self)
self._actions = []
self._memory = []
self.set_default_menu = QMenu(_('Set default send to device action'))
self.set_default_menu.setIcon(QIcon(I('config.png')))
basic_actions = [
('main:', False, False, I('reader.png'),
_('Send to main memory')),
('carda:0', False, False, I('sd.png'),
_('Send to storage card A')),
('cardb:0', False, False, I('sd.png'),
_('Send to storage card B')),
]
delete_actions = [
('main:', True, False, I('reader.png'),
_('Main Memory')),
('carda:0', True, False, I('sd.png'),
_('Storage Card A')),
('cardb:0', True, False, I('sd.png'),
_('Storage Card B')),
]
specific_actions = [
('main:', False, True, I('reader.png'),
_('Main Memory')),
('carda:0', False, True, I('sd.png'),
_('Storage Card A')),
('cardb:0', False, True, I('sd.png'),
_('Storage Card B')),
]
later_menus = []
for menu in (self, self.set_default_menu):
for actions, desc in (
(basic_actions, ''),
(specific_actions, _('Send specific format to')),
(delete_actions, _('Send and delete from library')),
):
mdest = menu
if actions is not basic_actions:
mdest = QMenu(desc)
self._memory.append(mdest)
later_menus.append(mdest)
if menu is self.set_default_menu:
menu.addMenu(mdest)
menu.addSeparator()
for dest, delete, specific, icon, text in actions:
action = DeviceAction(dest, delete, specific, icon, text, self)
self._memory.append(action)
if menu is self.set_default_menu:
action.setCheckable(True)
action.setText(action.text())
self.group.addAction(action)
else:
action.a_s.connect(self.action_triggered)
self._actions.append(action)
mdest.addAction(action)
if actions is basic_actions:
menu.addSeparator()
da = config['default_send_to_device_action']
done = False
for action in self.group.actions():
if repr(action) == da:
action.setChecked(True)
done = True
break
if not done:
action = list(self.group.actions())[0]
action.setChecked(True)
config['default_send_to_device_action'] = repr(action)
self.group.triggered.connect(self.change_default_action)
self.addSeparator()
self.addMenu(later_menus[0])
self.addSeparator()
mitem = self.addAction(QIcon(I('eject.png')), _('Eject device'))
mitem.setEnabled(False)
mitem.triggered.connect(lambda x : self.disconnect_mounted_device.emit())
self.disconnect_mounted_device_action = mitem
self.addSeparator()
self.addMenu(self.set_default_menu)
self.addSeparator()
self.addMenu(later_menus[1])
self.addSeparator()
annot = self.addAction(_('Fetch annotations (experimental)'))
annot.setEnabled(False)
annot.triggered.connect(lambda x :
self.fetch_annotations.emit())
self.annotation_action = annot
self.enable_device_actions(False)
def change_default_action(self, action):
config['default_send_to_device_action'] = repr(action)
action.setChecked(True)
def action_triggered(self, action):
self.sync.emit(action.dest, action.delete, action.specific)
def trigger_default(self, *args):
r = config['default_send_to_device_action']
for action in self._actions:
if repr(action) == r:
self.action_triggered(action)
break
def enable_device_actions(self, enable, card_prefix=(None, None),
device=None):
for action in self._actions:
if action.dest in ('main:', 'carda:0', 'cardb:0'):
if not enable:
action.setEnabled(False)
else:
if action.dest == 'main:':
action.setEnabled(True)
elif action.dest == 'carda:0':
if card_prefix and card_prefix[0] is not None:
action.setEnabled(True)
else:
action.setEnabled(False)
elif action.dest == 'cardb:0':
if card_prefix and card_prefix[1] is not None:
action.setEnabled(True)
else:
action.setEnabled(False)
annot_enable = enable and getattr(device, 'SUPPORTS_ANNOTATIONS', False)
self.annotation_action.setEnabled(annot_enable)
# }}}
class DeviceSignals(QObject): # {{{
#: This signal is emitted once, after metadata is downloaded from the
#: connected device.
#: The sequence: gui.device_manager.is_device_connected will become True,
#: and the device_connection_changed signal will be emitted,
#: then sometime later gui.device_metadata_available will be signaled.
#: This does not mean that there are no more jobs running. Automatic metadata
#: management might have kicked off a sync_booklists to write new metadata onto
#: the device, and that job might still be running when the signal is emitted.
device_metadata_available = pyqtSignal()
#: This signal is emitted once when the device is detected and once when
#: it is disconnected. If the parameter is True, then it is a connection,
#: otherwise a disconnection.
device_connection_changed = pyqtSignal(object)
device_signals = DeviceSignals()
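# For example, other GUI components can subscribe to these notifications
# (a sketch; on_device_connection_changed is a hypothetical slot):
#   from calibre.gui2.device import device_signals
#   device_signals.device_connection_changed.connect(
#       on_device_connection_changed)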
# }}}
class DeviceMixin(object): # {{{
def __init__(self, *args, **kwargs):
pass
def init_device_mixin(self):
self.device_error_dialog = error_dialog(self, _('Error'),
_('Error communicating with device'), ' ')
self.device_error_dialog.setModal(Qt.NonModal)
self.device_manager = DeviceManager(FunctionDispatcher(self.device_detected),
self.job_manager, Dispatcher(self.status_bar.show_message),
Dispatcher(self.show_open_feedback),
FunctionDispatcher(self.allow_connect), Dispatcher(self.after_callback_feedback))
self.device_manager.start()
self.device_manager.devices_initialized.wait()
if tweaks['auto_connect_to_folder']:
self.connect_to_folder_named(tweaks['auto_connect_to_folder'])
def allow_connect(self, name, icon):
return question_dialog(self, _('Manage the %s?')%name,
_('Detected the <b>%s</b>. Do you want calibre to manage it?')%
name, show_copy_button=False,
override_icon=QIcon(icon))
def after_callback_feedback(self, feedback):
        title, msg, det_msg = feedback['title'], feedback['msg'], feedback['det_msg']
        info_dialog(self, title, msg, det_msg=det_msg).show()
def debug_detection(self, done):
self.debug_detection_callback = weakref.ref(done)
self.device_manager.debug_detection(FunctionDispatcher(self.debug_detection_done))
def debug_detection_done(self, job):
d = self.debug_detection_callback()
if d is not None:
d(job)
def show_open_feedback(self, devname, e):
try:
self.__of_dev_mem__ = d = e.custom_dialog(self)
except NotImplementedError:
self.__of_dev_mem__ = d = info_dialog(self, devname, e.feedback_msg)
d.show()
def auto_convert_question(self, msg, autos):
autos = u'\n'.join(map(unicode, map(force_unicode, autos)))
return self.ask_a_yes_no_question(
_('No suitable formats'), msg,
ans_when_user_unavailable=True,
det_msg=autos, skip_dialog_name='auto_convert_before_send'
)
def set_default_thumbnail(self, height):
ratio = height / float(cprefs['cover_height'])
self.default_thumbnail_prefs = prefs = override_prefs(cprefs)
scale_cover(prefs, ratio)
def connect_to_folder_named(self, folder):
if os.path.exists(folder) and os.path.isdir(folder):
self.device_manager.mount_device(kls=FOLDER_DEVICE, kind='folder',
path=folder)
def connect_to_folder(self):
dir = choose_dir(self, 'Select Device Folder',
_('Select folder to open as device'))
if dir is not None:
self.device_manager.mount_device(kls=FOLDER_DEVICE, kind='folder', path=dir)
# disconnect from folder devices
def disconnect_mounted_device(self):
self.device_manager.umount_device()
def configure_connected_device(self):
if not self.device_manager.is_device_connected:
return
if self.job_manager.has_device_jobs(queued_also=True):
return error_dialog(self, _('Running jobs'),
_('Cannot configure the device while there are running'
' device jobs.'), show=True)
dev = self.device_manager.connected_device
cw = dev.config_widget()
config_dialog = QDialog(self)
config_dialog.setWindowTitle(_('Configure %s')%dev.get_gui_name())
config_dialog.setWindowIcon(QIcon(I('config.png')))
l = QVBoxLayout(config_dialog)
config_dialog.setLayout(l)
bb = QDialogButtonBox(QDialogButtonBox.Ok|QDialogButtonBox.Cancel)
bb.accepted.connect(config_dialog.accept)
bb.rejected.connect(config_dialog.reject)
l.addWidget(cw)
l.addWidget(bb)
# We do not save/restore the size of this dialog as different devices
# have very different size requirements
config_dialog.resize(config_dialog.sizeHint())
def validate():
if cw.validate():
QDialog.accept(config_dialog)
config_dialog.accept = validate
if config_dialog.exec_() == config_dialog.Accepted:
dev.save_settings(cw)
do_restart = show_restart_warning(_('Restart calibre for the changes to %s'
' to be applied.')%dev.get_gui_name(), parent=self)
if do_restart:
self.quit(restart=True)
def _sync_action_triggered(self, *args):
m = getattr(self, '_sync_menu', None)
if m is not None:
m.trigger_default()
def create_device_menu(self):
self._sync_menu = DeviceMenu(self)
self.iactions['Send To Device'].qaction.setMenu(self._sync_menu)
self.iactions['Connect Share'].build_email_entries()
self._sync_menu.sync.connect(self.dispatch_sync_event)
self._sync_menu.fetch_annotations.connect(
self.iactions['Fetch Annotations'].fetch_annotations)
self._sync_menu.disconnect_mounted_device.connect(self.disconnect_mounted_device)
self.iactions['Connect Share'].set_state(self.device_connected,
None)
if self.device_connected:
self._sync_menu.disconnect_mounted_device_action.setEnabled(True)
else:
self._sync_menu.disconnect_mounted_device_action.setEnabled(False)
def device_job_exception(self, job):
'''
Handle exceptions in threaded device jobs.
'''
if isinstance(getattr(job, 'exception', None), UserFeedback):
ex = job.exception
func = {UserFeedback.ERROR:error_dialog,
UserFeedback.WARNING:warning_dialog,
UserFeedback.INFO:info_dialog}[ex.level]
return func(self, _('Failed'), ex.msg, det_msg=ex.details if
ex.details else '', show=True)
try:
if 'Could not read 32 bytes on the control bus.' in \
unicode(job.details):
error_dialog(self, _('Error talking to device'),
_('There was a temporary error talking to the '
'device. Please unplug and reconnect the device '
'or reboot.')).show()
return
except:
pass
if getattr(job, 'exception', None).__class__.__name__ == 'MTPInvalidSendPathError':
try:
from calibre.gui2.device_drivers.mtp_config import SendError
return SendError(self, job.exception).exec_()
except:
traceback.print_exc()
try:
prints(job.details, file=sys.stderr)
except:
pass
if not self.device_error_dialog.isVisible():
self.device_error_dialog.set_details(job.details)
self.device_error_dialog.show()
# Device connected {{{
def set_device_menu_items_state(self, connected):
self.iactions['Connect Share'].set_state(connected,
self.device_manager.device)
if connected:
self._sync_menu.disconnect_mounted_device_action.setEnabled(True)
self._sync_menu.enable_device_actions(True,
self.device_manager.device.card_prefix(),
self.device_manager.device)
self.eject_action.setEnabled(True)
else:
self._sync_menu.disconnect_mounted_device_action.setEnabled(False)
self._sync_menu.enable_device_actions(False)
self.eject_action.setEnabled(False)
def device_detected(self, connected, device_kind):
'''
        Called when a device is connected to or disconnected from the computer.
'''
# This can happen as this function is called in a queued connection and
# the user could have yanked the device in the meantime
if connected and not self.device_manager.is_device_connected:
connected = False
self.set_device_menu_items_state(connected)
if connected:
self.device_connected = device_kind
self.device_manager.get_device_information(
FunctionDispatcher(self.info_read))
self.set_default_thumbnail(
self.device_manager.device.THUMBNAIL_HEIGHT)
self.status_bar.show_message(_('Device: ')+
self.device_manager.device.get_gui_name()+
_(' detected.'), 3000)
self.library_view.set_device_connected(self.device_connected)
self.refresh_ondevice(reset_only=True)
else:
self.device_connected = None
self.status_bar.device_disconnected()
dviews = (self.memory_view, self.card_a_view, self.card_b_view)
for v in dviews:
v.save_state()
if self.current_view() != self.library_view:
self.book_details.reset_info()
self.location_manager.update_devices()
self.bars_manager.update_bars(reveal_bar=True)
self.library_view.set_device_connected(self.device_connected)
# Empty any device view information
for v in dviews:
v.set_database([])
self.refresh_ondevice()
device_signals.device_connection_changed.emit(connected)
def info_read(self, job):
'''
Called once device information has been read.
'''
if job.failed:
return self.device_job_exception(job)
info, cp, fs = job.result
self.location_manager.update_devices(cp, fs,
self.device_manager.device.icon)
self.bars_manager.update_bars(reveal_bar=True)
self.status_bar.device_connected(info[0])
db = self.current_db
self.device_manager.set_library_information(None, os.path.basename(db.library_path),
db.library_id, db.field_metadata,
add_as_step_to_job=job)
self.device_manager.books(FunctionDispatcher(self.metadata_downloaded),
add_as_step_to_job=job)
def metadata_downloaded(self, job):
'''
Called once metadata has been read for all books on the device.
'''
if job.failed:
self.device_job_exception(job)
return
self.device_manager.slow_driveinfo()
# set_books_in_library might schedule a sync_booklists job
if DEBUG:
prints('DeviceJob: metadata_downloaded: Starting set_books_in_library')
self.set_books_in_library(job.result, reset=True, add_as_step_to_job=job)
if DEBUG:
prints('DeviceJob: metadata_downloaded: updating views')
mainlist, cardalist, cardblist = job.result
self.memory_view.set_database(mainlist)
self.memory_view.set_editable(self.device_manager.device.CAN_SET_METADATA,
self.device_manager.device.BACKLOADING_ERROR_MESSAGE
is None)
self.card_a_view.set_database(cardalist)
self.card_a_view.set_editable(self.device_manager.device.CAN_SET_METADATA,
self.device_manager.device.BACKLOADING_ERROR_MESSAGE
is None)
self.card_b_view.set_database(cardblist)
self.card_b_view.set_editable(self.device_manager.device.CAN_SET_METADATA,
self.device_manager.device.BACKLOADING_ERROR_MESSAGE
is None)
if DEBUG:
prints('DeviceJob: metadata_downloaded: syncing')
self.sync_news()
self.sync_catalogs()
if DEBUG:
prints('DeviceJob: metadata_downloaded: refreshing ondevice')
self.refresh_ondevice()
if DEBUG:
prints('DeviceJob: metadata_downloaded: sending metadata_available signal')
device_signals.device_metadata_available.emit()
def refresh_ondevice(self, reset_only=False):
'''
Force the library view to refresh, taking into consideration new
device books information
'''
with self.library_view.preserve_state():
self.book_on_device(None, reset=True)
if reset_only:
return
self.library_view.model().refresh_ondevice()
# }}}
def remove_paths(self, paths):
return self.device_manager.delete_books(
FunctionDispatcher(self.books_deleted), paths)
def books_deleted(self, job):
'''
Called once deletion is done on the device
'''
cv, row = self.current_view(), -1
if cv is not self.library_view:
row = cv.currentIndex().row()
for view in (self.memory_view, self.card_a_view, self.card_b_view):
view.model().deletion_done(job, job.failed)
if job.failed:
self.device_job_exception(job)
return
dm = self.iactions['Remove Books'].delete_memory
if job in dm:
paths, model = dm.pop(job)
self.device_manager.remove_books_from_metadata(paths,
self.booklists())
model.paths_deleted(paths)
        # Force recomputation of the library's ondevice info. We need to call
# set_books_in_library even though books were not added because
# the deleted book might have been an exact match. Upload the booklists
# if set_books_in_library did not.
if not self.set_books_in_library(self.booklists(), reset=True,
add_as_step_to_job=job, do_device_sync=False):
self.upload_booklists(job)
# We need to reset the ondevice flags in the library. Use a big hammer,
# so we don't need to worry about whether some succeeded or not.
self.refresh_ondevice()
if row > -1:
cv.set_current_row(row)
try:
if not self.current_view().currentIndex().isValid():
self.current_view().set_current_row()
self.current_view().refresh_book_details()
except:
traceback.print_exc()
def dispatch_sync_event(self, dest, delete, specific):
rows = self.library_view.selectionModel().selectedRows()
if not rows or len(rows) == 0:
error_dialog(self, _('No books'), _('No books')+' '+
_('selected to send')).exec_()
return
fmt = None
if specific:
if (not self.device_connected or not self.device_manager or
self.device_manager.device is None):
error_dialog(self, _('No device'),
_('No device connected'), show=True)
return
formats = []
aval_out_formats = available_output_formats()
format_count = {}
for row in rows:
fmts = self.library_view.model().db.formats(row.row())
if fmts:
for f in fmts.split(','):
f = f.lower()
if f in format_count:
format_count[f] += 1
else:
format_count[f] = 1
for f in self.device_manager.device.settings().format_map:
if f in format_count.keys():
formats.append((f, _('%(num)i of %(total)i books') % dict(
num=format_count[f], total=len(rows)),
True if f in aval_out_formats else False))
elif f in aval_out_formats:
formats.append((f, _('0 of %i books') % len(rows), True))
d = ChooseFormatDeviceDialog(self, _('Choose format to send to device'), formats)
if d.exec_() != QDialog.Accepted:
return
if d.format():
fmt = d.format().lower()
dest, sub_dest = dest.partition(':')[0::2]
if dest in ('main', 'carda', 'cardb'):
if not self.device_connected or not self.device_manager:
error_dialog(self, _('No device'),
_('Cannot send: No device is connected')).exec_()
return
if dest == 'carda' and not self.device_manager.has_card():
error_dialog(self, _('No card'),
_('Cannot send: Device has no storage card')).exec_()
return
if dest == 'cardb' and not self.device_manager.has_card():
error_dialog(self, _('No card'),
_('Cannot send: Device has no storage card')).exec_()
return
if dest == 'main':
on_card = None
else:
on_card = dest
self.sync_to_device(on_card, delete, fmt)
elif dest == 'mail':
sub_dest_parts = sub_dest.split(';')
while len(sub_dest_parts) < 3:
sub_dest_parts.append('')
to = sub_dest_parts[0]
fmts = sub_dest_parts[1]
subject = ';'.join(sub_dest_parts[2:])
fmts = [x.strip().lower() for x in fmts.split(',')]
self.send_by_mail(to, fmts, delete, subject=subject)
elif dest == 'choosemail':
from calibre.gui2.email import select_recipients
data = select_recipients(self)
if data:
self.send_multiple_by_mail(data, delete)
def cover_to_thumbnail(self, data):
if self.device_manager.device and \
hasattr(self.device_manager.device, 'THUMBNAIL_WIDTH'):
try:
return scale_image(data,
self.device_manager.device.THUMBNAIL_WIDTH,
self.device_manager.device.THUMBNAIL_HEIGHT,
preserve_aspect_ratio=False)
except:
pass
return
ht = self.device_manager.device.THUMBNAIL_HEIGHT \
if self.device_manager else DevicePlugin.THUMBNAIL_HEIGHT
try:
return scale_image(data, ht, ht,
compression_quality=self.device_manager.device.THUMBNAIL_COMPRESSION_QUALITY)
except:
pass
def sync_catalogs(self, send_ids=None, do_auto_convert=True):
if self.device_connected:
settings = self.device_manager.device.settings()
ids = list(dynamic.get('catalogs_to_be_synced', set([]))) if send_ids is None else send_ids
ids = [id for id in ids if self.library_view.model().db.has_id(id)]
with BusyCursor():
files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(
ids, settings.format_map,
exclude_auto=do_auto_convert)
auto = []
if do_auto_convert and _auto_ids:
for id in _auto_ids:
dbfmts = self.library_view.model().db.formats(id, index_is_id=True)
formats = [] if dbfmts is None else \
[f.lower() for f in dbfmts.split(',')]
if set(formats).intersection(available_input_formats()) \
and set(settings.format_map).intersection(available_output_formats()):
auto.append(id)
if auto:
format = None
for fmt in settings.format_map:
if fmt in list(set(settings.format_map).intersection(set(available_output_formats()))):
format = fmt
break
if format is not None:
autos = [self.library_view.model().db.title(id, index_is_id=True) for id in auto]
if self.auto_convert_question(
_('Auto convert the following books before uploading to '
'the device?'), autos):
self.iactions['Convert Books'].auto_convert_catalogs(auto, format)
files = [f for f in files if f is not None]
if not files:
dynamic.set('catalogs_to_be_synced', set([]))
return
metadata = self.library_view.model().metadata_for(ids)
names = []
for mi in metadata:
prefix = ascii_filename(mi.title)
if not isinstance(prefix, unicode):
prefix = prefix.decode(preferred_encoding, 'replace')
prefix = ascii_filename(prefix)
names.append('%s_%d%s'%(prefix, id,
os.path.splitext(f)[1]))
self.update_thumbnail(mi)
dynamic.set('catalogs_to_be_synced', set([]))
if files:
remove = []
space = {self.location_manager.free[0] : None,
self.location_manager.free[1] : 'carda',
self.location_manager.free[2] : 'cardb'}
on_card = space.get(sorted(space.keys(), reverse=True)[0], None)
self.upload_books(files, names, metadata,
on_card=on_card,
memory=[files, remove])
self.status_bar.show_message(_('Sending catalogs to device.'), 5000)
@dynamic_property
def news_to_be_synced(self):
doc = 'Set of ids to be sent to device'
def fget(self):
ans = []
try:
ans = self.library_view.model().db.prefs.get('news_to_be_synced',
[])
except:
import traceback
traceback.print_exc()
return set(ans)
def fset(self, ids):
try:
self.library_view.model().db.new_api.set_pref('news_to_be_synced',
list(ids))
except:
import traceback
traceback.print_exc()
return property(fget=fget, fset=fset, doc=doc)
def sync_news(self, send_ids=None, do_auto_convert=True):
if self.device_connected:
del_on_upload = config['delete_news_from_library_on_upload']
settings = self.device_manager.device.settings()
ids = list(self.news_to_be_synced) if send_ids is None else send_ids
ids = [id for id in ids if self.library_view.model().db.has_id(id)]
with BusyCursor():
files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(
ids, settings.format_map,
exclude_auto=do_auto_convert)
auto = []
if do_auto_convert and _auto_ids:
for id in _auto_ids:
dbfmts = self.library_view.model().db.formats(id, index_is_id=True)
formats = [] if dbfmts is None else \
[f.lower() for f in dbfmts.split(',')]
if set(formats).intersection(available_input_formats()) \
and set(settings.format_map).intersection(available_output_formats()):
auto.append(id)
if auto:
format = None
for fmt in settings.format_map:
if fmt in list(set(settings.format_map).intersection(set(available_output_formats()))):
format = fmt
break
if format is not None:
autos = [self.library_view.model().db.title(id, index_is_id=True) for id in auto]
if self.auto_convert_question(
_('Auto convert the following books before uploading to '
'the device?'), autos):
self.iactions['Convert Books'].auto_convert_news(auto, format)
files = [f for f in files if f is not None]
if not files:
self.news_to_be_synced = set([])
return
metadata = self.library_view.model().metadata_for(ids)
names = []
for mi in metadata:
prefix = ascii_filename(mi.title)
if not isinstance(prefix, unicode):
prefix = prefix.decode(preferred_encoding, 'replace')
prefix = ascii_filename(prefix)
names.append('%s_%d%s'%(prefix, id,
os.path.splitext(f)[1]))
self.update_thumbnail(mi)
self.news_to_be_synced = set([])
if config['upload_news_to_device'] and files:
remove = ids if del_on_upload else []
space = {self.location_manager.free[0] : None,
self.location_manager.free[1] : 'carda',
self.location_manager.free[2] : 'cardb'}
on_card = space.get(sorted(space.keys(), reverse=True)[0], None)
try:
total_size = sum([os.stat(f).st_size for f in files])
except:
try:
import traceback
traceback.print_exc()
except:
pass
total_size = self.location_manager.free[0]
loc = tweaks['send_news_to_device_location']
loc_index = {"carda": 1, "cardb": 2}.get(loc, 0)
if self.location_manager.free[loc_index] > total_size + (1024**2):
# Send news to main memory if enough space available
# as some devices like the Nook Color cannot handle
# periodicals on SD cards properly
on_card = loc if loc in ('carda', 'cardb') else None
self.upload_books(files, names, metadata,
on_card=on_card,
memory=[files, remove])
self.status_bar.show_message(_('Sending news to device.'), 5000)
def sync_to_device(self, on_card, delete_from_library,
specific_format=None, send_ids=None, do_auto_convert=True):
ids = [self.library_view.model().id(r)
for r in self.library_view.selectionModel().selectedRows()] \
if send_ids is None else send_ids
if not self.device_manager or not ids or len(ids) == 0 or \
not self.device_manager.is_device_connected:
return
settings = self.device_manager.device.settings()
with BusyCursor():
_files, _auto_ids = self.library_view.model().get_preferred_formats_from_ids(ids,
settings.format_map,
specific_format=specific_format,
exclude_auto=do_auto_convert)
if do_auto_convert:
ok_ids = list(set(ids).difference(_auto_ids))
ids = [i for i in ids if i in ok_ids]
else:
_auto_ids = []
metadata = self.library_view.model().metadata_for(ids)
ids = iter(ids)
for mi in metadata:
self.update_thumbnail(mi)
imetadata = iter(metadata)
bad, good, gf, names, remove_ids = [], [], [], [], []
for f in _files:
mi = imetadata.next()
id = ids.next()
if f is None:
bad.append(mi.title)
else:
remove_ids.append(id)
good.append(mi)
gf.append(f)
t = mi.title
if not t:
t = _('Unknown')
a = mi.format_authors()
if not a:
a = _('Unknown')
prefix = ascii_filename(t+' - '+a)
if not isinstance(prefix, unicode):
prefix = prefix.decode(preferred_encoding, 'replace')
prefix = ascii_filename(prefix)
names.append('%s_%d%s'%(prefix, id, os.path.splitext(f)[1]))
remove = remove_ids if delete_from_library else []
self.upload_books(gf, names, good, on_card, memory=(_files, remove))
self.status_bar.show_message(_('Sending books to device.'), 5000)
auto = []
if _auto_ids != []:
for id in _auto_ids:
if specific_format is None:
formats = self.library_view.model().db.formats(id, index_is_id=True)
formats = formats.split(',') if formats is not None else []
formats = [f.lower().strip() for f in formats]
if (list(set(formats).intersection(available_input_formats())) != [] and
list(set(settings.format_map).intersection(available_output_formats())) != []):
auto.append(id)
else:
bad.append(self.library_view.model().db.title(id, index_is_id=True))
else:
if specific_format in list(set(settings.format_map).intersection(set(available_output_formats()))):
auto.append(id)
else:
bad.append(self.library_view.model().db.title(id, index_is_id=True))
if auto != []:
format = specific_format if specific_format in \
list(set(settings.format_map).intersection(set(available_output_formats()))) \
else None
if not format:
for fmt in settings.format_map:
if fmt in list(set(settings.format_map).intersection(set(available_output_formats()))):
format = fmt
break
if not format:
bad += auto
else:
autos = [self.library_view.model().db.title(id, index_is_id=True) for id in auto]
if self.auto_convert_question(
_('Auto convert the following books before uploading to '
'the device?'), autos):
self.iactions['Convert Books'].auto_convert(auto, on_card, format)
if bad:
bad = '\n'.join('%s'%(i,) for i in bad)
d = warning_dialog(self, _('No suitable formats'),
_('Could not upload the following books to the device, '
'as no suitable formats were found. Convert the book(s) to a '
'format supported by your device first.'
), bad)
d.exec_()
def upload_dirtied_booklists(self):
'''
Upload metadata to device.
'''
plugboards = self.library_view.model().db.prefs.get('plugboards', {})
self.device_manager.sync_booklists(Dispatcher(lambda x: x),
self.booklists(), plugboards)
def upload_booklists(self, add_as_step_to_job=None):
'''
Upload metadata to device.
'''
plugboards = self.library_view.model().db.prefs.get('plugboards', {})
self.device_manager.sync_booklists(FunctionDispatcher(self.metadata_synced),
self.booklists(), plugboards,
add_as_step_to_job=add_as_step_to_job)
def metadata_synced(self, job):
'''
Called once metadata has been uploaded.
'''
if job.failed:
self.device_job_exception(job)
return
cp, fs = job.result
self.location_manager.update_devices(cp, fs,
self.device_manager.device.icon)
# reset the views so that up-to-date info is shown. These need to be
# here because some drivers update collections in sync_booklists
cv, row = self.current_view(), -1
if cv is not self.library_view:
row = cv.currentIndex().row()
self.memory_view.reset()
self.card_a_view.reset()
self.card_b_view.reset()
if row > -1:
cv.set_current_row(row)
def _upload_collections(self, job):
if job.failed:
self.device_job_exception(job)
def upload_collections(self, booklist, view=None, oncard=None):
return self.device_manager.upload_collections(self._upload_collections,
booklist, oncard)
def upload_books(self, files, names, metadata, on_card=None, memory=None):
'''
Upload books to device.
:param files: List of either paths to files or file like objects
'''
titles = [i.title for i in metadata]
plugboards = self.library_view.model().db.prefs.get('plugboards', {})
job = self.device_manager.upload_books(
FunctionDispatcher(self.books_uploaded),
files, names, on_card=on_card,
metadata=metadata, titles=titles, plugboards=plugboards
)
self.upload_memory[job] = (metadata, on_card, memory, files)
def books_uploaded(self, job):
'''
Called once books have been uploaded.
'''
metadata, on_card, memory, files = self.upload_memory.pop(job)
if job.exception is not None:
if isinstance(job.exception, FreeSpaceError):
where = 'in main memory.' if 'memory' in str(job.exception) \
else 'on the storage card.'
titles = '\n'.join(['<li>'+mi.title+'</li>'
for mi in metadata])
d = error_dialog(self, _('No space on device'),
_('<p>Cannot upload books to device: there '
'is no more free space available ')+where+
'</p>\n<ul>%s</ul>'%(titles,))
d.exec_()
elif isinstance(job.exception, WrongDestinationError):
error_dialog(self, _('Incorrect destination'),
unicode(job.exception), show=True)
else:
self.device_job_exception(job)
return
try:
self.device_manager.add_books_to_metadata(job.result,
metadata, self.booklists())
except:
traceback.print_exc()
raise
books_to_be_deleted = []
if memory and memory[1]:
books_to_be_deleted = memory[1]
self.library_view.model().delete_books_by_id(books_to_be_deleted)
# There are some cases where sending a book to the device overwrites a
# book already there with a different book. This happens frequently in
# news. When this happens, the book match indication will be wrong
# because the UUID changed. Force both the device and the library view
# to refresh the flags. Set_books_in_library could upload the booklists.
# If it does not, then do it here.
if not self.set_books_in_library(self.booklists(), reset=True,
add_as_step_to_job=job, do_device_sync=False):
self.upload_booklists(job)
self.refresh_ondevice()
view = self.card_a_view if on_card == 'carda' else \
self.card_b_view if on_card == 'cardb' else self.memory_view
view.model().resort(reset=False)
view.model().research()
if files:
for f in files:
# Remove temporary files
try:
rem = not getattr(
self.device_manager.device,
'KEEP_TEMP_FILES_AFTER_UPLOAD', False)
if rem and 'caltmpfmt.' in f:
os.remove(f)
except:
pass
def update_metadata_on_device(self):
self.set_books_in_library(self.booklists(), reset=True, force_send=True)
self.refresh_ondevice()
def set_current_library_information(self, library_name, library_uuid, field_metadata):
self.device_manager.set_current_library_uuid(library_uuid)
if self.device_manager.is_device_connected:
self.device_manager.set_library_information(None, library_name,
library_uuid, field_metadata)
def book_on_device(self, id, reset=False):
'''
Return an indication of whether the given book represented by its db id
is on the currently connected device. It returns a 5 element list. The
first three elements represent memory locations main, carda, and cardb,
and are true if the book is identifiably in that memory. The fourth
is a count of how many instances of the book were found across all
the memory locations. The fifth is a set of paths to the
matching books on the device.
'''
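# Illustrative sketch of the return value (an assumed example, not from the
# original source): [True, None, None, 2, {'Books/foo.epub'}] would mean the
# book is identifiably in main memory, was matched twice across all memory
# locations, and the single matching device path is in the returned set.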
loc = [None, None, None, 0, set([])]
if reset:
self.book_db_id_cache = None
self.book_db_id_counts = None
self.book_db_uuid_path_map = None
return
if not self.device_manager.is_device_connected or \
not hasattr(self, 'db_book_uuid_cache'):
return loc
if self.book_db_id_cache is None:
self.book_db_id_cache = []
self.book_db_id_counts = {}
self.book_db_uuid_path_map = {}
for i, l in enumerate(self.booklists()):
self.book_db_id_cache.append(set())
for book in l:
db_id = getattr(book, 'application_id', None)
if db_id is not None:
# increment the count of books on the device with this
# db_id.
self.book_db_id_cache[i].add(db_id)
if db_id not in self.book_db_uuid_path_map:
self.book_db_uuid_path_map[db_id] = set()
if getattr(book, 'lpath', False):
self.book_db_uuid_path_map[db_id].add(book.lpath)
c = self.book_db_id_counts.get(db_id, 0)
self.book_db_id_counts[db_id] = c + 1
for i, l in enumerate(self.booklists()):
if id in self.book_db_id_cache[i]:
loc[i] = True
loc[3] = self.book_db_id_counts.get(id, 0)
loc[4] |= self.book_db_uuid_path_map[id]
return loc
def update_thumbnail(self, book):
if book.cover and os.access(book.cover, os.R_OK):
with lopen(book.cover, 'rb') as f:
book.thumbnail = self.cover_to_thumbnail(f.read())
else:
cprefs = self.default_thumbnail_prefs
book.thumbnail = (cprefs['cover_width'], cprefs['cover_height'], generate_cover(book, prefs=cprefs))
def set_books_in_library(self, booklists, reset=False, add_as_step_to_job=None,
force_send=False, do_device_sync=True):
'''
Set the ondevice indications in the device database.
This method should be called before book_on_device is called, because
it sets the application_id for matched books. Book_on_device uses that
to both speed up matching and to count matches.
'''
if not self.device_manager.is_device_connected:
return False
# It might be possible to get here without having initialized the
# library view. In this case, simply give up
try:
db = self.library_view.model().db
except:
return False
string_pat = re.compile('(?u)\W|[_]')
def clean_string(x):
x = x.lower() if x else ''
return string_pat.sub('', x)
update_metadata = (
device_prefs['manage_device_metadata'] == 'on_connect' or force_send)
get_covers = False
desired_thumbnail_height = 0
if update_metadata and self.device_manager.is_device_connected:
if self.device_manager.device.WANTS_UPDATED_THUMBNAILS:
get_covers = True
desired_thumbnail_height = self.device_manager.device.THUMBNAIL_HEIGHT
# Force a reset if the caches are not initialized
if reset or not hasattr(self, 'db_book_title_cache'):
# Build a cache (map) of the library, so the search isn't O(n**2)
db_book_title_cache = {}
db_book_uuid_cache = {}
for id_ in db.data.iterallids():
title = clean_string(db.title(id_, index_is_id=True))
if title not in db_book_title_cache:
db_book_title_cache[title] = \
{'authors':{}, 'author_sort':{}, 'db_ids':{}}
# If there are multiple books in the library with the same title
# and author, then remember the last one. That is OK because, as
# we can't tell the difference between the books, one is as good
# as another.
authors = clean_string(db.authors(id_, index_is_id=True))
if authors:
db_book_title_cache[title]['authors'][authors] = id_
if db.author_sort(id_, index_is_id=True):
aus = clean_string(db.author_sort(id_, index_is_id=True))
db_book_title_cache[title]['author_sort'][aus] = id_
db_book_title_cache[title]['db_ids'][id_] = id_
db_book_uuid_cache[db.uuid(id_, index_is_id=True)] = id_
self.db_book_title_cache = db_book_title_cache
self.db_book_uuid_cache = db_book_uuid_cache
book_ids_to_refresh = set()
book_formats_to_send = []
books_with_future_dates = []
first_call_to_synchronize_with_db = [True]
def update_book(id_, book) :
if not update_metadata:
return
mi = db.get_metadata(id_, index_is_id=True, get_cover=get_covers)
book.smart_update(mi, replace_metadata=True)
if get_covers and desired_thumbnail_height != 0:
self.update_thumbnail(book)
def updateq(id_, book):
try:
if not update_metadata:
return False
if do_device_sync and self.device_manager.device is not None:
set_of_ids, (fmt_name, date_bad) = \
self.device_manager.device.synchronize_with_db(db, id_, book,
first_call_to_synchronize_with_db[0])
first_call_to_synchronize_with_db[0] = False
if date_bad:
books_with_future_dates.append(book.title)
elif fmt_name is not None:
book_formats_to_send.append((id_, fmt_name))
if set_of_ids is not None:
book_ids_to_refresh.update(set_of_ids)
return True
return (db.metadata_last_modified(id_, index_is_id=True) !=
getattr(book, 'last_modified', None) or
(isinstance(getattr(book, 'thumbnail', None), (list, tuple)) and
max(book.thumbnail[0], book.thumbnail[1]) != desired_thumbnail_height
)
)
except:
return True
# Now iterate through all the books on the device, setting the
# in_library field. If the UUID matches a book in the library, then
# do not consider that book for other matching. In all cases set
# the application_id to the db_id of the matching book. This value
# will be used by books_on_device to indicate matches. While we are
# going by, update the metadata for a book if automatic management is on
total_book_count = 0
for booklist in booklists:
for book in booklist:
if book:
total_book_count += 1
if DEBUG:
prints('DeviceJob: set_books_in_library: books to process=', total_book_count)
start_time = time.time()
with BusyCursor():
current_book_count = 0
for booklist in booklists:
for book in booklist:
if current_book_count % 100 == 0:
self.status_bar.show_message(
_('Analyzing books on the device: %d%% finished')%(
int((float(current_book_count)/total_book_count)*100.0)), show_notification=False)
# I am assuming that this sort-of multi-threading won't break
# anything. Reasons: excluding UI events prevents the user
# from explicitly changing anything, and (in theory) no
# changes are happening because of timers and the like.
# Why every tenth book? A rough guess (WAG), balancing performance in the
# loop with preventing App Not Responding errors
if current_book_count % 10 == 0:
QCoreApplication.processEvents(
flags=QEventLoop.ExcludeUserInputEvents|QEventLoop.ExcludeSocketNotifiers)
current_book_count += 1
book.in_library = None
if getattr(book, 'uuid', None) in self.db_book_uuid_cache:
id_ = db_book_uuid_cache[book.uuid]
if updateq(id_, book):
update_book(id_, book)
book.in_library = 'UUID'
# ensure that the correct application_id is set
book.application_id = id_
continue
# No UUID exact match. Try metadata matching.
book_title = clean_string(book.title)
d = self.db_book_title_cache.get(book_title, None)
if d is not None:
# At this point we know that the title matches. The book
# will match if any of the db_id, author, or author_sort
# also match.
if getattr(book, 'application_id', None) in d['db_ids']:
id_ = getattr(book, 'application_id', None)
update_book(id_, book)
book.in_library = 'APP_ID'
# app_id already matches a db_id. No need to set it.
continue
# Sonys know their db_id independent of the application_id
# in the metadata cache. Check that as well.
if getattr(book, 'db_id', None) in d['db_ids']:
update_book(book.db_id, book)
book.in_library = 'DB_ID'
book.application_id = book.db_id
continue
# We now know that the application_id is not right. Set it
# to None to prevent book_on_device from accidentally
# matching on it. It will be set to a correct value below if
# the book is matched with one in the library
book.application_id = None
if book.authors:
# Compare against both author and author sort, because
# either can appear as the author
book_authors = clean_string(authors_to_string(book.authors))
if book_authors in d['authors']:
id_ = d['authors'][book_authors]
update_book(id_, book)
book.in_library = 'AUTHOR'
book.application_id = id_
elif book_authors in d['author_sort']:
id_ = d['author_sort'][book_authors]
update_book(id_, book)
book.in_library = 'AUTH_SORT'
book.application_id = id_
else:
# Book definitely not matched. Clear its application ID
book.application_id = None
# Set author_sort if it isn't already
asort = getattr(book, 'author_sort', None)
if not asort and book.authors:
book.author_sort = self.library_view.model().db.\
author_sort_from_authors(book.authors)
if update_metadata:
if self.device_manager.is_device_connected:
plugboards = self.library_view.model().db.prefs.get('plugboards', {})
self.device_manager.sync_booklists(
FunctionDispatcher(self.metadata_synced), booklists,
plugboards, add_as_step_to_job)
if book_ids_to_refresh:
try:
prints('DeviceJob: set_books_in_library refreshing GUI for ',
len(book_ids_to_refresh), 'books')
self.library_view.model().refresh_ids(book_ids_to_refresh,
current_row=self.library_view.currentIndex().row())
except:
# This shouldn't ever happen, but just in case ...
traceback.print_exc()
# Sync books if necessary
try:
files, names, metadata = [], [], []
for id_, fmt_name in book_formats_to_send:
if DEBUG:
prints('DeviceJob: Syncing book. id:', id_, 'name from device', fmt_name)
ext = os.path.splitext(fmt_name)[1][1:]
fmt_info = db.new_api.format_metadata(id_, ext)
if fmt_info:
try:
pt = PersistentTemporaryFile(suffix='caltmpfmt.'+ext)
db.new_api.copy_format_to(id_, ext, pt)
pt.close()
files.append(filename_to_unicode(os.path.abspath(pt.name)))
names.append(fmt_name)
mi = db.new_api.get_metadata(id_, get_cover=True)
self.update_thumbnail(mi)
metadata.append(mi)
except:
prints('Problem creating temporary file for', fmt_name)
traceback.print_exc()
else:
if DEBUG:
prints("DeviceJob: book doesn't have that format")
if files:
self.upload_books(files, names, metadata)
except:
# Shouldn't ever happen, but just in case
traceback.print_exc()
# Inform user about future-dated books
try:
if books_with_future_dates:
d = error_dialog(self, _('Book format sync problem'),
_('Some book formats in your library cannot be '
'synced because they have dates in the future'),
det_msg='\n'.join(books_with_future_dates),
show=False,
show_copy_button=True)
d.show()
except:
traceback.print_exc()
if DEBUG:
prints('DeviceJob: set_books_in_library finished: time=',
time.time() - start_time)
# The status line is reset when the job finishes
return update_metadata
# }}}
| gpl-3.0 | -3,446,028,992,551,313,000 | 43.190855 | 120 | 0.538229 | false |
sonaht/ansible | lib/ansible/plugins/connection/iocage.py | 44 | 2394 | # based on jail.py (c) 2013, Michael Scherer <[email protected]>
# (c) 2015, Toshio Kuratomi <[email protected]>
# (c) 2016, Stephan Lohse <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import subprocess
from ansible.plugins.connection.jail import Connection as Jail
from ansible.errors import AnsibleError
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(Jail):
''' Local iocage based connections '''
transport = 'iocage'
def __init__(self, play_context, new_stdin, *args, **kwargs):
self.ioc_jail = play_context.remote_addr
self.iocage_cmd = Jail._search_executable('iocage')
jail_uuid = self.get_jail_uuid()
kwargs[Jail.modified_jailname_key] = 'ioc-{}'.format(jail_uuid)
display.vvv(u"Jail {iocjail} has been translated to {rawjail}".format(
iocjail=self.ioc_jail, rawjail=kwargs[Jail.modified_jailname_key]),
host=kwargs[Jail.modified_jailname_key])
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
def get_jail_uuid(self):
p = subprocess.Popen([self.iocage_cmd, 'get', 'host_hostuuid', self.ioc_jail],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = p.communicate()
# otherwise p.returncode would not be set
p.wait()
if p.returncode != 0:
raise AnsibleError(u"iocage returned an error: {}".format(stdout))
return stdout.strip('\n')
| gpl-3.0 | 4,721,034,647,374,134,000 | 34.731343 | 86 | 0.662072 | false |
youprofit/NewsBlur | apps/analyzer/tfidf.py | 17 | 1878 | #!/usr/bin/env python
"""
The simplest TF-IDF library imaginable.
Add your documents as two-element lists `[docname, [list_of_words_in_the_document]]` with `addDocument(docname, list_of_words)`. Get a list of all the `[docname, similarity_score]` pairs relative to a document by calling `similarities([list_of_words])`.
See the README for a usage example.
"""
import sys
import os
class tfidf:
def __init__(self):
self.weighted = False
self.documents = []
self.corpus_dict = {}
def addDocument(self, doc_name, list_of_words):
# building a dictionary
doc_dict = {}
for w in list_of_words:
doc_dict[w] = doc_dict.get(w, 0.) + 1.0
self.corpus_dict[w] = self.corpus_dict.get(w, 0.0) + 1.0
# normalizing the dictionary
length = float(len(list_of_words))
for k in doc_dict:
doc_dict[k] = doc_dict[k] / length
# add the normalized document to the corpus
self.documents.append([doc_name, doc_dict])
def similarities(self, list_of_words):
"""Returns a list of all the [docname, similarity_score] pairs relative to a list of words."""
# building the query dictionary
query_dict = {}
for w in list_of_words:
query_dict[w] = query_dict.get(w, 0.0) + 1.0
# normalizing the query
length = float(len(list_of_words))
for k in query_dict:
query_dict[k] = query_dict[k] / length
# computing the list of similarities
sims = []
for doc in self.documents:
score = 0.0
doc_dict = doc[1]
for k in query_dict:
if doc_dict.has_key(k):
score += (query_dict[k] / self.corpus_dict[k]) + (doc_dict[k] / self.corpus_dict[k])
sims.append([doc[0], score])
return sims | mit | 9,063,353,552,194,863,000 | 32.553571 | 253 | 0.57934 | false |
maxadamski/swift-corelibs-foundation | lib/product.py | 5 | 7497 | # This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
from config import Configuration
from phases import CompileC
from phases import CompileCxx
from phases import Assemble
from phases import BuildAction
from phases import MergeSwiftModule
from target import OSType
from path import Path
import os
class Product(BuildAction):
name = None
product_name = None
phases = []
installed_headers = []
CFLAGS = None
CXXFLAGS = None
LDFLAGS = None
ASFLAGS = None
SWIFTCFLAGS = ""
needs_stdcxx = False
needs_objc = False
GCC_PREFIX_HEADER = None
PUBLIC_HEADERS_FOLDER_PATH = os.path.join("usr", "include")
PUBLIC_MODULE_FOLDER_PATH = os.path.join("usr", "include")
PRIVATE_HEADERS_FOLDER_PATH = os.path.join("usr", "local", "include")
def __init__(self, name):
self.name = name
def generate(self):
generated = "\n\n"
for phase in self.phases:
generated += phase.generate()
return generated
def add_phase(self, phase):
phase.set_product(self)
if len(self.phases) > 0:
phase.previous = self.phases[-1]
self.phases.append(phase)
@property
def product(self):
return Configuration.current.build_directory.path_by_appending(self.name).path_by_appending(self.product_name)
@property
def public_module_path(self):
return Path.path(Configuration.current.build_directory.path_by_appending(self.name).absolute() + "/" + self.PUBLIC_MODULE_FOLDER_PATH)
@property
def public_headers_path(self):
return Path.path(Configuration.current.build_directory.path_by_appending(self.name).absolute() + "/" + self.PUBLIC_HEADERS_FOLDER_PATH)
@property
def private_headers_path(self):
return Path.path(Configuration.current.build_directory.path_by_appending(self.name).absolute() + "/" + self.PRIVATE_HEADERS_FOLDER_PATH)
@property
def project_headers_path(self):
return Path.path(Configuration.current.build_directory.path_by_appending(self.name).absolute() + "/" + self.PROJECT_HEADERS_FOLDER_PATH)
class Library(Product):
rule = None
def __init__(self, name):
Product.__init__(self, name)
def generate(self, flags):
generated = Product.generate(self)
objects = []
for phase in self.phases:
objects += phase.objects
product_flags = " ".join(flags)
if self.LDFLAGS is not None:
product_flags += " " + self.LDFLAGS
if self.needs_stdcxx:
product_flags += " -lstdc++"
generated += """
build """ + self.product.relative() + """: """ + self.rule + """ """ + " ".join(objects) + self.generate_dependencies() + """
flags = """ + product_flags
if self.needs_objc:
generated += """
start = ${TARGET_BOOTSTRAP_DIR}/usr/lib/objc-begin.o
end = ${TARGET_BOOTSTRAP_DIR}/usr/lib/objc-end.o
"""
generated += """
build """ + self.product_name + """: phony | """ + self.product.relative() + """
default """ + self.product_name + """
"""
return generated
class DynamicLibrary(Library):
def __init__(self, name):
Library.__init__(self, name)
self.rule = "Link"
self.product_name = Configuration.current.target.dynamic_library_prefix + name + Configuration.current.target.dynamic_library_suffix
def generate(self):
if Configuration.current.target.sdk == OSType.Linux:
return Library.generate(self, ["-shared", "-Wl,-soname," + self.product_name, "-Wl,--no-undefined"])
else:
return Library.generate(self, ["-shared"])
class Framework(Product):
def __init__(self, name):
Product.__init__(self, name)
self.product_name = name + ".framework"
@property
def public_module_path(self):
return Configuration.current.build_directory.path_by_appending(self.name).path_by_appending(self.product_name).path_by_appending("Modules")
@property
def public_headers_path(self):
return Configuration.current.build_directory.path_by_appending(self.name).path_by_appending(self.product_name).path_by_appending("Headers")
@property
def private_headers_path(self):
return Configuration.current.build_directory.path_by_appending(self.name).path_by_appending(self.product_name).path_by_appending("PrivateHeaders")
def generate(self):
generated = Product.generate(self)
objects = []
for phase in self.phases:
objects += phase.objects
product_flags = "-shared -Wl,-soname," + Configuration.current.target.dynamic_library_prefix + self.name + Configuration.current.target.dynamic_library_suffix + " -Wl,--no-undefined"
if self.LDFLAGS is not None:
product_flags += " " + self.LDFLAGS
if self.needs_stdcxx:
product_flags += " -lstdc++"
generated += """
build """ + self.product.path_by_appending(self.name).relative() + """: Link """ + " ".join(objects) + self.generate_dependencies() + """
flags = """ + product_flags
if self.needs_objc:
generated += """
start = ${TARGET_BOOTSTRAP_DIR}/usr/lib/objc-begin.o
end = ${TARGET_BOOTSTRAP_DIR}/usr/lib/objc-end.o
"""
generated += """
build """ + self.product_name + """: phony | """ + self.product.relative() + """
default """ + self.product_name + """
build ${TARGET_BOOTSTRAP_DIR}/usr/lib/""" + Configuration.current.target.dynamic_library_prefix + self.name + Configuration.current.target.dynamic_library_suffix + """: Cp """ + self.product.path_by_appending(self.name).relative() + """
"""
return generated
class StaticLibrary(Library):
def __init__(self, name):
Library.__init__(self, name)
self.rule = "Archive"
self.product_name = Configuration.current.target.static_library_prefix + name + Configuration.current.target.static_library_suffix
def generate(self):
return Library.generate(self, [])
class Executable(Product):
def __init__(self, name):
Product.__init__(self, name)
self.product_name = name + Configuration.current.target.executable_suffix
def generate(self):
generated = Product.generate(self)
return generated
class Application(Product):
executable = None
def __init__(self, name):
Product.__init__(self, name)
self.product_name = name + ".app"
def generate(self):
generated = Product.generate(self)
objects = []
for phase in self.phases:
objects += phase.objects
product_flags = ""
if self.LDFLAGS is not None:
product_flags += " " + self.LDFLAGS
if self.needs_stdcxx:
product_flags += " -lstdc++"
generated += """
build """ + self.product.path_by_appending(self.name).relative() + ": Link " + " ".join(objects) + self.generate_dependencies() + """
flags = """ + product_flags
if self.needs_objc:
generated += """
start = ${TARGET_BOOTSTRAP_DIR}/usr/lib/objc-begin.o
end = ${TARGET_BOOTSTRAP_DIR}/usr/lib/objc-end.o
"""
return generated
| apache-2.0 | 387,776,829,725,781,440 | 32.77027 | 236 | 0.632786 | false |
NeCTAR-RC/nova | nova/api/openstack/compute/legacy_v2/ips.py | 78 | 1929 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from webob import exc
import nova
from nova.api.openstack import common
from nova.api.openstack.compute.views import addresses as view_addresses
from nova.api.openstack import wsgi
from nova.i18n import _
class Controller(wsgi.Controller):
"""The servers addresses API controller for the OpenStack API."""
_view_builder_class = view_addresses.ViewBuilder
def __init__(self, **kwargs):
super(Controller, self).__init__(**kwargs)
self._compute_api = nova.compute.API()
def index(self, req, server_id):
context = req.environ["nova.context"]
instance = common.get_instance(self._compute_api, context, server_id)
networks = common.get_networks_for_instance(context, instance)
return self._view_builder.index(networks)
def show(self, req, server_id, id):
context = req.environ["nova.context"]
instance = common.get_instance(self._compute_api, context, server_id)
networks = common.get_networks_for_instance(context, instance)
if id not in networks:
msg = _("Instance is not a member of specified network")
raise exc.HTTPNotFound(explanation=msg)
return self._view_builder.show(networks[id], id)
def create_resource():
return wsgi.Resource(Controller())
| apache-2.0 | 5,100,317,228,942,981,000 | 36.096154 | 78 | 0.699326 | false |
J861449197/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/test_mongo_call_count.py | 91 | 7866 | """
Tests to verify correct number of MongoDB calls during course import/export and traversal
when using the Split modulestore.
"""
from tempfile import mkdtemp
from shutil import rmtree
from unittest import TestCase, skip
import ddt
from xmodule.modulestore.xml_importer import import_course_from_xml
from xmodule.modulestore.xml_exporter import export_course_to_xml
from xmodule.modulestore.tests.factories import check_mongo_calls
from xmodule.modulestore.tests.test_cross_modulestore_import_export import (
MixedModulestoreBuilder, VersioningModulestoreBuilder,
MongoModulestoreBuilder, TEST_DATA_DIR
)
MIXED_OLD_MONGO_MODULESTORE_BUILDER = MixedModulestoreBuilder([('draft', MongoModulestoreBuilder())])
MIXED_SPLIT_MODULESTORE_BUILDER = MixedModulestoreBuilder([('split', VersioningModulestoreBuilder())])
@ddt.ddt
@skip("Fix call counts below - sometimes the counts are off by 1.")
class CountMongoCallsXMLRoundtrip(TestCase):
"""
This class exists to test XML import and export to/from Split.
"""
def setUp(self):
super(CountMongoCallsXMLRoundtrip, self).setUp()
self.export_dir = mkdtemp()
self.addCleanup(rmtree, self.export_dir, ignore_errors=True)
@ddt.data(
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 287, 779, 702, 702),
(MIXED_SPLIT_MODULESTORE_BUILDER, 37, 16, 190, 189),
)
@ddt.unpack
def test_import_export(self, store_builder, export_reads, import_reads, first_import_writes, second_import_writes):
with store_builder.build() as (source_content, source_store):
with store_builder.build() as (dest_content, dest_store):
source_course_key = source_store.make_course_key('a', 'course', 'course')
dest_course_key = dest_store.make_course_key('a', 'course', 'course')
# An extra import write occurs in the first Split import due to the mismatch between
# the course id and the wiki_slug in the test XML course. The course must be updated
# with the correct wiki_slug during import.
with check_mongo_calls(import_reads, first_import_writes):
import_course_from_xml(
source_store,
'test_user',
TEST_DATA_DIR,
source_dirs=['manual-testing-complete'],
static_content_store=source_content,
target_id=source_course_key,
create_if_not_present=True,
raise_on_failure=True,
)
with check_mongo_calls(export_reads):
export_course_to_xml(
source_store,
source_content,
source_course_key,
self.export_dir,
'exported_source_course',
)
with check_mongo_calls(import_reads, second_import_writes):
import_course_from_xml(
dest_store,
'test_user',
self.export_dir,
source_dirs=['exported_source_course'],
static_content_store=dest_content,
target_id=dest_course_key,
create_if_not_present=True,
raise_on_failure=True,
)
@ddt.ddt
class CountMongoCallsCourseTraversal(TestCase):
"""
Tests the number of Mongo calls made when traversing a course tree from the top course root
to the leaf nodes.
"""
# Suppose you want to traverse a course - maybe accessing the fields of each XBlock in the course,
# maybe not. What parameters should one use for get_course() in order to minimize the number of
# mongo calls? The tests below both ensure that code changes don't increase the number of mongo calls
# during traversal -and- demonstrate how to minimize the number of calls.
@ddt.data(
# These two lines show the way this traversal *should* be done
# (if you'll eventually access all the fields and load all the definitions anyway).
# 'lazy' does not matter in old Mongo.
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, None, False, True, 189),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, None, True, True, 189),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, False, True, 387),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, True, True, 387),
# As shown in these two lines: whether or not the XBlock fields are accessed,
# the same number of mongo calls are made in old Mongo for depth=None.
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, None, False, False, 189),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, None, True, False, 189),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, False, False, 387),
(MIXED_OLD_MONGO_MODULESTORE_BUILDER, 0, True, False, 387),
# The line below shows the way this traversal *should* be done
# (if you'll eventually access all the fields and load all the definitions anyway).
(MIXED_SPLIT_MODULESTORE_BUILDER, None, False, True, 4),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, True, True, 143),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, False, True, 143),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, True, True, 143),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, False, False, 4),
(MIXED_SPLIT_MODULESTORE_BUILDER, None, True, False, 4),
# TODO: The call count below seems like a bug - should be 4?
# Seems to be related to using self.lazy in CachingDescriptorSystem.get_module_data().
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, False, False, 143),
(MIXED_SPLIT_MODULESTORE_BUILDER, 0, True, False, 4)
)
@ddt.unpack
def test_number_mongo_calls(self, store, depth, lazy, access_all_block_fields, num_mongo_calls):
with store.build() as (source_content, source_store):
source_course_key = source_store.make_course_key('a', 'course', 'course')
# First, import a course.
import_course_from_xml(
source_store,
'test_user',
TEST_DATA_DIR,
source_dirs=['manual-testing-complete'],
static_content_store=source_content,
target_id=source_course_key,
create_if_not_present=True,
raise_on_failure=True,
)
# Course traversal modeled after the traversal done here:
# lms/djangoapps/mobile_api/video_outlines/serializers.py:BlockOutline
# Starting at the root course block, do a breadth-first traversal using
# get_children() to retrieve each block's children.
with check_mongo_calls(num_mongo_calls):
with source_store.bulk_operations(source_course_key):
start_block = source_store.get_course(source_course_key, depth=depth, lazy=lazy)
all_blocks = []
stack = [start_block]
while stack:
curr_block = stack.pop()
all_blocks.append(curr_block)
if curr_block.has_children:
for block in reversed(curr_block.get_children()):
stack.append(block)
if access_all_block_fields:
# Read the fields on each block in order to ensure each block and its definition is loaded.
for xblock in all_blocks:
for __, field in xblock.fields.iteritems():
if field.is_set_on(xblock):
__ = field.read_from(xblock)
| agpl-3.0 | 5,422,810,240,244,577,000 | 48.1625 | 119 | 0.597635 | false |
minlexx/pyevemon | esi_client/models/get_universe_moons_moon_id_internal_server_error.py | 1 | 3107 | # coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.4.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class GetUniverseMoonsMoonIdInternalServerError(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, error=None):
"""
GetUniverseMoonsMoonIdInternalServerError - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'error': 'str'
}
self.attribute_map = {
'error': 'error'
}
self._error = error
@property
def error(self):
"""
Gets the error of this GetUniverseMoonsMoonIdInternalServerError.
Internal server error message
:return: The error of this GetUniverseMoonsMoonIdInternalServerError.
:rtype: str
"""
return self._error
@error.setter
def error(self, error):
"""
Sets the error of this GetUniverseMoonsMoonIdInternalServerError.
Internal server error message
:param error: The error of this GetUniverseMoonsMoonIdInternalServerError.
:type: str
"""
self._error = error
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, GetUniverseMoonsMoonIdInternalServerError):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| gpl-3.0 | -3,285,205,257,044,549,000 | 25.555556 | 82 | 0.539749 | false |
knnniggett/weewx | bin/weewx/test/test_templates.py | 6 | 5670 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2009-2015 Tom Keffer <[email protected]>
#
# See the file LICENSE.txt for your full rights.
#
"""Test tag notation for template generation."""
from __future__ import with_statement
import os.path
import shutil
import sys
import syslog
import unittest
import configobj
os.environ['TZ'] = 'America/Los_Angeles'
import weewx.reportengine
import weewx.station
import weeutil.weeutil
import gen_fake_data
# Find the configuration file. It's assumed to be in the same directory as me:
config_path = os.path.join(os.path.dirname(__file__), "testgen.conf")
cwd = None
# We will be testing the ability to extend the unit system, so set that up first:
class ExtraUnits(dict):
def __getitem__(self, obs_type):
if obs_type.endswith('Temp'):
# Anything that ends with "Temp" is assumed to be in group_temperature
return "group_temperature"
elif obs_type.startswith('current'):
# Anything that starts with "current" is in group_amperage:
return "group_amperage"
else:
# Otherwise, consult the underlying dictionary:
return dict.__getitem__(self, obs_type)
extra_units = ExtraUnits()
import weewx.units
weewx.units.obs_group_dict.extend(extra_units)
# Add the new group group_amperage to the standard unit systems:
weewx.units.USUnits["group_amperage"] = "amp"
weewx.units.MetricUnits["group_amperage"] = "amp"
weewx.units.MetricWXUnits["group_amperage"] = "amp"
weewx.units.default_unit_format_dict["amp"] = "%.1f"
weewx.units.default_unit_label_dict["amp"] = " A"
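# A quick illustration of the lookup behavior configured above (assumed
# observation names, not taken from the test skins): extra_units['boilerTemp']
# resolves to 'group_temperature' because the name ends in 'Temp',
# extra_units['currentA'] resolves to 'group_amperage' because it starts with
# 'current', and any other key falls through to the underlying obs_group_dict.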
class Common(unittest.TestCase):
def setUp(self):
global config_path
global cwd
weewx.debug = 1
syslog.openlog('test_templates', syslog.LOG_CONS)
syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_DEBUG))
# Save and set the current working directory in case some service changes it.
if not cwd:
cwd = os.getcwd()
else:
os.chdir(cwd)
try :
self.config_dict = configobj.ConfigObj(config_path, file_error=True)
except IOError:
sys.stderr.write("Unable to open configuration file %s" % config_path)
# Reraise the exception (this will eventually cause the program to exit)
raise
except configobj.ConfigObjError:
sys.stderr.write("Error while parsing configuration file %s" % config_path)
raise
# Remove the old directory:
try:
test_html_dir = os.path.join(self.config_dict['WEEWX_ROOT'], self.config_dict['StdReport']['HTML_ROOT'])
shutil.rmtree(test_html_dir)
except OSError, e:
if os.path.exists(test_html_dir):
print >>sys.stderr, "\nUnable to remove old test directory %s", test_html_dir
print >>sys.stderr, "Reason:", e
print >>sys.stderr, "Aborting"
exit(1)
# This will generate the test databases if necessary:
gen_fake_data.configDatabases(self.config_dict, database_type=self.database_type)
def tearDown(self):
pass
def test_report_engine(self):
# The generation time should be the same as the last record in the test database:
testtime_ts = gen_fake_data.stop_ts
print "\ntest time is ", weeutil.weeutil.timestamp_to_string(testtime_ts)
stn_info = weewx.station.StationInfo(**self.config_dict['Station'])
t = weewx.reportengine.StdReportEngine(self.config_dict, stn_info, testtime_ts)
# Find the test skins and then have SKIN_ROOT point to it:
test_dir = sys.path[0]
t.config_dict['StdReport']['SKIN_ROOT'] = os.path.join(test_dir, 'test_skins')
# Although the report engine inherits from Thread, we can just run it in the main thread:
print "Starting report engine test"
t.run()
print "Done."
test_html_dir = os.path.join(t.config_dict['WEEWX_ROOT'], t.config_dict['StdReport']['HTML_ROOT'])
expected_dir = os.path.join(test_dir, 'expected')
for file_name in ['index.html', 'bymonth.txt', 'byyear.txt',
'metric/index.html', 'metric/bymonth.txt', 'metric/byyear.txt']:
actual_file = os.path.join(test_html_dir, file_name)
expected_file = os.path.join(expected_dir, file_name)
# print "Checking file: ", actual_file
# print " against file:", expected_file
actual = open(actual_file)
expected = open(expected_file)
n = 0
while True:
n += 1
actual_line = actual.readline()
expected_line = expected.readline()
if actual_line == '' or expected_line == '':
break
self.assertEqual(actual_line, expected_line, msg="%s[%d]:\n%r vs\n%r" % (actual_file, n, actual_line, expected_line))
print "Checked %d lines" % (n,)
class TestSqlite(Common):
def __init__(self, *args, **kwargs):
self.database_type = "sqlite"
super(TestSqlite, self).__init__(*args, **kwargs)
class TestMySQL(Common):
def __init__(self, *args, **kwargs):
self.database_type = "mysql"
super(TestMySQL, self).__init__(*args, **kwargs)
def suite():
tests = ['test_report_engine']
return unittest.TestSuite(map(TestSqlite, tests) + map(TestMySQL, tests))
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
| gpl-3.0 | 1,907,654,356,138,092,000 | 34.660377 | 133 | 0.608818 | false |
becm/meson | run_project_tests.py | 1 | 52296 | #!/usr/bin/env python3
# Copyright 2012-2019 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from concurrent.futures import ProcessPoolExecutor, CancelledError
from enum import Enum
from io import StringIO
from pathlib import Path, PurePath
import argparse
import functools
import itertools
import json
import multiprocessing
import os
import re
import shlex
import shutil
import signal
import subprocess
import sys
import tempfile
import time
import typing as T
import xml.etree.ElementTree as ET
from mesonbuild import build
from mesonbuild import environment
from mesonbuild import compilers
from mesonbuild import mesonlib
from mesonbuild import mlog
from mesonbuild import mtest
from mesonbuild.mesonlib import MachineChoice, Popen_safe
from mesonbuild.coredata import backendlist, version as meson_version
from run_tests import get_fake_options, run_configure, get_meson_script
from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
from run_tests import ensure_backend_detects_changes
from run_tests import guess_backend
ALL_TESTS = ['cmake', 'common', 'warning-meson', 'failing-meson', 'failing-build', 'failing-test',
'keyval', 'platform-osx', 'platform-windows', 'platform-linux',
'java', 'C#', 'vala', 'rust', 'd', 'objective c', 'objective c++',
'fortran', 'swift', 'cuda', 'python3', 'python', 'fpga', 'frameworks', 'nasm', 'wasm'
]
class BuildStep(Enum):
configure = 1
build = 2
test = 3
install = 4
clean = 5
validate = 6
class TestResult(BaseException):
def __init__(self, cicmds):
self.msg = '' # empty msg indicates test success
self.stdo = ''
self.stde = ''
self.mlog = ''
self.cicmds = cicmds
self.conftime = 0
self.buildtime = 0
self.testtime = 0
def add_step(self, step, stdo, stde, mlog='', time=0):
self.step = step
self.stdo += stdo
self.stde += stde
self.mlog += mlog
if step == BuildStep.configure:
self.conftime = time
elif step == BuildStep.build:
self.buildtime = time
elif step == BuildStep.test:
self.testtime = time
def fail(self, msg):
self.msg = msg
class InstalledFile:
def __init__(self, raw: T.Dict[str, str]):
self.path = raw['file']
self.typ = raw['type']
self.platform = raw.get('platform', None)
self.language = raw.get('language', 'c') # type: str
version = raw.get('version', '') # type: str
if version:
self.version = version.split('.') # type: T.List[str]
else:
# split on '' will return [''], we want an empty list though
self.version = []
def get_path(self, compiler: str, env: environment.Environment) -> T.Optional[Path]:
p = Path(self.path)
canonical_compiler = compiler
if ((compiler in ['clang-cl', 'intel-cl']) or
(env.machines.host.is_windows() and compiler in {'pgi', 'dmd', 'ldc'})):
canonical_compiler = 'msvc'
has_pdb = False
if self.language in {'c', 'cpp'}:
has_pdb = canonical_compiler == 'msvc'
elif self.language == 'd':
# dmd's optlink does not generate pdb files
has_pdb = env.coredata.compilers.host['d'].linker.id in {'link', 'lld-link'}
# Abort if the platform does not match
matches = {
'msvc': canonical_compiler == 'msvc',
'gcc': canonical_compiler != 'msvc',
'cygwin': env.machines.host.is_cygwin(),
'!cygwin': not env.machines.host.is_cygwin(),
}.get(self.platform or '', True)
if not matches:
return None
# Handle the different types
if self.typ == 'file':
return p
elif self.typ == 'shared_lib':
if env.machines.host.is_windows() or env.machines.host.is_cygwin():
# Windows only has foo.dll and foo-X.dll
if len(self.version) > 1:
return None
if self.version:
p = p.with_name('{}-{}'.format(p.name, self.version[0]))
return p.with_suffix('.dll')
p = p.with_name('lib{}'.format(p.name))
if env.machines.host.is_darwin():
# MacOS only has libfoo.dylib and libfoo.X.dylib
if len(self.version) > 1:
return None
# pathlib.Path.with_suffix replaces, not appends
suffix = '.dylib'
if self.version:
suffix = '.{}{}'.format(self.version[0], suffix)
else:
# pathlib.Path.with_suffix replaces, not appends
suffix = '.so'
if self.version:
suffix = '{}.{}'.format(suffix, '.'.join(self.version))
return p.with_suffix(suffix)
elif self.typ == 'exe':
if env.machines.host.is_windows() or env.machines.host.is_cygwin():
return p.with_suffix('.exe')
elif self.typ == 'pdb':
if self.version:
p = p.with_name('{}-{}'.format(p.name, self.version[0]))
return p.with_suffix('.pdb') if has_pdb else None
elif self.typ == 'implib' or self.typ == 'implibempty':
if env.machines.host.is_windows() and canonical_compiler == 'msvc':
# only MSVC doesn't generate empty implibs
if self.typ == 'implibempty' and compiler == 'msvc':
return None
return p.parent / (re.sub(r'^lib', '', p.name) + '.lib')
elif env.machines.host.is_windows() or env.machines.host.is_cygwin():
return p.with_suffix('.dll.a')
else:
return None
elif self.typ == 'expr':
return Path(platform_fix_name(p.as_posix(), canonical_compiler, env))
else:
raise RuntimeError('Invalid installed file type {}'.format(self.typ))
return p
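# Hedged example of how get_path() resolves a shared_lib entry (hypothetical
# values): {'file': 'usr/lib/foo', 'type': 'shared_lib', 'version': '1'} maps to
# usr/lib/foo-1.dll on Windows/Cygwin, usr/lib/libfoo.1.dylib on macOS and
# usr/lib/libfoo.so.1 elsewhere; a multi-part version such as '1.2.3' yields None
# on Windows and macOS because only one version component is representable there.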
@functools.total_ordering
class TestDef:
def __init__(self, path: Path, name: T.Optional[str], args: T.List[str], skip: bool = False):
self.path = path
self.name = name
self.args = args
self.skip = skip
self.env = os.environ.copy()
self.installed_files = [] # type: T.List[InstalledFile]
self.do_not_set_opts = [] # type: T.List[str]
self.stdout = [] # type: T.List[T.Dict[str, str]]
def __repr__(self) -> str:
return '<{}: {:<48} [{}: {}] -- {}>'.format(type(self).__name__, str(self.path), self.name, self.args, self.skip)
def display_name(self) -> str:
if self.name:
return '{} ({})'.format(self.path.as_posix(), self.name)
return self.path.as_posix()
def __lt__(self, other: T.Any) -> bool:
if isinstance(other, TestDef):
# None is not sortable, so replace it with an empty string
s_id = int(self.path.name.split(' ')[0])
o_id = int(other.path.name.split(' ')[0])
return (s_id, self.path, self.name or '') < (o_id, other.path, other.name or '')
return NotImplemented
class AutoDeletedDir:
def __init__(self, d):
self.dir = d
def __enter__(self):
os.makedirs(self.dir, exist_ok=True)
return self.dir
def __exit__(self, _type, value, traceback):
# We don't use tempfile.TemporaryDirectory, but wrap the
# deletion in the AutoDeletedDir class because
# it fails on Windows due to antivirus programs
# holding files open.
mesonlib.windows_proof_rmtree(self.dir)
failing_logs = []
print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ
under_ci = 'CI' in os.environ
under_xenial_ci = under_ci and ('XENIAL' in os.environ)
skip_scientific = under_ci and ('SKIP_SCIENTIFIC' in os.environ)
do_debug = under_ci or print_debug
no_meson_log_msg = 'No meson-log.txt found.'
system_compiler = None
compiler_id_map = {} # type: T.Dict[str, str]
tool_vers_map = {} # type: T.Dict[str, str]
class StopException(Exception):
def __init__(self):
super().__init__('Stopped by user')
stop = False
def stop_handler(signal, frame):
global stop
stop = True
signal.signal(signal.SIGINT, stop_handler)
signal.signal(signal.SIGTERM, stop_handler)
def setup_commands(optbackend):
global do_debug, backend, backend_flags
global compile_commands, clean_commands, test_commands, install_commands, uninstall_commands
backend, backend_flags = guess_backend(optbackend, shutil.which('msbuild'))
compile_commands, clean_commands, test_commands, install_commands, \
uninstall_commands = get_backend_commands(backend, do_debug)
# TODO try to eliminate or at least reduce this function
def platform_fix_name(fname: str, canonical_compiler: str, env: environment.Environment) -> str:
if '?lib' in fname:
if env.machines.host.is_windows() and canonical_compiler == 'msvc':
fname = re.sub(r'lib/\?lib(.*)\.', r'bin/\1.', fname)
fname = re.sub(r'/\?lib/', r'/bin/', fname)
elif env.machines.host.is_windows():
fname = re.sub(r'lib/\?lib(.*)\.', r'bin/lib\1.', fname)
fname = re.sub(r'\?lib(.*)\.dll$', r'lib\1.dll', fname)
fname = re.sub(r'/\?lib/', r'/bin/', fname)
elif env.machines.host.is_cygwin():
fname = re.sub(r'lib/\?lib(.*)\.so$', r'bin/cyg\1.dll', fname)
fname = re.sub(r'lib/\?lib(.*)\.', r'bin/cyg\1.', fname)
fname = re.sub(r'\?lib(.*)\.dll$', r'cyg\1.dll', fname)
fname = re.sub(r'/\?lib/', r'/bin/', fname)
else:
fname = re.sub(r'\?lib', 'lib', fname)
if fname.endswith('?so'):
if env.machines.host.is_windows() and canonical_compiler == 'msvc':
fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname)
fname = re.sub(r'/(?:lib|)([^/]*?)\?so$', r'/\1.dll', fname)
return fname
elif env.machines.host.is_windows():
fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname)
fname = re.sub(r'/([^/]*?)\?so$', r'/\1.dll', fname)
return fname
elif env.machines.host.is_cygwin():
fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname)
fname = re.sub(r'/lib([^/]*?)\?so$', r'/cyg\1.dll', fname)
fname = re.sub(r'/([^/]*?)\?so$', r'/\1.dll', fname)
return fname
elif env.machines.host.is_darwin():
return fname[:-3] + '.dylib'
else:
return fname[:-3] + '.so'
return fname
def validate_install(test: TestDef, installdir: Path, compiler: str, env: environment.Environment) -> str:
expected_raw = [x.get_path(compiler, env) for x in test.installed_files]
expected = {Path(x): False for x in expected_raw if x}
found = [x.relative_to(installdir) for x in installdir.rglob('*') if x.is_file() or x.is_symlink()]
ret_msg = ''
# Mark all found files as found and detect unexpected files
for fname in found:
if fname not in expected:
ret_msg += 'Extra file {} found.\n'.format(fname)
continue
expected[fname] = True
# Check if expected files were found
for p, f in expected.items():
if not f:
ret_msg += 'Expected file {} missing.\n'.format(p)
# List dir content on error
if ret_msg != '':
ret_msg += '\nInstall dir contents:\n'
for i in found:
ret_msg += ' - {}\n'.format(i)
return ret_msg
def log_text_file(logfile, testdir, stdo, stde):
global stop, executor, futures
logfile.write('%s\nstdout\n\n---\n' % testdir.as_posix())
logfile.write(stdo)
logfile.write('\n\n---\n\nstderr\n\n---\n')
logfile.write(stde)
logfile.write('\n\n---\n\n')
if print_debug:
try:
print(stdo)
except UnicodeError:
sanitized_out = stdo.encode('ascii', errors='replace').decode()
print(sanitized_out)
try:
print(stde, file=sys.stderr)
except UnicodeError:
sanitized_err = stde.encode('ascii', errors='replace').decode()
print(sanitized_err, file=sys.stderr)
if stop:
print("Aborting..")
for f in futures:
f[2].cancel()
executor.shutdown()
raise StopException()
def bold(text):
return mlog.bold(text).get_text(mlog.colorize_console())
def green(text):
return mlog.green(text).get_text(mlog.colorize_console())
def red(text):
return mlog.red(text).get_text(mlog.colorize_console())
def yellow(text):
return mlog.yellow(text).get_text(mlog.colorize_console())
def _run_ci_include(args: T.List[str]) -> str:
if not args:
return 'At least one parameter required'
try:
data = Path(args[0]).read_text(errors='ignore', encoding='utf-8')
return 'Included file {}:\n{}\n'.format(args[0], data)
except Exception:
return 'Failed to open {}'.format(args[0])
ci_commands = {
'ci_include': _run_ci_include
}
def run_ci_commands(raw_log: str) -> T.List[str]:
res = []
for l in raw_log.splitlines():
if not l.startswith('!meson_ci!/'):
continue
cmd = shlex.split(l[11:])
if not cmd or cmd[0] not in ci_commands:
continue
res += ['CI COMMAND {}:\n{}\n'.format(cmd[0], ci_commands[cmd[0]](cmd[1:]))]
return res
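# Sketch of the mechanism above (hedged): any line of meson-log.txt that starts
# with '!meson_ci!/' is parsed as a CI command, so a log line such as
#   !meson_ci!/ci_include relative/path/to/file.txt
# makes _run_ci_include attach that file's contents to the test output.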
def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) -> str:
if expected:
i = iter(expected)
def next_expected(i):
# Get the next expected line
item = next(i)
how = item.get('match', 'literal')
expected = item.get('line')
# Simple heuristic to automatically convert path separators for
# Windows:
#
# Any '/' appearing before 'WARNING' or 'ERROR' (i.e. a path in a
# filename part of a location) is replaced with '\' (in a re: '\\'
# which matches a literal '\')
#
# (There should probably be a way to turn this off for more complex
# cases which don't fit this)
if mesonlib.is_windows():
if how != "re":
sub = r'\\'
else:
sub = r'\\\\'
expected = re.sub(r'/(?=.*(WARNING|ERROR))', sub, expected)
return how, expected
try:
how, expected = next_expected(i)
for actual in output.splitlines():
if how == "re":
match = bool(re.match(expected, actual))
else:
match = (expected == actual)
if match:
how, expected = next_expected(i)
# reached the end of output without finding expected
return 'expected "{}" not found in {}'.format(expected, desc)
except StopIteration:
# matched all expected lines
pass
return ''
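# Hedged illustration of the expected-output format consumed above: each entry is
# a dict with a 'line' key and an optional 'match' key ('literal' by default,
# 're' for regular expressions), e.g.
#   [{'line': 'WARNING: something'}, {'line': 'ERROR: .*', 'match': 're'}]
# Entries must appear in this order in the output; unrelated lines in between are
# skipped, and an entry that never appears fails with 'expected "..." not found'.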
def validate_output(test: TestDef, stdo: str, stde: str) -> str:
return _compare_output(test.stdout, stdo, 'stdout')
# There are some class variables and such that cache
# information. Clear all of these. The better solution
# would be to change the code so that no state is persisted
# but that would be a lot of work given that Meson was originally
# coded to run as a batch process.
def clear_internal_caches():
import mesonbuild.interpreterbase
from mesonbuild.dependencies import CMakeDependency
from mesonbuild.mesonlib import PerMachine
mesonbuild.interpreterbase.FeatureNew.feature_registry = {}
CMakeDependency.class_cmakeinfo = PerMachine(None, None)
def run_test_inprocess(testdir):
old_stdout = sys.stdout
sys.stdout = mystdout = StringIO()
old_stderr = sys.stderr
sys.stderr = mystderr = StringIO()
old_cwd = os.getcwd()
os.chdir(testdir)
test_log_fname = Path('meson-logs', 'testlog.txt')
try:
returncode_test = mtest.run_with_args(['--no-rebuild'])
if test_log_fname.exists():
test_log = test_log_fname.open(errors='ignore').read()
else:
test_log = ''
returncode_benchmark = mtest.run_with_args(['--no-rebuild', '--benchmark', '--logbase', 'benchmarklog'])
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
os.chdir(old_cwd)
return max(returncode_test, returncode_benchmark), mystdout.getvalue(), mystderr.getvalue(), test_log
# Build directory name must be the same so Ccache works over
# consecutive invocations.
def create_deterministic_builddir(test: TestDef, use_tmpdir: bool) -> str:
import hashlib
src_dir = test.path.as_posix()
if test.name:
src_dir += test.name
rel_dirname = 'b ' + hashlib.sha256(src_dir.encode(errors='ignore')).hexdigest()[0:10]
abs_pathname = os.path.join(tempfile.gettempdir() if use_tmpdir else os.getcwd(), rel_dirname)
os.mkdir(abs_pathname)
return abs_pathname
def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail, use_tmp: bool):
if test.skip:
return None
with AutoDeletedDir(create_deterministic_builddir(test, use_tmp)) as build_dir:
with AutoDeletedDir(tempfile.mkdtemp(prefix='i ', dir=None if use_tmp else os.getcwd())) as install_dir:
try:
return _run_test(test, build_dir, install_dir, extra_args, compiler, backend, flags, commands, should_fail)
except TestResult as r:
return r
finally:
mlog.shutdown() # Close the log file because otherwise Windows wets itself.
def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, compiler, backend, flags, commands, should_fail):
compile_commands, clean_commands, install_commands, uninstall_commands = commands
gen_start = time.time()
# Configure in-process
gen_args = [] # type: T.List[str]
if 'prefix' not in test.do_not_set_opts:
gen_args += ['--prefix', 'x:/usr'] if mesonlib.is_windows() else ['--prefix', '/usr']
if 'libdir' not in test.do_not_set_opts:
gen_args += ['--libdir', 'lib']
gen_args += [test.path.as_posix(), test_build_dir] + flags + extra_args
nativefile = test.path / 'nativefile.ini'
crossfile = test.path / 'crossfile.ini'
if nativefile.exists():
gen_args.extend(['--native-file', nativefile.as_posix()])
if crossfile.exists():
gen_args.extend(['--cross-file', crossfile.as_posix()])
(returncode, stdo, stde) = run_configure(gen_args, env=test.env)
try:
logfile = Path(test_build_dir, 'meson-logs', 'meson-log.txt')
mesonlog = logfile.open(errors='ignore', encoding='utf-8').read()
except Exception:
mesonlog = no_meson_log_msg
cicmds = run_ci_commands(mesonlog)
testresult = TestResult(cicmds)
testresult.add_step(BuildStep.configure, stdo, stde, mesonlog, time.time() - gen_start)
output_msg = validate_output(test, stdo, stde)
testresult.mlog += output_msg
if output_msg:
testresult.fail('Unexpected output while configuring.')
return testresult
if should_fail == 'meson':
if returncode == 1:
return testresult
elif returncode != 0:
testresult.fail('Test exited with unexpected status {}.'.format(returncode))
return testresult
else:
testresult.fail('Test that should have failed succeeded.')
return testresult
if returncode != 0:
testresult.fail('Generating the build system failed.')
return testresult
builddata = build.load(test_build_dir)
dir_args = get_backend_args_for_dir(backend, test_build_dir)
# Build with subprocess
def build_step():
build_start = time.time()
pc, o, e = Popen_safe(compile_commands + dir_args, cwd=test_build_dir)
testresult.add_step(BuildStep.build, o, e, '', time.time() - build_start)
if should_fail == 'build':
if pc.returncode != 0:
raise testresult
testresult.fail('Test that should have failed to build succeeded.')
raise testresult
if pc.returncode != 0:
testresult.fail('Compiling source code failed.')
raise testresult
# Touch the meson.build file to force a regenerate
def force_regenerate():
ensure_backend_detects_changes(backend)
os.utime(str(test.path / 'meson.build'))
# just test building
build_step()
# test that regeneration works for build step
force_regenerate()
build_step() # TBD: assert nothing gets built after the regenerate?
# test that regeneration works for test step
force_regenerate()
# Test in-process
clear_internal_caches()
test_start = time.time()
(returncode, tstdo, tstde, test_log) = run_test_inprocess(test_build_dir)
testresult.add_step(BuildStep.test, tstdo, tstde, test_log, time.time() - test_start)
if should_fail == 'test':
if returncode != 0:
return testresult
testresult.fail('Test that should have failed to run unit tests succeeded.')
return testresult
if returncode != 0:
testresult.fail('Running unit tests failed.')
return testresult
# Do installation, if the backend supports it
if install_commands:
env = os.environ.copy()
env['DESTDIR'] = install_dir
# Install with subprocess
pi, o, e = Popen_safe(install_commands, cwd=test_build_dir, env=env)
testresult.add_step(BuildStep.install, o, e)
if pi.returncode != 0:
testresult.fail('Running install failed.')
return testresult
# Clean with subprocess
env = os.environ.copy()
pi, o, e = Popen_safe(clean_commands + dir_args, cwd=test_build_dir, env=env)
testresult.add_step(BuildStep.clean, o, e)
if pi.returncode != 0:
testresult.fail('Running clean failed.')
return testresult
# Validate installed files
testresult.add_step(BuildStep.install, '', '')
if not install_commands:
return testresult
install_msg = validate_install(test, Path(install_dir), compiler, builddata.environment)
if install_msg:
testresult.fail('\n' + install_msg)
return testresult
return testresult
def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]:
tests = [t.name for t in testdir.iterdir() if t.is_dir()]
tests = [t for t in tests if not t.startswith('.')] # Filter out non-test files (dot files, etc.)
test_defs = [TestDef(testdir / t, None, []) for t in tests]
all_tests = [] # type: T.List[TestDef]
for t in test_defs:
test_def = {}
test_def_file = t.path / 'test.json'
if test_def_file.is_file():
test_def = json.loads(test_def_file.read_text())
# Handle additional environment variables
env = {} # type: T.Dict[str, str]
if 'env' in test_def:
assert isinstance(test_def['env'], dict)
env = test_def['env']
for key, val in env.items():
val = val.replace('@ROOT@', t.path.resolve().as_posix())
env[key] = val
# Handle installed files
installed = [] # type: T.List[InstalledFile]
if 'installed' in test_def:
installed = [InstalledFile(x) for x in test_def['installed']]
# Handle expected output
stdout = test_def.get('stdout', [])
if stdout_mandatory and not stdout:
raise RuntimeError("{} must contain a non-empty stdout key".format(test_def_file))
# Handle the do_not_set_opts list
do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str]
# Skip tests if the tool requirements are not met
if 'tools' in test_def:
assert isinstance(test_def['tools'], dict)
for tool, vers_req in test_def['tools'].items():
if tool not in tool_vers_map:
t.skip = True
elif not mesonlib.version_compare(tool_vers_map[tool], vers_req):
t.skip = True
# Skip the matrix code and just update the existing test
if 'matrix' not in test_def:
t.env.update(env)
t.installed_files = installed
t.do_not_set_opts = do_not_set_opts
t.stdout = stdout
all_tests += [t]
continue
# 'matrix' entry is present, so build multiple tests from the matrix definition
opt_list = [] # type: T.List[T.List[T.Tuple[str, bool]]]
matrix = test_def['matrix']
assert "options" in matrix
for key, val in matrix["options"].items():
assert isinstance(val, list)
tmp_opts = [] # type: T.List[T.Tuple[str, bool]]
for i in val:
assert isinstance(i, dict)
assert "val" in i
skip = False
# Skip the matrix entry if environment variable is present
if 'skip_on_env' in i:
for skip_env_var in i['skip_on_env']:
if skip_env_var in os.environ:
skip = True
# Only run the test if all compiler IDs match
if 'compilers' in i:
for lang, id_list in i['compilers'].items():
if lang not in compiler_id_map or compiler_id_map[lang] not in id_list:
skip = True
break
# Add an empty matrix entry
if i['val'] is None:
tmp_opts += [(None, skip)]
continue
tmp_opts += [('{}={}'.format(key, i['val']), skip)]
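# Combine this key's options with every combination collected so far (a Cartesian product across all matrix keys).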
if opt_list:
new_opt_list = [] # type: T.List[T.List[T.Tuple[str, bool]]]
for i in opt_list:
for j in tmp_opts:
new_opt_list += [[*i, j]]
opt_list = new_opt_list
else:
opt_list = [[x] for x in tmp_opts]
# Exclude specific configurations
if 'exclude' in matrix:
assert isinstance(matrix['exclude'], list)
new_opt_list = [] # type: T.List[T.List[T.Tuple[str, bool]]]
for i in opt_list:
exclude = False
opt_names = [x[0] for x in i]
for j in matrix['exclude']:
ex_list = ['{}={}'.format(k, v) for k, v in j.items()]
if all([x in opt_names for x in ex_list]):
exclude = True
break
if not exclude:
new_opt_list += [i]
opt_list = new_opt_list
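# Each surviving option combination becomes its own TestDef with the matching -D arguments.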
for i in opt_list:
name = ' '.join([x[0] for x in i if x[0] is not None])
opts = ['-D' + x[0] for x in i if x[0] is not None]
skip = any([x[1] for x in i])
test = TestDef(t.path, name, opts, skip or t.skip)
test.env.update(env)
test.installed_files = installed
test.do_not_set_opts = do_not_set_opts
test.stdout = stdout
all_tests += [test]
return sorted(all_tests)
def have_d_compiler():
if shutil.which("ldc2"):
return True
elif shutil.which("ldc"):
return True
elif shutil.which("gdc"):
return True
elif shutil.which("dmd"):
# The Windows installer sometimes produces a DMD install
# that exists but segfaults every time the compiler is run.
# Don't know why. Don't know how to fix. Skip in this case.
cp = subprocess.run(['dmd', '--version'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
if cp.stdout == b'':
return False
return True
return False
def have_objc_compiler(use_tmp: bool) -> bool:
with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if use_tmp else '.')) as build_dir:
env = environment.Environment(None, build_dir, get_fake_options('/'))
try:
objc_comp = env.detect_objc_compiler(MachineChoice.HOST)
except mesonlib.MesonException:
return False
if not objc_comp:
return False
env.coredata.process_new_compiler('objc', objc_comp, env)
try:
objc_comp.sanity_check(env.get_scratch_dir(), env)
except mesonlib.MesonException:
return False
return True
def have_objcpp_compiler(use_tmp: bool) -> bool:
with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if use_tmp else '.')) as build_dir:
env = environment.Environment(None, build_dir, get_fake_options('/'))
try:
objcpp_comp = env.detect_objcpp_compiler(MachineChoice.HOST)
except mesonlib.MesonException:
return False
if not objcpp_comp:
return False
env.coredata.process_new_compiler('objcpp', objcpp_comp, env)
try:
objcpp_comp.sanity_check(env.get_scratch_dir(), env)
except mesonlib.MesonException:
return False
return True
def have_java():
if shutil.which('javac') and shutil.which('java'):
return True
return False
def skippable(suite, test):
# Everything is optional when not running on CI, or on Ubuntu 16.04 CI
if not under_ci or under_xenial_ci:
return True
if not suite.endswith('frameworks'):
return True
# this test's assumptions aren't valid for Windows paths
if test.endswith('38 libdir must be inside prefix'):
return True
# gtk-doc test may be skipped, pending upstream fixes for spaces in
# filenames landing in the distro used for CI
if test.endswith('10 gtk-doc'):
return True
# NetCDF is not in the CI Docker image
if test.endswith('netcdf'):
return True
# MSVC doesn't link with GFortran
if test.endswith('14 fortran links c'):
return True
# Blocks are not supported on all compilers
if test.endswith('29 blocks'):
return True
# Scientific libraries are skippable on certain systems
# See the discussion here: https://github.com/mesonbuild/meson/pull/6562
if any([x in test for x in ['17 mpi', '25 hdf5', '30 scalapack']]) and skip_scientific:
return True
# These create OS specific tests, and need to be skippable
if any([x in test for x in ['16 sdl', '17 mpi']]):
return True
# No frameworks test should be skipped on linux CI, as we expect all
# prerequisites to be installed
if mesonlib.is_linux():
return False
# Boost test should only be skipped for windows CI build matrix entries
# which don't define BOOST_ROOT
if test.endswith('1 boost'):
if mesonlib.is_windows():
return 'BOOST_ROOT' not in os.environ
return False
# Qt is provided on macOS by Homebrew
if test.endswith('4 qt') and mesonlib.is_osx():
return False
# Other framework tests are allowed to be skipped on other platforms
return True
def skip_csharp(backend) -> bool:
if backend is not Backend.ninja:
return True
if not shutil.which('resgen'):
return True
if shutil.which('mcs'):
return False
if shutil.which('csc'):
# Only support VS2017 for now. Earlier versions fail
# under CI in mysterious ways.
try:
stdo = subprocess.check_output(['csc', '/version'])
except subprocess.CalledProcessError:
return True
# Having incrementing version numbers would be too easy.
# Microsoft reset the versioning back to 1.0 (from 4.x)
# when they got the Roslyn based compiler. Thus there
# is NO WAY to reliably do version number comparisons.
# Only support the version that ships with VS2017.
return not stdo.startswith(b'2.')
return True
# In Azure some setups have a broken rustc that will error out
# on all compilation attempts.
def has_broken_rustc() -> bool:
dirname = 'brokenrusttest'
if os.path.exists(dirname):
mesonlib.windows_proof_rmtree(dirname)
os.mkdir(dirname)
open(dirname + '/sanity.rs', 'w').write('''fn main() {
}
''')
pc = subprocess.run(['rustc', '-o', 'sanity.exe', 'sanity.rs'],
cwd=dirname,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
mesonlib.windows_proof_rmtree(dirname)
return pc.returncode != 0
def should_skip_rust(backend: Backend) -> bool:
if not shutil.which('rustc'):
return True
if backend is not Backend.ninja:
return True
if mesonlib.is_windows() and has_broken_rustc():
return True
return False
def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, T.List[TestDef], bool]]:
"""
Parameters
----------
only: list of str, optional
specify names of tests to run
Returns
-------
gathered_tests: list of tuple of str, list of TestDef, bool
tests to run
"""
skip_fortran = not(shutil.which('gfortran') or
shutil.which('flang') or
shutil.which('pgfortran') or
shutil.which('ifort'))
class TestCategory:
def __init__(self, category: str, subdir: str, skip: bool = False, stdout_mandatory: bool = False):
self.category = category # category name
self.subdir = subdir # subdirectory
self.skip = skip # skip condition
self.stdout_mandatory = stdout_mandatory # expected stdout is mandatory for tests in this category
all_tests = [
TestCategory('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)),
TestCategory('common', 'common'),
TestCategory('warning-meson', 'warning', stdout_mandatory=True),
TestCategory('failing-meson', 'failing', stdout_mandatory=True),
TestCategory('failing-build', 'failing build'),
TestCategory('failing-test', 'failing test'),
TestCategory('keyval', 'keyval'),
TestCategory('platform-osx', 'osx', not mesonlib.is_osx()),
TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()),
TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),
TestCategory('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()),
TestCategory('C#', 'csharp', skip_csharp(backend)),
TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))),
TestCategory('rust', 'rust', should_skip_rust(backend)),
TestCategory('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
TestCategory('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)),
TestCategory('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)),
TestCategory('fortran', 'fortran', skip_fortran or backend != Backend.ninja),
TestCategory('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
# CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja
TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')),
TestCategory('python3', 'python3', backend is not Backend.ninja),
TestCategory('python', 'python'),
TestCategory('fpga', 'fpga', shutil.which('yosys') is None),
TestCategory('frameworks', 'frameworks'),
TestCategory('nasm', 'nasm'),
TestCategory('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja),
]
categories = [t.category for t in all_tests]
assert categories == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) needs to be updated to match all_tests categories'
if only:
all_tests = [t for t in all_tests if t.category in only]
gathered_tests = [(t.category, gather_tests(Path('test cases', t.subdir), t.stdout_mandatory), t.skip) for t in all_tests]
return gathered_tests
def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
log_name_base: str, failfast: bool,
extra_args: T.List[str], use_tmp: bool) -> T.Tuple[int, int, int]:
global logfile
txtname = log_name_base + '.txt'
with open(txtname, 'w', encoding='utf-8', errors='ignore') as lf:
logfile = lf
return _run_tests(all_tests, log_name_base, failfast, extra_args, use_tmp)
def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
log_name_base: str, failfast: bool,
extra_args: T.List[str], use_tmp: bool) -> T.Tuple[int, int, int]:
global stop, executor, futures, system_compiler
xmlname = log_name_base + '.xml'
junit_root = ET.Element('testsuites')
conf_time = 0
build_time = 0
test_time = 0
passing_tests = 0
failing_tests = 0
skipped_tests = 0
commands = (compile_commands, clean_commands, install_commands, uninstall_commands)
try:
# This fails in some CI environments for unknown reasons.
num_workers = multiprocessing.cpu_count()
except Exception as e:
print('Could not determine number of CPUs due to the following reason: ' + str(e))
print('Defaulting to using only one process')
num_workers = 1
# Due to Ninja deficiency, almost 50% of build time
# is spent waiting. Do something useful instead.
#
# Remove this once the following issue has been resolved:
# https://github.com/mesonbuild/meson/pull/2082
if not mesonlib.is_windows(): # twice as fast on Windows by *not* multiplying by 2.
num_workers *= 2
executor = ProcessPoolExecutor(max_workers=num_workers)
for name, test_cases, skipped in all_tests:
current_suite = ET.SubElement(junit_root, 'testsuite', {'name': name, 'tests': str(len(test_cases))})
print()
if skipped:
print(bold('Not running %s tests.' % name))
else:
print(bold('Running %s tests.' % name))
print()
futures = []
for t in test_cases:
# Jenkins screws us over by automatically sorting test cases by name
# and getting it wrong by not doing logical number sorting.
(testnum, testbase) = t.path.name.split(' ', 1)
testname = '%.3d %s' % (int(testnum), testbase)
if t.name:
testname += ' ({})'.format(t.name)
should_fail = False
suite_args = []
if name.startswith('failing'):
should_fail = name.split('failing-')[1]
if name.startswith('warning'):
suite_args = ['--fatal-meson-warnings']
should_fail = name.split('warning-')[1]
t.skip = skipped or t.skip
result = executor.submit(run_test, t, extra_args + suite_args + t.args,
system_compiler, backend, backend_flags, commands, should_fail, use_tmp)
futures.append((testname, t, result))
for (testname, t, result) in futures:
sys.stdout.flush()
try:
result = result.result()
except CancelledError:
continue
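# Projects signal unmet prerequisites by emitting MESON_SKIP_TEST in their output; count those as skipped rather than failed.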
if (result is None) or (('MESON_SKIP_TEST' in result.stdo) and (skippable(name, t.path.as_posix()))):
print(yellow('Skipping:'), t.display_name())
current_test = ET.SubElement(current_suite, 'testcase', {'name': testname,
'classname': name})
ET.SubElement(current_test, 'skipped', {})
skipped_tests += 1
else:
without_install = "" if len(install_commands) > 0 else " (without install)"
if result.msg != '':
print(red('Failed test{} during {}: {!r}'.format(without_install, result.step.name, t.display_name())))
print('Reason:', result.msg)
failing_tests += 1
if result.step == BuildStep.configure and result.mlog != no_meson_log_msg:
# For configure failures, instead of printing stdout,
# print the meson log if available since it's a superset
# of stdout and often has very useful information.
failing_logs.append(result.mlog)
elif under_ci:
# Always print the complete meson log when running in
# a CI. This helps debugging issues that only occur in
# a hard to reproduce environment
failing_logs.append(result.mlog)
failing_logs.append(result.stdo)
else:
failing_logs.append(result.stdo)
for cmd_res in result.cicmds:
failing_logs.append(cmd_res)
failing_logs.append(result.stde)
if failfast:
print("Cancelling the rest of the tests")
for (_, _, res) in futures:
res.cancel()
else:
print('Succeeded test%s: %s' % (without_install, t.display_name()))
passing_tests += 1
conf_time += result.conftime
build_time += result.buildtime
test_time += result.testtime
total_time = conf_time + build_time + test_time
log_text_file(logfile, t.path, result.stdo, result.stde)
current_test = ET.SubElement(current_suite, 'testcase', {'name': testname,
'classname': name,
'time': '%.3f' % total_time})
if result.msg != '':
ET.SubElement(current_test, 'failure', {'message': result.msg})
stdoel = ET.SubElement(current_test, 'system-out')
stdoel.text = result.stdo
stdeel = ET.SubElement(current_test, 'system-err')
stdeel.text = result.stde
if failfast and failing_tests > 0:
break
print("\nTotal configuration time: %.2fs" % conf_time)
print("Total build time: %.2fs" % build_time)
print("Total test time: %.2fs" % test_time)
ET.ElementTree(element=junit_root).write(xmlname, xml_declaration=True, encoding='UTF-8')
return passing_tests, failing_tests, skipped_tests
def check_file(file: Path):
lines = file.read_bytes().split(b'\n')
tabdetector = re.compile(br' *\t')
for i, line in enumerate(lines):
if re.match(tabdetector, line):
raise SystemExit("File {} contains a tab indent on line {:d}. Only spaces are permitted.".format(file, i + 1))
if line.endswith(b'\r'):
raise SystemExit("File {} contains DOS line ending on line {:d}. Only unix-style line endings are permitted.".format(file, i + 1))
def check_format():
check_suffixes = {'.c',
'.cpp',
'.cxx',
'.cc',
'.rs',
'.f90',
'.vala',
'.d',
'.s',
'.m',
'.mm',
'.asm',
'.java',
'.txt',
'.py',
'.swift',
'.build',
'.md',
}
for (root, _, filenames) in os.walk('.'):
if '.dub' in root: # external deps are here
continue
if '.pytest_cache' in root:
continue
if 'meson-logs' in root or 'meson-private' in root:
continue
if '__CMake_build' in root:
continue
if '.eggs' in root or '_cache' in root: # e.g. .mypy_cache
continue
for fname in filenames:
file = Path(fname)
if file.suffix.lower() in check_suffixes:
if file.name in ('sitemap.txt', 'meson-test-run.txt'):
continue
check_file(root / file)
def check_meson_commands_work(options):
global backend, compile_commands, test_commands, install_commands
testdir = PurePath('test cases', 'common', '1 trivial').as_posix()
meson_commands = mesonlib.python_command + [get_meson_script()]
with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if options.use_tmpdir else '.')) as build_dir:
print('Checking that configuring works...')
gen_cmd = meson_commands + [testdir, build_dir] + backend_flags + options.extra_args
pc, o, e = Popen_safe(gen_cmd)
if pc.returncode != 0:
raise RuntimeError('Failed to configure {!r}:\n{}\n{}'.format(testdir, e, o))
print('Checking that building works...')
dir_args = get_backend_args_for_dir(backend, build_dir)
pc, o, e = Popen_safe(compile_commands + dir_args, cwd=build_dir)
if pc.returncode != 0:
raise RuntimeError('Failed to build {!r}:\n{}\n{}'.format(testdir, e, o))
print('Checking that testing works...')
pc, o, e = Popen_safe(test_commands, cwd=build_dir)
if pc.returncode != 0:
raise RuntimeError('Failed to test {!r}:\n{}\n{}'.format(testdir, e, o))
if install_commands:
print('Checking that installing works...')
pc, o, e = Popen_safe(install_commands, cwd=build_dir)
if pc.returncode != 0:
raise RuntimeError('Failed to install {!r}:\n{}\n{}'.format(testdir, e, o))
def detect_system_compiler(options):
global system_compiler, compiler_id_map
with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir=None if options.use_tmpdir else '.')) as build_dir:
fake_opts = get_fake_options('/')
if options.cross_file:
fake_opts.cross_file = [options.cross_file]
env = environment.Environment(None, build_dir, fake_opts)
print()
for lang in sorted(compilers.all_languages):
try:
comp = env.compiler_from_language(lang, MachineChoice.HOST)
details = '{:<10} {} {}'.format('[' + comp.get_id() + ']', ' '.join(comp.get_exelist()), comp.get_version_string())
compiler_id_map[lang] = comp.get_id()
except mesonlib.MesonException:
comp = None
details = '[not found]'
print('%-7s: %s' % (lang, details))
# note C compiler for later use by platform_fix_name()
if lang == 'c':
if comp:
system_compiler = comp.get_id()
else:
raise RuntimeError("Could not find C compiler.")
print()
def print_tool_versions():
tools = [
{
'tool': 'ninja',
'args': ['--version'],
'regex': re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
'match_group': 1,
},
{
'tool': 'cmake',
'args': ['--version'],
'regex': re.compile(r'^cmake version ([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
'match_group': 1,
},
{
'tool': 'hotdoc',
'args': ['--version'],
'regex': re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
'match_group': 1,
},
]
def get_version(t: dict) -> str:
exe = shutil.which(t['tool'])
if not exe:
return 'not found'
args = [t['tool']] + t['args']
pc, o, e = Popen_safe(args)
if pc.returncode != 0:
return '{} (invalid {} executable)'.format(exe, t['tool'])
for i in o.split('\n'):
i = i.strip('\n\r\t ')
m = t['regex'].match(i)
if m is not None:
tool_vers_map[t['tool']] = m.group(t['match_group'])
return '{} ({})'.format(exe, m.group(t['match_group']))
return '{} (unknown)'.format(exe)
max_width = max([len(x['tool']) for x in tools] + [7])
for tool in tools:
print('{0:<{2}}: {1}'.format(tool['tool'], get_version(tool), max_width))
print()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Run the test suite of Meson.")
parser.add_argument('extra_args', nargs='*',
help='arguments that are passed directly to Meson (remember to have -- before these).')
parser.add_argument('--backend', dest='backend', choices=backendlist)
parser.add_argument('--failfast', action='store_true',
help='Stop running if test case fails')
parser.add_argument('--no-unittests', action='store_true',
help='Not used, only here to simplify run_tests.py')
parser.add_argument('--only', help='name of test(s) to run', nargs='+', choices=ALL_TESTS)
parser.add_argument('--cross-file', action='store', help='File describing cross compilation environment.')
parser.add_argument('--use-tmpdir', action='store_true', help='Use tmp directory for temporary files.')
options = parser.parse_args()
if options.cross_file:
options.extra_args += ['--cross-file', options.cross_file]
print('Meson build system', meson_version, 'Project Tests')
setup_commands(options.backend)
detect_system_compiler(options)
print_tool_versions()
script_dir = os.path.split(__file__)[0]
if script_dir != '':
os.chdir(script_dir)
check_format()
check_meson_commands_work(options)
try:
all_tests = detect_tests_to_run(options.only, options.use_tmpdir)
(passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args, options.use_tmpdir)
except StopException:
pass
print('\nTotal passed tests:', green(str(passing_tests)))
print('Total failed tests:', red(str(failing_tests)))
print('Total skipped tests:', yellow(str(skipped_tests)))
if failing_tests > 0:
print('\nMesonlogs of failing tests\n')
for l in failing_logs:
try:
print(l, '\n')
except UnicodeError:
print(l.encode('ascii', errors='replace').decode(), '\n')
for name, dirs, _ in all_tests:
dir_names = list(set(x.path.name for x in dirs))
for k, g in itertools.groupby(dir_names, key=lambda x: x.split()[0]):
tests = list(g)
if len(tests) != 1:
print('WARNING: The %s suite contains duplicate "%s" tests: "%s"' % (name, k, '", "'.join(tests)))
raise SystemExit(failing_tests)
| apache-2.0 | 4,731,378,599,057,081,000 | 39.85625 | 152 | 0.572147 | false |
jamesblunt/psutil | test/_posix.py | 4 | 9932 | #!/usr/bin/env python
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""POSIX specific tests. These are implicitly run by test_psutil.py."""
import datetime
import os
import subprocess
import sys
import time
import psutil
from psutil._compat import PY3, callable
from test_psutil import LINUX, SUNOS, OSX, BSD, PYTHON, POSIX, TRAVIS
from test_psutil import (get_test_subprocess, skip_on_access_denied,
retry_before_failing, reap_children, sh, unittest,
get_kernel_version, wait_for_pid)
def ps(cmd):
"""Expects a ps command with a -o argument and parse the result
returning only the value of interest.
"""
if not LINUX:
cmd = cmd.replace(" --no-headers ", " ")
if SUNOS:
cmd = cmd.replace("-o command", "-o comm")
cmd = cmd.replace("-o start", "-o stime")
p = subprocess.Popen(cmd, shell=1, stdout=subprocess.PIPE)
output = p.communicate()[0].strip()
if PY3:
output = str(output, sys.stdout.encoding)
if not LINUX:
output = output.split('\n')[1].strip()
try:
return int(output)
except ValueError:
return output
@unittest.skipUnless(POSIX, "not a POSIX system")
class PosixSpecificTestCase(unittest.TestCase):
"""Compare psutil results against 'ps' command line utility."""
@classmethod
def setUpClass(cls):
cls.pid = get_test_subprocess([PYTHON, "-E", "-O"],
stdin=subprocess.PIPE).pid
wait_for_pid(cls.pid)
@classmethod
def tearDownClass(cls):
reap_children()
# for ps -o arguments see: http://unixhelp.ed.ac.uk/CGI/man-cgi?ps
def test_process_parent_pid(self):
ppid_ps = ps("ps --no-headers -o ppid -p %s" % self.pid)
ppid_psutil = psutil.Process(self.pid).ppid()
self.assertEqual(ppid_ps, ppid_psutil)
def test_process_uid(self):
uid_ps = ps("ps --no-headers -o uid -p %s" % self.pid)
uid_psutil = psutil.Process(self.pid).uids().real
self.assertEqual(uid_ps, uid_psutil)
def test_process_gid(self):
gid_ps = ps("ps --no-headers -o rgid -p %s" % self.pid)
gid_psutil = psutil.Process(self.pid).gids().real
self.assertEqual(gid_ps, gid_psutil)
def test_process_username(self):
username_ps = ps("ps --no-headers -o user -p %s" % self.pid)
username_psutil = psutil.Process(self.pid).username()
self.assertEqual(username_ps, username_psutil)
@skip_on_access_denied()
@retry_before_failing()
def test_process_rss_memory(self):
# give python interpreter some time to properly initialize
# so that the results are the same
time.sleep(0.1)
rss_ps = ps("ps --no-headers -o rss -p %s" % self.pid)
rss_psutil = psutil.Process(self.pid).memory_info()[0] / 1024
self.assertEqual(rss_ps, rss_psutil)
@skip_on_access_denied()
@retry_before_failing()
def test_process_vsz_memory(self):
# give python interpreter some time to properly initialize
# so that the results are the same
time.sleep(0.1)
vsz_ps = ps("ps --no-headers -o vsz -p %s" % self.pid)
vsz_psutil = psutil.Process(self.pid).memory_info()[1] / 1024
self.assertEqual(vsz_ps, vsz_psutil)
def test_process_name(self):
# use command + arg since "comm" keyword not supported on all platforms
name_ps = ps("ps --no-headers -o command -p %s" % (
self.pid)).split(' ')[0]
# remove path if there is any, from the command
name_ps = os.path.basename(name_ps).lower()
name_psutil = psutil.Process(self.pid).name().lower()
self.assertEqual(name_ps, name_psutil)
@unittest.skipIf(OSX or BSD,
'ps -o start not available')
def test_process_create_time(self):
time_ps = ps("ps --no-headers -o start -p %s" % self.pid).split(' ')[0]
time_psutil = psutil.Process(self.pid).create_time()
time_psutil_tstamp = datetime.datetime.fromtimestamp(
time_psutil).strftime("%H:%M:%S")
# sometimes ps shows the time rounded up instead of down, so we check
# for both possible values
round_time_psutil = round(time_psutil)
round_time_psutil_tstamp = datetime.datetime.fromtimestamp(
round_time_psutil).strftime("%H:%M:%S")
self.assertIn(time_ps, [time_psutil_tstamp, round_time_psutil_tstamp])
def test_process_exe(self):
ps_pathname = ps("ps --no-headers -o command -p %s" %
self.pid).split(' ')[0]
psutil_pathname = psutil.Process(self.pid).exe()
try:
self.assertEqual(ps_pathname, psutil_pathname)
except AssertionError:
# certain platforms such as BSD are more accurate returning:
# "/usr/local/bin/python2.7"
# ...instead of:
# "/usr/local/bin/python"
# We do not want to consider this difference in accuracy
# an error.
# truncate the psutil pathname to the length reported by ps before comparing
adjusted_psutil_pathname = psutil_pathname[:len(ps_pathname)]
self.assertEqual(ps_pathname, adjusted_psutil_pathname)
def test_process_cmdline(self):
ps_cmdline = ps("ps --no-headers -o command -p %s" % self.pid)
psutil_cmdline = " ".join(psutil.Process(self.pid).cmdline())
if SUNOS:
# ps on Solaris only shows the first part of the cmdline
psutil_cmdline = psutil_cmdline.split(" ")[0]
self.assertEqual(ps_cmdline, psutil_cmdline)
@retry_before_failing()
def test_pids(self):
# Note: this test might fail if the OS is starting/killing
# other processes in the meantime
if SUNOS:
cmd = ["ps", "-A", "-o", "pid"]
else:
cmd = ["ps", "ax", "-o", "pid"]
p = get_test_subprocess(cmd, stdout=subprocess.PIPE)
output = p.communicate()[0].strip()
assert p.poll() == 0
if PY3:
output = str(output, sys.stdout.encoding)
pids_ps = []
for line in output.split('\n')[1:]:
if line:
pid = int(line.split()[0].strip())
pids_ps.append(pid)
# remove the ps subprocess pid, which is supposed to be dead in the meantime
pids_ps.remove(p.pid)
pids_psutil = psutil.pids()
pids_ps.sort()
pids_psutil.sort()
# on OSX ps doesn't show pid 0
if OSX and 0 not in pids_ps:
pids_ps.insert(0, 0)
if pids_ps != pids_psutil:
difference = [x for x in pids_psutil if x not in pids_ps] + \
[x for x in pids_ps if x not in pids_psutil]
self.fail("difference: " + str(difference))
# for some reason ifconfig -a does not report all interfaces
# returned by psutil
@unittest.skipIf(SUNOS, "test not reliable on SUNOS")
@unittest.skipIf(TRAVIS, "test not reliable on Travis")
def test_nic_names(self):
p = subprocess.Popen("ifconfig -a", shell=1, stdout=subprocess.PIPE)
output = p.communicate()[0].strip()
if PY3:
output = str(output, sys.stdout.encoding)
for nic in psutil.net_io_counters(pernic=True).keys():
for line in output.split():
if line.startswith(nic):
break
else:
self.fail(
"couldn't find %s nic in 'ifconfig -a' output\n%s" % (
nic, output))
@retry_before_failing()
def test_users(self):
out = sh("who")
lines = out.split('\n')
users = [x.split()[0] for x in lines]
self.assertEqual(len(users), len(psutil.users()))
terminals = [x.split()[1] for x in lines]
for u in psutil.users():
self.assertTrue(u.name in users, u.name)
self.assertTrue(u.terminal in terminals, u.terminal)
def test_fds_open(self):
# Note: this fails from time to time; I'm inclined to think
# it doesn't mean something is broken
def call(p, attr):
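# Note: the 'attr' parameter is immediately re-bound below; the method actually looked up comes from the enclosing loop variable 'name'.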
args = ()
attr = getattr(p, name, None)
if attr is not None and callable(attr):
if name == 'rlimit':
args = (psutil.RLIMIT_NOFILE,)
attr(*args)
else:
attr
p = psutil.Process(os.getpid())
failures = []
ignored_names = ['terminate', 'kill', 'suspend', 'resume', 'nice',
'send_signal', 'wait', 'children', 'as_dict']
if LINUX and get_kernel_version() < (2, 6, 36):
ignored_names.append('rlimit')
if LINUX and get_kernel_version() < (2, 6, 23):
ignored_names.append('num_ctx_switches')
for name in dir(psutil.Process):
if (name.startswith('_') or name in ignored_names):
continue
else:
try:
num1 = p.num_fds()
for x in range(2):
call(p, name)
num2 = p.num_fds()
except psutil.AccessDenied:
pass
else:
if abs(num2 - num1) > 1:
fail = "failure while processing Process.%s method " \
"(before=%s, after=%s)" % (name, num1, num2)
failures.append(fail)
if failures:
self.fail('\n' + '\n'.join(failures))
def main():
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(PosixSpecificTestCase))
result = unittest.TextTestRunner(verbosity=2).run(test_suite)
return result.wasSuccessful()
if __name__ == '__main__':
if not main():
sys.exit(1)
| bsd-3-clause | -915,559,176,048,948,200 | 37.34749 | 79 | 0.567962 | false |