repo_name (stringlengths 5-100) | ref (stringlengths 12-67) | path (stringlengths 4-244) | copies (stringlengths 1-8) | content (stringlengths 0-1.05M, ⌀ allowed) |
---|---|---|---|---|
meisterkleister/erpnext | refs/heads/master | erpnext/stock/report/serial_no_status/__init__.py | 12133432 | |
SurfasJones/icecream-info | refs/heads/master | icecream/lib/python2.7/site-packages/django/contrib/staticfiles/models.py | 12133432 | |
yigitguler/django | refs/heads/master | tests/timezones/__init__.py | 12133432 | |
andree1320z/deport-upao-web | refs/heads/master | deport_upao/config/__init__.py | 12133432 | |
lwiecek/django | refs/heads/master | django/template/loaders/__init__.py | 12133432 | |
toooooper/oppia | refs/heads/master | extensions/rules/base.py | 26 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base rules."""
from core.domain import rule_domain
from extensions.objects.models import objects
class CodeEvaluationRule(rule_domain.Rule):
subject_type = objects.CodeEvaluation
class CoordTwoDimRule(rule_domain.Rule):
subject_type = objects.CoordTwoDim
class MusicPhraseRule(rule_domain.Rule):
subject_type = objects.MusicPhrase
class NonnegativeIntRule(rule_domain.Rule):
subject_type = objects.NonnegativeInt
class NormalizedStringRule(rule_domain.Rule):
subject_type = objects.NormalizedString
class RealRule(rule_domain.Rule):
subject_type = objects.Real
class SetOfUnicodeStringRule(rule_domain.Rule):
subject_type = objects.SetOfUnicodeString
class UnicodeStringRule(rule_domain.Rule):
subject_type = objects.UnicodeString
class CheckedProofRule(rule_domain.Rule):
subject_type = objects.CheckedProof
class GraphRule(rule_domain.Rule):
subject_type = objects.Graph
class ImageWithRegionsRule(rule_domain.Rule):
subject_type = objects.ImageWithRegions
class ClickOnImageRule(rule_domain.Rule):
subject_type = objects.ClickOnImage
|
pfnet/chainer | refs/heads/master | tests/chainer_tests/links_tests/connection_tests/test_deconvolution_2d.py | 5 | import numpy
import chainer
from chainer.backends import cuda
import chainer.functions as F
from chainer import links as L
from chainer import testing
from chainer.testing import parameterize
def _pair(x):
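# return x unchanged if it is already pair-like (indexable); otherwise duplicate the scalar into a pair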
if hasattr(x, '__getitem__'):
return x
return x, x
@parameterize(
*testing.product({
'nobias': [True, False],
'dilate': [1, 2],
'groups': [1, 3],
'x_dtype': [numpy.float32],
'W_dtype': [numpy.float32],
})
)
@testing.inject_backend_tests(
['test_forward', 'test_backward'],
# CPU tests
[
{},
{'use_ideep': 'always'},
]
# GPU tests
+ testing.product({
'use_cuda': [True],
'use_cudnn': ['never', 'always'],
'cuda_device': [0, 1],
})
+ [
{'use_chainerx': True, 'chainerx_device': 'native:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:1'},
])
class TestDeconvolution2D(testing.LinkTestCase):
def setUp(self):
self.in_channels = 3
self.out_channels = 6
self.ksize = 3
self.stride = 2
self.pad = 1
if self.nobias:
TestDeconvolution2D.param_names = ('W',)
else:
TestDeconvolution2D.param_names = ('W', 'b')
self.check_backward_options.update({'atol': 1e-3, 'rtol': 1e-2})
def before_test(self, test_name):
# cuDNN 5 and 5.1 results suffer from precision issues
using_old_cudnn = (self.backend_config.xp is cuda.cupy
and self.backend_config.use_cudnn == 'always'
and cuda.cuda.cudnn.getVersion() < 6000)
if using_old_cudnn:
self.check_backward_options.update({'atol': 3e-2, 'rtol': 5e-2})
def generate_inputs(self):
N = 2
h, w = 3, 2
x = numpy.random.uniform(
-1, 1, (N, self.in_channels, h, w)).astype(self.x_dtype)
return x,
def generate_params(self):
initialW = chainer.initializers.Normal(1, self.W_dtype)
initial_bias = chainer.initializers.Normal(1, self.x_dtype)
return initialW, initial_bias
def create_link(self, initializers):
initialW, initial_bias = initializers
if self.nobias:
link = L.Deconvolution2D(
self.in_channels, self.out_channels, self.ksize,
stride=self.stride, pad=self.pad, nobias=self.nobias,
dilate=self.dilate, groups=self.groups,
initialW=initialW)
else:
link = L.Deconvolution2D(
self.in_channels, self.out_channels, self.ksize,
stride=self.stride, pad=self.pad, nobias=self.nobias,
dilate=self.dilate, groups=self.groups,
initialW=initialW,
initial_bias=initial_bias)
return link
def forward_expected(self, link, inputs):
x, = inputs
W = link.W
if self.nobias:
y = F.deconvolution_2d(
x, W,
stride=self.stride, pad=self.pad,
dilate=self.dilate, groups=self.groups)
else:
b = link.b
y = F.deconvolution_2d(
x, W, b,
stride=self.stride, pad=self.pad,
dilate=self.dilate, groups=self.groups)
return y.array,
@parameterize(
*testing.product({
'nobias': [True, False],
'use_cudnn': ['always', 'never'],
'deconv_args': [((3, 2, 3), {}), ((2, 3), {}), ((None, 2, 3), {}),
((2, 3), {'stride': 2, 'pad': 1}),
((None, 2, 3, 2, 1), {})]
})
)
@testing.inject_backend_tests(
['test_forward', 'test_backward'],
# CPU tests
[
{},
{'use_ideep': 'always'},
]
# GPU tests
+ testing.product({
'use_cuda': [True],
'use_cudnn': ['never', 'always'],
'cuda_device': [0, 1],
})
+ [
{'use_chainerx': True, 'chainerx_device': 'native:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:0'},
{'use_chainerx': True, 'chainerx_device': 'cuda:1'},
])
class TestDeconvolution2DParameterShapePlaceholder(testing.LinkTestCase):
def setUp(self):
if self.nobias:
self.param_names = ('W',)
else:
self.param_names = ('W', 'b')
self.check_backward_options.update({'atol': 1e-4, 'rtol': 1e-3})
def before_test(self, test_name):
# cuDNN 5 and 5.1 results suffer from precision issues
using_old_cudnn = (self.backend_config.xp is cuda.cupy
and self.backend_config.use_cudnn == 'always'
and cuda.cuda.cudnn.getVersion() < 6000)
if using_old_cudnn:
self.check_backward_options.update({'atol': 3e-2, 'rtol': 5e-2})
def generate_inputs(self):
N = 2
h, w = 3, 2
x = numpy.random.uniform(
-1, 1, (N, 3, h, w)).astype(numpy.float32)
return x,
def generate_params(self):
return []
def create_link(self, initializers):
args, kwargs = self.deconv_args
kwargs['nobias'] = self.nobias
link = L.Deconvolution2D(*args, **kwargs)
if not self.nobias:
link.b.data[...] = numpy.random.uniform(
-1, 1, link.b.data.shape).astype(numpy.float32)
return link
def forward_expected(self, link, inputs):
x, = inputs
y = link(x).array
return y,
testing.run_module(__name__, __file__)
|
AsgerPetersen/tiledrasterio | refs/heads/master | tiledrasterio/scripts/cli.py | 1 | # Skeleton of a CLI
import click
import tiledrasterio
@click.command('tiledrasterio')
@click.argument('count', type=int, metavar='N')
def cli(count):
"""Echo a value `N` number of times"""
for i in range(count):
click.echo(tiledrasterio.has_legs)
|
mmmsplay10/AttackWeakBrowsers | refs/heads/master | lib/flask/testing.py | 783 | # -*- coding: utf-8 -*-
"""
flask.testing
~~~~~~~~~~~~~
Implements test support helpers. This module is lazily imported
and usually not used in production environments.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from contextlib import contextmanager
from werkzeug.test import Client, EnvironBuilder
from flask import _request_ctx_stack
try:
from werkzeug.urls import url_parse
except ImportError:
from urlparse import urlsplit as url_parse
def make_test_environ_builder(app, path='/', base_url=None, *args, **kwargs):
"""Creates a new test builder with some application defaults thrown in."""
http_host = app.config.get('SERVER_NAME')
app_root = app.config.get('APPLICATION_ROOT')
if base_url is None:
url = url_parse(path)
base_url = 'http://%s/' % (url.netloc or http_host or 'localhost')
if app_root:
base_url += app_root.lstrip('/')
if url.netloc:
path = url.path
return EnvironBuilder(path, base_url, *args, **kwargs)
class FlaskClient(Client):
"""Works like a regular Werkzeug test client but has some knowledge about
how Flask works to defer the cleanup of the request context stack to the
end of a with body when used in a with statement. For general information
about how to use this class refer to :class:`werkzeug.test.Client`.
Basic usage is outlined in the :ref:`testing` chapter.
"""
preserve_context = False
@contextmanager
def session_transaction(self, *args, **kwargs):
"""When used in combination with a with statement this opens a
session transaction. This can be used to modify the session that
the test client uses. Once the with block is left the session is
stored back.
with client.session_transaction() as session:
session['value'] = 42
Internally this is implemented by going through a temporary test
request context and since session handling could depend on
request variables this function accepts the same arguments as
:meth:`~flask.Flask.test_request_context` which are directly
passed through.
"""
if self.cookie_jar is None:
raise RuntimeError('Session transactions only make sense '
'with cookies enabled.')
app = self.application
environ_overrides = kwargs.setdefault('environ_overrides', {})
self.cookie_jar.inject_wsgi(environ_overrides)
outer_reqctx = _request_ctx_stack.top
with app.test_request_context(*args, **kwargs) as c:
sess = app.open_session(c.request)
if sess is None:
raise RuntimeError('Session backend did not open a session. '
'Check the configuration')
# Since we have to open a new request context for the session
# handling we want to make sure that we hide our own context
# from the caller. By pushing the original request context
# (or None) on top of this and popping it we get exactly that
# behavior. It's important to not use the push and pop
# methods of the actual request context object since that would
# mean that cleanup handlers are called
_request_ctx_stack.push(outer_reqctx)
try:
yield sess
finally:
_request_ctx_stack.pop()
resp = app.response_class()
if not app.session_interface.is_null_session(sess):
app.save_session(sess, resp)
headers = resp.get_wsgi_headers(c.request.environ)
self.cookie_jar.extract_wsgi(c.request.environ, headers)
def open(self, *args, **kwargs):
kwargs.setdefault('environ_overrides', {}) \
['flask._preserve_context'] = self.preserve_context
as_tuple = kwargs.pop('as_tuple', False)
buffered = kwargs.pop('buffered', False)
follow_redirects = kwargs.pop('follow_redirects', False)
builder = make_test_environ_builder(self.application, *args, **kwargs)
return Client.open(self, builder,
as_tuple=as_tuple,
buffered=buffered,
follow_redirects=follow_redirects)
def __enter__(self):
if self.preserve_context:
raise RuntimeError('Cannot nest client invocations')
self.preserve_context = True
return self
def __exit__(self, exc_type, exc_value, tb):
self.preserve_context = False
# on exit we want to clean up earlier. Normally the request context
# stays preserved until the next request in the same thread comes
# in. See RequestGlobals.push() for the general behavior.
top = _request_ctx_stack.top
if top is not None and top.preserved:
top.pop()
|
wattlebird/Bangumi_Spider | refs/heads/master | bgm/pipelines.py | 1 | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exceptions import DropItem
from twisted.enterprise import adbapi
from scrapy import signals
from scrapy.exporters import JsonLinesItemExporter, CsvItemExporter
import pickle
import codecs
import datetime
import os
from .settings import *
if UPLOAD_TO_AZURE_STORAGE:
from azure.storage.blob import BlobServiceClient
class TsvPipeline(object):
def __init__(self):
self.files = dict()
@classmethod
def from_crawler(cls, crawler):
pipeline = cls()
crawler.signals.connect(pipeline.spider_opened, signals.spider_opened)
crawler.signals.connect(pipeline.spider_closed, signals.spider_closed)
return pipeline
def spider_opened(self, spider):
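# the trailing '*' marks the export file as in-progress; spider_closed strips it and stamps the close time into the final name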
file = open(spider.name+'-'+datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")+'.tsv*', 'wb')
self.files[spider] = [file]
self.exporter = CsvItemExporter(file, include_headers_line=True, join_multivalued=';', encoding="utf-8", delimiter='\t')
if spider.name=='record':
userfile = open('user-'+datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")+'.tsv*', 'wb')
self.files[spider].append(userfile)
self.userexporter = CsvItemExporter(userfile, include_headers_line=True, join_multivalued=';', encoding="utf-8", delimiter='\t')
self.userexporter.fields_to_export = ['uid', 'name', 'nickname', 'joindate']
self.exporter.fields_to_export = ['uid', 'iid', 'typ', 'state', 'adddate', 'rate', 'tags', 'comment']
elif spider.name=='user':
self.exporter.fields_to_export = ['uid', 'name', 'nickname', 'joindate']
elif spider.name=='subject':
self.exporter.fields_to_export = ['subjectid', 'subjecttype', 'subjectname', 'order', 'alias', 'staff', 'relations']
elif spider.name=='index':
self.exporter.fields_to_export = ['indexid', 'creator', 'favourite', 'date', 'items']
elif spider.name=='friends':
self.exporter.fields_to_export = ['user', 'friend']
self.exporter.start_exporting()
if spider.name=='record':
self.userexporter.start_exporting()
def spider_closed(self, spider):
self.exporter.finish_exporting()
file = self.files[spider].pop(0)
filename = file.name
newname = filename[:-5]+'-'+datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")+'.tsv'
file.close()
os.rename(filename, newname)
if spider.name == 'record':
self.userexporter.finish_exporting()
file = self.files[spider].pop(0)
userfilename = file.name
newuserfilename = userfilename[:-5]+'-'+datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")+'.tsv'
file.close()
os.rename(userfilename, newuserfilename)
if UPLOAD_TO_AZURE_STORAGE:
blobServiceClient = BlobServiceClient.from_connection_string(AZURE_ACCOUNT_KEY)
blobClient = blobServiceClient.get_blob_client(container=AZURE_CONTAINER, blob=newname)
with open(newname, "rb") as data:
blobClient.upload_blob(data)
if spider.name == 'record':
blobClient = blobServiceClient.get_blob_client(container=AZURE_CONTAINER, blob=newuserfilename)
with open(newuserfilename, "rb") as data:
blobClient.upload_blob(data)
def process_item(self, item, spider):
if spider.name == 'record':
if 'joindate' in item:
self.userexporter.export_item(item)
else:
self.exporter.export_item(item)
else:
self.exporter.export_item(item)
return item
|
antonve/s4-project-mooc | refs/heads/master | common/djangoapps/config_models/tests.py | 2 | """
Tests of ConfigurationModel
"""
from django.contrib.auth.models import User
from django.db import models
from django.test import TestCase
from freezegun import freeze_time
from mock import patch
from config_models.models import ConfigurationModel
class ExampleConfig(ConfigurationModel):
"""
Test model for testing ``ConfigurationModels``.
"""
cache_timeout = 300
string_field = models.TextField()
int_field = models.IntegerField(default=10)
@patch('config_models.models.cache')
class ConfigurationModelTests(TestCase):
"""
Tests of ConfigurationModel
"""
def setUp(self):
super(ConfigurationModelTests, self).setUp()
self.user = User()
self.user.save()
def test_cache_deleted_on_save(self, mock_cache):
ExampleConfig(changed_by=self.user).save()
mock_cache.delete.assert_called_with(ExampleConfig.cache_key_name())
def test_cache_key_name(self, _mock_cache):
self.assertEquals(ExampleConfig.cache_key_name(), 'configuration/ExampleConfig/current')
def test_no_config_empty_cache(self, mock_cache):
mock_cache.get.return_value = None
current = ExampleConfig.current()
self.assertEquals(current.int_field, 10)
self.assertEquals(current.string_field, '')
mock_cache.set.assert_called_with(ExampleConfig.cache_key_name(), current, 300)
def test_no_config_full_cache(self, mock_cache):
current = ExampleConfig.current()
self.assertEquals(current, mock_cache.get.return_value)
def test_config_ordering(self, mock_cache):
mock_cache.get.return_value = None
with freeze_time('2012-01-01'):
first = ExampleConfig(changed_by=self.user)
first.string_field = 'first'
first.save()
second = ExampleConfig(changed_by=self.user)
second.string_field = 'second'
second.save()
self.assertEquals(ExampleConfig.current().string_field, 'second')
def test_cache_set(self, mock_cache):
mock_cache.get.return_value = None
first = ExampleConfig(changed_by=self.user)
first.string_field = 'first'
first.save()
ExampleConfig.current()
mock_cache.set.assert_called_with(ExampleConfig.cache_key_name(), first, 300)
|
steven-cutting/latinpigsay | refs/heads/stable | latinpigsay/tmp/experiments/expfunctions.py | 1 | # -*- coding: utf-8 -*-
__title__ = 'latinpigsay'
__license__ = 'MIT'
__author__ = 'Steven Cutting'
__author_email__ = '[email protected]'
__created_on__ = '12/7/2014'
# handle a massive file using generator
def filereader_gen(file):
with open(file) as f:
for line in f:
yield line
|
bsipocz/astropy | refs/heads/hacking | astropy/nddata/tests/test_decorators.py | 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import inspect
import pytest
import numpy as np
from astropy.tests.helper import catch_warnings
from astropy.utils.exceptions import AstropyUserWarning
from astropy import units as u
from astropy.nddata.nddata import NDData
from astropy.nddata.decorators import support_nddata
class CCDData(NDData):
pass
@support_nddata
def wrapped_function_1(data, wcs=None, unit=None):
return data, wcs, unit
def test_pass_numpy():
data_in = np.array([1, 2, 3])
data_out, wcs_out, unit_out = wrapped_function_1(data=data_in)
assert data_out is data_in
assert wcs_out is None
assert unit_out is None
def test_pass_all_separate():
data_in = np.array([1, 2, 3])
wcs_in = "the wcs"
unit_in = u.Jy
data_out, wcs_out, unit_out = wrapped_function_1(data=data_in, wcs=wcs_in, unit=unit_in)
assert data_out is data_in
assert wcs_out is wcs_in
assert unit_out is unit_in
def test_pass_nddata():
data_in = np.array([1, 2, 3])
wcs_in = "the wcs"
unit_in = u.Jy
nddata_in = NDData(data_in, wcs=wcs_in, unit=unit_in)
data_out, wcs_out, unit_out = wrapped_function_1(nddata_in)
assert data_out is data_in
assert wcs_out is wcs_in
assert unit_out is unit_in
def test_pass_nddata_and_explicit():
data_in = np.array([1, 2, 3])
wcs_in = "the wcs"
unit_in = u.Jy
unit_in_alt = u.mJy
nddata_in = NDData(data_in, wcs=wcs_in, unit=unit_in)
with catch_warnings() as w:
data_out, wcs_out, unit_out = wrapped_function_1(nddata_in, unit=unit_in_alt)
assert data_out is data_in
assert wcs_out is wcs_in
assert unit_out is unit_in_alt
assert len(w) == 1
assert str(w[0].message) == ("Property unit has been passed explicitly and as "
"an NDData property, using explicitly specified value")
def test_pass_nddata_ignored():
data_in = np.array([1, 2, 3])
wcs_in = "the wcs"
unit_in = u.Jy
nddata_in = NDData(data_in, wcs=wcs_in, unit=unit_in, mask=[0, 1, 0])
with catch_warnings() as w:
data_out, wcs_out, unit_out = wrapped_function_1(nddata_in)
assert data_out is data_in
assert wcs_out is wcs_in
assert unit_out is unit_in
assert len(w) == 1
assert str(w[0].message) == ("The following attributes were set on the data "
"object, but will be ignored by the function: mask")
def test_incorrect_first_argument():
with pytest.raises(ValueError) as exc:
@support_nddata
def wrapped_function_2(something, wcs=None, unit=None):
pass
assert exc.value.args[0] == "Can only wrap functions whose first positional argument is `data`"
with pytest.raises(ValueError) as exc:
@support_nddata
def wrapped_function_3(something, data, wcs=None, unit=None):
pass
assert exc.value.args[0] == "Can only wrap functions whose first positional argument is `data`"
with pytest.raises(ValueError) as exc:
@support_nddata
def wrapped_function_4(wcs=None, unit=None):
pass
assert exc.value.args[0] == "Can only wrap functions whose first positional argument is `data`"
def test_wrap_function_no_kwargs():
@support_nddata
def wrapped_function_5(data, other_data):
return data
data_in = np.array([1, 2, 3])
nddata_in = NDData(data_in)
assert wrapped_function_5(nddata_in, [1, 2, 3]) is data_in
def test_wrap_function_repack_valid():
@support_nddata(repack=True, returns=['data'])
def wrapped_function_5(data, other_data):
return data
data_in = np.array([1, 2, 3])
nddata_in = NDData(data_in)
nddata_out = wrapped_function_5(nddata_in, [1, 2, 3])
assert isinstance(nddata_out, NDData)
assert nddata_out.data is data_in
def test_wrap_function_accepts():
class MyData(NDData):
pass
@support_nddata(accepts=MyData)
def wrapped_function_5(data, other_data):
return data
data_in = np.array([1, 2, 3])
nddata_in = NDData(data_in)
mydata_in = MyData(data_in)
assert wrapped_function_5(mydata_in, [1, 2, 3]) is data_in
with pytest.raises(TypeError) as exc:
wrapped_function_5(nddata_in, [1, 2, 3])
assert exc.value.args[0] == "Only NDData sub-classes that inherit from MyData can be used by this function"
def test_wrap_preserve_signature_docstring():
@support_nddata
def wrapped_function_6(data, wcs=None, unit=None):
"""
An awesome function
"""
pass
if wrapped_function_6.__doc__ is not None:
assert wrapped_function_6.__doc__.strip() == "An awesome function"
signature = inspect.signature(wrapped_function_6)
assert str(signature) == "(data, wcs=None, unit=None)"
def test_setup_failures1():
# repack but no returns
with pytest.raises(ValueError):
support_nddata(repack=True)
def test_setup_failures2():
# returns but no repack
with pytest.raises(ValueError):
support_nddata(returns=['data'])
def test_setup_failures9():
# keeps but no repack
with pytest.raises(ValueError):
support_nddata(keeps=['unit'])
def test_setup_failures3():
# same attribute in keeps and returns
with pytest.raises(ValueError):
support_nddata(repack=True, keeps=['mask'], returns=['data', 'mask'])
def test_setup_failures4():
# function accepts *args
with pytest.raises(ValueError):
@support_nddata
def test(data, *args):
pass
def test_setup_failures10():
# function accepts **kwargs
with pytest.raises(ValueError):
@support_nddata
def test(data, **kwargs):
pass
def test_setup_failures5():
# function accepts *args (or **kwargs)
with pytest.raises(ValueError):
@support_nddata
def test(data, *args):
pass
def test_setup_failures6():
# First argument is not data
with pytest.raises(ValueError):
@support_nddata
def test(img):
pass
def test_setup_failures7():
# accepts CCDData but was given just an NDData
with pytest.raises(TypeError):
@support_nddata(accepts=CCDData)
def test(data):
pass
test(NDData(np.ones((3, 3))))
def test_setup_failures8():
# function returns a different number of arguments than specified. Using
# NDData here so we don't get into trouble when creating a CCDData without
# unit!
with pytest.raises(ValueError):
@support_nddata(repack=True, returns=['data', 'mask'])
def test(data):
return 10
test(NDData(np.ones((3, 3)))) # do NOT use CCDData here.
def test_setup_failures11():
# function accepts no arguments
with pytest.raises(ValueError):
@support_nddata
def test():
pass
def test_setup_numpyarray_default():
# It should be possible (even if it's not advisable to use mutable
# defaults) to have a numpy array as default value.
@support_nddata
def func(data, wcs=np.array([1, 2, 3])):
return wcs
def test_still_accepts_other_input():
@support_nddata(repack=True, returns=['data'])
def test(data):
return data
assert isinstance(test(NDData(np.ones((3, 3)))), NDData)
assert isinstance(test(10), int)
assert isinstance(test([1, 2, 3]), list)
def test_accepting_property_normal():
# Accepts a mask attribute and takes it from the input
@support_nddata
def test(data, mask=None):
return mask
ndd = NDData(np.ones((3, 3)))
assert test(ndd) is None
ndd._mask = np.zeros((3, 3))
assert np.all(test(ndd) == 0)
# Use the explicitly given one (raises a Warning)
with catch_warnings(AstropyUserWarning) as w:
assert test(ndd, mask=10) == 10
assert len(w) == 1
def test_parameter_default_identical_to_explicit_passed_argument():
# If the default is identical to the explicitly passed argument this
# should still raise a Warning and use the explicit one.
@support_nddata
def func(data, wcs=[1, 2, 3]):
return wcs
with catch_warnings(AstropyUserWarning) as w:
assert func(NDData(1, wcs=[1, 2]), [1, 2, 3]) == [1, 2, 3]
assert len(w) == 1
with catch_warnings(AstropyUserWarning) as w:
assert func(NDData(1, wcs=[1, 2])) == [1, 2]
assert len(w) == 0
def test_accepting_property_notexist():
# Accepts flags attribute but NDData doesn't have one
@support_nddata
def test(data, flags=10):
return flags
ndd = NDData(np.ones((3, 3)))
test(ndd)
def test_accepting_property_translated():
# Accepts a 'masked' argument that is translated from the NDData 'mask' attribute
@support_nddata(mask='masked')
def test(data, masked=None):
return masked
ndd = NDData(np.ones((3, 3)))
assert test(ndd) is None
ndd._mask = np.zeros((3, 3))
assert np.all(test(ndd) == 0)
# Use the explicitly given one (raises a Warning)
with catch_warnings(AstropyUserWarning) as w:
assert test(ndd, masked=10) == 10
assert len(w) == 1
def test_accepting_property_meta_empty():
# Meta is always set (OrderedDict) so it has a special case that it's
# ignored if it's empty but not None
@support_nddata
def test(data, meta=None):
return meta
ndd = NDData(np.ones((3, 3)))
assert test(ndd) is None
ndd._meta = {'a': 10}
assert test(ndd) == {'a': 10}
|
grischa/mytardis-mrtardis | refs/heads/master | tardis/tardis_portal/templatetags/xmldate.py | 7 | from django import template
from datetime import datetime
from tardis.tardis_portal.rfc3339 import rfc3339
register = template.Library()
@register.filter
def toxmldatetime(value):
return rfc3339(value)
|
jagguli/intellij-community | refs/heads/master | python/lib/Lib/site-packages/django/contrib/localflavor/fi/forms.py | 309 | """
FI-specific Form helpers
"""
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.translation import ugettext_lazy as _
class FIZipCodeField(RegexField):
default_error_messages = {
'invalid': _('Enter a zip code in the format XXXXX.'),
}
def __init__(self, *args, **kwargs):
super(FIZipCodeField, self).__init__(r'^\d{5}$',
max_length=None, min_length=None, *args, **kwargs)
class FIMunicipalitySelect(Select):
"""
A Select widget that uses a list of Finnish municipalities as its choices.
"""
def __init__(self, attrs=None):
from fi_municipalities import MUNICIPALITY_CHOICES
super(FIMunicipalitySelect, self).__init__(attrs, choices=MUNICIPALITY_CHOICES)
class FISocialSecurityNumber(Field):
default_error_messages = {
'invalid': _('Enter a valid Finnish social security number.'),
}
def clean(self, value):
super(FISocialSecurityNumber, self).clean(value)
if value in EMPTY_VALUES:
return u''
checkmarks = "0123456789ABCDEFHJKLMNPRSTUVWXY"
result = re.match(r"""^
(?P<date>([0-2]\d|3[01])
(0\d|1[012])
(\d{2}))
[A+-]
(?P<serial>(\d{3}))
(?P<checksum>[%s])$""" % checkmarks, value, re.VERBOSE | re.IGNORECASE)
if not result:
raise ValidationError(self.error_messages['invalid'])
gd = result.groupdict()
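# the check character is the concatenated date+serial number modulo 31, used as an index into checkmarks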
checksum = int(gd['date'] + gd['serial'])
if checkmarks[checksum % len(checkmarks)] == gd['checksum'].upper():
return u'%s' % value.upper()
raise ValidationError(self.error_messages['invalid'])
|
sadleader/odoo | refs/heads/master | addons/l10n_ch/account_wizard.py | 424 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# Translation contributors: brain-tec AG, Agile Business Group
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv.orm import TransientModel
class WizardMultiChartsAccounts(TransientModel):
_inherit = 'wizard.multi.charts.accounts'
def onchange_chart_template_id(self, cursor, uid, ids, chart_template_id=False, context=None):
if context is None: context = {}
res = super(WizardMultiChartsAccounts, self).onchange_chart_template_id(cursor, uid, ids,
chart_template_id=chart_template_id,
context=context)
# 0 is evaluated as False in python so we have to do this
# because original wizard test code_digits value on a float widget
if chart_template_id:
sterchi_template = self.pool.get('ir.model.data').get_object(cursor, uid, 'l10n_ch', 'l10nch_chart_template')
if sterchi_template.id == chart_template_id:
res['value']['code_digits'] = 0
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
thinkerou/grpc | refs/heads/master | src/python/grpcio/grpc/_plugin_wrapping.py | 11 | # Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import logging
import threading
import grpc
from grpc import _common
from grpc._cython import cygrpc
_LOGGER = logging.getLogger(__name__)
class _AuthMetadataContext(
collections.namedtuple('AuthMetadataContext', (
'service_url',
'method_name',
)), grpc.AuthMetadataContext):
pass
class _CallbackState(object):
def __init__(self):
self.lock = threading.Lock()
self.called = False
self.exception = None
class _AuthMetadataPluginCallback(grpc.AuthMetadataPluginCallback):
def __init__(self, state, callback):
self._state = state
self._callback = callback
def __call__(self, metadata, error):
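# enforce the plugin callback contract: the callback may fire at most once, and never after a stored exception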
with self._state.lock:
if self._state.exception is None:
if self._state.called:
raise RuntimeError(
'AuthMetadataPluginCallback invoked more than once!')
else:
self._state.called = True
else:
raise RuntimeError(
'AuthMetadataPluginCallback raised exception "{}"!'.format(
self._state.exception))
if error is None:
self._callback(metadata, cygrpc.StatusCode.ok, None)
else:
self._callback(None, cygrpc.StatusCode.internal,
_common.encode(str(error)))
class _Plugin(object):
def __init__(self, metadata_plugin):
self._metadata_plugin = metadata_plugin
def __call__(self, service_url, method_name, callback):
context = _AuthMetadataContext(
_common.decode(service_url), _common.decode(method_name))
callback_state = _CallbackState()
try:
self._metadata_plugin(context,
_AuthMetadataPluginCallback(
callback_state, callback))
except Exception as exception: # pylint: disable=broad-except
_LOGGER.exception(
'AuthMetadataPluginCallback "%s" raised exception!',
self._metadata_plugin)
with callback_state.lock:
callback_state.exception = exception
if callback_state.called:
return
callback(None, cygrpc.StatusCode.internal,
_common.encode(str(exception)))
def metadata_plugin_call_credentials(metadata_plugin, name):
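# fall back from the explicit name to the plugin's __name__, then to its class name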
if name is None:
try:
effective_name = metadata_plugin.__name__
except AttributeError:
effective_name = metadata_plugin.__class__.__name__
else:
effective_name = name
return grpc.CallCredentials(
cygrpc.MetadataPluginCallCredentials(
_Plugin(metadata_plugin), _common.encode(effective_name)))
|
MOA-2011/enigma2.pli4.0 | refs/heads/master | lib/python/Tools/HardwareInfo.py | 1 | import os
from Tools.Directories import SCOPE_SKIN, resolveFilename
hw_info = None
class HardwareInfo:
device_name = _("unavailable")
device_model = None
device_version = ""
device_revision = ""
device_hdmi = False
def __init__(self):
global hw_info
if hw_info is not None:
# print "using cached result"
return
hw_info = self
print "Scanning hardware info"
# Version
if os.path.exists("/proc/stb/info/version"):
self.device_version = open("/proc/stb/info/version").read().strip()
# Revision
if os.path.exists("/proc/stb/info/board_revision"):
self.device_revision = open("/proc/stb/info/board_revision").read().strip()
# Name ... bit odd, but history prevails
# if os.path.exists("/proc/stb/info/model"):
# self.device_name = open("/proc/stb/info/model").read().strip()
# elif os.path.exists("/proc/stb/info/hwmodel"):
# self.device_name = open("/proc/stb/info/hwmodel").read().strip()
# [ IQON : by LeeWS : our model is stored in /proc/stb/info/hwmodel, so this should be checked first.
if os.path.exists("/proc/stb/info/hwmodel"):
self.device_name = open("/proc/stb/info/hwmodel").read().strip()
# IQON ] : by LeeWS
else:
print "----------------"
print "you should upgrade to new drivers for the hardware detection to work properly"
print "----------------"
print "fallback to detect hardware via /proc/cpuinfo!!"
try:
f = open("/proc/cpuinfo", "r")
rd = f.read()
f.close()
if "Brcm4380 V4.2" in rd:
self.device_name = "dm8000"
elif "Brcm7401 V0.0" in rd:
self.device_name = "dm800"
elif "MIPS 4KEc V4.8" in rd:
self.device_name = "dm7025"
except:
pass
# Model
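# each non-comment line of hw_info.cfg is '<procfile> [prefix]'; the first entry present under /proc/stb/info wins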
for line in open((resolveFilename(SCOPE_SKIN, 'hw_info/hw_info.cfg')), 'r'):
if not line.startswith('#') and not line.isspace():
l = line.strip().replace('\t', ' ')
if l.find(' ') != -1:
infoFname, prefix = l.split()
else:
infoFname = l
prefix = ""
if os.path.exists("/proc/stb/info/" + infoFname):
self.device_model = prefix + open("/proc/stb/info/" + infoFname).read().strip()
break
if self.device_model is None:
self.device_model = self.device_name
# HDMI capability
self.device_hdmi = ( self.device_name == 'dm7020hd' or
self.device_name == 'dm800se' or
self.device_name == 'dm500hd' or
(self.device_name == 'dm8000' and self.device_version is not None))
print "Detected: " + self.get_device_string()
def get_device_name(self):
return hw_info.device_name
def get_device_model(self):
return hw_info.device_model
def get_device_version(self):
return hw_info.device_version
def get_device_revision(self):
return hw_info.device_revision
def get_device_string(self):
s = hw_info.device_model
if hw_info.device_revision != "":
s += " (" + hw_info.device_revision + "-" + hw_info.device_version + ")"
elif hw_info.device_version != "":
s += " (" + hw_info.device_version + ")"
return s
def has_hdmi(self):
# [ IQON : by LeeWS : replaced 'return hw_info.device_hdmi' because the initial menu would otherwise display "DVI"
DEVICES_WITHOUT_HDMI = []
if self.get_device_model() in DEVICES_WITHOUT_HDMI:
return False
else:
return True
# IQON ] : by LeeWS
#[ IQON by knuth
# HardwareInfo().device_name instead to get_device_model function.
def has_micom(self):
DEVICES_WITHOUT_MICOM = []
if self.get_device_model() in DEVICES_WITHOUT_MICOM:
return False
else:
return True
def has_vcr(self):
DEVICES_WITH_VCR = [ "tmtwinoe", "ios100hd" ]
if self.get_device_model() in DEVICES_WITH_VCR:
return False
else:
return True
def has_yuv(self):
DEVICES_WITH_YUV = [ 'force1', 'tmnano2super', 'tmtwinoe', 'ios100hd', 'ios200hd', 'tmnano2t', 'optimussos2', 'optimussos2plus' ]
if self.get_device_model() in DEVICES_WITH_YUV:
return True
else:
return False
def support_1080p_50_60(self):
DEVICES_WITH_1080P_50_60 = [ 'force1', 'tmnanosuper', 'tm2tsuper', 'force1plus', 'force2', 'optimussos1plus', 'optimussos2plus', 'tmnano2super' ]
if self.get_device_model() in DEVICES_WITH_1080P_50_60:
return True
else:
return False
def has_scart(self):
DEVICES_WITH_SCART = [ 'tmtwinoe', 'ios100hd', 'tm2toe', 'tmsingle', 'ios300hd', 'mediabox' ]
if self.get_device_model() in DEVICES_WITH_SCART:
return True
else:
return False
#IQON ] by knuth
|
walac/build-mozharness | refs/heads/master | configs/builds/releng_sub_linux_configs/64_mulet.py | 1 | import os
config = {
'default_actions': [
'clobber',
'clone-tools',
'setup-mock',
'build',
],
'stage_platform': 'linux64-mulet',
'stage_product': 'b2g',
'tooltool_manifest_src': "b2g/dev/config/tooltool-manifests/linux64/releng.manifest",
'platform_supports_post_upload_to_latest': False,
'enable_signing': False,
'enable_talos_sendchange': False,
'enable_count_ctors': False,
'enable_check_test': False,
#### 64 bit build specific #####
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'MOZ_AUTOMATION': '1',
'DISPLAY': ':2',
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'TINDERBOX_OUTPUT': '1',
'TOOLTOOL_CACHE': '/builds/tooltool_cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
## 64 bit specific
'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
/tools/python27-mercurial/bin:/home/cltbld/bin',
},
'src_mozconfig': 'b2g/dev/config/mozconfigs/linux64/mulet',
#######################
}
|
nishad-jobsglobal/odoo-marriot | refs/heads/master | addons/fleet/fleet.py | 266 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import time
import datetime
from openerp import tools
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
from dateutil.relativedelta import relativedelta
def str_to_datetime(strdate):
return datetime.datetime.strptime(strdate, tools.DEFAULT_SERVER_DATE_FORMAT)
class fleet_vehicle_cost(osv.Model):
_name = 'fleet.vehicle.cost'
_description = 'Cost related to a vehicle'
_order = 'date desc, vehicle_id asc'
def _get_odometer(self, cr, uid, ids, odometer_id, arg, context):
res = dict.fromkeys(ids, False)
for record in self.browse(cr,uid,ids,context=context):
if record.odometer_id:
res[record.id] = record.odometer_id.value
return res
def _set_odometer(self, cr, uid, id, name, value, args=None, context=None):
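# inverse setter of the functional 'odometer' field: creates a fleet.vehicle.odometer record and links it to this cost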
if not value:
raise except_orm(_('Operation not allowed!'), _('Emptying the odometer value of a vehicle is not allowed.'))
date = self.browse(cr, uid, id, context=context).date
if not(date):
date = fields.date.context_today(self, cr, uid, context=context)
vehicle_id = self.browse(cr, uid, id, context=context).vehicle_id
data = {'value': value, 'date': date, 'vehicle_id': vehicle_id.id}
odometer_id = self.pool.get('fleet.vehicle.odometer').create(cr, uid, data, context=context)
return self.write(cr, uid, id, {'odometer_id': odometer_id}, context=context)
_columns = {
'name': fields.related('vehicle_id', 'name', type="char", string='Name', store=True),
'vehicle_id': fields.many2one('fleet.vehicle', 'Vehicle', required=True, help='Vehicle concerned by this log'),
'cost_subtype_id': fields.many2one('fleet.service.type', 'Type', help='Cost type purchased with this cost'),
'amount': fields.float('Total Price'),
'cost_type': fields.selection([('contract', 'Contract'), ('services','Services'), ('fuel','Fuel'), ('other','Other')], 'Category of the cost', help='For internal purpose only', required=True),
'parent_id': fields.many2one('fleet.vehicle.cost', 'Parent', help='Parent cost to this current cost'),
'cost_ids': fields.one2many('fleet.vehicle.cost', 'parent_id', 'Included Services'),
'odometer_id': fields.many2one('fleet.vehicle.odometer', 'Odometer', help='Odometer measure of the vehicle at the moment of this log'),
'odometer': fields.function(_get_odometer, fnct_inv=_set_odometer, type='float', string='Odometer Value', help='Odometer measure of the vehicle at the moment of this log'),
'odometer_unit': fields.related('vehicle_id', 'odometer_unit', type="char", string="Unit", readonly=True),
'date' :fields.date('Date',help='Date when the cost has been executed'),
'contract_id': fields.many2one('fleet.vehicle.log.contract', 'Contract', help='Contract attached to this cost'),
'auto_generated': fields.boolean('Automatically Generated', readonly=True, required=True),
}
_defaults ={
'cost_type': 'other',
}
def create(self, cr, uid, data, context=None):
#make sure that the data are consistent with values of parent and contract records given
if 'parent_id' in data and data['parent_id']:
parent = self.browse(cr, uid, data['parent_id'], context=context)
data['vehicle_id'] = parent.vehicle_id.id
data['date'] = parent.date
data['cost_type'] = parent.cost_type
if 'contract_id' in data and data['contract_id']:
contract = self.pool.get('fleet.vehicle.log.contract').browse(cr, uid, data['contract_id'], context=context)
data['vehicle_id'] = contract.vehicle_id.id
data['cost_subtype_id'] = contract.cost_subtype_id.id
data['cost_type'] = contract.cost_type
if 'odometer' in data and not data['odometer']:
#if received value for odometer is 0, then remove it from the data as it would result to the creation of a
#odometer log with 0, which is to be avoided
del(data['odometer'])
return super(fleet_vehicle_cost, self).create(cr, uid, data, context=context)
class fleet_vehicle_tag(osv.Model):
_name = 'fleet.vehicle.tag'
_columns = {
'name': fields.char('Name', required=True, translate=True),
}
class fleet_vehicle_state(osv.Model):
_name = 'fleet.vehicle.state'
_order = 'sequence asc'
_columns = {
'name': fields.char('Name', required=True),
'sequence': fields.integer('Sequence', help="Used to order the note stages")
}
_sql_constraints = [('fleet_state_name_unique','unique(name)', 'State name already exists')]
class fleet_vehicle_model(osv.Model):
def _model_name_get_fnc(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for record in self.browse(cr, uid, ids, context=context):
name = record.modelname
if record.brand_id.name:
name = record.brand_id.name + ' / ' + name
res[record.id] = name
return res
def on_change_brand(self, cr, uid, ids, model_id, context=None):
if not model_id:
return {'value': {'image_medium': False}}
brand = self.pool.get('fleet.vehicle.model.brand').browse(cr, uid, model_id, context=context)
return {
'value': {
'image_medium': brand.image,
}
}
_name = 'fleet.vehicle.model'
_description = 'Model of a vehicle'
_order = 'name asc'
_columns = {
'name': fields.function(_model_name_get_fnc, type="char", string='Name', store=True),
'modelname': fields.char('Model name', required=True),
'brand_id': fields.many2one('fleet.vehicle.model.brand', 'Model Brand', required=True, help='Brand of the vehicle'),
'vendors': fields.many2many('res.partner', 'fleet_vehicle_model_vendors', 'model_id', 'partner_id', string='Vendors'),
'image': fields.related('brand_id', 'image', type="binary", string="Logo"),
'image_medium': fields.related('brand_id', 'image_medium', type="binary", string="Logo (medium)"),
'image_small': fields.related('brand_id', 'image_small', type="binary", string="Logo (small)"),
}
class fleet_vehicle_model_brand(osv.Model):
_name = 'fleet.vehicle.model.brand'
_description = 'Brand model of the vehicle'
_order = 'name asc'
def _get_image(self, cr, uid, ids, name, args, context=None):
result = dict.fromkeys(ids, False)
for obj in self.browse(cr, uid, ids, context=context):
result[obj.id] = tools.image_get_resized_images(obj.image)
return result
def _set_image(self, cr, uid, id, name, value, args, context=None):
return self.write(cr, uid, [id], {'image': tools.image_resize_image_big(value)}, context=context)
_columns = {
'name': fields.char('Brand Name', required=True),
'image': fields.binary("Logo",
help="This field holds the image used as logo for the brand, limited to 1024x1024px."),
'image_medium': fields.function(_get_image, fnct_inv=_set_image,
string="Medium-sized photo", type="binary", multi="_get_image",
store = {
'fleet.vehicle.model.brand': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
},
help="Medium-sized logo of the brand. It is automatically "\
"resized as a 128x128px image, with aspect ratio preserved. "\
"Use this field in form views or some kanban views."),
'image_small': fields.function(_get_image, fnct_inv=_set_image,
string="Smal-sized photo", type="binary", multi="_get_image",
store = {
'fleet.vehicle.model.brand': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
},
help="Small-sized photo of the brand. It is automatically "\
"resized as a 64x64px image, with aspect ratio preserved. "\
"Use this field anywhere a small image is required."),
}
class fleet_vehicle(osv.Model):
_inherit = 'mail.thread'
def _vehicle_name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
res = {}
for record in self.browse(cr, uid, ids, context=context):
res[record.id] = record.model_id.brand_id.name + '/' + record.model_id.modelname + ' / ' + record.license_plate
return res
def return_action_to_open(self, cr, uid, ids, context=None):
""" This opens the xml view specified in xml_id for the current vehicle """
if context is None:
context = {}
if context.get('xml_id'):
res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid ,'fleet', context['xml_id'], context=context)
res['context'] = context
res['context'].update({'default_vehicle_id': ids[0]})
res['domain'] = [('vehicle_id','=', ids[0])]
return res
return False
def act_show_log_cost(self, cr, uid, ids, context=None):
""" This opens log view to view and add new log for this vehicle, groupby default to only show effective costs
@return: the costs log view
"""
if context is None:
context = {}
res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid ,'fleet','fleet_vehicle_costs_act', context=context)
res['context'] = context
res['context'].update({
'default_vehicle_id': ids[0],
'search_default_parent_false': True
})
res['domain'] = [('vehicle_id','=', ids[0])]
return res
def _get_odometer(self, cr, uid, ids, odometer_id, arg, context):
res = dict.fromkeys(ids, 0)
for record in self.browse(cr,uid,ids,context=context):
ids = self.pool.get('fleet.vehicle.odometer').search(cr, uid, [('vehicle_id', '=', record.id)], limit=1, order='value desc')
if len(ids) > 0:
res[record.id] = self.pool.get('fleet.vehicle.odometer').browse(cr, uid, ids[0], context=context).value
return res
def _set_odometer(self, cr, uid, id, name, value, args=None, context=None):
if value:
date = fields.date.context_today(self, cr, uid, context=context)
data = {'value': value, 'date': date, 'vehicle_id': id}
return self.pool.get('fleet.vehicle.odometer').create(cr, uid, data, context=context)
def _search_get_overdue_contract_reminder(self, cr, uid, obj, name, args, context):
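# translates the boolean domain leaf into an ('id', 'in'/'not in', ids) domain built from raw SQL over expired, still-open contracts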
res = []
for field, operator, value in args:
assert operator in ('=', '!=', '<>') and value in (True, False), 'Operation not supported'
if (operator == '=' and value == True) or (operator in ('<>', '!=') and value == False):
search_operator = 'in'
else:
search_operator = 'not in'
today = fields.date.context_today(self, cr, uid, context=context)
cr.execute('select cost.vehicle_id, count(contract.id) as contract_number FROM fleet_vehicle_cost cost left join fleet_vehicle_log_contract contract on contract.cost_id = cost.id WHERE contract.expiration_date is not null AND contract.expiration_date < %s AND contract.state IN (\'open\', \'toclose\') GROUP BY cost.vehicle_id', (today,))
res_ids = [x[0] for x in cr.fetchall()]
res.append(('id', search_operator, res_ids))
return res
def _search_contract_renewal_due_soon(self, cr, uid, obj, name, args, context):
res = []
for field, operator, value in args:
assert operator in ('=', '!=', '<>') and value in (True, False), 'Operation not supported'
if (operator == '=' and value == True) or (operator in ('<>', '!=') and value == False):
search_operator = 'in'
else:
search_operator = 'not in'
today = fields.date.context_today(self, cr, uid, context=context)
datetime_today = datetime.datetime.strptime(today, tools.DEFAULT_SERVER_DATE_FORMAT)
limit_date = str((datetime_today + relativedelta(days=+15)).strftime(tools.DEFAULT_SERVER_DATE_FORMAT))
cr.execute('select cost.vehicle_id, count(contract.id) as contract_number FROM fleet_vehicle_cost cost left join fleet_vehicle_log_contract contract on contract.cost_id = cost.id WHERE contract.expiration_date is not null AND contract.expiration_date > %s AND contract.expiration_date < %s AND contract.state IN (\'open\', \'toclose\') GROUP BY cost.vehicle_id', (today, limit_date))
res_ids = [x[0] for x in cr.fetchall()]
res.append(('id', search_operator, res_ids))
return res
def _get_contract_reminder_fnc(self, cr, uid, ids, field_names, unknow_none, context=None):
res= {}
for record in self.browse(cr, uid, ids, context=context):
overdue = False
due_soon = False
total = 0
name = ''
for element in record.log_contracts:
if element.state in ('open', 'toclose') and element.expiration_date:
current_date_str = fields.date.context_today(self, cr, uid, context=context)
due_time_str = element.expiration_date
current_date = str_to_datetime(current_date_str)
due_time = str_to_datetime(due_time_str)
diff_time = (due_time-current_date).days
if diff_time < 0:
overdue = True
total += 1
if diff_time < 15 and diff_time >= 0:
due_soon = True
total += 1
if overdue or due_soon:
ids = self.pool.get('fleet.vehicle.log.contract').search(cr,uid,[('vehicle_id', '=', record.id), ('state', 'in', ('open', 'toclose'))], limit=1, order='expiration_date asc')
if len(ids) > 0:
#we display only the name of the oldest overdue/due soon contract
name=(self.pool.get('fleet.vehicle.log.contract').browse(cr, uid, ids[0], context=context).cost_subtype_id.name)
res[record.id] = {
'contract_renewal_overdue': overdue,
'contract_renewal_due_soon': due_soon,
'contract_renewal_total': (total - 1), #we remove 1 from the real total for display purposes
'contract_renewal_name': name,
}
return res
def _get_default_state(self, cr, uid, context):
try:
model, model_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'fleet', 'vehicle_state_active')
except ValueError:
model_id = False
return model_id
def _count_all(self, cr, uid, ids, field_name, arg, context=None):
Odometer = self.pool['fleet.vehicle.odometer']
LogFuel = self.pool['fleet.vehicle.log.fuel']
LogService = self.pool['fleet.vehicle.log.services']
LogContract = self.pool['fleet.vehicle.log.contract']
Cost = self.pool['fleet.vehicle.cost']
return {
vehicle_id: {
'odometer_count': Odometer.search_count(cr, uid, [('vehicle_id', '=', vehicle_id)], context=context),
'fuel_logs_count': LogFuel.search_count(cr, uid, [('vehicle_id', '=', vehicle_id)], context=context),
'service_count': LogService.search_count(cr, uid, [('vehicle_id', '=', vehicle_id)], context=context),
'contract_count': LogContract.search_count(cr, uid, [('vehicle_id', '=', vehicle_id)], context=context),
'cost_count': Cost.search_count(cr, uid, [('vehicle_id', '=', vehicle_id), ('parent_id', '=', False)], context=context)
}
for vehicle_id in ids
}
_name = 'fleet.vehicle'
_description = 'Information on a vehicle'
_order= 'license_plate asc'
_columns = {
'name': fields.function(_vehicle_name_get_fnc, type="char", string='Name', store=True),
'company_id': fields.many2one('res.company', 'Company'),
'license_plate': fields.char('License Plate', required=True, help='License plate number of the vehicle (ie: plate number for a car)'),
'vin_sn': fields.char('Chassis Number', help='Unique number written on the vehicle motor (VIN/SN number)', copy=False),
'driver_id': fields.many2one('res.partner', 'Driver', help='Driver of the vehicle'),
'model_id': fields.many2one('fleet.vehicle.model', 'Model', required=True, help='Model of the vehicle'),
'log_fuel': fields.one2many('fleet.vehicle.log.fuel', 'vehicle_id', 'Fuel Logs'),
'log_services': fields.one2many('fleet.vehicle.log.services', 'vehicle_id', 'Services Logs'),
'log_contracts': fields.one2many('fleet.vehicle.log.contract', 'vehicle_id', 'Contracts'),
'cost_count': fields.function(_count_all, type='integer', string="Costs" , multi=True),
'contract_count': fields.function(_count_all, type='integer', string='Contracts', multi=True),
'service_count': fields.function(_count_all, type='integer', string='Services', multi=True),
'fuel_logs_count': fields.function(_count_all, type='integer', string='Fuel Logs', multi=True),
'odometer_count': fields.function(_count_all, type='integer', string='Odometer', multi=True),
'acquisition_date': fields.date('Acquisition Date', required=False, help='Date when the vehicle has been bought'),
'color': fields.char('Color', help='Color of the vehicle'),
'state_id': fields.many2one('fleet.vehicle.state', 'State', help='Current state of the vehicle', ondelete="set null"),
'location': fields.char('Location', help='Location of the vehicle (garage, ...)'),
'seats': fields.integer('Seats Number', help='Number of seats of the vehicle'),
'doors': fields.integer('Doors Number', help='Number of doors of the vehicle'),
'tag_ids' :fields.many2many('fleet.vehicle.tag', 'fleet_vehicle_vehicle_tag_rel', 'vehicle_tag_id','tag_id', 'Tags', copy=False),
'odometer': fields.function(_get_odometer, fnct_inv=_set_odometer, type='float', string='Last Odometer', help='Odometer measure of the vehicle at the moment of this log'),
'odometer_unit': fields.selection([('kilometers', 'Kilometers'),('miles','Miles')], 'Odometer Unit', help='Unit of the odometer ',required=True),
'transmission': fields.selection([('manual', 'Manual'), ('automatic', 'Automatic')], 'Transmission', help='Transmission Used by the vehicle'),
'fuel_type': fields.selection([('gasoline', 'Gasoline'), ('diesel', 'Diesel'), ('electric', 'Electric'), ('hybrid', 'Hybrid')], 'Fuel Type', help='Fuel Used by the vehicle'),
'horsepower': fields.integer('Horsepower'),
'horsepower_tax': fields.float('Horsepower Taxation'),
'power': fields.integer('Power', help='Power in kW of the vehicle'),
'co2': fields.float('CO2 Emissions', help='CO2 emissions of the vehicle'),
'image': fields.related('model_id', 'image', type="binary", string="Logo"),
'image_medium': fields.related('model_id', 'image_medium', type="binary", string="Logo (medium)"),
'image_small': fields.related('model_id', 'image_small', type="binary", string="Logo (small)"),
'contract_renewal_due_soon': fields.function(_get_contract_reminder_fnc, fnct_search=_search_contract_renewal_due_soon, type="boolean", string='Has Contracts to renew', multi='contract_info'),
'contract_renewal_overdue': fields.function(_get_contract_reminder_fnc, fnct_search=_search_get_overdue_contract_reminder, type="boolean", string='Has Contracts Overdued', multi='contract_info'),
'contract_renewal_name': fields.function(_get_contract_reminder_fnc, type="text", string='Name of contract to renew soon', multi='contract_info'),
'contract_renewal_total': fields.function(_get_contract_reminder_fnc, type="integer", string='Total of contracts due or overdue minus one', multi='contract_info'),
'car_value': fields.float('Car Value', help='Value of the bought vehicle'),
}
_defaults = {
'doors': 5,
'odometer_unit': 'kilometers',
'state_id': _get_default_state,
}
def on_change_model(self, cr, uid, ids, model_id, context=None):
if not model_id:
return {}
model = self.pool.get('fleet.vehicle.model').browse(cr, uid, model_id, context=context)
return {
'value': {
'image_medium': model.image,
}
}
def create(self, cr, uid, data, context=None):
context = dict(context or {}, mail_create_nolog=True)
vehicle_id = super(fleet_vehicle, self).create(cr, uid, data, context=context)
vehicle = self.browse(cr, uid, vehicle_id, context=context)
self.message_post(cr, uid, [vehicle_id], body=_('%s %s has been added to the fleet!') % (vehicle.model_id.name,vehicle.license_plate), context=context)
return vehicle_id
def write(self, cr, uid, ids, vals, context=None):
"""
        This function writes an entry in the openchatter whenever important information
        on the vehicle changes, such as the model, the driver, the state of the vehicle or its license plate
"""
for vehicle in self.browse(cr, uid, ids, context):
changes = []
if 'model_id' in vals and vehicle.model_id.id != vals['model_id']:
value = self.pool.get('fleet.vehicle.model').browse(cr,uid,vals['model_id'],context=context).name
oldmodel = vehicle.model_id.name or _('None')
changes.append(_("Model: from '%s' to '%s'") %(oldmodel, value))
if 'driver_id' in vals and vehicle.driver_id.id != vals['driver_id']:
value = self.pool.get('res.partner').browse(cr,uid,vals['driver_id'],context=context).name
olddriver = (vehicle.driver_id.name) or _('None')
changes.append(_("Driver: from '%s' to '%s'") %(olddriver, value))
if 'state_id' in vals and vehicle.state_id.id != vals['state_id']:
value = self.pool.get('fleet.vehicle.state').browse(cr,uid,vals['state_id'],context=context).name
oldstate = vehicle.state_id.name or _('None')
changes.append(_("State: from '%s' to '%s'") %(oldstate, value))
if 'license_plate' in vals and vehicle.license_plate != vals['license_plate']:
old_license_plate = vehicle.license_plate or _('None')
changes.append(_("License Plate: from '%s' to '%s'") %(old_license_plate, vals['license_plate']))
if len(changes) > 0:
self.message_post(cr, uid, [vehicle.id], body=", ".join(changes), context=context)
        return super(fleet_vehicle, self).write(cr, uid, ids, vals, context=context)
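    # Illustrative note (an assumption for clarity, not part of the original module):
    # writing {'license_plate': '1-ABC-123'} on a vehicle whose plate was unset posts
    # "License Plate: from 'None' to '1-ABC-123'" in the vehicle's openchatter thread.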
class fleet_vehicle_odometer(osv.Model):
_name='fleet.vehicle.odometer'
_description='Odometer log for a vehicle'
_order='date desc'
def _vehicle_log_name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
res = {}
for record in self.browse(cr, uid, ids, context=context):
name = record.vehicle_id.name
if not name:
name = record.date
elif record.date:
name += ' / '+ record.date
res[record.id] = name
return res
def on_change_vehicle(self, cr, uid, ids, vehicle_id, context=None):
if not vehicle_id:
return {}
odometer_unit = self.pool.get('fleet.vehicle').browse(cr, uid, vehicle_id, context=context).odometer_unit
return {
'value': {
'unit': odometer_unit,
}
}
_columns = {
'name': fields.function(_vehicle_log_name_get_fnc, type="char", string='Name', store=True),
'date': fields.date('Date'),
'value': fields.float('Odometer Value', group_operator="max"),
'vehicle_id': fields.many2one('fleet.vehicle', 'Vehicle', required=True),
'unit': fields.related('vehicle_id', 'odometer_unit', type="char", string="Unit", readonly=True),
}
_defaults = {
'date': fields.date.context_today,
}
class fleet_vehicle_log_fuel(osv.Model):
def on_change_vehicle(self, cr, uid, ids, vehicle_id, context=None):
if not vehicle_id:
return {}
vehicle = self.pool.get('fleet.vehicle').browse(cr, uid, vehicle_id, context=context)
odometer_unit = vehicle.odometer_unit
driver = vehicle.driver_id.id
return {
'value': {
'odometer_unit': odometer_unit,
'purchaser_id': driver,
}
}
def on_change_liter(self, cr, uid, ids, liter, price_per_liter, amount, context=None):
        #need to cast to float because the value received from the web client may be an integer (Javascript and JSON do not
        #make any difference between 3.0 and 3). This causes a problem if you encode, for example, 2 liters at 1.5 per
        #liter => total is computed as 3.0, which then triggers an onchange that recomputes price_per_liter as 3/2=1 (instead
        #of 3.0/2=1.5)
        #If there is no change in the result, we return an empty dict to prevent an infinite loop due to the 3 intertwined
        #onchanges. And in order to verify that there is no change in the result, we have to limit the precision of the
        #computation to 2 decimals
liter = float(liter)
price_per_liter = float(price_per_liter)
amount = float(amount)
if liter > 0 and price_per_liter > 0 and round(liter*price_per_liter,2) != amount:
return {'value' : {'amount' : round(liter * price_per_liter,2),}}
elif amount > 0 and liter > 0 and round(amount/liter,2) != price_per_liter:
return {'value' : {'price_per_liter' : round(amount / liter,2),}}
elif amount > 0 and price_per_liter > 0 and round(amount/price_per_liter,2) != liter:
return {'value' : {'liter' : round(amount / price_per_liter,2),}}
        else:
return {}
def on_change_price_per_liter(self, cr, uid, ids, liter, price_per_liter, amount, context=None):
        #same rationale as in on_change_liter: cast to float and round to 2 decimals to avoid
        #an infinite loop between the 3 intertwined onchanges
liter = float(liter)
price_per_liter = float(price_per_liter)
amount = float(amount)
if liter > 0 and price_per_liter > 0 and round(liter*price_per_liter,2) != amount:
return {'value' : {'amount' : round(liter * price_per_liter,2),}}
elif amount > 0 and price_per_liter > 0 and round(amount/price_per_liter,2) != liter:
return {'value' : {'liter' : round(amount / price_per_liter,2),}}
elif amount > 0 and liter > 0 and round(amount/liter,2) != price_per_liter:
return {'value' : {'price_per_liter' : round(amount / liter,2),}}
        else:
return {}
def on_change_amount(self, cr, uid, ids, liter, price_per_liter, amount, context=None):
        #same rationale as in on_change_liter: cast to float and round to 2 decimals to avoid
        #an infinite loop between the 3 intertwined onchanges
liter = float(liter)
price_per_liter = float(price_per_liter)
amount = float(amount)
if amount > 0 and liter > 0 and round(amount/liter,2) != price_per_liter:
return {'value': {'price_per_liter': round(amount / liter,2),}}
elif amount > 0 and price_per_liter > 0 and round(amount/price_per_liter,2) != liter:
return {'value': {'liter': round(amount / price_per_liter,2),}}
elif liter > 0 and price_per_liter > 0 and round(liter*price_per_liter,2) != amount:
return {'value': {'amount': round(liter * price_per_liter,2),}}
        else:
return {}
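    # Minimal sketch of the loop prevention above (values are illustrative assumptions):
    # encoding 2 liters at 1.5 per liter while amount is still 0 gives
    #   on_change_liter(cr, uid, ids, 2, 1.5, 0)    -> {'value': {'amount': 3.0}}
    # and the onchange triggered by the new amount then finds consistent values:
    #   on_change_amount(cr, uid, ids, 2, 1.5, 3.0) -> {} (the loop stops here)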
def _get_default_service_type(self, cr, uid, context):
try:
model, model_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'fleet', 'type_service_refueling')
except ValueError:
model_id = False
return model_id
_name = 'fleet.vehicle.log.fuel'
_description = 'Fuel log for vehicles'
_inherits = {'fleet.vehicle.cost': 'cost_id'}
_columns = {
'liter': fields.float('Liter'),
'price_per_liter': fields.float('Price Per Liter'),
'purchaser_id': fields.many2one('res.partner', 'Purchaser', domain="['|',('customer','=',True),('employee','=',True)]"),
'inv_ref': fields.char('Invoice Reference', size=64),
'vendor_id': fields.many2one('res.partner', 'Supplier', domain="[('supplier','=',True)]"),
'notes': fields.text('Notes'),
'cost_id': fields.many2one('fleet.vehicle.cost', 'Cost', required=True, ondelete='cascade'),
'cost_amount': fields.related('cost_id', 'amount', string='Amount', type='float', store=True), #we need to keep this field as a related with store=True because the graph view doesn't support (1) to address fields from inherited table and (2) fields that aren't stored in database
}
_defaults = {
'date': fields.date.context_today,
'cost_subtype_id': _get_default_service_type,
'cost_type': 'fuel',
}
class fleet_vehicle_log_services(osv.Model):
def on_change_vehicle(self, cr, uid, ids, vehicle_id, context=None):
if not vehicle_id:
return {}
vehicle = self.pool.get('fleet.vehicle').browse(cr, uid, vehicle_id, context=context)
odometer_unit = vehicle.odometer_unit
driver = vehicle.driver_id.id
return {
'value': {
'odometer_unit': odometer_unit,
'purchaser_id': driver,
}
}
def _get_default_service_type(self, cr, uid, context):
try:
model, model_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'fleet', 'type_service_service_8')
except ValueError:
model_id = False
return model_id
_inherits = {'fleet.vehicle.cost': 'cost_id'}
_name = 'fleet.vehicle.log.services'
_description = 'Services for vehicles'
_columns = {
'purchaser_id': fields.many2one('res.partner', 'Purchaser', domain="['|',('customer','=',True),('employee','=',True)]"),
'inv_ref': fields.char('Invoice Reference'),
'vendor_id': fields.many2one('res.partner', 'Supplier', domain="[('supplier','=',True)]"),
'cost_amount': fields.related('cost_id', 'amount', string='Amount', type='float', store=True), #we need to keep this field as a related with store=True because the graph view doesn't support (1) to address fields from inherited table and (2) fields that aren't stored in database
'notes': fields.text('Notes'),
'cost_id': fields.many2one('fleet.vehicle.cost', 'Cost', required=True, ondelete='cascade'),
}
_defaults = {
'date': fields.date.context_today,
'cost_subtype_id': _get_default_service_type,
'cost_type': 'services'
}
class fleet_service_type(osv.Model):
_name = 'fleet.service.type'
_description = 'Type of services available on a vehicle'
_columns = {
'name': fields.char('Name', required=True, translate=True),
        'category': fields.selection([('contract', 'Contract'), ('service', 'Service'), ('both', 'Both')], 'Category', required=True, help='Choose whether the service refers to contracts, vehicle services or both'),
}
class fleet_vehicle_log_contract(osv.Model):
def scheduler_manage_auto_costs(self, cr, uid, context=None):
        #This method is called by a cron task
        #It creates costs for contracts having the "recurring cost" field set, depending on their frequency
        #For example, if a contract has a recurring cost of 200 with a weekly frequency, this method creates a cost of 200 on the first day of each week, from the date of the last recurring cost in the database to today
        #If the contract does not yet have any recurring costs in the database, the method generates the recurring costs from the start_date to today
        #The created costs are associated to a contract thanks to the many2one field contract_id
        #If the contract has no start_date, no cost will be created, even if the contract has recurring costs
vehicle_cost_obj = self.pool.get('fleet.vehicle.cost')
d = datetime.datetime.strptime(fields.date.context_today(self, cr, uid, context=context), tools.DEFAULT_SERVER_DATE_FORMAT).date()
contract_ids = self.pool.get('fleet.vehicle.log.contract').search(cr, uid, [('state','!=','closed')], offset=0, limit=None, order=None,context=None, count=False)
deltas = {'yearly': relativedelta(years=+1), 'monthly': relativedelta(months=+1), 'weekly': relativedelta(weeks=+1), 'daily': relativedelta(days=+1)}
for contract in self.pool.get('fleet.vehicle.log.contract').browse(cr, uid, contract_ids, context=context):
if not contract.start_date or contract.cost_frequency == 'no':
continue
found = False
last_cost_date = contract.start_date
if contract.generated_cost_ids:
last_autogenerated_cost_id = vehicle_cost_obj.search(cr, uid, ['&', ('contract_id','=',contract.id), ('auto_generated','=',True)], offset=0, limit=1, order='date desc',context=context, count=False)
if last_autogenerated_cost_id:
found = True
last_cost_date = vehicle_cost_obj.browse(cr, uid, last_autogenerated_cost_id[0], context=context).date
startdate = datetime.datetime.strptime(last_cost_date, tools.DEFAULT_SERVER_DATE_FORMAT).date()
if found:
startdate += deltas.get(contract.cost_frequency)
while (startdate <= d) & (startdate <= datetime.datetime.strptime(contract.expiration_date, tools.DEFAULT_SERVER_DATE_FORMAT).date()):
data = {
'amount': contract.cost_generated,
'date': startdate.strftime(tools.DEFAULT_SERVER_DATE_FORMAT),
'vehicle_id': contract.vehicle_id.id,
'cost_subtype_id': contract.cost_subtype_id.id,
'contract_id': contract.id,
'auto_generated': True
}
cost_id = self.pool.get('fleet.vehicle.cost').create(cr, uid, data, context=context)
startdate += deltas.get(contract.cost_frequency)
return True
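    # Worked illustration (values are assumptions, not from the module): for a weekly
    # contract whose last auto-generated cost is dated 2014-01-15, the loop above creates
    # new costs on 2014-01-22, 2014-01-29, ... up to today, bounded by expiration_date.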
def scheduler_manage_contract_expiration(self, cr, uid, context=None):
#This method is called by a cron task
#It manages the state of a contract, possibly by posting a message on the vehicle concerned and updating its status
datetime_today = datetime.datetime.strptime(fields.date.context_today(self, cr, uid, context=context), tools.DEFAULT_SERVER_DATE_FORMAT)
limit_date = (datetime_today + relativedelta(days=+15)).strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
ids = self.search(cr, uid, ['&', ('state', '=', 'open'), ('expiration_date', '<', limit_date)], offset=0, limit=None, order=None, context=context, count=False)
res = {}
for contract in self.browse(cr, uid, ids, context=context):
if contract.vehicle_id.id in res:
res[contract.vehicle_id.id] += 1
else:
res[contract.vehicle_id.id] = 1
for vehicle, value in res.items():
self.pool.get('fleet.vehicle').message_post(cr, uid, vehicle, body=_('%s contract(s) need(s) to be renewed and/or closed!') % (str(value)), context=context)
return self.write(cr, uid, ids, {'state': 'toclose'}, context=context)
def run_scheduler(self, cr, uid, context=None):
self.scheduler_manage_auto_costs(cr, uid, context=context)
self.scheduler_manage_contract_expiration(cr, uid, context=context)
return True
def _vehicle_contract_name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
res = {}
for record in self.browse(cr, uid, ids, context=context):
name = record.vehicle_id.name
if record.cost_subtype_id.name:
name += ' / '+ record.cost_subtype_id.name
if record.date:
name += ' / '+ record.date
res[record.id] = name
return res
def on_change_vehicle(self, cr, uid, ids, vehicle_id, context=None):
if not vehicle_id:
return {}
odometer_unit = self.pool.get('fleet.vehicle').browse(cr, uid, vehicle_id, context=context).odometer_unit
return {
'value': {
'odometer_unit': odometer_unit,
}
}
def compute_next_year_date(self, strdate):
oneyear = datetime.timedelta(days=365)
curdate = str_to_datetime(strdate)
return datetime.datetime.strftime(curdate + oneyear, tools.DEFAULT_SERVER_DATE_FORMAT)
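    # Quick illustration (assumption): compute_next_year_date('2014-01-15') returns
    # '2015-01-15'; the fixed 365-day delta shifts by one day across leap years.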
def on_change_start_date(self, cr, uid, ids, strdate, enddate, context=None):
if (strdate):
return {'value': {'expiration_date': self.compute_next_year_date(strdate),}}
return {}
def get_days_left(self, cr, uid, ids, prop, unknow_none, context=None):
"""return a dict with as value for each contract an integer
if contract is in an open state and is overdue, return 0
if contract is in a closed state, return -1
otherwise return the number of days before the contract expires
"""
res = {}
for record in self.browse(cr, uid, ids, context=context):
if (record.expiration_date and (record.state == 'open' or record.state == 'toclose')):
today = str_to_datetime(time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT))
renew_date = str_to_datetime(record.expiration_date)
diff_time = (renew_date-today).days
res[record.id] = diff_time > 0 and diff_time or 0
else:
res[record.id] = -1
return res
def act_renew_contract(self, cr, uid, ids, context=None):
        assert len(ids) == 1, "This operation should only be done for 1 single contract at a time, as it is supposed to open a window as a result"
for element in self.browse(cr, uid, ids, context=context):
#compute end date
startdate = str_to_datetime(element.start_date)
enddate = str_to_datetime(element.expiration_date)
diffdate = (enddate - startdate)
default = {
'date': fields.date.context_today(self, cr, uid, context=context),
'start_date': datetime.datetime.strftime(str_to_datetime(element.expiration_date) + datetime.timedelta(days=1), tools.DEFAULT_SERVER_DATE_FORMAT),
'expiration_date': datetime.datetime.strftime(enddate + diffdate, tools.DEFAULT_SERVER_DATE_FORMAT),
}
newid = super(fleet_vehicle_log_contract, self).copy(cr, uid, element.id, default, context=context)
mod, modid = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'fleet', 'fleet_vehicle_log_contract_form')
return {
'name':_("Renew Contract"),
'view_mode': 'form',
'view_id': modid,
            'view_type': 'form',
'res_model': 'fleet.vehicle.log.contract',
'type': 'ir.actions.act_window',
'nodestroy': True,
'domain': '[]',
'res_id': newid,
'context': {'active_id':newid},
}
def _get_default_contract_type(self, cr, uid, context=None):
try:
model, model_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'fleet', 'type_contract_leasing')
except ValueError:
model_id = False
return model_id
def on_change_indic_cost(self, cr, uid, ids, cost_ids, context=None):
totalsum = 0.0
for element in cost_ids:
if element and len(element) == 3 and isinstance(element[2], dict):
totalsum += element[2].get('amount', 0.0)
return {
'value': {
'sum_cost': totalsum,
}
}
def _get_sum_cost(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for contract in self.browse(cr, uid, ids, context=context):
totalsum = 0
for cost in contract.cost_ids:
totalsum += cost.amount
res[contract.id] = totalsum
return res
_inherits = {'fleet.vehicle.cost': 'cost_id'}
_name = 'fleet.vehicle.log.contract'
_description = 'Contract information on a vehicle'
_order='state desc,expiration_date'
_columns = {
'name': fields.function(_vehicle_contract_name_get_fnc, type="text", string='Name', store=True),
'start_date': fields.date('Contract Start Date', help='Date when the coverage of the contract begins'),
        'expiration_date': fields.date('Contract Expiration Date', help='Date when the coverage of the contract expires (by default, one year after the start date)'),
'days_left': fields.function(get_days_left, type='integer', string='Warning Date'),
'insurer_id' :fields.many2one('res.partner', 'Supplier'),
        'purchaser_id': fields.many2one('res.partner', 'Contractor', help='Person for whom the contract is signed'),
'ins_ref': fields.char('Contract Reference', size=64, copy=False),
        'state': fields.selection([('open', 'In Progress'), ('toclose', 'To Close'), ('closed', 'Terminated')],
                                  'Status', readonly=True, help='Choose whether the contract is still valid or not',
                                  copy=False),
        'notes': fields.text('Terms and Conditions', help='Write here all supplementary information relative to this contract', copy=False),
'cost_generated': fields.float('Recurring Cost Amount', help="Costs paid at regular intervals, depending on the cost frequency. If the cost frequency is set to unique, the cost will be logged at the start date"),
        'cost_frequency': fields.selection([('no', 'No'), ('daily', 'Daily'), ('weekly', 'Weekly'), ('monthly', 'Monthly'), ('yearly', 'Yearly')], 'Recurring Cost Frequency', help='Frequency of the recurring cost', required=True),
'generated_cost_ids': fields.one2many('fleet.vehicle.cost', 'contract_id', 'Generated Costs'),
'sum_cost': fields.function(_get_sum_cost, type='float', string='Indicative Costs Total'),
'cost_id': fields.many2one('fleet.vehicle.cost', 'Cost', required=True, ondelete='cascade'),
'cost_amount': fields.related('cost_id', 'amount', string='Amount', type='float', store=True), #we need to keep this field as a related with store=True because the graph view doesn't support (1) to address fields from inherited table and (2) fields that aren't stored in database
}
_defaults = {
'purchaser_id': lambda self, cr, uid, ctx: self.pool.get('res.users').browse(cr, uid, uid, context=ctx).partner_id.id or False,
'date': fields.date.context_today,
'start_date': fields.date.context_today,
'state':'open',
'expiration_date': lambda self, cr, uid, ctx: self.compute_next_year_date(fields.date.context_today(self, cr, uid, context=ctx)),
'cost_frequency': 'no',
'cost_subtype_id': _get_default_contract_type,
'cost_type': 'contract',
}
def contract_close(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'closed'}, context=context)
def contract_open(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'open'}, context=context)
class fleet_contract_state(osv.Model):
_name = 'fleet.contract.state'
_description = 'Contains the different possible status of a leasing contract'
_columns = {
'name':fields.char('Contract Status', required=True),
}
|
jeanlinux/calibre | refs/heads/master | src/calibre/devices/edge/__init__.py | 12133432 | |
fafaman/django | refs/heads/master | django/contrib/gis/db/backends/postgis/__init__.py | 12133432 | |
lesina/Hack70 | refs/heads/master | env/lib/python3.5/site-packages/django/conf/locale/ga/__init__.py | 12133432 | |
karrtikr/ete | refs/heads/master | examples/evol/4_branch_models.py | 4 | #!/usr/bin/python
"""
15 Nov 2010
run branches models, contrasting groups of branches over
the rest of the phylogeny.
Test of positive selection and relaxation over them.
"""
__author__ = "Francois-Jose Serra"
__email__ = "[email protected]"
__licence__ = "GPLv3"
__version__ = "0.0"
try:
input = raw_input
except NameError:
pass
from ete3 import EvolTree
from ete3 import NodeStyle
tree = EvolTree ("data/S_example/measuring_S_tree.nw")
tree.link_to_alignment ('data/S_example/alignment_S_measuring_evol.fasta')
print (tree)
print ('Tree and alignment loaded.')
input ('Tree will be marked in order to contrast Gorilla and Chimpanzee as foreground \nspecies.')
marks = ['1', '3', '7']
tree.mark_tree (marks, ['#1'] * 3)
print (tree.write ())
print ('we can easily colorize marked branches')
# display marked branches in orange
for node in tree.traverse ():
if not hasattr (node, 'mark'):
continue
if node.mark == '':
continue
node.img_style = NodeStyle ()
node.img_style ['bgcolor'] = '#ffaa00'
tree.show()
print ('''now running branch models
free branch models, 2 groups of branches, one with Gorilla and
chimp, the other with the rest of the phylogeny
''')
print ('running branch free...')
tree.run_model ('b_free.137')
print ('running branch neut...')
tree.run_model ('b_neut.137')
print ('running M0 (all branches have the same value of omega)...')
tree.run_model ('M0')
input ('''Now we can do comparisons...
Compare first whether we have one or two rates of evolution among the phylogeny.
LRT between b_free and M0 (that is, one or two rates of omega value)
p-value of this comparison is:''')
print (tree.get_most_likely ('b_free.137', 'M0'))
input ('''
Now test if the foreground rate is significantly different from 1
(b_free with a significantly better likelihood than b_neut).
If significantly different and higher than one, we are under
positive selection; if different and lower than 1, we are under
negative selection. And finally, if the models are not significantly different,
we should accept the null hypothesis that the omega value on marked branches is
equal to 1, which would be a signal of relaxation.
p-value for difference in rates between marked branches and the rest:''')
print (tree.get_most_likely ('b_free.137', 'M0'))
print ('p-value representing significance that omega is different from 1:')
print (tree.get_most_likely ('b_free.137', 'b_neut.137'))
print ('value of omega in marked branch (frg branch):')
b_free = tree.get_evol_model ('b_free.137')
print (b_free.branches[1]['w'])
print ('and value of omega for background: ')
print (b_free.branches[2]['w'])
print ('we will now run 2 branch models over this tree, one letting the omega \nvalue of the foreground species be free, and the other fixing it at one.\n')
print ("The End.")
|
gnu-sandhi/gnuradio | refs/heads/master | gr-scigen/python/__init__.py | 7 | #
# Copyright 2008,2009 Free Software Foundation, Inc.
#
# This application is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# The presence of this file turns this directory into a Python package
'''
This is the GNU Radio SCIGEN module. Place your Python package
description here (python/__init__.py).
'''
# ----------------------------------------------------------------
# Temporary workaround for ticket:181 (swig+python problem)
import sys
_RTLD_GLOBAL = 0
try:
from dl import RTLD_GLOBAL as _RTLD_GLOBAL
except ImportError:
try:
from DLFCN import RTLD_GLOBAL as _RTLD_GLOBAL
except ImportError:
pass
if _RTLD_GLOBAL != 0:
_dlopenflags = sys.getdlopenflags()
sys.setdlopenflags(_dlopenflags|_RTLD_GLOBAL)
# ----------------------------------------------------------------
# import any pure python here
from generic import *
#
# ----------------------------------------------------------------
# Tail of workaround
if _RTLD_GLOBAL != 0:
sys.setdlopenflags(_dlopenflags) # Restore original flags
# ----------------------------------------------------------------
|
csachs/openmicroscopy | refs/heads/develop | components/tools/OmeroWeb/test/unit/test_marshal.py | 10 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import pytest
import omero
import omero.clients
from omero.rtypes import rlong, rstring
from omeroweb.webgateway.marshal import shapeMarshal
@pytest.fixture(scope='module')
def default_id():
return TestShapeMarshal.DEFAULT_ID
@pytest.fixture(scope='function', params=[
# OME-XML version of the points
'1,2 2,3 4,5',
# OMERO.insight version of the points
'points[1,2 2,3 4,5] points1[1,2 2,3 4,5] '
'points2[1,2 2,3 4,5] mask[0,0,0]'
])
def basic_polyline(request, default_id):
points = request.param
shape = omero.model.PolylineI()
shape.id = rlong(default_id)
shape.points = rstring(points)
return shape
@pytest.fixture(scope='function', params=[
# OME-XML version of the points
'1.5,2.5 2,3 4.1,5.1',
# OMERO.insight version of the points
'points[1.5,2.5 2,3 4.1,5.1] points1[1.5,2.5 2,3 4.1,5.1] '
'points2[1.5,2.5 2,3 4.1,5.1] mask[0,0,0]'
])
def float_polyline(request, default_id):
points = request.param
shape = omero.model.PolylineI()
shape.id = rlong(default_id)
shape.points = rstring(points)
return shape
@pytest.fixture(scope='function', params=[
# OME-XML version of the points
'1,2 2,3 4,5',
# OMERO.insight version of the points
'points[1,2 2,3 4,5] points1[1,2 2,3 4,5] '
'points2[1,2 2,3 4,5] mask[0,0,0]'
])
def basic_polygon(request, default_id):
points = request.param
shape = omero.model.PolygonI()
shape.id = rlong(default_id)
shape.points = rstring(points)
return shape
@pytest.fixture(scope='function')
def empty_polygon(default_id):
shape = omero.model.PolygonI()
shape.id = rlong(default_id)
shape.points = rstring('')
return shape
class TestShapeMarshal(object):
"""
Tests to ensure that OME-XML model and OMERO.insight shape point
parsing are supported correctly.
"""
DEFAULT_ID = 1L
def assert_polyline(self, marshaled):
assert marshaled['type'] == 'PolyLine'
assert marshaled['id'] == self.DEFAULT_ID
def assert_polygon(self, marshaled):
assert marshaled['type'] == 'Polygon'
assert marshaled['id'] == self.DEFAULT_ID
    def test_polyline_marshal(self, basic_polyline):
marshaled = shapeMarshal(basic_polyline)
self.assert_polyline(marshaled)
assert 'M 1 2 L 2 3 L 4 5' == marshaled['points']
def test_polyline_float_marshal(self, float_polyline):
marshaled = shapeMarshal(float_polyline)
self.assert_polyline(marshaled)
assert 'M 1.5 2.5 L 2 3 L 4.1 5.1' == marshaled['points']
def test_polygon_marshal(self, basic_polygon):
marshaled = shapeMarshal(basic_polygon)
self.assert_polygon(marshaled)
assert 'M 1 2 L 2 3 L 4 5 z' == marshaled['points']
def test_unrecognised_roi_shape_points_string(self, empty_polygon):
marshaled = shapeMarshal(empty_polygon)
assert ' z' == marshaled['points']
|
wrouesnel/ansible | refs/heads/devel | lib/ansible/modules/cloud/packet/packet_sshkey.py | 101 | #!/usr/bin/python
# Copyright 2016 Tomas Karasek <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: packet_sshkey
short_description: Create/delete an SSH key in Packet host.
description:
- Create/delete an SSH key in Packet host.
- API is documented at U(https://www.packet.net/help/api/#page:ssh-keys,header:ssh-keys-ssh-keys-post).
version_added: "2.3"
author: "Tomas Karasek (@t0mk) <[email protected]>"
options:
state:
description:
- Indicate desired state of the target.
default: present
choices: ['present', 'absent']
auth_token:
description:
- Packet api token. You can also supply it in env var C(PACKET_API_TOKEN).
label:
description:
- Label for the key. If you keep it empty, it will be read from key string.
id:
description:
- UUID of the key which you want to remove.
fingerprint:
description:
- Fingerprint of the key which you want to remove.
key:
description:
- Public Key string ({type} {base64 encoded key} {description}).
key_file:
description:
- File with the public key.
requirements:
- "python >= 2.6"
- packet-python
'''
EXAMPLES = '''
# All the examples assume that you have your Packet API token in env var PACKET_API_TOKEN.
# You can also pass the api token in module param auth_token.
- name: create sshkey from string
hosts: localhost
tasks:
packet_sshkey:
key: "{{ lookup('file', 'my_packet_sshkey.pub') }}"
- name: create sshkey from file
hosts: localhost
tasks:
packet_sshkey:
label: key from file
key_file: ~/ff.pub
- name: remove sshkey by id
hosts: localhost
tasks:
packet_sshkey:
state: absent
id: eef49903-7a09-4ca1-af67-4087c29ab5b6
'''
RETURN = '''
changed:
description: True if a sshkey was created or removed.
type: bool
sample: True
returned: always
sshkeys:
    description: Information about sshkeys that were created/removed.
type: list
sample: [
{
"fingerprint": "5c:93:74:7c:ed:07:17:62:28:75:79:23:d6:08:93:46",
"id": "41d61bd8-3342-428b-a09c-e67bdd18a9b7",
"key": "ssh-dss AAAAB3NzaC1kc3MAAACBAIfNT5S0ncP4BBJBYNhNPxFF9lqVhfPeu6SM1LoCocxqDc1AT3zFRi8hjIf6TLZ2AA4FYbcAWxLMhiBxZRVldT9GdBXile78kAK5z3bKTwq152DCqpxwwbaTIggLFhsU8wrfBsPWnDuAxZ0h7mmrCjoLIE3CNLDA/NmV3iB8xMThAAAAFQCStcesSgR1adPORzBxTr7hug92LwAAAIBOProm3Gk+HWedLyE8IfofLaOeRnbBRHAOL4z0SexKkVOnQ/LGN/uDIIPGGBDYTvXgKZT+jbHeulRJ2jKgfSpGKN4JxFQ8uzVH492jEiiUJtT72Ss1dCV4PmyERVIw+f54itihV3z/t25dWgowhb0int8iC/OY3cGodlmYb3wdcQAAAIBuLbB45djZXzUkOTzzcRDIRfhaxo5WipbtEM2B1fuBt2gyrvksPpH/LK6xTjdIIb0CxPu4OCxwJG0aOz5kJoRnOWIXQGhH7VowrJhsqhIc8gN9ErbO5ea8b1L76MNcAotmBDeTUiPw01IJ8MdDxfmcsCslJKgoRKSmQpCwXQtN2g== tomk@hp2",
"label": "mynewkey33"
}
]
returned: always
''' # NOQA
import os
import uuid
from ansible.module_utils.basic import AnsibleModule
HAS_PACKET_SDK = True
try:
import packet
except ImportError:
HAS_PACKET_SDK = False
PACKET_API_TOKEN_ENV_VAR = "PACKET_API_TOKEN"
def serialize_sshkey(sshkey):
sshkey_data = {}
copy_keys = ['id', 'key', 'label', 'fingerprint']
for name in copy_keys:
sshkey_data[name] = getattr(sshkey, name)
return sshkey_data
def is_valid_uuid(myuuid):
try:
val = uuid.UUID(myuuid, version=4)
except ValueError:
return False
return str(val) == myuuid
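# Illustrative check (assumption): the UUID shown in the EXAMPLES section above passes,
# e.g. is_valid_uuid('eef49903-7a09-4ca1-af67-4087c29ab5b6') -> True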
def load_key_string(key_str):
ret_dict = {}
key_str = key_str.strip()
ret_dict['key'] = key_str
cut_key = key_str.split()
if len(cut_key) in [2, 3]:
if len(cut_key) == 3:
ret_dict['label'] = cut_key[2]
else:
raise Exception("Public key %s is in wrong format" % key_str)
return ret_dict
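# Sketch of the parsing above (the key material is a placeholder assumption):
# load_key_string('ssh-rsa AAAAB3Nza... mykey')
# -> {'key': 'ssh-rsa AAAAB3Nza... mykey', 'label': 'mykey'}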
def get_sshkey_selector(module):
key_id = module.params.get('id')
if key_id:
if not is_valid_uuid(key_id):
raise Exception("sshkey ID %s is not valid UUID" % key_id)
selecting_fields = ['label', 'fingerprint', 'id', 'key']
select_dict = {}
for f in selecting_fields:
if module.params.get(f) is not None:
select_dict[f] = module.params.get(f)
if module.params.get('key_file'):
with open(module.params.get('key_file')) as _file:
loaded_key = load_key_string(_file.read())
select_dict['key'] = loaded_key['key']
if module.params.get('label') is None:
if loaded_key.get('label'):
select_dict['label'] = loaded_key['label']
def selector(k):
if 'key' in select_dict:
# if key string is specified, compare only the key strings
return k.key == select_dict['key']
else:
# if key string not specified, all the fields must match
return all([select_dict[f] == getattr(k, f) for f in select_dict])
return selector
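# Illustrative note (assumption): with module params {'label': 'mykey'} the selector
# above matches keys whose label is 'mykey'; as soon as a key string is supplied
# (directly or through key_file), only the key strings are compared.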
def act_on_sshkeys(target_state, module, packet_conn):
selector = get_sshkey_selector(module)
existing_sshkeys = packet_conn.list_ssh_keys()
matching_sshkeys = filter(selector, existing_sshkeys)
changed = False
if target_state == 'present':
if matching_sshkeys == []:
            # there is no key matching the fields from the module call
            # => create the key
newkey = {}
if module.params.get('key_file'):
with open(module.params.get('key_file')) as f:
newkey = load_key_string(f.read())
if module.params.get('key'):
newkey = load_key_string(module.params.get('key'))
if module.params.get('label'):
newkey['label'] = module.params.get('label')
for param in ('label', 'key'):
if param not in newkey:
_msg = ("If you want to ensure a key is present, you must "
"supply both a label and a key string, either in "
"module params, or in a key file. %s is missing"
% param)
raise Exception(_msg)
matching_sshkeys = []
new_key_response = packet_conn.create_ssh_key(
newkey['label'], newkey['key'])
changed = True
matching_sshkeys.append(new_key_response)
else:
        # state is 'absent' => delete matching keys
for k in matching_sshkeys:
try:
k.delete()
changed = True
except Exception as e:
_msg = ("while trying to remove sshkey %s, id %s %s, "
"got error: %s" %
(k.label, k.id, target_state, e))
raise Exception(_msg)
return {
'changed': changed,
'sshkeys': [serialize_sshkey(k) for k in matching_sshkeys]
}
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(choices=['present', 'absent'], default='present'),
auth_token=dict(default=os.environ.get(PACKET_API_TOKEN_ENV_VAR),
no_log=True),
label=dict(type='str', aliases=['name'], default=None),
id=dict(type='str', default=None),
fingerprint=dict(type='str', default=None),
key=dict(type='str', default=None, no_log=True),
key_file=dict(type='path', default=None),
),
mutually_exclusive=[
('label', 'id'),
('label', 'fingerprint'),
('id', 'fingerprint'),
('key', 'fingerprint'),
('key', 'id'),
('key_file', 'key'),
]
)
if not HAS_PACKET_SDK:
module.fail_json(msg='packet required for this module')
if not module.params.get('auth_token'):
_fail_msg = ("if Packet API token is not in environment variable %s, "
"the auth_token parameter is required" %
PACKET_API_TOKEN_ENV_VAR)
module.fail_json(msg=_fail_msg)
auth_token = module.params.get('auth_token')
packet_conn = packet.Manager(auth_token=auth_token)
state = module.params.get('state')
if state in ['present', 'absent']:
try:
module.exit_json(**act_on_sshkeys(state, module, packet_conn))
except Exception as e:
module.fail_json(msg='failed to set sshkey state: %s' % str(e))
else:
module.fail_json(msg='%s is not a valid state for this module' % state)
if __name__ == '__main__':
main()
|
maxkoryukov/headphones | refs/heads/master | lib/certgen.py | 161 | # -*- coding: latin-1 -*-
#
# Copyright (C) Martin Sjögren and AB Strakt 2001, All rights reserved
# Copyright (C) Jean-Paul Calderone 2008, All rights reserved
# This file is licenced under the GNU LESSER GENERAL PUBLIC LICENSE Version 2.1 or later (aka LGPL v2.1)
# Please see LGPL2.1.txt for more information
"""
Certificate generation module.
"""
from OpenSSL import crypto
import time
TYPE_RSA = crypto.TYPE_RSA
TYPE_DSA = crypto.TYPE_DSA
serial = int(time.time())
def createKeyPair(type, bits):
"""
Create a public/private key pair.
Arguments: type - Key type, must be one of TYPE_RSA and TYPE_DSA
bits - Number of bits to use in the key
Returns: The public/private key pair in a PKey object
"""
pkey = crypto.PKey()
pkey.generate_key(type, bits)
return pkey
def createCertRequest(pkey, digest="md5", **name):
"""
Create a certificate request.
Arguments: pkey - The key to associate with the request
digest - Digestion method to use for signing, default is md5
**name - The name of the subject of the request, possible
arguments are:
C - Country name
ST - State or province name
L - Locality name
O - Organization name
OU - Organizational unit name
CN - Common name
emailAddress - E-mail address
Returns: The certificate request in an X509Req object
"""
req = crypto.X509Req()
subj = req.get_subject()
for (key,value) in name.items():
setattr(subj, key, value)
req.set_pubkey(pkey)
req.sign(pkey, digest)
return req
def createCertificate(req, (issuerCert, issuerKey), serial, (notBefore, notAfter), digest="md5"):
"""
Generate a certificate given a certificate request.
    Arguments: req        - Certificate request to use
issuerCert - The certificate of the issuer
issuerKey - The private key of the issuer
serial - Serial number for the certificate
notBefore - Timestamp (relative to now) when the certificate
starts being valid
notAfter - Timestamp (relative to now) when the certificate
stops being valid
digest - Digest method to use for signing, default is md5
Returns: The signed certificate in an X509 object
"""
cert = crypto.X509()
cert.set_serial_number(serial)
cert.gmtime_adj_notBefore(notBefore)
cert.gmtime_adj_notAfter(notAfter)
cert.set_issuer(issuerCert.get_subject())
cert.set_subject(req.get_subject())
cert.set_pubkey(req.get_pubkey())
cert.sign(issuerKey, digest)
return cert
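# Minimal self-signed usage sketch (illustrative only; names and values are assumptions):
# pkey = createKeyPair(TYPE_RSA, 2048)
# req = createCertRequest(pkey, CN='localhost')
# cert = createCertificate(req, (req, pkey), serial, (0, 60*60*24*365))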
|
ksmet1977/pyKS_lib | refs/heads/master | general/fcns.py | 1 | # -*- coding: utf-8 -*-
"""
pylib_KS: general function definitions
Created on Wed Jun 10 15:14:20 2015
@author: kevin.smet
"""
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# explicitely define all variables and functions in current module for export
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
__all__=['setverbosity','linspaceD','stackarrays','dlmread','toarray2D','tostring','a_eq_b','isequalall','getwd', \
'setwd','clear','nargout_str','sprintf',\
'setcustomcolormap',\
'getpythonpath']
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# module imports
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
from .. import *
from .variables import *
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# start module function definitions
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#------------------------------------------------------------------------------
# General functions:
# setverbosity(): change verbosity (0: none, 1: text display, 2: plot display, 3: text+plot)
# linspaceD(): create linearly spaced array
# stackarrays(): stack array (default= vertical)
# dlmread(), dlmwrite(): read and write numeric array data to file
# toarray2D(): convert formatted string to 2D array
# tostring(): convert data array to formatted string
# a_eq_b(): check if arrays a and b are equal (size and elements)
# isequalall(): check if all elements in an array are equally spaced
# getwd(): get working directory (wrapper)
# setwd(): set working directory (wrapper)
# clear(): clear all or specific variables from workspace and spyder shell
# nargout_str(): return requested number of output arguments
# sprintf(): create formatted string for (2D)-input-data
# _stringbuild(): create formated string for 1D input data (helper function for sprintf())
# setcustomcolormap(): create gradual colormap from blue to red
# getpythonpath(): get user defined python paths
#------------------------------------------------------------------------------
def setverbosity(verbosity_=None):
global verbosity
if verbosity_ is None:
verbosity=_verbosity
else:
verbosity = verbosity_
print 'verbosity is set to {:1.0f} ( = {:s} output) '.format(verbosity_,_verbositylegend[verbosity_])
return verbosity
def linspaceD(start,stop,spacing,exact_=1):
"""
Create array with equispaced elements from start to stop.
    exact_ = 1: spacing is enforced, stop may not be part of the array
    exact_ = 0: spacing is approximated to ensure start and stop are part of the range
"""
if exact_==0:
linspace_=np.linspace(start,stop,np.abs(stop-start)/float(spacing)+1.0) #calculates closest fit to fill range
else:
linspace_=np.array([start + (i-1)*spacing+spacing for i in xrange(int(np.abs(stop-start)/float(spacing)+1.0))]) #stop may never be reached
return linspace_
def stackarrays(arraytup,dim_=0):
"""
Stack arrays.
dim_ = 0: vertical (row) dimension
    dim_ = 1: horizontal (col) dimension
dim > 1: Not implemented (raises Exception)
"""
#print arraytup
def stackf(dim_):
if dim_==0:
stackf=np.vstack
elif dim_==1:
stackf=np.hstack
else:
            raise Exception('Array stacking for dim = %1.0f not implemented' %(dim_))
return stackf
stackfcn=stackf(dim_)
for idx in xrange(len(arraytup)):
if idx==0:
stackedarray=np.atleast_2d(arraytup[idx])
else:
stackedarray=stackfcn((stackedarray,np.atleast_2d(arraytup[idx])))
return stackedarray
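# Illustrative example (assumption): stacking two 1D arrays row-wise,
# stackarrays((np.array([1, 2]), np.array([3, 4])), dim_=0)
# -> array([[1, 2],
#           [3, 4]])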
def tostring(array2D,fmstr=_fmstr,sep=_sep,rc_format=_rc_format):
"""
Convert array to formatted string
"""
array2D=np.atleast_2d(array2D)
if rc_format=='c': array2D=np.transpose(array2D)
arraystr_=''
for i in xrange(array2D.shape[0]):
for j in xrange(array2D.shape[1]):
datastr_=fmstr %(array2D[i,j])
#print datastr_
arraystr_=arraystr_ + datastr_ +sep
arraystr_=arraystr_[:-1]+'\n'
return arraystr_
def toarray2D(arraystr_,sep=_sep,rc_format=_rc_format):
"""
Convert formatted data string to array.
"""
array2D=[]
arraylist_=arraystr_.split('\n')
for arrayline_ in arraylist_:
arrayline_=arrayline_.strip()
words = arrayline_.split(sep)
if words!=['']:
numbers = np.atleast_2d(np.array([float(word) for word in words]))
if array2D==[]:
array2D = numbers
elif numbers!=[]:
array2D = stackarrays((array2D,numbers),dim_=0)
array2D=np.atleast_2d(array2D)
if rc_format=='c': array2D=np.transpose(array2D)
return array2D
def dlmwrite(filename,array2D,fidformat=_fidformat_aw,fmstr=_fmstr,sep=_sep,rc_format=_rc_format):
"""
Write array as formatted string to file.
"""
if rc_format=='c': array2D=np.transpose(array2D) #arrange data from row to column format for file output
arraystr_ = tostring(array2D,fmstr,sep)
if fidformat=='r': fidformat='w'
fid=open(filename,fidformat)
fid.write(arraystr_)
fid.close()
def dlmread(filename,rc_format=_rc_format):
"""
Read numeric array from file.
"""
fid=open(filename,'r')
arraystr_=fid.read()
fid.close()
array2D=toarray2D(arraystr_)
if rc_format=='c': array2D=np.transpose(array2D) #arrange data in row format when in column format in file
return array2D
def a_eq_b(a,b):
"""
Check if two arrays are equal in size and element values
"""
if a.size==b.size:
eq=np.sum((np.abs(a-b)<_eps)*1)==a.size
else:
eq=False
return eq
def isequalall(lambdas):
"""
    Check if all elements are equally spaced (the difference between consecutive elements is constant).
"""
return np.unique(np.diff(lambdas)).size==1
def getwd():
"""
Get working directory
"""
import os
return os.getcwd()
def setwd(wd_):
"""
Set working directory
"""
import os
return os.chdir(wd_)
def clear(*args):
"""
    Clears variables from the workspace and the shell of the Spyder application.
    With no input arguments, all variables are cleared.
"""
variablenames = args
gl = globals().copy()
import os
if variablenames == ():
os.system("cls")
for var in gl:
if var[0] == '_': continue
if 'func' in str(globals()[var]): continue
if 'module' in str(globals()[var]): continue
if variablenames != ():
for variablename in variablenames:
if variablename == var:
del globals()[var]
else:
del globals()[var]
return None
def nargout_str(nargout_=None,*args):
"""
return requested number of output arguments
"""
if (nargout_ is None):
return 'None'
elif nargout_==0:
return 'None'
else:
arg_str=args[0]
for i in range(1,nargout_):
arg_str = arg_str + ',' + args[i]
return arg_str
def sprintf(array2D,fmstr=_fmstr,header=None,labels=None,title=None,cch=_cch,rch=_rch,sadjust =_sadjust,rc_format=_rc_format):
"""
    Return the data in array2D as a tabulated, formatted string.
"""
array2D=np.atleast_2d(array2D)
if rc_format=='c': array2D=np.transpose(array2D)
width_=int(fmstr[fmstr.index('%')+1:fmstr.index('.')])
fmstr=fmstr[fmstr.index('%')+1:]
if rch ==-1:
rch = ''
no_lines = 1
else:
no_lines = 0
if title is None:
titlestr_ = ''
else:
titlestr_ = title + ':\n'
if header is None:
headerstr_ =''
else:
headerstr_ = _stringbuild(header,fm_str='s',ssep=cch,width=width_,ssadjust =sadjust) + '\n'
if labels is None:
datastr_ = ''
max_datastr_i = 0
for i in range(0,np.size(array2D,0)):
datastr_i = _stringbuild(array2D[i],fm_str=fmstr,ssep=cch,width=width_,ssadjust =sadjust)
if len(datastr_i) > max_datastr_i:
max_datastr_i = len(datastr_i)
datastr_ = datastr_ + datastr_i + '\n'
else:
maxlabelwidth=0
if isinstance(labels,str):
labels = [labels]
if len(labels)!=np.size(array2D,0):
label = labels[0]
labels = [label +' ' + str(i+1) for i in range(0,np.size(array2D))]
for label in labels:
if np.size(label) > maxlabelwidth:
maxlabelwidth = len(label)
maxlabelwidth = max((width_, maxlabelwidth))
datastr_ = ''
max_datastr_i = 0
for i in range(0,np.size(array2D,0)):
datastr_i = _stringbuild(array2D[i],fm_str=fmstr,ssep=cch,width=width_,ssadjust =sadjust)
len_label_i = maxlabelwidth - len(labels[i])
datastr_i = cch + labels[i] + ' '*len_label_i + datastr_i + '\n'
if len(datastr_i) > max_datastr_i:
max_datastr_i = len(datastr_i)
datastr_ = datastr_ + datastr_i
headerstr_ = cch + ' '*maxlabelwidth + headerstr_
if no_lines==1:
linestr_ = ''
arraystr_ = datastr_
else:
linestr_ = _stringbuild(rch,fm_str = 's',ssep = '',width=1,rep=max_datastr_i,ssadjust = '>')
arraystr_ = linestr_ + '\n' + datastr_ + linestr_ + '\n'
if not(header is None):
arraystr_ = linestr_ + '\n' + headerstr_ + arraystr_
if not(title is None):
arraystr_ = titlestr_ + arraystr_
else:
arraystr_ = arraystr_
labels = ''
return arraystr_[:-1]
def _stringbuild(string_tuple,fm_str='s',ssep=_cch,width=10,rep=1,ssadjust='>'):
"""
    Create a formatted string for 1D input data (helper function for sprintf())
"""
str_f="'"+ssep
str_t='.format('
for i in range(0,np.size(string_tuple)):
if isinstance(string_tuple[i],str):
ssadjust_ = ssadjust +str(width)
else:
ssadjust_= ''
str_f = str_f + '{:' + ssadjust_ + fm_str + '}' + ssep
str_t = str_t + 'string_tuple[' + str(i) + ']' + '*' + str(rep) +','
str_f=str_f + "'"
str_t = str_t[:-1] + ')'
str_ = str_f + str_t
str_ = eval(str_)
return str_
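# Hypothetical usage sketch (not part of the original library):
# print sprintf(np.arange(6.).reshape(2, 3), header=('a', 'b', 'c'),
#               labels=['row'], title='demo')
# prints a titled table with a header line, one labeled row per array row,
# and horizontal rules built from the rch character.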
def setcustomcolormap(N,cmapname='jet'):
"""
Create a custom colormap of N equally spaced colors (for use in plotting).
"""
#import matplotlib.pyplot as plt
#import matplotlib.colors as colors
#import matplotlib.cm as cmx
values=range(N)
jet = plt.get_cmap(cmapname)
cNorm = colors.Normalize(vmin=0, vmax=values[-1])
scalarMap = cmx.ScalarMappable(norm=cNorm, cmap=jet)
colorVals=scalarMap.to_rgba(values[0])
for idx in xrange(N-1):
colorVals = np.vstack((colorVals,scalarMap.to_rgba(values[idx+1])))
if N==1: colorVals=np.array([list(colorVals)])
return colorVals
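# Quick illustration (assumption): setcustomcolormap(3) returns a 3x4 array of RGBA
# rows sampled evenly from the 'jet' colormap, usable as line/marker colors in plots.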
def getpythonpath():
try:
user_paths = os.environ['PYTHONPATH'].split(os.pathsep)
print user_paths
except KeyError:
user_paths = []
return user_paths
#------------------------------------------------------------------------------
|
TOCyna/tabelinha | refs/heads/master | flask/lib/python2.7/linecache.py | 4 | /usr/lib/python2.7/linecache.py |
code-sauce/tensorflow | refs/heads/master | tensorflow/contrib/quantization/python/math_ops.py | 179 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Quantized Math Operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops.gen_math_ops import *
# pylint: enable=unused-import,wildcard-import
|
ptemplier/ansible | refs/heads/devel | lib/ansible/modules/packaging/os/pulp_repo.py | 33 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Joe Adams <@sysadmind>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: pulp_repo
author: "Joe Adams (@sysadmind)"
short_description: Add or remove Pulp repos from a remote host.
description:
- Add or remove Pulp repos from a remote host.
version_added: "2.3"
requirements: []
options:
add_export_distributor:
description:
- Whether or not to add the export distributor to new C(rpm) repositories.
required: false
default: false
feed:
description:
- Upstream feed URL to receive updates from.
required: false
default: null
force_basic_auth:
description:
- httplib2, the library used by the M(uri) module only sends
authentication information when a webservice responds to an initial
request with a 401 status. Since some basic auth services do not
properly send a 401, logins will fail. This option forces the sending of
the Basic authentication header upon initial request.
required: false
choices: [ "yes", "no" ]
default: "no"
importer_ssl_ca_cert:
description:
- CA certificate string used to validate the feed source SSL certificate.
This can be the file content or the path to the file.
required: false
default: null
importer_ssl_client_cert:
description:
- Certificate used as the client certificate when synchronizing the
repository. This is used to communicate authentication information to
the feed source. The value to this option must be the full path to the
certificate. The specified file may be the certificate itself or a
single file containing both the certificate and private key. This can be
the file content or the path to the file.
required: false
default: null
importer_ssl_client_key:
description:
- Private key to the certificate specified in I(importer_ssl_client_cert),
assuming it is not included in the certificate file itself. This can be
the file content or the path to the file.
required: false
default: null
name:
description:
- Name of the repo to add or remove. This correlates to repo-id in Pulp.
required: true
proxy_host:
description:
- Proxy url setting for the pulp repository importer. This is in the
format scheme://host.
required: false
default: null
proxy_port:
description:
- Proxy port setting for the pulp repository importer.
required: false
default: null
publish_distributor:
description:
- Distributor to use when state is C(publish). The default is to
publish all distributors.
required: false
pulp_host:
description:
- URL of the pulp server to connect to.
default: http://127.0.0.1
relative_url:
description:
- Relative URL for the local repository.
required: true
default: null
repo_type:
description:
- Repo plugin type to use (i.e. C(rpm), C(docker)).
default: rpm
serve_http:
description:
- Make the repo available over HTTP.
required: false
default: false
serve_https:
description:
- Make the repo available over HTTPS.
required: false
default: true
state:
description:
- The repo state. A state of C(sync) will queue a sync of the repo.
This is asynchronous but not delayed like a scheduled sync. A state of
C(publish) will use the repository's distributor to publish the content.
required: false
default: present
choices: [ "present", "absent", "sync", "publish" ]
url_password:
description:
- The password for use in HTTP basic authentication to the pulp API.
If the I(url_username) parameter is not specified, the I(url_password)
parameter will not be used.
required: false
url_username:
description:
- The username for use in HTTP basic authentication to the pulp API.
required: false
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be
used on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: [ "yes", "no" ]
wait_for_completion:
description:
- Wait for asynchronous tasks to complete before returning.
required: false
default: 'no'
choices: [ "yes", "no" ]
notes:
- This module can currently only create distributors and importers on rpm
repositories. Contributions to support other repo types are welcome.
'''
EXAMPLES = '''
- name: Create a new repo with name 'my_repo'
pulp_repo:
name: my_repo
relative_url: my/repo
state: present
- name: Create a repo with a feed and a relative URL
pulp_repo:
name: my_centos_updates
repo_type: rpm
feed: http://mirror.centos.org/centos/6/updates/x86_64/
relative_url: centos/6/updates
url_username: admin
url_password: admin
force_basic_auth: yes
state: present
- name: Remove a repo from the pulp server
pulp_repo:
name: my_old_repo
repo_type: rpm
state: absent
'''
RETURN = '''
repo:
description: Name of the repo that the action was performed on.
returned: success
type: string
sample: my_repo
'''
import json
import os
from time import sleep
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.urls import url_argument_spec
class pulp_server(object):
"""
Class to interact with a Pulp server
"""
def __init__(self, module, pulp_host, repo_type, wait_for_completion=False):
self.module = module
self.host = pulp_host
self.repo_type = repo_type
self.repo_cache = dict()
self.wait_for_completion = wait_for_completion
def check_repo_exists(self, repo_id):
try:
self.get_repo_config_by_id(repo_id)
except IndexError:
return False
else:
return True
def compare_repo_distributor_config(self, repo_id, **kwargs):
repo_config = self.get_repo_config_by_id(repo_id)
for distributor in repo_config['distributors']:
for key, value in kwargs.items():
                if distributor['config'][key] != value:
return False
return True
def compare_repo_importer_config(self, repo_id, **kwargs):
repo_config = self.get_repo_config_by_id(repo_id)
for importer in repo_config['importers']:
for key, value in kwargs.items():
if value is not None:
                    if key not in importer['config']:
                        return False
                    if importer['config'][key] != value:
return False
return True
def create_repo(
self,
repo_id,
relative_url,
feed=None,
serve_http=False,
serve_https=True,
proxy_host=None,
proxy_port=None,
ssl_ca_cert=None,
ssl_client_cert=None,
ssl_client_key=None,
add_export_distributor=False
):
url = "%s/pulp/api/v2/repositories/" % self.host
data = dict()
data['id'] = repo_id
data['distributors'] = []
if self.repo_type == 'rpm':
yum_distributor = dict()
yum_distributor['distributor_id'] = "yum_distributor"
yum_distributor['distributor_type_id'] = "yum_distributor"
yum_distributor['auto_publish'] = True
yum_distributor['distributor_config'] = dict()
yum_distributor['distributor_config']['http'] = serve_http
yum_distributor['distributor_config']['https'] = serve_https
yum_distributor['distributor_config']['relative_url'] = relative_url
data['distributors'].append(yum_distributor)
if add_export_distributor:
export_distributor = dict()
export_distributor['distributor_id'] = "export_distributor"
export_distributor['distributor_type_id'] = "export_distributor"
export_distributor['auto_publish'] = False
export_distributor['distributor_config'] = dict()
export_distributor['distributor_config']['http'] = serve_http
export_distributor['distributor_config']['https'] = serve_https
export_distributor['distributor_config']['relative_url'] = relative_url
data['distributors'].append(export_distributor)
data['importer_type_id'] = "yum_importer"
data['importer_config'] = dict()
if feed:
data['importer_config']['feed'] = feed
if proxy_host:
data['importer_config']['proxy_host'] = proxy_host
if proxy_port:
data['importer_config']['proxy_port'] = proxy_port
if ssl_ca_cert:
data['importer_config']['ssl_ca_cert'] = ssl_ca_cert
if ssl_client_cert:
data['importer_config']['ssl_client_cert'] = ssl_client_cert
if ssl_client_key:
data['importer_config']['ssl_client_key'] = ssl_client_key
data['notes'] = {
"_repo-type": "rpm-repo"
}
response, info = fetch_url(
self.module,
url,
data=json.dumps(data),
method='POST')
if info['status'] != 201:
self.module.fail_json(
msg="Failed to create repo.",
status_code=info['status'],
response=info['msg'],
url=url)
else:
return True
def delete_repo(self, repo_id):
url = "%s/pulp/api/v2/repositories/%s/" % (self.host, repo_id)
response, info = fetch_url(self.module, url, data='', method='DELETE')
if info['status'] != 202:
self.module.fail_json(
msg="Failed to delete repo.",
status_code=info['status'],
response=info['msg'],
url=url)
if self.wait_for_completion:
self.verify_tasks_completed(json.load(response))
return True
def get_repo_config_by_id(self, repo_id):
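        # Serve repeat lookups from the cache; the IndexError raised by the
        # empty-list access below doubles as the "repo not found" signal
        # that check_repo_exists() relies on.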
if repo_id not in self.repo_cache.keys():
repo_array = [x for x in self.repo_list if x['id'] == repo_id]
self.repo_cache[repo_id] = repo_array[0]
return self.repo_cache[repo_id]
def publish_repo(self, repo_id, publish_distributor):
url = "%s/pulp/api/v2/repositories/%s/actions/publish/" % (self.host, repo_id)
# If there's no distributor specified, we will publish them all
if publish_distributor is None:
repo_config = self.get_repo_config_by_id(repo_id)
for distributor in repo_config['distributors']:
data = dict()
data['id'] = distributor['id']
response, info = fetch_url(
self.module,
url,
data=json.dumps(data),
method='POST')
if info['status'] != 202:
self.module.fail_json(
msg="Failed to publish the repo.",
status_code=info['status'],
response=info['msg'],
url=url,
distributor=distributor['id'])
else:
data = dict()
data['id'] = publish_distributor
response, info = fetch_url(
self.module,
url,
data=json.dumps(data),
method='POST')
if info['status'] != 202:
self.module.fail_json(
msg="Failed to publish the repo",
status_code=info['status'],
response=info['msg'],
url=url,
distributor=publish_distributor)
if self.wait_for_completion:
self.verify_tasks_completed(json.load(response))
return True
def sync_repo(self, repo_id):
url = "%s/pulp/api/v2/repositories/%s/actions/sync/" % (self.host, repo_id)
response, info = fetch_url(self.module, url, data='', method='POST')
if info['status'] != 202:
self.module.fail_json(
msg="Failed to schedule a sync of the repo.",
status_code=info['status'],
response=info['msg'],
url=url)
if self.wait_for_completion:
self.verify_tasks_completed(json.load(response))
return True
def update_repo_distributor_config(self, repo_id, **kwargs):
url = "%s/pulp/api/v2/repositories/%s/distributors/" % (self.host, repo_id)
repo_config = self.get_repo_config_by_id(repo_id)
for distributor in repo_config['distributors']:
distributor_url = "%s%s/" % (url, distributor['id'])
data = dict()
data['distributor_config'] = dict()
for key, value in kwargs.items():
data['distributor_config'][key] = value
response, info = fetch_url(
self.module,
distributor_url,
data=json.dumps(data),
method='PUT')
if info['status'] != 202:
self.module.fail_json(
                msg="Failed to update the distributor configuration for the repository.",
status_code=info['status'],
response=info['msg'],
url=url)
def update_repo_importer_config(self, repo_id, **kwargs):
url = "%s/pulp/api/v2/repositories/%s/importers/" % (self.host, repo_id)
data = dict()
importer_config = dict()
for key, value in kwargs.items():
if value is not None:
importer_config[key] = value
data['importer_config'] = importer_config
if self.repo_type == 'rpm':
data['importer_type_id'] = "yum_importer"
response, info = fetch_url(
self.module,
url,
data=json.dumps(data),
method='POST')
if info['status'] != 202:
self.module.fail_json(
msg="Failed to set the repo importer configuration",
status_code=info['status'],
response=info['msg'],
importer_config=importer_config,
url=url)
def set_repo_list(self):
url = "%s/pulp/api/v2/repositories/?details=true" % self.host
response, info = fetch_url(self.module, url, method='GET')
if info['status'] != 200:
self.module.fail_json(
msg="Request failed",
status_code=info['status'],
response=info['msg'],
url=url)
self.repo_list = json.load(response)
def verify_tasks_completed(self, response_dict):
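        # Poll each spawned task's _href every 2 seconds until it reports
        # 'finished' or 'error'. There is no timeout, so a task stuck in
        # any other state will block indefinitely.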
for task in response_dict['spawned_tasks']:
task_url = "%s%s" % (self.host, task['_href'])
while True:
response, info = fetch_url(
self.module,
task_url,
data='',
method='GET')
if info['status'] != 200:
self.module.fail_json(
msg="Failed to check async task status.",
status_code=info['status'],
response=info['msg'],
url=task_url)
task_dict = json.load(response)
if task_dict['state'] == 'finished':
return True
if task_dict['state'] == 'error':
self.module.fail_json(msg="Asynchronous task failed to complete.", error=task_dict['error'])
sleep(2)
def main():
argument_spec = url_argument_spec()
argument_spec.update(
add_export_distributor=dict(default=False, type='bool'),
feed=dict(),
importer_ssl_ca_cert=dict(),
importer_ssl_client_cert=dict(),
importer_ssl_client_key=dict(),
name=dict(required=True, aliases=['repo']),
proxy_host=dict(),
proxy_port=dict(),
publish_distributor=dict(),
pulp_host=dict(default="https://127.0.0.1"),
relative_url=dict(),
repo_type=dict(default="rpm"),
serve_http=dict(default=False, type='bool'),
serve_https=dict(default=True, type='bool'),
state=dict(
default="present",
choices=['absent', 'present', 'sync', 'publish']),
wait_for_completion=dict(default=False, type="bool"))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True)
add_export_distributor = module.params['add_export_distributor']
feed = module.params['feed']
importer_ssl_ca_cert = module.params['importer_ssl_ca_cert']
importer_ssl_client_cert = module.params['importer_ssl_client_cert']
importer_ssl_client_key = module.params['importer_ssl_client_key']
proxy_host = module.params['proxy_host']
proxy_port = module.params['proxy_port']
publish_distributor = module.params['publish_distributor']
pulp_host = module.params['pulp_host']
relative_url = module.params['relative_url']
repo = module.params['name']
repo_type = module.params['repo_type']
serve_http = module.params['serve_http']
serve_https = module.params['serve_https']
state = module.params['state']
wait_for_completion = module.params['wait_for_completion']
if (state == 'present') and (not relative_url):
module.fail_json(msg="When state is present, relative_url is required.")
    # Ensure that the importer_ssl_* values contain the certificate contents, not a file path
if importer_ssl_ca_cert is not None:
importer_ssl_ca_cert_file_path = os.path.abspath(importer_ssl_ca_cert)
if os.path.isfile(importer_ssl_ca_cert_file_path):
importer_ssl_ca_cert_file_object = open(importer_ssl_ca_cert_file_path, 'r')
try:
importer_ssl_ca_cert = importer_ssl_ca_cert_file_object.read()
finally:
importer_ssl_ca_cert_file_object.close()
if importer_ssl_client_cert is not None:
importer_ssl_client_cert_file_path = os.path.abspath(importer_ssl_client_cert)
if os.path.isfile(importer_ssl_client_cert_file_path):
importer_ssl_client_cert_file_object = open(importer_ssl_client_cert_file_path, 'r')
try:
importer_ssl_client_cert = importer_ssl_client_cert_file_object.read()
finally:
importer_ssl_client_cert_file_object.close()
if importer_ssl_client_key is not None:
importer_ssl_client_key_file_path = os.path.abspath(importer_ssl_client_key)
if os.path.isfile(importer_ssl_client_key_file_path):
importer_ssl_client_key_file_object = open(importer_ssl_client_key_file_path, 'r')
try:
importer_ssl_client_key = importer_ssl_client_key_file_object.read()
finally:
importer_ssl_client_key_file_object.close()
server = pulp_server(module, pulp_host, repo_type, wait_for_completion=wait_for_completion)
server.set_repo_list()
repo_exists = server.check_repo_exists(repo)
changed = False
if state == 'absent' and repo_exists:
if not module.check_mode:
server.delete_repo(repo)
changed = True
if state == 'sync':
if not repo_exists:
            module.fail_json(msg="Repository was not found. The repository cannot be synced.")
if not module.check_mode:
server.sync_repo(repo)
changed = True
if state == 'publish':
if not repo_exists:
            module.fail_json(msg="Repository was not found. The repository cannot be published.")
if not module.check_mode:
server.publish_repo(repo, publish_distributor)
changed = True
if state == 'present':
if not repo_exists:
if not module.check_mode:
server.create_repo(
repo_id=repo,
relative_url=relative_url,
feed=feed,
serve_http=serve_http,
serve_https=serve_https,
proxy_host=proxy_host,
proxy_port=proxy_port,
ssl_ca_cert=importer_ssl_ca_cert,
ssl_client_cert=importer_ssl_client_cert,
ssl_client_key=importer_ssl_client_key,
add_export_distributor=add_export_distributor)
changed = True
else:
# Check to make sure all the settings are correct
# The importer config gets overwritten on set and not updated, so
# we set the whole config at the same time.
if not server.compare_repo_importer_config(
repo,
feed=feed,
proxy_host=proxy_host,
proxy_port=proxy_port,
ssl_ca_cert=importer_ssl_ca_cert,
ssl_client_cert=importer_ssl_client_cert,
ssl_client_key=importer_ssl_client_key
):
if not module.check_mode:
server.update_repo_importer_config(
repo,
feed=feed,
proxy_host=proxy_host,
proxy_port=proxy_port,
ssl_ca_cert=importer_ssl_ca_cert,
ssl_client_cert=importer_ssl_client_cert,
ssl_client_key=importer_ssl_client_key)
changed = True
if relative_url is not None:
if not server.compare_repo_distributor_config(
repo,
relative_url=relative_url
):
if not module.check_mode:
server.update_repo_distributor_config(
repo,
relative_url=relative_url)
changed = True
if not server.compare_repo_distributor_config(repo, http=serve_http):
if not module.check_mode:
server.update_repo_distributor_config(repo, http=serve_http)
changed = True
if not server.compare_repo_distributor_config(repo, https=serve_https):
if not module.check_mode:
server.update_repo_distributor_config(repo, https=serve_https)
changed = True
module.exit_json(changed=changed, repo=repo)
if __name__ == '__main__':
main()
|
mtdewulf/incubator-airflow | refs/heads/master | tests/dags/test_issue_1225.py | 16 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
DAG designed to test what happens when a DAG with pooled tasks is run
by a BackfillJob.
Addresses issue #1225.
"""
from datetime import datetime
from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import PythonOperator
from airflow.operators.subdag_operator import SubDagOperator
from airflow.utils.trigger_rule import TriggerRule
import time
DEFAULT_DATE = datetime(2016, 1, 1)
default_args = dict(
start_date=DEFAULT_DATE,
owner='airflow')
def fail():
raise ValueError('Expected failure.')
def delayed_fail():
"""
Delayed failure to make sure that processes are running before the error
is raised.
TODO handle more directly (without sleeping)
"""
time.sleep(5)
raise ValueError('Expected failure.')
# DAG tests backfill with pooled tasks
# Previously backfill would queue the task but never run it
dag1 = DAG(dag_id='test_backfill_pooled_task_dag', default_args=default_args)
dag1_task1 = DummyOperator(
task_id='test_backfill_pooled_task',
dag=dag1,
pool='test_backfill_pooled_task_pool',)
# DAG tests depends_on_past dependencies
dag2 = DAG(dag_id='test_depends_on_past', default_args=default_args)
dag2_task1 = DummyOperator(
task_id='test_dop_task',
dag=dag2,
depends_on_past=True,)
# DAG tests that a Dag run that doesn't complete is marked failed
dag3 = DAG(dag_id='test_dagrun_states_fail', default_args=default_args)
dag3_task1 = PythonOperator(
task_id='test_dagrun_fail',
dag=dag3,
python_callable=fail)
dag3_task2 = DummyOperator(
task_id='test_dagrun_succeed',
dag=dag3,)
dag3_task2.set_upstream(dag3_task1)
# DAG tests that a Dag run that completes but has a failure is marked success
dag4 = DAG(dag_id='test_dagrun_states_success', default_args=default_args)
dag4_task1 = PythonOperator(
task_id='test_dagrun_fail',
dag=dag4,
python_callable=fail,
)
dag4_task2 = DummyOperator(
task_id='test_dagrun_succeed',
dag=dag4,
trigger_rule=TriggerRule.ALL_FAILED
)
dag4_task2.set_upstream(dag4_task1)
# DAG tests that a Dag run that completes but has a root failure is marked failed
dag5 = DAG(dag_id='test_dagrun_states_root_fail', default_args=default_args)
dag5_task1 = DummyOperator(
task_id='test_dagrun_succeed',
dag=dag5,
)
dag5_task2 = PythonOperator(
task_id='test_dagrun_fail',
dag=dag5,
python_callable=fail,
)
# DAG tests that a Dag run that is deadlocked with no states is failed
dag6 = DAG(dag_id='test_dagrun_states_deadlock', default_args=default_args)
dag6_task1 = DummyOperator(
task_id='test_depends_on_past',
depends_on_past=True,
dag=dag6,)
dag6_task2 = DummyOperator(
task_id='test_depends_on_past_2',
depends_on_past=True,
dag=dag6,)
dag6_task2.set_upstream(dag6_task1)
# DAG tests that a deadlocked subdag is properly caught
dag7 = DAG(dag_id='test_subdag_deadlock', default_args=default_args)
subdag7 = DAG(dag_id='test_subdag_deadlock.subdag', default_args=default_args)
subdag7_task1 = PythonOperator(
task_id='test_subdag_fail',
dag=subdag7,
python_callable=fail)
subdag7_task2 = DummyOperator(
task_id='test_subdag_dummy_1',
dag=subdag7,)
subdag7_task3 = DummyOperator(
task_id='test_subdag_dummy_2',
dag=subdag7)
dag7_subdag1 = SubDagOperator(
task_id='subdag',
dag=dag7,
subdag=subdag7)
subdag7_task1.set_downstream(subdag7_task2)
subdag7_task2.set_downstream(subdag7_task3)
|
jonparrott/gcloud-python | refs/heads/master | bigquery/tests/unit/test_schema.py | 3 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
class TestSchemaField(unittest.TestCase):
@staticmethod
def _get_target_class():
from google.cloud.bigquery.schema import SchemaField
return SchemaField
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_constructor_defaults(self):
field = self._make_one('test', 'STRING')
self.assertEqual(field._name, 'test')
self.assertEqual(field._field_type, 'STRING')
self.assertEqual(field._mode, 'NULLABLE')
self.assertIsNone(field._description)
self.assertEqual(field._fields, ())
def test_constructor_explicit(self):
field = self._make_one('test', 'STRING', mode='REQUIRED',
description='Testing')
self.assertEqual(field._name, 'test')
self.assertEqual(field._field_type, 'STRING')
self.assertEqual(field._mode, 'REQUIRED')
self.assertEqual(field._description, 'Testing')
self.assertEqual(field._fields, ())
def test_constructor_subfields(self):
sub_field1 = self._make_one('area_code', 'STRING')
sub_field2 = self._make_one('local_number', 'STRING')
field = self._make_one(
'phone_number',
'RECORD',
fields=[sub_field1, sub_field2],
)
self.assertEqual(field._name, 'phone_number')
self.assertEqual(field._field_type, 'RECORD')
self.assertEqual(field._mode, 'NULLABLE')
self.assertIsNone(field._description)
self.assertEqual(len(field._fields), 2)
self.assertIs(field._fields[0], sub_field1)
self.assertIs(field._fields[1], sub_field2)
def test_to_api_repr(self):
field = self._make_one('foo', 'INTEGER', 'NULLABLE')
self.assertEqual(field.to_api_repr(), {
'mode': 'NULLABLE',
'name': 'foo',
'type': 'INTEGER',
'description': None,
})
def test_to_api_repr_with_subfield(self):
subfield = self._make_one('bar', 'INTEGER', 'NULLABLE')
field = self._make_one('foo', 'RECORD', 'REQUIRED', fields=(subfield,))
self.assertEqual(field.to_api_repr(), {
'fields': [{
'mode': 'NULLABLE',
'name': 'bar',
'type': 'INTEGER',
'description': None,
}],
'mode': 'REQUIRED',
'name': 'foo',
'type': 'RECORD',
'description': None,
})
def test_from_api_repr(self):
field = self._get_target_class().from_api_repr({
'fields': [{
'mode': 'nullable',
'name': 'bar',
'type': 'integer',
}],
'mode': 'required',
'description': 'test_description',
'name': 'foo',
'type': 'record',
})
self.assertEqual(field.name, 'foo')
self.assertEqual(field.field_type, 'RECORD')
self.assertEqual(field.mode, 'REQUIRED')
self.assertEqual(field.description, 'test_description')
self.assertEqual(len(field.fields), 1)
self.assertEqual(field.fields[0].name, 'bar')
self.assertEqual(field.fields[0].field_type, 'INTEGER')
self.assertEqual(field.fields[0].mode, 'NULLABLE')
def test_from_api_repr_defaults(self):
field = self._get_target_class().from_api_repr({
'name': 'foo',
'type': 'record',
})
self.assertEqual(field.name, 'foo')
self.assertEqual(field.field_type, 'RECORD')
self.assertEqual(field.mode, 'NULLABLE')
self.assertEqual(field.description, None)
self.assertEqual(len(field.fields), 0)
def test_name_property(self):
name = 'lemon-ness'
schema_field = self._make_one(name, 'INTEGER')
self.assertIs(schema_field.name, name)
def test_field_type_property(self):
field_type = 'BOOLEAN'
schema_field = self._make_one('whether', field_type)
self.assertIs(schema_field.field_type, field_type)
def test_mode_property(self):
mode = 'REPEATED'
schema_field = self._make_one('again', 'FLOAT', mode=mode)
self.assertIs(schema_field.mode, mode)
def test_is_nullable(self):
mode = 'NULLABLE'
schema_field = self._make_one('test', 'FLOAT', mode=mode)
self.assertTrue(schema_field.is_nullable)
def test_is_not_nullable(self):
mode = 'REPEATED'
schema_field = self._make_one('test', 'FLOAT', mode=mode)
self.assertFalse(schema_field.is_nullable)
def test_description_property(self):
description = 'It holds some data.'
schema_field = self._make_one(
'do', 'TIMESTAMP', description=description)
self.assertIs(schema_field.description, description)
def test_fields_property(self):
sub_field1 = self._make_one('one', 'STRING')
sub_field2 = self._make_one('fish', 'INTEGER')
fields = (sub_field1, sub_field2)
schema_field = self._make_one('boat', 'RECORD', fields=fields)
self.assertIs(schema_field.fields, fields)
def test___eq___wrong_type(self):
field = self._make_one('test', 'STRING')
other = object()
self.assertNotEqual(field, other)
self.assertEqual(field, mock.ANY)
def test___eq___name_mismatch(self):
field = self._make_one('test', 'STRING')
other = self._make_one('other', 'STRING')
self.assertNotEqual(field, other)
def test___eq___field_type_mismatch(self):
field = self._make_one('test', 'STRING')
other = self._make_one('test', 'INTEGER')
self.assertNotEqual(field, other)
def test___eq___mode_mismatch(self):
field = self._make_one('test', 'STRING', mode='REQUIRED')
other = self._make_one('test', 'STRING', mode='NULLABLE')
self.assertNotEqual(field, other)
def test___eq___description_mismatch(self):
field = self._make_one('test', 'STRING', description='Testing')
other = self._make_one('test', 'STRING', description='Other')
self.assertNotEqual(field, other)
def test___eq___fields_mismatch(self):
sub1 = self._make_one('sub1', 'STRING')
sub2 = self._make_one('sub2', 'STRING')
field = self._make_one('test', 'RECORD', fields=[sub1])
other = self._make_one('test', 'RECORD', fields=[sub2])
self.assertNotEqual(field, other)
def test___eq___hit(self):
field = self._make_one('test', 'STRING', mode='REQUIRED',
description='Testing')
other = self._make_one('test', 'STRING', mode='REQUIRED',
description='Testing')
self.assertEqual(field, other)
def test___eq___hit_case_diff_on_type(self):
field = self._make_one('test', 'STRING', mode='REQUIRED',
description='Testing')
other = self._make_one('test', 'string', mode='REQUIRED',
description='Testing')
self.assertEqual(field, other)
def test___eq___hit_w_fields(self):
sub1 = self._make_one('sub1', 'STRING')
sub2 = self._make_one('sub2', 'STRING')
field = self._make_one('test', 'RECORD', fields=[sub1, sub2])
other = self._make_one('test', 'RECORD', fields=[sub1, sub2])
self.assertEqual(field, other)
def test___ne___wrong_type(self):
field = self._make_one('toast', 'INTEGER')
other = object()
self.assertNotEqual(field, other)
self.assertEqual(field, mock.ANY)
def test___ne___same_value(self):
field1 = self._make_one('test', 'TIMESTAMP', mode='REPEATED')
field2 = self._make_one('test', 'TIMESTAMP', mode='REPEATED')
# unittest ``assertEqual`` uses ``==`` not ``!=``.
comparison_val = (field1 != field2)
self.assertFalse(comparison_val)
def test___ne___different_values(self):
field1 = self._make_one(
'test1', 'FLOAT', mode='REPEATED', description='Not same')
field2 = self._make_one(
'test2', 'FLOAT', mode='NULLABLE', description='Knot saym')
self.assertNotEqual(field1, field2)
def test___hash__set_equality(self):
sub1 = self._make_one('sub1', 'STRING')
sub2 = self._make_one('sub2', 'STRING')
field1 = self._make_one('test', 'RECORD', fields=[sub1])
field2 = self._make_one('test', 'RECORD', fields=[sub2])
set_one = {field1, field2}
set_two = {field1, field2}
self.assertEqual(set_one, set_two)
def test___hash__not_equals(self):
sub1 = self._make_one('sub1', 'STRING')
sub2 = self._make_one('sub2', 'STRING')
field1 = self._make_one('test', 'RECORD', fields=[sub1])
field2 = self._make_one('test', 'RECORD', fields=[sub2])
set_one = {field1}
set_two = {field2}
self.assertNotEqual(set_one, set_two)
def test___repr__(self):
field1 = self._make_one('field1', 'STRING')
expected = "SchemaField('field1', 'STRING', 'NULLABLE', None, ())"
self.assertEqual(repr(field1), expected)
# TODO: dedup with the same class in test_table.py.
class _SchemaBase(object):
def _verify_field(self, field, r_field):
self.assertEqual(field.name, r_field['name'])
self.assertEqual(field.field_type, r_field['type'])
self.assertEqual(field.mode, r_field.get('mode', 'NULLABLE'))
def _verifySchema(self, schema, resource):
r_fields = resource['schema']['fields']
self.assertEqual(len(schema), len(r_fields))
for field, r_field in zip(schema, r_fields):
self._verify_field(field, r_field)
class Test_parse_schema_resource(unittest.TestCase, _SchemaBase):
def _call_fut(self, resource):
from google.cloud.bigquery.schema import _parse_schema_resource
return _parse_schema_resource(resource)
def _make_resource(self):
return {
'schema': {'fields': [
{'name': 'full_name', 'type': 'STRING', 'mode': 'REQUIRED'},
{'name': 'age', 'type': 'INTEGER', 'mode': 'REQUIRED'},
]},
}
def test__parse_schema_resource_defaults(self):
RESOURCE = self._make_resource()
schema = self._call_fut(RESOURCE['schema'])
self._verifySchema(schema, RESOURCE)
def test__parse_schema_resource_subfields(self):
RESOURCE = self._make_resource()
RESOURCE['schema']['fields'].append(
{'name': 'phone',
'type': 'RECORD',
'mode': 'REPEATED',
'fields': [{'name': 'type',
'type': 'STRING',
'mode': 'REQUIRED'},
{'name': 'number',
'type': 'STRING',
'mode': 'REQUIRED'}]})
schema = self._call_fut(RESOURCE['schema'])
self._verifySchema(schema, RESOURCE)
def test__parse_schema_resource_fields_without_mode(self):
RESOURCE = self._make_resource()
RESOURCE['schema']['fields'].append(
{'name': 'phone',
'type': 'STRING'})
schema = self._call_fut(RESOURCE['schema'])
self._verifySchema(schema, RESOURCE)
class Test_build_schema_resource(unittest.TestCase, _SchemaBase):
def _call_fut(self, resource):
from google.cloud.bigquery.schema import _build_schema_resource
return _build_schema_resource(resource)
def test_defaults(self):
from google.cloud.bigquery.schema import SchemaField
full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
age = SchemaField('age', 'INTEGER', mode='REQUIRED')
resource = self._call_fut([full_name, age])
self.assertEqual(len(resource), 2)
self.assertEqual(resource[0],
{'name': 'full_name',
'type': 'STRING',
'mode': 'REQUIRED',
'description': None})
self.assertEqual(resource[1],
{'name': 'age',
'type': 'INTEGER',
'mode': 'REQUIRED',
'description': None})
def test_w_description(self):
from google.cloud.bigquery.schema import SchemaField
DESCRIPTION = 'DESCRIPTION'
full_name = SchemaField('full_name', 'STRING', mode='REQUIRED',
description=DESCRIPTION)
age = SchemaField('age', 'INTEGER', mode='REQUIRED')
resource = self._call_fut([full_name, age])
self.assertEqual(len(resource), 2)
self.assertEqual(resource[0],
{'name': 'full_name',
'type': 'STRING',
'mode': 'REQUIRED',
'description': DESCRIPTION})
self.assertEqual(resource[1],
{'name': 'age',
'type': 'INTEGER',
'mode': 'REQUIRED',
'description': None})
def test_w_subfields(self):
from google.cloud.bigquery.schema import SchemaField
full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
ph_type = SchemaField('type', 'STRING', 'REQUIRED')
ph_num = SchemaField('number', 'STRING', 'REQUIRED')
phone = SchemaField('phone', 'RECORD', mode='REPEATED',
fields=[ph_type, ph_num])
resource = self._call_fut([full_name, phone])
self.assertEqual(len(resource), 2)
self.assertEqual(resource[0],
{'name': 'full_name',
'type': 'STRING',
'mode': 'REQUIRED',
'description': None})
self.assertEqual(resource[1],
{'name': 'phone',
'type': 'RECORD',
'mode': 'REPEATED',
'description': None,
'fields': [{'name': 'type',
'type': 'STRING',
'mode': 'REQUIRED',
'description': None},
{'name': 'number',
'type': 'STRING',
'mode': 'REQUIRED',
'description': None}]})
|
prune998/ansible | refs/heads/devel | lib/ansible/plugins/lookup/credstash.py | 131 | # (c) 2015, Ensighten <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
CREDSTASH_INSTALLED = False
try:
import credstash
CREDSTASH_INSTALLED = True
except ImportError:
CREDSTASH_INSTALLED = False
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
if not CREDSTASH_INSTALLED:
raise AnsibleError('The credstash lookup plugin requires credstash to be installed.')
ret = []
for term in terms:
try:
version = kwargs.pop('version', '')
region = kwargs.pop('region', None)
table = kwargs.pop('table', 'credential-store')
val = credstash.getSecret(term, version, region, table,
context=kwargs)
except credstash.ItemNotFound:
raise AnsibleError('Key {0} not found'.format(term))
except Exception as e:
raise AnsibleError('Encountered exception while fetching {0}: {1}'.format(term, e.message))
ret.append(val)
return ret
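# Illustrative playbook usage (secret name and region are examples; any
# extra kwargs are passed to credstash as the encryption context):
#
#   - debug: msg="{{ lookup('credstash', 'my-password', region='us-east-1') }}"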
|
chirilo/mozillians | refs/heads/master | vendor-local/lib/python/tablib/packages/yaml3/serializer.py | 293 |
__all__ = ['Serializer', 'SerializerError']
from .error import YAMLError
from .events import *
from .nodes import *
class SerializerError(YAMLError):
pass
class Serializer:
ANCHOR_TEMPLATE = 'id%03d'
def __init__(self, encoding=None,
explicit_start=None, explicit_end=None, version=None, tags=None):
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
self.use_version = version
self.use_tags = tags
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
self.closed = None
def open(self):
if self.closed is None:
self.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
elif self.closed:
raise SerializerError("serializer is closed")
else:
raise SerializerError("serializer is already opened")
def close(self):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif not self.closed:
self.emit(StreamEndEvent())
self.closed = True
#def __del__(self):
# self.close()
def serialize(self, node):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif self.closed:
raise SerializerError("serializer is closed")
self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
version=self.use_version, tags=self.use_tags))
self.anchor_node(node)
self.serialize_node(node, None, None)
self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
def anchor_node(self, node):
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
else:
self.anchors[node] = None
if isinstance(node, SequenceNode):
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
for key, value in node.value:
self.anchor_node(key)
self.anchor_node(value)
def generate_anchor(self, node):
self.last_anchor_id += 1
return self.ANCHOR_TEMPLATE % self.last_anchor_id
def serialize_node(self, node, parent, index):
alias = self.anchors[node]
if node in self.serialized_nodes:
self.emit(AliasEvent(alias))
else:
self.serialized_nodes[node] = True
self.descend_resolver(parent, index)
if isinstance(node, ScalarNode):
detected_tag = self.resolve(ScalarNode, node.value, (True, False))
default_tag = self.resolve(ScalarNode, node.value, (False, True))
implicit = (node.tag == detected_tag), (node.tag == default_tag)
self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
style=node.style))
elif isinstance(node, SequenceNode):
implicit = (node.tag
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
index = 0
for item in node.value:
self.serialize_node(item, node, index)
index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
== self.resolve(MappingNode, node.value, True))
self.emit(MappingStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
for key, value in node.value:
self.serialize_node(key, node, None)
self.serialize_node(value, node, key)
self.emit(MappingEndEvent())
self.ascend_resolver()
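# Illustrative lifecycle sketch: a Serializer is normally mixed into a
# Dumper class alongside an Emitter, Representer and Resolver, so these
# calls are usually made for you by yaml.dump():
#
#   serializer.open()            # emits StreamStartEvent
#   serializer.serialize(node)   # wraps the node in document start/end events
#   serializer.close()           # emits StreamEndEvent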
|
josthkko/ggrc-core | refs/heads/develop | src/ggrc/models/object_person.py | 6 | # Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy import orm
from ggrc import db
from ggrc.models.deferred import deferred
from ggrc.models.mixins import Mapping, Timeboxed
from ggrc.models.reflection import PublishOnly
class ObjectPerson(Timeboxed, Mapping, db.Model):
__tablename__ = 'object_people'
role = deferred(db.Column(db.String), 'ObjectPerson')
notes = deferred(db.Column(db.Text), 'ObjectPerson')
person_id = db.Column(db.Integer, db.ForeignKey('people.id'), nullable=False)
personable_id = db.Column(db.Integer, nullable=False)
personable_type = db.Column(db.String, nullable=False)
@property
def personable_attr(self):
return '{0}_personable'.format(self.personable_type)
@property
def personable(self):
return getattr(self, self.personable_attr)
@personable.setter
def personable(self, value):
self.personable_id = value.id if value is not None else None
self.personable_type = value.__class__.__name__ if value is not None \
else None
return setattr(self, self.personable_attr, value)
@staticmethod
def _extra_table_args(cls):
return (
db.UniqueConstraint('person_id', 'personable_id', 'personable_type'),
db.Index('ix_person_id', 'person_id'),
)
_publish_attrs = [
'role',
'notes',
'person',
'personable',
]
_sanitize_html = [
'notes',
]
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(ObjectPerson, cls).eager_query()
return query.options(
orm.subqueryload('person'))
def _display_name(self):
return self.personable.display_name + '<->' + self.person.display_name
class Personable(object):
@declared_attr
def object_people(cls):
cls.people = association_proxy(
'object_people', 'person',
creator=lambda person: ObjectPerson(
person=person,
personable_type=cls.__name__,
)
)
joinstr = 'and_(foreign(ObjectPerson.personable_id) == {type}.id, '\
'foreign(ObjectPerson.personable_type) == "{type}")'
joinstr = joinstr.format(type=cls.__name__)
return db.relationship(
'ObjectPerson',
primaryjoin=joinstr,
backref='{0}_personable'.format(cls.__name__),
cascade='all, delete-orphan',
)
_publish_attrs = [
PublishOnly('people'),
'object_people',
]
_include_links = []
@classmethod
def eager_query(cls):
query = super(Personable, cls).eager_query()
return cls.eager_inclusions(query, Personable._include_links).options(
orm.subqueryload('object_people'))
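# Illustrative sketch (Program is a hypothetical model): mixing Personable
# into a db.Model adds the object_people relationship plus a people
# association proxy, so appending a Person creates the mapping row:
#
#   class Program(Personable, db.Model):
#       __tablename__ = 'programs'
#
#   program.people.append(some_person)   # creates an ObjectPerson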
|
skirsdeda/django | refs/heads/master | django/views/decorators/http.py | 18 | """
Decorators for views based on HTTP headers.
"""
import logging
from calendar import timegm
from functools import wraps
from django.utils.decorators import decorator_from_middleware, available_attrs
from django.utils.http import http_date, parse_http_date_safe, parse_etags, quote_etag
from django.middleware.http import ConditionalGetMiddleware
from django.http import HttpResponseNotAllowed, HttpResponseNotModified, HttpResponse
conditional_page = decorator_from_middleware(ConditionalGetMiddleware)
logger = logging.getLogger('django.request')
def require_http_methods(request_method_list):
"""
Decorator to make a view only accept particular request methods. Usage::
@require_http_methods(["GET", "POST"])
def my_view(request):
# I can assume now that only GET or POST requests make it this far
# ...
Note that request methods should be in uppercase.
"""
def decorator(func):
@wraps(func, assigned=available_attrs(func))
def inner(request, *args, **kwargs):
if request.method not in request_method_list:
logger.warning('Method Not Allowed (%s): %s', request.method, request.path,
extra={
'status_code': 405,
'request': request
}
)
return HttpResponseNotAllowed(request_method_list)
return func(request, *args, **kwargs)
return inner
return decorator
require_GET = require_http_methods(["GET"])
require_GET.__doc__ = "Decorator to require that a view only accept the GET method."
require_POST = require_http_methods(["POST"])
require_POST.__doc__ = "Decorator to require that a view only accept the POST method."
require_safe = require_http_methods(["GET", "HEAD"])
require_safe.__doc__ = "Decorator to require that a view only accept safe methods: GET and HEAD."
def condition(etag_func=None, last_modified_func=None):
"""
Decorator to support conditional retrieval (or change) for a view
function.
The parameters are callables to compute the ETag and last modified time for
the requested resource, respectively. The callables are passed the same
    parameters as the view itself. The ETag function should return a string (or
None if the resource doesn't exist), whilst the last_modified function
should return a datetime object (or None if the resource doesn't exist).
If both parameters are provided, all the preconditions must be met before
the view is processed.
This decorator will either pass control to the wrapped view function or
return an HTTP 304 response (unmodified) or 412 response (preconditions
failed), depending upon the request method.
Any behavior marked as "undefined" in the HTTP spec (e.g. If-none-match
plus If-modified-since headers) will result in the view function being
called.
"""
def decorator(func):
@wraps(func, assigned=available_attrs(func))
def inner(request, *args, **kwargs):
# Get HTTP request headers
if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE")
if if_modified_since:
if_modified_since = parse_http_date_safe(if_modified_since)
if_none_match = request.META.get("HTTP_IF_NONE_MATCH")
if_match = request.META.get("HTTP_IF_MATCH")
if if_none_match or if_match:
# There can be more than one ETag in the request, so we
# consider the list of values.
try:
etags = parse_etags(if_none_match or if_match)
except ValueError:
# In case of invalid etag ignore all ETag headers.
# Apparently Opera sends invalidly quoted headers at times
# (we should be returning a 400 response, but that's a
# little extreme) -- this is Django bug #10681.
if_none_match = None
if_match = None
# Compute values (if any) for the requested resource.
if etag_func:
res_etag = etag_func(request, *args, **kwargs)
else:
res_etag = None
if last_modified_func:
dt = last_modified_func(request, *args, **kwargs)
if dt:
res_last_modified = timegm(dt.utctimetuple())
else:
res_last_modified = None
else:
res_last_modified = None
response = None
if not ((if_match and (if_modified_since or if_none_match)) or
(if_match and if_none_match)):
# We only get here if no undefined combinations of headers are
# specified.
if ((if_none_match and (res_etag in etags or
"*" in etags and res_etag)) and
(not if_modified_since or
(res_last_modified and if_modified_since and
res_last_modified <= if_modified_since))):
if request.method in ("GET", "HEAD"):
response = HttpResponseNotModified()
else:
logger.warning('Precondition Failed: %s', request.path,
extra={
'status_code': 412,
'request': request
}
)
response = HttpResponse(status=412)
elif if_match and ((not res_etag and "*" in etags) or
(res_etag and res_etag not in etags)):
logger.warning('Precondition Failed: %s', request.path,
extra={
'status_code': 412,
'request': request
}
)
response = HttpResponse(status=412)
elif (not if_none_match and request.method in ("GET", "HEAD") and
res_last_modified and if_modified_since and
res_last_modified <= if_modified_since):
response = HttpResponseNotModified()
if response is None:
response = func(request, *args, **kwargs)
# Set relevant headers on the response if they don't already exist.
if res_last_modified and not response.has_header('Last-Modified'):
response['Last-Modified'] = http_date(res_last_modified)
if res_etag and not response.has_header('ETag'):
response['ETag'] = quote_etag(res_etag)
return response
return inner
return decorator
# Shortcut decorators for common cases based on ETag or Last-Modified only
def etag(etag_func):
return condition(etag_func=etag_func)
def last_modified(last_modified_func):
return condition(last_modified_func=last_modified_func)
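# Illustrative usage sketch (Entry and the views are hypothetical): the
# condition callables receive the same arguments as the view and may
# return None when the resource does not exist.
#
#   def latest_entry(request, blog_id):
#       return Entry.objects.filter(blog=blog_id).latest("published").published
#
#   @condition(last_modified_func=latest_entry)
#   def front_page(request, blog_id):
#       ...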
|
alexanderfefelov/nav | refs/heads/master | python/nav/web/seeddb/page/netboxtype.py | 1 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2011 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License version 2 as published by the Free
# Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
from ..forms import NetboxTypeFilterForm, NetboxTypeForm
from nav.models.manage import NetboxType
from nav.bulkparse import NetboxTypeBulkParser
from nav.bulkimport import NetboxTypeImporter
from nav.web.seeddb import SeeddbInfo, reverse_lazy
from nav.web.seeddb.constants import SEEDDB_EDITABLE_MODELS
from nav.web.seeddb.page import view_switcher, not_implemented
from nav.web.seeddb.utils.list import render_list
from nav.web.seeddb.utils.edit import render_edit
from nav.web.seeddb.utils.bulk import render_bulkimport
from nav.web.seeddb.utils.delete import render_delete
class NetboxTypeInfo(SeeddbInfo):
active = {'type': True}
caption = 'Types'
tab_template = 'seeddb/tabs_generic.html'
_title = 'Types'
_navpath = [('Types', reverse_lazy('seeddb-type'))]
hide_move = True
delete_url = reverse_lazy('seeddb-type')
back_url = reverse_lazy('seeddb-type')
add_url = reverse_lazy('seeddb-type-edit')
bulk_url = reverse_lazy('seeddb-type-bulk')
def netboxtype(request):
return view_switcher(request,
list_view=netboxtype_list,
move_view=not_implemented,
delete_view=netboxtype_delete)
def netboxtype_list(request):
info = NetboxTypeInfo()
query = NetboxType.objects.all()
filter_form = NetboxTypeFilterForm(request.GET)
value_list = (
'name', 'vendor', 'description', 'sysobjectid')
return render_list(request, query, value_list, 'seeddb-type-edit',
filter_form=filter_form,
extra_context=info.template_context)
def netboxtype_delete(request):
info = NetboxTypeInfo()
return render_delete(request, NetboxType, 'seeddb-type',
whitelist=SEEDDB_EDITABLE_MODELS,
extra_context=info.template_context)
def netboxtype_edit(request, type_id=None):
info = NetboxTypeInfo()
return render_edit(request, NetboxType, NetboxTypeForm, type_id,
'seeddb-type-edit',
extra_context=info.template_context)
def netboxtype_bulk(request):
info = NetboxTypeInfo()
return render_bulkimport(
request, NetboxTypeBulkParser, NetboxTypeImporter,
'seeddb-type',
extra_context=info.template_context)
|
kaarolch/ansible | refs/heads/devel | lib/ansible/modules/windows/win_webpicmd.py | 23 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Peter Mounce <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: win_webpicmd
version_added: "2.0"
short_description: Installs packages using Web Platform Installer command-line
description:
- Installs packages using Web Platform Installer command-line (http://www.iis.net/learn/install/web-platform-installer/web-platform-installer-v4-command-line-webpicmdexe-rtw-release).
- Must be installed and present in PATH (see win_chocolatey module; 'webpicmd' is the package name, and you must install 'lessmsi' first too)
- Install IIS first (see win_feature module)
notes:
    - accepts EULAs and suppresses reboot - you will need to check for and manage reboots yourself (see the win_reboot module)
options:
name:
description:
- Name of the package to be installed
required: true
author: Peter Mounce
'''
EXAMPLES = '''
# Install URLRewrite2.
win_webpicmd:
name: URLRewrite2
'''
|
paplorinc/intellij-community | refs/heads/master | python/testData/inspections/PyUnresolvedReferencesInspection/metaClassMembers.py | 79 | class GenericMeta(type):
def __getitem__(self, args):
pass
class Generic(object):
__metaclass__ = GenericMeta
class C(Generic['foo']):
pass
print(C['bar'])
c = C()
print(c<warning descr="Class 'C' does not define '__getitem__', so the '[]' operator cannot be used on its instances">[</warning>'baz'])
|
zhaogaolong/oneFinger | refs/heads/master | storage/ceph/__init__.py | 17 | #!/usr/bin/env python
# coding:utf8 |
freeflightsim/ffs-app-engine | refs/heads/master | google_appengine/lib/yaml/lib/yaml/events.py | 986 |
# Abstract classes.
class Event(object):
def __init__(self, start_mark=None, end_mark=None):
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
if hasattr(self, key)]
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
return '%s(%s)' % (self.__class__.__name__, arguments)
class NodeEvent(Event):
def __init__(self, anchor, start_mark=None, end_mark=None):
self.anchor = anchor
self.start_mark = start_mark
self.end_mark = end_mark
class CollectionStartEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
flow_style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.start_mark = start_mark
self.end_mark = end_mark
self.flow_style = flow_style
class CollectionEndEvent(Event):
pass
# Implementations.
class StreamStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None, encoding=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.encoding = encoding
class StreamEndEvent(Event):
pass
class DocumentStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None, version=None, tags=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
self.version = version
self.tags = tags
class DocumentEndEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
class AliasEvent(NodeEvent):
pass
class ScalarEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, value,
start_mark=None, end_mark=None, style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style
class SequenceStartEvent(CollectionStartEvent):
pass
class SequenceEndEvent(CollectionEndEvent):
pass
class MappingStartEvent(CollectionStartEvent):
pass
class MappingEndEvent(CollectionEndEvent):
pass
|
pratikmallya/hue | refs/heads/master | desktop/core/ext-py/PyYAML-3.09/lib3/yaml/events.py | 986 |
# Abstract classes.
class Event(object):
def __init__(self, start_mark=None, end_mark=None):
self.start_mark = start_mark
self.end_mark = end_mark
def __repr__(self):
attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
if hasattr(self, key)]
arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
for key in attributes])
return '%s(%s)' % (self.__class__.__name__, arguments)
class NodeEvent(Event):
def __init__(self, anchor, start_mark=None, end_mark=None):
self.anchor = anchor
self.start_mark = start_mark
self.end_mark = end_mark
class CollectionStartEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
flow_style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.start_mark = start_mark
self.end_mark = end_mark
self.flow_style = flow_style
class CollectionEndEvent(Event):
pass
# Implementations.
class StreamStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None, encoding=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.encoding = encoding
class StreamEndEvent(Event):
pass
class DocumentStartEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None, version=None, tags=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
self.version = version
self.tags = tags
class DocumentEndEvent(Event):
def __init__(self, start_mark=None, end_mark=None,
explicit=None):
self.start_mark = start_mark
self.end_mark = end_mark
self.explicit = explicit
class AliasEvent(NodeEvent):
pass
class ScalarEvent(NodeEvent):
def __init__(self, anchor, tag, implicit, value,
start_mark=None, end_mark=None, style=None):
self.anchor = anchor
self.tag = tag
self.implicit = implicit
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.style = style
class SequenceStartEvent(CollectionStartEvent):
pass
class SequenceEndEvent(CollectionEndEvent):
pass
class MappingStartEvent(CollectionStartEvent):
pass
class MappingEndEvent(CollectionEndEvent):
pass
|
thedrow/django | refs/heads/master | tests/template_tests/filter_tests/__init__.py | 12133432 | |
mattcaldwell/django-storages-py3 | refs/heads/py3 | examples/cloudfiles_project/photos/__init__.py | 12133432 | |
Esri/hermes | refs/heads/master | setup.py | 1 | from setuptools import setup,find_packages
with open('src/hermes/version.py') as fin: exec(fin.read())
setup(
name='esri-hermes',
version=__version__,
package_dir={'':'src'},
packages=find_packages('src'),
include_package_data=True,
# PyPI MetaData
author='achapkowski',
author_email='[email protected]',
description='Collection of Utilities to Read/Write a Dataset\'s Metadata',
license='Apache License - 2.0',
keywords='esri,arcpy,metadata',
url='https://github.com/Esri/hermes',
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Libraries :: Python Modules',
),
zip_safe=False,
)
|
viarr/eve-wspace | refs/heads/develop | evewspace/Jabber/management/commands/ejabberd_auth_bridge.py | 14 | # Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Authenticate XMPP user.
"""
from struct import *
import sys
import datetime
import time
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User, check_password, Permission
from core.utils import get_config
class Command(BaseCommand):
"""
    Acts as an auth service for ejabberd through ejabberd's external auth
option. See contrib/ejabberd/ejabber.cfg for an example configuration.
"""
help = "Runs an ejabberd auth service"
def __init__(self, *args, **kwargs):
BaseCommand.__init__(self, *args, **kwargs)
self.local_enabled = False
        # Change this to True to log requests for debugging (note: logs may include passwords)
self.LOGGING_ENABLED = False
if get_config("JABBER_LOCAL_ENABLED",None).value == "1":
self.local_enabled = True
self.local_user = get_config("JABBER_FROM_JID", False).value.split('@')[0]
self.local_pass = get_config("JABBER_FROM_PASSWORD", False).value
self.space_char = get_config("JABBER_LOCAL_SPACE_CHAR", False).value
def log(self, string):
if self.LOGGING_ENABLED:
with open('/tmp/evewspace-jabber-bridge.log', 'a') as f:
f.write(str(datetime.datetime.now()) + ': ' + string + '\n')
def isuser(self, username):
"""
        Handles the isuser ejabberd command.
:Parameters:
- `username`: the user name to verify exists
"""
try:
clean_name = username.replace(self.space_char,' ')
user = User.objects.get(username=clean_name)
self.log('Found user with username ' + str(username))
return True
except User.DoesNotExist:
self.log('No username ' + str(username))
return False
except Exception, ex:
self.log('Unhandled error: ' + str(ex))
return False
def auth(self, username, password):
"""
Handles authentication of the user.
:Parameters:
- `username`: the username to verify
- `password`: the password to verify with the user
"""
self.log('Starting auth check')
if not self.local_enabled:
return False
try:
clean_name = username.replace(self.space_char,' ')
user = User.objects.get(username=clean_name)
self.log('Found username ' + str(clean_name))
if user.check_password(password) and user.has_perm('Alerts.can_alert'):
self.log(username + ' has logged in')
return True
else:
self.log(username + ' failed auth')
return False
except User.DoesNotExist:
if username == self.local_user and password == self.local_pass:
return True
else:
self.log(username + ' is not a valid user')
return False
except Exception, ex:
self.log('Unhandled error: ' + str(ex))
return False
def from_ejabberd(self):
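        # ejabberd's external auth protocol prefixes each request with a
        # 2-byte big-endian length, followed by a colon-separated token,
        # e.g. b'\x00\x0d' + b'auth:u:host:p' -> ['auth', 'u', 'host', 'p'].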
input_length = sys.stdin.read(2)
(size,) = unpack('>h', input_length)
return sys.stdin.read(size).split(':')
def to_ejabberd(self, bool):
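        # Replies are pack('>hh', 2, answer): a 2-byte length of 2 followed
        # by a 2-byte result, 1 for success and 0 for failure.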
answer = 0
if bool:
answer = 1
token = pack('>hh', 2, answer)
self.log('writing token ' + str(token) + ' to stdout')
sys.stdout.write(token)
sys.stdout.flush()
def handle(self, **options):
"""
        Main loop: read commands from ejabberd and answer each one.
:Parameters:
- `options`: keyword arguments
"""
while True:
data = self.from_ejabberd()
self.log("Got token: %s from ejabberd." % str(data))
success = False
if data[0] == "auth":
success = self.auth(data[1], data[3])
elif data[0] == "isuser":
success = self.isuser(data[1])
elif data[0] == "setpass":
success = False
self.to_ejabberd(success)
def __del__(self):
"""
What to do when we are shut off.
"""
self.log('ejabberd_auth_bridge process stopped')
|
vainotuisk/icecreamratings | refs/heads/master | ENV/lib/python2.7/site-packages/wheel/test/test_tagopt.py | 236 | """
Tests for the bdist_wheel tag options (--python-tag and --universal)
"""
import sys
import shutil
import pytest
import py.path
import tempfile
import subprocess
SETUP_PY = """\
from setuptools import setup
setup(
name="Test",
version="1.0",
author_email="[email protected]",
py_modules=["test"],
)
"""
@pytest.fixture
def temp_pkg(request):
tempdir = tempfile.mkdtemp()
def fin():
shutil.rmtree(tempdir)
request.addfinalizer(fin)
temppath = py.path.local(tempdir)
temppath.join('test.py').write('print("Hello, world")')
temppath.join('setup.py').write(SETUP_PY)
return temppath
def test_default_tag(temp_pkg):
subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel'],
cwd=str(temp_pkg))
dist_dir = temp_pkg.join('dist')
assert dist_dir.check(dir=1)
wheels = dist_dir.listdir()
assert len(wheels) == 1
assert wheels[0].basename.startswith('Test-1.0-py%s-' % (sys.version[0],))
assert wheels[0].ext == '.whl'
def test_explicit_tag(temp_pkg):
subprocess.check_call(
[sys.executable, 'setup.py', 'bdist_wheel', '--python-tag=py32'],
cwd=str(temp_pkg))
dist_dir = temp_pkg.join('dist')
assert dist_dir.check(dir=1)
wheels = dist_dir.listdir()
assert len(wheels) == 1
assert wheels[0].basename.startswith('Test-1.0-py32-')
assert wheels[0].ext == '.whl'
def test_universal_tag(temp_pkg):
subprocess.check_call(
[sys.executable, 'setup.py', 'bdist_wheel', '--universal'],
cwd=str(temp_pkg))
dist_dir = temp_pkg.join('dist')
assert dist_dir.check(dir=1)
wheels = dist_dir.listdir()
assert len(wheels) == 1
assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
assert wheels[0].ext == '.whl'
def test_universal_beats_explicit_tag(temp_pkg):
subprocess.check_call(
[sys.executable, 'setup.py', 'bdist_wheel', '--universal', '--python-tag=py32'],
cwd=str(temp_pkg))
dist_dir = temp_pkg.join('dist')
assert dist_dir.check(dir=1)
wheels = dist_dir.listdir()
assert len(wheels) == 1
assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
assert wheels[0].ext == '.whl'
def test_universal_in_setup_cfg(temp_pkg):
temp_pkg.join('setup.cfg').write('[bdist_wheel]\nuniversal=1')
subprocess.check_call(
[sys.executable, 'setup.py', 'bdist_wheel'],
cwd=str(temp_pkg))
dist_dir = temp_pkg.join('dist')
assert dist_dir.check(dir=1)
wheels = dist_dir.listdir()
assert len(wheels) == 1
assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
assert wheels[0].ext == '.whl'
def test_pythontag_in_setup_cfg(temp_pkg):
temp_pkg.join('setup.cfg').write('[bdist_wheel]\npython_tag=py32')
subprocess.check_call(
[sys.executable, 'setup.py', 'bdist_wheel'],
cwd=str(temp_pkg))
dist_dir = temp_pkg.join('dist')
assert dist_dir.check(dir=1)
wheels = dist_dir.listdir()
assert len(wheels) == 1
assert wheels[0].basename.startswith('Test-1.0-py32-')
assert wheels[0].ext == '.whl'
def test_legacy_wheel_section_in_setup_cfg(temp_pkg):
temp_pkg.join('setup.cfg').write('[wheel]\nuniversal=1')
subprocess.check_call(
[sys.executable, 'setup.py', 'bdist_wheel'],
cwd=str(temp_pkg))
dist_dir = temp_pkg.join('dist')
assert dist_dir.check(dir=1)
wheels = dist_dir.listdir()
assert len(wheels) == 1
assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
assert wheels[0].ext == '.whl'
|
detiber/ansible | refs/heads/devel | contrib/inventory/jail.py | 79 | #!/usr/bin/env python
# (c) 2013, Michael Scherer <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen, PIPE
import sys
import json
result = {}
result['all'] = {}
pipe = Popen(['jls', '-q', 'name'], stdout=PIPE, universal_newlines=True)
result['all']['hosts'] = [x[:-1] for x in pipe.stdout.readlines()]
result['all']['vars'] = {}
result['all']['vars']['ansible_connection'] = 'jail'
if len(sys.argv) == 2 and sys.argv[1] == '--list':
print(json.dumps(result))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
print(json.dumps({'ansible_connection': 'jail'}))
else:
sys.stderr.write("Need an argument, either --list or --host <host>\n")
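# Illustrative output (hypothetical jail names; the shape follows Ansible's
# dynamic inventory protocol as implemented above):
#   $ ./jail.py --list
#   {"all": {"hosts": ["www1", "www2"], "vars": {"ansible_connection": "jail"}}}
#   $ ./jail.py --host www1
#   {"ansible_connection": "jail"}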
|
lukw00/powerline | refs/heads/develop | powerline/lib/watcher/__init__.py | 38 | # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
from powerline.lib.watcher.stat import StatFileWatcher
from powerline.lib.watcher.inotify import INotifyFileWatcher
from powerline.lib.watcher.tree import TreeWatcher
from powerline.lib.watcher.uv import UvFileWatcher, UvNotFound
from powerline.lib.inotify import INotifyError
def create_file_watcher(pl, watcher_type='auto', expire_time=10):
'''Create an object that can watch for changes to specified files
	Use the ``.__call__()`` method of the returned object to start watching the file
	or to check whether the file has changed since the last call.
	Use the ``.unwatch()`` method of the returned object to stop watching the file.
Uses inotify if available, then pyuv, otherwise tracks mtimes. expire_time
is the number of minutes after the last query for a given path for the
inotify watch for that path to be automatically removed. This conserves
kernel resources.
:param PowerlineLogger pl:
Logger.
	:param str watcher_type:
One of ``inotify`` (linux only), ``uv``, ``stat``, ``auto``. Determines
what watcher will be used. ``auto`` will use ``inotify`` if available,
then ``libuv`` and then fall back to ``stat``.
:param int expire_time:
Number of minutes since last ``.__call__()`` before inotify watcher will
stop watching given file.
'''
if watcher_type == 'stat':
pl.debug('Using requested stat-based watcher', prefix='watcher')
return StatFileWatcher()
if watcher_type == 'inotify':
# Explicitly selected inotify watcher: do not catch INotifyError then.
pl.debug('Using requested inotify watcher', prefix='watcher')
return INotifyFileWatcher(expire_time=expire_time)
elif watcher_type == 'uv':
pl.debug('Using requested uv watcher', prefix='watcher')
return UvFileWatcher()
if sys.platform.startswith('linux'):
try:
pl.debug('Trying to use inotify watcher', prefix='watcher')
return INotifyFileWatcher(expire_time=expire_time)
except INotifyError:
pl.info('Failed to create inotify watcher', prefix='watcher')
try:
pl.debug('Using libuv-based watcher')
return UvFileWatcher()
except UvNotFound:
pl.debug('Failed to import pyuv')
pl.debug('Using stat-based watcher')
return StatFileWatcher()
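# Minimal usage sketch (an illustration, not part of the original module;
# assumes a PowerlineLogger-compatible ``pl`` object and an existing file):
#
#     watcher = create_file_watcher(pl, watcher_type='auto')
#     if watcher('/etc/hosts'):   # True when the file changed since last call
#         reload_config()         # hypothetical reaction to the change
#     watcher.unwatch('/etc/hosts')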
def create_tree_watcher(pl, watcher_type='auto', expire_time=10):
'''Create an object that can watch for changes in specified directories
:param PowerlineLogger pl:
Logger.
:param str watcher_type:
Watcher type. Currently the only supported types are ``inotify`` (linux
only), ``uv``, ``dummy`` and ``auto``.
:param int expire_time:
Number of minutes since last ``.__call__()`` before inotify watcher will
stop watching given file.
'''
return TreeWatcher(pl, watcher_type, expire_time)
|
iuliat/nova | refs/heads/master | nova/api/openstack/compute/plugins/v3/virtual_interfaces.py | 6 | # Copyright (C) 2011 Midokura KK
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The virtual interfaces extension."""
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova import network
ALIAS = 'os-virtual-interfaces'
authorize = extensions.os_compute_authorizer(ALIAS)
def _translate_vif_summary_view(_context, vif):
"""Maps keys for VIF summary view."""
d = {}
d['id'] = vif.uuid
d['mac_address'] = vif.address
return d
class ServerVirtualInterfaceController(wsgi.Controller):
"""The instance VIF API controller for the OpenStack API.
"""
def __init__(self):
self.compute_api = compute.API(skip_policy_check=True)
self.network_api = network.API(skip_policy_check=True)
super(ServerVirtualInterfaceController, self).__init__()
def _items(self, req, server_id, entity_maker):
"""Returns a list of VIFs, transformed through entity_maker."""
context = req.environ['nova.context']
authorize(context)
instance = common.get_instance(self.compute_api, context, server_id)
vifs = self.network_api.get_vifs_by_instance(context, instance)
limited_list = common.limited(vifs, req)
res = [entity_maker(context, vif) for vif in limited_list]
return {'virtual_interfaces': res}
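    # Illustrative response shape (hypothetical values):
    #   {'virtual_interfaces': [{'id': '<vif uuid>',
    #                            'mac_address': 'fa:16:3e:xx:xx:xx'}]}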
    @extensions.expected_errors(404)
def index(self, req, server_id):
"""Returns the list of VIFs for a given instance."""
return self._items(req, server_id,
entity_maker=_translate_vif_summary_view)
class VirtualInterfaces(extensions.V3APIExtensionBase):
"""Virtual interface support."""
name = "VirtualInterfaces"
alias = ALIAS
version = 1
def get_resources(self):
resources = []
res = extensions.ResourceExtension(
ALIAS,
controller=ServerVirtualInterfaceController(),
parent=dict(member_name='server', collection_name='servers'))
resources.append(res)
return resources
def get_controller_extensions(self):
return []
|
soarpenguin/ansible | refs/heads/devel | lib/ansible/modules/network/cloudengine/ce_bfd_view.py | 7 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ce_bfd_view
version_added: "2.4"
short_description: Manages BFD session view configuration on HUAWEI CloudEngine devices.
description:
- Manages BFD session view configuration on HUAWEI CloudEngine devices.
author: QijunPan (@CloudEngine-Ansible)
options:
session_name:
description:
- Specifies the name of a BFD session.
The value is a string of 1 to 15 case-sensitive characters without spaces.
required: true
default: null
local_discr:
description:
- Specifies the local discriminator of a BFD session.
The value is an integer that ranges from 1 to 16384.
required: false
default: null
remote_discr:
description:
- Specifies the remote discriminator of a BFD session.
The value is an integer that ranges from 1 to 4294967295.
required: false
default: null
min_tx_interval:
description:
            - Specifies the minimum interval for sending BFD packets.
The value is an integer that ranges from 50 to 1000, in milliseconds.
required: false
default: null
min_rx_interval:
description:
            - Specifies the minimum interval for receiving BFD packets.
The value is an integer that ranges from 50 to 1000, in milliseconds.
required: false
default: null
detect_multi:
description:
- Specifies the local detection multiplier of a BFD session.
The value is an integer that ranges from 3 to 50.
required: false
default: null
wtr_interval:
description:
- Specifies the WTR time of a BFD session.
The value is an integer that ranges from 1 to 60, in minutes.
The default value is 0.
required: false
default: null
tos_exp:
description:
- Specifies a priority for BFD control packets.
The value is an integer ranging from 0 to 7.
The default value is 7, which is the highest priority.
required: false
default: null
admin_down:
description:
- Enables the BFD session to enter the AdminDown state.
By default, a BFD session is enabled.
              The value is of bool type.
required: false
default: false
description:
description:
- Specifies the description of a BFD session.
The value is a string of 1 to 51 case-sensitive characters with spaces.
required: false
default: null
state:
description:
- Determines whether the config should be present or not on the device.
required: false
default: present
choices: ['present', 'absent']
"""
EXAMPLES = '''
- name: bfd view module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Set the local discriminator of a BFD session to 80 and the remote discriminator to 800
ce_bfd_view:
session_name: atob
local_discr: 80
remote_discr: 800
state: present
provider: '{{ cli }}'
- name: Set the minimum interval for receiving BFD packets to 500 ms
ce_bfd_view:
session_name: atob
min_rx_interval: 500
state: present
provider: '{{ cli }}'
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {
"admin_down": false,
"description": null,
"detect_multi": null,
"local_discr": 80,
"min_rx_interval": null,
"min_tx_interval": null,
"remote_discr": 800,
"session_name": "atob",
"state": "present",
"tos_exp": null,
"wtr_interval": null
}
existing:
description: k/v pairs of existing configuration
returned: always
type: dict
sample: {
"session": {
"adminDown": "false",
"createType": "SESS_STATIC",
"description": null,
"detectMulti": "3",
"localDiscr": null,
"minRxInt": null,
"minTxInt": null,
"remoteDiscr": null,
"sessName": "atob",
"tosExp": null,
"wtrTimerInt": null
}
}
end_state:
description: k/v pairs of configuration after module execution
returned: always
type: dict
sample: {
"session": {
"adminDown": "false",
"createType": "SESS_STATIC",
"description": null,
"detectMulti": "3",
"localDiscr": "80",
"minRxInt": null,
"minTxInt": null,
"remoteDiscr": "800",
"sessName": "atob",
"tosExp": null,
"wtrTimerInt": null
}
}
updates:
description: commands sent to the device
returned: always
type: list
sample: [
"bfd atob",
"discriminator local 80",
"discriminator remote 800"
]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import sys
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_BFD = """
<filter type="subtree">
<bfd xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
%s
</bfd>
</filter>
"""
CE_NC_GET_BFD_GLB = """
<bfdSchGlobal>
<bfdEnable></bfdEnable>
</bfdSchGlobal>
"""
CE_NC_GET_BFD_SESSION = """
<bfdCfgSessions>
<bfdCfgSession>
<sessName>%s</sessName>
<createType></createType>
<localDiscr></localDiscr>
<remoteDiscr></remoteDiscr>
<minTxInt></minTxInt>
<minRxInt></minRxInt>
<detectMulti></detectMulti>
<wtrTimerInt></wtrTimerInt>
<tosExp></tosExp>
<adminDown></adminDown>
<description></description>
</bfdCfgSession>
</bfdCfgSessions>
"""
class BfdView(object):
"""Manages BFD View"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.__init_module__()
# module input info
self.session_name = self.module.params['session_name']
self.local_discr = self.module.params['local_discr']
self.remote_discr = self.module.params['remote_discr']
self.min_tx_interval = self.module.params['min_tx_interval']
self.min_rx_interval = self.module.params['min_rx_interval']
self.detect_multi = self.module.params['detect_multi']
self.wtr_interval = self.module.params['wtr_interval']
self.tos_exp = self.module.params['tos_exp']
self.admin_down = self.module.params['admin_down']
self.description = self.module.params['description']
self.state = self.module.params['state']
# host info
self.host = self.module.params['host']
self.username = self.module.params['username']
self.port = self.module.params['port']
# state
self.changed = False
self.bfd_dict = dict()
self.updates_cmd = list()
self.commands = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def __init_module__(self):
"""init module"""
self.module = AnsibleModule(argument_spec=self.spec,
supports_check_mode=True)
def get_bfd_dict(self):
"""bfd config dict"""
bfd_dict = dict()
bfd_dict["global"] = dict()
bfd_dict["session"] = dict()
conf_str = CE_NC_GET_BFD % (CE_NC_GET_BFD_GLB + (CE_NC_GET_BFD_SESSION % self.session_name))
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return bfd_dict
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
# get bfd global info
glb = root.find("data/bfd/bfdSchGlobal")
        if glb is not None:
for attr in glb:
bfd_dict["global"][attr.tag] = attr.text
# get bfd session info
sess = root.find("data/bfd/bfdCfgSessions/bfdCfgSession")
        if sess is not None:
for attr in sess:
bfd_dict["session"][attr.tag] = attr.text
return bfd_dict
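    # Illustrative return shape of get_bfd_dict() (hypothetical values,
    # consistent with the RETURN documentation above):
    #   {"global": {"bfdEnable": "true"},
    #    "session": {"sessName": "atob", "localDiscr": "80", ...}}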
def config_session(self):
"""configures bfd session"""
xml_str = ""
cmd_list = list()
cmd_session = ""
if not self.session_name:
return xml_str
if self.bfd_dict["global"].get("bfdEnable", "false") != "true":
self.module.fail_json(msg="Error: Please enable BFD globally first.")
if not self.bfd_dict["session"]:
            self.module.fail_json(msg="Error: BFD session does not exist.")
session = self.bfd_dict["session"]
xml_str = "<sessName>%s</sessName>" % self.session_name
cmd_session = "bfd %s" % self.session_name
# BFD session view
if self.local_discr is not None:
if self.state == "present" and str(self.local_discr) != session.get("localDiscr"):
xml_str += "<localDiscr>%s</localDiscr>" % self.local_discr
cmd_list.append("discriminator local %s" % self.local_discr)
elif self.state == "absent" and str(self.local_discr) == session.get("localDiscr"):
xml_str += "<localDiscr/>"
cmd_list.append("undo discriminator local")
if self.remote_discr is not None:
if self.state == "present" and str(self.remote_discr) != session.get("remoteDiscr"):
xml_str += "<remoteDiscr>%s</remoteDiscr>" % self.remote_discr
cmd_list.append("discriminator remote %s" % self.remote_discr)
elif self.state == "absent" and str(self.remote_discr) == session.get("remoteDiscr"):
xml_str += "<remoteDiscr/>"
cmd_list.append("undo discriminator remote")
if self.min_tx_interval is not None:
if self.state == "present" and str(self.min_tx_interval) != session.get("minTxInt"):
xml_str += "<minTxInt>%s</minTxInt>" % self.min_tx_interval
cmd_list.append("min-tx-interval %s" % self.min_tx_interval)
elif self.state == "absent" and str(self.min_tx_interval) == session.get("minTxInt"):
xml_str += "<minTxInt/>"
cmd_list.append("undo min-tx-interval")
if self.min_rx_interval is not None:
if self.state == "present" and str(self.min_rx_interval) != session.get("minRxInt"):
xml_str += "<minRxInt>%s</minRxInt>" % self.min_rx_interval
cmd_list.append("min-rx-interval %s" % self.min_rx_interval)
elif self.state == "absent" and str(self.min_rx_interval) == session.get("minRxInt"):
xml_str += "<minRxInt/>"
cmd_list.append("undo min-rx-interval")
if self.detect_multi is not None:
if self.state == "present" and str(self.detect_multi) != session.get("detectMulti"):
xml_str += " <detectMulti>%s</detectMulti>" % self.detect_multi
cmd_list.append("detect-multiplier %s" % self.detect_multi)
elif self.state == "absent" and str(self.detect_multi) == session.get("detectMulti"):
xml_str += " <detectMulti/>"
cmd_list.append("undo detect-multiplier")
if self.wtr_interval is not None:
if self.state == "present" and str(self.wtr_interval) != session.get("wtrTimerInt"):
xml_str += " <wtrTimerInt>%s</wtrTimerInt>" % self.wtr_interval
cmd_list.append("wtr %s" % self.wtr_interval)
elif self.state == "absent" and str(self.wtr_interval) == session.get("wtrTimerInt"):
xml_str += " <wtrTimerInt/>"
cmd_list.append("undo wtr")
if self.tos_exp is not None:
if self.state == "present" and str(self.tos_exp) != session.get("tosExp"):
xml_str += " <tosExp>%s</tosExp>" % self.tos_exp
cmd_list.append("tos-exp %s" % self.tos_exp)
elif self.state == "absent" and str(self.tos_exp) == session.get("tosExp"):
xml_str += " <tosExp/>"
cmd_list.append("undo tos-exp")
if self.admin_down and session.get("adminDown", "false") == "false":
xml_str += " <adminDown>true</adminDown>"
cmd_list.append("shutdown")
elif not self.admin_down and session.get("adminDown", "false") == "true":
xml_str += " <adminDown>false</adminDown>"
cmd_list.append("undo shutdown")
if self.description:
if self.state == "present" and self.description != session.get("description"):
xml_str += "<description>%s</description>" % self.description
cmd_list.append("description %s" % self.description)
elif self.state == "absent" and self.description == session.get("description"):
xml_str += "<description/>"
cmd_list.append("undo description")
if xml_str.endswith("</sessName>"):
# no config update
return ""
else:
cmd_list.insert(0, cmd_session)
self.updates_cmd.extend(cmd_list)
return '<bfdCfgSessions><bfdCfgSession operation="merge">' + xml_str\
+ '</bfdCfgSession></bfdCfgSessions>'
def netconf_load_config(self, xml_str):
"""load bfd config by netconf"""
if not xml_str:
return
xml_cfg = """
<config>
<bfd xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
%s
</bfd>
</config>""" % xml_str
        set_nc_config(self.module, xml_cfg)
self.changed = True
def check_params(self):
"""Check all input params"""
# check session_name
if not self.session_name:
self.module.fail_json(msg="Error: Missing required arguments: session_name.")
if self.session_name:
if len(self.session_name) < 1 or len(self.session_name) > 15:
self.module.fail_json(msg="Error: Session name is invalid.")
# check local_discr
if self.local_discr is not None:
if self.local_discr < 1 or self.local_discr > 16384:
                self.module.fail_json(msg="Error: Session local_discr is not in the range from 1 to 16384.")
# check remote_discr
if self.remote_discr is not None:
if self.remote_discr < 1 or self.remote_discr > 4294967295:
                self.module.fail_json(msg="Error: Session remote_discr is not in the range from 1 to 4294967295.")
# check min_tx_interval
if self.min_tx_interval is not None:
if self.min_tx_interval < 50 or self.min_tx_interval > 1000:
                self.module.fail_json(msg="Error: Session min_tx_interval is not in the range from 50 to 1000.")
# check min_rx_interval
if self.min_rx_interval is not None:
if self.min_rx_interval < 50 or self.min_rx_interval > 1000:
                self.module.fail_json(msg="Error: Session min_rx_interval is not in the range from 50 to 1000.")
# check detect_multi
if self.detect_multi is not None:
if self.detect_multi < 3 or self.detect_multi > 50:
                self.module.fail_json(msg="Error: Session detect_multi is not in the range from 3 to 50.")
# check wtr_interval
if self.wtr_interval is not None:
if self.wtr_interval < 1 or self.wtr_interval > 60:
                self.module.fail_json(msg="Error: Session wtr_interval is not in the range from 1 to 60.")
# check tos_exp
if self.tos_exp is not None:
if self.tos_exp < 0 or self.tos_exp > 7:
                self.module.fail_json(msg="Error: Session tos_exp is not in the range from 0 to 7.")
# check description
if self.description:
if len(self.description) < 1 or len(self.description) > 51:
self.module.fail_json(msg="Error: Session description is invalid.")
def get_proposed(self):
"""get proposed info"""
# base config
self.proposed["session_name"] = self.session_name
self.proposed["local_discr"] = self.local_discr
self.proposed["remote_discr"] = self.remote_discr
self.proposed["min_tx_interval"] = self.min_tx_interval
self.proposed["min_rx_interval"] = self.min_rx_interval
self.proposed["detect_multi"] = self.detect_multi
self.proposed["wtr_interval"] = self.wtr_interval
self.proposed["tos_exp"] = self.tos_exp
self.proposed["admin_down"] = self.admin_down
self.proposed["description"] = self.description
self.proposed["state"] = self.state
def get_existing(self):
"""get existing info"""
if not self.bfd_dict:
return
self.existing["session"] = self.bfd_dict.get("session")
def get_end_state(self):
"""get end state info"""
bfd_dict = self.get_bfd_dict()
if not bfd_dict:
return
self.end_state["session"] = bfd_dict.get("session")
def work(self):
"""worker"""
self.check_params()
self.bfd_dict = self.get_bfd_dict()
self.get_existing()
self.get_proposed()
# deal present or absent
xml_str = ''
if self.session_name:
xml_str += self.config_session()
# update to device
if xml_str:
self.netconf_load_config(xml_str)
self.changed = True
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
"""Module main"""
argument_spec = dict(
session_name=dict(required=True, type='str'),
local_discr=dict(required=False, type='int'),
remote_discr=dict(required=False, type='int'),
min_tx_interval=dict(required=False, type='int'),
min_rx_interval=dict(required=False, type='int'),
detect_multi=dict(required=False, type='int'),
wtr_interval=dict(required=False, type='int'),
tos_exp=dict(required=False, type='int'),
admin_down=dict(required=False, type='bool', default=False),
description=dict(required=False, type='str'),
state=dict(required=False, default='present', choices=['present', 'absent'])
)
argument_spec.update(ce_argument_spec)
module = BfdView(argument_spec)
module.work()
if __name__ == '__main__':
main()
|
bonewell/sdl_core | refs/heads/master | tools/InterfaceGenerator/generator/parsers/SDLRPCV1.py | 14 | """SDLRPCV1 parser.
Contains parser for SDLRPCV1 XML format.
"""
from generator.parsers import RPCBase
class Parser(RPCBase.Parser):
"""SDLRPCV1 parser."""
pass
|
jiffyjeff/mongrel2 | refs/heads/master | examples/ws/python/echo.py | 55 | import simplejson as json
from mongrel2 import handler
import wsutil
import sys
import time
import re
sender_id = "82209006-86FF-4982-B5EA-D1E29E55D480"
conn = handler.Connection(sender_id, "tcp://127.0.0.1:9990",
"tcp://127.0.0.1:9989")
CONNECTION_TIMEOUT=5
closingMessages={}
badUnicode=re.compile(u'[\ud800-\udfff]')
logf=open('echo.log','wb')
#logf=open('/dev/null','wb')
#logf=sys.stdout
def abortConnection(conn,req,reason='none',code=None):
#print 'abort',conn,req,reason,code
if code is not None:
#print "Closing cleanly\n"
conn.reply_websocket(req,code+reason,opcode=wsutil.OP_CLOSE)
closingMessages[req.conn_id]=(time.time(),req.sender)
else:
conn.reply(req,'')
print >>logf,'abort',code,reason
while True:
now=time.time()
logf.flush()
for k,(t,uuid) in closingMessages.items():
if now > t+CONNECTION_TIMEOUT:
conn.send(uuid,k,'')
try:
req = conn.recv()
except:
print "FAILED RECV"
continue
if req.is_disconnect():
#print "DISCONNECTED", req.conn_id
continue
if req.headers.get('METHOD') == 'WEBSOCKET_HANDSHAKE':
#print "HANDSHAKE"
conn.reply(req,
'\r\n'.join([
"HTTP/1.1 101 Switching Protocols",
"Upgrade: websocket",
"Connection: Upgrade",
"Sec-WebSocket-Accept: %s\r\n\r\n"])%req.body)
continue
if req.headers.get('METHOD') != 'WEBSOCKET':
        print 'METHOD is not WEBSOCKET:',req.headers #,req.body
conn.reply(req,'')
continue
try:
#print 'headers',req.headers
flags = int(req.headers.get('FLAGS'),16)
fin = flags&0x80==0x80
rsvd=flags & 0x70
opcode=flags & 0xf
wsdata = req.body
#print fin,rsvd,opcode,len(wsdata),wsdata
#logf.write('\n')
except:
#print "Unable to decode FLAGS"
abortConnection(conn,req,'WS decode failed')
        continue  # rsvd/opcode are undefined on decode failure, so skip this request
if rsvd != 0:
abortConnection(conn,req,'reserved non-zero',
wsutil.CLOSE_PROTOCOL_ERROR)
continue
if opcode == wsutil.OP_CLOSE:
if req.conn_id in closingMessages:
del closingMessages[req.conn_id]
conn.reply(req,'')
else:
conn.reply_websocket(req,wsdata,opcode)
conn.reply(req,'')
continue
if req.conn_id in closingMessages:
continue
if opcode not in wsutil.opcodes:
abortConnection(conn,req,'Unknown opcode',
wsutil.CLOSE_PROTOCOL_ERROR)
continue
if (opcode & 0x8) != 0:
if opcode ==wsutil.OP_PING:
opcode = wsutil.OP_PONG
conn.reply_websocket(req,wsdata,opcode)
continue
if opcode == wsutil.OP_PONG:
continue # We don't send pings, so ignore pongs
if(opcode == wsutil.OP_TEXT):
try:
x=wsdata.decode('utf-8')
#Thank you for not fixing python issue8271 in 2.x :(
if badUnicode.search(x):
raise UnicodeError('Surrogates not allowed')
#for c in x:
#if (0xd800 <= ord(c) <= 0xdfff):
#raise UnicodeError('Surrogates not allowed')
except:
abortConnection(conn,req,'invalid UTF', wsutil.CLOSE_BAD_DATA)
continue
conn.reply_websocket(req,wsdata,opcode)
|
mhnatiuk/phd_sociology_of_religion | refs/heads/master | scrapper/lib/python2.7/site-packages/twisted/internet/stdio.py | 44 | # -*- test-case-name: twisted.test.test_stdio -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Standard input/out/err support.
This module exposes one name, StandardIO, which is a factory that takes an
IProtocol provider as an argument. It connects that protocol to standard input
and output on the current process.
It should work on any UNIX and also on Win32 (with some caveats: due to
platform limitations, it will perform very poorly on Win32).
Future Plans::
support for stderr, perhaps
Rewrite to use the reactor instead of an ad-hoc mechanism for connecting
protocols to transport.
Maintainer: James Y Knight
"""
from twisted.python.runtime import platform
if platform.isWindows():
from twisted.internet import _win32stdio
StandardIO = _win32stdio.StandardIO
PipeAddress = _win32stdio.Win32PipeAddress
else:
from twisted.internet._posixstdio import StandardIO, PipeAddress
__all__ = ['StandardIO', 'PipeAddress']
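# Minimal usage sketch (an illustration, not part of this module; ``Echo``
# is a hypothetical protocol that writes received bytes straight back):
#
#     from twisted.internet import protocol, reactor
#     from twisted.internet.stdio import StandardIO
#
#     class Echo(protocol.Protocol):
#         def dataReceived(self, data):
#             self.transport.write(data)
#
#     StandardIO(Echo())
#     reactor.run()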
|
RuanJG/paparazzi | refs/heads/master | sw/lib/python/pprz_math/__init__.py | 79 | __all__ = ["geodetic", "algebra"]
|
nelango/ViralityAnalysis | refs/heads/master | model/lib/sklearn/utils/graph.py | 289 | """
Graph utilities and algorithms
Graphs are represented with their adjacency matrices, preferably using
sparse matrices.
"""
# Authors: Aric Hagberg <[email protected]>
# Gael Varoquaux <[email protected]>
# Jake Vanderplas <[email protected]>
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from .validation import check_array
from .graph_shortest_path import graph_shortest_path
###############################################################################
# Path and connected component analysis.
# Code adapted from networkx
def single_source_shortest_path_length(graph, source, cutoff=None):
"""Return the shortest path length from source to all reachable nodes.
Returns a dictionary of shortest path lengths keyed by target.
Parameters
----------
    graph : sparse matrix or 2D array (preferably LIL matrix)
Adjacency matrix of the graph
source : node label
Starting node for path
cutoff : integer, optional
Depth to stop the search - only
paths of length <= cutoff are returned.
Examples
--------
>>> from sklearn.utils.graph import single_source_shortest_path_length
>>> import numpy as np
>>> graph = np.array([[ 0, 1, 0, 0],
... [ 1, 0, 1, 0],
... [ 0, 1, 0, 1],
... [ 0, 0, 1, 0]])
>>> single_source_shortest_path_length(graph, 0)
{0: 0, 1: 1, 2: 2, 3: 3}
>>> single_source_shortest_path_length(np.ones((6, 6)), 2)
{0: 1, 1: 1, 2: 0, 3: 1, 4: 1, 5: 1}
"""
if sparse.isspmatrix(graph):
graph = graph.tolil()
else:
graph = sparse.lil_matrix(graph)
seen = {} # level (number of hops) when seen in BFS
level = 0 # the current level
    next_level = [source]  # list of nodes to check at next level
while next_level:
this_level = next_level # advance to next level
        next_level = set()  # and start a new set (fringe)
for v in this_level:
if v not in seen:
seen[v] = level # set the level of vertex v
next_level.update(graph.rows[v])
if cutoff is not None and cutoff <= level:
break
level += 1
return seen # return all path lengths as dictionary
if hasattr(sparse, 'connected_components'):
connected_components = sparse.connected_components
else:
from .sparsetools import connected_components
###############################################################################
# Graph laplacian
def graph_laplacian(csgraph, normed=False, return_diag=False):
""" Return the Laplacian matrix of a directed graph.
For non-symmetric graphs the out-degree is used in the computation.
Parameters
----------
csgraph : array_like or sparse matrix, 2 dimensions
compressed-sparse graph, with shape (N, N).
normed : bool, optional
If True, then compute normalized Laplacian.
return_diag : bool, optional
If True, then return diagonal as well as laplacian.
Returns
-------
lap : ndarray
The N x N laplacian matrix of graph.
diag : ndarray
The length-N diagonal of the laplacian matrix.
diag is returned only if return_diag is True.
Notes
-----
    The Laplacian matrix of a graph is sometimes referred to as the
    "Kirchhoff matrix" or the "admittance matrix", and is useful in many
parts of spectral graph theory. In particular, the eigen-decomposition
of the laplacian matrix can give insight into many properties of the graph.
For non-symmetric directed graphs, the laplacian is computed using the
out-degree of each node.
"""
if csgraph.ndim != 2 or csgraph.shape[0] != csgraph.shape[1]:
raise ValueError('csgraph must be a square matrix or array')
if normed and (np.issubdtype(csgraph.dtype, np.int)
or np.issubdtype(csgraph.dtype, np.uint)):
csgraph = check_array(csgraph, dtype=np.float64, accept_sparse=True)
if sparse.isspmatrix(csgraph):
return _laplacian_sparse(csgraph, normed=normed,
return_diag=return_diag)
else:
return _laplacian_dense(csgraph, normed=normed,
return_diag=return_diag)
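# Illustrative example (not part of the public docstring): the unnormalized
# Laplacian of a 3-node path graph carries the node degrees on the diagonal
# and has row sums of zero.
#
#     >>> import numpy as np
#     >>> g = np.array([[0., 1., 0.], [1., 0., 1.], [0., 1., 0.]])
#     >>> graph_laplacian(g)
#     array([[ 1., -1.,  0.],
#            [-1.,  2., -1.],
#            [ 0., -1.,  1.]])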
def _laplacian_sparse(graph, normed=False, return_diag=False):
n_nodes = graph.shape[0]
    if graph.format != 'coo':
lap = (-graph).tocoo()
else:
lap = -graph.copy()
diag_mask = (lap.row == lap.col)
    if diag_mask.sum() != n_nodes:
# The sparsity pattern of the matrix has holes on the diagonal,
# we need to fix that
diag_idx = lap.row[diag_mask]
diagonal_holes = list(set(range(n_nodes)).difference(diag_idx))
new_data = np.concatenate([lap.data, np.ones(len(diagonal_holes))])
new_row = np.concatenate([lap.row, diagonal_holes])
new_col = np.concatenate([lap.col, diagonal_holes])
lap = sparse.coo_matrix((new_data, (new_row, new_col)),
shape=lap.shape)
diag_mask = (lap.row == lap.col)
lap.data[diag_mask] = 0
w = -np.asarray(lap.sum(axis=1)).squeeze()
if normed:
w = np.sqrt(w)
w_zeros = (w == 0)
w[w_zeros] = 1
lap.data /= w[lap.row]
lap.data /= w[lap.col]
lap.data[diag_mask] = (1 - w_zeros[lap.row[diag_mask]]).astype(
lap.data.dtype)
else:
lap.data[diag_mask] = w[lap.row[diag_mask]]
if return_diag:
return lap, w
return lap
def _laplacian_dense(graph, normed=False, return_diag=False):
n_nodes = graph.shape[0]
lap = -np.asarray(graph) # minus sign leads to a copy
# set diagonal to zero
lap.flat[::n_nodes + 1] = 0
w = -lap.sum(axis=0)
if normed:
w = np.sqrt(w)
w_zeros = (w == 0)
w[w_zeros] = 1
lap /= w
lap /= w[:, np.newaxis]
lap.flat[::n_nodes + 1] = (1 - w_zeros).astype(lap.dtype)
else:
lap.flat[::n_nodes + 1] = w.astype(lap.dtype)
if return_diag:
return lap, w
return lap
|
osbzr/gooderp_org | refs/heads/master | website_mail/models/mail_message.py | 264 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.tools import html2plaintext
from openerp.tools.translate import _
from openerp.osv import osv, fields, expression
class MailMessage(osv.Model):
_inherit = 'mail.message'
def _get_description_short(self, cr, uid, ids, name, arg, context=None):
res = dict.fromkeys(ids, False)
for message in self.browse(cr, uid, ids, context=context):
if message.subject:
res[message.id] = message.subject
else:
plaintext_ct = '' if not message.body else html2plaintext(message.body)
res[message.id] = plaintext_ct[:30] + '%s' % (' [...]' if len(plaintext_ct) >= 30 else '')
return res
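    # Illustrative behaviour (hypothetical messages): a message with a subject
    # keeps it as-is; a body of "<p>Hello world</p>" yields "Hello world"; and
    # plaintext bodies of 30+ characters are truncated with a trailing " [...]".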
_columns = {
'description': fields.function(
_get_description_short, type='char',
help='Message description: either the subject, or the beginning of the body'
),
'website_published': fields.boolean(
'Published', help="Visible on the website as a comment", copy=False,
),
}
def default_get(self, cr, uid, fields_list, context=None):
defaults = super(MailMessage, self).default_get(cr, uid, fields_list, context=context)
# Note: explicitly implemented in default_get() instead of _defaults,
# to avoid setting to True for all existing messages during upgrades.
# TODO: this default should probably be dynamic according to the model
# on which the messages are attached, thus moved to create().
if 'website_published' in fields_list:
defaults.setdefault('website_published', True)
return defaults
def _search(self, cr, uid, args, offset=0, limit=None, order=None,
context=None, count=False, access_rights_uid=None):
""" Override that adds specific access rights of mail.message, to restrict
messages to published messages for public users. """
if uid != SUPERUSER_ID:
group_ids = self.pool.get('res.users').browse(cr, uid, uid, context=context).groups_id
group_user_id = self.pool.get("ir.model.data").get_object_reference(cr, uid, 'base', 'group_public')[1]
if group_user_id in [group.id for group in group_ids]:
args = expression.AND([[('website_published', '=', True)], list(args)])
return super(MailMessage, self)._search(cr, uid, args, offset=offset, limit=limit, order=order,
context=context, count=count, access_rights_uid=access_rights_uid)
    def check_access_rule(self, cr, uid, ids, operation, context=None):
        """ Add access rules of mail.message for public users:
            - read:
                - raise if the message is not published on the website
        """
if uid != SUPERUSER_ID:
group_ids = self.pool.get('res.users').browse(cr, uid, uid, context=context).groups_id
group_user_id = self.pool.get("ir.model.data").get_object_reference(cr, uid, 'base', 'group_public')[1]
if group_user_id in [group.id for group in group_ids]:
cr.execute('SELECT id FROM "%s" WHERE website_published IS FALSE AND id = ANY (%%s)' % (self._table), (ids,))
if cr.fetchall():
raise osv.except_osv(
_('Access Denied'),
_('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % (self._description, operation))
return super(MailMessage, self).check_access_rule(cr, uid, ids=ids, operation=operation, context=context)
|
DarrelHsu/cvsClient | refs/heads/master | third_party/pylint/pyreverse/utils.py | 30 | # Copyright (c) 2002-2010 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:[email protected]
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
generic classes/functions for pyreverse core/extensions
"""
import sys
import re
import os
########### pyreverse option utils ##############################
RCFILE = '.pyreverserc'
def get_default_options():
"""
Read config file and return list of options
"""
options = []
home = os.environ.get('HOME', '')
if home:
rcfile = os.path.join(home, RCFILE)
try:
options = open(rcfile).read().split()
except IOError:
pass # ignore if no config file found
return options
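# Illustrative ~/.pyreverserc content (hypothetical): a whitespace-separated
# option list such as "-a 1 -s 1", which insert_default_options() below
# prepends to sys.argv.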
def insert_default_options():
"""insert default options to sys.argv
"""
options = get_default_options()
options.reverse()
for arg in options:
sys.argv.insert(1, arg)
# astng utilities ###########################################################
SPECIAL = re.compile('^__[A-Za-z0-9]+[A-Za-z0-9_]*__$')
PRIVATE = re.compile('^__[_A-Za-z0-9]*[A-Za-z0-9]+_?$')
PROTECTED = re.compile('^_[_A-Za-z0-9]*$')
def get_visibility(name):
"""return the visibility from a name: public, protected, private or special
"""
if SPECIAL.match(name):
visibility = 'special'
elif PRIVATE.match(name):
visibility = 'private'
elif PROTECTED.match(name):
visibility = 'protected'
else:
visibility = 'public'
return visibility
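# Illustrative classifications (not part of the original module):
#   get_visibility('__init__')  -> 'special'
#   get_visibility('__secret')  -> 'private'
#   get_visibility('_internal') -> 'protected'
#   get_visibility('value')     -> 'public'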
ABSTRACT = re.compile('^.*Abstract.*')
FINAL = re.compile('^[A-Z_]*$')
def is_abstract(node):
    """return true if the given class node corresponds to an abstract class
definition
"""
return ABSTRACT.match(node.name)
def is_final(node):
    """return true if the given class/function node corresponds to a final
definition
"""
return FINAL.match(node.name)
def is_interface(node):
# bw compat
return node.type == 'interface'
def is_exception(node):
# bw compat
return node.type == 'exception'
# Helpers #####################################################################
_CONSTRUCTOR = 1
_SPECIAL = 2
_PROTECTED = 4
_PRIVATE = 8
MODES = {
'ALL' : 0,
'PUB_ONLY' : _SPECIAL + _PROTECTED + _PRIVATE,
'SPECIAL' : _SPECIAL,
'OTHER' : _PROTECTED + _PRIVATE,
}
VIS_MOD = {'special': _SPECIAL, 'protected': _PROTECTED, \
'private': _PRIVATE, 'public': 0 }
class FilterMixIn:
"""filter nodes according to a mode and nodes' visibility
"""
def __init__(self, mode):
"init filter modes"
__mode = 0
for nummod in mode.split('+'):
try:
__mode += MODES[nummod]
except KeyError, ex:
print >> sys.stderr, 'Unknown filter mode %s' % ex
self.__mode = __mode
def show_attr(self, node):
"""return true if the node should be treated
"""
visibility = get_visibility(getattr(node, 'name', node))
return not (self.__mode & VIS_MOD[visibility] )
|
vladimiroff/humble-media | refs/heads/master | humblemedia/resources/migrations/0007_attachment_mime_type.py | 1 | # encoding: utf8
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('resources', '0006_auto_20140426_1458'),
]
operations = [
migrations.AddField(
model_name='attachment',
name='mime_type',
field=models.CharField(null=True, max_length=64),
preserve_default=True,
),
]
|
vitalti/sapl | refs/heads/master | sapl/parlamentares/migrations/0009_auto_20170905_1617.py | 3 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-09-05 16:17
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('parlamentares', '0008_adiciona_cargos_mesa'),
]
operations = [
migrations.AlterModelOptions(
name='situacaomilitar',
options={'ordering': ['descricao'], 'verbose_name': 'Tipo Situação Militar', 'verbose_name_plural': 'Tipos Situações Militares'},
),
migrations.AlterModelOptions(
name='tipoafastamento',
options={'ordering': ['descricao'], 'verbose_name': 'Tipo de Afastamento', 'verbose_name_plural': 'Tipos de Afastamento'},
),
migrations.AlterModelOptions(
name='tipodependente',
options={'ordering': ['descricao'], 'verbose_name': 'Tipo de Dependente', 'verbose_name_plural': 'Tipos de Dependente'},
),
]
|
webu/django-cms | refs/heads/develop | cms/south_migrations/0051_fix_content_type.py | 63 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
try:
            # Let's fix the broken page entry
ct = orm['contenttypes.ContentType'].objects.get(
model='page', name='', app_label='cms')
ct.name = 'page'
ct.save()
except orm['contenttypes.ContentType'].DoesNotExist:
# No content type to fix
pass
def backwards(self, orm):
"Write your backwards methods here."
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('tree_id', 'lft')", 'unique_together': "(('publisher_is_draft', 'application_namespace'),)", 'object_name': 'Page'},
'application_namespace': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'revision_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "'INHERIT'", 'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')", 'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [u'auth.User']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': u"orm['auth.User']"}),
u'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': [u'auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': u"orm['auth.User']"}),
u'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '155', 'null': 'True', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'cms.usersettings': {
'Meta': {'object_name': 'UserSettings'},
'clipboard': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
symmetrical = True
|
hiidef/hiispider | refs/heads/master | hiispider/sleep.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""An asynchronous implementation of time.sleep()"""
from twisted.internet.defer import Deferred
from twisted.internet import reactor
# FIXME: make this a function rather than a class
class Sleep(Deferred):
def __init__(self, timeout):
Deferred.__init__(self)
reactor.callLater(timeout, self.callback, None)
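# Minimal usage sketch (an illustration, not part of the original module;
# assumes a running reactor and Twisted's inlineCallbacks decorator):
#
#     from twisted.internet.defer import inlineCallbacks
#
#     @inlineCallbacks
#     def delayed_task():
#         yield Sleep(2.0)  # suspends this callback chain, not the reactor
#         print "two seconds elapsed"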
|
dfang/odoo | refs/heads/10.0 | addons/sale_timesheet/models/product.py | 22 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class ProductTemplate(models.Model):
_inherit = 'product.template'
track_service = fields.Selection(selection_add=[
('timesheet', 'Timesheets on project'),
('task', 'Create a task and track hours')])
project_id = fields.Many2one(
'project.project', 'Project', company_dependent=True,
help='Create a task under this project on sale order validation. This setting must be set for each company.')
@api.onchange('type')
def _onchange_type(self):
if self.type == 'service':
self.track_service = 'timesheet'
else:
self.track_service = 'manual'
class ProductProduct(models.Model):
_inherit = 'product.product'
@api.multi
def _need_procurement(self):
for product in self:
if product.type == 'service' and product.track_service == 'task':
return True
return super(ProductProduct, self)._need_procurement()
|
JackDandy/SickGear | refs/heads/master | lib/tornado_py2/tcpclient.py | 2 | #
# Copyright 2014 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A non-blocking TCP connection factory.
"""
from __future__ import absolute_import, division, print_function
import functools
import socket
import numbers
import datetime
from tornado_py2.concurrent import Future, future_add_done_callback
from tornado_py2.ioloop import IOLoop
from tornado_py2.iostream import IOStream
from tornado_py2 import gen
from tornado_py2.netutil import Resolver
from tornado_py2.platform.auto import set_close_exec
from tornado_py2.gen import TimeoutError
from tornado_py2.util import timedelta_to_seconds
_INITIAL_CONNECT_TIMEOUT = 0.3
class _Connector(object):
"""A stateless implementation of the "Happy Eyeballs" algorithm.
"Happy Eyeballs" is documented in RFC6555 as the recommended practice
for when both IPv4 and IPv6 addresses are available.
In this implementation, we partition the addresses by family, and
make the first connection attempt to whichever address was
returned first by ``getaddrinfo``. If that connection fails or
times out, we begin a connection in parallel to the first address
of the other family. If there are additional failures we retry
with other addresses, keeping one connection attempt per family
in flight at a time.
http://tools.ietf.org/html/rfc6555
"""
def __init__(self, addrinfo, connect):
self.io_loop = IOLoop.current()
self.connect = connect
self.future = Future()
self.timeout = None
self.connect_timeout = None
self.last_error = None
self.remaining = len(addrinfo)
self.primary_addrs, self.secondary_addrs = self.split(addrinfo)
self.streams = set()
@staticmethod
def split(addrinfo):
"""Partition the ``addrinfo`` list by address family.
Returns two lists. The first list contains the first entry from
``addrinfo`` and all others with the same family, and the
second list contains all other addresses (normally one list will
be AF_INET and the other AF_INET6, although non-standard resolvers
may return additional families).
"""
primary = []
secondary = []
primary_af = addrinfo[0][0]
for af, addr in addrinfo:
if af == primary_af:
primary.append((af, addr))
else:
secondary.append((af, addr))
return primary, secondary
def start(self, timeout=_INITIAL_CONNECT_TIMEOUT, connect_timeout=None):
self.try_connect(iter(self.primary_addrs))
self.set_timeout(timeout)
if connect_timeout is not None:
self.set_connect_timeout(connect_timeout)
return self.future
def try_connect(self, addrs):
try:
af, addr = next(addrs)
except StopIteration:
# We've reached the end of our queue, but the other queue
# might still be working. Send a final error on the future
# only when both queues are finished.
if self.remaining == 0 and not self.future.done():
self.future.set_exception(self.last_error or
IOError("connection failed"))
return
stream, future = self.connect(af, addr)
self.streams.add(stream)
future_add_done_callback(
future, functools.partial(self.on_connect_done, addrs, af, addr))
def on_connect_done(self, addrs, af, addr, future):
self.remaining -= 1
try:
stream = future.result()
except Exception as e:
if self.future.done():
return
# Error: try again (but remember what happened so we have an
# error to raise in the end)
self.last_error = e
self.try_connect(addrs)
if self.timeout is not None:
# If the first attempt failed, don't wait for the
# timeout to try an address from the secondary queue.
self.io_loop.remove_timeout(self.timeout)
self.on_timeout()
return
self.clear_timeouts()
if self.future.done():
# This is a late arrival; just drop it.
stream.close()
else:
self.streams.discard(stream)
self.future.set_result((af, addr, stream))
self.close_streams()
def set_timeout(self, timeout):
self.timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout,
self.on_timeout)
def on_timeout(self):
self.timeout = None
if not self.future.done():
self.try_connect(iter(self.secondary_addrs))
def clear_timeout(self):
if self.timeout is not None:
self.io_loop.remove_timeout(self.timeout)
def set_connect_timeout(self, connect_timeout):
self.connect_timeout = self.io_loop.add_timeout(
connect_timeout, self.on_connect_timeout)
def on_connect_timeout(self):
if not self.future.done():
self.future.set_exception(TimeoutError())
self.close_streams()
def clear_timeouts(self):
if self.timeout is not None:
self.io_loop.remove_timeout(self.timeout)
if self.connect_timeout is not None:
self.io_loop.remove_timeout(self.connect_timeout)
def close_streams(self):
for stream in self.streams:
stream.close()
class TCPClient(object):
"""A non-blocking TCP connection factory.
.. versionchanged:: 5.0
The ``io_loop`` argument (deprecated since version 4.1) has been removed.
"""
def __init__(self, resolver=None):
if resolver is not None:
self.resolver = resolver
self._own_resolver = False
else:
self.resolver = Resolver()
self._own_resolver = True
def close(self):
if self._own_resolver:
self.resolver.close()
@gen.coroutine
def connect(self, host, port, af=socket.AF_UNSPEC, ssl_options=None,
max_buffer_size=None, source_ip=None, source_port=None,
timeout=None):
"""Connect to the given host and port.
Asynchronously returns an `.IOStream` (or `.SSLIOStream` if
``ssl_options`` is not None).
Using the ``source_ip`` kwarg, one can specify the source
IP address to use when establishing the connection.
In case the user needs to resolve and
use a specific interface, it has to be handled outside
of Tornado as this depends very much on the platform.
Raises `TimeoutError` if the input future does not complete before
``timeout``, which may be specified in any form allowed by
`.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time
relative to `.IOLoop.time`)
Similarly, when the user requires a certain source port, it can
be specified using the ``source_port`` arg.
.. versionchanged:: 4.5
Added the ``source_ip`` and ``source_port`` arguments.
.. versionchanged:: 5.0
Added the ``timeout`` argument.
"""
if timeout is not None:
if isinstance(timeout, numbers.Real):
timeout = IOLoop.current().time() + timeout
elif isinstance(timeout, datetime.timedelta):
timeout = IOLoop.current().time() + timedelta_to_seconds(timeout)
else:
raise TypeError("Unsupported timeout %r" % timeout)
if timeout is not None:
addrinfo = yield gen.with_timeout(
timeout, self.resolver.resolve(host, port, af))
else:
addrinfo = yield self.resolver.resolve(host, port, af)
connector = _Connector(
addrinfo,
functools.partial(self._create_stream, max_buffer_size,
source_ip=source_ip, source_port=source_port)
)
af, addr, stream = yield connector.start(connect_timeout=timeout)
# TODO: For better performance we could cache the (af, addr)
# information here and re-use it on subsequent connections to
# the same host. (http://tools.ietf.org/html/rfc6555#section-4.2)
if ssl_options is not None:
if timeout is not None:
stream = yield gen.with_timeout(timeout, stream.start_tls(
False, ssl_options=ssl_options, server_hostname=host))
else:
stream = yield stream.start_tls(False, ssl_options=ssl_options,
server_hostname=host)
raise gen.Return(stream)
def _create_stream(self, max_buffer_size, af, addr, source_ip=None,
source_port=None):
# Always connect in plaintext; we'll convert to ssl if necessary
# after one connection has completed.
source_port_bind = source_port if isinstance(source_port, int) else 0
source_ip_bind = source_ip
if source_port_bind and not source_ip:
# User required a specific port, but did not specify
# a certain source IP, will bind to the default loopback.
source_ip_bind = '::1' if af == socket.AF_INET6 else '127.0.0.1'
# Trying to use the same address family as the requested af socket:
# - 127.0.0.1 for IPv4
# - ::1 for IPv6
socket_obj = socket.socket(af)
set_close_exec(socket_obj.fileno())
if source_port_bind or source_ip_bind:
# If the user requires binding also to a specific IP/port.
try:
socket_obj.bind((source_ip_bind, source_port_bind))
except socket.error:
socket_obj.close()
# Fail loudly if unable to use the IP/port.
raise
try:
stream = IOStream(socket_obj,
max_buffer_size=max_buffer_size)
except socket.error as e:
fu = Future()
fu.set_exception(e)
return fu
else:
return stream, stream.connect(addr)
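# Added illustration (not part of upstream Tornado): _Connector.split
# partitions a getaddrinfo-style list by the family of its first entry, the
# first step of the "Happy Eyeballs" strategy described above.  The addresses
# below are hypothetical.
if __name__ == "__main__":
    _addrinfo = [(socket.AF_INET6, ("::1", 80)),
                 (socket.AF_INET, ("127.0.0.1", 80)),
                 (socket.AF_INET6, ("2001:db8::2", 80))]
    _primary, _secondary = _Connector.split(_addrinfo)
    assert _primary == [(socket.AF_INET6, ("::1", 80)),
                        (socket.AF_INET6, ("2001:db8::2", 80))]
    assert _secondary == [(socket.AF_INET, ("127.0.0.1", 80))]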
|
janchorowski/fuel | refs/heads/master | fuel/converters/iris.py | 18 | import os
import h5py
import numpy
from fuel.converters.base import fill_hdf5_file
def convert_iris(directory, output_directory, output_filename='iris.hdf5'):
"""Convert the Iris dataset to HDF5.
Converts the Iris dataset to an HDF5 dataset compatible with
:class:`fuel.datasets.Iris`. The converted dataset is
saved as 'iris.hdf5'.
This method assumes the existence of the file `iris.data`.
Parameters
----------
directory : str
Directory in which input files reside.
output_directory : str
Directory in which to save the converted dataset.
output_filename : str, optional
        Name of the saved dataset. Defaults to 'iris.hdf5'.
Returns
-------
output_paths : tuple of str
Single-element tuple containing the path to the converted dataset.
"""
classes = {b'Iris-setosa': 0, b'Iris-versicolor': 1, b'Iris-virginica': 2}
data = numpy.loadtxt(
os.path.join(directory, 'iris.data'),
converters={4: lambda x: classes[x]},
delimiter=',')
features = data[:, :-1].astype('float32')
targets = data[:, -1].astype('uint8').reshape((-1, 1))
data = (('all', 'features', features),
('all', 'targets', targets))
output_path = os.path.join(output_directory, output_filename)
h5file = h5py.File(output_path, mode='w')
fill_hdf5_file(h5file, data)
h5file['features'].dims[0].label = 'batch'
h5file['features'].dims[1].label = 'feature'
h5file['targets'].dims[0].label = 'batch'
h5file['targets'].dims[1].label = 'index'
h5file.flush()
h5file.close()
return (output_path,)
def fill_subparser(subparser):
"""Sets up a subparser to convert the Iris dataset file.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `iris` command.
"""
return convert_iris
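# Added sketch (not part of upstream Fuel): converting a tiny hand-written
# `iris.data` end to end; the temporary directory stands in for real paths.
if __name__ == '__main__':
    import tempfile
    tmpdir = tempfile.mkdtemp()
    with open(os.path.join(tmpdir, 'iris.data'), 'w') as f:
        f.write('5.1,3.5,1.4,0.2,Iris-setosa\n'
                '6.3,3.3,6.0,2.5,Iris-virginica\n')
    # Produces a one-element tuple: ('<tmpdir>/iris.hdf5',)
    print(convert_iris(tmpdir, tmpdir))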
|
bowlofstew/Impala | refs/heads/cdh5-trunk | thirdparty/gtest-1.6.0/test/gtest_output_test.py | 1733 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the text output of Google C++ Testing Framework.
SYNOPSIS
gtest_output_test.py --build_dir=BUILD/DIR --gengolden
# where BUILD/DIR contains the built gtest_output_test_ file.
gtest_output_test.py --gengolden
gtest_output_test.py
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
import sys
import gtest_test_utils
# The flag for generating the golden file
GENGOLDEN_FLAG = '--gengolden'
CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
IS_WINDOWS = os.name == 'nt'
# TODO([email protected]): remove the _lin suffix.
GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
# At least one command we exercise must not have the
# --gtest_internal_skip_environment_and_ad_hoc_tests flag.
COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
'--gtest_print_time',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
COMMAND_WITH_DISABLED = (
{}, [PROGRAM_PATH,
'--gtest_also_run_disabled_tests',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=*DISABLED_*'])
COMMAND_WITH_SHARDING = (
{'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
[PROGRAM_PATH,
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=PassingTest.*'])
GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
def ToUnixLineEnding(s):
"""Changes all Windows/Mac line endings in s to UNIX line endings."""
return s.replace('\r\n', '\n').replace('\r', '\n')
def RemoveLocations(test_output):
"""Removes all file location info from a Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with all file location info (in the form of
'DIRECTORY/FILE_NAME:LINE_NUMBER: 'or
'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
'FILE_NAME:#: '.
"""
return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\: ', r'\1:#: ', test_output)
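# For example: RemoveLocations('src/gtest.cc:47: Failure') yields
# 'gtest.cc:#: Failure', and the Windows form 'src\gtest.cc(47): Failure'
# normalizes to the same string.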
def RemoveStackTraceDetails(output):
"""Removes all stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n',
'Stack trace: (omitted)\n\n', output)
def RemoveStackTraces(output):
"""Removes all traces of stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
def RemoveTime(output):
"""Removes all time information from a Google Test program's output."""
return re.sub(r'\(\d+ ms', '(? ms', output)
def RemoveTypeInfoDetails(test_output):
"""Removes compiler-specific type info from Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with type information normalized to canonical form.
"""
# some compilers output the name of type 'unsigned int' as 'unsigned'
return re.sub(r'unsigned int', 'unsigned', test_output)
def NormalizeToCurrentPlatform(test_output):
"""Normalizes platform specific output details for easier comparison."""
if IS_WINDOWS:
# Removes the color information that is not present on Windows.
test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output)
# Changes failure message headers into the Windows format.
test_output = re.sub(r': Failure\n', r': error: ', test_output)
# Changes file(line_number) to file:line_number.
test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output)
return test_output
def RemoveTestCounts(output):
"""Removes test counts from a Google Test program's output."""
output = re.sub(r'\d+ tests?, listed below',
'? tests, listed below', output)
output = re.sub(r'\d+ FAILED TESTS',
'? FAILED TESTS', output)
output = re.sub(r'\d+ tests? from \d+ test cases?',
'? tests from ? test cases', output)
output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
r'? tests from \1', output)
return re.sub(r'\d+ tests?\.', '? tests.', output)
def RemoveMatchingTests(test_output, pattern):
"""Removes output of specified tests from a Google Test program's output.
This function strips not only the beginning and the end of a test but also
all output in between.
Args:
test_output: A string containing the test output.
pattern: A regex string that matches names of test cases or
tests to remove.
Returns:
Contents of test_output with tests whose names match pattern removed.
"""
test_output = re.sub(
r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n' % (
pattern, pattern),
'',
test_output)
return re.sub(r'.*%s.*\n' % pattern, '', test_output)
def NormalizeOutput(output):
"""Normalizes output (the output of gtest_output_test_.exe)."""
output = ToUnixLineEnding(output)
output = RemoveLocations(output)
output = RemoveStackTraceDetails(output)
output = RemoveTime(output)
return output
def GetShellCommandOutput(env_cmd):
"""Runs a command in a sub-process, and returns its output in a string.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
Returns:
A string with the command's combined standard and diagnostic output.
"""
# Spawns cmd in a sub-process, and gets its standard I/O file objects.
# Set and save the environment properly.
environ = os.environ.copy()
environ.update(env_cmd[0])
p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)
return p.output
def GetCommandOutput(env_cmd):
"""Runs a command and returns its output with all file location
info stripped off.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
"""
# Disables exception pop-ups on Windows.
environ, cmdline = env_cmd
environ = dict(environ) # Ensures we are modifying a copy.
environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'
return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))
def GetOutputOfAllCommands():
"""Returns concatenated output from several representative commands."""
return (GetCommandOutput(COMMAND_WITH_COLOR) +
GetCommandOutput(COMMAND_WITH_TIME) +
GetCommandOutput(COMMAND_WITH_DISABLED) +
GetCommandOutput(COMMAND_WITH_SHARDING))
test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
SUPPORTS_STACK_TRACES = False
CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
SUPPORTS_TYPED_TESTS and
SUPPORTS_THREADS)
class GTestOutputTest(gtest_test_utils.TestCase):
def RemoveUnsupportedTests(self, test_output):
if not SUPPORTS_DEATH_TESTS:
test_output = RemoveMatchingTests(test_output, 'DeathTest')
if not SUPPORTS_TYPED_TESTS:
test_output = RemoveMatchingTests(test_output, 'TypedTest')
test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
if not SUPPORTS_THREADS:
test_output = RemoveMatchingTests(test_output,
'ExpectFailureWithThreadsTest')
test_output = RemoveMatchingTests(test_output,
'ScopedFakeTestPartResultReporterTest')
test_output = RemoveMatchingTests(test_output,
'WorksConcurrently')
if not SUPPORTS_STACK_TRACES:
test_output = RemoveStackTraces(test_output)
return test_output
def testOutput(self):
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'rb')
    # A mis-configured source control system can cause \r to appear in EOL
    # sequences when we read the golden file, irrespective of the operating
    # system used. Therefore, we need to strip those \r's from newlines
# unconditionally.
golden = ToUnixLineEnding(golden_file.read())
golden_file.close()
# We want the test to pass regardless of certain features being
# supported or not.
# We still have to remove type name specifics in all cases.
normalized_actual = RemoveTypeInfoDetails(output)
normalized_golden = RemoveTypeInfoDetails(golden)
if CAN_GENERATE_GOLDEN_FILE:
self.assertEqual(normalized_golden, normalized_actual)
else:
normalized_actual = NormalizeToCurrentPlatform(
RemoveTestCounts(normalized_actual))
normalized_golden = NormalizeToCurrentPlatform(
RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
# This code is very handy when debugging golden file differences:
if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_actual.txt'), 'wb').write(
normalized_actual)
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_golden.txt'), 'wb').write(
normalized_golden)
self.assertEqual(normalized_golden, normalized_actual)
if __name__ == '__main__':
if sys.argv[1:] == [GENGOLDEN_FLAG]:
if CAN_GENERATE_GOLDEN_FILE:
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'wb')
golden_file.write(output)
golden_file.close()
else:
message = (
"""Unable to write a golden file when compiled in an environment
that does not support all the required features (death tests, typed tests,
and multiple threads). Please generate the golden file using a binary built
with those features enabled.""")
sys.stderr.write(message)
sys.exit(1)
else:
gtest_test_utils.Main()
|
turbokongen/home-assistant | refs/heads/dev | homeassistant/components/juicenet/switch.py | 14 | """Support for monitoring juicenet/juicepoint/juicebox based EVSE switches."""
from homeassistant.components.switch import SwitchEntity
from .const import DOMAIN, JUICENET_API, JUICENET_COORDINATOR
from .entity import JuiceNetDevice
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the JuiceNet switches."""
entities = []
juicenet_data = hass.data[DOMAIN][config_entry.entry_id]
api = juicenet_data[JUICENET_API]
coordinator = juicenet_data[JUICENET_COORDINATOR]
for device in api.devices:
entities.append(JuiceNetChargeNowSwitch(device, coordinator))
async_add_entities(entities)
class JuiceNetChargeNowSwitch(JuiceNetDevice, SwitchEntity):
"""Implementation of a JuiceNet switch."""
def __init__(self, device, coordinator):
"""Initialise the switch."""
super().__init__(device, "charge_now", coordinator)
@property
def name(self):
"""Return the name of the device."""
return f"{self.device.name} Charge Now"
@property
def is_on(self):
"""Return true if switch is on."""
return self.device.override_time != 0
async def async_turn_on(self, **kwargs):
"""Charge now."""
await self.device.set_override(True)
async def async_turn_off(self, **kwargs):
"""Don't charge now."""
await self.device.set_override(False)
|
PandaWei/tp-libvirt | refs/heads/master | libvirt/tests/src/virsh_cmd/domain/virsh_desc.py | 8 | import logging
import os
from autotest.client.shared import error
from virttest import virsh
def run(test, params, env):
"""
Test command: virsh desc.
    This command allows one to show or modify the description or title of a domain.
    1). For a running domain, get/set description&title with options.
    2). For a shut-off domain, get/set description&title with options.
    3). For a persistent/transient domain, get/set description&title with options.
"""
vm_name = params.get("main_vm")
vm = env.get_vm(vm_name)
options = params.get("desc_option", "")
persistent_vm = params.get("persistent_vm", "yes")
def run_cmd(name, options, desc_str, status_error):
"""
Run virsh desc command
:return: cmd output
"""
cmd_result = virsh.desc(name, options, desc_str, ignore_status=True,
debug=True)
output = cmd_result.stdout.strip()
err = cmd_result.stderr.strip()
status = cmd_result.exit_status
if status_error == "no" and status:
raise error.TestFail(err)
elif status_error == "yes" and status == 0:
raise error.TestFail("Expect fail, but run successfully.")
return output
def vm_state_switch():
"""
Switch the vm state
"""
if vm.is_dead():
vm.start()
if vm.is_alive():
vm.destroy()
def desc_check(name, desc_str, state_switch):
"""
Check the domain's description or title
"""
ret = False
if state_switch:
vm_state_switch()
output = run_cmd(name, "", "", "no")
if desc_str == output:
logging.debug("Domain desc check successfully.")
ret = True
else:
logging.error("Domain desc check fail.")
if state_switch:
vm_state_switch()
return ret
def run_test():
"""
Get/Set vm desc by running virsh desc command.
"""
status_error = params.get("status_error", "no")
desc_str = params.get("desc_str", "")
state_switch = False
# Test 1: get vm desc
run_cmd(vm_name, options, "", status_error)
# Test 2: set vm desc
if options.count("--config") and vm.is_persistent():
state_switch = True
if options.count("--live") and vm.state() == "shut off":
status_error = "yes"
if len(desc_str) == 0:
desc_str = "New Description/title for the %s vm" % vm.state()
logging.debug("Use the default desc message: %s", desc_str)
run_cmd(vm_name, options, desc_str, status_error)
desc_check(vm_name, desc_str, state_switch)
# Prepare transient/persistent vm
original_xml = vm.backup_xml()
if persistent_vm == "no" and vm.is_persistent():
vm.undefine()
elif persistent_vm == "yes" and not vm.is_persistent():
vm.define(original_xml)
try:
if vm.is_dead():
vm.start()
run_test()
        # Recover the vm and shut it off
if persistent_vm == "yes":
vm.define(original_xml)
vm.destroy()
run_test()
finally:
vm.destroy()
virsh.define(original_xml)
os.remove(original_xml)
|
karwa/swift | refs/heads/master | utils/cmpcodesize/tests/__init__.py | 65 | # cmpcodesize/tests/__init__.py - Unit tests for cmpcodesize -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# This file needs to be here in order for Python to treat the
# utils/cmpcodesize/tests/ directory as a module.
#
# ----------------------------------------------------------------------------
|
adamlwgriffiths/Pyglet | refs/heads/master | tests/image/BMP_RGB_32BPP_LOAD.py | 29 | #!/usr/bin/env python
'''Test load using the Python BMP loader. You should see the rgb_32bpp.bmp
image on a checkerboard background.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import unittest
import base_load
from pyglet.image.codecs.bmp import BMPImageDecoder
class TEST_SUITE(base_load.TestLoad):
texture_file = 'rgb_32bpp.bmp'
decoder = BMPImageDecoder()
if __name__ == '__main__':
unittest.main()
|
luzfcb/luzfcb_dj_simplelock | refs/heads/master | luzfcb_dj_simplelock/forms.py | 1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
# TODO: Include hash generation to verify whether or not the lock can be revalidated
class DeleteForm(forms.Form):
def __init__(self, id_obj=None, *args, **kwargs):
self._id = id_obj
super(DeleteForm, self).__init__(*args, **kwargs)
idd = forms.CharField(widget=forms.HiddenInput)
hashe = forms.CharField(widget=forms.HiddenInput)
def clean(self):
idd = self.cleaned_data.get('idd')
hashe = self.cleaned_data.get('hashe')
if not self._id == idd and not idd == hashe:
raise forms.ValidationError(
"Hash errado"
)
class ReValidateForm(forms.Form):
def __init__(self, id_obj=None, *args, **kwargs):
self._id = id_obj
super(ReValidateForm, self).__init__(*args, **kwargs)
idd = forms.CharField(widget=forms.HiddenInput)
hashe = forms.CharField(widget=forms.HiddenInput)
def clean(self):
idd = self.cleaned_data.get('idd')
hashe = self.cleaned_data.get('hashe')
if not self._id == idd and not idd == hashe:
raise forms.ValidationError(
"Hash errado"
)
|
BhavyaLight/kaggle-predicting-Red-Hat-Business-Value | refs/heads/master | Initial_Classification_Models/Ensemble/VotingClassifier.py | 1 | import pandas as pd
from Classification import Utility
# test_dataset = Utility.loadModel("../Final/test_randomforest")
# test_dataset.set_index(["activity_id"]).drop('act_0')
test_dataset = pd.read_csv("../../Data/Outputs/Best/randomForest500Model_new.csv")
test_dataset = test_dataset[["activity_id", "outcome"]]
test_dataset["outcome_RF"] = test_dataset["outcome"]
# print(len(test_dataset["outcome_RF"]))
xgb = pd.read_csv("../../Data/XGB.csv")
xgb["out_xgb"] = xgb["outcome"]
lr = pd.read_csv("../../Data/LR.csv")
lr["out_lr"] = lr["outcome"]
manipulation = pd.read_csv("../../Data/Outputs/manipulation.csv")
manipulation["out_man"] = manipulation["outcome"]
# print(len(lr["out_lr"]))
output = pd.merge(xgb, lr, on="activity_id")
output = pd.merge(test_dataset, output, on="activity_id")
output = pd.merge(output, manipulation, on='activity_id')
# print(output)'
print(output.columns)
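# The blend below is a convex combination (the weights sum to 1.0): a weighted
# soft vote over the XGBoost, random forest and logistic regression outputs.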
output["outcome"] = (0.60*output["out_xgb"] + 0.35*output["outcome_RF"] + 0.05*output["out_lr"])
output.set_index(["activity_id"])
# output.loc[len(output)] = ["act_0", "act_0", "act_0", "act_0", "act_0", "act_0"]
Utility.saveInOutputForm(output, "60XGB_35rf_5lr.csv", "ensemble") |
CompMusic/essentia | refs/heads/master | src/examples/python/experimental/beatogram.py | 10 | #!/usr/bin/env python
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import os, sys
from os.path import join
import essentia
from essentia.streaming import *
import essentia.standard as std
from pylab import median, mean, argmax
import matplotlib
#matplotlib.use('Agg') # in order to not grab focus on screen while batch processing
import matplotlib.pyplot as pyplot
import numpy as np
from numpy import shape, zeros, fabs
# for key input
import termios, sys, os, subprocess
TERMIOS = termios
import copy
# for alsa
if sys.platform =='linux2':
import wave, alsaaudio
import time
import thread
barkBands = [0.0, 50.0, 100.0, 150.0, 200.0, 300.0, 400.0, 510.0, 630.0, 770.0,
920.0, 1080.0, 1270.0, 1480.0, 1720.0, 2000.0, 2320.0, 2700.0,
3150.0, 3700.0, 4400.0, 5300.0, 6400.0, 7700.0,
9500.0, 12000.0, 15500.0, 20500.0, 27000.0]
scheirerBands = [ 0.0, 200.0, 400.0, 800.0, 1600.0, 3200.0, 22000.0]
scheirerBands_extended = [ 0.0, 50.0, 100.0, 150.0, 200.0, 400.0, 800.0, 1600.0, 3200.0, 5000.0, 10000.0]
EqBands = [20.0, 150.0, 400.0, 3200.0, 7000.0, 22000.0]
EqBands2 =[0.0, 75.0, 150.0, 400.0, 3200.0, 7000.0]
DOWNMIX ='mix'
# defines for novelty curve:
FRAMESIZE = 1024
HOPSIZE = FRAMESIZE/2
WEIGHT='flat' #'supplied' #'flat'
SAMPLERATE=44100.0
WINDOW='hann' #'blackmanharris92'
BEATWINDOW=16 # number of beats where to compute statistics
# tempogram defines:
FRAMERATE = float(SAMPLERATE)/HOPSIZE
TEMPO_FRAMESIZE = 4;
TEMPO_OVERLAP=2;
STARTTIME = 0
ENDTIME = 2000
def computeOnsets(filename, pool):
loader = EasyLoader(filename=filename,
sampleRate=pool['samplerate'],
startTime=STARTTIME, endTime=ENDTIME,
downmix=pool['downmix'])
onset = OnsetRate()
loader.audio >> onset.signal
onset.onsetTimes >> (pool, 'ticks')
onset.onsetRate >> None
essentia.run(loader)
pool.set('size', loader.audio.totalProduced())
pool.set('length', pool['size']/pool['samplerate'])
def computeSegmentation(filename, pool):
sampleRate = 44100
frameSize = 2048
hopSize = frameSize/2
audio = EqloudLoader(filename = filename,
downmix=pool['downmix'],
sampleRate=sampleRate)
fc = FrameCutter(frameSize=frameSize, hopSize=hopSize, silentFrames='keep')
w = Windowing(type='blackmanharris62')
spec = Spectrum()
mfcc = MFCC(highFrequencyBound=8000)
tmpPool = essentia.Pool()
audio.audio >> fc.signal
fc.frame >> w.frame >> spec.frame
spec.spectrum >> mfcc.spectrum
mfcc.bands >> (tmpPool, 'mfcc_bands')
mfcc.mfcc>> (tmpPool, 'mfcc_coeff')
essentia.run(audio)
    # compute the transpose of the features array; don't call
    # numpy.matrix.transpose because essentia mishandles it
features = copy.deepcopy(tmpPool['mfcc_coeff'].transpose())
segments = std.SBic(cpw=1.5, size1=1000, inc1=300, size2=600, inc2=50)(features)
for segment in segments:
pool.add('segments', segment*hopSize/sampleRate)
#print pool['segments']
def computeNoveltyCurve(filename, pool):
loader = EasyLoader(filename=filename,
sampleRate=pool['samplerate'],
startTime=STARTTIME, endTime=ENDTIME,
downmix=pool['downmix'])
fc = FrameCutter(frameSize=int(pool['framesize']),
silentFrames ='noise',
hopSize=int(pool['hopsize']),
startFromZero=False)
window = Windowing(type=pool['window'],
zeroPhase=False)
#freqBands = FrequencyBands(frequencyBands=EqBands, sampleRate=pool['samplerate'])
freqBands = FrequencyBands(sampleRate=pool['samplerate'])
spec = Spectrum()
hfc = HFC()
loader.audio >> fc.signal
fc.frame >> window.frame >> spec.frame
spec.spectrum >> freqBands.spectrum
spec.spectrum >> hfc.spectrum
freqBands.bands >> (pool, 'frequency_bands')
hfc.hfc >> (pool, 'hfc')
essentia.run(loader)
pool.set('size', loader.audio.totalProduced())
pool.set('length', pool['size']/pool['samplerate'])
# compute a weighting curve that is according to frequency bands:
frequencyBands = pool['frequency_bands']
nFrames = len(frequencyBands)
weightCurve= np.sum(frequencyBands, axis=0)
weightCurve = [val/float(nFrames) for val in weightCurve]
weightCurve = essentia.normalize(weightCurve)
#pyplot.plot(weightCurve)
#pyplot.show()
noveltyCurve = std.NoveltyCurve(frameRate=pool['framerate'],
weightCurveType=pool['weight'],
weightCurve=weightCurve)(frequencyBands)
#for x in noveltyCurve: pool.add('novelty_curve', x)
#return
# derivative of hfc seems to help in finding more precise beats...
hfc = essentia.normalize(pool['hfc'])
dhfc = essentia.derivative(hfc)
for i, val in enumerate(dhfc):
if val< 0: continue
noveltyCurve[i] += val
# low pass filter novelty curve:
env = std.Envelope(attackTime=2./pool['framerate'],
releaseTime=2./pool['framerate'])(noveltyCurve)
# apply median filter:
windowSize = 8 #samples
size = len(env)
filtered = zeros(size)
for i in range(size):
start = i-windowSize
if start < 0: start = 0
end = start + windowSize
if end > size:
end = size
start = size-windowSize
filtered[i] = env[i] - np.median(env[start:end])
if filtered[i] < 0: filtered[i] = 0
#pyplot.subplot(311)
#pyplot.plot(noveltyCurve)
#pyplot.subplot(312)
#pyplot.plot(env, 'r')
#pyplot.subplot(313)
#pyplot.plot(filtered, 'g')
#pyplot.show()
#for x in noveltyCurve: pool.add('novelty_curve', x)
for x in filtered: pool.add('novelty_curve', x)
def computeBeats(filename, pool):
computeNoveltyCurve(filename, pool)
recompute = True
novelty = pool['novelty_curve']
count = 0
bpmTolerance = 5
while recompute:
gen = VectorInput(novelty)
bpmHist = BpmHistogram(frameRate=pool['framerate'],
frameSize=pool['tempo_framesize'],
overlap=int(pool['tempo_overlap']),
maxPeaks=50,
windowType='hann',
minBpm=40.0,
maxBpm=1000.0,
normalize=False,
constantTempo=False,
tempoChange=5,
weightByMagnitude=True)
gen.data >> bpmHist.novelty
bpmHist.bpm >> (pool, 'peaksBpm')
bpmHist.bpmMagnitude >> (pool, 'peaksMagnitude')
        bpmHist.harmonicBpm >> (pool, 'harmonicBpm')
bpmHist.confidence >> (pool, 'confidence')
bpmHist.ticks >> (pool, 'ticks')
bpmHist.ticksMagnitude >> (pool, 'ticksMagnitude')
bpmHist.sinusoid >> (pool, 'sinusoid')
essentia.run(gen)
## get rid of beats of beats > audio.length
#ticks = []
#ticksAmp = []
#for t, amp in zip(pool['ticks'], pool['ticksMagnitude']):
# if t < 0 or t > pool['length']: continue
# ticks.append(float(t))
# ticksAmp.append(float(amp))
#step = pool['step']
#ticks = essentia.postProcessTicks(ticks, ticksAmp, 60./pool['harmonicBpm'][0]);
sine = pool['sinusoid']
#pyplot.plot(novelty, 'k')
#pyplot.plot(sine, 'r')
#for i in range(len(novelty)-1):
# diff = novelty[i+1]-novelty[i]
# if diff > 0: novelty[i] = diff
# else: novelty[i] = 0
#pyplot.plot(novelty, 'r')
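        # Within each non-zero lobe of the predicted tempo sinusoid, keep only
        # the strongest sinusoid-weighted novelty sample; the survivors stored
        # in prodPulse become the candidate beat positions.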
prodPulse = zeros(len(novelty))
i = 0
while i < len(novelty):
if sine[i] <= 0.1:
i += 1
continue
window = []
while sine[i] != 0 and i < len(novelty):
window.append(novelty[i]*sine[i])
i+=1
peakPos = argmax(window)
peakPos = i - len(window) + peakPos
prodPulse[peakPos] = novelty[peakPos]
#pyplot.plot(prodPulse, 'g')
#pyplot.show()
ticks = []
ticksAmp = []
frameRate = pool['framerate']
bpms = pool['harmonicBpm']
print 'estimated bpm:', bpms
tatum = 60./bpms[0]
diffTick = 2*tatum
prevTick = -1
prevAmp = -1
for i, x in enumerate(prodPulse):
if x != 0:
newTick = float(i)/frameRate
if newTick < 0 or newTick > pool['length']: continue
ticks.append(newTick)
ticksAmp.append(x)
# if x != 0:
# newTick = float(i)/frameRate
# if prevTick < 0:
# ticks.append(newTick)
# ticksAmp.append(x)
# prevTick = newTick
# prevAmp = x
# else:
# diff = newTick-prevTick
# ratio = max( round(tatum/diff), round(diff/tatum))
# if (diff >= 0.9*tatum*ratio) and (diff <= 1.1*tatum*ratio):
# ticks.append(newTick)
# ticksAmp.append(x)
# prevTick = newTick
# prevAmp = x
# else: #(newTick-prevTick) < 0.75*tatum:
# newTick = (newTick*x+prevTick*prevAmp)/(x+prevAmp)
# ticks[-1] = newTick
# ticksAmp[-1] = (x+prevAmp)/2.
# prevTick = newTick
# prevAmp = (x+prevAmp)/2.
_, _, bestBpm= getMostStableTickLength(ticks)
#pool.set('bestTicksStart', bestTicks[0])
#pool.set('bestTicksEnd', bestTicks[0] + bestTicks[1])
#ticks = essentia.postProcessTicks(ticks, ticksAmp, 60./pool['harmonicBpm'][0]);
#ticks = essentia.postProcessTicks(ticks)
if fabs(bestBpm - bpms[0]) < bpmTolerance: recompute = False
else:
count+=1
if count >= 5:
bpmTolerance += 1
count = 0
print "recomputing!!!!"
novelty = copy.deepcopy(pool['sinusoid'])
pool.remove('sinusoid')
pool.remove('novelty_curve')
pool.remove('peaksBpm')
pool.remove('peaksMagnitude')
            pool.remove('harmonicBpm')
pool.remove('confidence')
pool.remove('ticks')
pool.remove('ticksMagnitude')
#pyplot.plot(prodPulse, 'g')
#pyplot.show()
print 'estimated bpm:', bpms
print 'bpms:', pool['peaksBpm']
#ticks = postProcessTicks(filename, pool)
#print 'bpm mags:', pool['peaksMagnitude']
bpmRatios = []
#for i, bpm1 in enumerate(bpms):
# bpmRatios.append([float(bpm1)/float(bpm2) for bpm2 in bpms[i:]])
#print 'bpmRatios:', bpmRatios
#print 'original nticks:', len(ticks)
#print 'step:', step
if step>1:
ticks = essentia.array(map(lambda i: ticks[i],
filter(lambda i: i%step == 0,range(len(ticks)))))
#print 'nticks:', len(ticks)
pool.remove('ticks')
pool.set('ticks', ticks)
def longestChain(dticks, startpos, period, tolerance):
pos = startpos
ubound = period*(1+tolerance)
lbound = period*(1-tolerance)
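    # count how many consecutive inter-tick intervals, starting at startpos,
    # stay within +/-`tolerance` of the candidate `period`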
while (pos < len(dticks)) and\
(lbound < dticks[pos] and dticks[pos] < ubound):
pos += 1
return pos - startpos
def getMostStableTickLength(ticks):
nticks = len(ticks)
dticks = zeros(nticks-1)
for i in range(nticks-1):
dticks[i] = (ticks[i+1] - ticks[i])
hist, distx = np.histogram(dticks, bins=50*(1+(max(dticks)-min(dticks))))
bestPeriod = distx[argmax(hist)] # there may be more than one candidate!!
bestBpm = 60./bestPeriod
print 'best period', bestPeriod
print 'best bpm:', bestBpm
#print 'hist:', hist, distx
maxLength = 0
idx = 0
for startpos in range(nticks-1):
l = longestChain(dticks, startpos, bestPeriod, 0.1)
if l > maxLength :
maxLength = l;
idx = startpos;
print 'max stable length:', idx, maxLength
return idx, maxLength, bestBpm
def postProcessTicks(audioFilename, pool):
'''Computes delta energy in order to find the correct position of the ticks'''
# get rid of beats of beats > audio.length
ticks = []
ticksAmp = []
for t, amp in zip(pool['ticks'], pool['ticksMagnitude']):
if t < 0 or t > pool['length']: continue
ticks.append(float(t))
ticksAmp.append(float(amp))
step = pool['step']
#ticks = essentia.postProcessTicks(ticks, ticksAmp, 60./pool['harmonicBpm'][0]);
#beatWindowDuration = 0.01 # seconds
#beatDuration = 0.005 # seconds
#rmsFrameSize = 64
#rmsHopSize = rmsFrameSize/2
#audio = std.MonoLoader(filename=audioFilename,
# sampleRate=pool['samplerate'],
# downmix=pool['downmix'])()
#for i, tick in enumerate(ticks):
# startTime = tick - beatWindowDuration/2.0
# if startTime < 0: startTime = 0
# endTime = startTime + beatWindowDuration + beatDuration + 0.0001
# slice = std.Trimmer(sampleRate=pool['samplerate'],
# startTime=startTime,
# endTime=endTime)(audio)
# frames = std.FrameGenerator(slice, frameSize=rmsFrameSize, hopSize=rmsHopSize)
# maxDeltaRms=0
# RMS = std.RMS()
# prevRms = 0
# pos = 0
# tickPos = pos
# for frame in frames:
# rms = RMS(frame)
# diff = rms - prevRms
# if diff > maxDeltaRms:
# tickPos = pos
# maxDeltaRms = diff
# pos+=1
# prevRms = rms
# ticks[i]= tick + tickPos*float(rmsHopSize)/pool['samplerate']
return ticks
def writeBeatFile(filename, pool) :
beatFilename = os.path.splitext(filename)[0] + '_beat.wav' #'out_beat.wav' #
audio = EasyLoader(filename=filename, downmix='mix', startTime=STARTTIME, endTime=ENDTIME)
writer = MonoWriter(filename=beatFilename)
onsetsMarker = AudioOnsetsMarker(onsets=pool['ticks'])
audio.audio >> onsetsMarker.signal >> writer.audio
essentia.run(audio)
return beatFilename
def computeBeatsLoudness(filename, pool):
loader = MonoLoader(filename=filename,
sampleRate=pool['samplerate'],
downmix=pool['downmix'])
ticks = pool['ticks']#[pool['bestTicksStart']:pool['bestTicksStart']+32]
beatsLoud = BeatsLoudness(sampleRate = pool['samplerate'],
frequencyBands = barkBands, #EqBands, #scheirerBands, #barkBands,
beats=ticks)
loader.audio >> beatsLoud.signal
beatsLoud.loudness >> (pool, 'loudness')
beatsLoud.loudnessBandRatio >> (pool, 'loudnessBandRatio')
essentia.run(loader)
def computeSpectrum(signal):
#gen = VectorInput(signal)
#fc = FrameCutter(startFromZero=False, frameSize=48, hopSize=1)
#w = Windowing(zeroPhase=False)
#spec = Spectrum()
#p = essentia.Pool()
#gen.data >> fc.signal
#fc.frame >> w.frame >> spec.frame
#spec.spectrum >> (p,'spectrum')
#essentia.run(gen)
#pyplot.imshow(p['spectrum'], cmap=pyplot.cm.hot, aspect='auto', origin='lower')
corr = std.AutoCorrelation()(signal)
pyplot.plot(corr)
pyplot.show()
print argmax(corr[2:])+2
def isPowerTwo(n):
return (n&(n-1))==0
def isEvenHarmonic(a,b):
if a < 2 or b < 2: return False
if (a<b): return isEvenHarmonic(b,a)
return (a%b == 0) and isPowerTwo(a/b)
def getHarmonics(array):
size = len(array)
hist = [0]*size
counts = [1]*size
for idx1, x in enumerate(array):
for idx2, y in enumerate(array):
if isEvenHarmonic(idx1, idx2):
hist[idx1] += y
counts[idx1] += 1
hist = [hist[i]/float(counts[i]) for i in range(size)]
return hist
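# isEvenHarmonic(a, b) holds when the larger of the two lags equals the
# smaller one times a power of two: (4, 2), (8, 2) and (12, 3) qualify, while
# (6, 2) does not because 6/2 == 3 is not a power of two.  getHarmonics()
# therefore scores each lag by averaging it with its even sub- and
# super-harmonics.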
def plot(pool, title, outputfile='out.svg', subplot=111):
''' plots bars for each beat'''
#computeSpectrum(pool['loudness'])
ticks = pool['ticks']
#barSize = min([ticks[i+1] - ticks[i] for i in range(len(ticks[:-1]))])/2.
barSize = 0.8
offset = barSize/2.
loudness = pool['loudness']
loudnessBand = pool['loudnessBandRatio'] # ticks x bands
medianRatiosPerTick = []
meanRatiosPerTick = []
for tick, energy in enumerate(loudnessBand):
medianRatiosPerTick.append(median(energy))
meanRatiosPerTick.append(mean(energy))
loudnessBand = copy.deepcopy(loudnessBand.transpose()) # bands x ticks
#xcorr = std.CrossCorrelation(minLag=0, maxLag=16)
#acorr = std.AutoCorrelation()
#bandCorr = []
#for iBand, band in enumerate(loudnessBand):
# bandCorr.append(acorr(essentia.array(band)))
nBands = len(loudnessBand)
nticks = len(loudness)
maxRatiosPerBand = []
medianRatiosPerBand = []
meanRatiosPerBand = []
for idxBand, band in enumerate(loudnessBand):
maxRatiosPerBand.append([0]*nticks)
medianRatiosPerBand.append([0]*nticks)
meanRatiosPerBand.append([0]*nticks)
for idxTick in range(nticks):
start = idxTick
end = start+BEATWINDOW
if (end>nticks):
howmuch = end-nticks
end = nticks-1
start = end-howmuch
if start < 0: start = 0
medianRatiosPerBand[idxBand][idxTick] = median(band[start:end])
maxRatiosPerBand[idxBand][idxTick] = max(band[start:end])
meanRatiosPerBand[idxBand][idxTick] = mean(band[start:end])
for iBand, band in enumerate(loudnessBand):
for tick, ratio in enumerate(band):
#if ratio < medianRatiosPerBand[iBand][tick] and\
# ratio <= medianRatiosPerTick[tick]: loudnessBand[iBand][tick]=0
bandThreshold = max(medianRatiosPerBand[iBand][tick],
meanRatiosPerBand[iBand][tick])
tickThreshold = max(medianRatiosPerTick[tick],
meanRatiosPerTick[tick])
if ratio < bandThreshold and ratio <= tickThreshold:
loudnessBand[iBand][tick]=0
else:
loudnessBand[iBand][tick] *= loudness[tick]
#if loudnessBand[iBand][tick] > 1 : loudnessBand[iBand][tick] = 1
acorr = std.AutoCorrelation()
bandCorr = []
maxCorr = []
for iBand, band in enumerate(loudnessBand):
bandCorr.append(acorr(essentia.array(band)))
maxCorr.append(argmax(bandCorr[-1][2:])+2)
# use as much window space as possible:
pyplot.subplots_adjust(left=0.05, right=0.95, bottom=0.05, top=0.95)
pyplot.subplot(511)
pyplot.imshow(bandCorr, cmap=pyplot.cm.hot, aspect='auto', origin='lower', interpolation='nearest')
print 'max correlation', maxCorr
sumCorr = []
for tick in range(nticks):
total = 0
for band in bandCorr:
total += band[tick]
sumCorr.append(total)
sumCorr[0] = 0
sumCorr[1] = 0
pyplot.subplot(512)
maxAlpha = max(sumCorr)
for i,val in enumerate(sumCorr):
alpha = max(0,min(val/maxAlpha, 1))
pyplot.bar(i, 1 , barSize, align='edge',
bottom=0,alpha=alpha,
color='r', edgecolor='w', linewidth=.3)
print 'max sum correlation', argmax(sumCorr[2:])+2
hist = getHarmonics(sumCorr)
maxHist = argmax(hist)
print 'max histogram', maxHist
#for idx,val in enumerate(hist):
# if val < maxHist: hist[idx] = 0
pyplot.subplot(513)
for i,val in enumerate(hist):
pyplot.bar(i, val , barSize, align='edge',
bottom=0, color='r', edgecolor='w', linewidth=.3)
peakDetect = std.PeakDetection(maxPeaks=5,
orderBy='amplitude',
minPosition=0,
maxPosition=len(sumCorr)-1,
range=len(sumCorr)-1)
peaks = peakDetect(sumCorr)[0]
peaks = [round(x+1e-15) for x in peaks]
print 'Peaks:',peaks
pyplot.subplot(514)
maxAlpha = max(sumCorr)
for i,val in enumerate(sumCorr):
alpha = max(0,min(val/maxAlpha, 1))
pyplot.bar(i, val, barSize, align='edge',
bottom=0,alpha=alpha,
color='r', edgecolor='w', linewidth=.3)
# multiply both histogram and sum corr to have a weighted histogram:
wHist = essentia.array(hist)*sumCorr*acorr(loudness)
maxHist = argmax(wHist)
print 'max weighted histogram', maxHist
pyplot.subplot(515)
maxAlpha = max(wHist)
for i,val in enumerate(wHist):
alpha = max(0,min(val/maxAlpha, 1))
pyplot.bar(i, val, barSize, align='edge',
bottom=0,alpha=alpha,
color='r', edgecolor='w', linewidth=.3)
pyplot.savefig(outputfile, dpi=300)
#pyplot.show()
return
def ossplay(filename): # play audio thru oss
from wave import open as waveOpen
from ossaudiodev import open as ossOpen
s = waveOpen(filename,'rb')
(nc,sw,fr,nf,comptype, compname) = s.getparams( )
dsp = ossOpen('/dev/dsp','w')
try:
from ossaudiodev import AFMT_S16_NE
    except ImportError:
        # AFMT_S16_NE is missing on some platforms; pick the native-endian
        # signed 16-bit format from the machine byte order by hand.
        from sys import byteorder
        from ossaudiodev import AFMT_S16_LE, AFMT_S16_BE
        if byteorder == "little":
            AFMT_S16_NE = AFMT_S16_LE
        else:
            AFMT_S16_NE = AFMT_S16_BE
dsp.setparameters(AFMT_S16_NE, nc, fr)
data = s.readframes(nf)
s.close()
dsp.write(data)
dsp.close()
def getkey(audioFilename, device, f, card, lock):
c = None
b = True
while b:
#fd = sys.stdin.fileno()
#old = termios.tcgetattr(fd)
#new = termios.tcgetattr(fd)
#new[3] = new[3] & ~TERMIOS.ICANON & ~TERMIOS.ECHO
#new[6][TERMIOS.VMIN] = 1
#new[6][TERMIOS.VTIME] = 0
#termios.tcsetattr(fd, TERMIOS.TCSANOW, new)
#c = None
lock.acquire()
#try:
# c = os.read(fd, 1)
#finally:
# termios.tcsetattr(fd, TERMIOS.TCSAFLUSH, old)
#if c == '\n': ## break on a Return/Enter keypress
# b = False
# return
#if c==' ': playAudio(audioFilename)
#else: print 'got', c
#ossplay(audioFilename)
alsaplay(audioFilename, device, f, card)
lock.release()
time.sleep(0.1)
def alsaplay(filename, device, f, card):
device.setchannels(f.getnchannels())
device.setrate(f.getframerate())
# 8bit is unsigned in wav files
if f.getsampwidth() == 1:
device.setformat(alsaaudio.PCM_FORMAT_U8)
# Otherwise we assume signed data, little endian
elif f.getsampwidth() == 2:
device.setformat(alsaaudio.PCM_FORMAT_S16_LE)
elif f.getsampwidth() == 3:
device.setformat(alsaaudio.PCM_FORMAT_S24_LE)
elif f.getsampwidth() == 4:
device.setformat(alsaaudio.PCM_FORMAT_S32_LE)
else:
raise ValueError('Unsupported format')
device.setperiodsize(320)
data = f.readframes(320)
while data:
device.write(data)
data = f.readframes(320)
f.setpos(0)
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print 'usage: %s <input_file> [step]' % sys.argv[0]
        sys.exit(1)
step = 1
if len(sys.argv) > 2:
step = int(sys.argv[-1])
inputfilename = sys.argv[1]
ext = os.path.splitext(inputfilename)[1]
if ext == '.txt': # input file contains a list of audio files
files = open(inputfilename).read().split('\n')[:-1]
else: files = [inputfilename]
for audiofile in files:
print "*"*70
print "Processing ", audiofile
print "*"*70
try:
print 'realBpm', open(audiofile.replace('wav', 'bpm')).read()
except:
print 'realBpm not found'
pool = essentia.Pool()
pool.set('downmix', DOWNMIX)
pool.set('framesize', FRAMESIZE)
pool.set('hopsize', HOPSIZE)
pool.set('weight', WEIGHT)
pool.set('samplerate', SAMPLERATE)
pool.set('window', WINDOW)
pool.set('framerate', FRAMERATE)
pool.set('tempo_framesize', TEMPO_FRAMESIZE)
pool.set('tempo_overlap', TEMPO_OVERLAP)
pool.set('step', step)
#computeSegmentation(audiofile, pool)
#segments = pool['segments']
computeBeats(audiofile, pool)
beatFilename = writeBeatFile(audiofile, pool)
computeBeatsLoudness(audiofile, pool)
imgfilename = os.path.splitext(audiofile)[0]+'.png'
#imgfilename = imgfilename.split(os.sep)[-1]
#print 'plotting', imgfilename
if sys.platform == 'darwin' or sys.platform == 'win32':
plot(pool,'beats loudness ' + str(audiofile), imgfilename);
else:
card = 'default'
f = wave.open(beatFilename, 'rb')
# print '%d channels, sampling rate: %d \n' % (f.getnchannels(),
# f.getframerate())
device = alsaaudio.PCM(card=card)
lock = thread.allocate_lock()
thread.start_new_thread(getkey, (beatFilename, device, f, card, lock))
plot(pool,'beats loudness ' + audiofile, imgfilename);
f.close()
thread.exit()
#print 'deleting beatfile:', beatFilename
#subprocess.call(['rm', beatFilename])
|
jvkops/django | refs/heads/master | tests/version/__init__.py | 12133432 | |
xadahiya/django | refs/heads/master | django/contrib/messages/apps.py | 591 | from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MessagesConfig(AppConfig):
name = 'django.contrib.messages'
verbose_name = _("Messages")
|
JohanSJA/Qian | refs/heads/master | qian/qian/urls.py | 1 | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'base.views.home', name='home'),
url(r"^users/", include("users.urls")),
url(r'^stocks/', include('stocks.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
|
codepantry/django | refs/heads/master | tests/validation/test_picklable.py | 576 | import pickle
from unittest import TestCase
from django.core.exceptions import ValidationError
class PickableValidationErrorTestCase(TestCase):
def test_validationerror_is_picklable(self):
original = ValidationError('a', code='something')
unpickled = pickle.loads(pickle.dumps(original))
self.assertIs(unpickled, unpickled.error_list[0])
self.assertEqual(original.message, unpickled.message)
self.assertEqual(original.code, unpickled.code)
original = ValidationError('a', code='something')
unpickled = pickle.loads(pickle.dumps(ValidationError(original)))
self.assertIs(unpickled, unpickled.error_list[0])
self.assertEqual(original.message, unpickled.message)
self.assertEqual(original.code, unpickled.code)
original = ValidationError(['a', 'b'])
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)
original = ValidationError(['a', 'b'])
unpickled = pickle.loads(pickle.dumps(ValidationError(original)))
self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)
original = ValidationError([ValidationError('a'), ValidationError('b')])
unpickled = pickle.loads(pickle.dumps(original))
self.assertIs(unpickled.args[0][0], unpickled.error_list[0])
self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)
message_dict = {'field1': ['a', 'b'], 'field2': ['c', 'd']}
original = ValidationError(message_dict)
unpickled = pickle.loads(pickle.dumps(original))
self.assertEqual(unpickled.message_dict, message_dict)
|
mglukhikh/intellij-community | refs/heads/master | python/testData/inspections/PyProtectedMemberInspection/ImportFromTheSamePackage/my_package/_package_internal_module.py | 39 | def very_smart_func(param):
return None |
Tikitaco/centinel-iclab | refs/heads/master | centinel/client_config.py | 1 | import ConfigParser
import os
from utils.colors import bcolors
class client_conf:
c = { 'server_address' : "24.189.208.220",
'server_port' : "8082",
'centinel_homedir' : os.path.dirname(__file__),
'experiment_data_dir' : os.path.join(os.path.dirname(__file__), "experiment_data"),
'experiments_py_dir' : os.path.join(os.path.dirname(__file__), "py_experiments"),
'configurable_experiments_dir' : os.path.join(os.path.dirname(__file__), "conf_experiments"),
'results_dir' : os.path.join(os.path.dirname(__file__), "results"),
'client_keys_dir' : os.path.join(os.path.dirname(__file__), "client_keys"),
'results_archive_dir' : os.path.join(os.path.dirname(__file__), "results_archive"),
'config_file' : os.path.join(os.path.dirname(__file__), "confs/client_config.cfg"),
'server_public_rsa' : os.path.join(os.path.dirname(__file__), "client_keys/server_public_rsa.pem"),
'client_public_rsa' : os.path.join(os.path.dirname(__file__), "client_keys/client_public_rsa.pem"),
'client_private_rsa' : os.path.join(os.path.dirname(__file__), "client_keys/client_private_rsa.pem"),
'client_tag' : "unauthorized"}
def __init__(self,conf_file = '' ):
parser = ConfigParser.ConfigParser()
try:
if not conf_file:
conf_file = self.c['config_file']
parser.read([conf_file,])
self.c.update(parser.items('CentinelClient'))
self.config_read = True
except ConfigParser.Error, message:
print bcolors.FAIL + 'Error reading config file (did you run init.sh?).' + bcolors.ENDC
self.config_read = False
|
camptocamp/odoo | refs/heads/master | addons/website_twitter/controllers/__init__.py | 7372 | import main
|
aequitas/home-assistant | refs/heads/dev | homeassistant/components/aruba/device_tracker.py | 7 | """Support for Aruba Access Points."""
import logging
import re
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import (
DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
_LOGGER = logging.getLogger(__name__)
_DEVICES_REGEX = re.compile(
r'(?P<name>([^\s]+)?)\s+' +
r'(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})\s+' +
r'(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))\s+')
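# Example (hypothetical "show clients" row):
#   "my-laptop  192.168.1.23  aa:bb:cc:dd:ee:ff  ..."
# matches with name='my-laptop', ip='192.168.1.23', mac='aa:bb:cc:dd:ee:ff'.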
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string
})
def get_scanner(hass, config):
"""Validate the configuration and return a Aruba scanner."""
scanner = ArubaDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None
class ArubaDeviceScanner(DeviceScanner):
"""This class queries a Aruba Access Point for connected devices."""
def __init__(self, config):
"""Initialize the scanner."""
self.host = config[CONF_HOST]
self.username = config[CONF_USERNAME]
self.password = config[CONF_PASSWORD]
self.last_results = {}
# Test the router is accessible.
data = self.get_aruba_data()
self.success_init = data is not None
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return [client['mac'] for client in self.last_results]
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
if not self.last_results:
return None
for client in self.last_results:
if client['mac'] == device:
return client['name']
return None
def _update_info(self):
"""Ensure the information from the Aruba Access Point is up to date.
Return boolean if scanning successful.
"""
if not self.success_init:
return False
data = self.get_aruba_data()
if not data:
return False
self.last_results = data.values()
return True
def get_aruba_data(self):
"""Retrieve data from Aruba Access Point and return parsed result."""
import pexpect
connect = 'ssh {}@{}'
ssh = pexpect.spawn(connect.format(self.username, self.host))
query = ssh.expect(['password:', pexpect.TIMEOUT, pexpect.EOF,
'continue connecting (yes/no)?',
'Host key verification failed.',
'Connection refused',
'Connection timed out'], timeout=120)
if query == 1:
_LOGGER.error("Timeout")
return
if query == 2:
_LOGGER.error("Unexpected response from router")
return
if query == 3:
ssh.sendline('yes')
ssh.expect('password:')
elif query == 4:
_LOGGER.error("Host key changed")
return
elif query == 5:
_LOGGER.error("Connection refused by server")
return
elif query == 6:
_LOGGER.error("Connection timed out")
return
ssh.sendline(self.password)
ssh.expect('#')
ssh.sendline('show clients')
ssh.expect('#')
devices_result = ssh.before.split(b'\r\n')
ssh.sendline('exit')
devices = {}
for device in devices_result:
match = _DEVICES_REGEX.search(device.decode('utf-8'))
if match:
devices[match.group('ip')] = {
'ip': match.group('ip'),
'mac': match.group('mac').upper(),
'name': match.group('name')
}
return devices
|
flashvnn/PopclipInstantTranslate | refs/heads/master | src/InstantTranslate.popclipext/requests/packages/chardet/euckrfreq.py | 3120 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M of text material, including literature and computer technology
# 128 --> 0.79
# 256 --> 0.92
# 512 --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ratio = 512 / (2350-512) = 0.279.
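# (The table above reads as cumulative coverage: the N most frequent
# characters appear to account for the given fraction of the sample.)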
#
# Typical Distribution Ratio
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
EUCKR_TABLE_SIZE = 2352
# Char to FreqOrder table
EUCKRCharToFreqOrder = (
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
# Everything below is of no interest for detection purposes
2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024
6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
8736,8737,8738,8739,8740,8741)
# flake8: noqa
|
bboozzoo/jhbuild | refs/heads/master | scripts/hg-update.py | 2 | #! /usr/bin/env python2
#
# hg-update - pull and update a mercurial repository
#
# Copyright (C) 2007 Marco Barisione <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import sys
import re
from subprocess import Popen, call, PIPE, STDOUT
def get_parent():
hg = Popen(['hg', 'parents', '--template', '{rev}'], stdout=PIPE)
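    # The '{rev}' template makes 'hg parents' print only the revision
    # number(s) of the working directory parent(s).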
try:
return hg.stdout.read().split()[0]
except IndexError:
# handle parentless revisions
return ''
def pull():
ret = call(['hg', 'pull'])
return ret == 0
def update():
env = dict(os.environ)
env['HGMERGE'] = '/bin/false'
env['LANG'] = 'C'
hg = Popen(['hg', 'update', '--noninteractive'], stdout=PIPE,
stderr=STDOUT, env=env)
out = hg.communicate()[0]
if hg.returncode != 0:
# Use CVS-like format for conflicts.
out = re.sub('merging (.*) failed!', r'C \1', out)
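        # e.g. "merging foo.c failed!" becomes "C foo.c"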
index = out.find('You can redo the full merge using:')
# Remove the instructions to redo the full merge as we are
# going to revert the update.
if index != -1:
out = out[:index]
print out
return hg.returncode == 0
def undo_update(parent):
print 'Update failed, updating to parent revision'
env = dict(os.environ)
env['HGMERGE'] = 'false'
    call(['hg', 'update', '--noninteractive', '-q', parent], env=env)
def pull_and_update():
parent = get_parent()
if not pull():
return False
if update():
return True
else:
undo_update(parent)
return False
if __name__ == '__main__':
ret = False
try:
ret = pull_and_update()
except OSError, e:
print '%s: %s' % (sys.argv[0], e)
if ret:
exit_code = 0
else:
exit_code = 1
sys.exit(exit_code)
|
befelix/scipy | refs/heads/master | scipy/sparse/linalg/eigen/arpack/__init__.py | 159 | """
Eigenvalue solver using iterative methods.
Find k eigenvectors and eigenvalues of a matrix A using the
Arnoldi/Lanczos iterative methods from ARPACK [1]_,[2]_.
These methods are most useful for large sparse matrices.
- eigs(A, k): find k eigenvalues/eigenvectors of a general square matrix
- eigsh(A, k): find k eigenvalues/eigenvectors of a real symmetric or
  complex Hermitian matrix
References
----------
.. [1] ARPACK Software, http://www.caam.rice.edu/software/ARPACK/
.. [2] R. B. Lehoucq, D. C. Sorensen, and C. Yang, ARPACK USERS GUIDE:
Solution of Large Scale Eigenvalue Problems by Implicitly Restarted
Arnoldi Methods. SIAM, Philadelphia, PA, 1998.
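Examples
--------
Minimal sketch (illustrative): two largest-magnitude eigenvalues of a
symmetric sparse matrix.
>>> from scipy.sparse import diags
>>> from scipy.sparse.linalg import eigsh
>>> vals, vecs = eigsh(diags([1., 2., 3., 4., 5.]), k=2)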
"""
from __future__ import division, print_function, absolute_import
from .arpack import *
|
OSSESAC/odoopubarquiluz | refs/heads/7.0 | openerp/tests/addons/test_impex/tests/test_import.py | 98 | # -*- coding: utf-8 -*-
import openerp.modules.registry
import openerp
from openerp.tests import common
from openerp.tools.misc import mute_logger
def ok(n):
""" Successful import of ``n`` records
:param int n: number of records which should have been imported
"""
return n, 0, 0, 0
def error(row, message, record=None, **kwargs):
""" Failed import of the record ``record`` at line ``row``, with the error
message ``message``
    :param str message: error message produced by the failed import
    :param dict record: values expected in the error's record dict
"""
return (
-1, dict(record or {}, **kwargs),
"Line %d : %s" % (row, message),
'')
def values(seq, field='value'):
return [item[field] for item in seq]
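# e.g. values([{'value': 1}, {'value': 2}]) == [1, 2]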
class ImporterCase(common.TransactionCase):
model_name = False
def __init__(self, *args, **kwargs):
super(ImporterCase, self).__init__(*args, **kwargs)
self.model = None
def setUp(self):
super(ImporterCase, self).setUp()
self.model = self.registry(self.model_name)
def import_(self, fields, rows, context=None):
return self.model.import_data(
self.cr, openerp.SUPERUSER_ID, fields, rows, context=context)
def read(self, fields=('value',), domain=(), context=None):
return self.model.read(
self.cr, openerp.SUPERUSER_ID,
self.model.search(self.cr, openerp.SUPERUSER_ID, domain, context=context),
fields=fields, context=context)
def browse(self, domain=(), context=None):
return self.model.browse(
self.cr, openerp.SUPERUSER_ID,
self.model.search(self.cr, openerp.SUPERUSER_ID, domain, context=context),
context=context)
def xid(self, record):
ModelData = self.registry('ir.model.data')
ids = ModelData.search(
self.cr, openerp.SUPERUSER_ID,
[('model', '=', record._table_name), ('res_id', '=', record.id)])
if ids:
d = ModelData.read(
self.cr, openerp.SUPERUSER_ID, ids, ['name', 'module'])[0]
if d['module']:
return '%s.%s' % (d['module'], d['name'])
return d['name']
name = dict(record.name_get())[record.id]
# fix dotted name_get results, otherwise xid lookups blow up
name = name.replace('.', '-')
ModelData.create(self.cr, openerp.SUPERUSER_ID, {
'name': name,
'model': record._table_name,
'res_id': record.id,
'module': '__test__'
})
return '__test__.' + name
class test_ids_stuff(ImporterCase):
model_name = 'export.integer'
def test_create_with_id(self):
self.assertEqual(
self.import_(['.id', 'value'], [['42', '36']]),
error(1, u"Unknown database identifier '42'"))
def test_create_with_xid(self):
self.assertEqual(
self.import_(['id', 'value'], [['somexmlid', '42']]),
ok(1))
self.assertEqual(
'somexmlid',
self.xid(self.browse()[0]))
def test_update_with_id(self):
id = self.model.create(self.cr, openerp.SUPERUSER_ID, {'value': 36})
self.assertEqual(
36,
self.model.browse(self.cr, openerp.SUPERUSER_ID, id).value)
self.assertEqual(
self.import_(['.id', 'value'], [[str(id), '42']]),
ok(1))
self.assertEqual(
            [42], # value updated by the import
values(self.read()))
def test_update_with_xid(self):
self.import_(['id', 'value'], [['somexmlid', '36']])
self.assertEqual([36], values(self.read()))
self.import_(['id', 'value'], [['somexmlid', '1234567']])
self.assertEqual([1234567], values(self.read()))
class test_boolean_field(ImporterCase):
model_name = 'export.boolean'
def test_empty(self):
self.assertEqual(
self.import_(['value'], []),
ok(0))
def test_exported(self):
self.assertEqual(
self.import_(['value'], [
['False'],
['True'],
]),
ok(2))
records = self.read()
self.assertEqual([
False,
True,
], values(records))
def test_falses(self):
self.assertEqual(
self.import_(['value'], [
[u'0'],
[u'no'],
[u'false'],
[u'FALSE'],
[u''],
]),
ok(5))
self.assertEqual([
False,
False,
False,
False,
False,
],
values(self.read()))
def test_trues(self):
self.assertEqual(
self.import_(['value'], [
['off'],
['None'],
['nil'],
['()'],
['f'],
['#f'],
                # Problem: OpenOffice (and probably Excel) output localized booleans
['VRAI'],
[u'OFF'],
]),
ok(8))
self.assertEqual(
[True] * 8,
values(self.read()))
class test_integer_field(ImporterCase):
model_name = 'export.integer'
def test_none(self):
self.assertEqual(
self.import_(['value'], []),
ok(0))
def test_empty(self):
self.assertEqual(
self.import_(['value'], [['']]),
ok(1))
self.assertEqual(
[False],
values(self.read()))
def test_zero(self):
self.assertEqual(
self.import_(['value'], [['0']]),
ok(1))
self.assertEqual(
self.import_(['value'], [['-0']]),
ok(1))
self.assertEqual([False, False], values(self.read()))
def test_positives(self):
self.assertEqual(
self.import_(['value'], [
['1'],
['42'],
[str(2**31-1)],
['12345678']
]),
ok(4))
self.assertEqual([
1, 42, 2**31-1, 12345678
], values(self.read()))
def test_negatives(self):
self.assertEqual(
self.import_(['value'], [
['-1'],
['-42'],
[str(-(2**31 - 1))],
[str(-(2**31))],
['-12345678']
]),
ok(5))
self.assertEqual([
-1, -42, -(2**31 - 1), -(2**31), -12345678
], values(self.read()))
@mute_logger('openerp.sql_db')
def test_out_of_range(self):
self.assertEqual(
self.import_(['value'], [[str(2**31)]]),
error(1, "integer out of range\n"))
        # auto-rollbacks if error is in process_liness, but not during
        # ir.model.data write. Can differentiate because the former ends
        # error lines with "!"
self.cr.rollback()
self.assertEqual(
self.import_(['value'], [[str(-2**32)]]),
error(1, "integer out of range\n"))
def test_nonsense(self):
self.assertEqual(
self.import_(['value'], [['zorglub']]),
error(1, u"'zorglub' does not seem to be an integer for field 'unknown'"))
class test_float_field(ImporterCase):
model_name = 'export.float'
def test_none(self):
self.assertEqual(
self.import_(['value'], []),
ok(0))
def test_empty(self):
self.assertEqual(
self.import_(['value'], [['']]),
ok(1))
self.assertEqual(
[False],
values(self.read()))
def test_zero(self):
self.assertEqual(
self.import_(['value'], [['0']]),
ok(1))
self.assertEqual(
self.import_(['value'], [['-0']]),
ok(1))
self.assertEqual([False, False], values(self.read()))
def test_positives(self):
self.assertEqual(
self.import_(['value'], [
['1'],
['42'],
[str(2**31-1)],
['12345678'],
[str(2**33)],
['0.000001'],
]),
ok(6))
self.assertEqual([
1, 42, 2**31-1, 12345678, 2.0**33, .000001
], values(self.read()))
def test_negatives(self):
self.assertEqual(
self.import_(['value'], [
['-1'],
['-42'],
[str(-2**31 + 1)],
[str(-2**31)],
['-12345678'],
[str(-2**33)],
['-0.000001'],
]),
ok(7))
self.assertEqual([
-1, -42, -(2**31 - 1), -(2**31), -12345678, -2.0**33, -.000001
], values(self.read()))
def test_nonsense(self):
self.assertEqual(
self.import_(['value'], [['foobar']]),
error(1, u"'foobar' does not seem to be a number for field 'unknown'"))
class test_string_field(ImporterCase):
model_name = 'export.string.bounded'
def test_empty(self):
self.assertEqual(
self.import_(['value'], [['']]),
ok(1))
self.assertEqual([False], values(self.read()))
def test_imported(self):
self.assertEqual(
self.import_(['value'], [
[u'foobar'],
[u'foobarbaz'],
[u'Með suð í eyrum við spilum endalaust'],
[u"People 'get' types. They use them all the time. Telling "
u"someone he can't pound a nail with a banana doesn't much "
u"surprise him."]
]),
ok(4))
self.assertEqual([
u"foobar",
u"foobarbaz",
u"Með suð í eyrum ",
u"People 'get' typ",
], values(self.read()))
class test_unbound_string_field(ImporterCase):
model_name = 'export.string'
def test_imported(self):
self.assertEqual(
self.import_(['value'], [
[u'í dag viðrar vel til loftárása'],
# ackbar.jpg
[u"If they ask you about fun, you tell them – fun is a filthy"
u" parasite"]
]),
ok(2))
self.assertEqual([
u"í dag viðrar vel til loftárása",
u"If they ask you about fun, you tell them – fun is a filthy parasite"
], values(self.read()))
class test_text(ImporterCase):
model_name = 'export.text'
def test_empty(self):
self.assertEqual(
self.import_(['value'], [['']]),
ok(1))
self.assertEqual([False], values(self.read()))
def test_imported(self):
s = (u"Breiðskífa er notað um útgefna hljómplötu sem inniheldur "
u"stúdíóupptökur frá einum flytjanda. Breiðskífur eru oftast "
u"milli 25-80 mínútur og er lengd þeirra oft miðuð við 33⅓ "
u"snúninga 12 tommu vínylplötur (sem geta verið allt að 30 mín "
u"hvor hlið).\n\nBreiðskífur eru stundum tvöfaldar og eru þær þá"
u" gefnar út á tveimur geisladiskum eða tveimur vínylplötum.")
self.assertEqual(
self.import_(['value'], [[s]]),
ok(1))
self.assertEqual([s], values(self.read()))
class test_selection(ImporterCase):
model_name = 'export.selection'
translations_fr = [
("Qux", "toto"),
("Bar", "titi"),
("Foo", "tete"),
]
def test_imported(self):
self.assertEqual(
self.import_(['value'], [
['Qux'],
['Bar'],
['Foo'],
['2'],
]),
ok(4))
self.assertEqual([3, 2, 1, 2], values(self.read()))
def test_imported_translated(self):
self.registry('res.lang').create(self.cr, openerp.SUPERUSER_ID, {
'name': u'Français',
'code': 'fr_FR',
'translatable': True,
'date_format': '%d.%m.%Y',
'decimal_point': ',',
'thousands_sep': ' ',
})
Translations = self.registry('ir.translation')
for source, value in self.translations_fr:
Translations.create(self.cr, openerp.SUPERUSER_ID, {
'name': 'export.selection,value',
'lang': 'fr_FR',
'type': 'selection',
'src': source,
'value': value
})
self.assertEqual(
self.import_(['value'], [
['toto'],
['tete'],
['titi'],
], context={'lang': 'fr_FR'}),
ok(3))
self.assertEqual([3, 1, 2], values(self.read()))
self.assertEqual(
self.import_(['value'], [['Foo']], context={'lang': 'fr_FR'}),
ok(1))
def test_invalid(self):
self.assertEqual(
self.import_(['value'], [['Baz']]),
error(1, u"Value 'Baz' not found in selection field 'unknown'"))
self.cr.rollback()
self.assertEqual(
self.import_(['value'], [[42]]),
error(1, u"Value '42' not found in selection field 'unknown'"))
class test_selection_function(ImporterCase):
model_name = 'export.selection.function'
translations_fr = [
("Corge", "toto"),
("Grault", "titi"),
("Wheee", "tete"),
("Moog", "tutu"),
]
def test_imported(self):
""" import uses fields_get, so translates import label (may or may not
be good news) *and* serializes the selection function to reverse it:
import does not actually know that the selection field uses a function
"""
# NOTE: conflict between a value and a label => ?
self.assertEqual(
self.import_(['value'], [
['3'],
["Grault"],
]),
ok(2))
self.assertEqual(
['3', '1'],
values(self.read()))
def test_translated(self):
""" Expects output of selection function returns translated labels
"""
self.registry('res.lang').create(self.cr, openerp.SUPERUSER_ID, {
'name': u'Français',
'code': 'fr_FR',
'translatable': True,
'date_format': '%d.%m.%Y',
'decimal_point': ',',
'thousands_sep': ' ',
})
Translations = self.registry('ir.translation')
for source, value in self.translations_fr:
Translations.create(self.cr, openerp.SUPERUSER_ID, {
'name': 'export.selection,value',
'lang': 'fr_FR',
'type': 'selection',
'src': source,
'value': value
})
self.assertEqual(
self.import_(['value'], [
['toto'],
['tete'],
], context={'lang': 'fr_FR'}),
ok(2))
self.assertEqual(
self.import_(['value'], [['Wheee']], context={'lang': 'fr_FR'}),
ok(1))
class test_m2o(ImporterCase):
model_name = 'export.many2one'
def test_by_name(self):
# create integer objects
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 36})
# get its name
name1 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id1]))[integer_id1]
name2 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id2]))[integer_id2]
self.assertEqual(
self.import_(['value'], [
# import by name_get
[name1],
[name1],
[name2],
]),
ok(3))
# correct ids assigned to corresponding records
self.assertEqual([
(integer_id1, name1),
(integer_id1, name1),
(integer_id2, name2),],
values(self.read()))
def test_by_xid(self):
ExportInteger = self.registry('export.integer')
integer_id = ExportInteger.create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
xid = self.xid(ExportInteger.browse(
self.cr, openerp.SUPERUSER_ID, [integer_id])[0])
self.assertEqual(
self.import_(['value/id'], [[xid]]),
ok(1))
b = self.browse()
self.assertEqual(42, b[0].value.value)
def test_by_id(self):
integer_id = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
self.assertEqual(
self.import_(['value/.id'], [[integer_id]]),
ok(1))
b = self.browse()
self.assertEqual(42, b[0].value.value)
def test_by_names(self):
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
name1 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id1]))[integer_id1]
name2 = dict(self.registry('export.integer').name_get(
self.cr, openerp.SUPERUSER_ID,[integer_id2]))[integer_id2]
# names should be the same
self.assertEqual(name1, name2)
self.assertEqual(
self.import_(['value'], [[name2]]),
ok(1))
self.assertEqual([
(integer_id1, name1)
], values(self.read()))
def test_fail_by_implicit_id(self):
""" Can't implicitly import records by id
"""
# create integer objects
integer_id1 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 42})
integer_id2 = self.registry('export.integer').create(
self.cr, openerp.SUPERUSER_ID, {'value': 36})
self.assertEqual(
self.import_(['value'], [
# import by id, without specifying it
[integer_id1],
[integer_id2],
[integer_id1],
]),
error(1, u"No matching record found for name '%s' in field 'unknown'" % integer_id1))
def test_sub_field(self):
""" Does not implicitly create the record, does not warn that you can't
import m2o subfields (at all)...
"""
self.assertEqual(
self.import_(['value/value'], [['42']]),
error(1, u"Can not create Many-To-One records indirectly, import the field separately"))
def test_fail_noids(self):
self.assertEqual(
self.import_(['value'], [['nameisnoexist:3']]),
error(1, u"No matching record found for name 'nameisnoexist:3' in field 'unknown'"))
self.cr.rollback()
self.assertEqual(
self.import_(['value/id'], [['noxidhere']]),
error(1, u"No matching record found for external id 'noxidhere' in field 'unknown'"))
self.cr.rollback()
self.assertEqual(
self.import_(['value/.id'], [[66]]),
error(1, u"No matching record found for database id '66' in field 'unknown'"))
class test_m2m(ImporterCase):
model_name = 'export.many2many'
    # apparently, the one and only thing that works is a
    # csv_internal_sep-separated list of ids, xids, or names (depending on
    # whether the column is m2m/.id, m2m/id or m2m[/anythingelse])
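    # For illustration, with hypothetical values, the three column forms look
    # like: value/.id -> "13,42" (database ids), value/id -> "xid_a,xid_b"
    # (external ids), value -> "Record A,Record B" (name_get names).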
def test_ids(self):
id1 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
id5 = self.registry('export.many2many.other').create(
self.cr, openerp.SUPERUSER_ID, {'value': 99, 'str': 'record4'})
self.assertEqual(
self.import_(['value/.id'], [
['%d,%d' % (id1, id2)],
['%d,%d,%d' % (id1, id3, id4)],
['%d,%d,%d' % (id1, id2, id3)],
['%d' % id5]
]),
ok(4))
ids = lambda records: [record.id for record in records]
b = self.browse()
self.assertEqual(ids(b[0].value), [id1, id2])
self.assertEqual(values(b[0].value), [3, 44])
self.assertEqual(ids(b[2].value), [id1, id2, id3])
self.assertEqual(values(b[2].value), [3, 44, 84])
def test_noids(self):
self.assertEqual(
self.import_(['value/.id'], [['42']]),
error(1, u"No matching record found for database id '42' in field 'unknown'"))
def test_xids(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
records = M2O_o.browse(self.cr, openerp.SUPERUSER_ID, [id1, id2, id3, id4])
self.assertEqual(
self.import_(['value/id'], [
['%s,%s' % (self.xid(records[0]), self.xid(records[1]))],
['%s' % self.xid(records[3])],
['%s,%s' % (self.xid(records[2]), self.xid(records[1]))],
]),
ok(3))
b = self.browse()
self.assertEqual(values(b[0].value), [3, 44])
self.assertEqual(values(b[2].value), [44, 84])
def test_noxids(self):
self.assertEqual(
self.import_(['value/id'], [['noxidforthat']]),
error(1, u"No matching record found for external id 'noxidforthat' in field 'unknown'"))
def test_names(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
records = M2O_o.browse(self.cr, openerp.SUPERUSER_ID, [id1, id2, id3, id4])
name = lambda record: dict(record.name_get())[record.id]
self.assertEqual(
self.import_(['value'], [
['%s,%s' % (name(records[1]), name(records[2]))],
['%s,%s,%s' % (name(records[0]), name(records[1]), name(records[2]))],
['%s,%s' % (name(records[0]), name(records[3]))],
]),
ok(3))
b = self.browse()
self.assertEqual(values(b[1].value), [3, 44, 84])
self.assertEqual(values(b[2].value), [3, 9])
def test_nonames(self):
self.assertEqual(
self.import_(['value'], [['wherethem2mhavenonames']]),
error(1, u"No matching record found for name 'wherethem2mhavenonames' in field 'unknown'"))
def test_import_to_existing(self):
M2O_o = self.registry('export.many2many.other')
id1 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 3, 'str': 'record0'})
id2 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 44, 'str': 'record1'})
id3 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 84, 'str': 'record2'})
id4 = M2O_o.create(self.cr, openerp.SUPERUSER_ID, {'value': 9, 'str': 'record3'})
xid = 'myxid'
self.assertEqual(
self.import_(['id', 'value/.id'], [[xid, '%d,%d' % (id1, id2)]]),
ok(1))
self.assertEqual(
self.import_(['id', 'value/.id'], [[xid, '%d,%d' % (id3, id4)]]),
ok(1))
b = self.browse()
self.assertEqual(len(b), 1)
        # TODO: is replacement of existing m2m values correct?
self.assertEqual(values(b[0].value), [84, 9])
class test_o2m(ImporterCase):
model_name = 'export.one2many'
def test_name_get(self):
s = u'Java is a DSL for taking large XML files and converting them to' \
u' stack traces'
self.assertEqual(
self.import_(
['const', 'value'],
[['5', s]]),
error(1, u"No matching record found for name '%s' in field 'unknown'" % s))
def test_single(self):
self.assertEqual(
self.import_(['const', 'value/value'], [
['5', '63']
]),
ok(1))
(b,) = self.browse()
self.assertEqual(b.const, 5)
self.assertEqual(values(b.value), [63])
def test_multicore(self):
self.assertEqual(
self.import_(['const', 'value/value'], [
['5', '63'],
['6', '64'],
]),
ok(2))
b1, b2 = self.browse()
self.assertEqual(b1.const, 5)
self.assertEqual(values(b1.value), [63])
self.assertEqual(b2.const, 6)
self.assertEqual(values(b2.value), [64])
def test_multisub(self):
self.assertEqual(
self.import_(['const', 'value/value'], [
['5', '63'],
['', '64'],
['', '65'],
['', '66'],
]),
ok(4))
(b,) = self.browse()
self.assertEqual(values(b.value), [63, 64, 65, 66])
def test_multi_subfields(self):
self.assertEqual(
self.import_(['value/str', 'const', 'value/value'], [
['this', '5', '63'],
['is', '', '64'],
['the', '', '65'],
['rhythm', '', '66'],
]),
ok(4))
(b,) = self.browse()
self.assertEqual(values(b.value), [63, 64, 65, 66])
self.assertEqual(
values(b.value, 'str'),
'this is the rhythm'.split())
def test_link_inline(self):
id1 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
try:
self.import_(['const', 'value/.id'], [
['42', '%d,%d' % (id1, id2)]
])
except ValueError, e:
# should be Exception(Database ID doesn't exist: export.one2many.child : $id1,$id2)
self.assertIs(type(e), ValueError)
self.assertEqual(
e.args[0],
"invalid literal for int() with base 10: '%d,%d'" % (id1, id2))
def test_link(self):
id1 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = self.registry('export.one2many.child').create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
self.assertEqual(
self.import_(['const', 'value/.id'], [
['42', str(id1)],
['', str(id2)],
]),
ok(2))
[b] = self.browse()
self.assertEqual(b.const, 42)
# automatically forces link between core record and o2ms
self.assertEqual(values(b.value), [109, 262])
self.assertEqual(values(b.value, field='parent_id'), [b, b])
def test_link_2(self):
O2M_c = self.registry('export.one2many.child')
id1 = O2M_c.create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Bf', 'value': 109
})
id2 = O2M_c.create(self.cr, openerp.SUPERUSER_ID, {
'str': 'Me', 'value': 262
})
self.assertEqual(
self.import_(['const', 'value/.id', 'value/value'], [
['42', str(id1), '1'],
['', str(id2), '2'],
]),
ok(2))
[b] = self.browse()
self.assertEqual(b.const, 42)
self.assertEqual(values(b.value), [1, 2])
self.assertEqual(values(b.value, field='parent_id'), [b, b])
class test_o2m_multiple(ImporterCase):
model_name = 'export.one2many.multiple'
def test_multi_mixed(self):
self.assertEqual(
self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', '21'],
['', '12', '22'],
['', '13', '23'],
['', '14', ''],
]),
ok(4))
[b] = self.browse()
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
def test_multi(self):
self.assertEqual(
self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', '21'],
['', '12', ''],
['', '13', ''],
['', '14', ''],
['', '', '22'],
['', '', '23'],
]),
ok(6))
[b] = self.browse()
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
def test_multi_fullsplit(self):
self.assertEqual(
self.import_(['const', 'child1/value', 'child2/value'], [
['5', '11', ''],
['', '12', ''],
['', '13', ''],
['', '14', ''],
['', '', '21'],
['', '', '22'],
['', '', '23'],
]),
ok(7))
[b] = self.browse()
self.assertEqual(b.const, 5)
self.assertEqual(values(b.child1), [11, 12, 13, 14])
self.assertEqual(values(b.child2), [21, 22, 23])
# function, related, reference: written to db as-is...
# => function uses @type for value coercion/conversion
|
jballanc/openmicroscopy | refs/heads/develop | components/tools/OmeroPy/src/omero/util/goodeval.py | 5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def goodeval(fn, fnList, *args):
    """Build a call to ``fn`` with ``args`` as source text and exec it,
    but only if the function name appears in the whitelist ``fnList``."""
    if fn in fnList:
        # e.g. fn='foo', args=(1, 2) -> "foo(1,2)"
        callFn = '%s(%s)' % (fn, ','.join(str(arg) for arg in args))
        print callFn
        exec(callFn)
|
polimediaupv/edx-platform | refs/heads/master | common/test/acceptance/accessibility/test_studio_library_axs.py | 71 | """
Accessibility tests for Studio Library pages.
Run just this test with:
SELENIUM_BROWSER=phantomjs paver test_bokchoy -d accessibility -t test_studio_library_axs.py
"""
from ..tests.studio.base_studio_test import StudioLibraryTest
from ..pages.studio.library import LibraryEditPage
class StudioLibraryAxsTest(StudioLibraryTest):
"""
Class to test Studio pages accessibility.
"""
def test_lib_edit_page_axs(self):
"""
Check accessibility of LibraryEditPage.
"""
lib_page = LibraryEditPage(self.browser, self.library_key)
lib_page.visit()
lib_page.wait_until_ready()
report = lib_page.do_axs_audit()
# There was one page in this session
self.assertEqual(1, len(report))
result = report[0]
# Verify that this page has no accessibility errors.
self.assertEqual(0, len(result.errors))
# Verify that this page currently has 3 accessibility warnings.
self.assertEqual(3, len(result.warnings))
# And that these are the warnings that the page currently gives.
for warning in result.warnings:
self.assertTrue(
warning.startswith(('Warning: AX_FOCUS_01', 'Warning: AX_COLOR_01', 'Warning: AX_IMAGE_01',)),
msg="Unexpected warning: {}".format(warning))
|
ryyan/thrift-benchmark | refs/heads/master | py/server.py | 1 | from thriftpy import load
from thriftpy.rpc import make_server
from thriftpy.transport import TFramedTransportFactory
echo_thrift = load("/app/sh/echo.thrift", module_name="echo_thrift")
class echoHandler(object):
def echo(self, msg):
return msg.text
make_server(service=echo_thrift.Echo, handler=echoHandler(),
host='127.0.0.1', port=9999,
trans_factory=TFramedTransportFactory()).serve()
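# A minimal matching client sketch (assumptions: echo.thrift defines a Message
# struct with a ``text`` field, and thriftpy's make_client takes the same
# framed-transport factory):
#
#   from thriftpy.rpc import make_client
#   client = make_client(echo_thrift.Echo, '127.0.0.1', 9999,
#                        trans_factory=TFramedTransportFactory())
#   print client.echo(echo_thrift.Message(text='hello'))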
|
dxl0632/deeplearning_nd_udacity | refs/heads/master | language-translation/helper.py | 156 | import os
import pickle
import copy
import numpy as np
CODES = {'<PAD>': 0, '<EOS>': 1, '<UNK>': 2, '<GO>': 3 }
def load_data(path):
"""
Load Dataset from File
"""
input_file = os.path.join(path)
with open(input_file, 'r', encoding='utf-8') as f:
return f.read()
def preprocess_and_save_data(source_path, target_path, text_to_ids):
"""
    Preprocess Text Data. Save it to file.
"""
# Preprocess
source_text = load_data(source_path)
target_text = load_data(target_path)
source_text = source_text.lower()
target_text = target_text.lower()
source_vocab_to_int, source_int_to_vocab = create_lookup_tables(source_text)
target_vocab_to_int, target_int_to_vocab = create_lookup_tables(target_text)
source_text, target_text = text_to_ids(source_text, target_text, source_vocab_to_int, target_vocab_to_int)
# Save Data
with open('preprocess.p', 'wb') as out_file:
pickle.dump((
(source_text, target_text),
(source_vocab_to_int, target_vocab_to_int),
(source_int_to_vocab, target_int_to_vocab)), out_file)
def load_preprocess():
"""
    Load the Preprocessed Training data from file
"""
with open('preprocess.p', mode='rb') as in_file:
return pickle.load(in_file)
def create_lookup_tables(text):
"""
Create lookup tables for vocabulary
"""
vocab = set(text.split())
vocab_to_int = copy.copy(CODES)
for v_i, v in enumerate(vocab, len(CODES)):
vocab_to_int[v] = v_i
int_to_vocab = {v_i: v for v, v_i in vocab_to_int.items()}
return vocab_to_int, int_to_vocab
def save_params(params):
"""
Save parameters to file
"""
with open('params.p', 'wb') as out_file:
pickle.dump(params, out_file)
def load_params():
"""
Load parameters from file
"""
with open('params.p', mode='rb') as in_file:
return pickle.load(in_file)
def batch_data(source, target, batch_size):
"""
Batch source and target together
"""
for batch_i in range(0, len(source)//batch_size):
start_i = batch_i * batch_size
source_batch = source[start_i:start_i + batch_size]
target_batch = target[start_i:start_i + batch_size]
yield np.array(pad_sentence_batch(source_batch)), np.array(pad_sentence_batch(target_batch))
def pad_sentence_batch(sentence_batch):
"""
Pad sentence with <PAD> id
"""
max_sentence = max([len(sentence) for sentence in sentence_batch])
return [sentence + [CODES['<PAD>']] * (max_sentence - len(sentence))
for sentence in sentence_batch]
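# Illustration with a hypothetical batch: pad_sentence_batch([[5, 6], [7]])
# returns [[5, 6], [7, 0]], since the longest sentence has length 2 and
# CODES['<PAD>'] is 0.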
|
leiferikb/bitpop | refs/heads/master | src/native_client/run.py | 5 | #!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import os
import subprocess
import sys
import tempfile
import pynacl.platform
# Target architecture for PNaCl can be set through the ``-arch``
# command-line argument; when its value is ``env``, the environment
# variable below is queried to figure out which architecture to target.
ARCH_ENV_VAR_NAME = 'PNACL_RUN_ARCH'
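# Example (hypothetical pexe): translate and run for 32-bit x86, taking the
# architecture from the environment:
#   PNACL_RUN_ARCH=x86-32 ./run.py -arch env hello.pexe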
class Environment:
pass
env = Environment()
def SetupEnvironment():
# native_client/ directory
env.nacl_root = FindBaseDir()
toolchain_base = os.path.join(env.nacl_root,
'toolchain',
'%s_x86' % pynacl.platform.GetOS())
# Path to Native NaCl toolchain (glibc)
env.nnacl_root = os.path.join(toolchain_base, 'nacl_x86_glibc')
# Path to PNaCl toolchain
env.pnacl_base = os.path.join(toolchain_base, 'pnacl_newlib')
# QEMU
env.arm_root = os.path.join(toolchain_base, 'arm_trusted')
env.qemu_arm = os.path.join(env.arm_root, 'run_under_qemu_arm')
env.mips32_root = os.path.join(toolchain_base, 'mips_trusted')
env.qemu_mips32 = os.path.join(env.mips32_root, 'run_under_qemu_mips32')
# Path to 'readelf'
env.readelf = FindReadElf()
# Path to 'scons'
env.scons = os.path.join(env.nacl_root, 'scons')
# Library path for runnable-ld.so
env.library_path = []
# Suppress -S -a
env.paranoid = False
# Only print commands, don't run them
env.dry_run = False
# Force a specific sel_ldr
env.force_sel_ldr = None
# Force a specific IRT
env.force_irt = None
# Don't print anything
env.quiet = False
# Arch (x86-32, x86-64, arm, mips32)
env.arch = None
# Trace in QEMU
env.trace = False
# Debug the nexe using the debug stub
env.debug = False
# PNaCl (as opposed to NaCl).
env.is_pnacl = False
def PrintBanner(output):
if not env.quiet:
lines = output.split('\n')
print '*' * 80
for line in lines:
padding = ' ' * max(0, (80 - len(line)) / 2)
      print padding + line + padding
print '*' * 80
def PrintCommand(s):
if not env.quiet:
print
print s
print
def GetMultiDir(arch):
if arch == 'x86-32':
return 'lib32'
elif arch == 'x86-64':
return 'lib'
else:
Fatal('nacl-gcc does not support %s' % arch)
def SetupArch(arch, allow_build=True):
'''Setup environment variables that require knowing the
architecture. We can only do this after we've seen the
nexe or once we've read -arch off the command-line.
'''
env.arch = arch
env.sel_ldr = FindOrBuildSelLdr(allow_build=allow_build)
env.irt = FindOrBuildIRT(allow_build=allow_build)
def SetupLibC(arch, is_dynamic):
if is_dynamic:
if env.is_pnacl:
libdir = os.path.join(env.pnacl_base, 'lib-' + arch)
else:
libdir = os.path.join(env.nnacl_root, 'x86_64-nacl', GetMultiDir(arch))
env.runnable_ld = os.path.join(libdir, 'runnable-ld.so')
env.library_path.append(libdir)
def main(argv):
SetupEnvironment()
return_code = 0
sel_ldr_options = []
# sel_ldr's "quiet" options need to come early in the command line
# to suppress noisy output from processing other options, like -Q.
sel_ldr_quiet_options = []
nexe, nexe_params = ArgSplit(argv[1:])
try:
if env.is_pnacl:
nexe = Translate(env.arch, nexe)
# Read the ELF file info
if env.is_pnacl and env.dry_run:
# In a dry run, we don't actually run pnacl-translate, so there is
# no nexe for readelf. Fill in the information manually.
arch = env.arch
is_dynamic = False
is_glibc_static = False
else:
arch, is_dynamic, is_glibc_static = ReadELFInfo(nexe)
# Add default sel_ldr options
if not env.paranoid:
sel_ldr_options += ['-a']
# -S signal handling is not supported on windows, but otherwise
# it is useful getting the address of crashes.
if not pynacl.platform.IsWindows():
sel_ldr_options += ['-S']
# X86-64 glibc static has validation problems without stub out (-s)
if arch == 'x86-64' and is_glibc_static:
sel_ldr_options += ['-s']
if env.quiet:
# Don't print sel_ldr logs
# These need to be at the start of the arglist for full effectiveness.
# -q means quiet most stderr warnings.
# -l /dev/null means log to /dev/null.
sel_ldr_quiet_options = ['-q', '-l', '/dev/null']
if env.debug:
# Disabling validation (-c) is used by the debug stub test.
# TODO(dschuff): remove if/when it's no longer necessary
sel_ldr_options += ['-c', '-c', '-g']
# Tell the user
if is_dynamic:
extra = 'DYNAMIC'
else:
extra = 'STATIC'
PrintBanner('%s is %s %s' % (os.path.basename(nexe),
arch.upper(), extra))
# Setup architecture-specific environment variables
SetupArch(arch)
# Setup LibC-specific environment variables
SetupLibC(arch, is_dynamic)
sel_ldr_args = []
# Add irt to sel_ldr arguments
if env.irt:
sel_ldr_args += ['-B', env.irt]
# Setup sel_ldr arguments
sel_ldr_args += sel_ldr_options + ['--']
if is_dynamic:
sel_ldr_args += [env.runnable_ld,
'--library-path', ':'.join(env.library_path)]
# The NaCl dynamic loader prefers posixy paths.
nexe_path = os.path.abspath(nexe)
nexe_path = nexe_path.replace('\\', '/')
sel_ldr_args += [nexe_path] + nexe_params
# Run sel_ldr!
retries = 0
try:
if hasattr(env, 'retries'):
retries = int(env.retries)
except ValueError:
pass
collate = env.collate or retries > 0
input = sys.stdin.read() if collate else None
for iter in range(1 + max(retries, 0)):
output = RunSelLdr(sel_ldr_args, quiet_args=sel_ldr_quiet_options,
collate=collate, stdin_string=input)
if env.last_return_code < 128:
# If the application crashes, we expect a 128+ return code.
break
sys.stdout.write(output or '')
return_code = env.last_return_code
finally:
if env.is_pnacl:
# Clean up the .nexe that was created.
try:
os.remove(nexe)
except:
pass
return return_code
def RunSelLdr(args, quiet_args=[], collate=False, stdin_string=None):
"""Run the sel_ldr command and optionally capture its output.
Args:
args: A string list containing the command and arguments.
collate: Whether to capture stdout+stderr (rather than passing
them through to the terminal).
stdin_string: Text to send to the command via stdin. If None, stdin is
inherited from the caller.
Returns:
A string containing the concatenation of any captured stdout plus
any captured stderr.
"""
prefix = []
# The bootstrap loader args (--r_debug, --reserved_at_zero) need to
# come before quiet_args.
bootstrap_loader_args = []
arch = pynacl.platform.GetArch3264()
if arch != pynacl.platform.ARCH3264_ARM and env.arch == 'arm':
prefix = [ env.qemu_arm, '-cpu', 'cortex-a9']
if env.trace:
prefix += ['-d', 'in_asm,op,exec,cpu']
args = ['-Q'] + args
if arch != pynacl.platform.ARCH3264_MIPS32 and env.arch == 'mips32':
prefix = [env.qemu_mips32]
if env.trace:
prefix += ['-d', 'in_asm,op,exec,cpu']
args = ['-Q'] + args
# Use the bootstrap loader on linux.
if pynacl.platform.IsLinux():
bootstrap = os.path.join(os.path.dirname(env.sel_ldr),
'nacl_helper_bootstrap')
loader = [bootstrap, env.sel_ldr]
template_digits = 'X' * 16
bootstrap_loader_args = ['--r_debug=0x' + template_digits,
'--reserved_at_zero=0x' + template_digits]
else:
loader = [env.sel_ldr]
return Run(prefix + loader + bootstrap_loader_args + quiet_args + args,
exit_on_failure=(not collate),
capture_stdout=collate, capture_stderr=collate,
stdin_string=stdin_string)
def FindOrBuildIRT(allow_build = True):
if env.force_irt:
if env.force_irt == 'none':
return None
elif env.force_irt == 'core':
flavors = ['irt_core']
else:
irt = env.force_irt
if not os.path.exists(irt):
Fatal('IRT not found: %s' % irt)
return irt
else:
flavors = ['irt_core']
irt_paths = []
for flavor in flavors:
path = os.path.join(env.nacl_root, 'scons-out',
'nacl_irt-%s/staging/%s.nexe' % (env.arch, flavor))
irt_paths.append(path)
for path in irt_paths:
if os.path.exists(path):
return path
if allow_build:
PrintBanner('irt not found. Building it with scons.')
irt = irt_paths[0]
BuildIRT(flavors[0])
assert(env.dry_run or os.path.exists(irt))
return irt
return None
def BuildIRT(flavor):
args = ('platform=%s naclsdk_validate=0 ' +
'sysinfo=0 -j8 %s') % (env.arch, flavor)
args = args.split()
Run([env.scons] + args, cwd=env.nacl_root)
def FindOrBuildSelLdr(allow_build=True):
if env.force_sel_ldr:
if env.force_sel_ldr in ('dbg','opt'):
modes = [ env.force_sel_ldr ]
else:
sel_ldr = env.force_sel_ldr
if not os.path.exists(sel_ldr):
Fatal('sel_ldr not found: %s' % sel_ldr)
return sel_ldr
else:
modes = ['opt','dbg']
loaders = []
for mode in modes:
sel_ldr = os.path.join(
env.nacl_root, 'scons-out',
'%s-%s-%s' % (mode, pynacl.platform.GetOS(), env.arch),
'staging', 'sel_ldr')
if pynacl.platform.IsWindows():
sel_ldr += '.exe'
loaders.append(sel_ldr)
# If one exists, use it.
for sel_ldr in loaders:
if os.path.exists(sel_ldr):
return sel_ldr
# Build it
if allow_build:
PrintBanner('sel_ldr not found. Building it with scons.')
sel_ldr = loaders[0]
BuildSelLdr(modes[0])
assert(env.dry_run or os.path.exists(sel_ldr))
return sel_ldr
return None
def BuildSelLdr(mode):
args = ('platform=%s MODE=%s-host naclsdk_validate=0 ' +
'sysinfo=0 -j8 sel_ldr') % (env.arch, mode)
args = args.split()
Run([env.scons] + args, cwd=env.nacl_root)
def Translate(arch, pexe):
output_file = os.path.splitext(pexe)[0] + '.' + arch + '.nexe'
pnacl_translate = os.path.join(env.pnacl_base, 'bin', 'pnacl-translate')
args = [ pnacl_translate, '-arch', arch, pexe, '-o', output_file,
'--allow-llvm-bitcode-input' ]
if env.zerocost_eh:
args.append('--pnacl-allow-zerocost-eh')
Run(args)
return output_file
def Stringify(args):
ret = ''
for arg in args:
if ' ' in arg:
ret += ' "%s"' % arg
else:
ret += ' %s' % arg
return ret.strip()
def PrepareStdin(stdin_string):
"""Prepare a stdin stream for a subprocess based on contents of a string.
This has to be in the form of an actual file, rather than directly piping
the string, since the child may (inappropriately) try to fseek() on stdin.
Args:
stdin_string: The characters to pipe to the subprocess.
Returns:
An open temporary file object ready to be read from.
"""
f = tempfile.TemporaryFile()
f.write(stdin_string)
f.seek(0)
return f
def Run(args, cwd=None, verbose=True, exit_on_failure=False,
capture_stdout=False, capture_stderr=False, stdin_string=None):
"""Run a command and optionally capture its output.
Args:
args: A string list containing the command and arguments.
cwd: Change to this directory before running.
verbose: Print the command before running it.
exit_on_failure: Exit immediately if the command returns nonzero.
capture_stdout: Capture the stdout as a string (rather than passing it
through to the terminal).
capture_stderr: Capture the stderr as a string (rather than passing it
through to the terminal).
stdin_string: Text to send to the command via stdin. If None, stdin is
inherited from the caller.
Returns:
A string containing the concatenation of any captured stdout plus
any captured stderr.
"""
if verbose:
PrintCommand(Stringify(args))
if env.dry_run:
return
stdout_redir = None
stderr_redir = None
stdin_redir = None
if capture_stdout:
stdout_redir = subprocess.PIPE
if capture_stderr:
stderr_redir = subprocess.PIPE
if stdin_string:
stdin_redir = PrepareStdin(stdin_string)
p = None
try:
# PNaCl toolchain executables (pnacl-translate, readelf) are scripts
# not binaries, so it doesn't want to run on Windows without a shell.
use_shell = True if pynacl.platform.IsWindows() else False
p = subprocess.Popen(args, stdin=stdin_redir, stdout=stdout_redir,
stderr=stderr_redir, cwd=cwd, shell=use_shell)
(stdout_contents, stderr_contents) = p.communicate()
except KeyboardInterrupt, e:
if p:
p.kill()
raise e
except BaseException, e:
if p:
p.kill()
raise e
env.last_return_code = p.returncode
if p.returncode != 0 and exit_on_failure:
if capture_stdout or capture_stderr:
# Print an extra message if any of the program's output wasn't
# going to the screen.
Fatal('Failed to run: %s' % Stringify(args))
sys.exit(p.returncode)
return (stdout_contents or '') + (stderr_contents or '')
def ArgSplit(argv):
"""Parse command-line arguments.
Returns:
Tuple (nexe, nexe_args) where nexe is the name of the nexe or pexe
to execute, and nexe_args are its runtime arguments.
"""
desc = ('Run a command-line nexe (or pexe). Automatically handles\n' +
'translation, building sel_ldr, and building the IRT.')
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('-L', action='append', dest='library_path', default=[],
help='Additional library path for runnable-ld.so.')
parser.add_argument('--paranoid', action='store_true', default=False,
help='Remove -S (signals) and -a (file access) ' +
'from the default sel_ldr options.')
parser.add_argument('--loader', dest='force_sel_ldr', metavar='SEL_LDR',
help='Path to sel_ldr. "dbg" or "opt" means use ' +
'dbg or opt version of sel_ldr. ' +
'By default, use whichever sel_ldr already exists; ' +
'otherwise, build opt version.')
parser.add_argument('--irt', dest='force_irt', metavar='IRT',
help='Path to IRT nexe. "core" or "none" means use ' +
'Core IRT or no IRT. By default, use whichever IRT ' +
'already exists; otherwise, build irt_core.')
parser.add_argument('--dry-run', '-n', action='store_true', default=False,
help="Just print commands, don't execute them.")
parser.add_argument('--quiet', '-q', action='store_true', default=False,
help="Don't print anything.")
parser.add_argument('--retries', default='0', metavar='N',
help='Retry sel_ldr command up to N times (if ' +
'flakiness is expected). This argument implies ' +
'--collate.')
parser.add_argument('--collate', action='store_true', default=False,
help="Combine/collate sel_ldr's stdout and stderr, and " +
"print to stdout.")
parser.add_argument('--trace', '-t', action='store_true', default=False,
help='Trace qemu execution.')
parser.add_argument('--debug', '-g', action='store_true', default=False,
help='Run sel_ldr with debugging enabled.')
parser.add_argument('-arch', '-m', dest='arch', action='store',
choices=sorted(
pynacl.platform.ARCH3264_LIST + ['env']),
help=('Specify architecture for PNaCl translation. ' +
'"env" is a special value which obtains the ' +
'architecture from the environment ' +
'variable "%s".') % ARCH_ENV_VAR_NAME)
parser.add_argument('remainder', nargs=argparse.REMAINDER,
metavar='nexe/pexe + args')
parser.add_argument('--pnacl-allow-zerocost-eh', action='store_true',
default=False, dest='zerocost_eh',
help='Allow non-stable zero-cost exception handling.')
(options, args) = parser.parse_known_args(argv)
# Copy the options into env.
for (key, value) in vars(options).iteritems():
setattr(env, key, value)
args += options.remainder
nexe = args[0] if len(args) else ''
env.is_pnacl = nexe.endswith('.pexe')
if env.arch == 'env':
# Get the architecture from the environment.
try:
env.arch = os.environ[ARCH_ENV_VAR_NAME]
except Exception as e:
Fatal(('Option "-arch env" specified, but environment variable ' +
'"%s" not specified: %s') % (ARCH_ENV_VAR_NAME, e))
if not env.arch and env.is_pnacl:
# For NaCl we'll figure out the architecture from the nexe's
# architecture, but for PNaCl we first need to translate and the
# user didn't tell us which architecture to translate to. Be nice
# and just translate to the current machine's architecture.
env.arch = pynacl.platform.GetArch3264()
# Canonicalize env.arch.
env.arch = pynacl.platform.GetArch3264(env.arch)
return nexe, args[1:]
def Fatal(msg, *args):
if len(args) > 0:
msg = msg % args
print msg
sys.exit(1)
def FindReadElf():
'''Returns the path of "readelf" binary.'''
candidates = []
# Use PNaCl's if it available.
candidates.append(
os.path.join(env.pnacl_base, 'bin', 'pnacl-readelf'))
# Otherwise, look for the system readelf
for path in os.environ['PATH'].split(os.pathsep):
candidates.append(os.path.join(path, 'readelf'))
for readelf in candidates:
if os.path.exists(readelf):
return readelf
Fatal('Cannot find readelf!')
def ReadELFInfo(f):
''' Returns: (arch, is_dynamic, is_glibc_static) '''
readelf = env.readelf
readelf_out = Run([readelf, '-lh', f], capture_stdout=True, verbose=False)
machine_line = None
is_dynamic = False
is_glibc_static = False
for line in readelf_out.split('\n'):
line = line.strip()
if line.startswith('Machine:'):
machine_line = line
if line.startswith('DYNAMIC'):
is_dynamic = True
if '__libc_atexit' in line:
is_glibc_static = True
if not machine_line:
Fatal('Script error: readelf output did not make sense!')
if 'Intel 80386' in machine_line:
arch = 'x86-32'
elif 'X86-64' in machine_line:
arch = 'x86-64'
elif 'ARM' in machine_line:
arch = 'arm'
elif 'MIPS' in machine_line:
arch = 'mips32'
else:
Fatal('%s: Unknown machine type', f)
return (arch, is_dynamic, is_glibc_static)
def FindBaseDir():
'''Crawl backwards, starting from the directory containing this script,
until we find the native_client/ directory.
'''
curdir = os.path.abspath(sys.argv[0])
while os.path.basename(curdir) != 'native_client':
curdir,subdir = os.path.split(curdir)
if subdir == '':
# We've hit the file system root
break
if os.path.basename(curdir) != 'native_client':
Fatal('Unable to find native_client directory!')
return curdir
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
gold3bear/swift | refs/heads/master | test/unit/common/test_base_storage_server.py | 26 | # Copyright (c) 2010-2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
from swift.common.base_storage_server import BaseStorageServer
from tempfile import mkdtemp
from swift import __version__ as swift_version
from swift.common.swob import Request
from swift.common.utils import get_logger, public
from shutil import rmtree
class FakeOPTIONS(BaseStorageServer):
server_type = 'test-server'
def __init__(self, conf, logger=None):
super(FakeOPTIONS, self).__init__(conf)
self.logger = logger or get_logger(conf, log_route='test-server')
class FakeANOTHER(FakeOPTIONS):
@public
def ANOTHER(self):
"""this is to test adding to allowed_methods"""
pass
class TestBaseStorageServer(unittest.TestCase):
"""Test swift.common.base_storage_server"""
def setUp(self):
self.tmpdir = mkdtemp()
self.testdir = os.path.join(self.tmpdir,
'tmp_test_base_storage_server')
def tearDown(self):
"""Tear down for testing swift.common.base_storage_server"""
rmtree(self.tmpdir)
def test_server_type(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
baseserver = BaseStorageServer(conf)
msg = 'Storage nodes have not implemented the Server type.'
try:
baseserver.server_type
except NotImplementedError as e:
self.assertEquals(str(e), msg)
def test_allowed_methods(self):
conf = {'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'}
# test what's available in the base class
allowed_methods_test = FakeOPTIONS(conf).allowed_methods
self.assertEquals(allowed_methods_test, ['OPTIONS'])
# test that a subclass can add allowed methods
allowed_methods_test = FakeANOTHER(conf).allowed_methods
allowed_methods_test.sort()
self.assertEquals(allowed_methods_test, ['ANOTHER', 'OPTIONS'])
conf = {'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'true'}
# test what's available in the base class
allowed_methods_test = FakeOPTIONS(conf).allowed_methods
self.assertEquals(allowed_methods_test, [])
# test that a subclass can add allowed methods
allowed_methods_test = FakeANOTHER(conf).allowed_methods
self.assertEquals(allowed_methods_test, [])
conf = {'devices': self.testdir, 'mount_check': 'false'}
# test what's available in the base class
allowed_methods_test = FakeOPTIONS(conf).allowed_methods
self.assertEquals(allowed_methods_test, ['OPTIONS'])
# test that a subclass can add allowed methods
allowed_methods_test = FakeANOTHER(conf).allowed_methods
allowed_methods_test.sort()
self.assertEquals(allowed_methods_test, ['ANOTHER', 'OPTIONS'])
def test_OPTIONS_error(self):
msg = 'Storage nodes have not implemented the Server type.'
conf = {'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'}
baseserver = BaseStorageServer(conf)
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
try:
baseserver.OPTIONS(req)
except NotImplementedError as e:
self.assertEquals(str(e), msg)
def test_OPTIONS(self):
conf = {'devices': self.testdir, 'mount_check': 'false',
'replication_server': 'false'}
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = FakeOPTIONS(conf).OPTIONS(req)
self.assertEquals(resp.headers['Allow'], 'OPTIONS')
self.assertEquals(resp.headers['Server'],
'test-server/' + swift_version)
|
mjtamlyn/django | refs/heads/master | tests/migrations/migrations_test_apps/lookuperror_c/migrations/0002_c2.py | 133 | from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('lookuperror_a', '0002_a2'),
('lookuperror_c', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='C2',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('a1', models.ForeignKey('lookuperror_a.A1', models.CASCADE)),
],
),
]
|
dvro/sf-open-data-analysis | refs/heads/master | app/core/clustering.py | 1 | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import cluster
from sklearn.neighbors import kneighbors_graph
def perform_clustering(algorithm, n_clusters, X):
print algorithm
if algorithm == 'MiniBatchKMeans':
model = cluster.MiniBatchKMeans(n_clusters=n_clusters)
elif algorithm == 'Birch':
model = cluster.Birch(n_clusters=n_clusters)
elif algorithm == 'DBSCAN':
model = cluster.DBSCAN(eps=.2)
elif algorithm == 'HDBSCAN':
import hdbscan
model = hdbscan.HDBSCAN(min_cluster_size=40, gen_min_span_tree=True)
elif algorithm == 'AffinityPropagation':
model = cluster.AffinityPropagation(damping=.9,
preference=-200)
elif algorithm == 'MeanShift':
bandwidth = cluster.estimate_bandwidth(X, quantile=0.3)
model = cluster.MeanShift(bandwidth=bandwidth,
bin_seeding=True)
elif algorithm == 'SpectralClustering':
model = cluster.SpectralClustering(n_clusters=n_clusters,
eigen_solver='arpack',
affinity="nearest_neighbors")
elif algorithm == 'Ward':
connectivity = kneighbors_graph(X, n_neighbors=10, include_self=False)
connectivity = 0.5 * (connectivity + connectivity.T)
model = cluster.AgglomerativeClustering(n_clusters=n_clusters,
linkage='ward',
connectivity=connectivity)
elif algorithm == 'AgglomerativeClustering':
connectivity = kneighbors_graph(X, n_neighbors=10, include_self=False)
connectivity = 0.5 * (connectivity + connectivity.T)
model = cluster.AgglomerativeClustering(linkage="average",
affinity="cityblock",
n_clusters=n_clusters,
connectivity=connectivity)
return get_labels(X, model)
def get_labels(X, model):
model.fit(X)
if hasattr(model, 'labels_'):
labels = model.labels_.astype(np.int)
else:
labels = model.predict(X)
return labels
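# Usage sketch (hypothetical data): cluster 100 random 2-D points into three
# groups with the functions above.
#   X = np.random.rand(100, 2)
#   labels = perform_clustering('MiniBatchKMeans', 3, X)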
|
sanjuro/RCJK | refs/heads/master | vendor/django/contrib/gis/tests/test_spatialrefsys.py | 9 | import unittest
from django.contrib.gis.db.backend import SpatialBackend
from django.contrib.gis.tests.utils import mysql, no_mysql, oracle, postgis, spatialite
if not mysql:
from django.contrib.gis.models import SpatialRefSys
test_srs = ({'srid' : 4326,
'auth_name' : ('EPSG', True),
'auth_srid' : 4326,
'srtext' : 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
'srtext14' : 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
'proj4' : '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ',
'spheroid' : 'WGS 84', 'name' : 'WGS 84',
'geographic' : True, 'projected' : False, 'spatialite' : True,
'ellipsoid' : (6378137.0, 6356752.3, 298.257223563), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'eprec' : (1, 1, 9),
},
{'srid' : 32140,
'auth_name' : ('EPSG', False),
'auth_srid' : 32140,
'srtext' : 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',
'srtext14': 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],UNIT["metre",1,AUTHORITY["EPSG","9001"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],AUTHORITY["EPSG","32140"],AXIS["X",EAST],AXIS["Y",NORTH]]',
'proj4' : '+proj=lcc +lat_1=30.28333333333333 +lat_2=28.38333333333333 +lat_0=27.83333333333333 +lon_0=-99 +x_0=600000 +y_0=4000000 +ellps=GRS80 +datum=NAD83 +units=m +no_defs ',
'spheroid' : 'GRS 1980', 'name' : 'NAD83 / Texas South Central',
'geographic' : False, 'projected' : True, 'spatialite' : False,
'ellipsoid' : (6378137.0, 6356752.31414, 298.257222101), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'eprec' : (1, 5, 10),
},
)
if SpatialBackend.postgis:
major, minor1, minor2 = SpatialBackend.version
    POSTGIS_14 = major >= 1 and minor1 >= 4
class SpatialRefSysTest(unittest.TestCase):
@no_mysql
def test01_retrieve(self):
"Testing retrieval of SpatialRefSys model objects."
for sd in test_srs:
srs = SpatialRefSys.objects.get(srid=sd['srid'])
self.assertEqual(sd['srid'], srs.srid)
            # Some of the authority names are borked on Oracle, e.g., SRID=32140.
            # Also, Oracle Spatial seems to add extraneous info to fields, hence
            # the testing with the 'startswith' flag.
auth_name, oracle_flag = sd['auth_name']
if postgis or (oracle and oracle_flag):
self.assertEqual(True, srs.auth_name.startswith(auth_name))
self.assertEqual(sd['auth_srid'], srs.auth_srid)
# No proj.4 and different srtext on oracle backends :(
if postgis:
if POSTGIS_14:
srtext = sd['srtext14']
else:
srtext = sd['srtext']
self.assertEqual(srtext, srs.wkt)
self.assertEqual(sd['proj4'], srs.proj4text)
@no_mysql
def test02_osr(self):
"Testing getting OSR objects from SpatialRefSys model objects."
for sd in test_srs:
sr = SpatialRefSys.objects.get(srid=sd['srid'])
self.assertEqual(True, sr.spheroid.startswith(sd['spheroid']))
self.assertEqual(sd['geographic'], sr.geographic)
self.assertEqual(sd['projected'], sr.projected)
if not (spatialite and not sd['spatialite']):
# Can't get 'NAD83 / Texas South Central' from PROJ.4 string
# on SpatiaLite
self.assertEqual(True, sr.name.startswith(sd['name']))
# Testing the SpatialReference object directly.
if postgis or spatialite:
srs = sr.srs
self.assertEqual(sd['proj4'], srs.proj4)
# No `srtext` field in the `spatial_ref_sys` table in SpatiaLite
if not spatialite:
if POSTGIS_14:
srtext = sd['srtext14']
else:
srtext = sd['srtext']
self.assertEqual(srtext, srs.wkt)
@no_mysql
def test03_ellipsoid(self):
"Testing the ellipsoid property."
for sd in test_srs:
# Getting the ellipsoid and precision parameters.
ellps1 = sd['ellipsoid']
prec = sd['eprec']
# Getting our spatial reference and its ellipsoid
srs = SpatialRefSys.objects.get(srid=sd['srid'])
ellps2 = srs.ellipsoid
for i in range(3):
param1 = ellps1[i]
param2 = ellps2[i]
self.assertAlmostEqual(ellps1[i], ellps2[i], prec[i])
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(SpatialRefSysTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
|
surligas/cs436-gnuradio | refs/heads/master | gr-blocks/python/blocks/qa_vector_insert.py | 20 | #!/usr/bin/env python
#
# Copyright 2012-2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
import math
class test_vector_insert(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_001(self):
src_data = [float(x) for x in range(16)]
expected_result = tuple(src_data)
        period = 9177
        offset = 0
        src = blocks.null_source(1)
        head = blocks.head(1, 10000000)
        ins = blocks.vector_insert_b([1], period, offset)
dst = blocks.vector_sink_b()
self.tb.connect(src, head, ins, dst)
self.tb.run()
result_data = dst.data()
for i in range(10000):
if(i%period == offset):
self.assertEqual(1, result_data[i])
else:
self.assertEqual(0, result_data[i])
if __name__ == '__main__':
gr_unittest.run(test_vector_insert, "test_vector_insert.xml")
|
randomtask1155/gpdb | refs/heads/master | gpMgmt/sbin/gpsetdbid.py | 45 | #!/usr/bin/env python
'''
Copyright (c) Greenplum Inc 2010. All Rights Reserved.
This is a private script called by the Greenplum Management scripts.
With the script you can create the gp_dbid file within a segment's data
directory.
This script does NOT modify the configuration information stored within
the database.
'''
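# Example invocation (hypothetical segment directory and dbid):
#   gpsetdbid.py --directory /data/primary/gpseg0 --dbid 2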
import os
from optparse import OptionGroup
from gppylib.gp_dbid import writeGpDbidFile
from gppylib.mainUtils import *
from gppylib.gpparseopts import OptParser, OptChecker
from gppylib.gplog import get_logger_if_verbose
__help__ = [""]
#------------------------------- GpSetDBId --------------------------------
class GpSetDBId:
"""
Setup a gp_dbid file for a specified directory.
"""
def __init__(self, options):
self.__directory = options.directory
self.__dbid = options.dbid
def run(self):
writeGpDbidFile(self.__directory, self.__dbid, logger=get_logger_if_verbose())
def cleanup(self):
pass
#-------------------------------------------------------------------------
@staticmethod
def createParser():
"""
Constructs and returns an option parser.
Called by simple_main()
"""
parser = OptParser(option_class=OptChecker,
version='%prog version $Revision: $')
parser.setHelp(__help__)
addStandardLoggingAndHelpOptions(parser, False)
opts = OptionGroup(parser, "Required Options")
opts.add_option('-d', '--directory', type='string')
opts.add_option('-i', '--dbid', type='int')
parser.add_option_group(opts)
parser.set_defaults()
return parser
#-------------------------------------------------------------------------
@staticmethod
def createProgram(options, args):
"""
Construct and returns a GpSetDBId object.
Called by simple_main()
"""
# sanity check
if len(args) > 0 :
raise ProgramArgumentValidationException(
"too many arguments: only options may be specified")
if not options.directory:
raise ProgramArgumentValidationException("--directory is required")
if not options.dbid:
raise ProgramArgumentValidationException("--dbid is required")
return GpSetDBId(options)
#------------------------------- Mainline --------------------------------
if __name__ == '__main__':
mainOptions = {
'suppressStartupLogMessage': True,
'useHelperToolLogging': True
}
simple_main(GpSetDBId.createParser,
GpSetDBId.createProgram,
mainOptions)
|
paalge/scikit-image | refs/heads/master | doc/examples/transform/plot_pyramid.py | 3 | """
====================
Build image pyramids
====================
The ``pyramid_gaussian`` function takes an image and yields successive images
shrunk by a constant scale factor. Image pyramids are often used, e.g., to
implement algorithms for denoising, texture discrimination, and scale-
invariant detection.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data
from skimage.transform import pyramid_gaussian
image = data.astronaut()
rows, cols, dim = image.shape
pyramid = tuple(pyramid_gaussian(image, downscale=2))
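# The smaller levels are stacked vertically to the right of the level-0 image:
# the widest of them is cols // 2, and their combined height
# (rows // 2 + rows // 4 + ...) stays within rows, so a
# (rows, cols + cols // 2) canvas fits the whole pyramid.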
composite_image = np.zeros((rows, cols + cols // 2, 3), dtype=np.double)
composite_image[:rows, :cols, :] = pyramid[0]
i_row = 0
for p in pyramid[1:]:
n_rows, n_cols = p.shape[:2]
composite_image[i_row:i_row + n_rows, cols:cols + n_cols] = p
i_row += n_rows
fig, ax = plt.subplots()
ax.imshow(composite_image)
plt.show()
|
mbayon/TFG-MachineLearning | refs/heads/master | venv/lib/python3.6/site-packages/django/core/cache/utils.py | 585 | from __future__ import unicode_literals
import hashlib
from django.utils.encoding import force_bytes
from django.utils.http import urlquote
TEMPLATE_FRAGMENT_KEY_TEMPLATE = 'template.cache.%s.%s'
def make_template_fragment_key(fragment_name, vary_on=None):
if vary_on is None:
vary_on = ()
key = ':'.join(urlquote(var) for var in vary_on)
args = hashlib.md5(force_bytes(key))
return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, args.hexdigest())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.