code (stringlengths 2–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses 1) | license (stringclasses 15) | size (int32 2–1.05M)
---|---|---|---|---|---|
import os
import platform
import sys
from logging.handlers import SysLogHandler
LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
def get_logger_config(log_dir,
logging_env="no_env",
tracking_filename="tracking.log",
edx_filename="edx.log",
dev_env=False,
syslog_addr=None,
debug=False,
local_loglevel='INFO',
console_loglevel=None,
service_variant=None):
"""
Return the appropriate logging config dictionary. You should assign the
result of this to the LOGGING var in your settings. The reason it's done
this way instead of registering directly is because I didn't want to worry
about resetting the logging state if this is called multiple times when
settings are extended.
If dev_env is set to true logging will not be done via local rsyslogd,
instead, tracking and application logs will be dropped in log_dir.
"tracking_filename" and "edx_filename" are ignored unless dev_env
is set to true since otherwise logging is handled by rsyslogd.
"""
# Revert to INFO if an invalid string is passed in
if local_loglevel not in LOG_LEVELS:
local_loglevel = 'INFO'
if console_loglevel is None or console_loglevel not in LOG_LEVELS:
console_loglevel = 'DEBUG' if debug else 'INFO'
if service_variant is None:
# default to a blank string so that if SERVICE_VARIANT is not
# set we will not log to a sub directory
service_variant = ''
hostname = platform.node().split(".")[0]
syslog_format = ("[service_variant={service_variant}]"
"[%(name)s][env:{logging_env}] %(levelname)s "
"[{hostname} %(process)d] [%(filename)s:%(lineno)d] "
"- %(message)s").format(service_variant=service_variant,
logging_env=logging_env,
hostname=hostname)
handlers = ['console', 'local'] if debug else ['console',
'syslogger-remote', 'local']
logger_config = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s %(levelname)s %(process)d '
'[%(name)s] %(filename)s:%(lineno)d - %(message)s',
},
'syslog_format': {'format': syslog_format},
'raw': {'format': '%(message)s'},
},
'handlers': {
'console': {
'level': console_loglevel,
'class': 'logging.StreamHandler',
'formatter': 'standard',
'stream': sys.stderr,
},
'syslogger-remote': {
'level': 'INFO',
'class': 'logging.handlers.SysLogHandler',
'address': syslog_addr,
'formatter': 'syslog_format',
},
'newrelic': {
'level': 'ERROR',
'class': 'lms.lib.newrelic_logging.NewRelicHandler',
'formatter': 'raw',
}
},
'loggers': {
'tracking': {
'handlers': ['tracking'],
'level': 'DEBUG',
'propagate': False,
},
'': {
'handlers': handlers,
'level': 'DEBUG',
'propagate': False
},
}
}
if dev_env:
tracking_file_loc = os.path.join(log_dir, tracking_filename)
edx_file_loc = os.path.join(log_dir, edx_filename)
logger_config['handlers'].update({
'local': {
'class': 'logging.handlers.RotatingFileHandler',
'level': local_loglevel,
'formatter': 'standard',
'filename': edx_file_loc,
'maxBytes': 1024 * 1024 * 2,
'backupCount': 5,
},
'tracking': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': tracking_file_loc,
'formatter': 'raw',
'maxBytes': 1024 * 1024 * 2,
'backupCount': 5,
},
})
else:
# for production environments we will only
# log INFO and up
logger_config['loggers']['']['level'] = 'INFO'
logger_config['handlers'].update({
'local': {
'level': local_loglevel,
'class': 'logging.handlers.SysLogHandler',
'address': '/dev/log',
'formatter': 'syslog_format',
'facility': SysLogHandler.LOG_LOCAL0,
},
'tracking': {
'level': 'DEBUG',
'class': 'logging.handlers.SysLogHandler',
'address': '/dev/log',
'facility': SysLogHandler.LOG_LOCAL1,
'formatter': 'raw',
},
})
return logger_config
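# Usage sketch (hedged): this mirrors the docstring's advice to assign the
# result to LOGGING in a settings module; LOG_DIR and the argument values
# below are illustrative assumptions, not taken from this file.
#
#   from logsettings import get_logger_config
#   LOGGING = get_logger_config(LOG_DIR,
#                               logging_env="dev",
#                               dev_env=True,
#                               debug=True)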
| yokose-ks/edx-platform | common/lib/logsettings.py | Python | agpl-3.0 | 5,212 |
#!/usr/bin/env python
from nose.tools import ok_
from nose.tools import eq_
import networkx as nx
from networkx.algorithms.approximation import min_weighted_dominating_set
from networkx.algorithms.approximation import min_edge_dominating_set
class TestMinWeightDominatingSet:
def test_min_weighted_dominating_set(self):
graph = nx.Graph()
graph.add_edge(1, 2)
graph.add_edge(1, 5)
graph.add_edge(2, 3)
graph.add_edge(2, 5)
graph.add_edge(3, 4)
graph.add_edge(3, 6)
graph.add_edge(5, 6)
vertices = set([1, 2, 3, 4, 5, 6])
# due to ties, this might be hard to test tight bounds
dom_set = min_weighted_dominating_set(graph)
for vertex in vertices - dom_set:
neighbors = set(graph.neighbors(vertex))
ok_(len(neighbors & dom_set) > 0, "Non dominating set found!")
def test_star_graph(self):
"""Tests that an approximate dominating set for the star graph,
even when the center node does not have the smallest integer
label, gives just the center node.
For more information, see #1527.
"""
# Create a star graph in which the center node has the highest
# label instead of the lowest.
G = nx.star_graph(10)
G = nx.relabel_nodes(G, {0: 9, 9: 0})
eq_(min_weighted_dominating_set(G), {9})
def test_min_edge_dominating_set(self):
graph = nx.path_graph(5)
dom_set = min_edge_dominating_set(graph)
# This is not a rigorous test, but it is good enough for now.
for edge in graph.edges_iter():
if edge in dom_set:
continue
else:
u, v = edge
found = False
for dom_edge in dom_set:
found |= u in dom_edge or v in dom_edge
ok_(found, "Non adjacent edge found!")
graph = nx.complete_graph(10)
dom_set = min_edge_dominating_set(graph)
# This is not a rigorous test, but it is good enough for now.
for edge in graph.edges_iter():
if edge in dom_set:
continue
else:
u, v = edge
found = False
for dom_edge in dom_set:
found |= u in dom_edge or v in dom_edge
ok_(found, "Non adjacent edge found!")
| LumPenPacK/NetworkExtractionFromImages | win_build/nefi2_win_amd64_msvc_2015/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py | Python | bsd-2-clause | 2,410 |
"""
Tests for file field behavior, and specifically #639, in which Model.save()
gets called *again* for each FileField. This test will fail if calling a
ModelForm's save() method causes Model.save() to be called more than once.
"""
from __future__ import absolute_import
import os
import shutil
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils import unittest
from .models import Photo, PhotoForm, temp_storage_dir
class Bug639Test(unittest.TestCase):
def testBug639(self):
"""
Simulate a file upload and check how many times Model.save() gets
called.
"""
# Grab an image for testing.
filename = os.path.join(os.path.dirname(__file__), "test.jpg")
img = open(filename, "rb").read()
# Fake a POST QueryDict and FILES MultiValueDict.
data = {'title': 'Testing'}
files = {"image": SimpleUploadedFile('test.jpg', img, 'image/jpeg')}
form = PhotoForm(data=data, files=files)
p = form.save()
# Check the savecount stored on the object (see the model).
self.assertEqual(p._savecount, 1)
def tearDown(self):
"""
Make sure to delete the "uploaded" file to avoid clogging /tmp.
"""
p = Photo.objects.get()
p.image.delete(save=False)
shutil.rmtree(temp_storage_dir)
| LethusTI/supportcenter | vendor/django/tests/regressiontests/bug639/tests.py | Python | gpl-3.0 | 1,365 |
"""Generic thread tests.
Meant to be used by dummy_thread and thread. To allow for different modules
to be used, test_main() can be called with the module to use as the thread
implementation as its sole argument.
"""
import dummy_thread as _thread
import time
import Queue
import random
import unittest
from test import test_support
DELAY = 0 # Set > 0 when testing a module other than dummy_thread, such as
# the 'thread' module.
class LockTests(unittest.TestCase):
"""Test lock objects."""
def setUp(self):
# Create a lock
self.lock = _thread.allocate_lock()
def test_initlock(self):
#Make sure locks start locked
self.failUnless(not self.lock.locked(),
"Lock object is not initialized unlocked.")
def test_release(self):
# Test self.lock.release()
self.lock.acquire()
self.lock.release()
self.failUnless(not self.lock.locked(),
"Lock object did not release properly.")
def test_improper_release(self):
#Make sure release of an unlocked thread raises _thread.error
self.failUnlessRaises(_thread.error, self.lock.release)
def test_cond_acquire_success(self):
#Make sure the conditional acquiring of the lock works.
self.failUnless(self.lock.acquire(0),
"Conditional acquiring of the lock failed.")
def test_cond_acquire_fail(self):
#Test acquiring locked lock returns False
self.lock.acquire(0)
self.failUnless(not self.lock.acquire(0),
"Conditional acquiring of a locked lock incorrectly "
"succeeded.")
def test_uncond_acquire_success(self):
#Make sure unconditional acquiring of a lock works.
self.lock.acquire()
self.failUnless(self.lock.locked(),
"Uncondional locking failed.")
def test_uncond_acquire_return_val(self):
#Make sure that an unconditional locking returns True.
self.failUnless(self.lock.acquire(1) is True,
"Unconditional locking did not return True.")
def test_uncond_acquire_blocking(self):
#Make sure that unconditional acquiring of a locked lock blocks.
def delay_unlock(to_unlock, delay):
"""Hold on to lock for a set amount of time before unlocking."""
time.sleep(delay)
to_unlock.release()
self.lock.acquire()
start_time = int(time.time())
_thread.start_new_thread(delay_unlock,(self.lock, DELAY))
if test_support.verbose:
print
print "*** Waiting for thread to release the lock "\
"(approx. %s sec.) ***" % DELAY
self.lock.acquire()
end_time = int(time.time())
if test_support.verbose:
print "done"
self.failUnless((end_time - start_time) >= DELAY,
"Blocking by unconditional acquiring failed.")
class MiscTests(unittest.TestCase):
"""Miscellaneous tests."""
def test_exit(self):
#Make sure _thread.exit() raises SystemExit
self.failUnlessRaises(SystemExit, _thread.exit)
def test_ident(self):
#Test sanity of _thread.get_ident()
self.failUnless(isinstance(_thread.get_ident(), int),
"_thread.get_ident() returned a non-integer")
self.failUnless(_thread.get_ident() != 0,
"_thread.get_ident() returned 0")
def test_LockType(self):
#Make sure _thread.LockType is the same type as _thread.allocate_lock() returns
self.failUnless(isinstance(_thread.allocate_lock(), _thread.LockType),
"_thread.LockType is not an instance of what is "
"returned by _thread.allocate_lock()")
def test_interrupt_main(self):
#Calling start_new_thread with a function that executes interrupt_main
# should raise KeyboardInterrupt upon completion.
def call_interrupt():
_thread.interrupt_main()
self.failUnlessRaises(KeyboardInterrupt, _thread.start_new_thread,
call_interrupt, tuple())
def test_interrupt_in_main(self):
# Make sure that if interrupt_main is called in the main thread then
# KeyboardInterrupt is raised instantly.
self.failUnlessRaises(KeyboardInterrupt, _thread.interrupt_main)
class ThreadTests(unittest.TestCase):
"""Test thread creation."""
def test_arg_passing(self):
#Make sure that parameter passing works.
def arg_tester(queue, arg1=False, arg2=False):
"""Use to test _thread.start_new_thread() passes args properly."""
queue.put((arg1, arg2))
testing_queue = Queue.Queue(1)
_thread.start_new_thread(arg_tester, (testing_queue, True, True))
result = testing_queue.get()
self.failUnless(result[0] and result[1],
"Argument passing for thread creation using tuple failed")
_thread.start_new_thread(arg_tester, tuple(), {'queue':testing_queue,
'arg1':True, 'arg2':True})
result = testing_queue.get()
self.failUnless(result[0] and result[1],
"Argument passing for thread creation using kwargs failed")
_thread.start_new_thread(arg_tester, (testing_queue, True), {'arg2':True})
result = testing_queue.get()
self.failUnless(result[0] and result[1],
"Argument passing for thread creation using both tuple"
" and kwargs failed")
def test_multi_creation(self):
#Make sure multiple threads can be created.
def queue_mark(queue, delay):
"""Wait for ``delay`` seconds and then put something into ``queue``"""
time.sleep(delay)
queue.put(_thread.get_ident())
thread_count = 5
testing_queue = Queue.Queue(thread_count)
if test_support.verbose:
print
print "*** Testing multiple thread creation "\
"(will take approx. %s to %s sec.) ***" % (DELAY, thread_count)
for count in xrange(thread_count):
if DELAY:
local_delay = round(random.random(), 1)
else:
local_delay = 0
_thread.start_new_thread(queue_mark,
(testing_queue, local_delay))
time.sleep(DELAY)
if test_support.verbose:
print 'done'
self.failUnless(testing_queue.qsize() == thread_count,
"Not all %s threads executed properly after %s sec." %
(thread_count, DELAY))
def test_main(imported_module=None):
global _thread, DELAY
if imported_module:
_thread = imported_module
DELAY = 2
if test_support.verbose:
print
print "*** Using %s as _thread module ***" % _thread
test_support.run_unittest(LockTests, MiscTests, ThreadTests)
if __name__ == '__main__':
test_main()
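# As the module docstring says, test_main() can be handed the thread
# implementation to exercise. A sketch against the real 'thread' module
# (Python 2):
#
#   import thread
#   test_main(imported_module=thread)  # also bumps DELAY to 2 seconds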
| xbmc/atv2 | xbmc/lib/libPython/Python/Lib/test/test_dummy_thread.py | Python | gpl-2.0 | 7,139 |
from statsmodels.regression.linear_model import GLS
gls = GLS.from_formula
from statsmodels.regression.linear_model import WLS
wls = WLS.from_formula
from statsmodels.regression.linear_model import OLS
ols = OLS.from_formula
from statsmodels.regression.linear_model import GLSAR
glsar = GLSAR.from_formula
from statsmodels.regression.mixed_linear_model import MixedLM
mixedlm = MixedLM.from_formula
from statsmodels.genmod.generalized_linear_model import GLM
glm = GLM.from_formula
from statsmodels.robust.robust_linear_model import RLM
rlm = RLM.from_formula
from statsmodels.discrete.discrete_model import MNLogit
mnlogit = MNLogit.from_formula
from statsmodels.discrete.discrete_model import Logit
logit = Logit.from_formula
from statsmodels.discrete.discrete_model import Probit
probit = Probit.from_formula
from statsmodels.discrete.discrete_model import Poisson
poisson = Poisson.from_formula
from statsmodels.discrete.discrete_model import NegativeBinomial
negativebinomial = NegativeBinomial.from_formula
from statsmodels.regression.quantile_regression import QuantReg
quantreg = QuantReg.from_formula
from statsmodels.duration.hazard_regression import PHReg
phreg = PHReg.from_formula
from statsmodels.genmod.generalized_estimating_equations import (GEE,
OrdinalGEE, NominalGEE)
gee = GEE.from_formula
ordinal_gee = OrdinalGEE.from_formula
nominal_gee = NominalGEE.from_formula
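# Usage sketch for the formula aliases above: each lowercase name is just
# Model.from_formula, so it takes a patsy formula plus data. The DataFrame
# 'df' and its column names here are hypothetical.
#
#   import statsmodels.formula.api as smf
#   result = smf.ols('y ~ x1 + x2', data=df).fit()
#   print(result.summary())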
| hlin117/statsmodels | statsmodels/formula/api.py | Python | bsd-3-clause | 1,392 |
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import network
ALIAS = 'os-floating-ip-pools'
authorize = extensions.os_compute_authorizer(ALIAS)
def _translate_floating_ip_view(pool_name):
return {
'name': pool_name,
}
def _translate_floating_ip_pools_view(pools):
return {
'floating_ip_pools': [_translate_floating_ip_view(pool_name)
for pool_name in pools]
}
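# Shape sketch: for pools ['nova', 'public'] the view above returns
#   {'floating_ip_pools': [{'name': 'nova'}, {'name': 'public'}]}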
class FloatingIPPoolsController(wsgi.Controller):
"""The Floating IP Pool API controller for the OpenStack API."""
def __init__(self):
self.network_api = network.API(skip_policy_check=True)
super(FloatingIPPoolsController, self).__init__()
@extensions.expected_errors(())
def index(self, req):
"""Return a list of pools."""
context = req.environ['nova.context']
authorize(context)
pools = self.network_api.get_floating_ip_pools(context)
return _translate_floating_ip_pools_view(pools)
class FloatingIpPools(extensions.V21APIExtensionBase):
"""Floating IPs support."""
name = "FloatingIpPools"
alias = ALIAS
version = 1
def get_resources(self):
resource = [extensions.ResourceExtension(ALIAS,
FloatingIPPoolsController())]
return resource
def get_controller_extensions(self):
"""It's an abstract function V21APIExtensionBase and the extension
will not be loaded without it.
"""
return []
| scripnichenko/nova | nova/api/openstack/compute/floating_ip_pools.py | Python | apache-2.0 | 2,196 |
import os
# toolchains options
ARCH = 'arm'
CPU = 'cortex-m3'
CROSS_TOOL = 'gcc'
if os.getenv('RTT_CC'):
CROSS_TOOL = os.getenv('RTT_CC')
if CROSS_TOOL == 'gcc':
PLATFORM = 'gcc'
EXEC_PATH = r'C:\Program Files (x86)\CodeSourcery\Sourcery G++ Lite\bin'  # raw string: '\b' in '\bin' would otherwise be a backspace escape
#EXEC_PATH = r'C:\Program Files (x86)\yagarto\bin'
elif CROSS_TOOL == 'keil':
print '================ERROR============================'
print 'Not support keil yet!'
print '================================================='
exit(0)
elif CROSS_TOOL == 'iar':
print '================ERROR============================'
print 'Not support iar yet!'
print '================================================='
exit(0)
if os.getenv('RTT_EXEC_PATH'):
EXEC_PATH = os.getenv('RTT_EXEC_PATH')
BUILD = 'debug'
# EFM32_BOARD = 'EFM32_G8XX_STK'
# EFM32_BOARD = 'EFM32_GXXX_DK'
EFM32_BOARD = 'EFM32GG_DK3750'
if EFM32_BOARD == 'EFM32_G8XX_STK':
EFM32_FAMILY = 'Gecko'
EFM32_TYPE = 'EFM32G890F128'
EFM32_LCD = 'none'
elif EFM32_BOARD == 'EFM32_GXXX_DK':
EFM32_FAMILY = 'Gecko'
EFM32_TYPE = 'EFM32G290F128'
EFM32_LCD = 'none'
elif EFM32_BOARD == 'EFM32GG_DK3750':
EFM32_FAMILY = 'Giant Gecko'
EFM32_TYPE = 'EFM32GG990F1024'
# EFM32_LCD = 'LCD_MAPPED'
EFM32_LCD = 'LCD_DIRECT'
if PLATFORM == 'gcc':
# toolchains
PREFIX = 'arm-none-eabi-'
CC = PREFIX + 'gcc'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'gcc'
TARGET_EXT = 'axf'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
DEVICE = ' -mcpu=cortex-m3 -mthumb -ffunction-sections -fdata-sections'
CFLAGS = DEVICE
AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp'
LFLAGS = DEVICE + ' -Wl,--gc-sections,-Map=rtthread-efm32.map,-cref,-u,__cs3_reset -T'
if EFM32_BOARD == 'EFM32_G8XX_STK' or EFM32_BOARD == 'EFM32_GXXX_DK':
LFLAGS += ' efm32g_rom.ld'
elif EFM32_BOARD == 'EFM32GG_DK3750':
LFLAGS += ' efm32gg_rom.ld'
CPATH = ''
LPATH = ''
if BUILD == 'debug':
CFLAGS += ' -O0 -gdwarf-2'
AFLAGS += ' -gdwarf-2'
else:
CFLAGS += ' -O2'
POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
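# Build sketch (assumes the usual RT-Thread scons workflow; the toolchain
# path is illustrative):
#   set RTT_CC=gcc
#   set RTT_EXEC_PATH=C:\gcc-arm-none-eabi\bin
#   scons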
| poranmeloge/test-github | stm32_rtt_wifi/bsp/efm32/rtconfig.py | Python | gpl-2.0 | 2,289 |
"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Platform/darwin.py 5023 2010/06/14 22:05:46 scons"
import posix
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
env['ENV']['PATH'] = env['ENV']['PATH'] + ':/sw/bin'
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| faarwa/EngSocP5 | zxing/cpp/scons/scons-local-2.0.0.final.0/SCons/Platform/darwin.py | Python | gpl-3.0 | 1,758 |
"""Example: statsmodels.OLS
"""
from statsmodels.datasets.longley import load
import statsmodels.api as sm
from statsmodels.iolib.table import SimpleTable, default_txt_fmt
import numpy as np
data = load()
data_orig = (data.endog.copy(), data.exog.copy())
#.. Note: In this example using zscored/standardized variables has no effect on
#.. regression estimates. Are there no numerical problems?
rescale = 0
#0: no rescaling, 1:demean, 2:standardize, 3:standardize and transform back
rescale_ratio = data.endog.std() / data.exog.std(0)
if rescale > 0:
# rescaling
data.endog -= data.endog.mean()
data.exog -= data.exog.mean(0)
if rescale > 1:
data.endog /= data.endog.std()
data.exog /= data.exog.std(0)
#skip because mean has been removed, but dimension is hardcoded in table
data.exog = sm.tools.add_constant(data.exog, prepend=False)
ols_model = sm.OLS(data.endog, data.exog)
ols_results = ols_model.fit()
# the Longley dataset is well known to have high multicollinearity
# one way to find the condition number is as follows
#Find OLS parameters for model with one explanatory variable dropped
resparams = np.nan * np.ones((7, 7))
res = sm.OLS(data.endog, data.exog).fit()
resparams[:, 0] = res.params
indall = list(range(7))
for i in range(6):
ind = indall[:]
del ind[i]
res = sm.OLS(data.endog, data.exog[:, ind]).fit()
resparams[ind, i + 1] = res.params
if rescale == 1:
pass
if rescale == 3:
resparams[:-1, :] *= rescale_ratio[:, None]
txt_fmt1 = default_txt_fmt
numformat = '%10.4f'
txt_fmt1 = dict(data_fmts=[numformat])
rowstubs = data.names[1:] + ['const']
headers = ['all'] + ['drop %s' % name for name in data.names[1:]]
tabl = SimpleTable(resparams, headers, rowstubs, txt_fmt=txt_fmt1)
nanstring = numformat % np.nan
nn = len(nanstring)
nanrep = ' ' * (nn - 1)
nanrep = nanrep[:nn // 2] + '-' + nanrep[nn // 2:]
print('Longley data - sensitivity to dropping an explanatory variable')
print(str(tabl).replace(nanstring, nanrep))
| DonBeo/statsmodels | examples/incomplete/ols_table.py | Python | bsd-3-clause | 1,999 |
#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os
import subprocess
import sys
from util import build_utils
def DoGcc(options):
build_utils.MakeDirectory(os.path.dirname(options.output))
gcc_cmd = [
'gcc', # invoke host gcc.
'-E', # stop after preprocessing.
'-D', 'ANDROID', # Specify ANDROID define for pre-processor.
'-x', 'c-header', # treat sources as C header files
'-P', # disable line markers, i.e. '#line 309'
'-I', options.include_path,
'-o', options.output,
options.template
]
build_utils.CheckCallDie(gcc_cmd)
def main(argv):
parser = optparse.OptionParser()
parser.add_option('--include-path', help='Include path for gcc.')
parser.add_option('--template', help='Path to template.')
parser.add_option('--output', help='Path for generated file.')
parser.add_option('--stamp', help='Path to touch on success.')
# TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
parser.add_option('--ignore', help='Ignored.')
options, _ = parser.parse_args()
DoGcc(options)
if options.stamp:
build_utils.Touch(options.stamp)
if __name__ == '__main__':
sys.exit(main(sys.argv))
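# Invocation sketch (flags per the option parser above; paths hypothetical):
#   python gcc_preprocess.py --include-path=include \
#       --template=foo.template --output=gen/foo.h --stamp=gen/foo.stamp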
| wangscript/libjingle-1 | trunk/build/android/gyp/gcc_preprocess.py | Python | bsd-3-clause | 1,415 |
# -*- coding: utf-8 -*-
from framework.routing import Rule, json_renderer
from website.addons.github import views
settings_routes = {
'rules': [
# Configuration
Rule(
[
'/project/<pid>/github/settings/',
'/project/<pid>/node/<nid>/github/settings/',
],
'post',
views.config.github_set_config,
json_renderer,
),
Rule(
[
'/project/<pid>/github/tarball/',
'/project/<pid>/node/<nid>/github/tarball/',
],
'get',
views.crud.github_download_starball,
json_renderer,
{'archive': 'tar'},
endpoint_suffix='__tar',
),
Rule(
[
'/project/<pid>/github/zipball/',
'/project/<pid>/node/<nid>/github/zipball/',
],
'get',
views.crud.github_download_starball,
json_renderer,
{'archive': 'zip'},
endpoint_suffix='__zip',
),
Rule(
[
'/project/<pid>/github/hook/',
'/project/<pid>/node/<nid>/github/hook/',
],
'post',
views.hooks.github_hook_callback,
json_renderer,
),
# OAuth: User
Rule(
'/settings/github/oauth/',
'get',
views.auth.github_oauth_start,
json_renderer,
endpoint_suffix='__user',
),
Rule(
'/settings/github/oauth/',
'delete',
views.auth.github_oauth_delete_user,
json_renderer,
),
# OAuth: Node
Rule(
[
'/project/<pid>/github/oauth/',
'/project/<pid>/node/<nid>/github/oauth/',
],
'get',
views.auth.github_oauth_start,
json_renderer,
),
Rule(
[
'/project/<pid>/github/user_auth/',
'/project/<pid>/node/<nid>/github/user_auth/',
],
'post',
views.auth.github_add_user_auth,
json_renderer,
),
Rule(
[
'/project/<pid>/github/oauth/',
'/project/<pid>/node/<nid>/github/oauth/',
'/project/<pid>/github/config/',
'/project/<pid>/node/<nid>/github/config/'
],
'delete',
views.auth.github_oauth_deauthorize_node,
json_renderer,
),
# OAuth: General
Rule(
[
'/addons/github/callback/<uid>/',
'/addons/github/callback/<uid>/<nid>/',
],
'get',
views.auth.github_oauth_callback,
json_renderer,
),
],
'prefix': '/api/v1',
}
api_routes = {
'rules': [
Rule(
'/github/repo/create/',
'post',
views.repos.github_create_repo,
json_renderer,
),
Rule(
[
'/project/<pid>/github/hgrid/root/',
'/project/<pid>/node/<nid>/github/hgrid/root/',
],
'get',
views.hgrid.github_root_folder_public,
json_renderer,
),
],
'prefix': '/api/v1'
}
| samanehsan/osf.io | website/addons/github/routes.py | Python | apache-2.0 | 3,429 |
class PythonVarArgsConstructor:
def __init__(self, mandatory, *varargs):
self.mandatory = mandatory
self.varargs = varargs
def get_args(self):
return self.mandatory, ' '.join(self.varargs)
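# Usage sketch:
#   PythonVarArgsConstructor('m', 'a', 'b').get_args()  # -> ('m', 'a b')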
| yahman72/robotframework | atest/testresources/testlibs/PythonVarArgsConstructor.py | Python | apache-2.0 | 228 |
from __future__ import print_function, division
from sympy.core.numbers import nan
from .function import Function
class Mod(Function):
"""Represents a modulo operation on symbolic expressions.
Receives two arguments, dividend p and divisor q.
The convention used is the same as Python's: the remainder always has the
same sign as the divisor.
Examples
========
>>> from sympy.abc import x, y
>>> x**2 % y
Mod(x**2, y)
>>> _.subs({x: 5, y: 6})
1
"""
@classmethod
def eval(cls, p, q):
from sympy.core.add import Add
from sympy.core.mul import Mul
from sympy.core.singleton import S
from sympy.core.exprtools import gcd_terms
from sympy.polys.polytools import gcd
def doit(p, q):
"""Try to return p % q if both are numbers or +/-p is known
to be less than or equal q.
"""
if p.is_infinite or q.is_infinite or p is nan or q is nan:
return nan
if (p == q or p == -q or
p.is_Pow and p.exp.is_Integer and p.base == q or
p.is_integer and q == 1):
return S.Zero
if q.is_Number:
if p.is_Number:
return (p % q)
if q == 2:
if p.is_even:
return S.Zero
elif p.is_odd:
return S.One
# by ratio
r = p/q
try:
d = int(r)
except TypeError:
pass
else:
if type(d) is int:
rv = p - d*q
if (rv*q < 0) == True:
rv += q
return rv
# by difference
d = p - q
if d.is_negative:
if q.is_negative:
return d
elif q.is_positive:
return p
rv = doit(p, q)
if rv is not None:
return rv
# denest
if p.func is cls:
# easy
qinner = p.args[1]
if qinner == q:
return p
# XXX other possibilities?
# extract gcd; any further simplification should be done by the user
G = gcd(p, q)
if G != 1:
p, q = [
gcd_terms(i/G, clear=False, fraction=False) for i in (p, q)]
pwas, qwas = p, q
# simplify terms
# (x + y + 2) % x -> Mod(y + 2, x)
if p.is_Add:
args = []
for i in p.args:
a = cls(i, q)
if a.count(cls) > i.count(cls):
args.append(i)
else:
args.append(a)
if args != list(p.args):
p = Add(*args)
else:
# handle coefficients if they are not Rational
# since those are not handled by factor_terms
# e.g. Mod(.6*x, .3*y) -> 0.3*Mod(2*x, y)
cp, p = p.as_coeff_Mul()
cq, q = q.as_coeff_Mul()
ok = False
if not cp.is_Rational or not cq.is_Rational:
r = cp % cq
if r == 0:
G *= cq
p *= int(cp/cq)
ok = True
if not ok:
p = cp*p
q = cq*q
# simple -1 extraction
if p.could_extract_minus_sign() and q.could_extract_minus_sign():
G, p, q = [-i for i in (G, p, q)]
# check again to see if p and q can now be handled as numbers
rv = doit(p, q)
if rv is not None:
return rv*G
# put 1.0 from G on inside
if G.is_Float and G == 1:
p *= G
return cls(p, q, evaluate=False)
elif G.is_Mul and G.args[0].is_Float and G.args[0] == 1:
p = G.args[0]*p
G = Mul._from_args(G.args[1:])
return G*cls(p, q, evaluate=(p, q) != (pwas, qwas))
def _eval_is_integer(self):
from sympy.core.logic import fuzzy_and, fuzzy_not
p, q = self.args
if fuzzy_and([p.is_integer, q.is_integer, fuzzy_not(q.is_zero)]):
return True
def _eval_is_nonnegative(self):
if self.args[1].is_positive:
return True
def _eval_is_nonpositive(self):
if self.args[1].is_negative:
return True
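# Behavior sketch (the remainder takes the divisor's sign, per the class
# docstring and Python's own % convention):
#   Mod(5, 3)   # -> 2
#   Mod(-5, 3)  # -> 1
#   Mod(5, -3)  # -> -1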
| Shaswat27/sympy | sympy/core/mod.py | Python | bsd-3-clause | 4,488 |
# -*- coding: utf-8 -*-
"""
Unit tests for preference APIs.
"""
import datetime
import ddt
import unittest
from mock import patch
from pytz import UTC
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.utils import override_settings
from dateutil.parser import parse as parse_datetime
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from ...accounts.api import create_account
from ...errors import UserNotFound, UserNotAuthorized, PreferenceValidationError, PreferenceUpdateError
from ...models import UserProfile, UserOrgTag
from ...preferences.api import (
get_user_preference, get_user_preferences, set_user_preference, update_user_preferences, delete_user_preference,
update_email_opt_in
)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Account APIs are only supported in LMS')
class TestPreferenceAPI(TestCase):
"""
These tests specifically cover the parts of the API methods that are not covered by test_views.py.
This includes the specific types of error raised, and default behavior when optional arguments
are not specified.
"""
password = "test"
def setUp(self):
super(TestPreferenceAPI, self).setUp()
self.user = UserFactory.create(password=self.password)
self.different_user = UserFactory.create(password=self.password)
self.staff_user = UserFactory(is_staff=True, password=self.password)
self.no_such_user = UserFactory.create(password=self.password)
self.no_such_user.username = "no_such_user"
self.test_preference_key = "test_key"
self.test_preference_value = "test_value"
set_user_preference(self.user, self.test_preference_key, self.test_preference_value)
def test_get_user_preference(self):
"""
Verifies the basic behavior of get_user_preference.
"""
self.assertEqual(
get_user_preference(self.user, self.test_preference_key),
self.test_preference_value
)
self.assertEqual(
get_user_preference(self.staff_user, self.test_preference_key, username=self.user.username),
self.test_preference_value
)
def test_get_user_preference_errors(self):
"""
Verifies that get_user_preference returns appropriate errors.
"""
with self.assertRaises(UserNotFound):
get_user_preference(self.user, self.test_preference_key, username="no_such_user")
with self.assertRaises(UserNotFound):
get_user_preference(self.no_such_user, self.test_preference_key)
with self.assertRaises(UserNotAuthorized):
get_user_preference(self.different_user, self.test_preference_key, username=self.user.username)
def test_get_user_preferences(self):
"""
Verifies the basic behavior of get_user_preferences.
"""
expected_user_preferences = {
self.test_preference_key: self.test_preference_value,
}
self.assertEqual(get_user_preferences(self.user), expected_user_preferences)
self.assertEqual(get_user_preferences(self.staff_user, username=self.user.username), expected_user_preferences)
def test_get_user_preferences_errors(self):
"""
Verifies that get_user_preferences returns appropriate errors.
"""
with self.assertRaises(UserNotFound):
get_user_preferences(self.user, username="no_such_user")
with self.assertRaises(UserNotFound):
get_user_preferences(self.no_such_user)
with self.assertRaises(UserNotAuthorized):
get_user_preferences(self.different_user, username=self.user.username)
def test_set_user_preference(self):
"""
Verifies the basic behavior of set_user_preference.
"""
test_key = u'ⓟⓡⓔⓕⓔⓡⓔⓝⓒⓔ_ⓚⓔⓨ'
test_value = u'ǝnןɐʌ_ǝɔuǝɹǝɟǝɹd'
set_user_preference(self.user, test_key, test_value)
self.assertEqual(get_user_preference(self.user, test_key), test_value)
set_user_preference(self.user, test_key, "new_value", username=self.user.username)
self.assertEqual(get_user_preference(self.user, test_key), "new_value")
@patch('openedx.core.djangoapps.user_api.models.UserPreference.save')
def test_set_user_preference_errors(self, user_preference_save):
"""
Verifies that set_user_preference returns appropriate errors.
"""
with self.assertRaises(UserNotFound):
set_user_preference(self.user, self.test_preference_key, "new_value", username="no_such_user")
with self.assertRaises(UserNotFound):
set_user_preference(self.no_such_user, self.test_preference_key, "new_value")
with self.assertRaises(UserNotAuthorized):
set_user_preference(self.staff_user, self.test_preference_key, "new_value", username=self.user.username)
with self.assertRaises(UserNotAuthorized):
set_user_preference(self.different_user, self.test_preference_key, "new_value", username=self.user.username)
too_long_key = "x" * 256
with self.assertRaises(PreferenceValidationError) as context_manager:
set_user_preference(self.user, too_long_key, "new_value")
errors = context_manager.exception.preference_errors
self.assertEqual(len(errors.keys()), 1)
self.assertEqual(
errors[too_long_key],
{
"developer_message": get_expected_validation_developer_message(too_long_key, "new_value"),
"user_message": get_expected_key_error_user_message(too_long_key, "new_value"),
}
)
for empty_value in (None, "", " "):
with self.assertRaises(PreferenceValidationError) as context_manager:
set_user_preference(self.user, self.test_preference_key, empty_value)
errors = context_manager.exception.preference_errors
self.assertEqual(len(errors.keys()), 1)
self.assertEqual(
errors[self.test_preference_key],
{
"developer_message": get_empty_preference_message(self.test_preference_key),
"user_message": get_empty_preference_message(self.test_preference_key),
}
)
user_preference_save.side_effect = [Exception, None]
with self.assertRaises(PreferenceUpdateError) as context_manager:
set_user_preference(self.user, u"new_key_ȻħȺɍłɇs", u"new_value_ȻħȺɍłɇs")
self.assertEqual(
context_manager.exception.developer_message,
u"Save failed for user preference 'new_key_ȻħȺɍłɇs' with value 'new_value_ȻħȺɍłɇs': "
)
self.assertEqual(
context_manager.exception.user_message,
u"Save failed for user preference 'new_key_ȻħȺɍłɇs' with value 'new_value_ȻħȺɍłɇs'."
)
def test_update_user_preferences(self):
"""
Verifies the basic behavior of update_user_preferences.
"""
expected_user_preferences = {
self.test_preference_key: "new_value",
}
set_user_preference(self.user, self.test_preference_key, "new_value")
self.assertEqual(
get_user_preference(self.user, self.test_preference_key),
"new_value"
)
set_user_preference(self.user, self.test_preference_key, "new_value", username=self.user.username)
self.assertEqual(
get_user_preference(self.user, self.test_preference_key),
"new_value"
)
@patch('openedx.core.djangoapps.user_api.models.UserPreference.delete')
@patch('openedx.core.djangoapps.user_api.models.UserPreference.save')
def test_update_user_preferences_errors(self, user_preference_save, user_preference_delete):
"""
Verifies that set_user_preferences returns appropriate errors.
"""
update_data = {
self.test_preference_key: "new_value"
}
with self.assertRaises(UserNotFound):
update_user_preferences(self.user, update_data, username="no_such_user")
with self.assertRaises(UserNotFound):
update_user_preferences(self.no_such_user, update_data)
with self.assertRaises(UserNotAuthorized):
update_user_preferences(self.staff_user, update_data, username=self.user.username)
with self.assertRaises(UserNotAuthorized):
update_user_preferences(self.different_user, update_data, username=self.user.username)
too_long_key = "x" * 256
with self.assertRaises(PreferenceValidationError) as context_manager:
update_user_preferences(self.user, {too_long_key: "new_value"})
errors = context_manager.exception.preference_errors
self.assertEqual(len(errors.keys()), 1)
self.assertEqual(
errors[too_long_key],
{
"developer_message": get_expected_validation_developer_message(too_long_key, "new_value"),
"user_message": get_expected_key_error_user_message(too_long_key, "new_value"),
}
)
for empty_value in ("", " "):
with self.assertRaises(PreferenceValidationError) as context_manager:
update_user_preferences(self.user, {self.test_preference_key: empty_value})
errors = context_manager.exception.preference_errors
self.assertEqual(len(errors.keys()), 1)
self.assertEqual(
errors[self.test_preference_key],
{
"developer_message": get_empty_preference_message(self.test_preference_key),
"user_message": get_empty_preference_message(self.test_preference_key),
}
)
user_preference_save.side_effect = [Exception, None]
with self.assertRaises(PreferenceUpdateError) as context_manager:
update_user_preferences(self.user, {self.test_preference_key: "new_value"})
self.assertEqual(
context_manager.exception.developer_message,
u"Save failed for user preference 'test_key' with value 'new_value': "
)
self.assertEqual(
context_manager.exception.user_message,
u"Save failed for user preference 'test_key' with value 'new_value'."
)
user_preference_delete.side_effect = [Exception, None]
with self.assertRaises(PreferenceUpdateError) as context_manager:
update_user_preferences(self.user, {self.test_preference_key: None})
self.assertEqual(
context_manager.exception.developer_message,
u"Delete failed for user preference 'test_key': "
)
self.assertEqual(
context_manager.exception.user_message,
u"Delete failed for user preference 'test_key'."
)
def test_delete_user_preference(self):
"""
Verifies the basic behavior of delete_user_preference.
"""
self.assertTrue(delete_user_preference(self.user, self.test_preference_key))
set_user_preference(self.user, self.test_preference_key, self.test_preference_value)
self.assertTrue(delete_user_preference(self.user, self.test_preference_key, username=self.user.username))
self.assertFalse(delete_user_preference(self.user, "no_such_key"))
@patch('openedx.core.djangoapps.user_api.models.UserPreference.delete')
def test_delete_user_preference_errors(self, user_preference_delete):
"""
Verifies that delete_user_preference returns appropriate errors.
"""
with self.assertRaises(UserNotFound):
delete_user_preference(self.user, self.test_preference_key, username="no_such_user")
with self.assertRaises(UserNotFound):
delete_user_preference(self.no_such_user, self.test_preference_key)
with self.assertRaises(UserNotAuthorized):
delete_user_preference(self.staff_user, self.test_preference_key, username=self.user.username)
with self.assertRaises(UserNotAuthorized):
delete_user_preference(self.different_user, self.test_preference_key, username=self.user.username)
user_preference_delete.side_effect = [Exception, None]
with self.assertRaises(PreferenceUpdateError) as context_manager:
delete_user_preference(self.user, self.test_preference_key)
self.assertEqual(
context_manager.exception.developer_message,
u"Delete failed for user preference 'test_key': "
)
self.assertEqual(
context_manager.exception.user_message,
u"Delete failed for user preference 'test_key'."
)
@ddt.ddt
class UpdateEmailOptInTests(ModuleStoreTestCase):
USERNAME = u'frank-underwood'
PASSWORD = u'ṕáśśẃőŕd'
EMAIL = u'[email protected]'
@ddt.data(
# Check that a 27 year old can opt-in
(27, True, u"True"),
# Check that a 32-year old can opt-out
(32, False, u"False"),
# Check that someone 14 years old can opt-in
(14, True, u"True"),
# Check that someone 13 years old cannot opt-in (must have turned 13 before this year)
(13, True, u"False"),
# Check that someone 12 years old cannot opt-in
(12, True, u"False")
)
@ddt.unpack
@override_settings(EMAIL_OPTIN_MINIMUM_AGE=13)
def test_update_email_optin(self, age, option, expected_result):
# Create the course and account.
course = CourseFactory.create()
create_account(self.USERNAME, self.PASSWORD, self.EMAIL)
# Set year of birth
user = User.objects.get(username=self.USERNAME)
profile = UserProfile.objects.get(user=user)
year_of_birth = datetime.datetime.now().year - age # pylint: disable=maybe-no-member
profile.year_of_birth = year_of_birth
profile.save()
update_email_opt_in(user, course.id.org, option)
result_obj = UserOrgTag.objects.get(user=user, org=course.id.org, key='email-optin')
self.assertEqual(result_obj.value, expected_result)
def test_update_email_optin_no_age_set(self):
# Test that the API still works if no age is specified.
# Create the course and account.
course = CourseFactory.create()
create_account(self.USERNAME, self.PASSWORD, self.EMAIL)
user = User.objects.get(username=self.USERNAME)
update_email_opt_in(user, course.id.org, True)
result_obj = UserOrgTag.objects.get(user=user, org=course.id.org, key='email-optin')
self.assertEqual(result_obj.value, u"True")
def test_update_email_optin_anonymous_user(self):
"""Verify that the API raises an exception for a user with no profile."""
course = CourseFactory.create()
no_profile_user, __ = User.objects.get_or_create(username="no_profile_user", password=self.PASSWORD)
with self.assertRaises(UserNotFound):
update_email_opt_in(no_profile_user, course.id.org, True)
@ddt.data(
# Check that a 27 year old can opt-in, then out.
(27, True, False, u"False"),
# Check that a 32-year old can opt-out, then in.
(32, False, True, u"True"),
# Check that someone 13 years old can opt-in, then out.
(13, True, False, u"False"),
# Check that someone 12 years old cannot opt-in, then explicitly out.
(12, True, False, u"False")
)
@ddt.unpack
@override_settings(EMAIL_OPTIN_MINIMUM_AGE=13)
def test_change_email_optin(self, age, option, second_option, expected_result):
# Create the course and account.
course = CourseFactory.create()
create_account(self.USERNAME, self.PASSWORD, self.EMAIL)
# Set year of birth
user = User.objects.get(username=self.USERNAME)
profile = UserProfile.objects.get(user=user)
year_of_birth = datetime.datetime.now(UTC).year - age # pylint: disable=maybe-no-member
profile.year_of_birth = year_of_birth
profile.save()
update_email_opt_in(user, course.id.org, option)
update_email_opt_in(user, course.id.org, second_option)
result_obj = UserOrgTag.objects.get(user=user, org=course.id.org, key='email-optin')
self.assertEqual(result_obj.value, expected_result)
def _assert_is_datetime(self, timestamp):
if not timestamp:
return False
try:
parse_datetime(timestamp)
except ValueError:
return False
else:
return True
def get_expected_validation_developer_message(preference_key, preference_value):
"""
Returns the expected dict of validation messages for the specified key.
"""
return u"Value '{preference_value}' not valid for preference '{preference_key}': {error}".format(
preference_key=preference_key,
preference_value=preference_value,
error={
"key": [u"Ensure this value has at most 255 characters (it has 256)."]
}
)
def get_expected_key_error_user_message(preference_key, preference_value):
"""
Returns the expected user message for an invalid key.
"""
return u"Invalid user preference key '{preference_key}'.".format(preference_key=preference_key)
def get_empty_preference_message(preference_key):
"""
Returns the validation message shown for an empty preference.
"""
return "Preference '{preference_key}' cannot be set to an empty value.".format(preference_key=preference_key)
| GbalsaC/bitnamiP | openedx/core/djangoapps/user_api/preferences/tests/test_api.py | Python | agpl-3.0 | 17,881 |
callback_classes = [
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Packet>', 'ns3::Address const&', 'ns3::Address const&', 'unsigned short', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['unsigned char', 'ns3::Ptr<ns3::QueueItem>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
| kuropatkin/lte | src/virtual-net-device/bindings/callbacks_list.py | Python | gpl-2.0 | 1,101 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
from frappe.model import update_users_report_view_settings
from erpnext.patches.v4_0.fields_to_be_renamed import rename_map
def execute():
for dt, field_list in rename_map.items():
for field in field_list:
update_users_report_view_settings(dt, field[0], field[1])
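# rename_map shape sketch (inferred from the loop above): doctype -> list of
# (old_fieldname, new_fieldname) pairs; the doctype and field names below are
# hypothetical:
#   {'Sales Invoice': [['old_field', 'new_field'], ...]}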
| indictranstech/focal-erpnext | patches/v4_0/update_users_report_view_settings.py | Python | agpl-3.0 | 443 |
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Given the output of -t commands from a ninja build for a gyp and GN generated
build, report on differences between the command lines."""
import os
import shlex
import subprocess
import sys
# Must be in src/.
os.chdir(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
g_total_differences = 0
def FindAndRemoveArgWithValue(command_line, argname):
"""Given a command line as a list, remove and return the value of an option
that takes a value as a separate entry.
Modifies |command_line| in place.
"""
if argname not in command_line:
return ''
location = command_line.index(argname)
value = command_line[location + 1]
command_line[location:location + 2] = []
return value
def MergeSpacedArgs(command_line, argname):
"""Combine all arguments |argname| with their values, separated by a space."""
i = 0
result = []
while i < len(command_line):
arg = command_line[i]
if arg == argname:
result.append(arg + ' ' + command_line[i + 1])
i += 1
else:
result.append(arg)
i += 1
return result
def NormalizeSymbolArguments(command_line):
"""Normalize -g arguments.
If there's no -g args, it's equivalent to -g0. -g2 is equivalent to -g.
Modifies |command_line| in place.
"""
# Strip -g0 if there's no symbols.
have_some_symbols = False
for x in command_line:
if x.startswith('-g') and x != '-g0':
have_some_symbols = True
if not have_some_symbols and '-g0' in command_line:
command_line.remove('-g0')
# Rename -g2 to -g.
if '-g2' in command_line:
command_line[command_line.index('-g2')] = '-g'
def GetFlags(lines):
"""Turn a list of command lines into a semi-structured dict."""
flags_by_output = {}
for line in lines:
# TODO(scottmg): Hacky way of getting only cc for now.
if 'clang' not in line:
continue
command_line = shlex.split(line.strip())[1:]
output_name = FindAndRemoveArgWithValue(command_line, '-o')
dep_name = FindAndRemoveArgWithValue(command_line, '-MF')
NormalizeSymbolArguments(command_line)
command_line = MergeSpacedArgs(command_line, '-Xclang')
defines = [x for x in command_line if x.startswith('-D')]
include_dirs = [x for x in command_line if x.startswith('-I')]
dash_f = [x for x in command_line if x.startswith('-f')]
warnings = [x for x in command_line if x.startswith('-W')]
cc_file = [x for x in command_line if x.endswith('.cc') or
x.endswith('.c') or
x.endswith('.cpp')]
if len(cc_file) != 1:
print 'Skipping %s' % command_line
continue
assert len(cc_file) == 1
others = [x for x in command_line if x not in defines and \
x not in include_dirs and \
x not in dash_f and \
x not in warnings and \
x not in cc_file]
# Filter for libFindBadConstructs.so having a relative path in one and
# absolute path in the other.
others_filtered = []
for x in others:
if x.startswith('-Xclang ') and x.endswith('libFindBadConstructs.so'):
others_filtered.append(
'-Xclang ' +
os.path.join(os.getcwd(),
os.path.normpath(
os.path.join('out/gn_flags', x.split(' ', 1)[1]))))
elif x.startswith('-B'):
others_filtered.append(
'-B' +
os.path.join(os.getcwd(),
os.path.normpath(os.path.join('out/gn_flags', x[2:]))))
else:
others_filtered.append(x)
others = others_filtered
flags_by_output[cc_file[0]] = {
'output': output_name,
'depname': dep_name,
'defines': sorted(defines),
'include_dirs': sorted(include_dirs), # TODO(scottmg): This is wrong.
'dash_f': sorted(dash_f),
'warnings': sorted(warnings),
'other': sorted(others),
}
return flags_by_output
def CompareLists(gyp, gn, name, dont_care_gyp=None, dont_care_gn=None):
"""Return a report of any differences between gyp and gn lists, ignoring
anything in |dont_care_{gyp|gn}| respectively."""
global g_total_differences
if not dont_care_gyp:
dont_care_gyp = []
if not dont_care_gn:
dont_care_gn = []
output = ''
if gyp[name] != gn[name]:
gyp_set = set(gyp[name])
gn_set = set(gn[name])
missing_in_gyp = gyp_set - gn_set
missing_in_gn = gn_set - gyp_set
missing_in_gyp -= set(dont_care_gyp)
missing_in_gn -= set(dont_care_gn)
if missing_in_gyp or missing_in_gn:
output += ' %s differ:\n' % name
if missing_in_gyp:
output += ' In gyp, but not in GN:\n %s' % '\n '.join(
sorted(missing_in_gyp)) + '\n'
g_total_differences += len(missing_in_gyp)
if missing_in_gn:
output += ' In GN, but not in gyp:\n %s' % '\n '.join(
sorted(missing_in_gn)) + '\n\n'
g_total_differences += len(missing_in_gn)
return output
def Run(command_line):
"""Run |command_line| as a subprocess and return stdout. Raises on error."""
return subprocess.check_output(command_line, shell=True)
def main():
if len(sys.argv) != 2 and len(sys.argv) != 3:
print 'usage: %s gyp_target gn_target' % __file__
print ' or: %s target' % __file__
return 1
if len(sys.argv) == 2:
sys.argv.append(sys.argv[1])
print >>sys.stderr, 'Regenerating...'
# Currently only Release, non-component.
Run('gn gen out/gn_flags --args="is_debug=false is_component_build=false"')
os.environ.pop('GYP_DEFINES', None)
Run('python build/gyp_chromium -Goutput_dir=out_gyp_flags -Gconfig=Release')
gn = Run('ninja -C out/gn_flags -t commands %s' % sys.argv[2])
gyp = Run('ninja -C out_gyp_flags/Release -t commands %s' % sys.argv[1])
all_gyp_flags = GetFlags(gyp.splitlines())
all_gn_flags = GetFlags(gn.splitlines())
gyp_files = set(all_gyp_flags.keys())
gn_files = set(all_gn_flags.keys())
different_source_list = gyp_files != gn_files
if different_source_list:
print 'Different set of sources files:'
print ' In gyp, not in GN:\n %s' % '\n '.join(
sorted(gyp_files - gn_files))
print ' In GN, not in gyp:\n %s' % '\n '.join(
sorted(gn_files - gyp_files))
print '\nNote that flags will only be compared for files in both sets.\n'
file_list = gyp_files & gn_files
files_with_given_differences = {}
for filename in sorted(file_list):
gyp_flags = all_gyp_flags[filename]
gn_flags = all_gn_flags[filename]
differences = CompareLists(gyp_flags, gn_flags, 'dash_f')
differences += CompareLists(gyp_flags, gn_flags, 'defines')
differences += CompareLists(gyp_flags, gn_flags, 'include_dirs')
differences += CompareLists(gyp_flags, gn_flags, 'warnings', dont_care_gn=[
# More conservative warnings in GN we consider to be OK.
'-Wendif-labels',
'-Wextra',
'-Wsign-compare',
])
differences += CompareLists(gyp_flags, gn_flags, 'other')
if differences:
files_with_given_differences.setdefault(differences, []).append(filename)
for diff, files in files_with_given_differences.iteritems():
print '\n'.join(sorted(files))
print diff
print 'Total differences:', g_total_differences
# TODO(scottmg): Return failure on difference once we're closer to identical.
return 0
if __name__ == '__main__':
sys.exit(main())
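# Invocation sketch, per the usage text in main():
#   python gyp_flag_compare.py chrome                # same target for both
#   python gyp_flag_compare.py gyp_target gn_target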
| M4sse/chromium.src | tools/gn/bin/gyp_flag_compare.py | Python | bsd-3-clause | 7,719 |
# -*- coding: utf-8 -*-
"""
Test the QgsSettings class
Run with: ctest -V -R PyQgsSettings
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
import os
import tempfile
from qgis.core import QgsSettings, QgsTolerance, QgsMapLayerProxyModel
from qgis.testing import start_app, unittest
from qgis.PyQt.QtCore import QSettings, QVariant
from pathlib import Path
__author__ = 'Alessandro Pasotti'
__date__ = '02/02/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
start_app()
class TestQgsSettings(unittest.TestCase):
cnt = 0
def setUp(self):
self.cnt += 1
h, path = tempfile.mkstemp('.ini')
Path(path).touch()
assert QgsSettings.setGlobalSettingsPath(path)
self.settings = QgsSettings('testqgissettings', 'testqgissettings%s' % self.cnt)
self.globalsettings = QSettings(self.settings.globalSettingsPath(), QSettings.IniFormat)
self.globalsettings.sync()
assert os.path.exists(self.globalsettings.fileName())
def tearDown(self):
settings_file = self.settings.fileName()
settings_default_file = self.settings.globalSettingsPath()
del(self.settings)
try:
os.unlink(settings_file)
except:
pass
try:
os.unlink(settings_default_file)
except:
pass
def addToDefaults(self, key, value):
self.globalsettings.setValue(key, value)
self.globalsettings.sync()
def addArrayToDefaults(self, prefix, key, values):
defaults = QSettings(self.settings.globalSettingsPath(), QSettings.IniFormat) # NOQA
self.globalsettings.beginWriteArray(prefix)
i = 0
for v in values:
self.globalsettings.setArrayIndex(i)
self.globalsettings.setValue(key, v)
i += 1
self.globalsettings.endArray()
self.globalsettings.sync()
def addGroupToDefaults(self, prefix, kvp):
defaults = QSettings(self.settings.globalSettingsPath(), QSettings.IniFormat) # NOQA
self.globalsettings.beginGroup(prefix)
for k, v in kvp.items():
self.globalsettings.setValue(k, v)
self.globalsettings.endGroup()
self.globalsettings.sync()
def test_basic_functionality(self):
self.assertEqual(self.settings.value('testqgissettings/doesnotexists', 'notexist'), 'notexist')
self.settings.setValue('testqgissettings/name', 'qgisrocks')
self.settings.sync()
self.assertEqual(self.settings.value('testqgissettings/name'), 'qgisrocks')
def test_defaults(self):
self.assertIsNone(self.settings.value('testqgissettings/name'))
self.addToDefaults('testqgissettings/name', 'qgisrocks')
self.assertEqual(self.settings.value('testqgissettings/name'), 'qgisrocks')
def test_allkeys(self):
self.assertEqual(self.settings.allKeys(), [])
self.addToDefaults('testqgissettings/name', 'qgisrocks')
self.addToDefaults('testqgissettings/name2', 'qgisrocks2')
self.settings.setValue('nepoti/eman', 'osaple')
self.assertEqual(3, len(self.settings.allKeys()))
self.assertIn('testqgissettings/name', self.settings.allKeys())
self.assertIn('nepoti/eman', self.settings.allKeys())
self.assertEqual('qgisrocks', self.settings.value('testqgissettings/name'))
self.assertEqual('qgisrocks2', self.settings.value('testqgissettings/name2'))
self.assertEqual('qgisrocks', self.globalsettings.value('testqgissettings/name'))
self.assertEqual('osaple', self.settings.value('nepoti/eman'))
self.assertEqual(3, len(self.settings.allKeys()))
self.assertEqual(2, len(self.globalsettings.allKeys()))
def test_precedence_simple(self):
self.assertEqual(self.settings.allKeys(), [])
self.addToDefaults('testqgissettings/names/name1', 'qgisrocks1')
self.settings.setValue('testqgissettings/names/name1', 'qgisrocks-1')
self.assertEqual(self.settings.value('testqgissettings/names/name1'), 'qgisrocks-1')
def test_precedence_group(self):
"""Test if user can override a group value"""
self.assertEqual(self.settings.allKeys(), [])
self.addGroupToDefaults('connections-xyz', {
'OSM': 'http://a.tile.openstreetmap.org/{z}/{x}/{y}.png',
'OSM-b': 'http://b.tile.openstreetmap.org/{z}/{x}/{y}.png',
})
self.settings.beginGroup('connections-xyz')
self.assertEqual(self.settings.value('OSM'), 'http://a.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.assertEqual(self.settings.value('OSM-b'), 'http://b.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.settings.endGroup()
# Override edit
self.settings.beginGroup('connections-xyz')
self.settings.setValue('OSM', 'http://c.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.settings.endGroup()
# Check it again!
self.settings.beginGroup('connections-xyz')
self.assertEqual(self.settings.value('OSM'), 'http://c.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.assertEqual(self.settings.value('OSM-b'), 'http://b.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.settings.endGroup()
        # Override remove: the global value will be restored
self.settings.beginGroup('connections-xyz')
self.settings.remove('OSM')
self.settings.endGroup()
# Check it again!
self.settings.beginGroup('connections-xyz')
self.assertEqual(self.settings.value('OSM'), 'http://a.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.assertEqual(self.settings.value('OSM-b'), 'http://b.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.settings.endGroup()
        # Override: store a blank
self.settings.beginGroup('connections-xyz')
self.settings.setValue('OSM', '')
self.settings.endGroup()
# Check it again!
self.settings.beginGroup('connections-xyz')
self.assertEqual(self.settings.value('OSM'), '')
self.assertEqual(self.settings.value('OSM-b'), 'http://b.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.settings.endGroup()
        # Override: storing None restores the global setting
self.settings.beginGroup('connections-xyz')
self.settings.setValue('OSM', None)
self.settings.endGroup()
# Check it again!
self.settings.beginGroup('connections-xyz')
self.assertEqual(self.settings.value('OSM'), 'http://a.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.assertEqual(self.settings.value('OSM-b'), 'http://b.tile.openstreetmap.org/{z}/{x}/{y}.png')
self.settings.endGroup()
    def test_utf8(self):
self.assertEqual(self.settings.allKeys(), [])
self.addToDefaults('testqgissettings/names/namèé↓1', 'qgisrocks↓1')
self.assertEqual(self.settings.value('testqgissettings/names/namèé↓1'), 'qgisrocks↓1')
self.settings.setValue('testqgissettings/names/namèé↓2', 'qgisrocks↓2')
self.assertEqual(self.settings.value('testqgissettings/names/namèé↓2'), 'qgisrocks↓2')
self.settings.setValue('testqgissettings/names/namèé↓1', 'qgisrocks↓-1')
self.assertEqual(self.settings.value('testqgissettings/names/namèé↓1'), 'qgisrocks↓-1')
def test_groups(self):
self.assertEqual(self.settings.allKeys(), [])
self.addToDefaults('testqgissettings/names/name1', 'qgisrocks1')
self.addToDefaults('testqgissettings/names/name2', 'qgisrocks2')
self.addToDefaults('testqgissettings/names/name3', 'qgisrocks3')
self.addToDefaults('testqgissettings/name', 'qgisrocks')
self.settings.beginGroup('testqgissettings')
self.assertEqual(self.settings.group(), 'testqgissettings')
self.assertEqual(['names'], self.settings.childGroups())
self.settings.setValue('surnames/name1', 'qgisrocks-1')
self.assertEqual(['surnames', 'names'], self.settings.childGroups())
self.settings.setValue('names/name1', 'qgisrocks-1')
self.assertEqual('qgisrocks-1', self.settings.value('names/name1'))
self.settings.endGroup()
self.assertEqual(self.settings.group(), '')
self.settings.beginGroup('testqgissettings/names')
self.assertEqual(self.settings.group(), 'testqgissettings/names')
self.settings.setValue('name4', 'qgisrocks-4')
keys = sorted(self.settings.childKeys())
self.assertEqual(keys, ['name1', 'name2', 'name3', 'name4'])
self.settings.endGroup()
self.assertEqual(self.settings.group(), '')
self.assertEqual('qgisrocks-1', self.settings.value('testqgissettings/names/name1'))
self.assertEqual('qgisrocks-4', self.settings.value('testqgissettings/names/name4'))
def test_global_groups(self):
self.assertEqual(self.settings.allKeys(), [])
self.assertEqual(self.globalsettings.allKeys(), [])
self.addToDefaults('testqgissettings/foo/first', 'qgis')
self.addToDefaults('testqgissettings/foo/last', 'rocks')
self.settings.beginGroup('testqgissettings')
self.assertEqual(self.settings.group(), 'testqgissettings')
self.assertEqual(['foo'], self.settings.childGroups())
self.assertEqual(['foo'], self.settings.globalChildGroups())
self.settings.endGroup()
self.assertEqual(self.settings.group(), '')
self.settings.setValue('testqgissettings/bar/first', 'qgis')
self.settings.setValue('testqgissettings/bar/last', 'rocks')
self.settings.beginGroup('testqgissettings')
self.assertEqual(sorted(['bar', 'foo']), sorted(self.settings.childGroups()))
self.assertEqual(['foo'], self.settings.globalChildGroups())
self.settings.endGroup()
self.globalsettings.remove('testqgissettings/foo')
self.settings.beginGroup('testqgissettings')
self.assertEqual(['bar'], self.settings.childGroups())
self.assertEqual([], self.settings.globalChildGroups())
self.settings.endGroup()
def test_group_section(self):
# Test group by using Section
self.settings.beginGroup('firstgroup', section=QgsSettings.Core)
self.assertEqual(self.settings.group(), 'core/firstgroup')
self.assertEqual([], self.settings.childGroups())
self.settings.setValue('key', 'value')
self.settings.setValue('key2/subkey1', 'subvalue1')
self.settings.setValue('key2/subkey2', 'subvalue2')
self.settings.setValue('key3', 'value3')
self.assertEqual(['key', 'key2/subkey1', 'key2/subkey2', 'key3'], self.settings.allKeys())
self.assertEqual(['key', 'key3'], self.settings.childKeys())
self.assertEqual(['key2'], self.settings.childGroups())
self.settings.endGroup()
self.assertEqual(self.settings.group(), '')
# Set value by writing the group manually
self.settings.setValue('firstgroup/key4', 'value4', section=QgsSettings.Core)
# Checking the value that have been set
self.assertEqual(self.settings.value('firstgroup/key', section=QgsSettings.Core), 'value')
self.assertEqual(self.settings.value('firstgroup/key2/subkey1', section=QgsSettings.Core), 'subvalue1')
self.assertEqual(self.settings.value('firstgroup/key2/subkey2', section=QgsSettings.Core), 'subvalue2')
self.assertEqual(self.settings.value('firstgroup/key3', section=QgsSettings.Core), 'value3')
self.assertEqual(self.settings.value('firstgroup/key4', section=QgsSettings.Core), 'value4')
# Clean up firstgroup
self.settings.remove('firstgroup', section=QgsSettings.Core)
def test_array(self):
self.assertEqual(self.settings.allKeys(), [])
self.addArrayToDefaults('testqgissettings', 'key', ['qgisrocks1', 'qgisrocks2', 'qgisrocks3'])
self.assertEqual(self.settings.allKeys(), ['testqgissettings/1/key', 'testqgissettings/2/key', 'testqgissettings/3/key', 'testqgissettings/size'])
self.assertEqual(self.globalsettings.allKeys(), ['testqgissettings/1/key', 'testqgissettings/2/key', 'testqgissettings/3/key', 'testqgissettings/size'])
self.assertEqual(3, self.globalsettings.beginReadArray('testqgissettings'))
self.globalsettings.endArray()
        self.assertEqual(3, self.settings.beginReadArray('testqgissettings'))
        values = []
        for i in range(3):
            self.settings.setArrayIndex(i)
            values.append(self.settings.value("key"))
        self.settings.endArray()
        self.assertEqual(values, ['qgisrocks1', 'qgisrocks2', 'qgisrocks3'])
def test_array_overrides(self):
"""Test if an array completely shadows the global one"""
self.assertEqual(self.settings.allKeys(), [])
self.addArrayToDefaults('testqgissettings', 'key', ['qgisrocks1', 'qgisrocks2', 'qgisrocks3'])
self.assertEqual(self.settings.allKeys(), ['testqgissettings/1/key', 'testqgissettings/2/key', 'testqgissettings/3/key', 'testqgissettings/size'])
self.assertEqual(self.globalsettings.allKeys(), ['testqgissettings/1/key', 'testqgissettings/2/key', 'testqgissettings/3/key', 'testqgissettings/size'])
self.assertEqual(3, self.globalsettings.beginReadArray('testqgissettings'))
self.globalsettings.endArray()
        self.assertEqual(3, self.settings.beginReadArray('testqgissettings'))
        self.settings.endArray()
        # Now override!
self.settings.beginWriteArray('testqgissettings')
self.settings.setArrayIndex(0)
self.settings.setValue('key', 'myqgisrocksmore1')
self.settings.setArrayIndex(1)
self.settings.setValue('key', 'myqgisrocksmore2')
self.settings.endArray()
# Check it!
        self.assertEqual(2, self.settings.beginReadArray('testqgissettings'))
        values = []
        for i in range(2):
            self.settings.setArrayIndex(i)
            values.append(self.settings.value("key"))
        self.settings.endArray()
        self.assertEqual(values, ['myqgisrocksmore1', 'myqgisrocksmore2'])
def test_section_getters_setters(self):
self.assertEqual(self.settings.allKeys(), [])
self.settings.setValue('key1', 'core1', section=QgsSettings.Core)
self.settings.setValue('key2', 'core2', section=QgsSettings.Core)
self.settings.setValue('key1', 'server1', section=QgsSettings.Server)
self.settings.setValue('key2', 'server2', section=QgsSettings.Server)
self.settings.setValue('key1', 'gui1', section=QgsSettings.Gui)
        self.settings.setValue('key2', 'gui2', section=QgsSettings.Gui)
self.settings.setValue('key1', 'plugins1', section=QgsSettings.Plugins)
self.settings.setValue('key2', 'plugins2', section=QgsSettings.Plugins)
self.settings.setValue('key1', 'misc1', section=QgsSettings.Misc)
self.settings.setValue('key2', 'misc2', section=QgsSettings.Misc)
self.settings.setValue('key1', 'auth1', section=QgsSettings.Auth)
self.settings.setValue('key2', 'auth2', section=QgsSettings.Auth)
self.settings.setValue('key1', 'app1', section=QgsSettings.App)
self.settings.setValue('key2', 'app2', section=QgsSettings.App)
self.settings.setValue('key1', 'provider1', section=QgsSettings.Providers)
self.settings.setValue('key2', 'provider2', section=QgsSettings.Providers)
        # This overwrites the previous Auth settings with the same values; the duplication is intentional
self.settings.setValue('key1', 'auth1', section=QgsSettings.Auth)
self.settings.setValue('key2', 'auth2', section=QgsSettings.Auth)
# Test that the values are namespaced
self.assertEqual(self.settings.value('core/key1'), 'core1')
self.assertEqual(self.settings.value('core/key2'), 'core2')
self.assertEqual(self.settings.value('server/key1'), 'server1')
self.assertEqual(self.settings.value('server/key2'), 'server2')
self.assertEqual(self.settings.value('gui/key1'), 'gui1')
self.assertEqual(self.settings.value('gui/key2'), 'gui2')
self.assertEqual(self.settings.value('plugins/key1'), 'plugins1')
self.assertEqual(self.settings.value('plugins/key2'), 'plugins2')
self.assertEqual(self.settings.value('misc/key1'), 'misc1')
self.assertEqual(self.settings.value('misc/key2'), 'misc2')
# Test getters
self.assertEqual(self.settings.value('key1', None, section=QgsSettings.Core), 'core1')
self.assertEqual(self.settings.value('key2', None, section=QgsSettings.Core), 'core2')
self.assertEqual(self.settings.value('key1', None, section=QgsSettings.Server), 'server1')
self.assertEqual(self.settings.value('key2', None, section=QgsSettings.Server), 'server2')
self.assertEqual(self.settings.value('key1', None, section=QgsSettings.Gui), 'gui1')
self.assertEqual(self.settings.value('key2', None, section=QgsSettings.Gui), 'gui2')
self.assertEqual(self.settings.value('key1', None, section=QgsSettings.Plugins), 'plugins1')
self.assertEqual(self.settings.value('key2', None, section=QgsSettings.Plugins), 'plugins2')
self.assertEqual(self.settings.value('key1', None, section=QgsSettings.Misc), 'misc1')
self.assertEqual(self.settings.value('key2', None, section=QgsSettings.Misc), 'misc2')
self.assertEqual(self.settings.value('key1', None, section=QgsSettings.Auth), 'auth1')
self.assertEqual(self.settings.value('key2', None, section=QgsSettings.Auth), 'auth2')
self.assertEqual(self.settings.value('key1', None, section=QgsSettings.App), 'app1')
self.assertEqual(self.settings.value('key2', None, section=QgsSettings.App), 'app2')
self.assertEqual(self.settings.value('key1', None, section=QgsSettings.Providers), 'provider1')
self.assertEqual(self.settings.value('key2', None, section=QgsSettings.Providers), 'provider2')
# Test default values on Section getter
self.assertEqual(self.settings.value('key_not_exist', 'misc_not_exist', section=QgsSettings.Misc), 'misc_not_exist')
def test_contains(self):
self.assertEqual(self.settings.allKeys(), [])
self.addToDefaults('testqgissettings/name', 'qgisrocks1')
self.addToDefaults('testqgissettings/name2', 'qgisrocks2')
self.assertTrue(self.settings.contains('testqgissettings/name'))
self.assertTrue(self.settings.contains('testqgissettings/name2'))
self.settings.setValue('testqgissettings/name3', 'qgisrocks3')
self.assertTrue(self.settings.contains('testqgissettings/name3'))
    def test_remove(self):
        self.settings.setValue('testQgisSettings/temp', True)
        self.assertEqual(self.settings.value('testQgisSettings/temp'), True)
        self.settings.remove('testQgisSettings/temp')
        self.assertEqual(self.settings.value('testQgisSettings/temp'), None)
        # Test remove by using Section
        self.settings.setValue('testQgisSettings/tempSection', True, section=QgsSettings.Core)
        self.assertEqual(self.settings.value('testQgisSettings/tempSection', section=QgsSettings.Core), True)
        self.settings.remove('testQgisSettings/tempSection', section=QgsSettings.Core)
        self.assertEqual(self.settings.value('testQgisSettings/tempSection', section=QgsSettings.Core), None)
def test_enumValue(self):
self.settings.setValue('enum', 'LayerUnits')
self.assertEqual(self.settings.enumValue('enum', QgsTolerance.Pixels), QgsTolerance.LayerUnits)
self.settings.setValue('enum', 'dummy_setting')
self.assertEqual(self.settings.enumValue('enum', QgsTolerance.Pixels), QgsTolerance.Pixels)
self.assertEqual(type(self.settings.enumValue('enum', QgsTolerance.Pixels)), QgsTolerance.UnitType)
def test_setEnumValue(self):
self.settings.setValue('enum', 'LayerUnits')
self.assertEqual(self.settings.enumValue('enum', QgsTolerance.Pixels), QgsTolerance.LayerUnits)
self.settings.setEnumValue('enum', QgsTolerance.Pixels)
self.assertEqual(self.settings.enumValue('enum', QgsTolerance.Pixels), QgsTolerance.Pixels)
def test_flagValue(self):
pointAndLine = QgsMapLayerProxyModel.Filters(QgsMapLayerProxyModel.PointLayer | QgsMapLayerProxyModel.LineLayer)
pointAndPolygon = QgsMapLayerProxyModel.Filters(QgsMapLayerProxyModel.PointLayer | QgsMapLayerProxyModel.PolygonLayer)
self.settings.setValue('flag', 'PointLayer|PolygonLayer')
self.assertEqual(self.settings.flagValue('flag', pointAndLine), pointAndPolygon)
self.settings.setValue('flag', 'dummy_setting')
self.assertEqual(self.settings.flagValue('flag', pointAndLine), pointAndLine)
        self.assertEqual(type(self.settings.flagValue('flag', pointAndLine)), QgsMapLayerProxyModel.Filters)
def test_overwriteDefaultValues(self):
"""Test that unchanged values are not stored"""
self.globalsettings.setValue('a_value_with_default', 'a value')
self.globalsettings.setValue('an_invalid_value', QVariant())
self.assertEqual(self.settings.value('a_value_with_default'), 'a value')
self.assertEqual(self.settings.value('an_invalid_value'), QVariant())
# Now, set them with the same current value
self.settings.setValue('a_value_with_default', 'a value')
self.settings.setValue('an_invalid_value', QVariant())
# Check
pure_settings = QSettings(self.settings.fileName(), QSettings.IniFormat)
self.assertFalse('a_value_with_default' in pure_settings.allKeys())
self.assertFalse('an_invalid_value' in pure_settings.allKeys())
# Set a changed value
self.settings.setValue('a_value_with_default', 'a new value')
self.settings.setValue('an_invalid_value', 'valid value')
# Check
self.assertTrue('a_value_with_default' in pure_settings.allKeys())
self.assertTrue('an_invalid_value' in pure_settings.allKeys())
self.assertEqual(self.settings.value('a_value_with_default'), 'a new value')
self.assertEqual(self.settings.value('an_invalid_value'), 'valid value')
# Re-set to original values
self.settings.setValue('a_value_with_default', 'a value')
self.settings.setValue('an_invalid_value', QVariant())
self.assertEqual(self.settings.value('a_value_with_default'), 'a value')
self.assertEqual(self.settings.value('an_invalid_value'), QVariant())
        # The keys are still stored: once a value has been explicitly written,
        # re-setting it to the default does not remove it from the user file
        pure_settings = QSettings(self.settings.fileName(), QSettings.IniFormat)
        self.assertIn('a_value_with_default', pure_settings.allKeys())
        self.assertIn('an_invalid_value', pure_settings.allKeys())
if __name__ == '__main__':
unittest.main()
| pblottiere/QGIS | tests/src/python/test_qgssettings.py | Python | gpl-2.0 | 23,013 |
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Chris Dekter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#import pynotify, gtk, gettext
from gi.repository import Gtk, Gdk, Notify
import gettext
import popupmenu
from autokey.configmanager import *
from autokey import common
HAVE_APPINDICATOR = False
try:
from gi.repository import AppIndicator3
HAVE_APPINDICATOR = True
except ImportError:
pass
gettext.install("autokey")
TOOLTIP_RUNNING = _("AutoKey - running")
TOOLTIP_PAUSED = _("AutoKey - paused")
def get_notifier(app):
if HAVE_APPINDICATOR:
return IndicatorNotifier(app)
else:
return Notifier(app)
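# Usage sketch (hypothetical wiring; 'app' is the running autokey application
# object, which must expose .service and the pause/unpause API used below):
#
#   notifier = get_notifier(app)
#   notifier.notify_error("A script raised an exception")
#   notifier.update_tool_tip()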
class Notifier:
"""
Encapsulates all functionality related to the notification icon, notifications, and tray menu.
"""
def __init__(self, autokeyApp):
Notify.init("AutoKey")
self.app = autokeyApp
self.configManager = autokeyApp.service.configManager
self.icon = Gtk.StatusIcon.new_from_icon_name(ConfigManager.SETTINGS[NOTIFICATION_ICON])
self.update_tool_tip()
self.icon.connect("popup_menu", self.on_popup_menu)
self.icon.connect("activate", self.on_show_configure)
self.errorItem = None
self.update_visible_status()
def update_visible_status(self):
if ConfigManager.SETTINGS[SHOW_TRAY_ICON]:
self.icon.set_visible(True)
else:
self.icon.set_visible(False)
def update_tool_tip(self):
if ConfigManager.SETTINGS[SHOW_TRAY_ICON]:
if ConfigManager.SETTINGS[SERVICE_RUNNING]:
self.icon.set_tooltip_text(TOOLTIP_RUNNING)
else:
self.icon.set_tooltip_text(TOOLTIP_PAUSED)
def hide_icon(self):
self.icon.set_visible(False)
def rebuild_menu(self):
pass
# Signal Handlers ----
def on_popup_menu(self, status_icon, button, activate_time, data=None):
# Main Menu items
enableMenuItem = Gtk.CheckMenuItem(_("Enable Expansions"))
enableMenuItem.set_active(self.app.service.is_running())
enableMenuItem.set_sensitive(not self.app.serviceDisabled)
configureMenuItem = Gtk.ImageMenuItem(_("Show Main Window"))
configureMenuItem.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_PREFERENCES, Gtk.IconSize.MENU))
removeMenuItem = Gtk.ImageMenuItem(_("Remove icon"))
removeMenuItem.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_CLOSE, Gtk.IconSize.MENU))
quitMenuItem = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_QUIT, None)
# Menu signals
enableMenuItem.connect("toggled", self.on_enable_toggled)
configureMenuItem.connect("activate", self.on_show_configure)
removeMenuItem.connect("activate", self.on_remove_icon)
quitMenuItem.connect("activate", self.on_destroy_and_exit)
# Get phrase folders to add to main menu
folders = []
items = []
for folder in self.configManager.allFolders:
if folder.showInTrayMenu:
folders.append(folder)
for item in self.configManager.allItems:
if item.showInTrayMenu:
items.append(item)
# Construct main menu
menu = popupmenu.PopupMenu(self.app.service, folders, items, False)
if len(items) > 0:
menu.append(Gtk.SeparatorMenuItem())
menu.append(enableMenuItem)
if self.errorItem is not None:
menu.append(self.errorItem)
menu.append(configureMenuItem)
menu.append(removeMenuItem)
menu.append(quitMenuItem)
menu.show_all()
menu.popup(None, None, None, None, button, activate_time)
def on_enable_toggled(self, widget, data=None):
if widget.active:
self.app.unpause_service()
else:
self.app.pause_service()
def on_show_configure(self, widget, data=None):
self.app.show_configure()
def on_remove_icon(self, widget, data=None):
self.icon.set_visible(False)
ConfigManager.SETTINGS[SHOW_TRAY_ICON] = False
def on_destroy_and_exit(self, widget, data=None):
self.app.shutdown()
def notify_error(self, message):
self.show_notify(message, Gtk.STOCK_DIALOG_ERROR)
self.errorItem = Gtk.MenuItem(_("View script error"))
self.errorItem.connect("activate", self.on_show_error)
self.icon.set_from_icon_name(common.ICON_FILE_NOTIFICATION_ERROR)
def on_show_error(self, widget, data=None):
self.app.show_script_error()
self.errorItem = None
self.icon.set_from_icon_name(ConfigManager.SETTINGS[NOTIFICATION_ICON])
def show_notify(self, message, iconName):
Gdk.threads_enter()
n = Notify.Notification.new("AutoKey", message, iconName)
n.set_urgency(Notify.Urgency.LOW)
if ConfigManager.SETTINGS[SHOW_TRAY_ICON]:
n.attach_to_status_icon(self.icon)
n.show()
Gdk.threads_leave()
class IndicatorNotifier:
def __init__(self, autokeyApp):
Notify.init("AutoKey")
self.app = autokeyApp
self.configManager = autokeyApp.service.configManager
self.indicator = AppIndicator3.Indicator.new("AutoKey", ConfigManager.SETTINGS[NOTIFICATION_ICON],
AppIndicator3.IndicatorCategory.APPLICATION_STATUS)
self.indicator.set_attention_icon(common.ICON_FILE_NOTIFICATION_ERROR)
self.update_visible_status()
self.rebuild_menu()
def update_visible_status(self):
if ConfigManager.SETTINGS[SHOW_TRAY_ICON]:
self.indicator.set_status(AppIndicator3.IndicatorStatus.ACTIVE)
else:
self.indicator.set_status(AppIndicator3.IndicatorStatus.PASSIVE)
def hide_icon(self):
self.indicator.set_status(AppIndicator3.IndicatorStatus.PASSIVE)
def rebuild_menu(self):
# Main Menu items
self.errorItem = Gtk.MenuItem(_("View script error"))
enableMenuItem = Gtk.CheckMenuItem(_("Enable Expansions"))
enableMenuItem.set_active(self.app.service.is_running())
enableMenuItem.set_sensitive(not self.app.serviceDisabled)
configureMenuItem = Gtk.ImageMenuItem(_("Show Main Window"))
configureMenuItem.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_PREFERENCES, Gtk.IconSize.MENU))
removeMenuItem = Gtk.ImageMenuItem(_("Remove icon"))
removeMenuItem.set_image(Gtk.Image.new_from_stock(Gtk.STOCK_CLOSE, Gtk.IconSize.MENU))
quitMenuItem = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_QUIT, None)
# Menu signals
enableMenuItem.connect("toggled", self.on_enable_toggled)
configureMenuItem.connect("activate", self.on_show_configure)
removeMenuItem.connect("activate", self.on_remove_icon)
quitMenuItem.connect("activate", self.on_destroy_and_exit)
self.errorItem.connect("activate", self.on_show_error)
# Get phrase folders to add to main menu
folders = []
items = []
for folder in self.configManager.allFolders:
if folder.showInTrayMenu:
folders.append(folder)
for item in self.configManager.allItems:
if item.showInTrayMenu:
items.append(item)
# Construct main menu
self.menu = popupmenu.PopupMenu(self.app.service, folders, items, False)
if len(items) > 0:
self.menu.append(Gtk.SeparatorMenuItem())
self.menu.append(self.errorItem)
self.menu.append(enableMenuItem)
self.menu.append(configureMenuItem)
self.menu.append(removeMenuItem)
self.menu.append(quitMenuItem)
self.menu.show_all()
self.errorItem.hide()
self.indicator.set_menu(self.menu)
def notify_error(self, message):
self.show_notify(message, Gtk.STOCK_DIALOG_ERROR)
self.errorItem.show()
self.indicator.set_status(AppIndicator3.IndicatorStatus.ATTENTION)
def show_notify(self, message, iconName):
Gdk.threads_enter()
n = Notify.Notification.new("AutoKey", message, iconName)
n.set_urgency(Notify.Urgency.LOW)
n.show()
Gdk.threads_leave()
def update_tool_tip(self):
pass
def on_show_error(self, widget, data=None):
self.app.show_script_error()
self.errorItem.hide()
self.update_visible_status()
def on_enable_toggled(self, widget, data=None):
if widget.active:
self.app.unpause_service()
else:
self.app.pause_service()
def on_show_configure(self, widget, data=None):
self.app.show_configure()
def on_remove_icon(self, widget, data=None):
self.indicator.set_status(AppIndicator3.IndicatorStatus.PASSIVE)
ConfigManager.SETTINGS[SHOW_TRAY_ICON] = False
def on_destroy_and_exit(self, widget, data=None):
self.app.shutdown()
try:
    from gi.repository import Unity, Dbusmenu
    HAVE_UNITY = True
except ImportError:
    HAVE_UNITY = False


class UnityLauncher(IndicatorNotifier):

    SHOW_ITEM_STRING = _("Add to quicklist/notification menu")

    def __getQuickItem(self, label):
        item = Dbusmenu.Menuitem.new()
        item.property_set(Dbusmenu.MENUITEM_PROP_LABEL, label)
        item.property_set_bool(Dbusmenu.MENUITEM_PROP_VISIBLE, True)
        return item

    def rebuild_menu(self):
        IndicatorNotifier.rebuild_menu(self)
        if not HAVE_UNITY:
            return
        self.launcher = Unity.LauncherEntry.get_for_desktop_id("autokey-gtk.desktop")

        # Main Menu items
        enableMenuItem = self.__getQuickItem(_("Enable Expansions"))
        enableMenuItem.property_set(Dbusmenu.MENUITEM_PROP_TOGGLE_TYPE, Dbusmenu.MENUITEM_TOGGLE_CHECK)
        enableMenuItem.property_set_int(Dbusmenu.MENUITEM_PROP_TOGGLE_STATE, int(self.app.service.is_running()))
        enableMenuItem.property_set_bool(Dbusmenu.MENUITEM_PROP_ENABLED, not self.app.serviceDisabled)

        configureMenuItem = self.__getQuickItem(_("Show Main Window"))

        # Menu signals
        enableMenuItem.connect("item-activated", self.on_ql_enable_toggled, None)
        configureMenuItem.connect("item-activated", self.on_show_configure, None)

        # Construct the quicklist menu
        quicklist = Dbusmenu.Menuitem.new()
        quicklist.child_append(enableMenuItem)
        quicklist.child_append(configureMenuItem)
        self.launcher.set_property("quicklist", quicklist)

    def on_ql_enable_toggled(self, menuitem, data=None):
        if menuitem.property_get_int(Dbusmenu.MENUITEM_PROP_TOGGLE_STATE) == Dbusmenu.MENUITEM_TOGGLE_STATE_CHECKED:
            self.app.unpause_service()
        else:
            self.app.pause_service()
| okroener/autokey | src/lib/gtkui/notifier.py | Python | gpl-3.0 | 12,947 |
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.application.strports}.
"""
from twisted.trial.unittest import TestCase
from twisted.application import strports
from twisted.application import internet
from twisted.internet.test.test_endpoints import ParserTestCase
from twisted.internet.protocol import Factory
from twisted.internet.endpoints import TCP4ServerEndpoint, UNIXServerEndpoint
class DeprecatedParseTestCase(ParserTestCase):
"""
L{strports.parse} is deprecated. It's an alias for a method that is now
private in L{twisted.internet.endpoints}.
"""
def parse(self, *a, **kw):
result = strports.parse(*a, **kw)
warnings = self.flushWarnings([self.parse])
self.assertEquals(len(warnings), 1)
self.assertEquals(
warnings[0]['message'],
"twisted.application.strports.parse was deprecated "
"in Twisted 10.2.0: in favor of twisted.internet.endpoints.serverFromString")
return result
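    # For reference, the replacement API named by the deprecation message
    # would be used roughly like this (a sketch, not exercised by these tests):
    #
    #   from twisted.internet import reactor
    #   from twisted.internet.endpoints import serverFromString
    #   endpoint = serverFromString(reactor, "tcp:80")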
def test_simpleNumeric(self):
"""
Base numeric ports should be parsed as TCP.
"""
self.assertEquals(self.parse('80', self.f),
('TCP', (80, self.f), {'interface':'', 'backlog':50}))
def test_allKeywords(self):
"""
A collection of keyword arguments with no prefixed type, like 'port=80',
will be parsed as keyword arguments to 'tcp'.
"""
self.assertEquals(self.parse('port=80', self.f),
('TCP', (80, self.f), {'interface':'', 'backlog':50}))
class ServiceTestCase(TestCase):
"""
Tests for L{strports.service}.
"""
def test_service(self):
"""
L{strports.service} returns a L{StreamServerEndpointService}
constructed with an endpoint produced from
L{endpoint.serverFromString}, using the same syntax.
"""
reactor = object() # the cake is a lie
aFactory = Factory()
aGoodPort = 1337
svc = strports.service(
'tcp:'+str(aGoodPort), aFactory, reactor=reactor)
self.assertIsInstance(svc, internet.StreamServerEndpointService)
# See twisted.application.test.test_internet.TestEndpointService.
# test_synchronousRaiseRaisesSynchronously
self.assertEquals(svc._raiseSynchronously, True)
self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
# Maybe we should implement equality for endpoints.
self.assertEquals(svc.endpoint._port, aGoodPort)
self.assertIdentical(svc.factory, aFactory)
self.assertIdentical(svc.endpoint._reactor, reactor)
def test_serviceDefaultReactor(self):
"""
L{strports.service} will use the default reactor when none is provided
as an argument.
"""
from twisted.internet import reactor as globalReactor
aService = strports.service("tcp:80", None)
self.assertIdentical(aService.endpoint._reactor, globalReactor)
def test_serviceDeprecatedDefault(self):
"""
L{strports.service} still accepts a 'default' argument, which will
affect the parsing of 'default' (i.e. 'not containing a colon')
endpoint descriptions, but this behavior is deprecated.
"""
svc = strports.service("8080", None, "unix")
self.assertIsInstance(svc.endpoint, UNIXServerEndpoint)
warnings = self.flushWarnings([self.test_serviceDeprecatedDefault])
self.assertEquals(warnings[0]['category'], DeprecationWarning)
self.assertEquals(
warnings[0]['message'],
"The 'default' parameter was deprecated in Twisted 10.2.0. "
"Use qualified endpoint descriptions; for example, 'tcp:8080'.")
self.assertEquals(len(warnings), 1)
# Almost the same case, but slightly tricky - explicitly passing the old
# default value, None, also must trigger a deprecation warning.
svc = strports.service("tcp:8080", None, None)
self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
warnings = self.flushWarnings([self.test_serviceDeprecatedDefault])
self.assertEquals(warnings[0]['category'], DeprecationWarning)
self.assertEquals(
warnings[0]['message'],
"The 'default' parameter was deprecated in Twisted 10.2.0.")
self.assertEquals(len(warnings), 1)
def test_serviceDeprecatedUnqualified(self):
"""
Unqualified strport descriptions, i.e. "8080", are deprecated.
"""
svc = strports.service("8080", None)
self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
warnings = self.flushWarnings(
[self.test_serviceDeprecatedUnqualified])
self.assertEquals(warnings[0]['category'], DeprecationWarning)
self.assertEquals(
warnings[0]['message'],
"Unqualified strport description passed to 'service'."
"Use qualified endpoint descriptions; for example, 'tcp:8080'.")
self.assertEquals(len(warnings), 1)
| eunchong/build | third_party/twisted_10_2/twisted/test/test_strports.py | Python | bsd-3-clause | 5,121 |
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) Ansible Inc, 2016
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import glob
import os
import pickle
import platform
import select
import shlex
import subprocess
import traceback
from ansible.module_utils.six import PY2, b
from ansible.module_utils._text import to_bytes, to_text
def sysv_is_enabled(name):
'''
This function will check if the service name supplied
is enabled in any of the sysv runlevels
:arg name: name of the service to test for
'''
return bool(glob.glob('/etc/rc?.d/S??%s' % name))
def get_sysv_script(name):
'''
This function will return the expected path for an init script
corresponding to the service name supplied.
:arg name: name or path of the service to test for
'''
if name.startswith('/'):
result = name
else:
result = '/etc/init.d/%s' % name
return result
def sysv_exists(name):
'''
This function will return True or False depending on
the existence of an init script corresponding to the service name supplied.
:arg name: name of the service to test for
'''
return os.path.exists(get_sysv_script(name))
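# Usage sketch for the three helpers above (service names are illustrative):
#
#   sysv_is_enabled('ssh')           # True if any /etc/rc?.d/S??ssh link exists
#   get_sysv_script('ssh')           # '/etc/init.d/ssh'
#   get_sysv_script('/opt/foo/bar')  # '/opt/foo/bar' (absolute paths pass through)
#   sysv_exists('ssh')               # True if the init script file exists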
def fail_if_missing(module, found, service, msg=''):
'''
This function will return an error or exit gracefully depending on check mode status
and if the service is missing or not.
    :arg module: is an AnsibleModule object, used for its utility methods
:arg found: boolean indicating if services was found or not
:arg service: name of service
:kw msg: extra info to append to error/success msg when missing
'''
if not found:
if module.check_mode:
module.exit_json(msg="Service %s not found on %s, assuming it will exist on full run" % (service, msg), changed=True)
else:
module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))
def daemonize(module, cmd):
'''
Execute a command while detaching as a daemon, returns rc, stdout, and stderr.
    :arg module: is an AnsibleModule object, used for its utility methods
:arg cmd: is a list or string representing the command and options to run
This is complex because daemonization is hard for people.
What we do is daemonize a part of this module, the daemon runs the command,
picks up the return code and output, and returns it to the main process.
'''
# init some vars
chunk = 4096 # FIXME: pass in as arg?
errors = 'surrogate_or_strict'
# start it!
try:
pipe = os.pipe()
pid = os.fork()
    except OSError as e:
        module.fail_json(msg="Error while attempting to fork: %s" % to_text(e), exception=traceback.format_exc())
# we don't do any locking as this should be a unique module/process
if pid == 0:
os.close(pipe[0])
# Set stdin/stdout/stderr to /dev/null
fd = os.open(os.devnull, os.O_RDWR)
# clone stdin/out/err
for num in range(3):
if fd != num:
os.dup2(fd, num)
# close otherwise
if fd not in range(3):
os.close(fd)
# Make us a daemon
pid = os.fork()
# end if not in child
if pid > 0:
os._exit(0)
# get new process session and detach
sid = os.setsid()
if sid == -1:
module.fail_json(msg="Unable to detach session while daemonizing")
# avoid possible problems with cwd being removed
os.chdir("/")
pid = os.fork()
if pid > 0:
os._exit(0)
# if command is string deal with py2 vs py3 conversions for shlex
if not isinstance(cmd, list):
if PY2:
cmd = shlex.split(to_bytes(cmd, errors=errors))
else:
cmd = shlex.split(to_text(cmd, errors=errors))
# make sure we always use byte strings
run_cmd = []
for c in cmd:
run_cmd.append(to_bytes(c, errors=errors))
# execute the command in forked process
p = subprocess.Popen(run_cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=lambda: os.close(pipe[1]))
fds = [p.stdout, p.stderr]
# loop reading output till its done
        output = {p.stdout: b(""), p.stderr: b("")}
while fds:
rfd, wfd, efd = select.select(fds, [], fds, 1)
if (rfd + wfd + efd) or p.poll():
for out in fds:
if out in rfd:
data = os.read(out.fileno(), chunk)
if not data:
fds.remove(out)
                        output[out] += data
# even after fds close, we might want to wait for pid to die
p.wait()
# Return a pickled data of parent
return_data = pickle.dumps([p.returncode, to_text(output[p.stdout]), to_text(output[p.stderr])], protocol=pickle.HIGHEST_PROTOCOL)
os.write(pipe[1], to_bytes(return_data, errors=errors))
# clean up
os.close(pipe[1])
os._exit(0)
elif pid == -1:
module.fail_json(msg="Unable to fork, no exception thrown, probably due to lack of resources, check logs.")
else:
# in parent
os.close(pipe[1])
os.waitpid(pid, 0)
# Grab response data after child finishes
return_data = b("")
while True:
rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
if pipe[0] in rfd:
data = os.read(pipe[0], chunk)
if not data:
break
return_data += b(data)
# Note: no need to specify encoding on py3 as this module sends the
# pickle to itself (thus same python interpreter so we aren't mixing
# py2 and py3)
return pickle.loads(to_bytes(return_data, errors=errors))
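# Usage sketch ('module' is the AnsibleModule instance, and
# '/usr/sbin/mydaemon' is a hypothetical command used for illustration):
#
#   rc, out, err = daemonize(module, ['/usr/sbin/mydaemon', '--start'])
#   if rc != 0:
#       module.fail_json(msg='daemonized command failed: %s' % err)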
def check_ps(module, pattern):
# Set ps flags
if platform.system() == 'SunOS':
psflags = '-ef'
else:
psflags = 'auxww'
# Find ps binary
psbin = module.get_bin_path('ps', True)
(rc, out, err) = module.run_command('%s %s' % (psbin, psflags))
# If rc is 0, set running as appropriate
if rc == 0:
for line in out.split('\n'):
if pattern in line:
return True
return False
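# Usage sketch: check whether any running process matches a pattern
# (the match is a plain substring test against each ps output line):
#
#   if check_ps(module, 'mydaemon'):
#       module.exit_json(msg='mydaemon is running', changed=False)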
| e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/module_utils/service.py | Python | bsd-3-clause | 7,923 |
# Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from .. import pytree
from .. import fixer_base
from ..fixer_util import Name, Attr, touch_import
class FixIntern(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'intern'
trailer< lpar='('
( not(arglist | argument<any '=' any>) obj=any
| obj=arglist<(not argument<any '=' any>) any ','> )
rpar=')' >
after=any*
>
"""
def transform(self, node, results):
syms = self.syms
obj = results["obj"].clone()
if obj.type == syms.arglist:
newarglist = obj.clone()
else:
newarglist = pytree.Node(syms.arglist, [obj.clone()])
after = results["after"]
if after:
after = [n.clone() for n in after]
new = pytree.Node(syms.power,
Attr(Name(u"sys"), Name(u"intern")) +
[pytree.Node(syms.trailer,
[results["lpar"].clone(),
newarglist,
results["rpar"].clone()])] + after)
new.prefix = node.prefix
touch_import(None, u'sys', node)
return new
| nmercier/linux-cross-gcc | win32/bin/Lib/lib2to3/fixes/fix_intern.py | Python | bsd-3-clause | 1,451 |
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Toshio Kuratomi <[email protected]>, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
.. warning:: This module_util is currently internal implementation.
We want to evaluate this code for stability and API suitability before
making backwards compatibility guarantees. The API may change between
releases. Do not use this unless you are willing to port your module code.
"""
import codecs
from ansible.module_utils.six import PY3, text_type, binary_type
try:
codecs.lookup_error('surrogateescape')
HAS_SURROGATEESCAPE = True
except LookupError:
HAS_SURROGATEESCAPE = False
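# On Python 2, a backported handler would have to be registered before this
# module is imported, roughly like this ('surrogateescape_handler' is a
# hypothetical callable provided by such a backport):
#
#   import codecs
#   codecs.register_error('surrogateescape', surrogateescape_handler)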
_COMPOSED_ERROR_HANDLERS = frozenset((None, 'surrogate_or_escape',
'surrogate_or_strict',
'surrogate_then_replace'))
def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
"""Make sure that a string is a byte string
:arg obj: An object to make sure is a byte string. In most cases this
will be either a text string or a byte string. However, with
``nonstring='simplerepr'``, this can be used as a traceback-free
version of ``str(obj)``.
:kwarg encoding: The encoding to use to transform from a text string to
a byte string. Defaults to using 'utf-8'.
:kwarg errors: The error handler to use if the text string is not
encodable using the specified encoding. Any valid `codecs error
handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_
may be specified. There are three additional error strategies
specifically aimed at helping people to port code. The first two are:
:surrogate_or_strict: Will use ``surrogateescape`` if it is a valid
handler, otherwise it will use ``strict``
:surrogate_or_replace: Will use ``surrogateescape`` if it is a valid
handler, otherwise it will use ``replace``.
Because ``surrogateescape`` was added in Python3 this usually means that
Python3 will use ``surrogateescape`` and Python2 will use the fallback
error handler. Note that the code checks for ``surrogateescape`` when the
module is imported. If you have a backport of ``surrogateescape`` for
Python2, be sure to register the error handler prior to importing this
module.
The last error handler is:
:surrogate_then_replace: Will use ``surrogateescape`` if it is a valid
handler. If encoding with ``surrogateescape`` would traceback,
surrogates are first replaced with a replacement characters
and then the string is encoded using ``replace`` (which replaces
the rest of the nonencodable bytes). If ``surrogateescape`` is
not present it will simply use ``replace``. (Added in Ansible 2.3)
This strategy is designed to never traceback when it attempts
to encode a string.
The default until Ansible-2.2 was ``surrogate_or_replace``
From Ansible-2.3 onwards, the default is ``surrogate_then_replace``.
:kwarg nonstring: The strategy to use if a nonstring is specified in
``obj``. Default is 'simplerepr'. Valid values are:
:simplerepr: The default. This takes the ``str`` of the object and
then returns the bytes version of that string.
:empty: Return an empty byte string
:passthru: Return the object passed in
:strict: Raise a :exc:`TypeError`
:returns: Typically this returns a byte string. If a nonstring object is
passed in this may be a different type depending on the strategy
specified by nonstring. This will never return a text string.
.. note:: If passed a byte string, this function does not check that the
string is valid in the specified encoding. If it's important that the
byte string is in the specified encoding do::
encoded_string = to_bytes(to_text(input_string, 'latin-1'), 'utf-8')
.. version_changed:: 2.3
Added the ``surrogate_then_replace`` error handler and made it the default error handler.
"""
if isinstance(obj, binary_type):
return obj
# We're given a text string
# If it has surrogates, we know because it will decode
original_errors = errors
if errors in _COMPOSED_ERROR_HANDLERS:
if HAS_SURROGATEESCAPE:
errors = 'surrogateescape'
elif errors == 'surrogate_or_strict':
errors = 'strict'
else:
errors = 'replace'
if isinstance(obj, text_type):
try:
# Try this first as it's the fastest
return obj.encode(encoding, errors)
except UnicodeEncodeError:
if original_errors in (None, 'surrogate_then_replace'):
# Slow but works
return_string = obj.encode('utf-8', 'surrogateescape')
return_string = return_string.decode('utf-8', 'replace')
return return_string.encode(encoding, 'replace')
raise
# Note: We do these last even though we have to call to_bytes again on the
# value because we're optimizing the common case
if nonstring == 'simplerepr':
try:
value = str(obj)
except UnicodeError:
try:
value = repr(obj)
except UnicodeError:
# Giving up
return to_bytes('')
elif nonstring == 'passthru':
return obj
elif nonstring == 'empty':
# python2.4 doesn't have b''
return to_bytes('')
elif nonstring == 'strict':
raise TypeError('obj must be a string type')
else:
raise TypeError('Invalid value %s for to_bytes\' nonstring parameter' % nonstring)
return to_bytes(value, encoding, errors)
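# Usage sketch (byte values shown for illustration):
#
#   to_bytes(u'caf\xe9')                       # -> b'caf\xc3\xa9' (utf-8 default)
#   to_bytes(u'caf\xe9', encoding='ascii',
#            errors='surrogate_then_replace')  # -> b'caf?', never raises
#   to_bytes(42)                               # -> b'42' via nonstring='simplerepr'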
def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
"""Make sure that a string is a text string
:arg obj: An object to make sure is a text string. In most cases this
will be either a text string or a byte string. However, with
``nonstring='simplerepr'``, this can be used as a traceback-free
version of ``str(obj)``.
:kwarg encoding: The encoding to use to transform from a byte string to
a text string. Defaults to using 'utf-8'.
:kwarg errors: The error handler to use if the byte string is not
decodable using the specified encoding. Any valid `codecs error
handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_
may be specified. We support three additional error strategies
specifically aimed at helping people to port code:
:surrogate_or_strict: Will use surrogateescape if it is a valid
handler, otherwise it will use strict
:surrogate_or_replace: Will use surrogateescape if it is a valid
handler, otherwise it will use replace.
        :surrogate_then_replace: Does the same as surrogate_or_replace but
            was added for symmetry with the error handlers in
            :func:`ansible.module_utils._text.to_bytes` (Added in Ansible 2.3)
Because surrogateescape was added in Python3 this usually means that
Python3 will use `surrogateescape` and Python2 will use the fallback
error handler. Note that the code checks for surrogateescape when the
module is imported. If you have a backport of `surrogateescape` for
python2, be sure to register the error handler prior to importing this
module.
The default until Ansible-2.2 was `surrogate_or_replace`
In Ansible-2.3 this defaults to `surrogate_then_replace` for symmetry
with :func:`ansible.module_utils._text.to_bytes` .
:kwarg nonstring: The strategy to use if a nonstring is specified in
``obj``. Default is 'simplerepr'. Valid values are:
:simplerepr: The default. This takes the ``str`` of the object and
then returns the text version of that string.
:empty: Return an empty text string
:passthru: Return the object passed in
:strict: Raise a :exc:`TypeError`
:returns: Typically this returns a text string. If a nonstring object is
passed in this may be a different type depending on the strategy
specified by nonstring. This will never return a byte string.
From Ansible-2.3 onwards, the default is `surrogate_then_replace`.
.. version_changed:: 2.3
Added the surrogate_then_replace error handler and made it the default error handler.
"""
if isinstance(obj, text_type):
return obj
if errors in _COMPOSED_ERROR_HANDLERS:
if HAS_SURROGATEESCAPE:
errors = 'surrogateescape'
elif errors == 'surrogate_or_strict':
errors = 'strict'
else:
errors = 'replace'
if isinstance(obj, binary_type):
# Note: We don't need special handling for surrogate_then_replace
# because all bytes will either be made into surrogates or are valid
# to decode.
return obj.decode(encoding, errors)
# Note: We do these last even though we have to call to_text again on the
# value because we're optimizing the common case
if nonstring == 'simplerepr':
try:
value = str(obj)
except UnicodeError:
try:
value = repr(obj)
except UnicodeError:
# Giving up
return u''
elif nonstring == 'passthru':
return obj
elif nonstring == 'empty':
return u''
elif nonstring == 'strict':
raise TypeError('obj must be a string type')
else:
raise TypeError('Invalid value %s for to_text\'s nonstring parameter' % nonstring)
return to_text(value, encoding, errors)
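# Usage sketch:
#
#   to_text(b'caf\xc3\xa9')   # -> u'caf\xe9' (utf-8 default)
#   to_text(3.14)             # -> u'3.14' via nonstring='simplerepr'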
#: :py:func:`to_native`
#: Transform a variable into the native str type for the python version
#:
#: On Python2, this is an alias for
#: :func:`~ansible.module_utils.to_bytes`. On Python3 it is an alias for
#: :func:`~ansible.module_utils.to_text`. It makes it easier to
#: transform a variable into the native str type for the python version
#: the code is running on. Use this when constructing the message to
#: send to exceptions or when dealing with an API that needs to take
#: a native string. Example::
#:
#: try:
#: 1//0
#: except ZeroDivisionError as e:
#:         raise MyException('Encountered an error: %s' % to_native(e))
if PY3:
to_native = to_text
else:
to_native = to_bytes
| Slezhuk/ansible | lib/ansible/module_utils/_text.py | Python | gpl-3.0 | 12,325 |
import frappe
def execute():
if "device" not in frappe.db.get_table_columns("Sessions"):
frappe.db.sql("alter table tabSessions add column `device` varchar(255) default 'desktop'")
| indautgrp/frappe | frappe/patches/v5_0/modify_session.py | Python | mit | 185 |
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.play_iterator import HostState, PlayIterator
from ansible.playbook import Playbook
from ansible.playbook.task import Task
from ansible.playbook.play_context import PlayContext
from units.mock.loader import DictDataLoader
from units.mock.path import mock_unfrackpath_noop
class TestPlayIterator(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_host_state(self):
hs = HostState(blocks=[x for x in range(0, 10)])
hs.tasks_child_state = HostState(blocks=[0])
hs.rescue_child_state = HostState(blocks=[1])
hs.always_child_state = HostState(blocks=[2])
hs.__repr__()
hs.run_state = 100
hs.__repr__()
hs.fail_state = 15
hs.__repr__()
for i in range(0, 10):
hs.cur_block = i
self.assertEqual(hs.get_current_block(), i)
new_hs = hs.copy()
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_play_iterator(self):
fake_loader = DictDataLoader({
"test_play.yml": """
- hosts: all
gather_facts: false
roles:
- test_role
pre_tasks:
- debug: msg="this is a pre_task"
tasks:
- debug: msg="this is a regular task"
- block:
- debug: msg="this is a block task"
- block:
- debug: msg="this is a sub-block in a block"
rescue:
- debug: msg="this is a rescue task"
- block:
- debug: msg="this is a sub-block in a rescue"
always:
- debug: msg="this is an always task"
- block:
- debug: msg="this is a sub-block in an always"
post_tasks:
- debug: msg="this is a post_task"
""",
'/etc/ansible/roles/test_role/tasks/main.yml': """
- name: role task
debug: msg="this is a role task"
- block:
- name: role block task
debug: msg="inside block in role"
always:
- name: role always task
debug: msg="always task in block in role"
- include: foo.yml
- name: role task after include
debug: msg="after include in role"
- block:
- name: starting role nested block 1
debug:
- block:
- name: role nested block 1 task 1
debug:
- name: role nested block 1 task 2
debug:
- name: role nested block 1 task 3
debug:
- name: end of role nested block 1
debug:
- name: starting role nested block 2
debug:
- block:
- name: role nested block 2 task 1
debug:
- name: role nested block 2 task 2
debug:
- name: role nested block 2 task 3
debug:
- name: end of role nested block 2
debug:
""",
'/etc/ansible/roles/test_role/tasks/foo.yml': """
- name: role included task
debug: msg="this is task in an include from a role"
"""
})
mock_var_manager = MagicMock()
mock_var_manager._fact_cache = dict()
mock_var_manager.get_vars.return_value = dict()
p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)
hosts = []
for i in range(0, 10):
host = MagicMock()
host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host)
mock_var_manager._fact_cache['host00'] = dict()
inventory = MagicMock()
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
play_context = PlayContext(play=p._entries[0])
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# lookup up an original task
target_task = p._entries[0].tasks[0].block[0]
task_copy = target_task.copy(exclude_parent=True)
found_task = itr.get_original_task(hosts[0], task_copy)
self.assertEqual(target_task, found_task)
bad_task = Task()
found_task = itr.get_original_task(hosts[0], bad_task)
self.assertIsNone(found_task)
# pre task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
# role task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.name, "role task")
self.assertIsNotNone(task._role)
# role block task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role block task")
self.assertIsNotNone(task._role)
# role block always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role always task")
self.assertIsNotNone(task._role)
# role include task
#(host_state, task) = itr.get_next_task_for_host(hosts[0])
#self.assertIsNotNone(task)
#self.assertEqual(task.action, 'debug')
#self.assertEqual(task.name, "role included task")
#self.assertIsNotNone(task._role)
# role task after include
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role task after include")
self.assertIsNotNone(task._role)
# role nested block tasks
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "starting role nested block 1")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 1 task 1")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 1 task 2")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 1 task 3")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "end of role nested block 1")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "starting role nested block 2")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 2 task 1")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 2 task 2")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "role nested block 2 task 3")
self.assertIsNotNone(task._role)
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.name, "end of role nested block 2")
self.assertIsNotNone(task._role)
# regular play task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertIsNone(task._role)
# block task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a block task"))
# sub-block task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in a block"))
# mark the host failed
itr.mark_host_failed(hosts[0])
# block rescue task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a rescue task"))
# sub-block rescue task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in a rescue"))
# block always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is an always task"))
# sub-block always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg="this is a sub-block in an always"))
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
# post task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
# end of iteration
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNone(task)
# host 0 shouldn't be in the failed hosts, as the error
# was handled by a rescue block
failed_hosts = itr.get_failed_hosts()
self.assertNotIn(hosts[0], failed_hosts)
def test_play_iterator_nested_blocks(self):
fake_loader = DictDataLoader({
"test_play.yml": """
- hosts: all
gather_facts: false
tasks:
- block:
- block:
- block:
- block:
- block:
- debug: msg="this is the first task"
- ping:
rescue:
- block:
- block:
- block:
- block:
- debug: msg="this is the rescue task"
always:
- block:
- block:
- block:
- block:
- debug: msg="this is the always task"
""",
})
mock_var_manager = MagicMock()
mock_var_manager._fact_cache = dict()
mock_var_manager.get_vars.return_value = dict()
p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)
hosts = []
for i in range(0, 10):
host = MagicMock()
host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host)
inventory = MagicMock()
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
play_context = PlayContext(play=p._entries[0])
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
self.assertEqual(task.args, dict(_raw_params='flush_handlers'))
# get the first task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg='this is the first task'))
# fail the host
itr.mark_host_failed(hosts[0])
        # get the rescue task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg='this is the rescue task'))
# get the always task
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'debug')
self.assertEqual(task.args, dict(msg='this is the always task'))
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
self.assertEqual(task.args, dict(_raw_params='flush_handlers'))
# implicit meta: flush_handlers
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNotNone(task)
self.assertEqual(task.action, 'meta')
self.assertEqual(task.args, dict(_raw_params='flush_handlers'))
# end of iteration
(host_state, task) = itr.get_next_task_for_host(hosts[0])
self.assertIsNone(task)
def test_play_iterator_add_tasks(self):
fake_loader = DictDataLoader({
'test_play.yml': """
- hosts: all
gather_facts: no
tasks:
- debug: msg="dummy task"
""",
})
mock_var_manager = MagicMock()
mock_var_manager._fact_cache = dict()
mock_var_manager.get_vars.return_value = dict()
p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)
hosts = []
for i in range(0, 10):
host = MagicMock()
host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host)
inventory = MagicMock()
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
play_context = PlayContext(play=p._entries[0])
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# test the high-level add_tasks() method
        s = HostState(blocks=[0, 1, 2])
itr._insert_tasks_into_state = MagicMock(return_value=s)
itr.add_tasks(hosts[0], [MagicMock(), MagicMock(), MagicMock()])
self.assertEqual(itr._host_states[hosts[0].name], s)
# now actually test the lower-level method that does the work
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
# iterate past first task
_, task = itr.get_next_task_for_host(hosts[0])
        while task and task.action != 'debug':
_, task = itr.get_next_task_for_host(hosts[0])
if task is None:
raise Exception("iterated past end of play while looking for place to insert tasks")
# get the current host state and copy it so we can mutate it
s = itr.get_host_state(hosts[0])
s_copy = s.copy()
        # assert that with an empty task list, or when in a failed state,
        # the state is returned as-is
res_state = itr._insert_tasks_into_state(s_copy, task_list=[])
self.assertEqual(res_state, s_copy)
s_copy.fail_state = itr.FAILED_TASKS
res_state = itr._insert_tasks_into_state(s_copy, task_list=[MagicMock()])
self.assertEqual(res_state, s_copy)
# but if we've failed with a rescue/always block
mock_task = MagicMock()
s_copy.run_state = itr.ITERATING_RESCUE
res_state = itr._insert_tasks_into_state(s_copy, task_list=[mock_task])
self.assertEqual(res_state, s_copy)
self.assertIn(mock_task, res_state._blocks[res_state.cur_block].rescue)
itr._host_states[hosts[0].name] = res_state
(next_state, next_task) = itr.get_next_task_for_host(hosts[0], peek=True)
self.assertEqual(next_task, mock_task)
itr._host_states[hosts[0].name] = s
# test a regular insertion
s_copy = s.copy()
res_state = itr._insert_tasks_into_state(s_copy, task_list=[MagicMock()])
| grimmjow8/ansible | test/units/executor/test_play_iterator.py | Python | gpl-3.0 | 18,831 |
import sys
import os
import unittest
import cStringIO
import warnings
import re
try:
import json
except ImportError:
import simplejson as json
from support import html5lib_test_files
from html5lib.tokenizer import HTMLTokenizer
from html5lib import constants
class TokenizerTestParser(object):
def __init__(self, initialState, lastStartTag=None):
self.tokenizer = HTMLTokenizer
self._state = initialState
self._lastStartTag = lastStartTag
def parse(self, stream, encoding=None, innerHTML=False):
tokenizer = self.tokenizer(stream, encoding)
self.outputTokens = []
tokenizer.state = getattr(tokenizer, self._state)
if self._lastStartTag is not None:
tokenizer.currentToken = {"type": "startTag",
"name":self._lastStartTag}
types = dict((v,k) for k,v in constants.tokenTypes.iteritems())
for token in tokenizer:
getattr(self, 'process%s' % types[token["type"]])(token)
return self.outputTokens
def processDoctype(self, token):
self.outputTokens.append([u"DOCTYPE", token["name"], token["publicId"],
token["systemId"], token["correct"]])
def processStartTag(self, token):
self.outputTokens.append([u"StartTag", token["name"],
dict(token["data"][::-1]), token["selfClosing"]])
def processEmptyTag(self, token):
if token["name"] not in constants.voidElements:
self.outputTokens.append(u"ParseError")
self.outputTokens.append([u"StartTag", token["name"], dict(token["data"][::-1])])
def processEndTag(self, token):
self.outputTokens.append([u"EndTag", token["name"],
token["selfClosing"]])
def processComment(self, token):
self.outputTokens.append([u"Comment", token["data"]])
def processSpaceCharacters(self, token):
self.outputTokens.append([u"Character", token["data"]])
self.processSpaceCharacters = self.processCharacters
def processCharacters(self, token):
self.outputTokens.append([u"Character", token["data"]])
def processEOF(self, token):
pass
def processParseError(self, token):
self.outputTokens.append([u"ParseError", token["data"]])
def concatenateCharacterTokens(tokens):
outputTokens = []
for token in tokens:
if not "ParseError" in token and token[0] == "Character":
if (outputTokens and not "ParseError" in outputTokens[-1] and
outputTokens[-1][0] == "Character"):
outputTokens[-1][1] += token[1]
else:
outputTokens.append(token)
else:
outputTokens.append(token)
return outputTokens
def normalizeTokens(tokens):
# TODO: convert tests to reflect arrays
for i, token in enumerate(tokens):
if token[0] == u'ParseError':
tokens[i] = token[0]
return tokens
def tokensMatch(expectedTokens, receivedTokens, ignoreErrorOrder,
ignoreErrors=False):
"""Test whether the test has passed or failed
If the ignoreErrorOrder flag is set to true we don't test the relative
positions of parse errors and non parse errors
"""
    checkSelfClosing = False
for token in expectedTokens:
if (token[0] == "StartTag" and len(token) == 4
or token[0] == "EndTag" and len(token) == 3):
checkSelfClosing = True
break
if not checkSelfClosing:
for token in receivedTokens:
if token[0] == "StartTag" or token[0] == "EndTag":
token.pop()
if not ignoreErrorOrder and not ignoreErrors:
return expectedTokens == receivedTokens
else:
#Sort the tokens into two groups; non-parse errors and parse errors
tokens = {"expected":[[],[]], "received":[[],[]]}
for tokenType, tokenList in zip(tokens.keys(),
(expectedTokens, receivedTokens)):
for token in tokenList:
if token != "ParseError":
tokens[tokenType][0].append(token)
else:
if not ignoreErrors:
tokens[tokenType][1].append(token)
return tokens["expected"] == tokens["received"]
def unescape_test(test):
def decode(inp):
return inp.decode("unicode-escape")
test["input"] = decode(test["input"])
for token in test["output"]:
if token == "ParseError":
continue
else:
token[1] = decode(token[1])
if len(token) > 2:
                for key, value in token[2].items():
del token[2][key]
token[2][decode(key)] = decode(value)
return test
def runTokenizerTest(test):
#XXX - move this out into the setup function
#concatenate all consecutive character tokens into a single token
if 'doubleEscaped' in test:
test = unescape_test(test)
expected = concatenateCharacterTokens(test['output'])
if 'lastStartTag' not in test:
test['lastStartTag'] = None
outBuffer = cStringIO.StringIO()
stdout = sys.stdout
sys.stdout = outBuffer
parser = TokenizerTestParser(test['initialState'],
test['lastStartTag'])
tokens = parser.parse(test['input'])
tokens = concatenateCharacterTokens(tokens)
received = normalizeTokens(tokens)
errorMsg = u"\n".join(["\n\nInitial state:",
test['initialState'] ,
"\nInput:", unicode(test['input']),
"\nExpected:", unicode(expected),
"\nreceived:", unicode(tokens)])
errorMsg = errorMsg.encode("utf-8")
ignoreErrorOrder = test.get('ignoreErrorOrder', False)
assert tokensMatch(expected, received, ignoreErrorOrder), errorMsg
def _doCapitalize(match):
return match.group(1).upper()
_capitalizeRe = re.compile(r"\W+(\w)").sub
def capitalize(s):
s = s.lower()
s = _capitalizeRe(_doCapitalize, s)
return s
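# e.g. capitalize("Data state") -> "dataState", the attribute-name form that
# TokenizerTestParser.parse() looks up on HTMLTokenizer (illustrative note).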
def test_tokenizer():
for filename in html5lib_test_files('tokenizer', '*.test'):
tests = json.load(file(filename))
testName = os.path.basename(filename).replace(".test","")
if 'tests' in tests:
for index,test in enumerate(tests['tests']):
if 'initialStates' not in test:
test["initialStates"] = ["Data state"]
for initialState in test["initialStates"]:
test["initialState"] = capitalize(initialState)
yield runTokenizerTest, test
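# A minimal, illustrative sanity check for the helpers above (not part of
# the upstream suite): adjacent Character tokens should collapse into one,
# while ParseError markers are preserved as-is.
if __name__ == "__main__":
    sample = [[u"Character", u"fo"], [u"Character", u"o"], u"ParseError",
              [u"Character", u"!"]]
    expected = [[u"Character", u"foo"], u"ParseError", [u"Character", u"!"]]
    assert concatenateCharacterTokens(sample) == expected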
| wangtaoking1/found_website | 项目代码/html5lib/tests/test_tokenizer.py | Python | gpl-2.0 | 6,826 |
__all__ = ['Counter', 'deque', 'defaultdict', 'namedtuple', 'OrderedDict']
# For bootstrapping reasons, the collection ABCs are defined in _abcoll.py.
# They should however be considered an integral part of collections.py.
from _abcoll import *
import _abcoll
__all__ += _abcoll.__all__
from _collections import deque, defaultdict
from operator import itemgetter as _itemgetter
from keyword import iskeyword as _iskeyword
import sys as _sys
import heapq as _heapq
from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
################################################################################
### OrderedDict
################################################################################
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as regular dictionaries.
# The internal self.__map dict maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
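    # Schematically, after od['a'] = 1 and od['b'] = 2 the structure is the
    # circular list  root <-> link_a <-> link_b <-> root, where
    # link_a == [root, link_b, 'a'] and link_b == [link_a, root, 'b'],
    # and self.__map == {'a': link_a, 'b': link_b}.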
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. The signature is the same as
regular dictionaries, but keyword arguments are not recommended because
their insertion order is arbitrary.
'''
if len(args) > 1:
            raise TypeError('expected at most 1 argument, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, PREV=0, NEXT=1, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link at the end of the linked list,
# and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[PREV]
last[NEXT] = root[PREV] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, PREV=0, NEXT=1, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which gets
# removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[NEXT] = link_next
link_next[PREV] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
# Traverse the linked list in order.
NEXT, KEY = 1, 2
root = self.__root
curr = root[NEXT]
while curr is not root:
yield curr[KEY]
curr = curr[NEXT]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
# Traverse the linked list in reverse order.
PREV, KEY = 0, 2
root = self.__root
curr = root[PREV]
while curr is not root:
yield curr[KEY]
curr = curr[PREV]
def clear(self):
'od.clear() -> None. Remove all items from od.'
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
dict.clear(self)
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) pairs in od'
for k in self:
yield (k, self[k])
update = MutableMapping.update
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding
value. If key is not found, d is returned if given, otherwise KeyError
is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
key = next(reversed(self) if last else iter(self))
value = self.pop(key)
return key, value
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
If not specified, the value defaults to None.
'''
self = cls()
for key in iterable:
self[key] = value
return self
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
'od.__ne__(y) <==> od!=y'
return not self == other
# -- the following methods support python 3.x style dictionary views --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
################################################################################
### namedtuple
################################################################################
def namedtuple(typename, field_names, verbose=False, rename=False):
"""Returns a new subclass of tuple with named fields.
>>> Point = namedtuple('Point', 'x y')
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
>>> p[0] + p[1] # indexable like a plain tuple
33
>>> x, y = p # unpack like a regular tuple
>>> x, y
(11, 22)
    >>> p.x + p.y                       # fields also accessible by name
33
>>> d = p._asdict() # convert to a dictionary
>>> d['x']
11
>>> Point(**d) # convert from a dictionary
Point(x=11, y=22)
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
Point(x=100, y=22)
"""
# Parse and validate the field names. Validation serves two purposes,
# generating informative error messages and preventing template injection attacks.
if isinstance(field_names, basestring):
field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
field_names = tuple(map(str, field_names))
if rename:
names = list(field_names)
seen = set()
for i, name in enumerate(names):
if (not all(c.isalnum() or c=='_' for c in name) or _iskeyword(name)
or not name or name[0].isdigit() or name.startswith('_')
or name in seen):
names[i] = '_%d' % i
seen.add(name)
field_names = tuple(names)
for name in (typename,) + field_names:
if not all(c.isalnum() or c=='_' for c in name):
raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a keyword: %r' % name)
if name[0].isdigit():
raise ValueError('Type names and field names cannot start with a number: %r' % name)
seen_names = set()
for name in field_names:
if name.startswith('_') and not rename:
raise ValueError('Field names cannot start with an underscore: %r' % name)
if name in seen_names:
raise ValueError('Encountered duplicate field name: %r' % name)
seen_names.add(name)
# Create and fill-in the class template
numfields = len(field_names)
argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes
reprtxt = ', '.join('%s=%%r' % name for name in field_names)
template = '''class %(typename)s(tuple):
'%(typename)s(%(argtxt)s)' \n
__slots__ = () \n
_fields = %(field_names)r \n
def __new__(_cls, %(argtxt)s):
'Create new instance of %(typename)s(%(argtxt)s)'
return _tuple.__new__(_cls, (%(argtxt)s)) \n
@classmethod
def _make(cls, iterable, new=tuple.__new__, len=len):
'Make a new %(typename)s object from a sequence or iterable'
result = new(cls, iterable)
if len(result) != %(numfields)d:
raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
return result \n
def __repr__(self):
'Return a nicely formatted representation string'
return '%(typename)s(%(reprtxt)s)' %% self \n
def _asdict(self):
'Return a new OrderedDict which maps field names to their values'
return OrderedDict(zip(self._fields, self)) \n
def _replace(_self, **kwds):
'Return a new %(typename)s object replacing specified fields with new values'
result = _self._make(map(kwds.pop, %(field_names)r, _self))
if kwds:
raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
return result \n
def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
return tuple(self) \n\n''' % locals()
for i, name in enumerate(field_names):
template += " %s = _property(_itemgetter(%d), doc='Alias for field number %d')\n" % (name, i, i)
if verbose:
print template
# Execute the template string in a temporary namespace and
# support tracing utilities by setting a value for frame.f_globals['__name__']
namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
OrderedDict=OrderedDict, _property=property, _tuple=tuple)
try:
exec template in namespace
except SyntaxError, e:
raise SyntaxError(e.message + ':\n' + template)
result = namespace[typename]
# For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created.  Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython).
try:
result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
return result
########################################################################
### Counter
########################################################################
class Counter(dict):
'''Dict subclass for counting hashable items. Sometimes called a bag
or multiset. Elements are stored as dictionary keys and their counts
are stored as dictionary values.
>>> c = Counter('abcdeabcdabcaba') # count elements from a string
>>> c.most_common(3) # three most common elements
[('a', 5), ('b', 4), ('c', 3)]
>>> sorted(c) # list all unique elements
['a', 'b', 'c', 'd', 'e']
>>> ''.join(sorted(c.elements())) # list elements with repetitions
'aaaaabbbbcccdde'
>>> sum(c.values()) # total of all counts
15
>>> c['a'] # count of letter 'a'
5
>>> for elem in 'shazam': # update counts from an iterable
... c[elem] += 1 # by adding 1 to each element's count
>>> c['a'] # now there are seven 'a'
7
>>> del c['b'] # remove all 'b'
>>> c['b'] # now there are zero 'b'
0
>>> d = Counter('simsalabim') # make another counter
>>> c.update(d) # add in the second counter
>>> c['a'] # now there are nine 'a'
9
>>> c.clear() # empty the counter
>>> c
Counter()
Note: If a count is set to zero or reduced to zero, it will remain
in the counter until the entry is deleted or the counter is cleared:
>>> c = Counter('aaabbc')
>>> c['b'] -= 2 # reduce the count of 'b' by two
>>> c.most_common() # 'b' is still in, but its count is zero
[('a', 3), ('c', 1), ('b', 0)]
'''
# References:
# http://en.wikipedia.org/wiki/Multiset
# http://www.gnu.org/software/smalltalk/manual-base/html_node/Bag.html
# http://www.demo2s.com/Tutorial/Cpp/0380__set-multiset/Catalog0380__set-multiset.htm
# http://code.activestate.com/recipes/259174/
# Knuth, TAOCP Vol. II section 4.6.3
def __init__(self, iterable=None, **kwds):
'''Create a new, empty Counter object. And if given, count elements
from an input iterable. Or, initialize the count from another mapping
of elements to their counts.
>>> c = Counter() # a new, empty counter
>>> c = Counter('gallahad') # a new counter from an iterable
>>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping
>>> c = Counter(a=4, b=2) # a new counter from keyword args
'''
super(Counter, self).__init__()
self.update(iterable, **kwds)
def __missing__(self, key):
'The count of elements not in the Counter is zero.'
# Needed so that self[missing_item] does not raise KeyError
return 0
def most_common(self, n=None):
'''List the n most common elements and their counts from the most
common to the least. If n is None, then list all element counts.
>>> Counter('abcdeabcdabcaba').most_common(3)
[('a', 5), ('b', 4), ('c', 3)]
'''
# Emulate Bag.sortedByCount from Smalltalk
if n is None:
return sorted(self.iteritems(), key=_itemgetter(1), reverse=True)
return _heapq.nlargest(n, self.iteritems(), key=_itemgetter(1))
def elements(self):
'''Iterator over elements repeating each as many times as its count.
>>> c = Counter('ABCABC')
>>> sorted(c.elements())
['A', 'A', 'B', 'B', 'C', 'C']
# Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1
>>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
>>> product = 1
>>> for factor in prime_factors.elements(): # loop over factors
... product *= factor # and multiply them
>>> product
1836
Note, if an element's count has been set to zero or is a negative
number, elements() will ignore it.
'''
# Emulate Bag.do from Smalltalk and Multiset.begin from C++.
return _chain.from_iterable(_starmap(_repeat, self.iteritems()))
# Override dict methods where necessary
@classmethod
def fromkeys(cls, iterable, v=None):
# There is no equivalent method for counters because setting v=1
# means that no element can have a count greater than one.
raise NotImplementedError(
'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
def update(self, iterable=None, **kwds):
'''Like dict.update() but add counts instead of replacing them.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.update('witch') # add elements from another iterable
>>> d = Counter('watch')
>>> c.update(d) # add elements from another counter
>>> c['h'] # four 'h' in which, witch, and watch
4
'''
        # The regular dict.update() operation makes no sense here because its
        # replace behavior would mix some of the original, untouched counts
        # with the new ones, a mishmash with no straightforward interpretation
        # in most counting contexts.  Instead, we implement straight addition.
        # Both the inputs and outputs are allowed to contain zero and
        # negative counts.
if iterable is not None:
if isinstance(iterable, Mapping):
if self:
self_get = self.get
for elem, count in iterable.iteritems():
self[elem] = self_get(elem, 0) + count
else:
super(Counter, self).update(iterable) # fast path when counter is empty
else:
self_get = self.get
for elem in iterable:
self[elem] = self_get(elem, 0) + 1
if kwds:
self.update(kwds)
def subtract(self, iterable=None, **kwds):
'''Like dict.update() but subtracts counts instead of replacing them.
Counts can be reduced below zero. Both the inputs and outputs are
allowed to contain zero and negative counts.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.subtract('witch') # subtract elements from another iterable
>>> c.subtract(Counter('watch')) # subtract elements from another counter
>>> c['h'] # 2 in which, minus 1 in witch, minus 1 in watch
0
>>> c['w'] # 1 in which, minus 1 in witch, minus 1 in watch
-1
'''
if iterable is not None:
self_get = self.get
if isinstance(iterable, Mapping):
for elem, count in iterable.items():
self[elem] = self_get(elem, 0) - count
else:
for elem in iterable:
self[elem] = self_get(elem, 0) - 1
if kwds:
self.subtract(kwds)
def copy(self):
'Return a shallow copy.'
return self.__class__(self)
def __reduce__(self):
return self.__class__, (dict(self),)
def __delitem__(self, elem):
'Like dict.__delitem__() but does not raise KeyError for missing values.'
if elem in self:
super(Counter, self).__delitem__(elem)
def __repr__(self):
if not self:
return '%s()' % self.__class__.__name__
items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
return '%s({%s})' % (self.__class__.__name__, items)
# Multiset-style mathematical operations discussed in:
# Knuth TAOCP Volume II section 4.6.3 exercise 19
# and at http://en.wikipedia.org/wiki/Multiset
#
# Outputs guaranteed to only include positive counts.
#
# To strip negative and zero counts, add-in an empty counter:
# c += Counter()
def __add__(self, other):
'''Add counts from two counters.
>>> Counter('abbb') + Counter('bcc')
Counter({'b': 4, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
newcount = count + other[elem]
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count > 0:
result[elem] = count
return result
def __sub__(self, other):
        '''Subtract counts, keeping only results with positive counts.
>>> Counter('abbbc') - Counter('bccd')
Counter({'b': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
newcount = count - other[elem]
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count < 0:
result[elem] = 0 - count
return result
def __or__(self, other):
        '''Union is the maximum of the corresponding counts in either counter.
>>> Counter('abbb') | Counter('bcc')
Counter({'b': 3, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
other_count = other[elem]
newcount = other_count if count < other_count else count
if newcount > 0:
result[elem] = newcount
for elem, count in other.items():
if elem not in self and count > 0:
result[elem] = count
return result
def __and__(self, other):
        '''Intersection is the minimum of corresponding counts.
>>> Counter('abbb') & Counter('bcc')
Counter({'b': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem, count in self.items():
other_count = other[elem]
newcount = count if count < other_count else other_count
if newcount > 0:
result[elem] = newcount
return result
if __name__ == '__main__':
# verify that instances can be pickled
from cPickle import loads, dumps
Point = namedtuple('Point', 'x, y', True)
p = Point(x=10, y=20)
assert p == loads(dumps(p))
# test and demonstrate ability to override methods
class Point(namedtuple('Point', 'x y')):
__slots__ = ()
@property
def hypot(self):
return (self.x ** 2 + self.y ** 2) ** 0.5
def __str__(self):
return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot)
for p in Point(3, 4), Point(14, 5/7.):
print p
class Point(namedtuple('Point', 'x y')):
'Point class with optimized _make() and _replace() without error-checking'
__slots__ = ()
_make = classmethod(tuple.__new__)
def _replace(self, _map=map, **kwds):
return self._make(_map(kwds.get, ('x', 'y'), self))
print Point(11, 22)._replace(x=100)
Point3D = namedtuple('Point3D', Point._fields + ('z',))
print Point3D.__doc__
import doctest
TestResults = namedtuple('TestResults', 'failed attempted')
print TestResults(*doctest.testmod())
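    # A few extra, illustrative smoke tests for the classes defined above
    # (assumption: executed as a script under Python 2).
    od = OrderedDict([('a', 1), ('b', 2)])
    od['c'] = 3
    assert od.keys() == ['a', 'b', 'c']       # insertion order preserved
    assert od.popitem() == ('c', 3)           # LIFO by default
    cnt = Counter('abracadabra')
    assert cnt.most_common(1) == [('a', 5)]
    assert (cnt - Counter('aaaa'))['a'] == 1  # only positive counts survive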
| Symmetry-Innovations-Pty-Ltd/Python-2.7-for-QNX6.5.0-x86 | usr/pkg/lib/python2.7/collections.py | Python | mit | 25,363 |
"""Utilities to support packages."""
# NOTE: This module must remain compatible with Python 2.3, as it is shared
# by setuptools for distribution with Python 2.3 and up.
import os
import sys
import imp
import os.path
from types import ModuleType
from org.python.core import imp as _imp, BytecodeLoader
__all__ = [
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
'walk_packages', 'iter_modules',
'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
]
# Equivalent to CPython's pkgutil.read_code, except that we need different
# arguments to pass into our underlying imp implementation, accessed via
# _imp here.
def read_jython_code(fullname, file, filename):
data = _imp.readCode(filename, file, False)
return BytecodeLoader.makeCode(fullname + "$py", data, filename)
def simplegeneric(func):
"""Make a trivial single-dispatch generic function"""
registry = {}
def wrapper(*args, **kw):
ob = args[0]
try:
cls = ob.__class__
except AttributeError:
cls = type(ob)
try:
mro = cls.__mro__
except AttributeError:
try:
class cls(cls, object):
pass
mro = cls.__mro__[1:]
except TypeError:
mro = object, # must be an ExtensionClass or some such :(
for t in mro:
if t in registry:
return registry[t](*args, **kw)
else:
return func(*args, **kw)
try:
wrapper.__name__ = func.__name__
except (TypeError, AttributeError):
pass # Python 2.3 doesn't allow functions to be renamed
def register(typ, func=None):
if func is None:
return lambda f: register(typ, f)
registry[typ] = func
return func
wrapper.__dict__ = func.__dict__
wrapper.__doc__ = func.__doc__
wrapper.register = register
return wrapper
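# A minimal usage sketch for simplegeneric (illustrative only; the names
# below are hypothetical and not defined in this module):
#
#     @simplegeneric
#     def pretty(obj):
#         return repr(obj)
#
#     def pretty_list(obj):
#         return "[%s]" % ", ".join(pretty(x) for x in obj)
#     pretty.register(list, pretty_list)
#
#     pretty([1, 2])    # dispatches on type(args[0]) -> "[1, 2]"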
def walk_packages(path=None, prefix='', onerror=None):
"""Yields (module_loader, name, ispkg) for all modules recursively
on path, or, if path is None, all accessible modules.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
Note that this function must import all *packages* (NOT all
modules!) on the given path, in order to access the __path__
attribute to find submodules.
'onerror' is a function which gets called with one argument (the
name of the package which was being imported) if any exception
occurs while trying to import a package. If no onerror function is
supplied, ImportErrors are caught and ignored, while all other
exceptions are propagated, terminating the search.
Examples:
# list all modules python can access
walk_packages()
# list all submodules of ctypes
walk_packages(ctypes.__path__, ctypes.__name__+'.')
"""
def seen(p, m={}):
if p in m:
return True
m[p] = True
for importer, name, ispkg in iter_modules(path, prefix):
yield importer, name, ispkg
if ispkg:
try:
__import__(name)
except ImportError:
if onerror is not None:
onerror(name)
except Exception:
if onerror is not None:
onerror(name)
else:
raise
else:
path = getattr(sys.modules[name], '__path__', None) or []
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
for item in walk_packages(path, name+'.', onerror):
yield item
def iter_modules(path=None, prefix=''):
"""Yields (module_loader, name, ispkg) for all submodules on path,
or, if path is None, all top-level modules on sys.path.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
"""
if path is None:
importers = iter_importers()
else:
importers = map(get_importer, path)
yielded = {}
for i in importers:
for name, ispkg in iter_importer_modules(i, prefix):
if name not in yielded:
yielded[name] = 1
yield i, name, ispkg
#@simplegeneric
def iter_importer_modules(importer, prefix=''):
if not hasattr(importer, 'iter_modules'):
return []
return importer.iter_modules(prefix)
iter_importer_modules = simplegeneric(iter_importer_modules)
class ImpImporter:
"""PEP 302 Importer that wraps Python's "classic" import algorithm
ImpImporter(dirname) produces a PEP 302 importer that searches that
directory. ImpImporter(None) produces a PEP 302 importer that searches
the current sys.path, plus any modules that are frozen or built-in.
Note that ImpImporter does not currently support being used by placement
on sys.meta_path.
"""
def __init__(self, path=None):
self.path = path
def find_module(self, fullname, path=None):
# Note: we ignore 'path' argument since it is only used via meta_path
subname = fullname.split(".")[-1]
if subname != fullname and self.path is None:
return None
if self.path is None:
path = None
else:
path = [os.path.realpath(self.path)]
try:
file, filename, etc = imp.find_module(subname, path)
except ImportError:
return None
return ImpLoader(fullname, file, filename, etc)
def iter_modules(self, prefix=''):
if self.path is None or not os.path.isdir(self.path):
return
yielded = {}
import inspect
filenames = os.listdir(self.path)
filenames.sort() # handle packages before same-named modules
for fn in filenames:
modname = inspect.getmodulename(fn)
if modname=='__init__' or modname in yielded:
continue
path = os.path.join(self.path, fn)
ispkg = False
if not modname and os.path.isdir(path) and '.' not in fn:
modname = fn
for fn in os.listdir(path):
subname = inspect.getmodulename(fn)
if subname=='__init__':
ispkg = True
break
else:
continue # not a package
if modname and '.' not in modname:
yielded[modname] = 1
yield prefix + modname, ispkg
class ImpLoader:
"""PEP 302 Loader that wraps Python's "classic" import algorithm
"""
code = source = None
def __init__(self, fullname, file, filename, etc):
self.file = file
self.filename = filename
self.fullname = fullname
self.etc = etc
def load_module(self, fullname):
self._reopen()
try:
mod = imp.load_module(fullname, self.file, self.filename, self.etc)
finally:
if self.file:
self.file.close()
# Note: we don't set __loader__ because we want the module to look
# normal; i.e. this is just a wrapper for standard import machinery
return mod
def get_data(self, pathname):
f = open(pathname, "rb")
try:
return f.read()
finally:
f.close()
def _reopen(self):
if self.file and self.file.closed:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self.file = open(self.filename, 'rU')
elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
self.file = open(self.filename, 'rb')
def _fix_name(self, fullname):
if fullname is None:
fullname = self.fullname
elif fullname != self.fullname:
raise ImportError("Loader for module %s cannot handle "
"module %s" % (self.fullname, fullname))
return fullname
def is_package(self, fullname):
fullname = self._fix_name(fullname)
return self.etc[2]==imp.PKG_DIRECTORY
def get_code(self, fullname=None):
fullname = self._fix_name(fullname)
if self.code is None:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
source = self.get_source(fullname)
self.code = compile(source, self.filename, 'exec')
elif mod_type==imp.PY_COMPILED:
self._reopen()
try:
self.code = read_jython_code(fullname, self.file, self.filename)
finally:
self.file.close()
elif mod_type==imp.PKG_DIRECTORY:
self.code = self._get_delegate().get_code()
return self.code
def get_source(self, fullname=None):
fullname = self._fix_name(fullname)
if self.source is None:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self._reopen()
try:
self.source = self.file.read()
finally:
self.file.close()
elif mod_type==imp.PY_COMPILED:
if os.path.exists(self.filename[:-1]):
f = open(self.filename[:-1], 'rU')
try:
self.source = f.read()
finally:
f.close()
elif mod_type==imp.PKG_DIRECTORY:
self.source = self._get_delegate().get_source()
return self.source
def _get_delegate(self):
return ImpImporter(self.filename).find_module('__init__')
def get_filename(self, fullname=None):
fullname = self._fix_name(fullname)
mod_type = self.etc[2]
if self.etc[2]==imp.PKG_DIRECTORY:
return self._get_delegate().get_filename()
elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
return self.filename
return None
try:
import zipimport
from zipimport import zipimporter
def iter_zipimport_modules(importer, prefix=''):
dirlist = zipimport._zip_directory_cache[importer.archive].keys()
dirlist.sort()
_prefix = importer.prefix
plen = len(_prefix)
yielded = {}
import inspect
for fn in dirlist:
if not fn.startswith(_prefix):
continue
fn = fn[plen:].split(os.sep)
if len(fn)==2 and fn[1].startswith('__init__.py'):
if fn[0] not in yielded:
yielded[fn[0]] = 1
yield fn[0], True
if len(fn)!=1:
continue
modname = inspect.getmodulename(fn[0])
if modname=='__init__':
continue
if modname and '.' not in modname and modname not in yielded:
yielded[modname] = 1
yield prefix + modname, False
iter_importer_modules.register(zipimporter, iter_zipimport_modules)
except ImportError:
pass
def get_importer(path_item):
"""Retrieve a PEP 302 importer for the given path item
The returned importer is cached in sys.path_importer_cache
if it was newly created by a path hook.
If there is no importer, a wrapper around the basic import
machinery is returned. This wrapper is never inserted into
the importer cache (None is inserted instead).
The cache (or part of it) can be cleared manually if a
rescan of sys.path_hooks is necessary.
"""
try:
importer = sys.path_importer_cache[path_item]
except KeyError:
for path_hook in sys.path_hooks:
try:
importer = path_hook(path_item)
break
except ImportError:
pass
else:
importer = None
sys.path_importer_cache.setdefault(path_item, importer)
if importer is None:
try:
importer = ImpImporter(path_item)
except ImportError:
importer = None
return importer
def iter_importers(fullname=""):
"""Yield PEP 302 importers for the given module name
If fullname contains a '.', the importers will be for the package
containing fullname, otherwise they will be importers for sys.meta_path,
sys.path, and Python's "classic" import machinery, in that order. If
the named module is in a package, that package is imported as a side
effect of invoking this function.
Non PEP 302 mechanisms (e.g. the Windows registry) used by the
standard import machinery to find files in alternative locations
are partially supported, but are searched AFTER sys.path. Normally,
these locations are searched BEFORE sys.path, preventing sys.path
entries from shadowing them.
For this to cause a visible difference in behaviour, there must
be a module or package name that is accessible via both sys.path
and one of the non PEP 302 file system mechanisms. In this case,
the emulation will find the former version, while the builtin
import mechanism will find the latter.
Items of the following types can be affected by this discrepancy:
imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
"""
if fullname.startswith('.'):
raise ImportError("Relative module names not supported")
if '.' in fullname:
# Get the containing package's __path__
pkg = '.'.join(fullname.split('.')[:-1])
if pkg not in sys.modules:
__import__(pkg)
path = getattr(sys.modules[pkg], '__path__', None) or []
else:
for importer in sys.meta_path:
yield importer
path = sys.path
for item in path:
yield get_importer(item)
if '.' not in fullname:
yield ImpImporter()
def get_loader(module_or_name):
"""Get a PEP 302 "loader" object for module_or_name
If the module or package is accessible via the normal import
mechanism, a wrapper around the relevant part of that machinery
is returned. Returns None if the module cannot be found or imported.
If the named module is not already imported, its containing package
(if any) is imported, in order to establish the package __path__.
This function uses iter_importers(), and is thus subject to the same
limitations regarding platform-specific special import locations such
as the Windows registry.
"""
if module_or_name in sys.modules:
module_or_name = sys.modules[module_or_name]
if isinstance(module_or_name, ModuleType):
module = module_or_name
loader = getattr(module, '__loader__', None)
if loader is not None:
return loader
fullname = module.__name__
elif module_or_name == sys:
# Jython sys is not a real module; fake it here for now since
# making it a module requires a fair amount of decoupling from
# PySystemState
fullname = "sys"
else:
fullname = module_or_name
return find_loader(fullname)
def find_loader(fullname):
"""Find a PEP 302 "loader" object for fullname
If fullname contains dots, path must be the containing package's __path__.
Returns None if the module cannot be found or imported. This function uses
iter_importers(), and is thus subject to the same limitations regarding
platform-specific special import locations such as the Windows registry.
"""
for importer in iter_importers(fullname):
loader = importer.find_module(fullname)
if loader is not None:
return loader
return None
def extend_path(path, name):
"""Extend a package's path.
Intended use is to place the following code in a package's __init__.py:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
This will add to the package's __path__ all subdirectories of
directories on sys.path named after the package. This is useful
if one wants to distribute different parts of a single logical
package as multiple directories.
It also looks for *.pkg files beginning where * matches the name
argument. This feature is similar to *.pth files (see site.py),
except that it doesn't special-case lines starting with 'import'.
A *.pkg file is trusted at face value: apart from checking for
duplicates, all entries found in a *.pkg file are added to the
    path, regardless of whether they exist on the filesystem.  (This
is a feature.)
If the input path is not a list (as is the case for frozen
packages) it is returned unchanged. The input path is not
modified; an extended copy is returned. Items are only appended
to the copy at the end.
It is assumed that sys.path is a sequence. Items of sys.path that
are not (unicode or 8-bit) strings referring to existing
directories are ignored. Unicode items of sys.path that cause
errors when used as filenames may cause this function to raise an
exception (in line with os.path.isdir() behavior).
"""
if not isinstance(path, list):
# This could happen e.g. when this is called from inside a
# frozen package. Return the path unchanged in that case.
return path
pname = os.path.join(*name.split('.')) # Reconstitute as relative path
# Just in case os.extsep != '.'
sname = os.extsep.join(name.split('.'))
sname_pkg = sname + os.extsep + "pkg"
init_py = "__init__" + os.extsep + "py"
path = path[:] # Start with a copy of the existing path
for dir in sys.path:
if not isinstance(dir, basestring) or not os.path.isdir(dir):
continue
subdir = os.path.join(dir, pname)
# XXX This may still add duplicate entries to path on
# case-insensitive filesystems
initfile = os.path.join(subdir, init_py)
if subdir not in path and os.path.isfile(initfile):
path.append(subdir)
# XXX Is this the right thing for subpackages like zope.app?
# It looks for a file named "zope.app.pkg"
pkgfile = os.path.join(dir, sname_pkg)
if os.path.isfile(pkgfile):
try:
f = open(pkgfile)
except IOError, msg:
sys.stderr.write("Can't open %s: %s\n" %
(pkgfile, msg))
else:
try:
for line in f:
line = line.rstrip('\n')
if not line or line.startswith('#'):
continue
path.append(line) # Don't check for existence!
finally:
f.close()
return path
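# Illustrative only: when run as a script, list a handful of the top-level
# modules and packages visible through the PEP 302 emulation above.
if __name__ == '__main__':
    for _importer, _name, _ispkg in list(iter_modules())[:10]:
        print _name, '(package)' if _ispkg else '(module)'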
| zephyrplugins/zephyr | zephyr.plugin.jython/jython2.5.2rc3/Lib/pkgutil.py | Python | epl-1.0 | 19,027 |
from . import foo | asedunov/intellij-community | python/testData/completion/relativeFromImportInNamespacePackage2/nspkg1/a.after.py | Python | apache-2.0 | 17 |
from __future__ import unicode_literals
from django.apps import AppConfig
class ProfileConfig(AppConfig):
name = "profiles"
verbose_name = 'User Profiles'
def ready(self):
from . import signals # noqa
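# A minimal sketch of the module imported above (hypothetical
# "profiles/signals.py"; the Profile model and field names are assumptions):
#
#     from django.conf import settings
#     from django.db.models.signals import post_save
#     from django.dispatch import receiver
#
#     @receiver(post_save, sender=settings.AUTH_USER_MODEL)
#     def create_profile(sender, instance, created, **kwargs):
#         if created:
#             Profile.objects.create(user=instance)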
| ramaseshan/symptomchecker | symptomcheck/src/profiles/apps.py | Python | gpl-2.0 | 226 |
records = [select.query.decode(r) for r in records] | akosyakov/intellij-community | python/testData/joinLines/ListComprehension-after.py | Python | apache-2.0 | 51 |
class С:
def __init__(self, x=None):
if x is None:
self.foo = {
'A': {
'x': 0,
'y': 0,
},
}
else: # init was given the previous state
assert isinstance(x, С)
self.foo = {
'A': {
'x': x.f<caret>oo['A']['x'],
'y': x.foo['A']['y'],
},
} | mdanielwork/intellij-community | python/testData/refactoring/rename/renameSelfAndParameterAttribute.py | Python | apache-2.0 | 460 |
# Since this package contains a "django" module, this is required on Python 2.
from __future__ import absolute_import
import sys
import jinja2
from django.conf import settings
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.utils import six
from django.utils.module_loading import import_string
from .base import BaseEngine
from .utils import csrf_input_lazy, csrf_token_lazy
class Jinja2(BaseEngine):
app_dirname = 'jinja2'
def __init__(self, params):
params = params.copy()
options = params.pop('OPTIONS').copy()
super(Jinja2, self).__init__(params)
environment = options.pop('environment', 'jinja2.Environment')
environment_cls = import_string(environment)
options.setdefault('autoescape', True)
options.setdefault('loader', jinja2.FileSystemLoader(self.template_dirs))
options.setdefault('auto_reload', settings.DEBUG)
options.setdefault('undefined',
jinja2.DebugUndefined if settings.DEBUG else jinja2.Undefined)
self.env = environment_cls(**options)
def from_string(self, template_code):
return Template(self.env.from_string(template_code))
def get_template(self, template_name):
try:
return Template(self.env.get_template(template_name))
except jinja2.TemplateNotFound as exc:
six.reraise(
TemplateDoesNotExist,
TemplateDoesNotExist(exc.name, backend=self),
sys.exc_info()[2],
)
except jinja2.TemplateSyntaxError as exc:
new = TemplateSyntaxError(exc.args)
new.template_debug = get_exception_info(exc)
six.reraise(TemplateSyntaxError, new, sys.exc_info()[2])
class Template(object):
def __init__(self, template):
self.template = template
self.origin = Origin(
name=template.filename, template_name=template.name,
)
def render(self, context=None, request=None):
if context is None:
context = {}
if request is not None:
context['request'] = request
context['csrf_input'] = csrf_input_lazy(request)
context['csrf_token'] = csrf_token_lazy(request)
return self.template.render(context)
class Origin(object):
"""
A container to hold debug information as described in the template API
documentation.
"""
def __init__(self, name, template_name):
self.name = name
self.template_name = template_name
def get_exception_info(exception):
"""
Formats exception information for display on the debug page using the
structure described in the template API documentation.
"""
context_lines = 10
lineno = exception.lineno
lines = list(enumerate(exception.source.strip().split("\n"), start=1))
during = lines[lineno - 1][1]
total = len(lines)
top = max(0, lineno - context_lines - 1)
bottom = min(total, lineno + context_lines)
return {
'name': exception.filename,
'message': exception.message,
'source_lines': lines[top:bottom],
'line': lineno,
'before': '',
'during': during,
'after': '',
'total': total,
'top': top,
'bottom': bottom,
}
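# Sketch of how this backend is typically selected from settings.py
# (assumption: a standard Django TEMPLATES setting; the paths and the custom
# environment factory name are examples, not part of this module):
#
#     TEMPLATES = [{
#         'BACKEND': 'django.template.backends.jinja2.Jinja2',
#         'DIRS': [os.path.join(BASE_DIR, 'jinja2')],
#         'APP_DIRS': True,  # searches each app's "jinja2" subdirectory
#         'OPTIONS': {'environment': 'myproject.jinja2.environment'},
#     }]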
| Vvucinic/Wander | venv_2_7/lib/python2.7/site-packages/Django-1.9-py2.7.egg/django/template/backends/jinja2.py | Python | artistic-2.0 | 3,342 |
doctests = """
########### Tests mostly copied from test_listcomps.py ############
Test simple loop with conditional
>>> sum({i*i for i in range(100) if i&1 == 1})
166650
Test simple case
>>> {2*y + x + 1 for x in (0,) for y in (1,)}
set([3])
Test simple nesting
>>> list(sorted({(i,j) for i in range(3) for j in range(4)}))
[(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2), (1, 3), (2, 0), (2, 1), (2, 2), (2, 3)]
Test nesting with the inner expression dependent on the outer
>>> list(sorted({(i,j) for i in range(4) for j in range(i)}))
[(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)]
Make sure the induction variable is not exposed
>>> i = 20
>>> sum({i*i for i in range(100)})
328350
>>> i
20
Verify that syntax errors are raised for setcomps used as lvalues
>>> {y for y in (1,2)} = 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
>>> {y for y in (1,2)} += 10 # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: ...
Make a nested set comprehension that acts like set(range())
>>> def srange(n):
... return {i for i in range(n)}
>>> list(sorted(srange(10)))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Same again, only as a lambda expression instead of a function definition
>>> lrange = lambda n: {i for i in range(n)}
>>> list(sorted(lrange(10)))
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
Generators can call other generators:
>>> def grange(n):
... for x in {i for i in range(n)}:
... yield x
>>> list(sorted(grange(5)))
[0, 1, 2, 3, 4]
Make sure that None is a valid return value
>>> {None for i in range(10)}
set([None])
########### Tests for various scoping corner cases ############
Return lambdas that use the iteration variable as a default argument
>>> items = {(lambda i=i: i) for i in range(5)}
>>> {x() for x in items} == set(range(5))
True
Same again, only this time as a closure variable
>>> items = {(lambda: i) for i in range(5)}
>>> {x() for x in items}
set([4])
Another way to test that the iteration variable is local to the list comp
>>> items = {(lambda: i) for i in range(5)}
>>> i = 20
>>> {x() for x in items}
set([4])
And confirm that a closure can jump over the list comp scope
>>> items = {(lambda: y) for i in range(5)}
>>> y = 2
>>> {x() for x in items}
set([2])
We also repeat each of the above scoping tests inside a function
>>> def test_func():
... items = {(lambda i=i: i) for i in range(5)}
... return {x() for x in items}
>>> test_func() == set(range(5))
True
>>> def test_func():
... items = {(lambda: i) for i in range(5)}
... return {x() for x in items}
>>> test_func()
set([4])
>>> def test_func():
... items = {(lambda: i) for i in range(5)}
... i = 20
... return {x() for x in items}
>>> test_func()
set([4])
>>> def test_func():
... items = {(lambda: y) for i in range(5)}
... y = 2
... return {x() for x in items}
>>> test_func()
set([2])
"""
__test__ = {'doctests' : doctests}
def test_main(verbose=None):
import sys
from test import test_support
from test import test_setcomps
test_support.run_doctest(test_setcomps, verbose)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in range(len(counts)):
test_support.run_doctest(test_setcomps, verbose)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
if __name__ == "__main__":
test_main(verbose=True)
| teeple/pns_server | work/install/Python-2.7.4/Lib/test/test_setcomps.py | Python | gpl-2.0 | 3,847 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
def main(request, response):
headers = []
if 'Content-Type' in request.GET:
headers += [('Content-Type', request.GET['Content-Type'])]
with open('./resources/ahem/AHEM____.TTF') as f:
return 200, headers, f.read()
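# Illustrative request (hypothetical URL):
#     GET .../no_mime_type.py?Content-Type=font/ttf
# serves the Ahem font bytes with that Content-Type header; without the
# query parameter the response carries no Content-Type header at all.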
| benschulz/servo | tests/wpt/mozilla/tests/mozilla/resources/no_mime_type.py | Python | mpl-2.0 | 443 |
# (c) 2014, Brian Coca, Josh Drake, et al
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: memory
short_description: RAM backed, non persistent
description:
- RAM backed cache that is not persistent.
- This is the default used if no other plugin is specified.
- There are no options to configure.
version_added: historical
author: core team (@ansible-core)
'''
from ansible.plugins.cache import BaseCacheModule
class CacheModule(BaseCacheModule):
def __init__(self, *args, **kwargs):
self._cache = {}
def get(self, key):
return self._cache.get(key)
def set(self, key, value):
self._cache[key] = value
def keys(self):
return self._cache.keys()
def contains(self, key):
return key in self._cache
def delete(self, key):
del self._cache[key]
def flush(self):
self._cache = {}
def copy(self):
return self._cache.copy()
def __getstate__(self):
return self.copy()
def __setstate__(self, data):
self._cache = data
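# A minimal usage sketch (illustrative; assumes BaseCacheModule needs no
# constructor arguments beyond those forwarded here): the plugin is a thin
# wrapper over a dict, so everything below runs in-process with no persistence.
if __name__ == '__main__':
    cache = CacheModule()
    cache.set('facts', {'os': 'linux'})
    assert cache.contains('facts')
    assert cache.get('facts') == {'os': 'linux'}
    cache.flush()
    assert not cache.contains('facts')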
| e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/plugins/cache/memory.py | Python | bsd-3-clause | 1,272 |
# settings file for builds.
# if you want to have custom builds, copy this file to "localbuildsettings.py" and make changes there.
# possible fields:
# resourceUrlBase - optional - the URL base for external resources (all resources embedded in standard IITC)
# distUrlBase - optional - the base URL to use for update checks
# buildMobile - optional - if set, mobile builds are built with 'ant'. requires the Android SDK and appropriate mobile/local.properties file configured
# preBuild - optional - an array of strings to run as commands, via os.system, before building the scripts
# postBuild - optional - an array of strings to run as commands, via os.system, after all builds are complete
buildSettings = {
# local: use this build if you're not modifying external resources
# no external resources allowed - they're not needed any more
'local': {
'resourceUrlBase': 'http://localhost:8100',
'distUrlBase': 'http://localhost:8100',
},
# local8000: if you need to modify external resources, this build will load them from
# the web server at http://0.0.0.0:8000/dist
# (This shouldn't be required any more - all resources are embedded. but, it remains just in case some new feature
# needs external resources)
'local8000': {
'resourceUrlBase': 'http://0.0.0.0:8000/dist',
'distUrlBase': None,
},
# mobile: default entry that also builds the mobile .apk
# you will need to have the android-sdk installed, and the file mobile/local.properties created as required
'mobile': {
'resourceUrlBase': None,
'distUrlBase': None,
'buildMobile': 'debug',
},
# if you want to publish your own fork of the project, and host it on your own web site
# create a localbuildsettings.py file containing something similar to this
# note: Firefox+Greasemonkey require the distUrlBase to be "https" - they won't check for updates on regular "http" URLs
#'example': {
#    'resourceUrlBase': 'http://www.example.com/iitc/dist',
# 'distUrlBase': 'https://secure.example.com/iitc/dist',
#},
}
# defaultBuild - the name of the default build to use if none is specified on the build.py command line
# (in here as an example - it only works in localbuildsettings.py)
#defaultBuild = 'local'
| McBen/ingress-intel-total-conversion | buildsettings.py | Python | isc | 2,320 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import shutil
import sys
import dirtyjson as json
from ..decorators import linter
from ..parsers.base import ParserBase
@linter(
name="coala",
install=[
["pipx", "install", "--spec", "coala-bears", "coala"],
[sys.executable, "-m", "pip", "install", "-U", "coala-bears"],
],
help_cmd=["coala", "-h"],
run=["coala", "-C", "--json", "--log-json", "--limit-files", "5000"],
rundefault=["coala", "-C", "--json", "--log-json", "--limit-files", "5000"],
dotfiles=[".coafile"],
language="all",
autorun=True,
run_per_file=False,
concurrency=1,
)
class CoalaParser(ParserBase):
"""Parse json coala output."""
def install(self):
if not any(
dotfile.strip() in os.listdir(os.getcwd())
for dotfile in self.config.get("dotfiles")
):
config_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), "config")
)
dotfile_name = self.config.get("dotfiles")[0]
shutil.copyfile(
os.path.join(config_dir, dotfile_name),
os.path.join(os.getcwd(), dotfile_name),
)
def parse(self, output):
messages = set()
lint_data = [
msg
for category in json.loads(output).get("results", {}).values()
for msg in category
]
for msgdata in lint_data:
try:
msgbody = msgdata["message"]
for line in msgdata.get("affected_code", []):
path = line.get("file")
line = line.get("start", {}).get("line")
messages.add((path, line, msgbody))
except (ValueError, KeyError):
print("Invalid message: {0}".format(msgdata))
return messages
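
if __name__ == "__main__":
    # Hedged self-check with synthetic coala-like JSON (not captured from a
    # real run, and assuming ParserBase takes no constructor arguments):
    # demonstrates the (path, line, message) triples parse() emits.
    _sample = ('{"results": {"PyLintBear": [{"message": "unused import", '
               '"affected_code": [{"file": "a.py", "start": {"line": 3}}]}]}}')
    print(CoalaParser().parse(_sample))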
| guykisel/inline-plz | inlineplz/linters/coala.py | Python | isc | 1,918 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
""" This file defines the RoutingTree class which can be used for constructing
routing trees for route segments from the fpga_interchange.physical_netlist
class PhysicalBelPin/PhysicalSitePin/PhysicalSitePip/PhysicalPip.
Use of the RoutingTree requires having the DeviceResources class loaded for
the relevant part for the design. Use
interchange_capnp.Interchange.read_device_resources to load a device resource
file.
"""
def create_id_map(id_to_segment, segments):
""" Create or update dict from object ids of segments to segments. """
for segment in segments:
segment_id = id(segment)
assert segment_id not in id_to_segment
id_to_segment[segment_id] = segment
create_id_map(id_to_segment, segment.branches)
def check_tree(routing_tree, segment):
""" Recursively checks a routing tree.
Checks for:
- Circular routing trees
- Child segments are connected to their parents.
"""
# Check for circular routing tree
for _ in yield_branches(segment):
pass
# Ensure children are connected to parent.
root_resource = routing_tree.get_device_resource(segment)
for child in segment.branches:
child_resource = routing_tree.get_device_resource(child)
assert root_resource.is_connected(child_resource), (str(segment),
str(child),
root_resource,
child_resource)
check_tree(routing_tree, child)
def yield_branches(routing_branch):
""" Yield all routing branches starting from the given route segment.
This will yield the input route branch in addition to its children.
    An AssertionError will be raised if a circular route is detected.
"""
objs = set()
def descend(obj):
obj_id = id(obj)
assert obj_id not in objs
objs.add(obj_id)
yield obj
for seg in obj.branches:
for s in descend(seg):
yield s
for s in descend(routing_branch):
yield s
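# Hedged illustration (stub object, not an interchange physical-netlist type):
# any object exposing a `branches` list can be traversed, and the generator
# yields the root before its descendants, e.g.:
#
#     class _B(object):
#         def __init__(self, branches=()):
#             self.branches = list(branches)
#
#     len(list(yield_branches(_B([_B(), _B()]))))  # -> 3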
def sort_branches(branches):
""" Sort branches by the branch tuple.
The branch tuple is:
('bel_pin'/'site_pin'/'site_pip'/'pip', <site>/<tile>, ...)
so sorting in this way ensures that BEL pins are grouped, etc.
    This also canonicalizes the branch order, which makes comparing two trees
    easy: just normalize both trees and compare the results.
"""
branches.sort(key=lambda item: item.to_tuple())
def get_tuple_tree(root_branch):
""" Convert a rout branch in a two tuple. """
return root_branch.to_tuple(), tuple(
get_tuple_tree(branch) for branch in root_branch.branches)
class RoutingTree():
""" Utility class for managing stitching of a routing tree. """
def __init__(self, device_resources, site_types, stubs, sources):
# Check that no duplicate routing resources are present.
tuple_to_id = {}
for stub in stubs:
for branch in yield_branches(stub):
tup = branch.to_tuple()
assert tup not in tuple_to_id, tup
tuple_to_id[tup] = id(branch)
for source in sources:
for branch in yield_branches(source):
tup = branch.to_tuple()
assert tup not in tuple_to_id, tup
tuple_to_id[tup] = id(branch)
self.id_to_segment = {}
self.id_to_device_resource = {}
self.stubs = stubs
self.sources = sources
self.connections = None
# Populate id_to_segment and id_to_device_resource maps.
create_id_map(self.id_to_segment, self.stubs)
create_id_map(self.id_to_segment, self.sources)
for segment_id, segment in self.id_to_segment.items():
self.id_to_device_resource[
segment_id] = segment.get_device_resource(
site_types, device_resources)
# Verify initial input makes sense.
self.check_trees()
def segment_for_id(self, segment_id):
""" Get routing segment based on the object id of the routing segment. """
return self.id_to_segment[segment_id]
def normalize_tree(self):
""" Normalize the routing tree by sorted element. """
sort_branches(self.stubs)
sort_branches(self.sources)
for stub in self.stubs:
for branch in yield_branches(stub):
sort_branches(branch.branches)
for source in self.sources:
for branch in yield_branches(source):
sort_branches(branch.branches)
def get_tuple_tree(self):
""" Get tuple tree representation of the current routing tree.
This is suitable for equality checking if normalized with
normalize_tree.
"""
return (tuple(get_tuple_tree(stub) for stub in self.stubs),
tuple(get_tuple_tree(source) for source in self.sources))
def get_device_resource_for_id(self, segment_id):
""" Get the device resource that corresponds to the segment id given. """
return self.id_to_device_resource[segment_id]
def get_device_resource(self, segment):
""" Get the device resource that corresponds to the segment given. """
return self.id_to_device_resource[id(segment)]
def check_trees(self):
""" Check that the routing tree at and below obj is valid.
This method should be called after all route segments have been added
to the node cache.
"""
for stub in self.stubs:
check_tree(self, stub)
for source in self.sources:
assert self.get_device_resource(source).is_root(), source
check_tree(self, source)
def connections_for_segment_id(self, segment_id):
""" Yield all connection resources connected to segment id given. """
resource = self.id_to_device_resource[segment_id]
for site_wire in resource.site_wires():
yield site_wire
for node in resource.nodes():
yield node
def build_connections(self):
""" Create a dictionary of connection resources to segment ids. """
self.connections = {}
for segment_id in self.id_to_segment.keys():
for connection in self.connections_for_segment_id(segment_id):
if connection not in self.connections:
self.connections[connection] = set()
self.connections[connection].add(segment_id)
def get_connection(self, connection_resource):
""" Get list of segment ids connected to connection_resource. """
if self.connections is None:
self.build_connections()
return self.connections[connection_resource]
def reroot(self):
""" Determine which routing segments are roots and non-roots.
Repopulates stubs and sources list with new roots and non-root
segments.
"""
if self.connections is None:
self.build_connections()
segments = self.stubs + self.sources
self.stubs.clear()
self.sources.clear()
source_segment_ids = set()
        # Examine each connection and find the best root.
for segment_ids in self.connections.values():
root_priority = None
root = None
root_count = 0
for segment_id in segment_ids:
resource = self.get_device_resource_for_id(segment_id)
if resource.is_root():
possible_root_priority = resource.root_priority()
if root is None:
root_priority = possible_root_priority
root = segment_id
root_count = 1
elif possible_root_priority < root_priority:
root_priority = possible_root_priority
root = segment_id
root_count = 1
elif possible_root_priority == root_priority:
root_count += 1
if root is not None:
# Generate an error if multiple segments could be a root.
# This should only occur near IO pads. In most cases, the
# root should be the only Direction.Output BEL pin on the site
# wire.
assert root_count == 1
source_segment_ids.add(root)
for segment in segments:
if id(segment) in source_segment_ids:
self.sources.append(segment)
else:
self.stubs.append(segment)
def attach(self, parent_id, child_id):
""" Attach a child routing tree to the routing tree for parent. """
assert self.id_to_device_resource[parent_id].is_connected(
self.id_to_device_resource[child_id])
self.id_to_segment[parent_id].branches.append(
self.id_to_segment[child_id])
def check_count(self):
""" Verify that every segment is reachable from stubs and sources list.
This check ensures no routing segment is orphaned during processing.
"""
count = 0
for stub in self.stubs:
for _ in yield_branches(stub):
count += 1
for source in self.sources:
for _ in yield_branches(source):
count += 1
assert len(self.id_to_segment) == count
def attach_candidates(routing_tree, id_to_idx, stitched_stubs, objs_to_attach,
route_branch, visited):
""" Attach children of branches in the routing tree route_branch.
routing_tree : RoutingTree
A node cache that contains all routing branches in the net.
id_to_idx : dict object id to int
Map of object id to idx in a list of unstitched routing branches.
stitched_stubs : set of int
        Set of indices of stubs that have been stitched. Used to track which
stubs have been stitched into the tree, and verify stubs are not
stitched twice into the tree.
    objs_to_attach : dict of child object id to parent object id
        When attach_candidates finds a stub that should be stitched into the
        routing tree, rather than stitch it immediately, it records
        objs_to_attach[id(child)] = id(parent). This deferral enables the
        traversal of the input routing tree without modification.
After attach_candidates returns, elements of objs_to_attach should be
passed to routing_tree.attach to join the trees.
    route_branch : PhysicalBelPin/PhysicalSitePin/PhysicalSitePip/PhysicalPip
        Root of the routing tree to iterate over to identify candidates to
        attach to the routing tree.
visited : set of ids to routing branches.
"""
root_obj_id = id(route_branch)
assert root_obj_id not in id_to_idx
for branch in yield_branches(route_branch):
# Make sure each route branch is only visited once.
assert id(branch) not in visited
visited.add(id(branch))
for connection in routing_tree.connections_for_segment_id(id(branch)):
for segment_id in routing_tree.get_connection(connection):
if id(branch) == segment_id:
continue
if segment_id not in id_to_idx:
continue
# There should never be a loop because root_obj_id should not
# be in the id_to_idx map once it is stitched into another tree.
assert root_obj_id != segment_id
if not routing_tree.get_device_resource(branch).is_connected(
routing_tree.get_device_resource_for_id(segment_id)):
continue
idx = id_to_idx[segment_id]
if idx in stitched_stubs:
assert segment_id in objs_to_attach
proposed_parent = id(branch)
old_parent = objs_to_attach[segment_id]
assert old_parent == proposed_parent, (
str(routing_tree.segment_for_id(proposed_parent)),
str(routing_tree.segment_for_id(old_parent)),
str(routing_tree.segment_for_id(segment_id)))
else:
stitched_stubs.add(idx)
objs_to_attach[segment_id] = id(branch)
def attach_from_parents(routing_tree, id_to_idx, parents, visited):
""" Attach children routing tree starting from list of parent routing trees.
routing_tree : RoutingTree
A node cache that contains all routing branches in the net.
id_to_idx : dict object id to int
Map of object id to idx in a list of unstitched routing branches.
parents : list of PhysicalBelPin/PhysicalSitePin/PhysicalSitePip/PhysicalPip
Roots of routing tree to search for children trees.
visited : set of ids to routing branches.
    Returns set of indices of stitched stubs.
"""
objs_to_attach = {}
stitched_stubs = set()
for parent in parents:
attach_candidates(
routing_tree=routing_tree,
id_to_idx=id_to_idx,
stitched_stubs=stitched_stubs,
objs_to_attach=objs_to_attach,
route_branch=parent,
visited=visited)
for child_id, branch_id in objs_to_attach.items():
# The branch_id should not be in the id_to_idx map, because it should
# be an outstanding stub.
assert branch_id not in id_to_idx
# The child_id should be in the id_to_idx map, because it should be an
# outstanding stub.
assert child_id in id_to_idx
routing_tree.attach(branch_id, child_id)
stitched_stubs.add(id_to_idx[child_id])
del id_to_idx[child_id]
# Return the newly stitched stubs, so that they form the new parent list.
return stitched_stubs
def stitch_segments(device_resources, site_types, segments):
""" Stitch segments of the routing tree into trees rooted from net sources. """
routing_tree = RoutingTree(
device_resources, site_types, stubs=segments, sources=[])
routing_tree.reroot()
    # Create an id-to-idx map so that stitching can be deferred while walking
    # the trees.
id_to_idx = {}
for idx, stub in enumerate(routing_tree.stubs):
assert idx not in id_to_idx
id_to_idx[id(stub)] = idx
# Initial set of tree parents are just the sources
parents = routing_tree.sources
stitched_stubs = set()
# Track visited nodes, as it is expected to never visit a route branch
# more than once.
visited = set()
# Continue iterating until no more stubs are stitched.
while len(parents) > 0:
        # Starting from the parents of the current tree, add stubs that
        # descend from this set, and create a new set of parents from those
        # stubs.
newly_stitched_stubs = attach_from_parents(routing_tree, id_to_idx,
parents, visited)
# Mark the newly stitched stubs to be removed.
stitched_stubs |= newly_stitched_stubs
        # New set of parents built from the newly stitched stubs.
parents = [routing_tree.stubs[idx] for idx in newly_stitched_stubs]
# Remove stitched stubs from stub list
for idx in sorted(stitched_stubs, reverse=True):
del routing_tree.stubs[idx]
# Make sure new trees are sensible.
routing_tree.check_trees()
routing_tree.check_count()
return routing_tree.sources, routing_tree.stubs
def flatten_segments(segments):
""" Take a list of routing segments and flatten out any children. """
output = []
for segment in segments:
for branch in yield_branches(segment):
output.append(branch)
for segment in output:
segment.branches.clear()
return output
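
if __name__ == '__main__':
    # Hedged demonstration with a minimal stand-in segment type (hypothetical,
    # not an fpga_interchange class): flatten_segments() returns every branch
    # in traversal order with all child lists cleared.
    class _Seg(object):
        def __init__(self, name, branches=None):
            self.name = name
            self.branches = branches or []

    _root = _Seg('root', [_Seg('a', [_Seg('b')])])
    _flat = flatten_segments([_root])
    print([s.name for s in _flat])              # ['root', 'a', 'b']
    print(all(not s.branches for s in _flat))   # True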
| SymbiFlow/python-fpga-interchange | fpga_interchange/route_stitching.py | Python | isc | 16,298 |
#! /usr/bin/env python
# this script does not perform full installation,
# it is meant for use from Makefile
import sys, os.path, re
from distutils.core import setup
from distutils.extension import Extension
# check if configure has run
if not os.path.isfile('config.mak'):
print "please run ./configure && make first"
print "Note: setup.py is supposed to be run from Makefile"
sys.exit(1)
# load version
buf = open("configure.ac","r").read(256)
m = re.search("AC_INIT[(][^,]*,\s+([^)]*)[)]", buf)
ac_ver = m.group(1)
def getvar(name):
cf = open('config.mak').read()
m = re.search(r'^%s\s*=\s*(.*)' % name, cf, re.M)
return m.group(1).strip()
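# For illustration (value is synthetic): given a config.mak line such as
# "SUFFIX = -3.0", the call just below returns '-3.0'.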
sfx = getvar('SUFFIX')
share_dup_files = [
'sql/pgq/pgq.sql',
'sql/londiste/londiste.sql',
'sql/pgq_ext/pgq_ext.sql',
'sql/pgq_node/pgq_node.sql',
]
if os.path.isfile('sql/txid/txid.sql'):
share_dup_files.append('sql/txid/txid.sql')
# run actual setup
setup(
name = "skytools",
license = "BSD",
version = ac_ver,
maintainer = "Marko Kreen",
maintainer_email = "[email protected]",
url = "http://pgfoundry.org/projects/skytools/",
package_dir = {'': 'python'},
packages = ['skytools', 'londiste', 'pgq', 'pgq.cascade'],
data_files = [
('share/doc/skytools%s/conf' % sfx, [
'python/conf/wal-master.ini',
'python/conf/wal-slave.ini',
]),
('share/skytools' + sfx, share_dup_files)],
ext_modules=[Extension("skytools._cquoting", ['python/modules/cquoting.c'])],
)
| mpihlak/skytools-dev | setup_skytools.py | Python | isc | 1,526 |
"""
Based on :mod:`django.contrib.auth.tokens`. Supports the following settings:
:setting:`WALDO_REGISTRATION_TIMEOUT_DAYS`
The number of days a registration link will be valid before expiring. Default: 1.
:setting:`WALDO_EMAIL_TIMEOUT_DAYS`
The number of days an email change link will be valid before expiring. Default: 1.
"""
from hashlib import sha1
from datetime import date
from django.conf import settings
from django.utils.http import int_to_base36, base36_to_int
from django.contrib.auth.tokens import PasswordResetTokenGenerator
REGISTRATION_TIMEOUT_DAYS = getattr(settings, 'WALDO_REGISTRATION_TIMEOUT_DAYS', 1)
EMAIL_TIMEOUT_DAYS = getattr(settings, 'WALDO_EMAIL_TIMEOUT_DAYS', 1)
class RegistrationTokenGenerator(PasswordResetTokenGenerator):
"""Strategy object used to generate and check tokens for the user registration mechanism."""
def check_token(self, user, token):
"""Check that a registration token is correct for a given user."""
# If the user is active, the hash can't be valid.
if user.is_active:
return False
# Parse the token
try:
ts_b36, hash = token.split('-')
except ValueError:
return False
try:
ts = base36_to_int(ts_b36)
except ValueError:
return False
# Check that the timestamp and uid have not been tampered with.
if self._make_token_with_timestamp(user, ts) != token:
return False
# Check that the timestamp is within limit
if (self._num_days(self._today()) - ts) > REGISTRATION_TIMEOUT_DAYS:
return False
return True
def _make_token_with_timestamp(self, user, timestamp):
ts_b36 = int_to_base36(timestamp)
# By hashing on the internal state of the user and using state that is
# sure to change, we produce a hash that will be invalid as soon as it
# is used.
hash = sha1(settings.SECRET_KEY + unicode(user.id) + unicode(user.is_active) + user.last_login.strftime('%Y-%m-%d %H:%M:%S') + unicode(timestamp)).hexdigest()[::2]
return '%s-%s' % (ts_b36, hash)
registration_token_generator = RegistrationTokenGenerator()
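# Hedged usage sketch (assumes a configured Django project; values are
# illustrative): tokens have the form '<timestamp-base36>-<hash>' and only
# verify while the user is still inactive and within the timeout window.
#
#     token = registration_token_generator.make_token(user)   # e.g. 'gm-1a2b3c...'
#     registration_token_generator.check_token(user, token)   # True if valid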
class EmailTokenGenerator(PasswordResetTokenGenerator):
"""Strategy object used to generate and check tokens for a user email change mechanism."""
def make_token(self, user, email):
"""Returns a token that can be used once to do an email change for the given user and email."""
return self._make_token_with_timestamp(user, email, self._num_days(self._today()))
def check_token(self, user, email, token):
if email == user.email:
return False
# Parse the token
try:
ts_b36, hash = token.split('-')
except ValueError:
return False
try:
ts = base36_to_int(ts_b36)
except ValueError:
return False
# Check that the timestamp and uid have not been tampered with.
if self._make_token_with_timestamp(user, email, ts) != token:
return False
# Check that the timestamp is within limit
if (self._num_days(self._today()) - ts) > EMAIL_TIMEOUT_DAYS:
return False
return True
def _make_token_with_timestamp(self, user, email, timestamp):
ts_b36 = int_to_base36(timestamp)
hash = sha1(settings.SECRET_KEY + unicode(user.id) + user.email + email + unicode(timestamp)).hexdigest()[::2]
return '%s-%s' % (ts_b36, hash)
email_token_generator = EmailTokenGenerator()
| ithinksw/philo | philo/contrib/waldo/tokens.py | Python | isc | 3,279 |
#!/usr/bin/env python
import urllib.request
import re
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("url", help="the URL whose HTML you want to extract telephone numbers from", type=str)
args = parser.parse_args()
with urllib.request.urlopen(args.url) as response:
html = response.read().decode('utf-8')
# Naive, simple regex; can be further refined. Overinclusive in some respects
# (e.g., any 10-digit numerical string matches), no detection when a
# non-parentheses phone number comes first in a parenthetical clause, no real
# international support, no extension support, no letters-as-numbers support.
regex = re.compile(r'0?0?1?-?\(?[0-9]{3}\)?\s?-?[0-9]{3}-?[0-9]{4}')
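# Hedged note: findall() returns the raw matched substrings, so both
# '(555) 123-4567' and '555-123-4567' match this pattern, but so do bare
# 10-digit runs like '5551234567' embedded in longer numbers.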
print(regex.findall(html))
| ArthurZey/toyproblems | phone_number_extractor.py | Python | mit | 711 |
import difflib
import shutil
__author__ = 'Adam'
import unittest
import os
import useful
class TestFileContentProcedures(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_dir = "./tests/data/"
cls.text_file_name = "TextFile_UTF16_CRLF.txt"
cls.text_file_path = os.path.join(cls.test_dir, cls.text_file_name)
cls.text_file_encoding = "UTF-16BE"
cls.text_file_eol = "CRLF"
import codecs
with codecs.open(cls.text_file_path, 'rb',
encoding=cls.text_file_encoding) as f:
cls.text_file_contents = f.read()
cls.script_file_name = "ScriptFile_UTF8_LF.py"
cls.script_file_path = os.path.join(cls.test_dir, cls.script_file_name)
cls.script_file_encoding = "UTF-8"
cls.script_file_eol = "LF"
with codecs.open(cls.script_file_path, 'rb',
encoding=cls.script_file_encoding) as f:
cls.script_file_contents = f.read()
cls.set_contents = cls.text_file_contents
cls.set_name = "TestSetContents.txt"
cls.set_path = os.path.join(cls.test_dir, cls.set_name)
# diff testing
cls.diff_target_path = os.path.join(cls.test_dir, "ScriptFile_Copy.py")
shutil.copyfile(cls.script_file_path, cls.diff_target_path)
cls.diff_new_path = os.path.join(cls.test_dir,
"ScriptFile_Diff_Test.py")
with open(cls.diff_target_path, "rb") as f:
target_data = f.read().split("\n")
with open(cls.diff_new_path, "rb") as f:
new_data = f.read().split("\n")
diff_data = difflib.ndiff(target_data, new_data)
diff_data = list(diff_data)
cls.comp_diff_data = useful.make_comp_diff(diff_data)
@classmethod
def tearDownClass(cls):
# os.remove(cls.set_path)
# os.remove(cls.diff_target_path)
pass
'''
The system is required to be able to obtain the content of a file.
This test is successful if the content is matched as is with expected data.
'''
def test_get_file_contents(self):
from fsentity import FileSystemFile
script_file = FileSystemFile(self.script_file_path)
self.assertEquals(script_file.get_contents()[0],
self.script_file_contents)
text_file = FileSystemFile(self.text_file_path)
self.assertEquals(text_file.get_contents()[0], self.text_file_contents)
'''
The system must be able to set the contents of a file.
Test is successful if changes are made that match the expected outcome.
'''
def test_set_file_contents(self):
from fsentity import FileSystemDirectory
d = FileSystemDirectory(self.test_dir)
d.create_file(self.set_name, self.set_contents)
import codecs
with codecs.open(self.set_path, 'rb', encoding="utf-8") as f:
file_data = f.read()
# print file_data
self.assertEquals(file_data, self.set_contents)
'''
The system will need to update a file's contents from a differential
format.
The test is successful if the resulting file contents matches the result
of the original content with
a supplied delta.
'''
def test_set_file_from_diff(self):
from fsentity import FileSystemFile
target_file = FileSystemFile(self.diff_target_path)
diff_crc = FileSystemFile(self.diff_new_path).get_crc32()
self.assertTrue(target_file.set_from_comp_diff(self.comp_diff_data,
original_crc=diff_crc))
''' Identify byte encoding '''
def test_identify_encoding(self):
from fsentity import FileSystemFile
text_file = FileSystemFile(self.text_file_path)
self.assertEqual(
text_file.get_encoding().upper(),
self.text_file_encoding
)
script_file = FileSystemFile(self.script_file_path)
self.assertEqual(self.script_file_encoding,
script_file.get_encoding().upper())
''' Identify EOL format '''
def test_identify_line_ending(self):
from fsentity import FileSystemFile
f = FileSystemFile(self.text_file_path)
self.assertEqual(self.text_file_eol, f.get_line_ending()[0])
f = FileSystemFile(self.script_file_path)
self.assertEqual(self.script_file_eol, f.get_line_ending()[0])
''' ... code style? '''
def test_identify_format(self):
from fsentity import FileSystemFile
lang = FileSystemFile(self.script_file_path).get_programming_language()
self.assertEqual("Python", lang)
| Adam01/Cylinder-server | tests/test_file_content_procedures.py | Python | mit | 4,718 |
from axelrod import Player
import random
class Random(Player):
"""A player who randomly chooses between cooperating and defecting."""
name = 'Random'
def strategy(self, opponent):
return random.choice(['C','D'])
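
if __name__ == "__main__":
    # Hedged sanity check (assuming Player() needs no constructor arguments;
    # the opponent is unused by this strategy, so None stands in): repeated
    # calls only ever return 'C' or 'D'.
    player = Random()
    print(set(player.strategy(None) for _ in range(20)) <= {'C', 'D'})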
| drvinceknight/Axelrod | axelrod/strategies/rand.py | Python | mit | 235 |
import unittest
import copy
from scrapy.http import Headers
class HeadersTest(unittest.TestCase):
def test_basics(self):
h = Headers({'Content-Type': 'text/html', 'Content-Length': 1234})
assert h['Content-Type']
assert h['Content-Length']
self.assertRaises(KeyError, h.__getitem__, 'Accept')
self.assertEqual(h.get('Accept'), None)
self.assertEqual(h.getlist('Accept'), [])
self.assertEqual(h.get('Accept', '*/*'), '*/*')
self.assertEqual(h.getlist('Accept', '*/*'), ['*/*'])
self.assertEqual(h.getlist('Accept', ['text/html', 'images/jpeg']), ['text/html','images/jpeg'])
def test_single_value(self):
h = Headers()
h['Content-Type'] = 'text/html'
self.assertEqual(h['Content-Type'], 'text/html')
self.assertEqual(h.get('Content-Type'), 'text/html')
self.assertEqual(h.getlist('Content-Type'), ['text/html'])
def test_multivalue(self):
h = Headers()
h['X-Forwarded-For'] = hlist = ['ip1', 'ip2']
self.assertEqual(h['X-Forwarded-For'], 'ip2')
self.assertEqual(h.get('X-Forwarded-For'), 'ip2')
self.assertEqual(h.getlist('X-Forwarded-For'), hlist)
assert h.getlist('X-Forwarded-For') is not hlist
def test_encode_utf8(self):
h = Headers({u'key': u'\xa3'}, encoding='utf-8')
key, val = dict(h).items()[0]
assert isinstance(key, str), key
assert isinstance(val[0], str), val[0]
self.assertEqual(val[0], '\xc2\xa3')
def test_encode_latin1(self):
h = Headers({u'key': u'\xa3'}, encoding='latin1')
key, val = dict(h).items()[0]
self.assertEqual(val[0], '\xa3')
def test_encode_multiple(self):
h = Headers({u'key': [u'\xa3']}, encoding='utf-8')
key, val = dict(h).items()[0]
self.assertEqual(val[0], '\xc2\xa3')
def test_delete_and_contains(self):
h = Headers()
h['Content-Type'] = 'text/html'
assert 'Content-Type' in h
del h['Content-Type']
assert 'Content-Type' not in h
def test_setdefault(self):
h = Headers()
hlist = ['ip1', 'ip2']
olist = h.setdefault('X-Forwarded-For', hlist)
assert h.getlist('X-Forwarded-For') is not hlist
assert h.getlist('X-Forwarded-For') is olist
h = Headers()
olist = h.setdefault('X-Forwarded-For', 'ip1')
self.assertEqual(h.getlist('X-Forwarded-For'), ['ip1'])
assert h.getlist('X-Forwarded-For') is olist
def test_iterables(self):
idict = {'Content-Type': 'text/html', 'X-Forwarded-For': ['ip1', 'ip2']}
h = Headers(idict)
self.assertEqual(dict(h), {'Content-Type': ['text/html'], 'X-Forwarded-For': ['ip1', 'ip2']})
self.assertEqual(h.keys(), ['X-Forwarded-For', 'Content-Type'])
self.assertEqual(h.items(), [('X-Forwarded-For', ['ip1', 'ip2']), ('Content-Type', ['text/html'])])
self.assertEqual(list(h.iteritems()),
[('X-Forwarded-For', ['ip1', 'ip2']), ('Content-Type', ['text/html'])])
self.assertEqual(h.values(), ['ip2', 'text/html'])
def test_update(self):
h = Headers()
h.update({'Content-Type': 'text/html', 'X-Forwarded-For': ['ip1', 'ip2']})
self.assertEqual(h.getlist('Content-Type'), ['text/html'])
self.assertEqual(h.getlist('X-Forwarded-For'), ['ip1', 'ip2'])
def test_copy(self):
h1 = Headers({'header1': ['value1', 'value2']})
h2 = copy.copy(h1)
self.assertEqual(h1, h2)
self.assertEqual(h1.getlist('header1'), h2.getlist('header1'))
assert h1.getlist('header1') is not h2.getlist('header1')
assert isinstance(h2, Headers)
def test_appendlist(self):
h1 = Headers({'header1': 'value1'})
h1.appendlist('header1', 'value3')
self.assertEqual(h1.getlist('header1'), ['value1', 'value3'])
h1 = Headers()
h1.appendlist('header1', 'value1')
h1.appendlist('header1', 'value3')
self.assertEqual(h1.getlist('header1'), ['value1', 'value3'])
def test_setlist(self):
h1 = Headers({'header1': 'value1'})
self.assertEqual(h1.getlist('header1'), ['value1'])
h1.setlist('header1', ['value2', 'value3'])
self.assertEqual(h1.getlist('header1'), ['value2', 'value3'])
def test_setlistdefault(self):
h1 = Headers({'header1': 'value1'})
h1.setlistdefault('header1', ['value2', 'value3'])
h1.setlistdefault('header2', ['value2', 'value3'])
self.assertEqual(h1.getlist('header1'), ['value1'])
self.assertEqual(h1.getlist('header2'), ['value2', 'value3'])
def test_none_value(self):
h1 = Headers()
h1['foo'] = 'bar'
h1['foo'] = None
h1.setdefault('foo', 'bar')
self.assertEqual(h1.get('foo'), None)
self.assertEqual(h1.getlist('foo'), [])
| tashigaofei/BlogSpider | scrapy/tests/test_http_headers.py | Python | mit | 4,934 |
'''
mode     | description
r or rt  | open for reading in text mode
w or wt  | open for writing in text mode
a or at  | open for appending to the end of the file in text mode
rb       | open for reading in binary mode
wb       | open for writing in binary mode
ab       | open for appending to the end of the file in binary mode
'''
f = open("./py200_sample.txt", "w")
f.write("abcd")
f.close()
r = open("./py200_sample.txt", "r")
print("-" * 60)
print(r.readline())
r.close()
| JaeGyu/PythonEx_1 | p200_048.py | Python | mit | 497 |
from otp.ai.AIBaseGlobal import *
from pandac.PandaModules import *
from DistributedNPCToonBaseAI import *
import ToonDNA
from direct.task.Task import Task
from toontown.ai import DatabaseObject
from toontown.estate import ClosetGlobals
class DistributedNPCTailorAI(DistributedNPCToonBaseAI):
freeClothes = simbase.config.GetBool('free-clothes', 0)
housingEnabled = simbase.config.GetBool('want-housing', 1)
def __init__(self, air, npcId):
DistributedNPCToonBaseAI.__init__(self, air, npcId)
self.timedOut = 0
self.givesQuests = 0
self.customerDNA = None
self.customerId = None
return
def getTailor(self):
return 1
def delete(self):
taskMgr.remove(self.uniqueName('clearMovie'))
self.ignoreAll()
self.customerDNA = None
self.customerId = None
DistributedNPCToonBaseAI.delete(self)
return
def avatarEnter(self):
avId = self.air.getAvatarIdFromSender()
if not self.air.doId2do.has_key(avId):
self.notify.warning('Avatar: %s not found' % avId)
return
if self.isBusy():
self.freeAvatar(avId)
return
av = self.air.doId2do[avId]
self.customerDNA = ToonDNA.ToonDNA()
self.customerDNA.makeFromNetString(av.getDNAString())
self.customerId = avId
av.b_setDNAString(self.customerDNA.makeNetString())
self.acceptOnce(self.air.getAvatarExitEvent(avId), self.__handleUnexpectedExit, extraArgs=[avId])
flag = NPCToons.PURCHASE_MOVIE_START_BROWSE
if self.freeClothes:
flag = NPCToons.PURCHASE_MOVIE_START
if self.housingEnabled and self.isClosetAlmostFull(av):
flag = NPCToons.PURCHASE_MOVIE_START_NOROOM
elif self.air.questManager.hasTailorClothingTicket(av, self) == 1:
flag = NPCToons.PURCHASE_MOVIE_START
if self.housingEnabled and self.isClosetAlmostFull(av):
flag = NPCToons.PURCHASE_MOVIE_START_NOROOM
elif self.air.questManager.hasTailorClothingTicket(av, self) == 2:
flag = NPCToons.PURCHASE_MOVIE_START
if self.housingEnabled and self.isClosetAlmostFull(av):
flag = NPCToons.PURCHASE_MOVIE_START_NOROOM
self.sendShoppingMovie(avId, flag)
DistributedNPCToonBaseAI.avatarEnter(self)
def isClosetAlmostFull(self, av):
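        # Tops are stored as 4 ints each (top texture/color, sleeve
        # texture/color) and bottoms as 2 ints each (texture, color), as the
        # addToClothesTopsList/addToClothesBottomsList calls below show,
        # hence the per-item strides here.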
numClothes = len(av.clothesTopsList) / 4 + len(av.clothesBottomsList) / 2
if numClothes >= av.maxClothes - 1:
return 1
return 0
def sendShoppingMovie(self, avId, flag):
self.busy = avId
self.sendUpdate('setMovie', [flag,
self.npcId,
avId,
ClockDelta.globalClockDelta.getRealNetworkTime()])
taskMgr.doMethodLater(NPCToons.TAILOR_COUNTDOWN_TIME, self.sendTimeoutMovie, self.uniqueName('clearMovie'))
def rejectAvatar(self, avId):
self.notify.warning('rejectAvatar: should not be called by a Tailor!')
def sendTimeoutMovie(self, task):
toon = self.air.doId2do.get(self.customerId)
if toon != None and self.customerDNA:
toon.b_setDNAString(self.customerDNA.makeNetString())
self.timedOut = 1
self.sendUpdate('setMovie', [NPCToons.PURCHASE_MOVIE_TIMEOUT,
self.npcId,
self.busy,
ClockDelta.globalClockDelta.getRealNetworkTime()])
self.sendClearMovie(None)
return Task.done
def sendClearMovie(self, task):
self.ignore(self.air.getAvatarExitEvent(self.busy))
self.customerDNA = None
self.customerId = None
self.busy = 0
self.timedOut = 0
self.sendUpdate('setMovie', [NPCToons.PURCHASE_MOVIE_CLEAR,
self.npcId,
0,
ClockDelta.globalClockDelta.getRealNetworkTime()])
self.sendUpdate('setCustomerDNA', [0, ''])
return Task.done
def completePurchase(self, avId):
self.busy = avId
self.sendUpdate('setMovie', [NPCToons.PURCHASE_MOVIE_COMPLETE,
self.npcId,
avId,
ClockDelta.globalClockDelta.getRealNetworkTime()])
self.sendClearMovie(None)
return
def setDNA(self, blob, finished, which):
avId = self.air.getAvatarIdFromSender()
if avId != self.customerId:
if self.customerId:
self.air.writeServerEvent('suspicious', avId, 'DistributedNPCTailorAI.setDNA customer is %s' % self.customerId)
self.notify.warning('customerId: %s, but got setDNA for: %s' % (self.customerId, avId))
return
testDNA = ToonDNA.ToonDNA()
if not testDNA.isValidNetString(blob):
self.air.writeServerEvent('suspicious', avId, 'DistributedNPCTailorAI.setDNA: invalid dna: %s' % blob)
return
if self.air.doId2do.has_key(avId):
av = self.air.doId2do[avId]
if finished == 2 and which > 0:
if self.air.questManager.removeClothingTicket(av, self) == 1 or self.freeClothes:
av.b_setDNAString(blob)
if which & ClosetGlobals.SHIRT:
if av.addToClothesTopsList(self.customerDNA.topTex, self.customerDNA.topTexColor, self.customerDNA.sleeveTex, self.customerDNA.sleeveTexColor) == 1:
av.b_setClothesTopsList(av.getClothesTopsList())
else:
self.notify.warning('NPCTailor: setDNA() - unable to save old tops - we exceeded the tops list length')
if which & ClosetGlobals.SHORTS:
if av.addToClothesBottomsList(self.customerDNA.botTex, self.customerDNA.botTexColor) == 1:
av.b_setClothesBottomsList(av.getClothesBottomsList())
else:
self.notify.warning('NPCTailor: setDNA() - unable to save old bottoms - we exceeded the bottoms list length')
self.air.writeServerEvent('boughtTailorClothes', avId, '%s|%s|%s' % (self.doId, which, self.customerDNA.asTuple()))
else:
self.air.writeServerEvent('suspicious', avId, 'DistributedNPCTailorAI.setDNA bogus clothing ticket')
self.notify.warning('NPCTailor: setDNA() - client tried to purchase with bogus clothing ticket!')
if self.customerDNA:
av.b_setDNAString(self.customerDNA.makeNetString())
elif finished == 1:
if self.customerDNA:
av.b_setDNAString(self.customerDNA.makeNetString())
else:
self.sendUpdate('setCustomerDNA', [avId, blob])
else:
self.notify.warning('no av for avId: %d' % avId)
if self.timedOut == 1 or finished == 0:
return
if self.busy == avId:
taskMgr.remove(self.uniqueName('clearMovie'))
self.completePurchase(avId)
elif self.busy:
self.air.writeServerEvent('suspicious', avId, 'DistributedNPCTailorAI.setDNA busy with %s' % self.busy)
self.notify.warning('setDNA from unknown avId: %s busy: %s' % (avId, self.busy))
def __handleUnexpectedExit(self, avId):
self.notify.warning('avatar:' + str(avId) + ' has exited unexpectedly')
if self.customerId == avId:
toon = self.air.doId2do.get(avId)
if toon == None:
toon = DistributedToonAI.DistributedToonAI(self.air)
toon.doId = avId
if self.customerDNA:
toon.b_setDNAString(self.customerDNA.makeNetString())
db = DatabaseObject.DatabaseObject(self.air, avId)
db.storeObject(toon, ['setDNAString'])
else:
self.notify.warning('invalid customer avId: %s, customerId: %s ' % (avId, self.customerId))
if self.busy == avId:
self.sendClearMovie(None)
else:
self.notify.warning('not busy with avId: %s, busy: %s ' % (avId, self.busy))
return
| ksmit799/Toontown-Source | toontown/toon/DistributedNPCTailorAI.py | Python | mit | 8,113 |
# -*- coding: utf-8 -*-
import os
import time
import logging
import string
import requests
import unicodedata
import base64
try: import cPickle as pickle
except: import pickle
import datetime
from django.utils import timezone
import json
from pprint import pprint
from django.shortcuts import render_to_response, redirect, render
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden
logger = logging.getLogger(__name__)
import boto.ec2
import boto.ec2.cloudwatch
from django.contrib.auth.models import User
from userprofile.models import Profile as userprofile
from userprofile.views import _log_user_activity
from amazon import s3_funcs
from amazon import s3_funcs_shortcuts
from django.contrib.auth.decorators import login_required
from django.template.defaultfilters import filesizeformat, upper
from django.contrib.humanize.templatetags.humanize import naturalday
from cloudly.templatetags.cloud_extras import clean_ps_command
from operator import itemgetter, attrgetter, methodcaller
from cloudly.templatetags.cloud_extras import clear_filename, get_file_extension
from vms.models import Cache
import decimal
from django.db.models.base import ModelState
import pymongo
from pymongo import MongoClient
from pymongo import ASCENDING, DESCENDING
client = MongoClient('mongo', 27017)
mongo = client.cloudly
def date_handler(obj):
return obj.isoformat() if hasattr(obj, 'isoformat') else obj
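# For example, json.dumps({'t': datetime.datetime(2015, 1, 2)},
# default=date_handler) serializes the datetime as "2015-01-02T00:00:00"
# instead of raising a TypeError.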
@login_required()
def update_session(request):
for value in request.POST:
if(value != 'secret'):
request.session[value] = request.POST[value]
request.session.modified = True
return render_to_response('ajax_null.html', locals())
@login_required()
def aws_vm_view(request,vm_name):
print '-- aws_vm_view'
print request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
user.last_login = datetime.datetime.now()
user.save()
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
ip = request.META['REMOTE_ADDR']
_log_user_activity(profile,"click","/aws/"+vm_name,"aws_vm_view",ip=ip)
vms_cache = Cache.objects.get(user=user)
vm_cache = vms_cache.vms_response
vm_cache = base64.b64decode(vm_cache)
try:
vm_cache = pickle.loads(vm_cache)[vm_name]
except:
return HttpResponse("XXX " + vm_name)
ec2_region = vm_cache['instance']['region']['name']
if(vm_cache['user_id']!=request.user.id):
return HttpResponse("access denied")
if(vms_cache.vms_console_output_cache):
console_output = vms_cache.vms_console_output_cache
else:
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
aws_ec2_verified = profile.aws_ec2_verified
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
reservations = ec2conn.get_all_instances(instance_ids=[vm_name,])
instance = reservations[0].instances[0]
console_output = instance.get_console_output()
console_output = console_output.output
if(not console_output):
console_output = ""
vms_cache.vms_console_output_cache = console_output
vms_cache.save()
end = datetime.datetime.utcnow()
start = end - datetime.timedelta(minutes=60)
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
cloudwatch = boto.ec2.cloudwatch.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
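    # Pull the last hour of instance metrics from CloudWatch: list_metrics()
    # returns the matching Metric objects and metric.query(start, end,
    # statistic, unit) fetches the datapoints for each.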
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="NetworkIn")[0]
networkin_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="NetworkOut")[0]
networkout_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="DiskReadOps")[0]
disk_readops_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="DiskWriteOps")[0]
disk_writeops_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="DiskReadBytes")[0]
disk_readbytes_datapoints = metric.query(start, end, 'Average', '')
metric = cloudwatch.list_metrics(dimensions={'InstanceId':vm_cache['id']}, metric_name="DiskWriteBytes")[0]
disk_writebytes_datapoints = metric.query(start, end, 'Average', '')
networkin_datapoints = json.dumps(networkin_datapoints,default=date_handler)
networkout_datapoints = json.dumps(networkout_datapoints,default=date_handler)
disk_readops_datapoints = json.dumps(disk_readops_datapoints,default=date_handler)
disk_writeops_datapoints = json.dumps(disk_writeops_datapoints,default=date_handler)
disk_readbytes_datapoints = json.dumps(disk_readbytes_datapoints,default=date_handler)
disk_writebytes_datapoints = json.dumps(disk_writebytes_datapoints,default=date_handler)
return render_to_response('aws_vm.html', {'vm_name':vm_name,'vm_cache':vm_cache,'console_output':console_output,'networkin_datapoints':networkin_datapoints,'networkout_datapoints':networkout_datapoints,'disk_readops_datapoints':disk_readops_datapoints,'disk_writeops_datapoints':disk_writeops_datapoints,'disk_readbytes_datapoints':disk_readbytes_datapoints,'disk_writebytes_datapoints':disk_writebytes_datapoints,}, context_instance=RequestContext(request))
@login_required()
def control_aws_vm(request, vm_name, action):
print request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
user.last_login = datetime.datetime.now()
user.save()
ip = request.META['REMOTE_ADDR']
_log_user_activity(profile,"click","/aws/"+vm_name+"/"+action+"/","control_aws_vm",ip=ip)
vms_cache = Cache.objects.get(user=user)
vm_cache = vms_cache.vms_response
vm_cache = base64.b64decode(vm_cache)
vm_cache = pickle.loads(vm_cache)[vm_name]
if(vm_cache['user_id']!=request.user.id):
return HttpResponse("access denied")
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
aws_ec2_verified = profile.aws_ec2_verified
ec2_region = vm_cache['instance']['region']['name']
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
if(action=="reboot"):
ec2conn.reboot_instances([vm_name,])
if(action=="start"):
ec2conn.start_instances([vm_name,])
if(action=="stop"):
ec2conn.stop_instances([vm_name,])
if(action=="terminate"):
ec2conn.terminate_instances([vm_name,])
return HttpResponseRedirect("/")
@login_required()
def server_view(request, hwaddr):
print '-- server_view'
print request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
ip = request.META['REMOTE_ADDR']
_log_user_activity(profile,"click","/server/"+hwaddr,"server_view",ip=ip)
hwaddr_orig = hwaddr
hwaddr = hwaddr.replace('-',':')
server = mongo.servers.find_one({'secret':profile.secret,'uuid':hwaddr,})
server_status = "Running"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>20):
server_status = "Stopped"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>1800):
server_status = "Offline"
try:
uuid = server['uuid']
except:
return HttpResponse("access denied")
disks_usage_ = []
#disks_usage = mongo.disks_usage.find({'uuid':uuid,}).sort('_id',-1).limit(60)
#for i in disks_usage: disks_usage_.append(i)
disks_usage = disks_usage_
networking_ = []
#networking = mongo.networking.find({'uuid':uuid,}).sort('_id',-1).limit(60)
#for i in networking: networking_.append(i)
networking = networking_
mem_usage_ = []
#mem_usage = mongo.memory_usage.find({'uuid':uuid,}).sort('_id',-1).limit(60)
#for i in mem_usage: mem_usage_.append(i)
mem_usage = mem_usage_
loadavg_ = []
#loadavg = mongo.loadavg.find({'uuid':uuid,}).sort('_id',-1).limit(60)
#for i in loadavg: loadavg_.append(i)
loadavg = loadavg_
activity = mongo.activity.find({'uuid':uuid,}).sort('_id',-1).limit(3)
disks = []
disks_ = server[u'disks_usage']
for disk in disks_:
if not disk[5] in disks:
disks.append(disk[5])
return render_to_response('server_detail.html', {'secret':profile.secret,'hwaddr':hwaddr,'hwaddr_orig':hwaddr_orig,'server':server,'server_status':server_status,'disks_usage':disks_usage,'disks':disks,'mem_usage':mem_usage,'loadavg':loadavg,'networking':networking,'activity':activity,}, context_instance=RequestContext(request))
@login_required()
def ajax_update_server_name(request):
response = {}
response["success"] = "true"
response = str(response).replace('u"','"')
response = response.replace("'",'"')
server_ = request.POST['server']
secret = request.POST['secret']
server_ = server_.replace('-', ':')
server = mongo.servers.find_one({'secret':secret,'uuid':server_,})
if request.POST["servername"] == "":
server['name'] = request.POST['server'].replace("-", ":")
else:
server['name'] = request.POST["servername"]
server = mongo.servers.update({'secret':secret, 'uuid':server_}, server)
vms_cache = Cache.objects.get(user=request.user)
vms_cache.delete()
return HttpResponse(response, content_type="application/json")
@login_required()
def ajax_vms_refresh(request):
user = request.user
profile = userprofile.objects.get(user=request.user)
print 'Refreshing', user, 'VMs cache..'
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
aws_ec2_verified = profile.aws_ec2_verified
virtual_machines = {}
servers = mongo.servers.find({'secret':profile.secret,}).sort('_id',-1)
vms_cache = Cache.objects.get_or_create(user=user)
vms_cache = vms_cache[0]
vms_cache.is_updating = True
vms_cache.save()
if(servers.count()):
print 'servers count', servers.count()
for server in servers:
instance_metrics = {}
instance_metrics['id'] = server['uuid']
instance_metrics['user_id'] = request.user.id
instance_metrics['provider'] = 'agent'
instance_metrics['instance'] = {}
instance_metrics['instance']['user_id'] = request.user.id
instance_metrics['instance']['state'] = {}
instance_metrics['instance']['tags'] = {}
try:
instance_metrics["instance"]['tags']['Name'] = server['name']
#instance_metrics["instance"]['tags']['Name'] = ''.join(x for x in unicodedata.normalize('NFKD', server['hostname']) if x in string.ascii_letters).lower()
except:
instance_metrics["instance"]['tags']['Name'] = server['hostname'].replace('.','-').lower()
uuid = server['uuid']
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>20):
instance_metrics['instance']['state']['state'] = "Stopped"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>1800):
instance_metrics['instance']['state']['state'] = "Offline"
else:
instance_metrics['instance']['state']['state'] = "Running"
cpu_usage_ = ""
params = {'start':'2m-ago','m':'sum:' + uuid.replace(':','-') + '.sys.cpu'}
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
try:
tsdb_response = tsdb_response[0]['dps']
except:
tsdb_response = []
c=0
for i in tsdb_response:
cpu_usage_ += str(round(tsdb_response[i],2))
cpu_usage_ += ","
if(c==60): break
c+=1
cpu_usage = cpu_usage_[:-1]
cpu_usage_reversed = ""
cpu_usage_array_reversed = []
for i in cpu_usage.split(','): cpu_usage_array_reversed.insert(0,i)
for i in cpu_usage_array_reversed: cpu_usage_reversed += str(i)+","
cpu_usage_reversed = cpu_usage_reversed[:-1]
instance_metrics['cpu_utilization_datapoints'] = cpu_usage_reversed
virtual_machines[server['uuid'].replace(':','-')] = instance_metrics
#print 'virtual_machines', virtual_machines
if aws_ec2_verified:
aws_regions = profile.aws_enabled_regions.split(',')
print 'AWS regions', aws_regions
for ec2_region in aws_regions:
if(ec2_region):
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
cloudwatch = boto.ec2.cloudwatch.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
try:
reservations = ec2conn.get_all_instances()
except:
vms_cache.is_updating = False
vms_cache.vms_response = ""
vms_cache.save()
print vms_cache.is_updating
print vms_cache.vms_response
#return HttpResponse("access denied")
instances = [i for r in reservations for i in r.instances]
for instance in instances:
if not instance: continue
instance_metrics = {}
instance_metrics['instance'] = {}
print '** instance', instance.id, instance.private_ip_address
volumes = []
for volume in ec2conn.get_all_volumes(filters={'attachment.instance-id': instance.id}):
volumes.append([volume.id, volume.iops, volume.size,])
groups = []
for group in instance.__dict__['groups']:
groups.append([group.id, group.name,])
instance_metrics['id'] = instance.id
instance_metrics['user_id'] = request.user.id
instance_metrics['provider'] = "aws-ec2"
instance_metrics['instance']['placement'] = instance.placement
instance_metrics['instance']['user_id'] = request.user.id
instance_metrics['instance']['groups'] = groups
instance_metrics['instance']['block_device_mapping'] = volumes
instance_metrics['instance']['architecture'] = instance.architecture
instance_metrics['instance']['client_token'] = instance.client_token
instance_metrics['instance']['dns_name'] = instance.dns_name
instance_metrics['instance']['private_ip_address'] = instance.private_ip_address
instance_metrics['instance']['hypervisor'] = instance.hypervisor
instance_metrics['instance']['id'] = instance.id
instance_metrics['instance']['image_id'] = instance.image_id
instance_metrics['instance']['instance_type'] = instance.instance_type
instance_metrics['instance']['ip_address'] = instance.ip_address
instance_metrics['instance']['key_name'] = instance.key_name
instance_metrics['instance']['launch_time'] = instance.launch_time
instance_metrics['instance']['monitored'] = instance.monitored
instance_metrics['instance']['persistent'] = instance.persistent
instance_metrics['instance']['ramdisk'] = instance.ramdisk
instance_metrics['instance']['root_device_name'] = instance.root_device_name
instance_metrics['instance']['root_device_type'] = instance.root_device_type
instance_metrics['instance']['tags'] = instance.tags
instance_metrics['instance']['virtualization_type'] = instance.virtualization_type
instance_metrics['instance']['vpc_id'] = instance.vpc_id
instance_metrics['instance']['region'] = {"endpoint":instance.region.endpoint,"name":instance.region.name,}
instance_metrics['instance']['state'] = {"state":instance.state,"code":instance.state_code,"state_reason":instance.state_reason,}
virtual_machines[instance.id] = instance_metrics
print 'Updating', request.user, 'cache..'
print instance.platform, instance.product_codes
try:
ec2conn.monitor_instance(str(instance.id))
except:
print instance.id, 'instance not in a monitorable state!!'.upper()
#pprint(instance_metrics)
continue
                    # Define the start/end window for the CloudWatch queries.
end = datetime.datetime.utcnow()
start = end - datetime.timedelta(minutes=60)
                    # To enumerate every metric name available for the account:
                    # print ec2conn.list_metrics()
try:
metric = cloudwatch.list_metrics(dimensions={'InstanceId':instance.id}, metric_name="CPUUtilization")[0]
except: continue
cpu_utilization_datapoints = metric.query(start, end, 'Average', 'Percent')
instance_metrics['cpu_utilization_datapoints'] = json.dumps(cpu_utilization_datapoints,default=date_handler)
virtual_machines[instance.id] = instance_metrics
vms_cache.vms_response = base64.b64encode(pickle.dumps(virtual_machines, pickle.HIGHEST_PROTOCOL))
vms_cache.last_seen = timezone.now()
vms_cache.is_updating = False
vms_cache.save()
    print 'VMs cache was successfully updated.'
return HttpResponse("ALLDONE")
@login_required()
def ajax_virtual_machines(request):
print '-- ajax virtual machines'
print request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
try:
vms_cache = Cache.objects.get(user=user)
vm_cache = vms_cache.vms_response
vm_cache = base64.b64decode(vm_cache)
except: vm_cache = {}
try:
vm_cache = pickle.loads(vm_cache)
except: vm_cache = {}
c=0
ajax_vms_response = "{"
for vm in vm_cache:
if(vm_cache[vm]["instance"]["state"]["state"].lower()!="terminated"):
data_median = 0
isotope_filter_classes = " offline linux "
try:
data = ""
cpu_utilization_datapoints = vm_cache[vm]["cpu_utilization_datapoints"]
cpu_utilization_datapoints = json.loads(cpu_utilization_datapoints)
z=0
for i in cpu_utilization_datapoints:
data += str(i["Average"])
try:
data_median += float(i["Average"])
except: pass
if(len(cpu_utilization_datapoints)-1>z):
data += ","
#print data
z+=1
try:
data_median = data_median/z
except: data_median = 0
except:
try:
data = vm_cache[vm]["cpu_utilization_datapoints"]
z = 0
data_median = 0
for i in data.split(','):
z+=1
data_median += float(i)
data_median = data_median/z
except: data = ""
try:
instance_name = vm_cache[vm]["instance"]["tags"]["Name"]
except:
instance_name = vm
print 'instance_name', instance_name
color = "silver "
vm_state = vm_cache[vm]["instance"]["state"]["state"].title()
server_mac_address = vm_cache[vm]['id']
server_mac_address = str(server_mac_address).replace(':','-')
if(vm_state=="Running"):
isotope_filter_classes = " linux "
if(data_median<17):
color = "lightBlue "
if(data_median>=17 and data_median<=35):
color = "green "
isotope_filter_classes += " busy"
if(data_median>35 and data_median<=50):
color = "darkGreen "
isotope_filter_classes += " busy"
if(data_median>50 and data_median<=70):
color = "lightOrange "
isotope_filter_classes += " busy"
if(data_median>70):
isotope_filter_classes += " busy critical"
color = "red "
if data_median>85:
vm_state = "Hot hot hot!"
if(vm_state=="Stopping"):
color = "pink "
if(vm_state=="Pending"):
color = "pink "
if(vm_state=="Shutting-Down"):
color = "pink "
if(vm_state=="Stopped"):
isotope_filter_classes += " offline"
if(vm_cache[vm]['provider']!='agent'):
isotope_filter_classes += " cloud"
ajax_vms_response += "\""
ajax_vms_response += server_mac_address
ajax_vms_response += "\": {"
ajax_vms_response += "\"vmcolor\":\""
ajax_vms_response += color
ajax_vms_response += "\","
ajax_vms_response += "\"vmname\":\""
ajax_vms_response += instance_name
ajax_vms_response += "\","
ajax_vms_response += "\"vmtitle\":\""
ajax_vms_response += isotope_filter_classes
ajax_vms_response += "\","
ajax_vms_response += "\"averge\":\""
ajax_vms_response += data
ajax_vms_response += "\","
ajax_vms_response += "\"state\":\""
ajax_vms_response += vm_state
ajax_vms_response += "\","
ajax_vms_response += "\"link\":\""
if(vm_cache[vm]['provider']=='agent'):
ajax_vms_response += "/server/"+vm+"/"
else:
ajax_vms_response += "/aws/"+vm+"/"
ajax_vms_response += "\""
ajax_vms_response += "},"
if(c==len(vm_cache)-1):
ajax_vms_response += "}"
c+=1
#print '-_'*80
#print vm_cache[vm]["instance"]["state"]["state"].title(), vm
ajax_vms_response = ajax_vms_response.replace(",}","}")
    if(not vm_cache): ajax_vms_response = "{}"
return render_to_response('ajax_virtual_machines.html', {'user':user,'ajax_vms_response':ajax_vms_response,'vms_cached_response':vm_cache,}, context_instance=RequestContext(request))
@login_required()
def ajax_aws_graphs(request, instance_id, graph_type="all"):
print '-- ajax_aws_graphs', request.user
user = request.user
profile = userprofile.objects.get(user=request.user)
vms_cache = Cache.objects.get(user=user)
vm_cache = vms_cache.vms_response
vm_cache = base64.b64decode(vm_cache)
try:
vm_cache = pickle.loads(vm_cache)[instance_id]
except:
return HttpResponse("XXX " + instance_id)
if(vm_cache['user_id']!=request.user.id):
return HttpResponse("access denied")
aws_access_key = profile.aws_access_key
aws_secret_key = profile.aws_secret_key
aws_ec2_verified = profile.aws_ec2_verified
ec2_region = vm_cache['instance']['region']['name']
ec2conn = boto.ec2.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
cloudwatch = boto.ec2.cloudwatch.connect_to_region(ec2_region,aws_access_key_id=aws_access_key,aws_secret_access_key=aws_secret_key)
reservations = ec2conn.get_all_instances(instance_ids=[instance_id,])
instance = reservations[0].instances[0]
end = datetime.datetime.utcnow()
start = end - datetime.timedelta(days=10)
metric = cloudwatch.list_metrics(dimensions={'InstanceId':instance_id}, metric_name="CPUUtilization")[0]
cpu_utilization_datapoints = metric.query(start, end, 'Average', 'Percent',period=3600)
return HttpResponse("data " + instance_id + "=" + str(instance) + " ** " + graph_type.upper())
@login_required()
def ajax_server_graphs(request, hwaddr, graph_type=""):
print '-- ajax_server_graphs, type', graph_type
print request.user
graphs_mixed_respose = []
secret = request.POST['secret']
uuid = request.POST['server']
uuid = uuid.replace('-',':')
server = mongo.servers.find_one({'secret':secret,'uuid':uuid,})
print 'debug', secret, uuid
try:
uuid = server['uuid']
except:
return HttpResponse("access denied")
server_status = "Running"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>20):
server_status = "Stopped"
if((datetime.datetime.utcnow()-server['last_seen']).total_seconds()>1800):
server_status = "Offline"
#activity = mongo.activity.find({'uuid':uuid,}).sort('_id',-1).limit(3)
if(graph_type=="server_info"):
graphs_mixed_respose = {}
graphs_mixed_respose['name'] = server['name']
graphs_mixed_respose['server_info_hostname'] = server['hostname']
graphs_mixed_respose['cpu_used'] = server['cpu_usage']['cpu_used']
graphs_mixed_respose['memory_used'] = server['memory_usage']['memory_used_percentage']
graphs_mixed_respose['swap_used'] = server['memory_usage']['swap_used_percentage']
graphs_mixed_respose['loadavg_used'] = server['loadavg'][1]
graphs_mixed_respose['server_info_uptime'] = server['uptime']
graphs_mixed_respose['server_info_loadavg'] = server['loadavg']
graphs_mixed_respose['server_info_status'] = server_status
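        # Crude JSON serialization: rewrite the dict's repr with double
        # quotes; the second u" pass catches u'...' strings exposed by the
        # quote replacement.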
graphs_mixed_respose = str(graphs_mixed_respose).replace('u"','"')
graphs_mixed_respose = graphs_mixed_respose.replace("'",'"')
graphs_mixed_respose = str(graphs_mixed_respose).replace('u"','"')
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="processes"):
processes_ = []
processes = server['processes']
c=0
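        # The first line of the stored `ps` output is the column header row;
        # the c > 0 check below skips it.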
for line in processes:
if(c>0):
if not line:break
line = line.split(' ')
line_ = []
for i in line:
if i: line_.append(i)
line = line_
process_user = line[0]
process_pid = line[1]
process_cpu = line[2]
process_mem = line[3]
process_vsz = line[4]
process_rss = line[5]
process_tty = line[6]
process_stat = line[7]
process_start_time = line[8]+'-'+line[9]
process_command = line[10:]
process_name = clean_ps_command(process_command[0])
process = {
'pid': process_pid,
'cpu': process_cpu+'%',
'mem': process_mem+'%',
# 'vsz': process_vsz,
# 'rss': process_rss,
# 'tty': process_tty,
# 'stat': process_stat,
# 'start_time': process_start_time,
'process': process_name,
'command': ' '.join(str(x) for x in process_command).replace("[", "").replace("]","")
}
process['user'] = '<span class=\\"label label-success\\">'
if int(float(process_cpu)) > 50:
process['user'] = '<span class=\\"label label-warning\\">'
if int(float(process_cpu)) > 75:
process['user'] = '<span class=\\"label label-danger\\">'
process['user'] += process_user
process['user'] += '</span>'
processes_.append(process)
c+=1
processes = {}
processes['data'] = processes_
processes = str(processes).replace(" u'"," '").replace("[u'","['").replace("'",'"').replace("\\\\", "\\")
return HttpResponse(processes, content_type="application/json")
if(graph_type=="network_connections"):
network_connections_ = []
network_connections = server['network_connections']['listen']
for conn in network_connections:
connection = {}
connection['proto'] = conn[1]
connection['recv-q'] = conn[2]
connection['send-q'] = conn[3]
connection['address'] = conn[4]
if conn[6]:
connection['port'] = conn[5] + "/" + conn[6]
else:
connection['port'] = conn[5]
network_connections_.append(connection)
network_connections = {}
network_connections['data'] = network_connections_
network_connections = str(network_connections).replace(" u'"," '")
network_connections = str(network_connections).replace("'",'"')
return HttpResponse(network_connections, content_type="application/json")
if(graph_type=="active_network_connections"):
active_network_connections_ = []
active_network_connections = server['network_connections']['established']
for conn in active_network_connections:
connection = {}
connection['proto'] = conn[1]
connection['recv-q'] = conn[2]
connection['send-q'] = conn[3]
connection['local-address'] = conn[7]
connection['foreign-address'] = conn[4]
connection['foreign-port'] = conn[5]
active_network_connections_.append(connection)
active_network_connections = {}
active_network_connections['data'] = active_network_connections_
active_network_connections = str(active_network_connections).replace(" u'"," '")
active_network_connections = str(active_network_connections).replace("'",'"')
return HttpResponse(active_network_connections, content_type="application/json")
if(graph_type=="loadavg"):
params = None
graph_interval = request.POST['interval']
graphs_mixed_respose = [[],[],[]]
loadavg_specific_queries = ['1-min','5-mins','15-mins']
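        # One OpenTSDB query per load-average window; the avg tag selects
        # the 1-min, 5-mins or 15-mins series.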
count = 0
for i in loadavg_specific_queries:
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.loadavg'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.loadavg'}
params_ = params
params_['m'] = params['m'] + "{avg="+i+"}"
tsdb = requests.get('http://hbase:4242/api/query', params=params_)
params = params_
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose[count].append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose[count] = sorted(graphs_mixed_respose[count], key=itemgetter(0))
graphs_mixed_respose[count] = graphs_mixed_respose[count][::-1]
count += 1
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="disks"):
print '*'*1000
print request.POST
        mount_point = request.POST['mountPoint']
graph_interval = request.POST['interval']
graphs_mixed_respose = []
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.disks'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.disks'}
params['m'] += "{mm=disk_used,mount_point="+mount_ponit+"}"
if(params):
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose.append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose = sorted(graphs_mixed_respose, key=itemgetter(0))
graphs_mixed_respose = [graphs_mixed_respose[::-1],]
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="cpu_usage"):
params = None
graph_interval = request.POST['interval']
graphs_mixed_respose = []
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.cpu'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.cpu'}
if(params):
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose.append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose = sorted(graphs_mixed_respose, key=itemgetter(0))
graphs_mixed_respose = [graphs_mixed_respose[::-1],]
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="mem_usage" or graph_type=="swap_usage"):
params = None
graph_interval = request.POST['interval']
graphs_mixed_respose = []
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.memory'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.memory'}
if(graph_type=="mem_usage"):
params['m'] += "{mm=memory_used}"
if(graph_type=="swap_usage"):
params['m'] += "{mm=swap_used}"
if(params):
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose.append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose = sorted(graphs_mixed_respose, key=itemgetter(0))
graphs_mixed_respose = [graphs_mixed_respose[::-1],]
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
if(graph_type=="network_input_packets" or graph_type=="inbound_traffic" or graph_type=="network_output_packets" or graph_type=="outbound_traffic"):
params = None
graph_interval = request.POST['interval']
graphs_mixed_respose = []
if(graph_interval=="3m"):
params = {'start':'3m-ago','m':'avg:3s-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="15m"):
params = {'start':'15m-ago','m':'avg:15s-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="1h"):
params = {'start':'1h-ago','m':'avg:1m-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="1d"):
params = {'start':'1d-ago','m':'avg:30m-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="7d"):
params = {'start':'7d-ago','m':'avg:3h-avg:' + hwaddr + '.sys.network'}
if(graph_interval=="30d"):
params = {'start':'30d-ago','m':'avg:12h-avg:' + hwaddr + '.sys.network'}
if(graph_type=="network_input_packets"):
params['m'] += "{mm=input_accept_packets}"
if(graph_type=="network_input_bytes"):
params['m'] += "{mm=input_accept_bytes}"
if(graph_type=="network_output_packets"):
params['m'] += "{mm=output_accept_packets}"
if(graph_type=="network_output_bytes"):
params['m'] += "{mm=output_accept_bytes}"
if(params):
tsdb = requests.get('http://hbase:4242/api/query',params=params)
tsdb_response = json.loads(tsdb.text)
tsdb_response = tsdb_response[0]['dps']
for i in tsdb_response:
graphs_mixed_respose.append([int(i),round(float(tsdb_response[i]),2)])
graphs_mixed_respose = sorted(graphs_mixed_respose, key=itemgetter(0))
graphs_mixed_respose = [graphs_mixed_respose[::-1],]
graphs_mixed_respose = str(graphs_mixed_respose).replace("u'","'")
return HttpResponse(graphs_mixed_respose, content_type="application/json")
return HttpResponse("I'm sorry I don't understand")
def ajax_virtual_machines_box(request):
return render_to_response('ajax_virtual_machines_box.html', locals(), context_instance=RequestContext(request))
| followyourheart/cloudly | vms/views.py | Python | mit | 39,971 |
# -*- coding: utf-8 -*-
"""Checks/fixes are bundled in one namespace."""
import logging
from rdflib.namespace import RDF, SKOS
from .rdftools.namespace import SKOSEXT
from .rdftools import localname, find_prop_overlap
def _hierarchy_cycles_visit(rdf, node, parent, break_cycles, status):
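    # status codes: missing = unvisited, 1 = on the current DFS path,
    # 2 = finished; meeting a node already in state 1 signals a cycle.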
if status.get(node) is None:
status[node] = 1 # entered
for child in sorted(rdf.subjects(SKOS.broader, node)):
_hierarchy_cycles_visit(
rdf, child, node, break_cycles, status)
status[node] = 2 # set this node as completed
elif status.get(node) == 1: # has been entered but not yet done
if break_cycles:
logging.warning("Hierarchy cycle removed at %s -> %s",
localname(parent), localname(node))
rdf.remove((node, SKOS.broader, parent))
rdf.remove((node, SKOS.broaderTransitive, parent))
rdf.remove((node, SKOSEXT.broaderGeneric, parent))
rdf.remove((node, SKOSEXT.broaderPartitive, parent))
rdf.remove((parent, SKOS.narrower, node))
rdf.remove((parent, SKOS.narrowerTransitive, node))
else:
logging.warning(
"Hierarchy cycle detected at %s -> %s, "
"but not removed because break_cycles is not active",
localname(parent), localname(node))
elif status.get(node) == 2: # is completed already
pass
def hierarchy_cycles(rdf, fix=False):
"""Check if the graph contains skos:broader cycles and optionally break these.
:param Graph rdf: An rdflib.graph.Graph object.
    :param bool fix: Fix the problem by breaking each cycle, removing the
        skos:broader relation (and its variants) that closes the cycle.
"""
top_concepts = sorted(rdf.subject_objects(SKOS.hasTopConcept))
status = {}
for cs, root in top_concepts:
_hierarchy_cycles_visit(
rdf, root, None, fix, status=status)
# double check that all concepts were actually visited in the search,
# and visit remaining ones if necessary
recheck_top_concepts = False
for conc in sorted(rdf.subjects(RDF.type, SKOS.Concept)):
if conc not in status:
recheck_top_concepts = True
_hierarchy_cycles_visit(
rdf, conc, None, fix, status=status)
return recheck_top_concepts
def disjoint_relations(rdf, fix=False):
"""Check if the graph contains concepts connected by both of the semantically
disjoint semantic skos:related and skos:broaderTransitive (S27),
and optionally remove the involved skos:related relations.
:param Graph rdf: An rdflib.graph.Graph object.
:param bool fix: Fix the problem by removing skos:related relations that
overlap with skos:broaderTransitive.
"""
for conc1, conc2 in sorted(rdf.subject_objects(SKOS.related)):
if conc2 in sorted(rdf.transitive_objects(conc1, SKOS.broader)):
if fix:
logging.warning(
"Concepts %s and %s connected by both "
"skos:broaderTransitive and skos:related, "
"removing skos:related",
conc1, conc2)
rdf.remove((conc1, SKOS.related, conc2))
rdf.remove((conc2, SKOS.related, conc1))
else:
logging.warning(
"Concepts %s and %s connected by both "
"skos:broaderTransitive and skos:related, "
"but keeping it because keep_related is enabled",
conc1, conc2)
def hierarchical_redundancy(rdf, fix=False):
"""Check for and optionally remove extraneous skos:broader relations.
:param Graph rdf: An rdflib.graph.Graph object.
:param bool fix: Fix the problem by removing skos:broader relations between
concepts that are otherwise connected by skos:broaderTransitive.
"""
for conc, parent1 in sorted(rdf.subject_objects(SKOS.broader)):
for parent2 in sorted(rdf.objects(conc, SKOS.broader)):
if parent1 == parent2:
continue # must be different
if parent2 in rdf.transitive_objects(parent1, SKOS.broader):
if fix:
logging.warning(
"Eliminating redundant hierarchical relationship: "
"%s skos:broader %s",
conc, parent2)
rdf.remove((conc, SKOS.broader, parent2))
rdf.remove((conc, SKOS.broaderTransitive, parent2))
rdf.remove((parent2, SKOS.narrower, conc))
rdf.remove((parent2, SKOS.narrowerTransitive, conc))
else:
logging.warning(
"Redundant hierarchical relationship "
"%s skos:broader %s found, but not eliminated "
"because eliminate_redundancy is not set",
conc, parent2)
def preflabel_uniqueness(rdf, policy='all'):
"""Check that concepts have no more than one value of skos:prefLabel per
language tag (S14), and optionally move additional values to skos:altLabel.
:param Graph rdf: An rdflib.graph.Graph object.
:param str policy: Policy for deciding which value to keep as prefLabel
when multiple prefLabels are found. Possible values are 'shortest'
(keep the shortest label), 'longest' (keep the longest label),
        'uppercase' (prefer uppercase), 'lowercase' (prefer lowercase) or
'all' (keep all, just log the problems). Alternatively, a list of
policies to apply in order, such as ['shortest', 'lowercase'], may
be used.
"""
resources = set(
(res for res, label in rdf.subject_objects(SKOS.prefLabel)))
policy_fn = {
'shortest': len,
'longest': lambda x: -len(x),
'uppercase': lambda x: int(x[0].islower()),
'lowercase': lambda x: int(x[0].isupper())
}
    if not isinstance(policy, (list, tuple)):
policies = policy.split(',')
else:
policies = policy
for p in policies:
if p not in policy_fn:
            logging.critical("Unknown preflabel-policy: %s", p)
return
def key_fn(label):
return [policy_fn[p](label) for p in policies] + [str(label)]
for res in sorted(resources):
prefLabels = {}
for label in rdf.objects(res, SKOS.prefLabel):
lang = label.language
if lang not in prefLabels:
prefLabels[lang] = []
prefLabels[lang].append(label)
for lang, labels in prefLabels.items():
if len(labels) > 1:
if policies[0] == 'all':
logging.warning(
"Resource %s has more than one prefLabel@%s, "
"but keeping all of them due to preflabel-policy=all.",
res, lang)
continue
chosen = sorted(labels, key=key_fn)[0]
logging.warning(
"Resource %s has more than one prefLabel@%s: "
"choosing %s (policy: %s)",
res, lang, chosen, str(policy))
for label in labels:
if label != chosen:
rdf.remove((res, SKOS.prefLabel, label))
rdf.add((res, SKOS.altLabel, label))
def label_overlap(rdf, fix=False):
"""Check if concepts have the same value for any two of the pairwise
disjoint properties skos:prefLabel, skos:altLabel and skos:hiddenLabel
(S13), and optionally remove the least significant property.
:param Graph rdf: An rdflib.graph.Graph object.
:param bool fix: Fix the problem by removing the least significant property
(altLabel or hiddenLabel).
"""
def label_warning(res, label, keep, remove):
if fix:
logging.warning(
"Resource %s has '%s'@%s as both %s and %s; removing %s",
res, label, label.language, keep, remove, remove
)
else:
logging.warning(
"Resource %s has '%s'@%s as both %s and %s",
res, label, label.language, keep, remove
)
for res, label in find_prop_overlap(rdf, SKOS.prefLabel, SKOS.altLabel):
label_warning(res, label, 'prefLabel', 'altLabel')
if fix:
rdf.remove((res, SKOS.altLabel, label))
for res, label in find_prop_overlap(rdf, SKOS.prefLabel, SKOS.hiddenLabel):
label_warning(res, label, 'prefLabel', 'hiddenLabel')
if fix:
rdf.remove((res, SKOS.hiddenLabel, label))
for res, label in find_prop_overlap(rdf, SKOS.altLabel, SKOS.hiddenLabel):
label_warning(res, label, 'altLabel', 'hiddenLabel')
if fix:
rdf.remove((res, SKOS.hiddenLabel, label))
| NatLibFi/Skosify | skosify/check.py | Python | mit | 8,938 |
# -*- coding:utf-8 -*-
import unittest, sys, os
sys.path[:0] = [os.path.dirname(os.path.dirname(os.path.abspath(__file__)))]
from saklient.cloud.enums.eserverinstancestatus import EServerInstanceStatus
class TestEnum(unittest.TestCase):
def test_should_be_defined(self):
        self.assertEqual(EServerInstanceStatus.UP, "up")
        self.assertEqual(EServerInstanceStatus.DOWN, "down")
    def test_should_be_compared(self):
        self.assertEqual(EServerInstanceStatus.compare("up", "up"), 0)
        self.assertEqual(EServerInstanceStatus.compare("up", "down"), 1)
        self.assertEqual(EServerInstanceStatus.compare("down", "up"), -1)
        self.assertEqual(EServerInstanceStatus.compare("UNDEFINED-SYMBOL", "up"), None)
        self.assertEqual(EServerInstanceStatus.compare("up", "UNDEFINED-SYMBOL"), None)
        self.assertEqual(EServerInstanceStatus.compare(None, "up"), None)
        self.assertEqual(EServerInstanceStatus.compare("up", None), None)
        self.assertEqual(EServerInstanceStatus.compare(None, None), None)
if __name__ == '__main__':
unittest.main()
| sakura-internet/saklient.python | tests/test_enum.py | Python | mit | 1,117 |
__author__ = 'las3wh'
print('goodbye') | lukestarchief/cs3240-labdemo | goodbye.py | Python | mit | 39 |
# http://rosalind.info/problems/long/
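# Greedy shortest-superstring assembly: repeatedly merge the accumulator with
# any read that overlaps one of its ends by more than half the read's length.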
def superstring(arr, accumulator=''):
# We now have all strings
if len(arr) == 0:
return accumulator
# Initial call
elif len(accumulator) == 0:
accumulator = arr.pop(0)
return superstring(arr, accumulator)
# Recursive call
else:
for i in range(len(arr)):
sample = arr[i]
l = len(sample)
for p in range(l / 2):
q = l - p
if accumulator.startswith(sample[p:]):
arr.pop(i)
return superstring(arr, sample[:p] + accumulator)
if accumulator.endswith(sample[:q]):
arr.pop(i)
return superstring(arr, accumulator + sample[q:])
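# Hand-checked example:
#   superstring(['ATTAGACCTG', 'AGACCTGCCG', 'CCTGCCGGAA', 'GCCGGAATAC'])
# returns 'ATTAGACCTGCCGGAATAC'; each merge uses an overlap longer than half
# a read, which is what the p < l / 2 scan above relies on.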
f = open("rosalind_long.txt", "r")
dnas = {}
currentKey = ''
for content in f:
# Beginning of a new sample
if '>' in content:
key = content.rstrip().replace('>', '')
currentKey = key
dnas[currentKey] = ''
else:
dnas[currentKey] += content.rstrip()
print superstring(dnas.values())
| AntoineAugusti/katas | rosalind/long.py | Python | mit | 1,108 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('attracker_app', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='segment',
name='additional_miles',
field=models.FloatField(default=0, verbose_name='Non-AT miles hiked with the segment'),
),
]
| chrispaul/attracker | attracker_app/migrations/0002_segment_additional_miles.py | Python | mit | 455 |
# !/usr/local/bin/python3.4.2
# ----Copyright (c) 2017 Carnegie Hall | The MIT License (MIT)----
# ----For the full license terms, please visit https://github.com/CarnegieHall/linked-data/blob/master/LICENSE----
## Needs further refinement to eliminate non-instrument link results.
## Argument[0] is script to run
import csv
import httplib2
import json
import os
import sys
from bs4 import BeautifulSoup
mbz_instDict = {}
h = httplib2.Http()
link = 'https://musicbrainz.org/instruments'
uri_root = 'https://musicbrainz.org'
resp, html_doc = h.request(link, "GET")
soup = BeautifulSoup(html_doc, "lxml")
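# CSS attribute prefix selector: anchors whose href starts with "/instrument/".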
for result in soup.body.select(
        'a[href^="/instrument/"]'):
label = result.contents[0].string
uri = ''.join([uri_root, result.get('href')])
mbz_instDict[str(uri)] = label
mbz_instDict_path = os.path.join(
os.path.dirname(__file__), os.pardir, 'source-files', 'mbz_instDict.json')
mbz_instList_path = os.path.join(
os.path.dirname(__file__), os.pardir, 'source-files', 'mbz_instList.csv')
with open(mbz_instDict_path, 'w') as f1:
json.dump(mbz_instDict, f1)
with open(mbz_instList_path, 'w', newline='') as csvfile:
w = csv.writer(csvfile, dialect='excel', delimiter=',')
for k,v in mbz_instDict.items():
w.writerow([k,v])
print("Finished gathering MusicBrainz instrument URIs and labels")
| CarnegieHall/linked-data | scripts/scrape_mbz_instruments.py | Python | mit | 1,346 |
#Synonyms experiment. Pass a string to see its "synonyms"
from pyspark.sql import SparkSession, Row
from pyspark.ml.feature import Word2Vec, Tokenizer, StopWordsRemover, Word2VecModel
import sys
from string import punctuation
def strip_punctuation(arr):
return [''.join(c for c in s if c not in punctuation) for s in arr]
def main():
spark = SparkSession.builder \
.appName("Spark CV-job ad matching") \
.config("spark.some.config.option", "some-value") \
.master("local[*]") \
.getOrCreate()
df_categories = spark.read.json("allcategories4rdd/allcategories.jsonl")
tokenizer = Tokenizer(inputCol="skillText", outputCol="words")
tokenized = tokenizer.transform(df_categories)
remover = StopWordsRemover(inputCol="words", outputCol="filtered")
removed = remover.transform(tokenized)
stripped = removed.select('filtered').rdd.map(lambda x: strip_punctuation(x[0]))\
.map(lambda x: Row(filtered=x)).toDF(['filtered'])
# word2vec = Word2Vec(vectorSize=100, inputCol="filtered", outputCol="result")
# model = word2vec.fit(stripped)
#model.save("word2vec-model")
model = Word2VecModel.load("word2vec-model")
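    # findSynonyms returns a DataFrame of (word, similarity) rows, ordered
    # by cosine similarity to the query word given in argv[1].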
synonyms = model.findSynonyms(sys.argv[1], 10)
synonyms.show(truncate=False)
# for word, cosine_distance in synonyms:
# print("{}: {}".format(word, cosine_distance))
if __name__ == '__main__':
main()
| bcanvural/thesis | synonyms.py | Python | mit | 1,419 |
from django.shortcuts import render,get_object_or_404
from django.http import Http404
# Create your views here.
from django.http import HttpResponse
def home(request):
    return render(request, 'index.html')
| iscarecrow/sb | server/views.py | Python | mit | 208 |
from clickerft.cft import Cft
from time import sleep
class Suite(Cft):
def test_buy_item_4(self):
while int(self.clicksPerGeneration.text) < 2:
if int(self.clicksOwned.text) < 1:
sleep(.5)
continue
self.increaseClicksPerGeneration.click()
while int(self.tr1.text) < int(self.pi4r1.text):
self.click_r_test('r1')
while int(self.tr2.text) < int(self.pi4r2.text):
self.click_r_test('r2')
self.i4.click()
assert int(self.oi4.text) == 1
sleep(1)
# todo put the modifier into the DOM to parse
assert int(self.tr1.text) == 5
pass
if __name__ == '__main__':
Suite()
| Victory/clicker-me-bliss | functional-tests/buy-item4.py | Python | mit | 727 |
#-*- coding: utf-8 -*-
import os
from clang.cindex import Config, Index, TypeKind
class ClangExtractor(object):
def __init__(self, libclang_path, srcdir):
if Config.library_file != libclang_path:
Config.set_library_file(libclang_path)
self.srcdir = srcdir
def extract(self):
protos = dict()
for dirpath, dirnames, filenames in os.walk(self.srcdir):
for fname in filenames:
fpath = dirpath + "/" + fname
fext = fname.split(".")[-1]
if fext == "c" or fext == "h":
index = Index.create()
tu = index.parse(fpath)
self.__clang_find_protos(tu.cursor, protos)
return protos
def __clang_find_protos(self, node, protos):
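        # Depth-first walk over the AST: record each function prototype's
        # canonical return and argument types; the opaque Lisp_Object type
        # is recorded as a plain "void *".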
if (node.type.kind == TypeKind.FUNCTIONPROTO): # or node.type.kind == TypeKind.FUNCTIONNOPROTO):
if node.spelling not in protos.keys():
protos[node.spelling] = list()
if len(protos[node.spelling]) == 0:
if (node.result_type.spelling == "Lisp_Object"):
protos[node.spelling].append("void *")
else:
protos[node.spelling].append(node.result_type.get_canonical().spelling)
for c in node.get_arguments():
if (c.type.spelling == "Lisp_Object"):
protos[node.spelling].append("void *")
else:
protos[node.spelling].append(c.type.get_canonical().spelling)
if node.type.is_function_variadic():
protos[node.spelling].append("...")
for c in node.get_children():
self.__clang_find_protos(c, protos)
| Frky/scat | src/shell/data/clangextractor.py | Python | mit | 1,769 |
"""Contains utility functions for working with the shell"""
from contextlib import contextmanager
import datetime
from decimal import Decimal
import json
import pprint
import sys
import time
import traceback
SHELL_CONTROL_SEQUENCES = {
'BLUE': '\033[34m',
'LTBLUE': '\033[94m',
'GREEN': '\033[32m',
'LTGREEN': '\033[92m',
'YELLOW': '\033[33m',
'LTYELLOW': '\033[93m',
'RED': '\033[31m',
'LTRED': '\033[91m',
'CYAN': '\033[36m',
'LTCYAN': '\033[96m',
'MAGENTA': '\033[35m',
'LTMAGENTA': '\033[95m',
'ENDC': '\033[0m',
'BOLD': '\033[1m',
'UNDERLINE': '\033[4m',
}
BLUE = "{BLUE}"
LTBLUE = "{LTBLUE}"
GREEN = "{GREEN}"
LTGREEN = "{LTGREEN}"
YELLOW = "{YELLOW}"
LTYELLOW = "{LTYELLOW}"
RED = "{RED}"
LTRED = "{LTRED}"
CYAN = "{CYAN}"
LTCYAN = "{LTCYAN}"
MAGENTA = "{MAGENTA}"
LTMAGENTA = "{LTMAGENTA}"
ENDC = "{ENDC}"
BOLD = "{BOLD}"
UNDERLINE = "{UNDERLINE}"
class JSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Decimal):
return float(o)
elif isinstance(o, (datetime.datetime, datetime.date, datetime.time)):
return str(o)
return super(JSONEncoder, self).default(o)
def read_json(timeout=0):
"""Read json data from stdin"""
data = read()
if data:
return json.loads(data)
def write_output(writer, *output, **kwargs):
"""Write the output to the writer, used for printing to stdout/stderr"""
to_print = kwargs.get("sep", " ").join(output) + kwargs.get("end", "\n")
if isinstance(writer, list):
writer.append(to_print)
else:
writer.write(to_print)
if kwargs.get("flush"):
writer.flush()
def write_json(output, end='', raw=False, file=None, flush=False):
file = file or sys.stdout
if len(output) == 1:
output = output[0]
if raw:
json.dump(output, file, separators=(',', ':'), cls=JSONEncoder)
else:
json.dump(output, file, indent=4, sort_keys=True, cls=JSONEncoder)
if flush:
file.flush()
if end:
write_output(file, '', end=end, sep='', flush=flush)
def read():
"""Read from stdin"""
return sys.stdin.read()
def choice(choices, msg='Enter your choice: ', color=True, default=None, **kwargs):
if isinstance(choices, dict):
choices_dict = choices
choices = sorted(choices_dict.keys())
elif isinstance(choices, (tuple, list)):
choices_dict = None
choice_msg = ['']
validate = []
for idx, item in enumerate(choices):
if color:
choice_msg.append("\t{LTYELLOW}%d{LTMAGENTA}: %s" % (idx, str(item)))
else:
choice_msg.append("\t%d: %s" % (idx, str(item)))
validate.append(str(idx))
choice_msg.append("")
if color:
choice_msg.append("{LTMAGENTA}{BOLD}"+msg+"{ENDC}")
else:
choice_msg.append(msg)
output = ask("\n".join(choice_msg), validate=validate, default=default, color=None, **kwargs)
if choices_dict:
key = choices[int(output)]
return choices_dict[key]
else:
return choices[int(output)]
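# e.g. choice(['red', 'green'], msg='Pick a colour: ') prints an indexed menu
# on stderr and returns the chosen list item.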
def ask(*args, **kwargs):
"""Ask for input"""
if not sys.stdin.isatty():
error("Cannot ask user for input, no tty exists")
sys.exit(1)
print_args = list(args)
print_args.append(kwargs.get("end", "\n"))
if kwargs["color"]:
print_args.insert(0, "{" + kwargs["color"] + "}")
print_args.append(ENDC)
while True:
stderr(*print_args, end='', **kwargs)
in_ = input()
if in_:
if not kwargs["validate"]:
return in_
if isinstance(kwargs["validate"], (tuple, list)) and in_ in kwargs["validate"]:
return in_
if callable(kwargs["validate"]) and kwargs["validate"](in_):
return in_
if kwargs["default"] is not None:
return kwargs["default"]
if kwargs["error_msg"] is not None:
error("\n" + kwargs["error_msg"] + "\n")
else:
error("\nYou didn't enter a valid choice!\n")
time.sleep(1)
def pretty(output):
"""Pretty format for shell output"""
return pprint.pformat(output, indent=2, width=100)
def _shell_format(output, **kwargs):
"""Formats the output for printing to a shell"""
kwargs.update(SHELL_CONTROL_SEQUENCES)
for idx, item in enumerate(output):
try:
output[idx] = item.format(**kwargs)
except KeyError:
pass # Can happen if some item is not in the kwargs dict
return output
def _convert_print(*args):
"""Convert the given arguments to a string for printing. Concantenate them together"""
output = []
for arg in args:
if not isinstance(arg, str):
arg = pretty(arg)
output.append(arg)
return output
def stdout_to_stderr():
"""Temporarily redirects stdout to stderr. Returns no-arg function to turn it back on."""
stdout = sys.stdout
sys.stdout = sys.stderr
def restore_stdout():
sys.stdout = stdout
return restore_stdout
def write_info_output(writer, *output, **kwargs):
if kwargs.get("json"):
        # Pass only the kwargs write_json understands; forwarding everything
        # (including 'json' itself) raises TypeError.
        return write_json(output, end=kwargs.get("end", ''), raw=kwargs.get("raw", False),
                          file=kwargs.get("file"), flush=kwargs.get("flush", False))
if not kwargs.get("raw", False):
output = _convert_print(*output)
output = _shell_format(output, **kwargs)
write_output(writer, *output, **kwargs)
def stdout(*output, **kwargs):
"""Print to stdout. Supports colors"""
write_info_output(sys.stdout, *output, **kwargs)
def stderr(*output, **kwargs):
"""Print to stderr. Supports colors"""
write_info_output(sys.stderr, *output, **kwargs)
def print_color(color, *output, **kwargs):
"""Print message to stderr in the given color"""
print_args = list(output)
print_args.append(ENDC)
if "file" in kwargs:
write_output(kwargs["file"], *output, **kwargs)
else:
stderr(color, *print_args, **kwargs)
def debug(*output, **kwargs):
"""Print debug message to stderr"""
print_color(BLUE, *output, **kwargs)
def info(*output, **kwargs):
"""Print info message to stderr"""
print_color(GREEN, *output, **kwargs)
def warning(*output, **kwargs):
"""Print warning message to stderr"""
print_color(YELLOW, *output, **kwargs)
def error(*output, **kwargs):
"""Print error message to stderr"""
print_color(RED, *output, **kwargs)
def exception(*output, **kwargs):
"""Print error message to stderr with last exception info"""
exc = traceback.format_exc()
print_args = list(output)
print_args.append("\nAn exception occurred:\n{exc}".format(exc=exc))
print_color(RED, *print_args, **kwargs)
def timestamp():
return int(time.time())
@contextmanager
def elapsed(output, **kwargs):
"""Context Manager that prints to stderr how long a process took"""
start = timestamp()
info("Starting: ", output, **kwargs)
yield
info("Completed: " + output + " {MAGENTA}(Elapsed Time: {elapsed}s){ENDC}", elapsed=timestamp()-start, **kwargs)
def elapsed_decorator(output):
"""Decorator that prints to stderr how long a process took"""
def wrapper(fn):
def wrapped_fn(*args, **kwargs):
with elapsed(output, **kwargs):
fn(*args, **kwargs)
return wrapped_fn
return wrapper
def print_section(color, *output, **kwargs):
"""Prints a section title header"""
output = ["\n\n", 60 * "#", "\n", "# "] + list(output) + ["\n", 60 * "#", "\n"]
print_color(color, *output, end="\n", **kwargs)
def print_table(headers, *table_data, **kwargs):
if not table_data:
return
if isinstance(table_data[0], dict):
all_data = []
for d in table_data:
new_output = []
for header in headers:
new_output.append(d[header])
all_data.append(new_output)
else:
all_data = table_data
all_data.insert(0, headers)
widths = [max(len(d[idx]) for d in all_data) for idx, _ in enumerate(headers)]
output = []
for row_idx, data in enumerate(all_data):
line = []
pad = "<" if row_idx == 0 else ">"
for idx, item in enumerate(data):
formatter = "{item: " + pad + str(widths[idx]) + "}"
line.append(formatter.format(item=item))
output.append("| " + " | ".join(line) + " |")
write_output(kwargs.get("file", sys.stderr), *output, **kwargs)
| johnnadratowski/git-reviewers | python_lib/shell.py | Python | mit | 8,544 |
"""
Django settings for school project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)))
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, '../templates'),
)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'koeorn$p_9&6!%1!84=erv*)#40-f$&z+_hq1^a1+2#93_ev%y'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
# 'django.contrib.admin',
# 'django.contrib.auth',
# 'django.contrib.contenttypes',
# 'django.contrib.sessions',
# 'django.contrib.messages',
# 'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'school.urls'
WSGI_APPLICATION = 'school.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Chongqing'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
| raymondyan/django_school | school/settings.py | Python | mit | 2,155 |
"""
A simple client to query a TensorFlow Serving instance.
Example:
$ python client.py \
--images IMG_0932_sm.jpg \
--num_results 10 \
--model_name inception \
--host localhost \
--port 9000 \
--timeout 10
Author: Grant Van Horn
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import time
import tfserver
def parse_args():
parser = argparse.ArgumentParser(description='Command line classification client. Sorts and prints the classification results.')
parser.add_argument('--images', dest='image_paths',
help='Path to one or more images to classify (jpeg or png).',
type=str, nargs='+', required=True)
parser.add_argument('--num_results', dest='num_results',
help='The number of results to print. Set to 0 to print all classes.',
required=False, type=int, default=0)
parser.add_argument('--model_name', dest='model_name',
help='The name of the model to query.',
required=False, type=str, default='inception')
parser.add_argument('--host', dest='host',
help='Machine host where the TensorFlow Serving model is.',
required=False, type=str, default='localhost')
parser.add_argument('--port', dest='port',
help='Port that the TensorFlow Server is listening on.',
required=False, type=int, default=9000)
parser.add_argument('--timeout', dest='timeout',
help='Amount of time to wait before failing.',
required=False, type=int, default=10)
args = parser.parse_args()
return args
def main():
args = parse_args()
# Read in the image bytes
image_data = []
for fp in args.image_paths:
        with open(fp, 'rb') as f:
data = f.read()
image_data.append(data)
# Get the predictions
t = time.time()
predictions = tfserver.predict(image_data, model_name=args.model_name,
host=args.host, port=args.port, timeout=args.timeout
)
dt = time.time() - t
print("Prediction call took %0.4f seconds" % (dt,))
# Process the results
results = tfserver.process_classification_prediction(predictions, max_classes=args.num_results)
# Print the results
for i, fp in enumerate(args.image_paths):
print("Results for image: %s" % (fp,))
for name, score in results[i]:
print("%s: %0.3f" % (name, score))
print()
if __name__ == '__main__':
main() | visipedia/tf_classification | tfserving/client.py | Python | mit | 2,561 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('merchants', '0006_classfy'),
]
operations = [
migrations.CreateModel(
name='RegionItem',
fields=[
('name', models.CharField(max_length=20)),
('id', models.CharField(max_length=10, serialize=False, primary_key=True)),
],
options={
'db_table': 'T_region',
},
),
migrations.RenameModel(
old_name='Classfy',
new_name='ClassfyItem',
),
migrations.RenameModel(
old_name='MerchantInfo',
new_name='MerchantItem',
),
]
| myhearter/dianping | ddp/app/merchants/migrations/0007_auto_20171119_0852.py | Python | mit | 807 |
import json
import os
from processes.postgres import Postgres
from processes.gather_exception import GatherException
try:
DB_SERVER = os.environ['DB_SERVER']
DB_PORT = os.environ['DB_PORT']
DB_DATABASE = os.environ['DB_DATABASE']
DB_USER = os.environ['DB_USER']
DB_PASSWORD = os.environ['DB_PASSWORD']
except KeyError:
try:
from processes.GLOBALS import DB_SERVER, DB_PORT, DB_DATABASE, DB_USER, DB_PASSWORD
except ImportError:
print("No parameters provided")
exit()
class Main(object):
def __init__(self):
self.pg = Postgres(DB_SERVER, DB_PORT, DB_DATABASE, DB_USER, DB_PASSWORD)
self.source_topic = 'youtube'
self.destination_topic = 'movies'
def run(self, data):
"""
This inserts the relevant json information
into the table kino.movies.
:param data: json data holding information on films.
"""
imdb_id = data['imdb_id']
omdb_movie_data = data['omdb_main']
tmdb_movie_data = data['tmdb_main']
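        # Register any previously unseen language names in kino.languages
        # first; the movie insert below fails if the language is unknown.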
sql = """insert into kino.languages(language)
select y.language
from json_to_recordset(%s) x (original_language varchar(1000))
join kino.iso2language y
on x.original_language = y.iso3166
where language not in (select language
from kino.languages)"""
self.pg.pg_cur.execute(sql, (json.dumps(tmdb_movie_data),))
self.pg.pg_conn.commit()
# We delete our record from kino.movies first.
# Due to foreign keys with 'on delete cascade', this clears all records from
# the database associated with that imdb_id.
sql = """delete from kino.movies
where imdb_id = '{0}'""".format(imdb_id)
self.pg.pg_cur.execute(sql)
self.pg.pg_conn.commit()
# We also delete any records in errored attached to this imdb_id, as
# we have successfully gathered information for the film.
sql = """delete from kino.errored
where imdb_id = '{0}'""".format(imdb_id)
self.pg.pg_cur.execute(sql)
self.pg.pg_conn.commit()
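        # Main insert: combine the OMDb record (rated) with the TMDb record
        # (title, runtime, release date, plot, original language) on imdb_id.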
sql = """insert into kino.movies (imdb_id, title, runtime, rated, released, orig_language, plot, tstamp)
select x.imdb_id
, y.title
, y.runtime
, x.rated
, y.release_date::date
, z.language
, y.plot
, CURRENT_DATE
from json_to_recordset(%s) x ( imdb_id varchar(15), rated varchar(10) )
join json_to_recordset(%s) y ( imdb_id varchar(15), title varchar(1000), runtime integer
, release_date date, plot varchar(4000), original_language varchar(1000))
on x.imdb_id = y.imdb_id
join kino.iso2language z
on y.original_language = z.iso3166
"""
self.pg.pg_cur.execute(sql, (json.dumps(omdb_movie_data), json.dumps(tmdb_movie_data)))
if self.pg.pg_cur.rowcount != 1:
raise GatherException(omdb_movie_data[0]['imdb_id'], 'No insert into movies, most likely due to a new language')
self.pg.pg_conn.commit()
sql = """insert into kino.kino_ratings (imdb_id, rating) values (%s, 3) on conflict do nothing"""
self.pg.pg_cur.execute(sql, (imdb_id,))
self.pg.pg_conn.commit()
return data
| kinoreel/kino-gather | processes/insert_movies.py | Python | mit | 3,618 |
# mininode.py - Sscoin P2P network half-a-node
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# This python code was modified from ArtForz' public domain half-a-node, as
# found in the mini-node branch of http://github.com/jgarzik/pynode.
#
# NodeConn: an object which manages p2p connectivity to a sscoin node
# NodeConnCB: a base class that describes the interface for receiving
# callbacks with network messages from a NodeConn
# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
# data structures that should map to corresponding structures in
# sscoin/primitives
# msg_block, msg_tx, msg_headers, etc.:
# data structures that represent network messages
# ser_*, deser_*: functions that handle serialization/deserialization
import struct
import socket
import asyncore
import time
import sys
import random
from binascii import hexlify, unhexlify
from io import BytesIO
from codecs import encode
import hashlib
from threading import RLock
from threading import Thread
import logging
import copy
import sscoin_hash
BIP0031_VERSION = 60000
MY_VERSION = 70206 # current MIN_PEER_PROTO_VERSION
MY_SUBVERSION = b"/python-mininode-tester:0.0.2/"
MAX_INV_SZ = 50000
MAX_BLOCK_SIZE = 1000000
COIN = 100000000L # 1 coin in satoshis (base units)
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
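# Strings and vectors are length-prefixed with Bitcoin's CompactSize format:
# one byte for lengths < 253, else a marker byte (253/254/255) followed by a
# 2-, 4- or 8-byte little-endian integer.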
def sha256(s):
return hashlib.new('sha256', s).digest()
def hash256(s):
return sha256(sha256(s))
def sscoinhash(s):
return sscoin_hash.getPoWHash(s)
def deser_string(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
return f.read(nit)
def ser_string(s):
if len(s) < 253:
return struct.pack("B", len(s)) + s
elif len(s) < 0x10000:
return struct.pack("<BH", 253, len(s)) + s
elif len(s) < 0x100000000L:
return struct.pack("<BI", 254, len(s)) + s
return struct.pack("<BQ", 255, len(s)) + s
def deser_uint256(f):
r = 0L
for i in xrange(8):
t = struct.unpack("<I", f.read(4))[0]
r += t << (i * 32)
return r
def ser_uint256(u):
rs = b""
for i in xrange(8):
rs += struct.pack("<I", u & 0xFFFFFFFFL)
u >>= 32
return rs
def uint256_from_str(s):
r = 0L
t = struct.unpack("<IIIIIIII", s[:32])
for i in xrange(8):
r += t[i] << (i * 32)
return r
def uint256_from_compact(c):
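    # nBits "compact" encoding: the top byte is a base-256 exponent and the
    # low three bytes are the mantissa of the 256-bit target.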
nbytes = (c >> 24) & 0xFF
v = (c & 0xFFFFFFL) << (8 * (nbytes - 3))
return v
def deser_vector(f, c):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = c()
t.deserialize(f)
r.append(t)
return r
def ser_vector(l):
r = b""
if len(l) < 253:
r = struct.pack("B", len(l))
elif len(l) < 0x10000:
r = struct.pack("<BH", 253, len(l))
elif len(l) < 0x100000000L:
r = struct.pack("<BI", 254, len(l))
else:
r = struct.pack("<BQ", 255, len(l))
for i in l:
r += i.serialize()
return r
def deser_uint256_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = deser_uint256(f)
r.append(t)
return r
def ser_uint256_vector(l):
r = b""
if len(l) < 253:
r = struct.pack("B", len(l))
elif len(l) < 0x10000:
r = struct.pack("<BH", 253, len(l))
elif len(l) < 0x100000000L:
r = struct.pack("<BI", 254, len(l))
else:
r = struct.pack("<BQ", 255, len(l))
for i in l:
r += ser_uint256(i)
return r
def deser_string_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = deser_string(f)
r.append(t)
return r
def ser_string_vector(l):
r = b""
if len(l) < 253:
r = struct.pack("B", len(l))
elif len(l) < 0x10000:
r = struct.pack("<BH", 253, len(l))
elif len(l) < 0x100000000L:
r = struct.pack("<BI", 254, len(l))
else:
r = struct.pack("<BQ", 255, len(l))
for sv in l:
r += ser_string(sv)
return r
def deser_int_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = struct.unpack("<i", f.read(4))[0]
r.append(t)
return r
def ser_int_vector(l):
r = b""
if len(l) < 253:
r = struct.pack("B", len(l))
elif len(l) < 0x10000:
r = struct.pack("<BH", 253, len(l))
elif len(l) < 0x100000000L:
r = struct.pack("<BI", 254, len(l))
else:
r = struct.pack("<BQ", 255, len(l))
for i in l:
r += struct.pack("<i", i)
return r
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
obj.deserialize(BytesIO(unhexlify(hex_string.encode('ascii'))))
return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
return hexlify(obj.serialize()).decode('ascii')
# Objects that map to sscoind objects, which can be serialized/deserialized
class CAddress(object):
def __init__(self):
self.nServices = 1
self.pchReserved = b"\x00" * 10 + b"\xff" * 2
self.ip = "0.0.0.0"
self.port = 0
def deserialize(self, f):
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.pchReserved = f.read(12)
self.ip = socket.inet_ntoa(f.read(4))
self.port = struct.unpack(">H", f.read(2))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.nServices)
r += self.pchReserved
r += socket.inet_aton(self.ip)
r += struct.pack(">H", self.port)
return r
def __repr__(self):
return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
self.ip, self.port)
class CInv(object):
typemap = {
0: "Error",
1: "TX",
2: "Block"}
def __init__(self, t=0, h=0L):
self.type = t
self.hash = h
def deserialize(self, f):
self.type = struct.unpack("<i", f.read(4))[0]
self.hash = deser_uint256(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.type)
r += ser_uint256(self.hash)
return r
def __repr__(self):
return "CInv(type=%s hash=%064x)" \
% (self.typemap[self.type], self.hash)
class CBlockLocator(object):
def __init__(self):
self.nVersion = MY_VERSION
self.vHave = []
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vHave = deser_uint256_vector(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_uint256_vector(self.vHave)
return r
def __repr__(self):
return "CBlockLocator(nVersion=%i vHave=%s)" \
% (self.nVersion, repr(self.vHave))
class COutPoint(object):
def __init__(self, hash=0, n=0):
self.hash = hash
self.n = n
def deserialize(self, f):
self.hash = deser_uint256(f)
self.n = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += ser_uint256(self.hash)
r += struct.pack("<I", self.n)
return r
def __repr__(self):
return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn(object):
def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
if outpoint is None:
self.prevout = COutPoint()
else:
self.prevout = outpoint
self.scriptSig = scriptSig
self.nSequence = nSequence
def deserialize(self, f):
self.prevout = COutPoint()
self.prevout.deserialize(f)
self.scriptSig = deser_string(f)
self.nSequence = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += self.prevout.serialize()
r += ser_string(self.scriptSig)
r += struct.pack("<I", self.nSequence)
return r
def __repr__(self):
return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
% (repr(self.prevout), hexlify(self.scriptSig),
self.nSequence)
class CTxOut(object):
def __init__(self, nValue=0, scriptPubKey=b""):
self.nValue = nValue
self.scriptPubKey = scriptPubKey
def deserialize(self, f):
self.nValue = struct.unpack("<q", f.read(8))[0]
self.scriptPubKey = deser_string(f)
def serialize(self):
r = b""
r += struct.pack("<q", self.nValue)
r += ser_string(self.scriptPubKey)
return r
def __repr__(self):
return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
% (self.nValue // COIN, self.nValue % COIN,
hexlify(self.scriptPubKey))
class CTransaction(object):
def __init__(self, tx=None):
if tx is None:
self.nVersion = 1
self.vin = []
self.vout = []
self.nLockTime = 0
self.sha256 = None
self.hash = None
else:
self.nVersion = tx.nVersion
self.vin = copy.deepcopy(tx.vin)
self.vout = copy.deepcopy(tx.vout)
self.nLockTime = tx.nLockTime
self.sha256 = None
self.hash = None
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vin = deser_vector(f, CTxIn)
self.vout = deser_vector(f, CTxOut)
self.nLockTime = struct.unpack("<I", f.read(4))[0]
self.sha256 = None
self.hash = None
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
r += struct.pack("<I", self.nLockTime)
return r
def rehash(self):
self.sha256 = None
self.calc_sha256()
def calc_sha256(self):
if self.sha256 is None:
self.sha256 = uint256_from_str(hash256(self.serialize()))
self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii')
def is_valid(self):
self.calc_sha256()
for tout in self.vout:
if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
return False
return True
def __repr__(self):
return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
% (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
class CBlockHeader(object):
def __init__(self, header=None):
if header is None:
self.set_null()
else:
self.nVersion = header.nVersion
self.hashPrevBlock = header.hashPrevBlock
self.hashMerkleRoot = header.hashMerkleRoot
self.nTime = header.nTime
self.nBits = header.nBits
self.nNonce = header.nNonce
self.sha256 = header.sha256
self.hash = header.hash
self.calc_sha256()
def set_null(self):
self.nVersion = 1
self.hashPrevBlock = 0
self.hashMerkleRoot = 0
self.nTime = 0
self.nBits = 0
self.nNonce = 0
self.sha256 = None
self.hash = None
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.hashPrevBlock = deser_uint256(f)
self.hashMerkleRoot = deser_uint256(f)
self.nTime = struct.unpack("<I", f.read(4))[0]
self.nBits = struct.unpack("<I", f.read(4))[0]
self.nNonce = struct.unpack("<I", f.read(4))[0]
self.sha256 = None
self.hash = None
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_uint256(self.hashPrevBlock)
r += ser_uint256(self.hashMerkleRoot)
r += struct.pack("<I", self.nTime)
r += struct.pack("<I", self.nBits)
r += struct.pack("<I", self.nNonce)
return r
def calc_sha256(self):
if self.sha256 is None:
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_uint256(self.hashPrevBlock)
r += ser_uint256(self.hashMerkleRoot)
r += struct.pack("<I", self.nTime)
r += struct.pack("<I", self.nBits)
r += struct.pack("<I", self.nNonce)
self.sha256 = uint256_from_str(sscoinhash(r))
self.hash = encode(sscoinhash(r)[::-1], 'hex_codec').decode('ascii')
def rehash(self):
self.sha256 = None
self.calc_sha256()
return self.sha256
def __repr__(self):
return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
% (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
time.ctime(self.nTime), self.nBits, self.nNonce)
class CBlock(CBlockHeader):
def __init__(self, header=None):
super(CBlock, self).__init__(header)
self.vtx = []
def deserialize(self, f):
super(CBlock, self).deserialize(f)
self.vtx = deser_vector(f, CTransaction)
def serialize(self):
r = b""
r += super(CBlock, self).serialize()
r += ser_vector(self.vtx)
return r
def calc_merkle_root(self):
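        # Hash pairs level by level; an odd number of hashes duplicates the
        # last one (i2 falls back to the final index).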
hashes = []
for tx in self.vtx:
tx.calc_sha256()
hashes.append(ser_uint256(tx.sha256))
while len(hashes) > 1:
newhashes = []
for i in xrange(0, len(hashes), 2):
i2 = min(i+1, len(hashes)-1)
newhashes.append(hash256(hashes[i] + hashes[i2]))
hashes = newhashes
return uint256_from_str(hashes[0])
def is_valid(self):
self.calc_sha256()
target = uint256_from_compact(self.nBits)
if self.sha256 > target:
return False
for tx in self.vtx:
if not tx.is_valid():
return False
if self.calc_merkle_root() != self.hashMerkleRoot:
return False
return True
def solve(self):
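        # Brute-force proof of work: bump nNonce until the header's PoW hash
        # is at or below the compact target.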
self.rehash()
target = uint256_from_compact(self.nBits)
while self.sha256 > target:
self.nNonce += 1
self.rehash()
def __repr__(self):
return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
% (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class CUnsignedAlert(object):
def __init__(self):
self.nVersion = 1
self.nRelayUntil = 0
self.nExpiration = 0
self.nID = 0
self.nCancel = 0
self.setCancel = []
self.nMinVer = 0
self.nMaxVer = 0
self.setSubVer = []
self.nPriority = 0
self.strComment = b""
self.strStatusBar = b""
self.strReserved = b""
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
self.nExpiration = struct.unpack("<q", f.read(8))[0]
self.nID = struct.unpack("<i", f.read(4))[0]
self.nCancel = struct.unpack("<i", f.read(4))[0]
self.setCancel = deser_int_vector(f)
self.nMinVer = struct.unpack("<i", f.read(4))[0]
self.nMaxVer = struct.unpack("<i", f.read(4))[0]
self.setSubVer = deser_string_vector(f)
self.nPriority = struct.unpack("<i", f.read(4))[0]
self.strComment = deser_string(f)
self.strStatusBar = deser_string(f)
self.strReserved = deser_string(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += struct.pack("<q", self.nRelayUntil)
r += struct.pack("<q", self.nExpiration)
r += struct.pack("<i", self.nID)
r += struct.pack("<i", self.nCancel)
r += ser_int_vector(self.setCancel)
r += struct.pack("<i", self.nMinVer)
r += struct.pack("<i", self.nMaxVer)
r += ser_string_vector(self.setSubVer)
r += struct.pack("<i", self.nPriority)
r += ser_string(self.strComment)
r += ser_string(self.strStatusBar)
r += ser_string(self.strReserved)
return r
def __repr__(self):
return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
% (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
self.strComment, self.strStatusBar, self.strReserved)
class CAlert(object):
def __init__(self):
self.vchMsg = b""
self.vchSig = b""
def deserialize(self, f):
self.vchMsg = deser_string(f)
self.vchSig = deser_string(f)
def serialize(self):
r = b""
r += ser_string(self.vchMsg)
r += ser_string(self.vchSig)
return r
def __repr__(self):
return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
% (len(self.vchMsg), len(self.vchSig))
# Objects that correspond to messages on the wire
class msg_version(object):
command = b"version"
def __init__(self):
self.nVersion = MY_VERSION
self.nServices = 1
self.nTime = int(time.time())
self.addrTo = CAddress()
self.addrFrom = CAddress()
self.nNonce = random.getrandbits(64)
self.strSubVer = MY_SUBVERSION
self.nStartingHeight = -1
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
if self.nVersion == 10300:
self.nVersion = 300
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.nTime = struct.unpack("<q", f.read(8))[0]
self.addrTo = CAddress()
self.addrTo.deserialize(f)
if self.nVersion >= 106:
self.addrFrom = CAddress()
self.addrFrom.deserialize(f)
self.nNonce = struct.unpack("<Q", f.read(8))[0]
self.strSubVer = deser_string(f)
if self.nVersion >= 209:
self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
else:
self.nStartingHeight = None
else:
self.addrFrom = None
self.nNonce = None
self.strSubVer = None
self.nStartingHeight = None
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += struct.pack("<Q", self.nServices)
r += struct.pack("<q", self.nTime)
r += self.addrTo.serialize()
r += self.addrFrom.serialize()
r += struct.pack("<Q", self.nNonce)
r += ser_string(self.strSubVer)
r += struct.pack("<i", self.nStartingHeight)
return r
def __repr__(self):
return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)' \
% (self.nVersion, self.nServices, time.ctime(self.nTime),
repr(self.addrTo), repr(self.addrFrom), self.nNonce,
self.strSubVer, self.nStartingHeight)
class msg_verack(object):
command = b"verack"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_verack()"
class msg_addr(object):
command = b"addr"
def __init__(self):
self.addrs = []
def deserialize(self, f):
self.addrs = deser_vector(f, CAddress)
def serialize(self):
return ser_vector(self.addrs)
def __repr__(self):
return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_alert(object):
command = b"alert"
def __init__(self):
self.alert = CAlert()
def deserialize(self, f):
self.alert = CAlert()
self.alert.deserialize(f)
def serialize(self):
r = b""
r += self.alert.serialize()
return r
def __repr__(self):
return "msg_alert(alert=%s)" % (repr(self.alert), )
class msg_inv(object):
command = b"inv"
def __init__(self, inv=None):
if inv is None:
self.inv = []
else:
self.inv = inv
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
def serialize(self):
return ser_vector(self.inv)
def __repr__(self):
return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata(object):
command = b"getdata"
def __init__(self, inv=None):
        self.inv = inv if inv is not None else []
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
def serialize(self):
return ser_vector(self.inv)
def __repr__(self):
return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks(object):
command = b"getblocks"
def __init__(self):
self.locator = CBlockLocator()
self.hashstop = 0L
def deserialize(self, f):
self.locator = CBlockLocator()
self.locator.deserialize(f)
self.hashstop = deser_uint256(f)
def serialize(self):
r = b""
r += self.locator.serialize()
r += ser_uint256(self.hashstop)
return r
def __repr__(self):
return "msg_getblocks(locator=%s hashstop=%064x)" \
% (repr(self.locator), self.hashstop)
class msg_tx(object):
command = b"tx"
    def __init__(self, tx=None):
        # Avoid a mutable default argument: a shared CTransaction instance
        # would be mutated by every msg_tx deserialized without an argument.
        self.tx = tx if tx is not None else CTransaction()
def deserialize(self, f):
self.tx.deserialize(f)
def serialize(self):
return self.tx.serialize()
def __repr__(self):
return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_block(object):
command = b"block"
def __init__(self, block=None):
if block is None:
self.block = CBlock()
else:
self.block = block
def deserialize(self, f):
self.block.deserialize(f)
def serialize(self):
return self.block.serialize()
def __repr__(self):
return "msg_block(block=%s)" % (repr(self.block))
class msg_getaddr(object):
command = b"getaddr"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_getaddr()"
class msg_ping_prebip31(object):
command = b"ping"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_ping() (pre-bip31)"
class msg_ping(object):
command = b"ping"
def __init__(self, nonce=0L):
self.nonce = nonce
def deserialize(self, f):
self.nonce = struct.unpack("<Q", f.read(8))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.nonce)
return r
def __repr__(self):
return "msg_ping(nonce=%08x)" % self.nonce
class msg_pong(object):
command = b"pong"
def __init__(self, nonce=0):
self.nonce = nonce
def deserialize(self, f):
self.nonce = struct.unpack("<Q", f.read(8))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.nonce)
return r
def __repr__(self):
return "msg_pong(nonce=%08x)" % self.nonce
class msg_mempool(object):
command = b"mempool"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_mempool()"
class msg_sendheaders(object):
command = b"sendheaders"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_sendheaders()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders(object):
command = b"getheaders"
def __init__(self):
self.locator = CBlockLocator()
self.hashstop = 0L
def deserialize(self, f):
self.locator = CBlockLocator()
self.locator.deserialize(f)
self.hashstop = deser_uint256(f)
def serialize(self):
r = b""
r += self.locator.serialize()
r += ser_uint256(self.hashstop)
return r
def __repr__(self):
return "msg_getheaders(locator=%s, stop=%064x)" \
% (repr(self.locator), self.hashstop)
# headers message has
# <count> <vector of block headers>
class msg_headers(object):
command = b"headers"
def __init__(self):
self.headers = []
def deserialize(self, f):
# comment in sscoind indicates these should be deserialized as blocks
blocks = deser_vector(f, CBlock)
for x in blocks:
self.headers.append(CBlockHeader(x))
def serialize(self):
blocks = [CBlock(x) for x in self.headers]
return ser_vector(blocks)
def __repr__(self):
return "msg_headers(headers=%s)" % repr(self.headers)
class msg_reject(object):
command = b"reject"
REJECT_MALFORMED = 1
def __init__(self):
self.message = b""
self.code = 0
self.reason = b""
self.data = 0L
def deserialize(self, f):
self.message = deser_string(f)
self.code = struct.unpack("<B", f.read(1))[0]
self.reason = deser_string(f)
if (self.code != self.REJECT_MALFORMED and
(self.message == b"block" or self.message == b"tx")):
self.data = deser_uint256(f)
def serialize(self):
r = ser_string(self.message)
r += struct.pack("<B", self.code)
r += ser_string(self.reason)
if (self.code != self.REJECT_MALFORMED and
(self.message == b"block" or self.message == b"tx")):
r += ser_uint256(self.data)
return r
def __repr__(self):
return "msg_reject: %s %d %s [%064x]" \
% (self.message, self.code, self.reason, self.data)
# Helper function
def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
attempt = 0
elapsed = 0
while attempt < attempts and elapsed < timeout:
with mininode_lock:
if predicate():
return True
attempt += 1
elapsed += 0.05
time.sleep(0.05)
return False
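# Usage sketch for wait_until (the `state` dict below is hypothetical, not
# part of this framework). The predicate is polled roughly every 50ms under
# mininode_lock until it returns True or about `timeout` seconds elapse:
#
#   state = {'done': False}
#   if not wait_until(lambda: state['done'], timeout=10):
#       raise AssertionError("condition was never met")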
# This is what a callback should look like for NodeConn
# Reimplement the on_* functions to provide handling for events
class NodeConnCB(object):
def __init__(self):
self.verack_received = False
# deliver_sleep_time is helpful for debugging race conditions in p2p
# tests; it causes message delivery to sleep for the specified time
# before acquiring the global lock and delivering the next message.
self.deliver_sleep_time = None
def set_deliver_sleep_time(self, value):
with mininode_lock:
self.deliver_sleep_time = value
def get_deliver_sleep_time(self):
with mininode_lock:
return self.deliver_sleep_time
# Spin until verack message is received from the node.
# Tests may want to use this as a signal that the test can begin.
# This can be called from the testing thread, so it needs to acquire the
# global lock.
def wait_for_verack(self):
while True:
with mininode_lock:
if self.verack_received:
return
time.sleep(0.05)
def deliver(self, conn, message):
deliver_sleep = self.get_deliver_sleep_time()
if deliver_sleep is not None:
time.sleep(deliver_sleep)
with mininode_lock:
try:
getattr(self, 'on_' + message.command)(conn, message)
except:
print "ERROR delivering %s (%s)" % (repr(message),
sys.exc_info()[0])
def on_version(self, conn, message):
if message.nVersion >= 209:
conn.send_message(msg_verack())
conn.ver_send = min(MY_VERSION, message.nVersion)
if message.nVersion < 209:
conn.ver_recv = conn.ver_send
def on_verack(self, conn, message):
conn.ver_recv = conn.ver_send
self.verack_received = True
def on_inv(self, conn, message):
want = msg_getdata()
for i in message.inv:
if i.type != 0:
want.inv.append(i)
if len(want.inv):
conn.send_message(want)
def on_addr(self, conn, message): pass
def on_alert(self, conn, message): pass
def on_getdata(self, conn, message): pass
def on_getblocks(self, conn, message): pass
def on_tx(self, conn, message): pass
def on_block(self, conn, message): pass
def on_getaddr(self, conn, message): pass
def on_headers(self, conn, message): pass
def on_getheaders(self, conn, message): pass
def on_ping(self, conn, message):
if conn.ver_send > BIP0031_VERSION:
conn.send_message(msg_pong(message.nonce))
def on_reject(self, conn, message): pass
def on_close(self, conn): pass
def on_mempool(self, conn): pass
def on_pong(self, conn, message): pass
# More useful callbacks and functions for NodeConnCB's which have a single NodeConn
class SingleNodeConnCB(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node
def sync_with_ping(self, timeout=30):
def received_pong():
return (self.last_pong.nonce == self.ping_counter)
self.send_message(msg_ping(nonce=self.ping_counter))
        # Pass timeout by keyword; positionally it would be taken as `attempts`.
        success = wait_until(received_pong, timeout=timeout)
self.ping_counter += 1
return success
# The actual NodeConn class
# This class provides an interface for a p2p connection to a specified node
class NodeConn(asyncore.dispatcher):
messagemap = {
b"version": msg_version,
b"verack": msg_verack,
b"addr": msg_addr,
b"alert": msg_alert,
b"inv": msg_inv,
b"getdata": msg_getdata,
b"getblocks": msg_getblocks,
b"tx": msg_tx,
b"block": msg_block,
b"getaddr": msg_getaddr,
b"ping": msg_ping,
b"pong": msg_pong,
b"headers": msg_headers,
b"getheaders": msg_getheaders,
b"reject": msg_reject,
b"mempool": msg_mempool,
}
MAGIC_BYTES = {
"mainnet": b"\xbf\x0c\x6b\xbd", # mainnet
"testnet3": b"\xce\xe2\xca\xff", # testnet3
"regtest": b"\xfc\xc1\xb7\xdc" # regtest
}
def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=1):
asyncore.dispatcher.__init__(self, map=mininode_socket_map)
self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport))
self.dstaddr = dstaddr
self.dstport = dstport
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.sendbuf = b""
self.recvbuf = b""
self.ver_send = 209
self.ver_recv = 209
self.last_sent = 0
self.state = "connecting"
self.network = net
self.cb = callback
self.disconnect = False
# stuff version msg into sendbuf
vt = msg_version()
vt.nServices = services
vt.addrTo.ip = self.dstaddr
vt.addrTo.port = self.dstport
vt.addrFrom.ip = "0.0.0.0"
vt.addrFrom.port = 0
self.send_message(vt, True)
print 'MiniNode: Connecting to Sscoin Node IP # ' + dstaddr + ':' \
+ str(dstport)
try:
self.connect((dstaddr, dstport))
except:
self.handle_close()
self.rpc = rpc
def show_debug_msg(self, msg):
self.log.debug(msg)
def handle_connect(self):
self.show_debug_msg("MiniNode: Connected & Listening: \n")
self.state = "connected"
def handle_close(self):
self.show_debug_msg("MiniNode: Closing Connection to %s:%d... "
% (self.dstaddr, self.dstport))
self.state = "closed"
self.recvbuf = b""
self.sendbuf = b""
try:
self.close()
except:
pass
self.cb.on_close(self)
def handle_read(self):
try:
t = self.recv(8192)
if len(t) > 0:
self.recvbuf += t
self.got_data()
except:
pass
def readable(self):
return True
def writable(self):
with mininode_lock:
length = len(self.sendbuf)
return (length > 0)
def handle_write(self):
with mininode_lock:
try:
sent = self.send(self.sendbuf)
except:
self.handle_close()
return
self.sendbuf = self.sendbuf[sent:]
def got_data(self):
try:
while True:
if len(self.recvbuf) < 4:
return
if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
raise ValueError("got garbage %s" % repr(self.recvbuf))
if self.ver_recv < 209:
if len(self.recvbuf) < 4 + 12 + 4:
return
command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = None
if len(self.recvbuf) < 4 + 12 + 4 + msglen:
return
msg = self.recvbuf[4+12+4:4+12+4+msglen]
self.recvbuf = self.recvbuf[4+12+4+msglen:]
else:
if len(self.recvbuf) < 4 + 12 + 4 + 4:
return
command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = self.recvbuf[4+12+4:4+12+4+4]
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
return
msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
th = sha256(msg)
h = sha256(th)
if checksum != h[:4]:
raise ValueError("got bad checksum " + repr(self.recvbuf))
self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
if command in self.messagemap:
f = BytesIO(msg)
t = self.messagemap[command]()
t.deserialize(f)
self.got_message(t)
else:
self.show_debug_msg("Unknown command: '" + command + "' " +
repr(msg))
except Exception as e:
print 'got_data:', repr(e)
def send_message(self, message, pushbuf=False):
if self.state != "connected" and not pushbuf:
return
self.show_debug_msg("Send %s" % repr(message))
command = message.command
data = message.serialize()
tmsg = self.MAGIC_BYTES[self.network]
tmsg += command
tmsg += b"\x00" * (12 - len(command))
tmsg += struct.pack("<I", len(data))
if self.ver_send >= 209:
th = sha256(data)
h = sha256(th)
tmsg += h[:4]
tmsg += data
with mininode_lock:
self.sendbuf += tmsg
self.last_sent = time.time()
def got_message(self, message):
if message.command == b"version":
if message.nVersion <= BIP0031_VERSION:
self.messagemap[b'ping'] = msg_ping_prebip31
if self.last_sent + 30 * 60 < time.time():
self.send_message(self.messagemap[b'ping']())
self.show_debug_msg("Recv %s" % repr(message))
self.cb.deliver(self, message)
def disconnect_node(self):
self.disconnect = True
class NetworkThread(Thread):
def run(self):
while mininode_socket_map:
            # We check for whether to disconnect outside of the asyncore
            # loop to work around the behavior of asyncore when using
            # select
disconnected = []
for fd, obj in mininode_socket_map.items():
if obj.disconnect:
disconnected.append(obj)
            for obj in disconnected:
                obj.handle_close()
asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
# An exception we can raise if we detect a potential disconnect
# (p2p or rpc) before the test is complete
class EarlyDisconnectError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
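# Typical wiring sketch for this framework (the address, port and `rpc`
# proxy are placeholders supplied by the calling test):
#
#   callback = SingleNodeConnCB()
#   conn = NodeConn('127.0.0.1', 18444, rpc, callback)
#   callback.add_connection(conn)
#   NetworkThread().start()
#   callback.wait_for_verack()
#   callback.sync_with_ping()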
| rdqw/sscoin | qa/rpc-tests/test_framework/mininode.py | Python | mit | 39,022 |
import urllib.parse
import urllib.request
import json
import logging
import requests
log = logging.getLogger('tyggbot')
class APIBase:
@staticmethod
def _get(url, headers={}):
try:
req = urllib.request.Request(url, None, headers)
response = urllib.request.urlopen(req)
        except Exception:
            return None
try:
return response.read().decode('utf-8')
except Exception as e:
log.error(e)
return None
@staticmethod
def _get_json(url, headers={}):
try:
data = APIBase._get(url, headers)
if data:
return json.loads(data)
else:
return data
except Exception:
log.exception('Caught exception while trying to parse json data.')
return None
def get_url(self, endpoints=[], parameters={}):
return self.base_url + '/'.join(endpoints) + ('' if len(parameters) == 0 else '?' + urllib.parse.urlencode(parameters))
def getraw(self, endpoints=[], parameters={}):
return APIBase._get(self.get_url(endpoints, parameters), self.headers)
def get(self, endpoints, parameters={}):
try:
data = self.getraw(endpoints, parameters)
if data:
return json.loads(data)
else:
return data
except Exception as e:
log.error(e)
return None
def post(self, endpoints=[], parameters={}, data={}):
try:
req = urllib.request.Request(self.get_url(endpoints, parameters), urllib.parse.urlencode(data).encode('utf-8'), self.headers)
response = urllib.request.urlopen(req)
except Exception as e:
log.error(e)
return None
try:
return response.read().decode('utf-8')
except Exception as e:
log.error(e)
return None
class ChatDepotAPI(APIBase):
def __init__(self):
APIBase.__init__(self)
self.base_url = 'http://chatdepot.twitch.tv/'
self.headers = {
'Accept': 'application/vnd.twitchtv.v3+json'
}
class ImraisingAPI(APIBase):
def __init__(self, apikey):
APIBase.__init__(self)
self.base_url = 'https://imraising.tv/api/v1/'
self.headers = {
'Authorization': 'APIKey apikey="{0}"'.format(apikey),
'Content-Type': 'application/json',
}
class StreamtipAPI(APIBase):
def __init__(self, client_id, access_token):
APIBase.__init__(self)
self.base_url = 'https://streamtip.com/api/'
self.headers = {
'Authorization': client_id + ' ' + access_token,
}
class TwitchAPI(APIBase):
def __init__(self, client_id=None, oauth=None, type='kraken'):
APIBase.__init__(self)
self.base_url = 'https://api.twitch.tv/{0}/'.format(type)
self.headers = {
'Accept': 'application/vnd.twitchtv.v3+json',
}
if client_id:
self.headers['Client-ID'] = client_id
if oauth:
self.headers['Authorization'] = 'OAuth ' + oauth
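# Usage sketch (the client id and endpoint segments are placeholders, shown
# only to illustrate get()/get_url(); get() returns None on any failure):
#
#   api = TwitchAPI(client_id='my_client_id')
#   data = api.get(['channels', 'some_channel'])
#   if data is None:
#       log.error('twitch request failed')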
class SafeBrowsingAPI:
def __init__(self, apikey, appname, appvers):
self.apikey = apikey
self.appname = appname
self.appvers = appvers
return
def check_url(self, url):
base_url = 'https://sb-ssl.google.com/safebrowsing/api/lookup?client=' + self.appname + '&key=' + self.apikey + '&appver=' + self.appvers + '&pver=3.1&url='
url2 = base_url + urllib.parse.quote(url, '')
r = requests.get(url2)
if r.status_code == 200:
return True # malware or phishing
return False # some handling of error codes should be added, they're just ignored for now
| 0rmi/tyggbot | apiwrappers.py | Python | mit | 3,975 |
'''
Created on Dec 13, 2015
@author: Shannon Litwin
'''
import Adafruit_BBIO.GPIO as GPIO
import Adafruit_BBIO.PWM as PWM
import Lib_LCD as LCD
import Lib_Main as BBB
import sys
import signal
import time
leftForward = "P8_46"
leftBackward = "P8_45"
rightForward = "P9_14"
rightBackward = "P9_16"
def Control_C_Exit(signal, frame):
GPIO.cleanup()
PWM.cleanup()
print("\nProgram halted! Exiting program!")
sys.exit()
signal.signal(signal.SIGINT, Control_C_Exit) # For cleaning up mid run
'''Keep to show Dr. Berry'''
LCD.init()
time.sleep(1)
LCD.backlight("on")
time.sleep(2)
LCD.backlight("off")
time.sleep(1)
line_message = "Hi Dr. Berry."
LCD.write_line(line_message)
time.sleep(5)
LCD.cursor_home()
long_message = "This is 35 chars and needs 2 lines."
LCD.write_screen(long_message)
time.sleep(5)
LCD.cursor_home()
long_message = "Which is fine because the screen can hold up to 80 characters."
LCD.write_screen(long_message)
time.sleep(5)
LCD.cursor_home()
long_message = "However, if the message is too long it will truncate. That is why you cannot read this entire message."
LCD.write_screen(long_message)
time.sleep(5)
LCD.clear()
m1 = "It works 1"
m2 = "It works 2"
m3 = "It works 3"
m4 = "It works 4"
time.sleep(1)
LCD.goto_line(4)
LCD.write_line(m4)
time.sleep(1)
LCD.goto_line(3)
LCD.write_line(m3)
time.sleep(1)
LCD.goto_line(2)
LCD.write_line(m2)
time.sleep(1)
LCD.goto_line(1)
LCD.write_line(m1)
LCD.clear()
#pause with while loop example
#start = time.time()
#end = time.time()
#while((end - start) < 3):
# end = time.time()
BBB.cleanup_all()
| ValRose/Rose_Bone | PythonLibraries/lcd_demo.py | Python | mit | 1,617 |
"""
Custom managers for Django models registered with the tagging
application.
"""
from django.contrib.contenttypes.models import ContentType
from django.db import models
class ModelTagManager(models.Manager):
"""
A manager for retrieving tags for a particular model.
"""
def __init__(self, tag_model):
super(ModelTagManager, self).__init__()
self.tag_model = tag_model
def get_query_set(self):
content_type = ContentType.objects.get_for_model(self.model)
return self.tag_model.objects.filter(
items__content_type__pk=content_type.pk).distinct()
def related(self, tags, *args, **kwargs):
return self.tag_model.objects.related_for_model(tags, self.model, *args, **kwargs)
def usage(self, *args, **kwargs):
return self.tag_model.objects.usage_for_model(self.model, *args, **kwargs)
class ModelTaggedItemManager(models.Manager):
"""
A manager for retrieving model instances based on their tags.
"""
def __init__(self, tag_model):
super(ModelTaggedItemManager, self).__init__()
self.intermediary_table_model = tag_model.objects.intermediary_table_model
def related_to(self, obj, queryset=None, num=None):
if queryset is None:
return self.intermediary_table_model.objects.get_related(obj, self.model, num=num)
else:
return self.intermediary_table_model.objects.get_related(obj, queryset, num=num)
def with_all(self, tags, queryset=None):
if queryset is None:
return self.intermediary_table_model.objects.get_by_model(self.model, tags)
else:
return self.intermediary_table_model.objects.get_by_model(queryset, tags)
def with_any(self, tags, queryset=None):
if queryset is None:
return self.intermediary_table_model.objects.get_union_by_model(self.model, tags)
else:
return self.intermediary_table_model.objects.get_union_by_model(queryset, tags)
class TagDescriptor(object):
"""
A descriptor which provides access to a ``ModelTagManager`` for
model classes and simple retrieval, updating and deletion of tags
for model instances.
"""
def __init__(self, tag_model):
self.tag_model = tag_model
def __get__(self, instance, owner):
if not instance:
tag_manager = ModelTagManager(self.tag_model)
tag_manager.model = owner
return tag_manager
else:
return self.tag_model.objects.get_for_object(instance)
def __set__(self, instance, value):
self.tag_model.objects.update_tags(instance, value)
    def __delete__(self, instance):
        # The descriptor protocol hook for ``del obj.attr`` is __delete__,
        # not __del__ (the finalizer); using __del__ here was a bug.
        self.tag_model.objects.update_tags(instance, [])
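# Usage sketch (assumes a concrete `Tag` model wired up by this app and a
# hypothetical `Article` model; shown to illustrate the descriptor protocol):
#
#   class Article(models.Model):
#       tags = TagDescriptor(Tag)
#
#   Article.tags             # -> ModelTagManager bound to Article
#   article.tags             # -> tags of this instance
#   article.tags = 'a b'     # triggers update_tags()
#   del article.tags         # clears all tags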
| mstepniowski/django-newtagging | newtagging/managers.py | Python | mit | 2,765 |
import parsers
import tokenizer
import context
html_escape_table = {
    "&": "&amp;",
    '"': "&quot;",
    "'": "&#39;",
    ">": "&gt;",
    "<": "&lt;",
    }
def html_escape(text):
"""Produce entities within text."""
return "".join(html_escape_table.get(c,c) for c in unicode(text))
class Tag(object):
def __init__(self, args):
self.args = args
def render(self, context):
return ''
class PairedTag(Tag):
def __init__(self, args):
self.children = []
super(PairedTag, self).__init__(args)
def render(self, context):
char_buffer = ''
for child in self.children:
char_buffer += unicode(child.render(context))
return char_buffer
class SingleLineTag(Tag):
pass
class TemplateContentTag(PairedTag):
pass
class LiteralContent(Tag):
def __init__(self, content):
self.content = content
def render(self, context):
return unicode(self.content)
#tags should support expressions, like #index+1
class EscapedContentTag(Tag):
def render(self, context):
ct = tokenizer.ExpressionTokenizer()
parser = parsers.TopDownParser(ct.yield_tokens(' '.join(self.args)))
return html_escape(parser.parse().eval(context))
#tags should support expressions, like index+1
class UnescapedContentTag(Tag):
def render(self, context):
ct = tokenizer.ExpressionTokenizer()
parser = parsers.TopDownParser(ct.yield_tokens(' '.join(self.args)))
return unicode(parser.parse().eval(context))
class CommentTag(Tag):
def render(self, context):
return ''
class IfTag(PairedTag):
closing_literal = 'if'
def render(self, context):
#if tag can have an else tag too, so we need to first check for that.
#this is a stack of groups to evaluate in order
expression_groups = []
current_group = []
current_group_conditional = self
for child in self.children:
if type(child) == ElseTag:
expression_groups.append((current_group_conditional, current_group))
current_group_conditional = child
current_group = []
else:
current_group.append(child)
expression_groups.append((current_group_conditional, current_group))
retval = ''
for conditional, tag_group in expression_groups:
ct = tokenizer.ExpressionTokenizer()
parser = parsers.TopDownParser(ct.yield_tokens(' '.join(conditional.args)))
if len(parser.tokens):
if parser.parse().eval(context):
for tag in tag_group:
retval += unicode(tag.render(context))
break
else:
for tag in tag_group:
retval += unicode(tag.render(context))
break
return retval
class ElseTag(Tag):
def render(self, context):
raise Exception("Cannot call render directly on else tag")
class ForTag(PairedTag):
closing_literal = 'for'
def render(self, var_context):
        if len(self.args) != 3:
raise Exception('The for tag takes exactly three arguments following the pattern instance_var in iterable')
for_child_tags = []
else_child_tags = []
in_else_tag = False
for child in self.children:
if in_else_tag:
else_child_tags.append(child)
else:
for_child_tags.append(child)
if type(child) == ElseTag:
in_else_tag = True
class_var = self.args[0]
iterable = var_context.eval(self.args[2])
retval = ''
cnt = 0
if iterable and len(iterable):
for item in iterable:
#add the current class var in the context dictionary for all children. it could
#overlay something already existing, but that's fine.
local_context = context.ContextWrap(var_context.context, { class_var: item, '#index' : cnt })
cnt+=1
for child in for_child_tags:
retval += child.render(local_context)
else:
for child in else_child_tags:
retval += child.render(var_context)
return retval
class VerbatimTag(PairedTag):
closing_literal = 'verbatim'
TagMap = {
'render' : EscapedContentTag,
':' : EscapedContentTag,
'>' : UnescapedContentTag,
'#' : CommentTag,
'if' : IfTag,
'else' : ElseTag,
'elif' : ElseTag,
'verbatim' : VerbatimTag,
'for' : ForTag,
}
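# Dispatch sketch (the argument list and `ctx` are hypothetical; they follow
# the pattern used by the render() methods above):
#
#   tag = TagMap[':'](['user.name'])                  # EscapedContentTag
#   safe_html = tag.render(ctx)                       # value is HTML-escaped
#   raw_html = TagMap['>'](['user.bio']).render(ctx)  # unescaped output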
| ceko/cekolabs_empanada | cekolabs_empanada/tags.py | Python | mit | 4,928 |
import os
import sys
import json
import re
from optional_django import staticfiles
from optional_django.serializers import JSONEncoder
from optional_django.safestring import mark_safe
from optional_django import six
from js_host.function import Function
from js_host.exceptions import FunctionError
from react.render import RenderedComponent
from react.exceptions import ComponentSourceFileNotFound
from react.exceptions import ReactRenderingError
from react_router.conf import settings
from react_router.templates import MOUNT_JS
from react_router.bundle import bundle_component
from webpack.compiler import WebpackBundle
class RouteRenderedComponent(RenderedComponent):
def get_client_asset(self):
client_asset = None
bundled_component = self.get_bundle()
assets = bundled_component.get_assets()
for asset in assets:
if asset['path'] == self.path_to_source:
client_asset = asset
break
return client_asset
def get_var(self):
client_asset = self.get_client_asset()
if client_asset:
return 'client'
raise Exception("Client asset not found.")
def render_js(self):
client_asset = self.get_client_asset()
if client_asset:
client_bundle = mark_safe(WebpackBundle.render_tag(client_asset['url']))
return mark_safe(
'\n{bundle}\n<script>\n{mount_js}\n</script>\n'.format(
bundle=client_bundle,
mount_js=self.render_mount_js(),
)
)
def render_mount_js(self):
return mark_safe(
MOUNT_JS.format(
var=self.get_var(),
props=self.serialized_props or 'null',
container_id=self.get_container_id()
)
)
class RouteRedirect(object):
    def __init__(self, pathname, query=None, state=None, *args, **kwargs):
self.path = pathname
self.query = query
if state and 'nextPathname' in state:
self.nextPath = state['nextPathname']
else:
self.nextPath = None
if self.path is None:
raise ReactRenderingError("No path returned for redirection.")
super(RouteRedirect, self).__init__(*args, **kwargs)
@property
def url(self):
if self.query:
return "%s?next=%s&%s" % (self.path, self.nextPath, self.query)
else:
return "%s?next=%s" % (self.path, self.nextPath)
class RouteNotFound(object):
def __init__(self, *args, **kwargs):
super(RouteNotFound, self).__init__(*args, **kwargs)
js_host_function = Function(settings.JS_HOST_FUNCTION)
def render_route(
# Rendering options
path, # path to routes file
client_path, # path to client routes file
request, # pass in request object
props=None,
to_static_markup=None,
# Bundling options
bundle=None,
translate=None,
# Prop handling
json_encoder=None
):
if not os.path.isabs(path):
abs_path = staticfiles.find(path)
if not abs_path:
raise ComponentSourceFileNotFound(path)
path = abs_path
if not os.path.exists(path):
raise ComponentSourceFileNotFound(path)
if not os.path.isabs(client_path):
abs_client_path = staticfiles.find(client_path)
if not abs_client_path:
raise ComponentSourceFileNotFound(client_path)
client_path = abs_client_path
if not os.path.exists(client_path):
raise ComponentSourceFileNotFound(client_path)
bundled_component = None
    # `re` is imported at module level; escape the dot so ".js" matches
    # literally instead of any character.
    client_re = re.compile(r"client-(?:\w*\d*)\.js", re.IGNORECASE)
    server_re = re.compile(r"server-(?:\w*\d*)\.js", re.IGNORECASE)
if bundle or translate:
bundled_component = bundle_component(path, client_path, translate=translate)
assets = bundled_component.get_assets()
for asset in assets:
m = client_re.search(asset['name'])
if m:
client_path = asset['path']
m = server_re.search(asset['name'])
if m:
path = asset['path']
if json_encoder is None:
json_encoder = JSONEncoder
if props is not None:
serialized_props = json.dumps(props, cls=json_encoder)
else:
serialized_props = None
try:
location = {
'pathname': request.path,
'query': request.GET.dict()
}
cbData = json.loads(js_host_function.call(
path=path,
location=location,
serializedProps=serialized_props,
toStaticMarkup=to_static_markup
))
except FunctionError as e:
raise six.reraise(ReactRenderingError, ReactRenderingError(*e.args), sys.exc_info()[2])
if cbData['match']:
return RouteRenderedComponent(cbData['markup'], client_path, props, serialized_props, bundled_component, to_static_markup)
else:
if cbData['redirectInfo']:
return RouteRedirect(**cbData['redirectInfo'])
else:
return RouteNotFound()
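# View-level sketch (paths, template and props are placeholders; assumes a
# Django request object and the usual shortcuts/Http404 imports):
#
#   def catch_all(request):
#       rendered = render_route('js/routes.js', 'js/client-routes.js',
#                               request, props={}, bundle=True)
#       if isinstance(rendered, RouteRedirect):
#           return redirect(rendered.url)
#       if isinstance(rendered, RouteNotFound):
#           raise Http404
#       return render(request, 'index.html', {'component': rendered})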
| HorizonXP/python-react-router | react_router/render.py | Python | mit | 5,105 |
# Copyright (c) 2019-2020 Manfred Moitzi
# License: MIT License
import pytest
import math
import pickle
# Import from 'ezdxf.math._vector' to test Python implementation
from ezdxf.math._vector import Vec2, Vec3
from ezdxf.acc import USE_C_EXT
all_vec_classes = [Vec2, Vec3]
vec2_only = [Vec2]
if USE_C_EXT:
from ezdxf.acc.vector import Vec2 as CVec2
all_vec_classes.append(CVec2)
vec2_only.append(CVec2)
# Vec2 is a sub set of Vec3, Vec3 can do everything Vec2 can do, but not every
# operation has the same result for 2D and 3D.
@pytest.fixture(params=all_vec_classes)
def vcls(request):
return request.param
@pytest.fixture(params=vec2_only)
def vec2(request):
return request.param
def test_init_tuple(vcls):
v = vcls((2, 3))
assert v.x == 2
assert v.y == 3
def test_empty_init(vcls):
v = vcls()
assert v.x == 0.
assert v.y == 0.
def test_init_vec2(vcls):
v = Vec2(vcls(2, 3))
assert v.x == 2
assert v.y == 3
def test_compatible_to_vector():
v = Vec3(Vec2(1, 2))
assert v == (1, 2, 0)
v = Vec2(Vec3(1, 2, 3))
assert v.x == 1
assert v.y == 2
def test_vec3(vec2):
v = vec2(1, 2)
assert len(v) == 2
v3 = v.vec3
assert len(v3) == 3
assert v3 == (1, 2, 0)
def test_round(vec2):
v = vec2(1.123, 2.123)
v2 = v.round(1)
assert v2 == (1.1, 2.1)
def test_from_angle(vcls):
angle = math.radians(50)
length = 3.0
assert vcls.from_angle(angle, length) == vcls(
(math.cos(angle) * length, math.sin(angle) * length)
)
def test_vec2_as_tuple(vec2):
v = vec2(1, 2)
assert v[0] == 1
assert v[1] == 2
with pytest.raises(IndexError):
_ = v[2]
# negative indices not supported
with pytest.raises(IndexError):
_ = v[-1]
def test_iter(vcls):
assert sum(vcls(1, 2)) == 3
def test_deep_copy():
import copy
v = Vec2(1, 2)
l1 = [v, v, v]
l2 = copy.copy(l1)
assert l2[0] is l2[1]
assert l2[1] is l2[2]
assert l2[0] is v
# Vec3, CVec2 and CVec3 are immutable and do not create copies of itself!
l3 = copy.deepcopy(l1)
assert l3[0] is l3[1]
assert l3[1] is l3[2]
assert l3[0] is not v
def test_get_angle(vcls):
v = vcls(3, 3)
assert math.isclose(v.angle_deg, 45)
assert math.isclose(v.angle, math.radians(45))
def test_compare_vectors(vcls):
v1 = vcls(1, 2)
assert v1 == v1
v2 = vcls(2, 3)
assert v2 > v1
assert v1 < v2
def test_is_close(vcls):
v1 = vcls(421846.9857097387, -36908.41493252139)
v2 = vcls(421846.9857097387, -36908.41493252141)
assert v1.isclose(v2) is True
def test_is_null(vcls):
v = vcls(0, 0)
assert v.is_null is True
v1 = vcls(23.56678, 56678.56778) * (1.0 / 14.5667)
v2 = vcls(23.56678, 56678.56778) / 14.5667
assert (v2 - v1).is_null
def test_is_not_null_default_abs_tol(vcls):
assert vcls(1e-11, 0).is_null is False
def test_is_null_default_abs_tol(vcls):
assert vcls(1e-12, 0).is_null is True
def test_bool(vcls):
v = vcls((0, 0))
assert bool(v) is False
v1 = vcls(23.56678, 56678.56778) * (1.0 / 14.5667)
v2 = vcls(23.56678, 56678.56778) / 14.5667
result = v2 - v1
assert bool(result) is False
# current rel_tol=1e-9
assert not vcls(1e-8, 0).is_null
def test_magnitude(vcls):
v = vcls(3, 4)
assert math.isclose(abs(v), 5)
assert math.isclose(v.magnitude, 5)
def test_normalize(vcls):
v = vcls(2, 0)
assert v.normalize() == (1, 0)
def test_normalize_to_length(vcls):
v = vcls(2, 0)
assert v.normalize(4) == (4, 0)
def test_orthogonal_ccw(vcls):
v = vcls(3, 4)
assert v.orthogonal() == (-4, 3)
def test_orthogonal_cw(vcls):
v = vcls(3, 4)
assert v.orthogonal(False) == (4, -3)
def test_negative(vcls):
v = vcls(2, 3)
assert -v == (-2, -3)
def test_add_vector(vcls):
assert vcls(2, 3) + vcls(7, 7) == (9, 10)
def test_add_vec3(vec2):
assert vec2(2, 3) + Vec3(7, 7) == (9, 10)
def test_iadd_vector(vec2):
v = Vec2(2, 3)
v += Vec2(7, 7)
assert v == (9, 10)
def test_add_scalar_type_error(vcls):
with pytest.raises(TypeError):
vcls(1, 1) + 1
def test_iadd_scalar_type_error(vcls):
v = vcls(2, 3)
with pytest.raises(TypeError):
v += 1
def test_radd_scalar_type_error(vcls):
with pytest.raises(TypeError):
1 + vcls(1, 1)
def test_radd_tuple_type_error(vec2):
with pytest.raises(TypeError):
(1, 1) + vec2(1, 1)
def test_sub_vector(vcls):
assert vcls(2, 3) - vcls(7, 7) == (-5, -4)
def test_isub_vector(vec2):
v = Vec2(2, 3)
v -= Vec2(7, 7)
assert v == (-5, -4)
def test_sub_vec3(vec2):
assert vec2(2, 3) - Vec3(7, 7) == (-5, -4)
def test_sub_scalar_type_error(vcls):
with pytest.raises(TypeError):
vcls(1, 1) - 1
def test_isub_scalar_type_error(vcls):
v = vcls(2, 3)
with pytest.raises(TypeError):
v -= 1
def test_rsub_tuple(vec2):
with pytest.raises(TypeError):
(2, 3) - vec2(7, 7)
def test_rsub_scalar_type_error(vcls):
with pytest.raises(TypeError):
1 - vcls(1, 1)
def test_mul_scalar(vcls):
v = vcls(2, 3)
assert v * 2 == (4, 6)
def test_imul_scalar(vcls):
v = vcls(2, 3)
v *= 2
assert v == (4, 6)
def test_rmul_scalar(vcls):
assert 2 * vcls(2, 3) == (4, 6)
def test_mul_tuple_type_error(vcls):
with pytest.raises(TypeError):
vcls(2, 3) * (2, 2)
def test_rmul_tuple_type_error(vcls):
with pytest.raises(TypeError):
(2, 2) * vcls(2, 3)
def test_imul_tuple_type_error(vcls):
v = vcls(2, 3)
with pytest.raises(TypeError):
v *= (2, 2)
def test_div_scalar(vcls):
v = vcls(2, 3)
assert v / 2 == (1, 1.5)
def test_idiv_scalar(vcls):
v = vcls(2, 3)
v /= 2
assert v == (1, 1.5)
def test_dot_product(vcls):
v1 = vcls(2, 7)
v2 = vcls(3, 9)
assert math.isclose(v1.dot(v2), 69)
def test_angle_deg(vcls):
assert math.isclose(vcls((0, 1)).angle_deg, 90)
assert math.isclose(vcls((0, -1)).angle_deg, -90)
assert math.isclose(vcls((1, 1)).angle_deg, 45)
assert math.isclose(vcls((-1, 1)).angle_deg, 135)
def test_angle_between(vcls):
v1 = vcls(0, 1)
v2 = vcls(1, 1)
angle = v1.angle_between(v2)
assert math.isclose(angle, math.pi / 4)
# reverse order, same result
angle = v2.angle_between(v1)
assert math.isclose(angle, math.pi / 4)
@pytest.mark.parametrize(
"v1, v2",
[
[(1, 0), (0, 0)],
[(0, 0), (1, 0)],
[(0, 0), (0, 0)],
],
)
def test_angle_between_null_vector(vcls, v1, v2):
with pytest.raises(ZeroDivisionError):
vcls(v1).angle_between(vcls(v2))
def test_angle_between_outside_domain():
v1 = Vec3(721.046967113573, 721.0469671135688, 0.0)
v2 = Vec3(-721.0469671135725, -721.0469671135688, 0.0)
angle = v1.angle_between(v2)
assert math.isclose(angle, math.pi)
# reverse order, same result
angle = v2.angle_between(v1)
assert math.isclose(angle, math.pi)
def test_rotate(vcls):
assert vcls(2, 2).rotate_deg(90).isclose(vcls(-2, 2))
def test_lerp(vcls):
v1 = vcls(1, 1)
v2 = vcls(4, 4)
assert v1.lerp(v2, 0.5) == (2.5, 2.5)
assert v1.lerp(v2, 0) == (1, 1)
assert v1.lerp(v2, 1) == (4, 4)
def test_project(vcls):
v = vcls(10, 0)
assert v.project(vcls(5, 0)) == (5, 0)
assert v.project(vcls(5, 5)) == (5, 0)
assert v.project(vcls(5, 5)) == (5, 0)
v = vcls(10, 10)
assert v.project(vcls(10, 0)).isclose(vcls(5, 5))
def test_det(vec2):
assert vec2(1, 0).det(vec2(0, 1)) == 1
assert vec2(0, 1).det(vec2(1, 0)) == -1
def test_sum(vcls):
assert vcls.sum([]).is_null is True
assert vcls.sum([vcls(1, 1)]) == (1, 1)
assert vcls.sum([vcls(1, 1), vcls(2, 2)]) == (3, 3)
def test_picklable(vec2):
for v in [vec2((1, 2.5)), vec2(1, 2.5)]:
pickled_v = pickle.loads(pickle.dumps(v))
assert v == pickled_v
assert type(v) is type(pickled_v)
| mozman/ezdxf | tests/test_06_math/test_603_vec2.py | Python | mit | 8,066 |
from django.shortcuts import render
from .models import Course, Student, StudentCourse
from .serializers import CourseSerializer, StudentSerialiser
from rest_framework import viewsets
from rest_framework.decorators import detail_route, list_route
from rest_framework.response import Response
class StudentViewSet(viewsets.ModelViewSet):
queryset = Student.objects.all()
serializer_class = StudentSerialiser
@list_route(methods=['GET'])
def make(self, request):
username = request.GET.get('username', None)
if username:
Student.objects.get_or_create(nickname=username)
return Response({'success': True})
class CourseViewSet(viewsets.ModelViewSet):
queryset = Course.objects.all()
serializer_class = CourseSerializer
def get_queryset(self):
result = super(CourseViewSet, self).get_queryset()
username = self.request.GET.get('username', None)
active = self.request.GET.get('active', None)
if not username or active != '1':
return result
user = Student.objects.get(nickname=username)
courses_ids = StudentCourse.objects.filter(student=user, active=True).values_list('course_id', flat=True)
return result.filter(id__in=courses_ids)
@detail_route(methods=['GET'])
def start(self, request, pk=None):
username = request.GET.get('username', None)
user = Student.objects.get(nickname=username)
course = Course.objects.get(id=pk)
student_course, created = StudentCourse.objects.get_or_create(student=user, course=course)
StudentCourse.objects.filter(student=user).update(active=False)
student_course.active = True
student_course.save()
return Response({'success': True})
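    # Request sketch (assumes the default DRF router exposes this viewset at
    # /courses/ and that the student exists, e.g. via /students/make/):
    #
    #   GET /courses/5/start/?username=alice
    #   -> deactivates alice's other courses and activates course 5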
| Likegram/study_run | server/app/views.py | Python | mit | 1,803 |
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Deuscoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test timestampindex generation and fetching
#
import time
from test_framework.test_framework import DeuscoinTestFramework
from test_framework.util import *
class TimestampIndexTest(DeuscoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 4)
def setup_network(self):
self.nodes = []
# Nodes 0/1 are "wallet" nodes
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-timestampindex"]))
# Nodes 2/3 are used for testing
self.nodes.append(start_node(2, self.options.tmpdir, ["-debug"]))
self.nodes.append(start_node(3, self.options.tmpdir, ["-debug", "-timestampindex"]))
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[0], 3)
self.is_network_split = False
self.sync_all()
def run_test(self):
print "Mining 25 blocks..."
blockhashes = self.nodes[0].generate(25)
time.sleep(3)
print "Mining 25 blocks..."
blockhashes.extend(self.nodes[0].generate(25))
time.sleep(3)
print "Mining 25 blocks..."
blockhashes.extend(self.nodes[0].generate(25))
self.sync_all()
low = self.nodes[1].getblock(blockhashes[0])["time"]
high = low + 76
print "Checking timestamp index..."
hashes = self.nodes[1].getblockhashes(high, low)
assert_equal(len(hashes), len(blockhashes))
assert_equal(hashes, blockhashes)
print "Passed\n"
if __name__ == '__main__':
TimestampIndexTest().main()
| deuscoin-org/deuscoin-core | qa/rpc-tests/timestampindex.py | Python | mit | 1,959 |
#!/usr/bin/env python
# coding=utf-8
"""
A simple example demonstrating the various ways to call cmd2.Cmd.read_input() for input history and tab completion
"""
from typing import (
List,
)
import cmd2
EXAMPLE_COMMANDS = "Example Commands"
class ReadInputApp(cmd2.Cmd):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.prompt = "\n" + self.prompt
self.custom_history = ['history 1', 'history 2']
@cmd2.with_category(EXAMPLE_COMMANDS)
def do_basic(self, _) -> None:
"""Call read_input with no history or tab completion"""
self.poutput("Tab completion and up-arrow history is off")
try:
self.read_input("> ")
except EOFError:
pass
@cmd2.with_category(EXAMPLE_COMMANDS)
def do_basic_with_history(self, _) -> None:
"""Call read_input with custom history and no tab completion"""
self.poutput("Tab completion is off but using custom history")
try:
input_str = self.read_input("> ", history=self.custom_history)
except EOFError:
pass
else:
self.custom_history.append(input_str)
@cmd2.with_category(EXAMPLE_COMMANDS)
def do_commands(self, _) -> None:
"""Call read_input the same way cmd2 prompt does to read commands"""
self.poutput("Tab completing and up-arrow history configured for commands")
try:
self.read_input("> ", completion_mode=cmd2.CompletionMode.COMMANDS)
except EOFError:
pass
@cmd2.with_category(EXAMPLE_COMMANDS)
def do_custom_choices(self, _) -> None:
"""Call read_input to use custom history and choices"""
self.poutput("Tab completing with static choices list and using custom history")
try:
input_str = self.read_input(
"> ",
history=self.custom_history,
completion_mode=cmd2.CompletionMode.CUSTOM,
choices=['choice_1', 'choice_2', 'choice_3'],
)
except EOFError:
pass
else:
self.custom_history.append(input_str)
# noinspection PyMethodMayBeStatic
def choices_provider(self) -> List[str]:
"""Example choices provider function"""
return ["from_provider_1", "from_provider_2", "from_provider_3"]
@cmd2.with_category(EXAMPLE_COMMANDS)
def do_custom_choices_provider(self, _) -> None:
"""Call read_input to use custom history and choices provider function"""
self.poutput("Tab completing with choices from provider function and using custom history")
try:
input_str = self.read_input(
"> ",
history=self.custom_history,
completion_mode=cmd2.CompletionMode.CUSTOM,
choices_provider=ReadInputApp.choices_provider,
)
except EOFError:
pass
else:
self.custom_history.append(input_str)
@cmd2.with_category(EXAMPLE_COMMANDS)
def do_custom_completer(self, _) -> None:
"""Call read_input to use custom history and completer function"""
self.poutput("Tab completing paths and using custom history")
try:
input_str = self.read_input(
"> ", history=self.custom_history, completion_mode=cmd2.CompletionMode.CUSTOM, completer=cmd2.Cmd.path_complete
)
self.custom_history.append(input_str)
except EOFError:
pass
@cmd2.with_category(EXAMPLE_COMMANDS)
def do_custom_parser(self, _) -> None:
"""Call read_input to use a custom history and an argument parser"""
parser = cmd2.Cmd2ArgumentParser(prog='', description="An example parser")
parser.add_argument('-o', '--option', help="an optional arg")
parser.add_argument('arg_1', help="a choice for this arg", metavar='arg_1', choices=['my_choice', 'your_choice'])
parser.add_argument('arg_2', help="path of something", completer=cmd2.Cmd.path_complete)
self.poutput("Tab completing with argument parser and using custom history")
self.poutput(parser.format_usage())
try:
input_str = self.read_input(
"> ", history=self.custom_history, completion_mode=cmd2.CompletionMode.CUSTOM, parser=parser
)
except EOFError:
pass
else:
self.custom_history.append(input_str)
if __name__ == '__main__':
import sys
app = ReadInputApp()
sys.exit(app.cmdloop())
| python-cmd2/cmd2 | examples/read_input.py | Python | mit | 4,590 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
"""Operations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.rdbms.mariadb.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def list(
self,
**kwargs: Any
) -> "_models.OperationListResult":
"""Lists all of the available REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationListResult, or the result of cls(response)
:rtype: ~azure.mgmt.rdbms.mariadb.models.OperationListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('OperationListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/providers/Microsoft.DBForMariaDB/operations'} # type: ignore
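    # Usage sketch (assumes an authenticated management client whose
    # `operations` attribute is an instance of this class):
    #
    #   result = await client.operations.list()
    #   for op in result.value:
    #       print(op.name)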
| Azure/azure-sdk-for-python | sdk/rdbms/azure-mgmt-rdbms/azure/mgmt/rdbms/mariadb/aio/operations/_operations.py | Python | mit | 3,780 |
from __future__ import absolute_import
from .make_haploblocks import get_haploblocks
from .genetic_models import check_genetic_models
from .model_score import get_model_score
from .fix_variant import make_print_version
from .variant_annotator import VariantAnnotator
| moonso/genmod | genmod/annotate_models/__init__.py | Python | mit | 268 |
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
import copy
import json
import re
from svtplay_dl.error import ServiceError
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.fetcher.http import HTTP
from svtplay_dl.service import OpenGraphThumbMixin
from svtplay_dl.service import Service
class Vimeo(Service, OpenGraphThumbMixin):
supported_domains = ["vimeo.com", "player.vimeo.com"]
def get(self):
data = self.get_urldata()
match_cfg_url = re.search('data-config-url="([^"]+)" data-fallback-url', data)
match_clip_page_cfg = re.search(r"vimeo\.clip_page_config\s*=\s*({.+?});", data)
if match_cfg_url:
            player_url = match_cfg_url.group(1).replace("&amp;", "&")
elif match_clip_page_cfg:
page_config = json.loads(match_clip_page_cfg.group(1))
player_url = page_config["player"]["config_url"]
else:
yield ServiceError(f"Can't find video file for: {self.url}")
return
player_data = self.http.request("get", player_url).text
if player_data:
jsondata = json.loads(player_data)
if ("hls" in jsondata["request"]["files"]) and ("fastly_skyfire" in jsondata["request"]["files"]["hls"]["cdns"]):
hls_elem = jsondata["request"]["files"]["hls"]["cdns"]["fastly_skyfire"]
yield from hlsparse(self.config, self.http.request("get", hls_elem["url"]), hls_elem["url"], output=self.output)
avail_quality = jsondata["request"]["files"]["progressive"]
for i in avail_quality:
yield HTTP(copy.copy(self.config), i["url"], i["height"], output=self.output)
else:
yield ServiceError("Can't find any streams.")
return
| spaam/svtplay-dl | lib/svtplay_dl/service/vimeo.py | Python | mit | 1,817 |
from django.apps import apps
from django.contrib import admin
AccessToken = apps.get_model('oauth2', 'AccessToken')
Client = apps.get_model('oauth2', 'Client')
Grant = apps.get_model('oauth2', 'Grant')
RefreshToken = apps.get_model('oauth2', 'RefreshToken')
class AccessTokenAdmin(admin.ModelAdmin):
list_display = ('user', 'client', 'token', 'expires', 'scope')
raw_id_fields = ('user',)
class GrantAdmin(admin.ModelAdmin):
list_display = ('user', 'client', 'code', 'expires')
raw_id_fields = ('user',)
class ClientAdmin(admin.ModelAdmin):
list_display = ('url', 'user', 'redirect_uri', 'client_id', 'client_type')
raw_id_fields = ('user',)
admin.site.register(AccessToken, AccessTokenAdmin)
admin.site.register(Grant, GrantAdmin)
admin.site.register(Client, ClientAdmin)
admin.site.register(RefreshToken)
| depop/django-oauth2-provider | provider/oauth2/admin.py | Python | mit | 840 |
from rest_framework.permissions import BasePermission
class IsBuilding(BasePermission):
"""Checks if a current building (preselected by middleware)
has been assigned for this user"""
def has_permission(self, request, view):
return request.building is not None
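    # Usage sketch (hypothetical viewset; `request.building` is attached by
    # the project's middleware as noted in the docstring):
    #
    #   class ApartmentViewSet(viewsets.ModelViewSet):
    #       permission_classes = (IsBuilding,)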
| danjac/ownblock | ownblock/ownblock/apps/buildings/permissions.py | Python | mit | 284 |
from importlib import import_module
def import_object(object_path):
"""
Import class or function by path
:param object_path: path to the object for import
:return: imported object
"""
module_path, class_name = object_path.rsplit('.', 1)
module = import_module(module_path)
return getattr(module, class_name)
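# Example: importing a stdlib function by its dotted path.
#
#   json_loads = import_object('json.loads')
#   json_loads('{"a": 1}')  # -> {'a': 1}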
| novafloss/django-formidable | formidable/utils.py | Python | mit | 342 |
#!/usr/bin/env python3
# This file reads the configuration file,
# then finds and edits the parameter by running the matching "write*.py"
# It serves the HTML-handling side written in Python
import cgi
import cgitb
# Enable error reporting to the web/HTTP server
cgitb.enable()
# My libraries: Json, Html, flt (Thermo(Redis))
import mjl, mhl, flt
import redis, os  # This time os is needed as well?
# General parameters
TestoPagina="Temperature sensor configuration"
ConfigFile="../conf/thermo.json"
WriteFile="/cgi-bin/writesensors.py"
# Redis "key"
RedisKey = "sensore:temperatura"
# 1 wire
Dir1w = "/sys/bus/w1/devices/"
# Open the Redis database with my library helper
MyDB = flt.OpenDBFile(ConfigFile)
# Look for the sensors
List1wire = os.listdir(Dir1w)
List1wire.remove("w1_bus_master1")
# Create the keys if they do not exist
for i in List1wire:
if not MyDB.exists(RedisKey+":"+i):
MyDB.set(RedisKey+":"+i,"Sensore"+i)
# Remove the keys whose sensors no longer exist
for i in MyDB.keys(RedisKey+":*"):
Esiste=""
for j in List1wire:
if flt.Decode(i) == RedisKey+":"+j:
Esiste="True"
if not Esiste:
MyDB.delete(i)
# Start web page - use the "web" header from my library
print (mhl.MyHtml())
print (mhl.MyHtmlHead())
# Write the page title/text
print ("<h1>","<center>",TestoPagina,"</center>","</h1>")
#print ("<hr/>","<br/>")
# Optional help/annotation
print ("""
Questo cerca le sonde di temperatura, genera automaticamente le chiavi redis, eliminando eventuali sonde che non sono piu` collegate.
<br/>
L'inserimento e` possibile per la sola descrizione, che servira` al riconoscimento del sensore, nel caso ve ne fosse piu` di uno collegato.
<br/>
<br/>
<i>Inserire una descrizione di riconoscimento, la piu` breve possibile.</i>
<br/>
<br/>
<b>Ricorda di riconfigurare il PID ed eliminare/archiviare "temperature.csv" fer forzare la riscrittura dell'intestazione.</b>
<hr/>
<br/>
""")
# Inizio del form
print (mhl.MyActionForm(WriteFile,"POST"))
print ("<table>")
# This time there are many keys ..
for i in List1wire:
    # The first entry is not editable; it is the Redis key (display only)
print ("<tr>")
print ("<td>")
print ("Key: ")
print ("</td>")
print ("<td>")
print (mhl.MyTextForm("key",i,"40","required","readonly"))
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td>")
print ("Descrizione sensore: ")
print ("</td>")
print ("<td>")
print (mhl.MyTextForm(RedisKey+":"+i,flt.Decode(MyDB.get(RedisKey+":"+i)),"40","required",""))
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td>")
print ("")
print ("</td>")
print ("<td>")
print ("<hr/>")
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td colspan=\"2\">")
print ("<hr/>")
print ("</td>")
print ("</tr>")
print ("<tr>")
print ("<td>")
print ("</td>")
print ("<td colspan=\"2\">")
print (mhl.MyButtonForm("submit","Submit"))
print ("</td>")
print ("</tr>")
print ("</table>")
# End form
print (mhl.MyEndForm())
# End web page
print (mhl.MyHtmlBottom()) | raspibo/Thermo | var/www/cgi-bin/readsensors.py | Python | mit | 3,144 |
import json
from pprint import pprint
import time
import io
# from http://www.codigomanso.com/en/2011/05/trucomanso-transformar-el-tiempo-en-formato-24h-a-formato-12h-python/
def ampmformat (hhmmss):
"""
This method converts time in 24h format to 12h format
Example: "00:32" is "12:32 AM"
"13:33" is "01:33 PM"
"""
ampm = hhmmss.split (":")
if (len(ampm) == 0) or (len(ampm) > 3):
return hhmmss
# is AM? from [00:00, 12:00[
hour = int(ampm[0]) % 24
isam = (hour >= 0) and (hour < 12)
# 00:32 should be 12:32 AM not 00:32
if isam:
ampm[0] = ('12' if (hour == 0) else "%02d" % (hour))
else:
ampm[0] = ('12' if (hour == 12) else "%02d" % (hour-12))
return ':'.join (ampm) + (' AM' if isam else ' PM')
json_data=open('allData2003_2004.json')
data = json.load(json_data)
json_data.close()
# k ='690150'
# print data['690150']
output = {}
for k in data.keys():
for d in data[k]:
date = time.strptime(d['date'], "%b %d, %Y %I:%M:%S %p")
if k in output:
t = ampmformat('%02d:%02d:%02d' % (date.tm_hour, date.tm_min, date.tm_sec))
h = date.tm_hour
output[k]['sum'] += d['value']
output[k]['hourly'][h] += d['value']
else:
output[k] = { "sum": 0,
"hourly": [0]*24
}
t = ampmformat('%02d:%02d:%02d' % (date.tm_hour, date.tm_min, date.tm_sec))
h = date.tm_hour
output[k]['sum'] += d['value']
output[k]['hourly'][h] += d['value']
f = io.open('data.json', 'w', encoding='utf-8')
f.write(unicode(json.dumps(output, ensure_ascii=False)))
f.close()
json_output=open('data.json')
output_data = json.load(json_output)
pprint(output_data)
json_output.close()
| inachen/cs171-hw4-chen-ina | ProblemGeoUSA/data_wrangle_total.py | Python | mit | 1,779 |
#BitArray
#Yu.Yang
#
class bitarray():
def __init__(self,length,defaultValue=False):
if (length < 0):
raise Exception("Length param error")
self.array=[]
self.length=length
for i in range(self.length):
self.array.append(defaultValue)
self.version=0
def input_from_array(self,value):
if(isinstance(value,list)==False):
raise Exception("value is not a Array")
if (value is None or len(value)!=self.length):
raise Exception("ArgumentException if value == null or value.Length != this.Length.")
for i in range(self.length):
self.Set(i,value[i])
self.version+=1
return self
def __len__(self):
return self.length
def __str__(self):
str="["
for i in range(self.length):
str+="1" if self.array[i]==True else "0"
str+=" "
str+="]"
return str
def Get (self,index):
if (index < 0 or index >=self.length):
raise Exception("ArgumentOutOfRangeException if index < 0 or index >= GetLength()")
return self.array[index]
def Set (self,index,value):
if (index < 0 or index >=self.length):
raise Exception("ArgumentOutOfRangeException if index < 0 or index >= GetLength()")
if (value):
self.array[index]=True
else:
self.array[index]=False
self.version+=1
def SetAll(self,value):
for i in range(self.length):
self.Set(i,value)
self.version+=1
def And (self,value):
        if(isinstance(value,bitarray)==False):
            raise Exception("value is not a bitarray")
if (value is None or len(value)!=self.length):
raise Exception("ArgumentException if value == null or value.Length != this.Length.")
for i in range(self.length):
self.array[i]&=value.Get(i)
self.version+=1
return self
def Or (self,value):
        if(isinstance(value,bitarray)==False):
            raise Exception("value is not a bitarray")
if (value is None or len(value)!=self.length):
raise Exception("ArgumentException if value == null or value.Length != this.Length.")
for i in range(self.length):
self.array[i]|=value.Get(i)
self.version+=1
return self
def Xor (self,value):
        if(isinstance(value,bitarray)==False):
            raise Exception("value is not a bitarray")
if (value is None or len(value)!=self.length):
raise Exception("ArgumentException if value == null or value.Length != this.Length.")
for i in range(self.length):
self.array[i]^=value.Get(i)
self.version+=1
return self
def Not (self):
for i in range(self.length):
self.array[i] =not self.array[i]
self.version+=1
return self
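# A minimal usage sketch exercising the class as defined above; note the
# lowercase class name, and that And/Or/Xor mutate self in place and also
# return it.
if __name__ == "__main__":
    a = bitarray(4).input_from_array([True, False, True, False])
    b = bitarray(4).input_from_array([True, True, False, False])
    print(a.Or(b))  # [1 1 1 0 ]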
| NiuXWolf/Introduction-to-Algorithms | B/BitArray/BitArray.py | Python | mit | 2,972 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import argparse
from configparser import SafeConfigParser
class Configurable(object):
"""
Configuration processing for the network
"""
def __init__(self, *args, **kwargs):
self._name = kwargs.pop("name", "Unknown")
if args and kwargs:
raise TypeError('Configurable must take either a config parser or keyword args')
if len(args) > 1:
raise TypeError('Configurable takes at most one argument')
if args:
self._config = args[0]
else:
self._config = self._configure(**kwargs)
return
@property
def name(self):
return self._name
def _configure(self, **kwargs):
config = SafeConfigParser()
config_file = kwargs.pop("config_file", "")
config.read(config_file)
        # Override config settings with any (k, v) pairs specified on the command line
for option, value in kwargs.items():
assigned = False
for section in config.sections():
if option in config.options(section):
config.set(section, option, str(value))
assigned = True
break
if not assigned:
raise ValueError("%s is not a valid option" % option)
return config
argparser = argparse.ArgumentParser()
argparser.add_argument('--config_file')
# ======
# [OS]
@property
def model_type(self):
return self._config.get('OS', 'model_type')
argparser.add_argument('--model_type')
@property
def mode(self):
return self._config.get('OS', 'mode')
argparser.add_argument('--mode')
@property
def save_dir(self):
return self._config.get('OS', 'save_dir')
argparser.add_argument('--save_dir')
@property
def word_file(self):
return self._config.get('OS', 'word_file')
argparser.add_argument('--word_file')
@property
def target_file(self):
return self._config.get('OS', 'target_file')
argparser.add_argument('--target_file')
@property
def train_file(self):
return self._config.get('OS', 'train_file')
argparser.add_argument('--train_file')
@property
def valid_file(self):
return self._config.get('OS', 'valid_file')
argparser.add_argument('--valid_file')
@property
def test_file(self):
return self._config.get('OS', 'test_file')
argparser.add_argument('--test_file')
@property
def save_model_file(self):
return self._config.get('OS', 'save_model_file')
argparser.add_argument('--save_model_file')
@property
def restore_from(self):
return self._config.get('OS', 'restore_from')
argparser.add_argument('--restore_from')
@property
def embed_file(self):
return self._config.get('OS', 'embed_file')
argparser.add_argument('--embed_file')
@property
def use_gpu(self):
return self._config.getboolean('OS', 'use_gpu')
argparser.add_argument('--use_gpu')
# [Dataset]
@property
def n_bkts(self):
return self._config.getint('Dataset', 'n_bkts')
argparser.add_argument('--n_bkts')
@property
def n_valid_bkts(self):
return self._config.getint('Dataset', 'n_valid_bkts')
argparser.add_argument('--n_valid_bkts')
@property
def dataset_type(self):
return self._config.get('Dataset', 'dataset_type')
argparser.add_argument('--dataset_type')
@property
def min_occur_count(self):
return self._config.getint('Dataset', 'min_occur_count')
argparser.add_argument('--min_occur_count')
# [Learning rate]
@property
def learning_rate(self):
return self._config.getfloat('Learning rate', 'learning_rate')
argparser.add_argument('--learning_rate')
@property
def epoch_decay(self):
return self._config.getint('Learning rate', 'epoch_decay')
argparser.add_argument('--epoch_decay')
@property
def dropout(self):
return self._config.getfloat('Learning rate', 'dropout')
argparser.add_argument('--dropout')
# [Sizes]
@property
def words_dim(self):
return self._config.getint('Sizes', 'words_dim')
argparser.add_argument('--words_dim')
# [Training]
@property
def log_interval(self):
return self._config.getint('Training', 'log_interval')
argparser.add_argument('--log_interval')
@property
def valid_interval(self):
return self._config.getint('Training', 'valid_interval')
argparser.add_argument('--valid_interval')
@property
def train_batch_size(self):
return self._config.getint('Training', 'train_batch_size')
argparser.add_argument('--train_batch_size')
@property
def test_batch_size(self):
return self._config.getint('Training', 'test_batch_size')
argparser.add_argument('--test_batch_size')
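# Usage sketch (hypothetical paths; assumes an INI file that contains the
# sections referenced above, e.g. [OS], [Dataset], [Learning rate]):
#
#     args, _ = Configurable.argparser.parse_known_args()
#     overrides = {k: v for k, v in vars(args).items() if v is not None}
#     cfg = Configurable(**overrides)  # e.g. --config_file config.ini
#     print(cfg.model_type, cfg.mode)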
| Impavidity/text-classification-cnn | configurable.py | Python | mit | 4,672 |
from pyquery import PyQuery as pq
from proxypool.schemas.proxy import Proxy
from proxypool.crawlers.base import BaseCrawler
from loguru import logger
BASE_URL = 'https://www.xicidaili.com/'
class XicidailiCrawler(BaseCrawler):
"""
    xicidaili crawler, https://www.xicidaili.com/
"""
urls = [BASE_URL]
ignore = True
def parse(self, html):
"""
parse html file to get proxies
:return:
"""
doc = pq(html)
items = doc('#ip_list tr:contains(高匿)').items()
for item in items:
country = item.find('td.country').text()
if not country or country.strip() != '高匿':
continue
host = item.find('td:nth-child(2)').text()
port = int(item.find('td:nth-child(3)').text())
yield Proxy(host=host, port=port)
if __name__ == '__main__':
crawler = XicidailiCrawler()
for proxy in crawler.crawl():
print(proxy)
| Python3WebSpider/ProxyPool | proxypool/crawlers/public/xicidaili.py | Python | mit | 969 |
from django.contrib import admin
from .models import Contact
# Register your models here.
admin.site.register(Contact)
| Busaka/esl | src/contact/admin.py | Python | mit | 121 |
from interface.design.ui_screen import Ui_wnd_gifextract
from PyQt5 import QtWidgets
import sys
import listener
import config
import ffmpeg
import queue
import interface.menus.Frame_CreateGif
import interface.menus.Frame_ExtractFrames
import interface.menus.Frame_Queue
class Screen(QtWidgets.QMainWindow):
def __init__(self, parent=None):
def setupFFMpeg():
self.ffmpeg = ffmpeg.FFmpeg(self.config)
def setupConfig():
self.config = config.Config(self)
def setupQueue():
self.queue = queue.JobQueue(self)
def setupTabs():
self.tab_video = interface.menus.Frame_ExtractFrames.Frame(self)
self.ui.tabWidget.addTab(self.tab_video, "Frame Extraction")
self.tab_gif = interface.menus.Frame_CreateGif.Frame(self)
self.ui.tabWidget.addTab(self.tab_gif, "Gif Creation")
self.tab_queue = interface.menus.Frame_Queue.Frame(self)
self.ui.tabWidget.addTab(self.tab_queue, "Queue")
QtWidgets.QWidget.__init__(self, parent)
self.ui = Ui_wnd_gifextract()
self.ui.setupUi(self)
self.slots = listener.Slots(self)
self.createLinks()
setupConfig()
setupTabs()
setupFFMpeg()
setupQueue()
def createLinks(self):
self.ui.actionPreferences.triggered.connect(self.openOptions)
def openOptions(self):
import interface.menus.ConfigMenu
options = interface.menus.ConfigMenu.ConfigMenu(self, self.config)
options.show()
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
program = Screen()
program.show()
sys.exit(app.exec_()) | KaiAPaulhus/GifExtract | src/alpha.py | Python | mit | 1,716 |
from django.core.management import call_command
from django.test import TestCase
from mock import call
from mock import patch
from kolibri.core.content import models as content
class DeleteChannelTestCase(TestCase):
"""
Testcase for delete channel management command
"""
fixtures = ["content_test.json"]
the_channel_id = "6199dde695db4ee4ab392222d5af1e5c"
def delete_channel(self):
call_command("deletechannel", self.the_channel_id)
def test_channelmetadata_delete_remove_metadata_object(self):
self.delete_channel()
self.assertEquals(0, content.ChannelMetadata.objects.count())
def test_channelmetadata_delete_remove_contentnodes(self):
self.delete_channel()
self.assertEquals(0, content.ContentNode.objects.count())
def test_channelmetadata_delete_leave_unrelated_contentnodes(self):
c2c1 = content.ContentNode.objects.get(title="c2c1")
new_id = c2c1.id[:-1] + "1"
content.ContentNode.objects.create(
id=new_id,
content_id=c2c1.content_id,
kind=c2c1.kind,
channel_id=c2c1.channel_id,
available=True,
title=c2c1.title,
)
self.delete_channel()
self.assertEquals(1, content.ContentNode.objects.count())
def test_channelmetadata_delete_remove_file_objects(self):
self.delete_channel()
self.assertEquals(0, content.File.objects.count())
@patch("kolibri.core.content.models.paths.get_content_storage_file_path")
@patch("kolibri.core.content.models.os.remove")
def test_channelmetadata_delete_files(self, os_remove_mock, content_file_path):
path = "testing"
content_file_path.return_value = path
num_files = content.LocalFile.objects.filter(available=True).count()
self.delete_channel()
os_remove_mock.assert_has_calls([call(path)] * num_files)
| indirectlylit/kolibri | kolibri/core/content/test/test_deletechannel.py | Python | mit | 1,918 |
from wtforms import IntegerField, SelectMultipleField
from wtforms.validators import NumberRange
from dmutils.forms import DmForm
import flask_featureflags
class BriefSearchForm(DmForm):
page = IntegerField(default=1, validators=(NumberRange(min=1),))
status = SelectMultipleField("Status", choices=(
("live", "Open",),
("closed", "Closed",)
))
# lot choices expected to be set at runtime
lot = SelectMultipleField("Category")
def __init__(self, *args, **kwargs):
"""
Requires extra keyword arguments:
- `framework` - information on the target framework as returned by the api
- `data_api_client` - a data api client (should be able to remove the need for this arg at some point)
"""
super(BriefSearchForm, self).__init__(*args, **kwargs)
try:
# popping this kwarg so we don't risk it getting fed to wtforms default implementation which might use it
# as a data field if there were a name collision
framework = kwargs.pop("framework")
self._framework_slug = framework["slug"]
self.lot.choices = tuple((lot["slug"], lot["name"],) for lot in framework["lots"] if lot["allowsBrief"])
except KeyError:
raise TypeError("Expected keyword argument 'framework' with framework information")
try:
# data_api_client argument only needed so we can fit in with the current way the tests mock.patch the
            # the data_api_client directly on the view. It would be nice to be able to use the global reference to this
self._data_api_client = kwargs.pop("data_api_client")
except KeyError:
raise TypeError("Expected keyword argument 'data_api_client'")
def get_briefs(self):
if not self.validate():
raise ValueError("Invalid form")
statuses = self.status.data or tuple(id for id, label in self.status.choices)
lots = self.lot.data or tuple(id for id, label in self.lot.choices)
# disable framework filtering when digital marketplace framework is live
kwargs = {} if flask_featureflags.is_active('DM_FRAMEWORK') else {"framework": self._framework_slug}
return self._data_api_client.find_briefs(
status=",".join(statuses),
lot=",".join(lots),
page=self.page.data,
per_page=75,
human=True,
**kwargs
)
def get_filters(self):
"""
generate the same "filters" structure as expected by search page templates
"""
if not self.validate():
raise ValueError("Invalid form")
return [
{
"label": field.label,
"filters": [
{
"label": choice_label,
"name": field.name,
"id": "{}-{}".format(field.id, choice_id),
"value": choice_id,
"checked": field.data and choice_id in field.data,
}
for choice_id, choice_label in field.choices
],
}
for field in (self.lot, self.status,)
]
def filters_applied(self):
"""
returns boolean indicating whether the results are actually filtered at all
"""
if not self.validate():
raise ValueError("Invalid form")
return bool(self.lot.data or self.status.data)
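# Usage sketch (hypothetical request handling; the required keyword
# arguments follow the constructor docstring above):
#
#     form = BriefSearchForm(formdata=request.args, framework=framework,
#                            data_api_client=data_api_client)
#     if form.validate():
#         briefs = form.get_briefs()
#         filters = form.get_filters()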
| AusDTO/dto-digitalmarketplace-buyer-frontend | app/main/forms/brief_forms.py | Python | mit | 3,546 |
# coding=gbk
import os
import re
import string
def isMov(filename):
    # Determine whether the file is a movie file
    suffix = filename.split('.')[-1].lower() # extract the extension
    pattern = re.compile(r'mpg|mpeg|m2v|mkv|dat|vob|avi|wmv|rm|ram|rmvb|mov|avi|mp4|qt|viv')
    if pattern.search(suffix): # match against the known movie formats
return True
else:
return False
if __name__=='__main__':
    # Walk the current directory
    print 'Processing...'
    cnt = 1
    for fp in os.listdir(os.getcwd()):
        if os.path.isfile(fp) and isMov(fp): # it is a movie file
            if fp[0]=='[': # strip a leading "[...]" prefix
index = fp.find(']')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+1:])
os.rename(fp,fp[index+1:])
fp = fp[index+1:]
cnt+=1
            elif fp[:2]=='¡¾': # strip a leading fullwidth-bracket prefix ('¡¾'/'¡¿' are the GBK bytes of 【/】)
index = fp.find('¡¿')
if index!=-1:
print '[%d] %s ==> %s'%(cnt,fp,fp[index+2:])
os.rename(fp,fp[index+2:])
fp = fp[index+2:]
cnt+=1
            if fp[0] =='.' or fp[0]=='-': # strip a leading '.' or '-'
print '[%d] %s ==> %s'%(cnt,fp,fp[1:])
os.rename(fp,fp[1:])
if cnt==1:
        print 'No movie files to process'
else:
        print 'Done'
| windcode/xtools | CleanMoviePrefix.py | Python | mit | 1,426 |
# coding: utf-8
import db_info
import db_cancel
import db_news
import hashlib
from tweeter import format_info, format_cancel, format_news
import settings
log = settings.log
def add_info_to_queue(q, *args):
try:
        # Count how many records were updated
updated = 0
for lec_info in args:
id = db_info.add_info(*lec_info)
if id is not False:
lec_info.append(id)
                # Format the text for tweeting
t = format_info(*lec_info)
                # Put it on the queue
q.put(t)
updated += 1
else:
pass
else:
            # Return the number of updates
return updated
except Exception as e:
log.exception(e)
def add_cancel_to_queue(q, *args):
try:
# 更新した数をカウント
updated = 0
for lec_cancel in args:
cancel_id = db_cancel.add_cancel(*lec_cancel)
if cancel_id is not False:
lec_cancel.append(cancel_id)
                # Format the text for tweeting
t = format_cancel(*lec_cancel)
                # Put it on the queue
q.put(t)
updated += 1
else:
pass
else:
            # Return the number of updates
return updated
except Exception as e:
log.exception(e)
def add_news_to_queue(q, *args):
try:
        # Count how many records were updated
updated = 0
for news in args:
news_id = db_news.add_news(*news)
if news_id is not False:
news.append(news_id)
                # Format the text for tweeting
t = format_news(*news)
                # Put it on the queue
q.put(t)
updated += 1
else:
pass
else:
            # Return the number of updates
return updated
except Exception as e:
log.exception(e)
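# Usage sketch: pass a queue plus one list per record; each list is handed
# to the matching db_* helper and, for every newly inserted record, a
# formatted tweet is queued. Record shapes are defined by db_info /
# db_cancel / db_news, so they are not spelled out here.
#
#     updated = add_news_to_queue(tweet_queue, news_record)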
| pddg/Qkou_kit | lib/add_db.py | Python | mit | 2,046 |
import argparse
from collections import defaultdict
def calculateJaccardIndex(x,z,neighbours):
shared = neighbours[x].intersection(neighbours[z])
combined = neighbours[x].union(neighbours[z])
return len(shared)/float(len(combined))
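# Example: with neighbours = {1: {2, 3}, 4: {3, 5}}, entities 1 and 4 share
# {3} out of {2, 3, 5}, so calculateJaccardIndex(1, 4, neighbours) == 1/3.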
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Calculate scores for a set of scores')
parser.add_argument('--cooccurrenceFile',type=str,required=True,help='File containing cooccurrences')
parser.add_argument('--occurrenceFile',type=str,required=True,help='File containing occurrences')
parser.add_argument('--sentenceCount',type=str,required=True,help='File containing sentence count')
parser.add_argument('--relationsToScore',type=str,required=True,help='File containing relations to score')
parser.add_argument('--anniVectors',type=str,help='File containing the raw ANNI vector data')
parser.add_argument('--anniVectorsIndex',type=str,help='File containing the index for the ANNI vector data')
parser.add_argument('--outFile',type=str,required=True,help='File to output scores to')
args = parser.parse_args()
print "Loading relationsToScore"
relationsToScore = []
entitiesToScore = set()
with open(args.relationsToScore) as f:
for line in f:
split = map(int,line.strip().split())
x,y = split[:2]
relationsToScore.append((x,y))
entitiesToScore.add(x)
entitiesToScore.add(y)
entitiesToScore = sorted(list(entitiesToScore))
print "Loaded relationsToScore"
print "Loading cooccurrences..."
neighbours = defaultdict(set)
with open(args.cooccurrenceFile) as f:
for line in f:
x,y,count = map(int,line.strip().split())
neighbours[x].add(y)
neighbours[y].add(x)
print "Loaded cooccurrences"
print "Scoring..."
with open(args.outFile,'w') as outF:
for i,(x,z) in enumerate(relationsToScore):
if (i%10000) == 0:
print i
jaccardScore = calculateJaccardIndex(x,z,neighbours)
outData = [x,z,jaccardScore]
outLine = "\t".join(map(str,outData))
outF.write(outLine+"\n")
print "Completed scoring"
print "Output to %s" % args.outFile
| jakelever/knowledgediscovery | analysis/separate/jaccard.py | Python | mit | 2,061 |
import scrapy
import xml.etree.ElementTree as ET
from locations.items import GeojsonPointItem
URL = 'http://hosted.where2getit.com/auntieannes/2014/ajax?&xml_request=%3Crequest%3E%3Cappkey%3E6B95F8A2-0C8A-11DF-A056-A52C2C77206B%3C%2Fappkey%3E%3Cformdata+id%3D%22locatorsearch%22%3E%3Cdataview%3Estore_default%3C%2Fdataview%3E%3Climit%3E250%3C%2Flimit%3E%3Cgeolocs%3E%3Cgeoloc%3E%3Caddressline%3E{}%3C%2Faddressline%3E%3Clongitude%3E%3C%2Flongitude%3E%3Clatitude%3E%3C%2Flatitude%3E%3Ccountry%3E{}%3C%2Fcountry%3E%3C%2Fgeoloc%3E%3C%2Fgeolocs%3E%3Cwhere%3E%3Cor%3E%3Chascatering%3E%3Ceq%3E%3C%2Feq%3E%3C%2Fhascatering%3E%3Chaspretzelfieldtrip%3E%3Ceq%3E%3C%2Feq%3E%3C%2Fhaspretzelfieldtrip%3E%3Cnewstores%3E%3Ceq%3E%3C%2Feq%3E%3C%2Fnewstores%3E%3C%2For%3E%3C%2Fwhere%3E%3Csearchradius%3E10%7C25%7C50%7C100%7C250%7C500%7C750%7C1000%3C%2Fsearchradius%3E%3Cstateonly%3E1%3C%2Fstateonly%3E%3C%2Fformdata%3E%3C%2Frequest%3E'
US_STATES = (
"AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DC", "DE", "FL", "GA",
"HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD",
"MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ",
"NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC",
"SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY",
)
UK_Cities = (
'London', 'Birmingham', 'Manchester', 'Glasgow', 'Leeds',
'Liverpool', 'Bristol', 'Newcastle', 'Sunderland', 'Wolverhampton',
'Nottingham', 'Sheffield', 'Belfast', 'Leicester', 'Bradford',
)
UAE_Cities = (
"Abu Dhabi", "Sharjah", "Dubai", "Dayrah","Al Ain",
"Fujairah", "Ras al-Khaimah", "Ar Ruways", "As Satwah",
"Al Khan",
)
TT_Cities = (
"Arima", "San Fernando", "Princes Town", "Piarco", "RioClaro", "Port of Spain",
"Victoria", "Maraval", "Fyzabad", "Debe", "Couva", "Diego Martin", "Chaguanas",
"Penal", "Cunupia", "Curepe", "Roxborough", "San Juan", "Arouca", "Saint Joseph",
"California", "Marabella", "Siparia", "Gasparillo", "Morvant", "Barataria", "Saint Clair",
"Laventille", "Carenage", "Ward of Tacarigua", "Caroni", "Lopinot", "Tunapuna", "Santa Cruz",
"Saint Augustine", "Golden Lane", "Scarborough", "Moriah", "Saint James", "Carapichaima",
"Valsayn", "Freeport", "Claxton Bay", "Sangre Grande", "Cumuto", "Woodbrook", "Petit Valley",
"El Dorado", "Phoenix Park",
)
Thailand_Cities = (
"Bangkok", "Chumphon", "Kathu", "Phang Khon", "Sakon Nakhon", "Mueang Nonthaburi",
"Kalasin", "Chon Buri", "Loei", "Khon Kaen", "Nong Bua Lamphu", "Roi Et", "Udon Thani",
"Kumphawapi", "Kanchanaburi", "Nong Khai", "Ayutthaya", "Chiang Mai", "Songkhla",
"Chiang Rai", "Surin", "Thanyaburi", "Wiphawadi", "Phuket", "Sing Buri", "Satun",
"Prachin Buri", "Ubon Ratchathani", "Pattaya", "Yala", "Bang Na", "Samut Songkhram", "Phetchabun"
"Ratchaburi", "Lampang", "Narathiwat", "New Sukhothai", "Lopburi", "Uttaradit", "Maha Sarakham",
"Mae Hong Son", "Suphan Buri", "Chachoengsao", "Samut Sakhon", "Phrae", "Din Daeng",
"Pathum Wan", "Phayao", "Trang", "Mukdahan", "Phetchaburi", "Uthai Thani", "Krabi", "Phichit",
"Phitsanulok", "Ban Pat Mon", "Prachuap Khiri Khan", "Ban Khlong Prasong", "Yasothon",
"Ranong", "Lamphun", "Nong Bua", "Amnat Charoen", "Ban Phichit", "Bang Khae", "Thon Buri",
"Min Buri", "Ban Tham", "Sam Sen", "Ang Thong", "Mueang Samut Prakan", "Sa Kaeo", "Pathum Thani",
"Chanthaburi", "Huai Khwang", "Rayong", "Sattahip", "Phan", "Si Racha", "Phatthalung",
"Rawai", "Buriram", "Dusit", "Khlong Luang", "Trat", "Ban Bueng", "Sung Noen", "Manorom",
"Ban Bang Plong", "Tak", "Ban Tha Duea", "Amphawa", "Ban Pong Lang", "Phaya Thai", "Si Sa Ket",
"Nakhon Ratchasima", "Bang Phlat", "Ban Bang Phli Nakhon", "Salaya", "Krathum Baen",
"Hua Hin", "Ban Talat Rangsit", "Ban Khlong Ngae", "Nong Prue", "Wang Thonglang",
"Samphanthawong", "Bang Khun Thian", "Chatuchak", "Chaiyaphum",
"Nakhon Pathom", "Nan", "Bang Kruai", "Sathon", "Suan Luang", "Ban Wang Yai"
"Khlong San", "Watthana", "Lat Krabang", "Muak Lek", "Kosum Phisai", "Ban Phlam", "Non Thai",
"Photharam", "Thalang", "Bang Kapi", "Long", "Ka Bang", "Pattani", "Nakhon Si Thammarat",
"Khlong Toei", "Cha-am", "Amphoe Aranyaprathet", "Phang Nga", "Ban Tha Ruea", "Chiang Muan",
"Ban Ang Thong", "Ban Khlong Takhian", "Khan Na Yao", "Bang Sue", "Sam Khok", "Don Mueang",
"Ban Pratunam Tha Khai","Sena", "Prakanong", "Ban Tha Pai", "Bang Lamung", "Nakhon Sawan",
"San Sai", "Kamphaeng Phet", "Pak Kret", "Hat Yai", "Ban Nam Hak", "Khlung", "Makkasan",
"Bang Sao Thong", "Ban Hua Thale", "Klaeng", "Chulabhorn", "Ban Don Sak", "Phanna Nikhom",
"Ban Na", "Ban Ko Pao","Mae Sot"
)
Korea_Cities = (
"Seoul", "Incheon", "Paju", "Cheonan", "Yongin", "Kwanghui-dong", "Pon-dong",
"Gwangju", "Gwangmyeong", "Tang-ni", "Busan", "Seongnam-si", "Suwon-si", "Namyang",
"Namyangju", "Jeju-si", "Ulsan", "Osan", "Hanam", "Pyong-gol", "Anyang-si",
"Yangsan", "Daejeon", "Nonsan", "Seocho", "Wonju", "Kisa", "Daegu", "Ansan-si", "Gongju",
"Haeundae", "Sasang", "Bucheon-si", "Chuncheon", "Ilsan-dong", "Naju", "Jinju", "Uiwang",
"Gangneung", "Yongsan-dong", "Pohang", "Changwon", "Jeonju", "Yeosu",
"Songnim", "Gimhae", "Songjeong", "Hyoja-dong", "Icheon-si", "Kimso", "Iksan", "Deokjin",
"Koyang-dong", "Samsung", "Anseong", "Samjung-ni", "Mapo-dong", "Gunnae", "Nae-ri",
"Suncheon", "Okpo-dong", "Moppo", "Sangdo-dong", "Cheongju-si", "Ch'aeun",
"Taebuk", "Yeoju", "Seong-dong", "Duchon", "Gyeongju", "Andong", "Seosan City", "Asan",
"Miryang", "Wonmi-gu", "Janghowon", "Chungnim", "Songam", "Tongan", "Ap'o", "Jecheon",
"Se-ri", "Ka-ri", "Hansol", "Songang", "Hyangyang", "Gyeongsan-si", "Gumi", "Unpo",
"Ulchin", "Namhyang-dong", "T'aebaek", "Hadong", "Haesan", "Chungju", "Chilgok",
)
Singapore_Cities = (
"Singapore", "Yishun New Town", "Bedok New Town", "Ayer Raja New Town",
"Kalang", "Tampines New Town", "Ang Mo Kio New Town", "Kampong Pasir Ris", "Hougang",
"Yew Tee", "Choa Chu Kang New Town", "Punggol", "Changi Village", "Bukit Timah Estate",
"Serangoon", "Jurong Town", "Tanglin Halt", "Woodlands New Town", "Jurong East New Town",
"Bukit Panjang New Town", "Bukit Batok New Town", "Pasir Panjang", "Holland Village",
"Tai Seng", "Toa Payoh New Town", "Bukit Timah", "Jurong West New Town", "Kembangan",
"Queenstown Estate", "Boon Lay", "Simei New Town", "Pandan Valley", "Clementi New Town",
"Tanjong Pagar"
)
Saudi_Arabia_Cities = (
"Riyadh", "Dammam", "Safwa", "Al Qatif", "Dhahran", "Al Faruq", "Khobar", "Jubail",
"Sayhat", "Jeddah", "Ta'if", "Mecca", "Al Hufuf", "Medina", "Rahimah", "Rabigh",
"Yanbu` al Bahr", "Abqaiq", "Mina", "Ramdah", "Linah", "Abha", "Jizan", "Al Yamamah",
"Tabuk", "Sambah", "Ras Tanura", "At Tuwal", "Sabya", "Buraidah", "Najran", "Sakaka",
"Madinat Yanbu` as Sina`iyah", "Hayil", "Khulays", "Khamis Mushait", "Ra's al Khafji",
"Al Bahah", "Rahman", "Jazirah", "Jazirah"
)
Indonesia_Cities = (
"Jakarta", "Surabaya", "Medan", "Bandung", "Bekasi", "Palembang", "Tangerang", "Makassar",
"Semarang", "South Tangerang",
)
Malaysia_Cities = (
"Kaula Lumpur", "Kota Bharu", "Klang", "Johor Bahru", "Subang Jaya", "Ipoh", "Kuching", "Seremban",
"Petaling Jaya", "Shah Alam", 'Penang', 'Kelantan', "Pantai", "Petaling Jaya", "Kajang",
"Setapak", "Bukit Kayu Hitam", "Bayan Lepas", "Taiping", "Kuala Terengganu", "Kuantan",
"Alor Gajah",
)
Japan_Cities = (
'Tokyo', "Hiroshima", "Saitama", "Nihon'odori", "Ibaraki", "Urayasu",
"Suita", "Funabashi", "Nagareyama", "Ichikawa", "Isesaki", "Koga", "Ichihara",
"Koshigaya", "Shibukawa", "Aoicho", "Yamakita", "Gotemba", "Nisshin", "Nishinomiya",
"Den'en-chofu", "Kawasaki", "Toyama-shi", "Moriguchi", "Chita", "Sano", "Nagoya-shi",
"Kyoto", "Hamamatsu", "Shimotoda", "Yachiyo", "Tsukuba", "Chiba", "Yokohama",
"Yamanashi", "Ashihara", "Kawaguchi", "Kasukabe", "Shizuoka", "Kawanishi", "Itami",
"Kobe", "Nara", "Yao", "Osaka", "Handa", "Honjocho", "Kishiwada", "Susono", "Nagasaki",
"Setagaya-ku", "Zushi", "Sugito", "Yabasehoncho", "Yamaguchi", "Kanazawa", "Maruyama",
"Tahara", "Obu", "Nishio", "Okinawa", "Urasoe", "Naha", "Chichibu", "Asahi", "Kita-sannomaru",
"Hirokawa", "Ishigaki", "Higashine", "Tsuruoka", "Asahikawa", "Minatomachi", "Sannohe",
"Tottori-shi", "Higashiasahimachi", "Iwata", "Koriyama", "Hanno", "Takarazuka", "Kuwana-shi",
"Kakogawa", "Komaki", "Mitake", "Tondabayashi", "Matsumoto", "Kakamigahara", "Onomichi",
"Kure", "Maebaru", "Tokai",
)
COUNTRIES = {
'US': US_STATES,
'UK': UK_Cities,
'AE': UAE_Cities,
'TT': TT_Cities,
'TH': Thailand_Cities,
'KR': Korea_Cities,
'SG': Singapore_Cities,
'SA': Saudi_Arabia_Cities,
'ID': Indonesia_Cities,
'MY': Malaysia_Cities,
'JP': Japan_Cities
}
TAGS = [
'city', 'country', 'latitude', 'longitude',
'phone', 'postalcode', 'state', 'uid'
]
MAPPING = {
'latitude': 'lat', 'longitude': 'lon', 'uid': 'ref',
'postalcode': 'postcode',
}
class AuntieAnnesSpider(scrapy.Spider):
name = "auntie_annes"
allowed_domains = ["hosted.where2getit.com/auntieannes"]
download_delay = 0.2
def process_poi(self, poi):
props = {}
add_parts = []
for child in poi:
if child.tag in TAGS and child.tag in MAPPING:
if child.tag in ('latitude', 'longitude'):
props[MAPPING[child.tag]] = float(child.text)
else:
props[MAPPING[child.tag]] = child.text
elif child.tag in TAGS and child.tag not in MAPPING:
props[child.tag] = child.text
elif child.tag in ('address1', 'address2', 'address3', ):
add_parts.append(child.text if child.text else '')
props.update({'addr_full': ', '.join(filter(None, add_parts))})
return GeojsonPointItem(**props)
def start_requests(self):
for country, locations in COUNTRIES.items():
for location in locations:
loc = "+".join(location.split(' '))
url = URL.format(location, country)
yield scrapy.Request(url, callback=self.parse)
def parse(self, response):
root = ET.fromstring(response.text)
collection = root.getchildren()[0]
pois = collection.findall('poi')
for poi in pois:
yield self.process_poi(poi)
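# The spider is normally launched through Scrapy's CLI rather than run
# directly, e.g.:
#
#     scrapy crawl auntie_annes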
| iandees/all-the-places | locations/spiders/aunt_annes.py | Python | mit | 10,953 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .update_resource import UpdateResource
class VirtualMachineUpdate(UpdateResource):
"""Describes a Virtual Machine.
Variables are only populated by the server, and will be ignored when
sending a request.
:param tags: Resource tags
:type tags: dict[str, str]
:param plan: Specifies information about the marketplace image used to
create the virtual machine. This element is only used for marketplace
images. Before you can use a marketplace image from an API, you must
enable the image for programmatic use. In the Azure portal, find the
marketplace image that you want to use and then click **Want to deploy
programmatically, Get Started ->**. Enter any required information and
then click **Save**.
:type plan: ~azure.mgmt.compute.v2017_12_01.models.Plan
:param hardware_profile: Specifies the hardware settings for the virtual
machine.
:type hardware_profile:
~azure.mgmt.compute.v2017_12_01.models.HardwareProfile
:param storage_profile: Specifies the storage settings for the virtual
machine disks.
:type storage_profile:
~azure.mgmt.compute.v2017_12_01.models.StorageProfile
:param os_profile: Specifies the operating system settings for the virtual
machine.
:type os_profile: ~azure.mgmt.compute.v2017_12_01.models.OSProfile
:param network_profile: Specifies the network interfaces of the virtual
machine.
:type network_profile:
~azure.mgmt.compute.v2017_12_01.models.NetworkProfile
:param diagnostics_profile: Specifies the boot diagnostic settings state.
<br><br>Minimum api-version: 2015-06-15.
:type diagnostics_profile:
~azure.mgmt.compute.v2017_12_01.models.DiagnosticsProfile
:param availability_set: Specifies information about the availability set
that the virtual machine should be assigned to. Virtual machines specified
in the same availability set are allocated to different nodes to maximize
availability. For more information about availability sets, see [Manage
the availability of virtual
machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
    <br><br> For more information on Azure planned maintenance, see [Planned
maintenance for virtual machines in
Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
<br><br> Currently, a VM can only be added to availability set at creation
time. An existing VM cannot be added to an availability set.
:type availability_set: ~azure.mgmt.compute.v2017_12_01.models.SubResource
:ivar provisioning_state: The provisioning state, which only appears in
the response.
:vartype provisioning_state: str
:ivar instance_view: The virtual machine instance view.
:vartype instance_view:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineInstanceView
:param license_type: Specifies that the image or disk that is being used
was licensed on-premises. This element is only used for images that
contain the Windows Server operating system. <br><br> Possible values are:
<br><br> Windows_Client <br><br> Windows_Server <br><br> If this element
is included in a request for an update, the value must match the initial
value. This value cannot be updated. <br><br> For more information, see
[Azure Hybrid Use Benefit for Windows
Server](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-hybrid-use-benefit-licensing?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json)
<br><br> Minimum api-version: 2015-06-15
:type license_type: str
:ivar vm_id: Specifies the VM unique ID which is a 128-bits identifier
that is encoded and stored in all Azure IaaS VMs SMBIOS and can be read
using platform BIOS commands.
:vartype vm_id: str
:param identity: The identity of the virtual machine, if configured.
:type identity:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineIdentity
:param zones: The virtual machine zones.
:type zones: list[str]
"""
_validation = {
'provisioning_state': {'readonly': True},
'instance_view': {'readonly': True},
'vm_id': {'readonly': True},
}
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'plan': {'key': 'plan', 'type': 'Plan'},
'hardware_profile': {'key': 'properties.hardwareProfile', 'type': 'HardwareProfile'},
'storage_profile': {'key': 'properties.storageProfile', 'type': 'StorageProfile'},
'os_profile': {'key': 'properties.osProfile', 'type': 'OSProfile'},
'network_profile': {'key': 'properties.networkProfile', 'type': 'NetworkProfile'},
'diagnostics_profile': {'key': 'properties.diagnosticsProfile', 'type': 'DiagnosticsProfile'},
'availability_set': {'key': 'properties.availabilitySet', 'type': 'SubResource'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'instance_view': {'key': 'properties.instanceView', 'type': 'VirtualMachineInstanceView'},
'license_type': {'key': 'properties.licenseType', 'type': 'str'},
'vm_id': {'key': 'properties.vmId', 'type': 'str'},
'identity': {'key': 'identity', 'type': 'VirtualMachineIdentity'},
'zones': {'key': 'zones', 'type': '[str]'},
}
def __init__(self, **kwargs):
super(VirtualMachineUpdate, self).__init__(**kwargs)
self.plan = kwargs.get('plan', None)
self.hardware_profile = kwargs.get('hardware_profile', None)
self.storage_profile = kwargs.get('storage_profile', None)
self.os_profile = kwargs.get('os_profile', None)
self.network_profile = kwargs.get('network_profile', None)
self.diagnostics_profile = kwargs.get('diagnostics_profile', None)
self.availability_set = kwargs.get('availability_set', None)
self.provisioning_state = None
self.instance_view = None
self.license_type = kwargs.get('license_type', None)
self.vm_id = None
self.identity = kwargs.get('identity', None)
self.zones = kwargs.get('zones', None)
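# Usage sketch (a minimal example; HardwareProfile is assumed to come from
# the same azure.mgmt.compute.v2017_12_01.models package):
#
#     update = VirtualMachineUpdate(
#         tags={'environment': 'test'},
#         hardware_profile=HardwareProfile(vm_size='Standard_DS1_v2'),
#     )
#
# The _attribute_map above flattens these fields onto 'properties.*' keys
# when the model is serialized.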
| lmazuel/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2017_12_01/models/virtual_machine_update.py | Python | mit | 6,810 |
"""Family module for Wikitech."""
#
# (C) Pywikibot team, 2005-2020
#
# Distributed under the terms of the MIT license.
#
from pywikibot import family
# The Wikitech family
class Family(family.WikimediaOrgFamily):
"""Family class for Wikitech."""
name = 'wikitech'
code = 'en'
def protocol(self, code) -> str:
"""Return the protocol for this family."""
return 'https'
| wikimedia/pywikibot-core | pywikibot/families/wikitech_family.py | Python | mit | 405 |
# -*- coding: utf-8 -*-
import os
import argparse
import datetime
from lxml import etree, html
from lxml.html.clean import Cleaner
import fnmatch # To match files by pattern
import re
import time
import pandas as pd
def timeit(method):
"""Time methods."""
def timed(*args, **kw):
ts = time.time()
result = method(*args, **kw)
te = time.time()
print('%r %2.2f sec' %
(method.__name__, te-ts))
return result
return timed
class TransformHtmlProceedingsToXml(object):
"""Get proceedings of the European Parliament."""
@timeit
def __init__(self):
self.cli()
self.infiles = self.get_files(self.indir, self.pattern)
self.n_proceedings = 0
self.rm_a = Cleaner(remove_tags=['a'])
self.main()
def __str__(self):
message = "Information for {} MEPs extracted!".format(
str(self.n_proceedings))
return message
def get_files(self, directory, fileclue):
"""Get all files in a directory matching a pattern.
Keyword arguments:
directory -- a string for the input folder path
fileclue -- a string as glob pattern
"""
matches = []
for root, dirnames, filenames in os.walk(directory):
for filename in fnmatch.filter(filenames, fileclue):
matches.append(os.path.join(root, filename))
return matches
def read_html(self, infile):
"""Parse a HTML file."""
with open(infile, encoding='utf-8', mode='r') as input:
return html.parse(input)
def serialize(self, infile, root):
ofile_name = os.path.splitext(os.path.basename(infile))[0]
ofile_path = os.path.join(self.outdir, ofile_name+'.xml')
xml = etree.tostring(
root,
encoding='utf-8',
xml_declaration=True,
pretty_print=True).decode('utf-8')
with open(ofile_path, mode='w', encoding='utf-8') as ofile:
ofile.write(xml)
pass
def get_name(self, tree):
name = tree.xpath('//li[@class="mep_name"]')[0]
name = self.rm_a.clean_html(name)
name = html.tostring(name).decode('utf-8')
name = re.sub(r'[\t\n]', r'', name)
name = name.split('<br>')
name = [html.fromstring(x).text_content() for x in name]
name = ' '.join(name)
return name
def get_nationality(self, tree):
nationality = tree.find_class('nationality')[0]
nationality = nationality.text.strip()
return nationality
def get_id(self, infile):
id = os.path.splitext(os.path.basename(infile))[0]
return id
def parse_date(self, a_date, a_pattern):
output = datetime.datetime.strptime(a_date, a_pattern).date()
return output
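    # Example: parse_date("16 December 1981", "%d %B %Y") returns
    # datetime.date(1981, 12, 16); the same pattern is used in get_birth.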
def get_birth(self, tree):
birth = tree.xpath('.//span[@class="more_info"]')
birth_date = None
birth_place = None
death_date = None
death_place = None
for i in birth:
if i.text is not None:
birth_text = re.sub(r'[\t\n]', r'', i.text)
birth_text = birth_text.strip()
if re.match(r'^Date of birth: (.+?), (.+)$', birth_text):
info = re.match(
r'^Date of birth: (.+?), (.+)$', birth_text)
birth_date = self.parse_date(info.group(1), "%d %B %Y")
birth_place = info.group(2)
elif re.match(r'^Date of birth: (.+?)$', birth_text):
info = re.match(r'^Date of birth: (.+?)$', birth_text)
birth_date = self.parse_date(info.group(1), "%d %B %Y")
birth_place = None
elif re.match(r'^Date of death: (.+?), (.+)$', birth_text):
info = re.match(
r'^Date of death: (.+?), (.+)$', birth_text)
death_date = self.parse_date(info.group(1), "%d %B %Y")
death_place = info.group(2)
elif re.match(r'^Date of death: (.+?)$', birth_text):
info = re.match(r'^Date of death: (.+?)$', birth_text)
death_date = self.parse_date(info.group(1), "%d %B %Y")
death_place = None
return birth_date, birth_place, death_date, death_place
def get_political_groups(self, tree, id):
political_groups = tree.xpath('.//div[@class="boxcontent nobackground"]/h4[contains(., "Political groups")]/following-sibling::ul[1]//li')
output = []
for i in political_groups:
info = i.text
info = re.sub(r'\n', r'', info)
info = re.sub(r'\t+', r'\t', info)
info = re.sub(r' \t/ ', r'\t', info)
info = re.sub(r'\t:\t', r'\t', info)
info = re.sub(r' - ', r'\t', info)
info = re.sub(r'\t$', r'', info)
info = info.strip()
info = info.split('\t')
info = [x.strip() for x in info]
m_state = i.attrib['class']
s_date = self.parse_date(info[0], "%d.%m.%Y")
if info[1] == '...':
e_date = self.date
else:
e_date = self.parse_date(info[1], "%d.%m.%Y")
p_group = info[2]
p_group_role = info[3]
output.append({
'id': id,
'm_state': m_state,
's_date': s_date,
'e_date': e_date,
'p_group': p_group,
'p_group_role': p_group_role})
return output
def get_national_parties(self, tree, id):
political_groups = tree.xpath('.//div[@class="boxcontent nobackground"]/h4[contains(., "National parties")]/following-sibling::ul[1]//li')
output = []
for i in political_groups:
info = i.text
info = re.sub(r'\n', r'', info)
info = re.sub(r'\t+', r'\t', info)
info = re.sub(r' \t/ ', r'\t', info)
info = re.sub(r'\t:\t', r'\t', info)
info = re.sub(r' - ', r'\t', info)
info = re.sub(r'\t$', r'', info)
info = info.strip()
info = info.split('\t')
info = [x.strip() for x in info]
s_date = self.parse_date(info[0], "%d.%m.%Y")
if info[1] == '...':
e_date = self.date
else:
e_date = self.parse_date(info[1], "%d.%m.%Y")
n_party = info[2]
output.append({
'id': id,
's_date': s_date,
'e_date': e_date,
'n_party': n_party})
return output
def extract_info(self, infile):
id = self.get_id(infile)
tree = self.read_html(infile).getroot()
name = self.get_name(tree)
nationality = self.get_nationality(tree)
birth_date, birth_place, death_date, death_place = self.get_birth(tree)
self.meps[id] = {
'name': name,
'nationality': nationality,
'birth_date': birth_date,
'birth_place': birth_place,
'death_date': death_date,
'death_place': death_place
}
self.political_groups = (
self.political_groups + self.get_political_groups(tree, id))
self.national_parties = (
self.national_parties + self.get_national_parties(tree, id))
pass
def serialize_dict_of_dicts(self, dict_of_dicts, ofile_name):
df = pd.DataFrame.from_dict(dict_of_dicts, orient='index')
opath = os.path.join(self.outdir, ofile_name)
df.to_csv(
opath,
sep='\t',
mode='w',
encoding='utf-8',
index_label='id')
pass
def serialize_list_of_dicts(self, list_of_dicts, ofile_name, col_order):
df = pd.DataFrame(list_of_dicts)
df = df[col_order]
opath = os.path.join(self.outdir, ofile_name)
df.to_csv(opath, sep='\t', mode='w', encoding='utf-8', index=False)
pass
def main(self):
self.meps = {}
self.political_groups = []
self.national_parties = []
for infile in self.infiles:
print(infile)
if self.date is None:
self.date = datetime.datetime.fromtimestamp(
os.path.getmtime(infile)).date()
self.extract_info(infile)
self.n_proceedings += 1
self.serialize_dict_of_dicts(self.meps, 'meps.csv')
self.serialize_list_of_dicts(
self.political_groups,
'political_groups.csv',
['id', 'm_state', 's_date', 'e_date', 'p_group', 'p_group_role'])
self.serialize_list_of_dicts(
self.national_parties,
'national_parties.csv',
['id', 's_date', 'e_date', 'n_party'])
pass
def cli(self):
"""CLI parses command-line arguments"""
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
required=True,
help="path to the input directory.")
parser.add_argument(
"-o", "--output",
required=True,
help="path to the output directory.")
parser.add_argument(
'-p', "--pattern",
required=False,
default="*.html",
help="glob pattern to filter files.")
parser.add_argument(
'-d', "--date",
required=False,
default=None,
help="date of download of HTML files.")
args = parser.parse_args()
self.indir = args.input
self.outdir = args.output
if not os.path.exists(self.outdir):
os.makedirs(self.outdir)
self.pattern = args.pattern
self.date = args.date
pass
print(TransformHtmlProceedingsToXml())
| chozelinek/europarl | meps_ie.py | Python | mit | 9,953 |
# solve cliff-walking task with Q-Learning, very similar to SARSA
# original example problem from the book, introduction for reinforcement learning
# Author: Wenbin Li
# numeric backend
import pygame
from pygame.locals import *
import numpy as np
grid_size = 100
n_row = 4
n_col = 12
state = np.zeros((n_row * grid_size, n_col * grid_size))
step_size = 0.5
epsilon = 0.1 # parameter for epsilon-greedy
N_actions = 4 # number of actions {left,up,right,down}
N_episodes = 600 # number of episodes
# as suggested by the book, reach optimality by 8000 time steps
# rewards of -1 until the goal state is reached
# -100 for entering cliff region and instantly return to starting position
# specify goal location
goal_r = 3
goal_c = 11
# specify start location
start_r = 3
start_c = 0
# initialize state-action value function
q = np.zeros((n_row,n_col,N_actions)) # num_row by num_col by num_states
# Note: Q(terminal-state,.) = 0
# undiscounted and episodic task
n_steps = 0
n_episodes = 0
# epsilon-greedy strategy
def ep_greedy(epsilon,num_actions,q,i,j):
roll = np.random.uniform(0,1)
# epsilon-greedy strategy
if roll < epsilon: # exploration
a = np.random.randint(0,num_actions)
else: # exploitation
a = np.argmax(q[i,j,:])
return a
# translate action into state-change
def action2state(i,j,a):
    # Note: the coordinate system starts from the upper-left corner and
# right/downwards are the positive direction
if a == 0: # to left
i_next = i
j_next = j - 1
elif a == 1: # upwards
i_next = i - 1
j_next = j
elif a == 2: # to right
i_next = i
j_next = j + 1
else: # downwards
i_next = i + 1
j_next = j
return i_next,j_next
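# Example: from the start state (i=3, j=0), action 1 (up) yields (2, 0) and
# action 2 (right) yields (3, 1); bounds and cliff handling are applied by
# the caller in the episode loop below.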
# Q-Learning method
while n_episodes < N_episodes:
# begin of an episode
i = start_r
j = start_c
# end of an episode
n_episodes += 1
print "episode ",str(n_episodes),"..."
while True:
n_steps += 1
# print " step ",str(n_steps),"..."
# choose A from S using policy derived from Q (epsilon-greedy)
a = ep_greedy(epsilon,N_actions,q,i,j)
        # translate action into state change
i_next,j_next = action2state(i,j,a)
# update the state-action value function with Sarsa/Q-Learning of choice
# state transitions end in the goal state
# state should be in the range of the gridworld
if i_next == goal_r and j_next == goal_c: # reach the goal position
# q[i,j] = q[i,j] + step_size * (-1 + 0 - q[i,j]) #the Q(terminal,.) = 0
q[i,j,a] = q[i,j,a] + step_size * (-1 + 0 - q[i,j,a]) #the Q(terminal,.) = 0
# Note, transition from noterminal to terminal also gets reward of -1 in this case
break
# different reward/consequence when entering the cliff region
elif i_next == 3 and j_next > 1 and j_next < n_col - 1:
i_next = start_r
j_next = start_c
r = -100
elif i_next < 0 or i_next > n_row -1:
i_next = i
r = -1
elif j_next < 0 or j_next > n_col - 1:
j_next = j
r = -1
else:
r = -1
# a_next = ep_greedy(epsilon,N_actions,q,i_next,j_next)
q[i,j,a] = q[i,j,a] + step_size * (r + max(q[i_next,j_next,:]) - q[i,j,a])
i = i_next
j = j_next
# visualize the solution/GUI-backend
# plot the gridworld as background
pygame.init()
pygame.display.set_mode((n_col * grid_size,n_row * grid_size))
pygame.display.set_caption('Cliff Walking')
screen = pygame.display.get_surface()
surface = pygame.Surface(screen.get_size())
bg = pygame.Surface(screen.get_size())
# draw background, with mark on start/end states & cliff region
def draw_bg(surface,n_row,n_col,grid_size,start_r,start_c,goal_r,goal_c):
for i in range(n_col):
for j in range(n_row):
x = i * grid_size
y = j * grid_size
coords = pygame.Rect(x,y,grid_size,grid_size)
pygame.draw.rect(surface,(255,255,255),coords,1)
# draw start state
pygame.draw.circle(surface,(192,192,192),(start_c * grid_size + grid_size/2,
start_r * grid_size + grid_size/2),grid_size/4)
# draw goal state
pygame.draw.circle(surface,(102,204,0),(goal_c * grid_size + grid_size/2,
goal_r * grid_size + grid_size/2),grid_size/4)
# draw cliff region
x = 1 * grid_size
y = 3 * grid_size
coords = pygame.Rect(x,y,grid_size*10,grid_size)
pygame.draw.rect(surface,(192,192,192),coords)
# use state-action function to find one-step optimal policy
def step_q(q,s_r,s_c,n_row,n_col):
print "state-action value:"
print q[s_r,s_c,:]
a = np.argmax(q[s_r,s_c,:]) # greedy only
# display debug
if a == 0:
print "move left"
elif a == 1:
print "move upward"
elif a == 2:
print "move right"
else:
print "move downwards"
s_r_next,s_c_next = action2state(s_r,s_c,a)
    # define rules, especially for when the agent enters the cliff region
if s_r_next == 3 and s_c_next > 1 and s_c_next < n_col - 1:
s_r_next = start_r
s_c_next = start_c
# in theory, the produced optimal policy should not enter this branch
elif s_r_next < 0 or s_r_next > n_row -1:
s_r_next = s_r
elif s_c_next < 0 or s_c_next > n_col - 1:
s_c_next = s_c
return s_r_next,s_c_next
s_r = start_r
s_c = start_c
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
# draw gridworld background
draw_bg(bg,n_row,n_col,grid_size,start_r,start_c,goal_r,goal_c)
screen.blit(bg,(0,0))
# draw the state of the agent, i.e. the path (start --> end) as the foreground
surface.fill((0,0,0))
# use state-action function to find a optimal policy
# in the loop, should provide a step function
#print (s_r,s_c)
s_r_next,s_c_next = step_q(q,s_r,s_c,n_row,n_col)
#print (s_r_next,s_c_next)
if s_r_next != goal_r or s_c_next != goal_c:
pygame.draw.circle(surface,(255,255,255),(s_c_next * grid_size + grid_size/2,
s_r_next * grid_size + grid_size/2),grid_size/4)
bg.blit(surface,(0,0))
pygame.display.flip() # update
pygame.time.delay(1000)
s_r,s_c = s_r_next,s_c_next # update coordinate
| wenbinli/rl | cliffWalk_QL.py | Python | mit | 6,866 |
__author__ = 'jdaniel'
import copy
import random
import itertools
import operator
import math
import struct
import os
import sys
import json
from collections import defaultdict
class AlgorithmBase(object):
def __init__(self, objective_function):
"""
Base Algorithm class which contains utility functionality
common to all other algorithms and acts as the standalone
API for Algorithm usage.
:param objective_function: <function> The model function to be used
def my_objective(x):
f = list_of_objective_values
h = list_of_equality_constraint_values
g = list_of_inequality_constraint_values
return [f,h,g]
:return: None
"""
self._objective_function = objective_function
self._variables = []
self._equality_constraints = []
self._inequality_constraints = []
self._objectives = []
# Algorithm Options
self._pop_size = None
self._generations = None
self._conv_tol = None
self._eqcon_tol = None
self._seed = None
self._eta_c = None
self._eta_m = None
self._p_cross = None
self._p_mut = None
self._islands = None
self._epoch = None
self._migrants = None
self._spheres = None
# Problem information
self._ndim = None
self._neqcon = None
self._nneqcon = None
self._lower_bound = []
self._upper_bound = []
# Data objects
self._history = History()
self._archive = Archive()
self._metadata = Metadata()
# Random number generator
self._rnd = random.Random()
def register_variable(self, name, lower, upper):
"""
Register a decision variable with the algorithm
:param name: <string> Reference name of the decision variable
:param lower: <float> Lower bound for the variable
:param upper: <float> Upper bound for the variable
:return: None
"""
var = Variable(name, lower, upper)
self._variables.append(var)
def register_constraint(self, name, ctype):
"""
Register a constraint variable with the algorithm
:param name: <string> Reference name of the constraint variable
:param ctype: <string> Set constraint type, 'e': equality constraint; 'i': inequality constraint
:return: None
"""
con = Constraint(name)
if ctype == 'e':
self._equality_constraints.append(con)
elif ctype == 'i':
self._inequality_constraints.append(con)
else:
err_msg = 'Unrecognized constraint type ' + repr(ctype)
raise AlgorithmException(err_msg)
def register_objective(self, name):
"""
Register an objective variable with the algorithm
:param name: <string> Reference name of the objective variable
:return: None
"""
obj = Objective(name)
self._objectives.append(obj)
def set_options(self, option, value):
"""
Set an algorithm option value
:param option: <string> Name of the option to set
:param value: <int, float> Value of the option to set
:return: None
"""
if option == 'population_size':
self.check_population_size(value)
self._pop_size = value
elif option == 'generations':
self.check_generations(value)
self._generations = value
elif option == 'conv_tol':
self.check_conv_tol(value)
self._conv_tol = value
elif option == 'eqcon_tol':
self.check_eqcon_tol(value)
self._eqcon_tol = value
elif option == 'eta_c':
self.check_eta_c(value)
self._eta_c = value
elif option == 'eta_m':
self.check_eta_m(value)
self._eta_m = value
elif option == 'p_cross':
self.check_p_cross(value)
self._p_cross = value
elif option == 'p_mut':
self.check_p_mut(value)
self._p_mut = value
elif option == 'islands':
self.check_islands(value)
self._islands = value
elif option == 'epoch':
self.check_epoch(value)
self._epoch = value
elif option == 'migrants':
self.check_migrants(value)
self._migrants = value
elif option == 'spheres':
self.check_spheres(value)
self._spheres = value
elif option == 'seed':
self.set_seed(value)
else:
err_msg = 'Unrecognized option ' + repr(option)
raise AlgorithmException(err_msg)
def set_seed(self, value):
"""
Set the seed value for the optimisation
:param value: Value to set
:return: None
"""
if value == 0:
self._seed = struct.unpack("<L", os.urandom(4))[0]
else:
self._seed = value
self._rnd.seed(self._seed)
@staticmethod
def check_population_size(value):
"""
Check the population value
:param value: Value to set
:return:
"""
# Check if integer
if not isinstance(value, (int, long)):
err_msg = 'Population is not an integer'
raise AlgorithmException(err_msg)
# Check if greater than zero
if value <= 0:
err_msg = 'Population size must be greater than zero'
raise AlgorithmException(err_msg)
# Check if divisible by 4
if value % 4 != 0:
err_msg = 'Population size must be evenly divisible by four'
raise AlgorithmException(err_msg)
@staticmethod
def check_generations(value):
"""
Check the generations value
:param value: Value to set
:return: None
"""
if value <= 0:
            err_msg = 'The generations value must be an integer greater than 0'
raise AlgorithmException(err_msg)
@staticmethod
def check_conv_tol(value):
"""
Check the convergence tolerance value
:param value: Value to set
:return: None
"""
# Check if between (0.0, 1.0)
if value >= 1.0 or value <= 0.0:
err_msg = 'The convergence tolerance value conv_tol must be between (0.0, 1.0)'
raise AlgorithmException(err_msg)
@staticmethod
def check_eqcon_tol(value):
"""
Check the equality constraint tolerance value
:param value: Value to set
:return: None
"""
# Check if greater than 0
if value <= 0.0:
err_msg = 'The equality constraint tolerance value eqcon_tol must be greater than 0'
raise AlgorithmException(err_msg)
@staticmethod
def check_eta_c(value):
"""
Check the crossover distribution index value
:param value: Value to set
:return: None
"""
# Check if greater than zero
if value <= 0:
err_msg = 'The crossover distribution index eta_c must be greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_eta_m(value):
"""
Check the mutation distribution index value
:param value: Value to set
:return: None
"""
# Check if greater than zero
if value <= 0:
err_msg = 'The mutation distribution index eta_m must be greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_p_cross(value):
"""
Check the crossover probability value
:param value: Value to set
:return: None
"""
# Check if between (0.0, 1.0)
if value < 0.0 or value > 1.0:
err_msg = 'The crossover probability p_cross must be between 0.0 and 1.0'
raise AlgorithmException(err_msg)
@staticmethod
def check_p_mut(value):
"""
Check the mutation probability value
:param value: Value to set
:return: None
"""
# Check if between (0.0, 1.0)
if value < 0.0 or value > 1.0:
err_msg = 'The mutation probability p_mut must be between 0.0 and 1.0'
raise AlgorithmException(err_msg)
@staticmethod
def check_islands(value):
"""
Check the number of islands
:param value: Value to set
:return: None
"""
# Check greater than zero
if value <= 0:
err_msg = 'Number of islands must be a positive integer greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_spheres(value):
"""
Check the number of spheres
:param value: Value to set
:return: None
"""
if value <= 0:
            err_msg = 'Number of spheres must be a positive integer'
raise AlgorithmException(err_msg)
@staticmethod
def check_epoch(value):
"""
Check the epoch rate
:param value: Value to set
:return: None
"""
if value <= 0:
            err_msg = 'The epoch rate must be a positive integer'
raise AlgorithmException(err_msg)
@staticmethod
def check_migrants(value):
"""
Check the migrants value
:param value: Value to set
:return: None
"""
if value < 0:
err_msg = 'The number of migrants must be zero or greater'
raise AlgorithmException(err_msg)
def setup_variables(self):
"""
Get information about the model once all the variables
have been added.
:return: None
"""
self._ndim = len(self._variables)
self._neqcon = len(self._equality_constraints)
self._nneqcon = len(self._inequality_constraints)
for var in self._variables:
self._lower_bound.append(var.lower)
self._upper_bound.append(var.upper)
def evaluate_population(self, population):
"""
Evaluate a population
:param population: <Population> Population to evaluate
:return: None
"""
for ind in population:
self.evaluate(ind)
def evaluate(self, individual):
"""
Evaluate an individual
:param individual: <Individual> Individual to evaluate
:return: None
"""
f, h, g = self._objective_function(individual.x)
individual.f = f
individual.h = h
individual.g = g
        # Calculate the total constraint violation; s > 0 marks the
        # individual as infeasible for constraint-dominance comparisons
        s = 0.0
        for i in xrange(self._neqcon):
            s += max(0.0, math.fabs(h[i]) - self._eqcon_tol)
        for i in xrange(self._nneqcon):
            s += max(0.0, g[i])
        individual.s = s
        self._history.add_point(individual)
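# A hedged sketch (not part of the original module) of an objective function
# compatible with Algorithm.evaluate(): it takes the decision vector x and
# returns (f, h, g) -- objective values, equality-constraint values, and
# inequality-constraint values (feasible when g[i] <= 0). The Binh-Korn test
# problem is used purely as an illustration.
def _example_binh_korn(x):
    f = [4.0*x[0]**2 + 4.0*x[1]**2, (x[0] - 5.0)**2 + (x[1] - 5.0)**2]
    h = []  # no equality constraints
    g = [(x[0] - 5.0)**2 + x[1]**2 - 25.0,
         7.7 - (x[0] - 8.0)**2 - (x[1] + 3.0)**2]
    return f, h, g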
class Individual(object):
def __init__(self):
"""
Class for holding information and methods related to the concept
of an individual.
:return: None
"""
# Decision variables
self.x = None
# Objective variables
self.f = None
# Equality constraint variables
self.h = None
# Inequality constraint variables
self.g = None
# Distance metric
self.d = None
# Constraint violation
self.s = None
# Domination count
self.n = None
# Rank
self.r = None
# ID
self.id = None
def dominates(self, other):
"""
Method to determine if the individual dominates another individual using
the constraint dominance approach.
:param other: <Individual> Other individual to test against
        :return: <bool> True if self dominates other, False otherwise (including when the two are incomparable or equal)
"""
not_equal = False
flag1 = 0
flag2 = 0
if self.s > 0:
flag1 = 1
if other.s > 0:
flag2 = 1
# Both individuals are invalid
if flag1 == 1 and flag2 == 1:
if self.s < other.s:
return True
else:
return False
# One of the individuals is invalid
elif flag1 ^ flag2:
if flag1:
return False
else:
return True
# Both individuals are valid
else:
for self_fit, other_fit in zip(self.f, other.f):
if self_fit > other_fit:
return False
elif self_fit < other_fit:
not_equal = True
return not_equal
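    # Hedged illustration (assumed values): with minimization objectives, a
    # feasible individual (s <= 0) always dominates an infeasible one (s > 0);
    # between two infeasible individuals the smaller violation s wins; between
    # two feasible individuals standard Pareto dominance on f applies.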
def __hash__(self):
"""
        Return a hash based on the individual's decision vector x
:return: Hash for the individual
"""
return hash(repr(self.x))
def __repr__(self):
s = 'ID: ' + repr(self.id) + '\n'
s += 'x: ' + repr(self.x) + '\n'
s += 'f: ' + repr(self.f) + '\n'
if self.h is not None:
s += 'h: ' + repr(self.h) + '\n'
if self.g is not None:
s += 'g: ' + repr(self.g) + '\n'
if self.d is not None:
s += 'd: ' + repr(self.d) + '\n'
if self.s is not None:
s += 's: ' + repr(self.s) + '\n'
if self.r is not None:
s += 'r: ' + repr(self.r) + '\n'
return s
def __eq__(self, other):
for self_x, other_x in zip(self.x, other.x):
if self_x != other_x:
return False
return True
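# Note: Archive.update() deduplicates fronts via set(), which relies on the
# __hash__/__eq__ pair above both keying off the decision vector x. (Under
# Python 2, defining __eq__ does not automatically provide __ne__.)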
class Population(list):
def __init__(self):
super(Population, self).__init__()
def populate(self, individuals):
"""
Populate the population with a list of individuals
:param individuals: <List<Individual>> List of individuals to use
:return: None
"""
for ind in individuals:
self.append(copy.deepcopy(ind))
def to_json(self):
return json.dumps(self, default=lambda o: o.__dict__)
    def __repr__(self):
        s = ''
        for ind in self:
            s += repr(ind) + '\n'
        return s
class SubPopulation(list):
def __init__(self):
super(SubPopulation, self).__init__()
def populate(self, individuals):
"""
Populate the sub-population with a list of individuals
:param individuals: <List<Individual>> List of individuals to use
:return: None
"""
for ind in individuals:
self.append(copy.deepcopy(ind))
class Archive(object):
def __init__(self):
"""
Optimization run archive of non-dominated solutions at each generation
which is used to predict convergence of the algorithm.
:return: None
"""
# Tracks the archive of non-dominated solutions
self._archive = []
        # Index of the most recent entry in the non-dominated archive
        self._idx = 0
# Tracks the consolidation ratio
self._consolidation_ratio = []
# Population size
self._population_size = None
def initialize(self, population):
"""
Initialize the archive
:param population: <Population> Individuals to initialize the population with
:return: None
"""
self._archive.append(nondominated_sort(population, len(population), first_front_only=True)[0])
self._consolidation_ratio.append(0)
self._population_size = len(population)
def update(self, population):
"""
Update the archive
:param population: <Population> Population to update the archive with
:return: None
"""
nondominated_solutions = nondominated_sort(copy.deepcopy(population), len(population), first_front_only=True)[0]
archive_copy = copy.deepcopy(self._archive[self._idx])
archive_copy = archive_copy + nondominated_solutions
nondominated_solutions = nondominated_sort(archive_copy, len(archive_copy), first_front_only=True)[0]
        # Remove duplicates
nondominated_solutions = list(set(nondominated_solutions))
# Update the archive
self._archive.append(nondominated_solutions)
self._idx += 1
self._consolidation_ratio.append(len(self._archive[self._idx])/float(2*self._population_size))
def get_consolidation_ratio(self):
"""
        Return the most recently calculated consolidation ratio
:return: <float> Current consolidation ratio value
"""
return self._consolidation_ratio[self._idx]
def get_consolidation_ratio_history(self):
"""
Return the consolidation ratio history
:return: <List<float>> Consolidation ratio history
"""
return self._consolidation_ratio
def get_archive(self):
"""
Get the saved archive at each update
:return: <List<Population>> archive
"""
return self._archive
class Metadata(object):
def __init__(self):
self.end_msg = None
self.fevals = None
self.gen = None
def __repr__(self):
s = '\n' + self.end_msg + '\n'
s += 'fevals: ' + repr(self.fevals) + '\n'
s += 'gen: ' + repr(self.gen) + '\n'
return s
class History(list):
def __init__(self):
super(History, self).__init__()
def add_point(self, individual):
"""
Add a design point to the history
:param individual: <Individual> Individual to add to the history
:return: None
"""
self.append(individual)
class Variable(object):
def __init__(self, name, lower, upper):
"""
Data structure that contains decision variable information.
:param name: <string> Reference name for the decision variable
:param lower: <float> Lower bound of the decision variable
:param upper: <float> Upper bound of the decision variable
:return: None
"""
self.name = name
self.lower = lower
self.upper = upper
class Constraint(object):
def __init__(self, name):
"""
Data structure that contains constraint variable information.
:param name: <string> Reference name for the constraint variable
:return: None
"""
self.name = name
class Objective(object):
def __init__(self, name):
"""
Data structure that contains objective variable information.
:param name: <string> Reference name for the objective variable
:return: None
"""
self.name = name
class AlgorithmException(Exception):
def __init__(self, message):
"""
Exception class that gets raised when an error occurs with the algorithm.
:param message: Error message to display
:return: None
"""
Exception.__init__(self, message)
# Utility functions for dealing with algorithms containing sub-populations
def flatten_population(population_list):
"""
Combine each of the sub-populations into a single global population
:param population_list: <List<Population>>
    :return: <Population> Flattened global population of individuals
    """
    global_pop = Population()
    for pop in population_list:
        # extend rather than append: append would nest each sub-population
        # as a list instead of merging its individuals
        global_pop.extend(pop)
    return global_pop
# Genetic Operators
def mutation(population, n_dim, lower, upper, eta_m, p_mut):
"""
Performs bounded polynomial mutation on the population.
:param population: <Population> Population to perform mutation on
:param n_dim: <int> Number of decision variable dimensions
:param lower: <list<float>> List of decision variable lower bound values
    :param upper: <list<float>> List of decision variable upper bound values
:param eta_m: <float> Mutation index
:param p_mut: <float> Mutation probability
:return: None
"""
for ind in population:
mutate(ind, n_dim, lower, upper, eta_m, p_mut)
def mutate(individual, n_dim, lower, upper, eta_m, p_mut):
"""
Performs bounded polynomial mutation on an individual.
:param individual: <Individual> Individual to perform mutation on
    :param n_dim: <int> Number of decision variable dimensions
:param lower: <list<float>> List of decision variable lower bound values.
:param upper: <list<float>> List of decision variable upper bound values.
:param eta_m: <float> Mutation index
:param p_mut: <float> Mutation probability
:return: None
"""
for i, xl, xu in zip(xrange(n_dim), lower, upper):
if random.random() <= p_mut:
            x = individual.x[i]
delta_1 = (x - xl) / (xu - xl)
delta_2 = (xu - x) / (xu - xl)
rand = random.random()
mut_pow = 1.0 / (eta_m + 1.0)
if rand < 0.5:
xy = 1.0 - delta_1
val = 2.0 * rand + (1.0 - 2.0*rand)*(xy**(eta_m + 1))
delta_q = val**mut_pow - 1.0
else:
xy = 1.0 - delta_2
val = 2.0 * (1.0 - rand) + 2.0 * (rand - 0.5)*(xy**(eta_m + 1))
delta_q = 1.0 - val**mut_pow
x += delta_q * (xu - xl)
x = min(max(x, xl), xu)
individual.x[i] = x
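# Hedged usage sketch (assumed values, not part of the original API): apply
# polynomial mutation to one two-dimensional individual bounded to
# [0, 10] x [0, 10].
def _demo_mutate():
    ind = Individual()
    ind.x = [2.5, 7.5]
    # p_mut = 1.0 forces every gene to mutate; results stay inside the bounds
    mutate(ind, 2, [0.0, 0.0], [10.0, 10.0], 20.0, 1.0)
    return ind.x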
def crossover(population, n_dim, lower, upper, eta_c, p_cross):
"""
Perform simulated binary crossover on the population.
:param population: <Population> Population to perform crossover on.
:param n_dim: <int> Number of decision variable dimensions.
:param lower: <list<float>> List of decision variable lower bound values.
:param upper: <list<float>> List of decision variable upper bound values.
:param eta_c: <float> Crossover index.
:param p_cross: <float> Crossover probability.
:return: <Population> Child population
"""
child_pop = Population()
child_pop.populate(population)
for ind1, ind2 in zip(child_pop[::2], child_pop[1::2]):
if random.random() <= p_cross:
mate(ind1, ind2, n_dim, lower, upper, eta_c)
return child_pop
def mate(ind1, ind2, n_dim, lower, upper, eta_c):
"""
Performs simulated binary crossover between two individuals to produce
two offspring.
:param ind1: <Individual> First individual involved in crossover
:param ind2: <Individual> Second individual involved in crossover
:param n_dim: <int> Number of decision variable dimensions.
:param lower: <list<float>> List of decision variable lower bound values.
:param upper: <list<float>> List of decision variable upper bound values.
:param eta_c: <float> Crossover index.
:return: None
"""
for i, xl, xu in zip(xrange(n_dim), lower, upper):
if random.random() <= 0.5:
if abs(ind1.x[i] - ind2.x[i]) > 1e-14:
x1 = min(ind1.x[i], ind2.x[i])
x2 = max(ind1.x[i], ind2.x[i])
rand = random.random()
beta = 1.0 + (2.0*(x1 - xl) / (x2 - x1))
alpha = 2.0 - beta**-(eta_c + 1)
if rand <= 1.0 / alpha:
beta_q = (rand*alpha)**(1.0 / (eta_c + 1))
else:
beta_q = (1.0 / (2.0 - rand*alpha))**(1.0 / (eta_c + 1))
c1 = 0.5 * (x1 + x2 - beta_q * (x2 - x1))
beta = 1.0 + (2.0*(xu - x2) / (x2 - x1))
alpha = 2.0 - beta**-(eta_c + 1)
if rand <= 1.0 / alpha:
beta_q = (rand*alpha)**(1.0 / (eta_c + 1))
else:
beta_q = (1.0 / (2.0 - rand * alpha))**(1.0 / (eta_c + 1))
c2 = 0.5 * (x1 + x2 + beta_q*(x2 - x1))
c1 = min(max(c1, xl), xu)
c2 = min(max(c2, xl), xu)
if random.random() <= 0.5:
ind1.x[i] = c2
ind2.x[i] = c1
else:
ind1.x[i] = c1
ind2.x[i] = c2
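# In SBX the spread factor beta_q is drawn from a polynomial distribution
# indexed by eta_c: a large eta_c concentrates children near their parents,
# a small eta_c spreads them out. The two beta/alpha computations above keep
# c1 within the lower bound xl and c2 within the upper bound xu, and the
# final coin flip randomizes which parent position receives which child.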
def selection(population, k):
"""
Apply the NSGA-II selection operator on a population of individuals.
:param population: <Population> Population of individuals to select from
:param k: <int> The number of individuals to select
:return: <Population> Selected population of individuals
"""
pareto_fronts = nondominated_sort(population, k)
for front in pareto_fronts:
assign_crowding_distance(front)
chosen = list(itertools.chain(*pareto_fronts[:-1]))
k -= len(chosen)
if k > 0:
sorted_front = sorted(pareto_fronts[-1], key=operator.attrgetter("d"), reverse=True)
chosen.extend(sorted_front[:k])
return copy.deepcopy(chosen)
def nondominated_sort(population, k, first_front_only=False):
"""
Sort the first k individuals from the population into different nondomination
levels using the Fast Nondominated Sorting Approach proposed by Deb et al.
Function structure and implementation adapted from the DEAP package.
    :param population: <Population> Population of individuals to sort
    :param k: <int> The number of individuals to select
    :param first_front_only: <bool> If True, compute only the first front
    :return: <List<List<Individual>>> A list of ordered Pareto fronts
"""
if k == 0:
return []
    map_fit_ind = {}
    for ind in population:
        # individuals with identical objective values collapse onto a single
        # representative keyed by their fitness tuple
        map_fit_ind[tuple(ind.f)] = ind
fits = map_fit_ind.keys()
current_front = []
next_front = []
dominating_fits = defaultdict(int)
dominated_fits = defaultdict(list)
# Rank first Pareto front
for i, fit_i in enumerate(fits):
for fit_j in fits[i+1:]:
if map_fit_ind[tuple(fit_i)].dominates(map_fit_ind[tuple(fit_j)]):
dominating_fits[fit_j] += 1
dominated_fits[fit_i].append(fit_j)
elif map_fit_ind[tuple(fit_j)].dominates(map_fit_ind[tuple(fit_i)]):
dominating_fits[fit_i] += 1
dominated_fits[fit_j].append(fit_i)
if dominating_fits[fit_i] == 0:
map_fit_ind[tuple(fit_i)].r = 1
current_front.append(fit_i)
fronts = [[]]
for fit in current_front:
fronts[-1].append(map_fit_ind[tuple(fit)])
pareto_sorted = len(fronts[-1])
    # Rank the subsequent fronts until all individuals are sorted or
    # the requested number of individuals has been sorted.
if not first_front_only:
N = min(len(population), k)
while pareto_sorted < N:
fronts.append([])
for fit_p in current_front:
for fit_d in dominated_fits[fit_p]:
dominating_fits[fit_d] -= 1
if dominating_fits[fit_d] == 0:
next_front.append(fit_d)
pareto_sorted += 1
fronts[-1].append(map_fit_ind[tuple(fit_d)])
map_fit_ind[tuple(fit_d)].r = len(fronts)
current_front = next_front
next_front = []
return copy.deepcopy(fronts)
def assign_crowding_distance(individuals):
"""
Assign the crowding distance to each individual.
:param individuals: <Population, List> Individuals to assign crowding distance to.
:return: None
"""
if len(individuals) == 0:
return
distances = [0.0] * len(individuals)
crowd = [(ind.f, i) for i, ind in enumerate(individuals)]
nobj = len(individuals[0].f)
for i in xrange(nobj):
crowd.sort(key=lambda element: element[0][i])
distances[crowd[0][1]] = float("inf")
distances[crowd[-1][1]] = float("inf")
if crowd[-1][0][i] == crowd[0][0][i]:
continue
norm = nobj * float(crowd[-1][0][i] - crowd[0][0][i])
        for prev, cur, nxt in zip(crowd[:-2], crowd[1:-1], crowd[2:]):
            distances[cur[1]] += (nxt[0][i] - prev[0][i]) / norm
for i, dist in enumerate(distances):
individuals[i].d = dist
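# Worked illustration (assumed values): for one objective with sorted values
# [1.0, 2.0, 4.0], both boundary individuals get d = inf and the middle one
# gets (4.0 - 1.0) / (1 * (4.0 - 1.0)) = 1.0.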
def tournament_select(population, k):
"""
Tournament selection based on the constraint dominance principle and the
crowding distance.
:param population: <Population, List> Individuals to select from
:param k: <int> The number of individuals to select.
:return: <List<Individual>> The list of selected individuals
"""
def tournament(ind1, ind2):
if ind1.dominates(ind2):
return copy.deepcopy(ind1)
elif ind2.dominates(ind1):
return copy.deepcopy(ind2)
if ind1.d < ind2.d:
return copy.deepcopy(ind2)
elif ind1.d > ind2.d:
return copy.deepcopy(ind1)
if random.random() <= 0.5:
return copy.deepcopy(ind1)
return copy.deepcopy(ind2)
population_1 = random.sample(population, len(population))
population_2 = random.sample(population, len(population))
chosen = []
for i in xrange(0, k, 4):
chosen.append(tournament(population_1[i], population_1[i+1]))
        chosen.append(tournament(population_1[i+2], population_1[i+3]))
chosen.append(tournament(population_2[i], population_2[i+1]))
chosen.append(tournament(population_2[i+2], population_2[i+3]))
return chosen
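# Note: the i..i+3 indexing assumes k is a multiple of four and no larger
# than len(population), matching the divisible-by-four requirement enforced
# in check_population_size() above.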
def update_progress(progress):
    """
    Render a simple in-place console progress bar.
    :param progress: <float> Progress fraction between 0.0 and 1.0
    :return: None
    """
    bar_length = 20  # Modify this to change the length of the progress bar
status = ""
if isinstance(progress, int):
progress = float(progress)
if not isinstance(progress, float):
progress = 0
status = "error: progress var must be float\r\n"
if progress < 0:
progress = 0
status = "Halt...\r\n"
if progress >= 1:
progress = 1
status = "Done...\r\n"
block = int(round(bar_length*progress))
text = "\rPercent: [{0}] {1}% {2}".format("="*block + " "*(bar_length-block), progress*100, status)
sys.stdout.write(text)
sys.stdout.flush()
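# Example: update_progress(0.5) redraws the line in place as
# "Percent: [==========          ] 50.0% "; values below 0 or at/above 1 are
# clamped and annotated with "Halt..." or "Done..." respectively.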
| jldaniel/Gaia | Algorithms/algorithm_base.py | Python | mit | 29,920 |
"""update cascading rules
Revision ID: 619fe6fe066c
Revises: 73ea6c072986
Create Date: 2017-03-15 10:51:12.494508
"""
# revision identifiers, used by Alembic.
revision = "619fe6fe066c"
down_revision = "73ea6c072986"
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
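# Hedged illustration (hypothetical table and constraint names, not from this
# project): a cascading-rules migration would typically recreate a foreign key
# with an ON DELETE rule, e.g.
#
# def upgrade():
#     op.drop_constraint("sample_case_id_fkey", "sample", type_="foreignkey")
#     op.create_foreign_key(
#         "sample_case_id_fkey", "sample", "case",
#         ["case_id"], ["id"], ondelete="CASCADE",
#     )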
| Clinical-Genomics/housekeeper | alembic/versions/619fe6fe066c_update_cascading_rules.py | Python | mit | 554 |
"""Freebox component constants."""
from __future__ import annotations
import socket
from homeassistant.components.sensor import SensorEntityDescription
from homeassistant.const import DATA_RATE_KILOBYTES_PER_SECOND, PERCENTAGE, Platform
DOMAIN = "freebox"
SERVICE_REBOOT = "reboot"
APP_DESC = {
"app_id": "hass",
"app_name": "Home Assistant",
"app_version": "0.106",
"device_name": socket.gethostname(),
}
API_VERSION = "v6"
PLATFORMS = [Platform.BUTTON, Platform.DEVICE_TRACKER, Platform.SENSOR, Platform.SWITCH]
DEFAULT_DEVICE_NAME = "Unknown device"
# to store the cookie
STORAGE_KEY = DOMAIN
STORAGE_VERSION = 1
CONNECTION_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="rate_down",
name="Freebox download speed",
native_unit_of_measurement=DATA_RATE_KILOBYTES_PER_SECOND,
icon="mdi:download-network",
),
SensorEntityDescription(
key="rate_up",
name="Freebox upload speed",
native_unit_of_measurement=DATA_RATE_KILOBYTES_PER_SECOND,
icon="mdi:upload-network",
),
)
CONNECTION_SENSORS_KEYS: list[str] = [desc.key for desc in CONNECTION_SENSORS]
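# With the descriptions above this evaluates to ["rate_down", "rate_up"].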
CALL_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="missed",
name="Freebox missed calls",
icon="mdi:phone-missed",
),
)
DISK_PARTITION_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="partition_free_space",
name="free space",
native_unit_of_measurement=PERCENTAGE,
icon="mdi:harddisk",
),
)
# Icons
DEVICE_ICONS = {
"freebox_delta": "mdi:television-guide",
"freebox_hd": "mdi:television-guide",
"freebox_mini": "mdi:television-guide",
"freebox_player": "mdi:television-guide",
"ip_camera": "mdi:cctv",
"ip_phone": "mdi:phone-voip",
"laptop": "mdi:laptop",
"multimedia_device": "mdi:play-network",
"nas": "mdi:nas",
"networking_device": "mdi:network",
"printer": "mdi:printer",
"router": "mdi:router-wireless",
"smartphone": "mdi:cellphone",
"tablet": "mdi:tablet",
"television": "mdi:television",
"vg_console": "mdi:gamepad-variant",
"workstation": "mdi:desktop-tower-monitor",
}
| rohitranjan1991/home-assistant | homeassistant/components/freebox/const.py | Python | mit | 2,269 |
""" Functions and classes dealing with commands. """
| robobrobro/coffer | coffer/command/commands/__init__.py | Python | mit | 53 |