# ===== emonty/burrow :: burrow/tests/__init__.py (apache-2.0) =====
# Copyright (C) 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Unittests for burrow.'''
import atexit
import os
import signal
import sys
import time
import burrow
def start_server():
    '''Fork and start the server, saving the pid in a file.'''
    kill_server()
    pid = os.fork()
    if pid == 0:
        server = burrow.Server(add_default_log_handler=False)
        server.frontends[0].default_ttl = 0
        server.run()
        sys.exit(0)
    pid_file = open('TestHTTP.pid', 'w')
    pid_file.write(str(pid))
    pid_file.close()
    atexit.register(kill_server)
    time.sleep(1)


def kill_server():
    '''Try killing the server if the pid file exists.'''
    try:
        pid_file = open('TestHTTP.pid', 'r')
        pid = pid_file.read()
        pid_file.close()
        try:
            os.kill(int(pid), signal.SIGUSR1)
            time.sleep(1)
            os.kill(int(pid), signal.SIGTERM)
        except OSError:
            pass
        os.unlink('TestHTTP.pid')
    except IOError:
        pass


start_server()
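
# Illustrative note (editorial addition, not part of the original module):
# importing this test package runs start_server() above as a side effect,
# which forks a burrow.Server and records the child pid in TestHTTP.pid;
# kill_server() then stops it by sending SIGUSR1 (graceful shutdown)
# followed by SIGTERM to the recorded pid, e.g.:
#
#     import burrow.tests   # forks the test server as a side effect
#     burrow.tests.kill_server()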

# ===== CopyChat/Plotting :: Downscaling/bias.RSDS.GCMs.py (gpl-3.0) =====
#!/usr/bin/env python
########################################
# Global map for tests
# from Rabea Amther
########################################
# http://gfesuite.noaa.gov/developer/netCDFPythonInterface.html
import math
import numpy as np
import pylab as pl
import Scientific.IO.NetCDF as IO
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
import matplotlib.lines as lines
from mpl_toolkits.basemap import Basemap , addcyclic
from matplotlib.colors import LinearSegmentedColormap
import textwrap
pl.close('all')
########################## for CMIP5 characters
DIR='/Users/tang/climate/CMIP5/hist/SWIO'
VARIABLE='rsds'
PRODUCT='Amon'
ENSEMBLE='r1i1p1'
EXPERIMENT='hist'
TIME='195001-200512'
#OBS='CRU'
OBS='CERES'
season='summer'
#season='winter'
K=0
NonData=['EC-EARTH-XXXX','CSIRO-Mk3-6-0-XXXXXX']
GCMs=[\
    'ACCESS1-0',\
    'BNU-ESM',\
    'CCSM4',\
    'CESM1-BGC',\
    'CESM1-CAM5',\
    'CESM1-FASTCHEM',\
    'CESM1-WACCM',\
    'CMCC-CESM',\
    'CNRM-CM5',\
    'CSIRO-Mk3-6-0',\
    'CanESM2',\
    'EC-EARTH',\
    'FIO-ESM',\
    'GFDL-ESM2M',\
    'GISS-E2-H',\
    'HadGEM2-AO',\
    'HadGEM2-ES',\
    'IPSL-CM5A-LR',\
    'IPSL-CM5A-MR',\
    'MIROC-ESM-CHEM',\
    'MIROC-ESM',\
    'MIROC5',\
    'MPI-ESM-LR',\
    'MPI-ESM-MR',\
    'MPI-ESM-P',\
    'MRI-CGCM3',\
    'MRI-ESM1',\
    'NorESM1-ME',\
    'NorESM1-M',\
    'bcc-csm1-1-m',\
    'bcc-csm1-1',\
    'inmcm4',\
    ]
ENSEMBLE=[ \
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r12i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r2i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    'r1i1p1',\
    ]
COLOR=['darkred','darkblue','darkgreen','deeppink',\
    'black','orangered','cyan','magenta']
# read CERES data:
if OBS == 'CERES':
    oVar='rsds'
    obs1='/Users/tang/climate/GLOBALDATA/OBSDATA/CERES/rsds_CERES-EBAF_L3B_Ed2-8_2001-2005.NDJFMA.SWIO.nc'
else:
    # read MODIS data:
    oVar='clt'
    obs1='/Users/tang/climate/GLOBALDATA/OBSDATA/MODIS/clt_MODIS_L3_C5_200101-200512.ymonmean.NDJFMA.SWIO.nc'

print obs1
obsfile1=IO.NetCDFFile(obs1,'r')
ObsVar=obsfile1.variables[oVar][0][:][:].copy()
for idx,Model in enumerate(GCMs):
    if OBS == 'CERES':
        infile1=DIR+\
            '/rsds_Amon_'+Model+'_historical_'+ENSEMBLE[idx]+\
            '_200101-200512.summer.remap.CERES.SWIO.nc'
        #GFDL-ESM2M/clt_Amon_GFDL-ESM2M_historical_r1i1p1_200101-200512.nc.summer.mean.nc.remap.nc
        #rsds_Amon_bcc-csm1-1_historical_r1i1p1_200101-200512.summer.remap.CERES.SWIO.nc
    else:
        infile1=DIR+'/'+\
            'clt_Amon_'+Model+'_historical_'+ENSEMBLE[idx]+\
            '_200101-200512.'+season+'.remap.modis.SWIO.nc'
    print infile1

    if Model in NonData:
        infile1=obsfile1
        VAR=infile1.variables[oVar][0,:,:].copy()
    else:
        print 'k=',idx
        infile1=IO.NetCDFFile(infile1,'r')
        VAR=infile1.variables[VARIABLE][0,:,:].copy()
    print 'the variable tas ===============: '
    print VAR

    #open input files
    # read the variables:
    lat = infile1.variables['lat'][:].copy()
    lon = infile1.variables['lon'][:].copy()

    print np.shape(VAR)
    print np.shape(ObsVar)

    Bias=VAR-ObsVar
    print np.shape(Bias)
    #quit()

    CoLev=10 #number of levels of colorbar

    #=================================================== to plot
    fig=plt.subplot(8,4,idx+1,aspect='equal')
    print "============="
    print idx; print Model

    map=Basemap(projection='cyl',llcrnrlat=np.min(lat),urcrnrlat=np.max(lat),\
        llcrnrlon=np.min(lon),urcrnrlon=np.max(lon),resolution='l')
    map.drawcoastlines(linewidth=0.35)
    map.drawparallels(np.arange(-90.,91.,15.),labels=[1,0,0,0],linewidth=0.35)
    map.drawmeridians(np.arange(-180.,181.,20.),labels=[0,0,0,1],linewidth=0.35)
    map.drawmapboundary()

    x,y=map(lon,lat)
    cmap=plt.get_cmap('bwr')
    #cmap=plt.get_cmap('RdBu_r')
    pic=map.pcolormesh(x,y,Bias,cmap=cmap)

    plt.title(GCMs[idx])
    #plt.figtext(0.68,0.73,timestamp, size="small")

    #set the same colorbar range
    pic.set_clim(vmin=-100,vmax=100)

    plt.subplots_adjust(bottom=0.1, right=0.8, top=0.9)
    cax = plt.axes([0.85, 0.1, 0.01, 0.8])
    plt.colorbar(cax=cax)
    #if idx > 11:
    #plt.colorbar(orientation='horizontal') # draw colorbar
    #plt.legend(loc=2)

plt.suptitle('seasonal mean bias of Surface Downwelling SW radiation (W m-2) vs CERES',fontsize=18)

plt.show()
quit()

# ===== pumbaEO/undiff1c :: setup.py (apache-2.0) =====
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Based on https://github.com/pypa/sampleproject/blob/master/setup.py."""
from __future__ import unicode_literals
# To use a consistent encoding
import codecs
import os
from setuptools import setup, find_packages
import sys
# Shortcut for building/publishing to Pypi
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist bdist_wheel upload')
    sys.exit()
def parse_reqs(req_path='./requirements.txt'):
    """Recursively parse requirements from nested pip files."""
    install_requires = []
    with codecs.open(req_path, 'r') as handle:
        # remove comments and empty lines
        lines = (line.strip() for line in handle
                 if line.strip() and not line.startswith('#'))
        for line in lines:
            # check for nested requirements files
            if line.startswith('-r'):
                # recursively call this function
                install_requires += parse_reqs(req_path=line[3:])
            else:
                # add the line as a new requirement
                install_requires.append(line)
    return install_requires
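
# Illustrative sketch (editorial addition; the file names below are made up):
# given a requirements.txt containing
#     requests==2.3.0
#     -r requirements-dev.txt
# where requirements-dev.txt holds "pytest", parse_reqs() returns
# ['requests==2.3.0', 'pytest']: the '-r' prefix is detected and the nested
# path (line[3:]) is parsed recursively.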
def parse_readme():
    """Parse contents of the README."""
    # Get the long description from the relevant file
    here = os.path.abspath(os.path.dirname(__file__))
    readme_path = os.path.join(here, 'README.md')
    with codecs.open(readme_path, encoding='utf-8') as handle:
        long_description = handle.read()
    return long_description
setup(
    name='undiff1c',

    # Versions should comply with PEP440. For a discussion on
    # single-sourcing the version across setup.py and the project code,
    # see http://packaging.python.org/en/latest/tutorial.html#version
    version='1.0.1',

    description='Vanguard contains all the boilerplate you need to bootstrap a modern Python package.',
    long_description=parse_readme(),

    # What does your project relate to? Separate with spaces.
    keywords='undiff1c',
    author='Shenja Sosna',
    author_email='[email protected]',
    license='Apache 2.0',

    # The project's main homepage
    url='https://github.com/pumbaEO/undiff1c',

    packages=find_packages(exclude=('tests*', 'docs', 'examples')),

    # If there are data files included in your packages that need to be
    # installed, specify them here.
    include_package_data=True,
    zip_safe=False,

    # Install requirements loaded from ``requirements.txt``
    install_requires=parse_reqs(),
    test_suite='tests',

    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and
    # allow pip to create the appropriate form of executable for the
    # target platform.
    entry_points=dict(
        console_scripts=[
            'undiff1c = undiff1c.undiff1c:main',
        ],
    ),

    # See: http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are:
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',

        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Software Development',

        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: MIT License',

        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 3.4',
        'Environment :: Console',
    ],
)

# ===== dabrahams/zeroinstall :: tests/basetest.py (lgpl-2.1) =====
#!/usr/bin/env python
import sys, tempfile, os, shutil, imp
import unittest
import logging
import warnings
from xml.dom import minidom
from io import BytesIO
warnings.filterwarnings("ignore", message = 'The CObject type')
# Catch silly mistakes...
os.environ['HOME'] = '/home/idontexist'
os.environ['LANGUAGE'] = 'C'
sys.path.insert(0, '..')
from zeroinstall.injector import qdom
from zeroinstall.injector import iface_cache, download, distro, model, handler, policy, reader, trust
from zeroinstall.zerostore import NotStored, Store, Stores; Store._add_with_helper = lambda *unused: False
from zeroinstall import support
from zeroinstall.support import basedir, tasks
dpkgdir = os.path.join(os.path.dirname(__file__), 'dpkg')
empty_feed = qdom.parse(BytesIO(b"""<interface xmlns='http://zero-install.sourceforge.net/2004/injector/interface'>
<name>Empty</name>
<summary>just for testing</summary>
</interface>"""))
import my_dbus
sys.modules['dbus'] = my_dbus
sys.modules['dbus.glib'] = my_dbus
my_dbus.types = my_dbus
sys.modules['dbus.types'] = my_dbus
sys.modules['dbus.mainloop'] = my_dbus
sys.modules['dbus.mainloop.glib'] = my_dbus
mydir = os.path.dirname(__file__)
# Catch us trying to run the GUI and return a dummy string instead
old_execvp = os.execvp
def test_execvp(prog, args):
    if prog == sys.executable and args[1].endswith('/0launch-gui'):
        prog = os.path.join(mydir, 'test-gui')
    return old_execvp(prog, args)

os.execvp = test_execvp

test_locale = (None, None)
assert model.locale

class TestLocale:
    LC_ALL = 'LC_ALL'  # Note: LC_MESSAGES not present on Windows

    def getlocale(self, x = None):
        assert x is not TestLocale.LC_ALL
        return test_locale

model.locale = TestLocale()
class DummyPackageKit:
    available = False

    def get_candidates(self, package, factory, prefix):
        pass

class DummyHandler(handler.Handler):
    __slots__ = ['ex', 'tb', 'allow_downloads']

    def __init__(self):
        handler.Handler.__init__(self)
        self.ex = None
        self.allow_downloads = False

    def wait_for_blocker(self, blocker):
        self.ex = None
        handler.Handler.wait_for_blocker(self, blocker)
        if self.ex:
            support.raise_with_traceback(self.ex, self.tb)

    def report_error(self, ex, tb = None):
        assert self.ex is None, self.ex
        self.ex = ex
        self.tb = tb

        #import traceback
        #traceback.print_exc()

class DummyKeyInfo:
    def __init__(self, fpr):
        self.fpr = fpr
        self.info = [minidom.parseString('<item vote="bad"/>')]
        self.blocker = None
class TestFetcher:
    def __init__(self, config):
        self.allowed_downloads = set()
        self.allowed_feed_downloads = {}
        self.config = config

    def allow_download(self, digest):
        assert isinstance(self.config.stores, TestStores)
        self.allowed_downloads.add(digest)

    def allow_feed_download(self, url, feed):
        self.allowed_feed_downloads[url] = feed

    def download_impls(self, impls, stores):
        @tasks.async
        def fake_download():
            yield
            for impl in impls:
                assert impl.id in self.allowed_downloads, impl
                self.allowed_downloads.remove(impl.id)
                self.config.stores.add_fake(impl.id)
        return fake_download()

    def download_and_import_feed(self, feed_url, iface_cache, force = False):
        @tasks.async
        def fake_download():
            yield
            assert feed_url in self.allowed_feed_downloads, feed_url
            self.config.iface_cache._feeds[feed_url] = self.allowed_feed_downloads[feed_url]
            del self.allowed_feed_downloads[feed_url]
        return fake_download()

    def fetch_key_info(self, fingerprint):
        return DummyKeyInfo(fingerprint)
class TestStores:
    def __init__(self):
        self.fake_impls = set()

    def add_fake(self, digest):
        self.fake_impls.add(digest)

    def lookup_maybe(self, digests):
        for d in digests:
            if d in self.fake_impls:
                return '/fake_store/' + d
        return None

    def lookup_any(self, digests):
        path = self.lookup_maybe(digests)
        if path:
            return path
        raise NotStored()
class TestConfig:
    freshness = 0
    help_with_testing = False
    network_use = model.network_full
    key_info_server = None
    auto_approve_keys = False

    def __init__(self):
        self.iface_cache = iface_cache.IfaceCache()
        self.handler = DummyHandler()
        self.stores = Stores()
        self.fetcher = TestFetcher(self)
        self.trust_db = trust.trust_db
        self.trust_mgr = trust.TrustMgr(self)
class BaseTest(unittest.TestCase):
    def setUp(self):
        warnings.resetwarnings()

        self.config_home = tempfile.mktemp()
        self.cache_home = tempfile.mktemp()
        self.cache_system = tempfile.mktemp()
        self.gnupg_home = tempfile.mktemp()
        os.environ['GNUPGHOME'] = self.gnupg_home
        os.environ['XDG_CONFIG_HOME'] = self.config_home
        os.environ['XDG_CONFIG_DIRS'] = ''
        os.environ['XDG_CACHE_HOME'] = self.cache_home
        os.environ['XDG_CACHE_DIRS'] = self.cache_system
        imp.reload(basedir)
        assert basedir.xdg_config_home == self.config_home

        os.mkdir(self.config_home, 0o700)
        os.mkdir(self.cache_home, 0o700)
        os.mkdir(self.cache_system, 0o500)
        os.mkdir(self.gnupg_home, 0o700)

        if 'DISPLAY' in os.environ:
            del os.environ['DISPLAY']

        self.config = TestConfig()
        policy._config = self.config  # XXX
        iface_cache.iface_cache = self.config.iface_cache

        logging.getLogger().setLevel(logging.WARN)

        download._downloads = {}

        self.old_path = os.environ['PATH']
        os.environ['PATH'] = dpkgdir + ':' + self.old_path

        distro._host_distribution = distro.DebianDistribution(dpkgdir + '/status')
        distro._host_distribution._packagekit = DummyPackageKit()
        my_dbus.system_services = {}

    def tearDown(self):
        if self.config.handler.ex:
            support.raise_with_traceback(self.config.handler.ex, self.config.handler.tb)

        shutil.rmtree(self.config_home)
        support.ro_rmtree(self.cache_home)
        shutil.rmtree(self.cache_system)
        shutil.rmtree(self.gnupg_home)

        os.environ['PATH'] = self.old_path

    def import_feed(self, url, path):
        iface_cache = self.config.iface_cache
        iface_cache.get_interface(url)
        feed = iface_cache._feeds[url] = reader.load_feed(path)
        return feed

# ===== matevzmihalic/wlansi-store :: wlansi_store/migrations/0003_auto__add_field_product_language.py (agpl-3.0) =====
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'Product.language'
        db.add_column('wlansi_store_product', 'language',
                      self.gf('django.db.models.fields.CharField')(default='en', max_length=15),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Product.language'
        db.delete_column('wlansi_store_product', 'language')
    models = {
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'shop.product': {
            'Meta': {'object_name': 'Product'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_shop.product_set'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
            'unit_price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
        },
        'wlansi_store.item': {
            'Meta': {'object_name': 'Item'},
            'has_nodewatcher_firmware': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'item': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wlansi_store.Product']"}),
            'quantity': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        'wlansi_store.price': {
            'Meta': {'object_name': 'Price'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'price': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'}),
            'price_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wlansi_store.Product']"})
        },
        'wlansi_store.product': {
            'Meta': {'object_name': 'Product', '_ormbases': ['shop.Product']},
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15'}),
            'placeholders': ('djangocms_utils.fields.M2MPlaceholderField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
            'product_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shop.Product']", 'unique': 'True', 'primary_key': 'True'})
        },
        'wlansi_store.productplugin': {
            'Meta': {'object_name': 'ProductPlugin', 'db_table': "'cmsplugin_productplugin'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wlansi_store.Product']"})
        }
    }
    complete_apps = ['wlansi_store']

# ===== andredalton/bcc :: 2014/MAC0242/miniep5/test.py (apache-2.0) =====
#! /usr/bin/env python3
import os, random, unittest, subprocess
from miniep5 import Calc
def gera_var():
    """ Function that generates a random variable name with 1 to 10 characters. """
    p = ""
    for i in range(random.randint(1,10)):
        p += random.choice(["a", "e", "i", "o", "u"])
    return p

def gera_teste(num, op, vr=0):
    """
    Function that generates a random test where:
      num: number of floats and variables generated
      op:  number of operators generated
      vr:  probability of inserting a new variable
    """
    lst = []
    var = []
    var_atrr = set()
    i = 0                   # number of floats and variables
    j = 0                   # number of operators
    p = random.random()     # probability of an operator being inserted at the start of the expression
    pws = random.random()   # probability of inserting whitespace
    pvr = vr                # probability of variables occurring
    patr = random.random()  # probability of assignment to a variable
    tokens = ["+", "-", "*", "/"]
    while i < num:
        r = random.random()
        if r < pws:
            # inserting whitespace
            lst.append( random.choice([" ", "\t"]) * random.randint(1, 30) )
        if r < patr:
            if len(var) > 0 and var[-1]['num']==1:
                # assigning to a variable
                v = var.pop()
                var_atrr.add(v['nome'])
                lst.append("=")
                if len(var)>0:
                    var[-1]['num'] += 1
                j += 1
        elif i > j + 1 + len(var) + len(var_atrr):
            # inserting an operator
            if len(var) == 0 or ( len(var)>0 and var[-1]['num']>0 ):
                if len(var) > 0:
                    var[-1]['num'] -= 1
                lst.append( random.choice(tokens) )
                j += 1
        if i < num-1 and r < pvr:
            # inserting a variable
            v = gera_var()
            var.append({'nome': v, 'num': 0})
            lst.append(v)
        else:
            # inserting a number
            if len(var) > 0:
                var[-1]['num'] += 1
            lst.append( str(random.random()*10**random.randint(-30,30)) )
            i += 1
    while len(var)>0:
        if var[-1]['num'] <= 1:
            var.pop()
            lst.append("=")
            if len(var) > 0:
                var[-1]['num'] += 1
        else:
            lst.append( random.choice(tokens) )
            var[-1]['num'] -= 1
            j += 1
    for j in range(j, op):
        lst.append( random.choice(tokens) )
    return " ".join(lst)
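
# Illustrative example (editorial addition): with vr=0 (no variables),
# gera_teste(3, 2) might produce a postfix (RPN) string such as
#     "0.5219 3.77e-12 + 1.08 *"
# i.e. `num` random numbers and `op` operators, separated by random
# whitespace runs.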
class Test(unittest.TestCase):
    def setUp(self):
        """ Initializes the calculator. """
        self.calc = Calc(True)

    # Testing inf, -inf and nan
    def test_inf(self):
        """ Checks for the occurrence of inf. """
        inf = self.calc.analisar("1" * 1000)
        self.assertEqual( inf, float("inf") )

    def test_minf(self):
        """ Checks for the occurrence of -inf. """
        minf = self.calc.analisar("0 %s -" % ("1"*1000))
        self.assertEqual( minf, float("-inf") )

    def test_nan(self):
        """ Checks for the occurrence of nan. """
        out = self.calc.analisar("%(b)s 0 %(b)s - +" % {'b': "1"*1000})
        self.assertNotEqual( out, out )

    # Testing basic properties of the operations.
    def test_som_comutativa(self):
        """ Checks the commutativity of addition. """
        dic = {'a': random.random(), 'b': random.random()}
        aout = self.calc.analisar("%(a)f %(b)f +" % dic)
        bout = self.calc.analisar("%(b)f %(a)f +" % dic)
        self.assertEqual( aout, bout )

    def test_som_elemento_neutro(self):
        """ Checks the identity element of addition. """
        a = random.random()
        aout = self.calc.analisar("%.100f 0 +" % a)
        self.assertEqual( a, aout )

    def test_som_anulamento(self):
        """ Checks the cancellation of addition. """
        dic = {'a': random.random()}
        aout = self.calc.analisar("%(a)f 0 %(a)f - +" %dic)
        self.assertEqual( 0, aout )

    def test_sub_comutativa(self):
        """ Checks the non-commutativity of subtraction. """
        dic = {'a': random.random(), 'b': random.random()}
        aout = self.calc.analisar("%(a)f %(b)f -" % dic)
        bout = self.calc.analisar("%(b)f %(a)f -" % dic)
        self.assertNotEqual( aout, bout )

    def test_sub_elemento_neutro(self):
        """ Checks the identity element of subtraction. """
        a = random.random()
        aout = self.calc.analisar("%.100f 0 -" % a)
        self.assertEqual( a, aout )

    def test_sub_anulamento(self):
        """ Checks the cancellation of subtraction. """
        dic = {'a': random.random()}
        aout = self.calc.analisar("%(a)f %(a)f -" % dic)
        self.assertEqual( 0, aout )

    def test_mul_comutativa(self):
        """ Checks the commutativity of multiplication. """
        dic = {'a': random.random(), 'b': random.random()}
        aout = self.calc.analisar("%(a)f %(b)f *" % dic)
        bout = self.calc.analisar("%(a)f %(b)f *" % dic)
        self.assertEqual( aout, bout )

    def test_mul_elemento_neutro(self):
        """ Checks the identity element of multiplication. """
        a = random.random()
        aout = self.calc.analisar("%.100f 1 *" % a)
        self.assertEqual( a, aout )

    def test_mul_elemento_nulo(self):
        """ Checks the null element of multiplication. """
        aout = self.calc.analisar("%.100f 0 *" % random.random())
        self.assertEqual( 0, aout )

    def test_div_comutativa(self):
        """ Checks the non-commutativity of division. """
        dic = {'a': random.random(), 'b': random.random()}
        aout = self.calc.analisar("%(a)f %(b)f /" % dic)
        bout = self.calc.analisar("%(b)f %(a)f /" % dic)
        self.assertNotEqual( aout, bout )

    def test_div_elemento_neutro(self):
        """ Checks the identity element of division. """
        a = random.random()
        aout = self.calc.analisar("%.100f 1 /" % a)
        self.assertEqual( a, aout )

    def test_div_zero(self):
        """ Checks division by zero. """
        a = random.random()
        self.assertRaises(ZeroDivisionError, self.calc.analisar, "%.100f 0 /" % a)

    # Syntax tests.
    def test_sintaxe(self):
        """
        Checks the syntax when numbers = operators + 1.
        I wanted to assert the absence of an error, but could not find a proper way to do that.
        """
        n = random.randint(1, 10000)
        s = gera_teste(n, n-1)
        try:
            out = float(self.calc.analisar(s))
            conv = True
        except ValueError:
            conv = False
        self.assertTrue(conv)

    def test_erro_sintaxe1(self):
        """ Checks syntax errors when there are more numbers than operators + 1. """
        n = random.randint(1, 10000)
        s = gera_teste(n + random.randint(2, 100), n)
        self.assertRaises(LookupError, self.calc.analisar, s)

    def test_erro_sintaxe2(self):
        """ Checks syntax errors when there are fewer numbers than operators + 1. """
        n = random.randint(1, 10000)
        s = gera_teste(n, n + random.randint(0, 100))
        self.assertRaises(LookupError, self.calc.analisar, s)

    def test_caracter_incorreto(self):
        """ Checks that an error occurs when an unspecified character is used. """
        self.assertRaises(TypeError, self.calc.analisar, random.choice(["!", "@", "$", "?"]) )

    # Tests with variables.
    def test_variavel_nao_inicializada(self):
        """ Checks that an error occurs when an uninitialized variable is used. """
        self.assertRaises(KeyError, self.calc.analisar, gera_var())

    def test_sintaxe_atribuicao(self):
        """
        Checks the syntax when numbers + variables = operators + 1.
        I wanted to assert the absence of an error, but could not find a proper way to do that.
        """
        n = random.randint(1, 10000)
        s = gera_teste(n, n-1, 0.3)
        try:
            out = float(self.calc.analisar(s))
            conv = True
        except ValueError:
            conv = False
        self.assertTrue(conv)

    def test_atribuicao(self):
        """ Checks assigning a value to a variable and reusing it in the same expression. """
        a = random.random()
        dic = {'a': a, 'v': gera_var()}
        self.assertEqual(a*a+a, self.calc.analisar("%(v)s %(a).100f = %(v)s %(v)s * +" % dic))

if __name__ == '__main__':
    unittest.main()

# ===== 8l/beri :: cheritest/trunk/tests/fpu/test_fpu_x_underflow.py (apache-2.0) =====
#-
# Copyright (c) 2013 Michael Roe
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
from nose.plugins.attrib import attr
class test_fpu_x_underflow(BaseBERITestCase):

    @attr('floatexception')
    def test_fpu_x_underflow(self):
        '''Test floating point underflow raises an exception'''
        self.assertRegisterEqual(self.MIPS.a2, 1, "Floating point underflow did not raise an exception")

# ===== graphql-python/graphql-core :: src/graphql/validation/rules/unique_operation_names.py (mit) =====
from typing import Any, Dict
from ...error import GraphQLError
from ...language import NameNode, OperationDefinitionNode, VisitorAction, SKIP
from . import ASTValidationContext, ASTValidationRule
__all__ = ["UniqueOperationNamesRule"]
class UniqueOperationNamesRule(ASTValidationRule):
    """Unique operation names

    A GraphQL document is only valid if all defined operations have unique names.
    """

    def __init__(self, context: ASTValidationContext):
        super().__init__(context)
        self.known_operation_names: Dict[str, NameNode] = {}

    def enter_operation_definition(
        self, node: OperationDefinitionNode, *_args: Any
    ) -> VisitorAction:
        operation_name = node.name
        if operation_name:
            known_operation_names = self.known_operation_names
            if operation_name.value in known_operation_names:
                self.report_error(
                    GraphQLError(
                        "There can be only one operation"
                        f" named '{operation_name.value}'.",
                        [known_operation_names[operation_name.value], operation_name],
                    )
                )
            else:
                known_operation_names[operation_name.value] = operation_name
        return SKIP

    @staticmethod
    def enter_fragment_definition(*_args: Any) -> VisitorAction:
        return SKIP
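
# Illustrative example (editorial addition): validating a document such as
#
#     query getName { dog { name } }
#     query getName { cat { name } }
#
# with this rule reports "There can be only one operation named 'getName'.",
# with both NameNodes attached to the error for source locations.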

# ===== goshow-jp/Kraken :: Python/kraken_examples/bob_rig.py (bsd-3-clause) =====
from kraken.core.maths import Vec3, Quat, Xfo
from kraken.core.objects.rig import Rig
from kraken_components.generic.mainSrt_component import MainSrtComponentRig
from kraken_components.biped.head_component import HeadComponentRig
from kraken_components.biped.clavicle_component import ClavicleComponentGuide, ClavicleComponentRig
from kraken_components.biped.arm_component import ArmComponentGuide, ArmComponentRig
from kraken_components.biped.leg_component import LegComponentGuide, LegComponentRig
from kraken_components.biped.spine_component import SpineComponentRig
from kraken_components.biped.neck_component import NeckComponentGuide, NeckComponentRig
from kraken.core.profiler import Profiler
class BobRig(Rig):
    """Simple biped test rig.

    This example shows how to create a simple scripted biped rig that loads data
    onto component rig classes and also onto guide classes. It also demonstrates
    how to make connections between components.
    """

    def __init__(self, name):

        Profiler.getInstance().push("Construct BobRig:" + name)
        super(BobRig, self).__init__(name)

        # Add Components
        mainSrtComponent = MainSrtComponentRig("mainSrt", self)

        spineComponent = SpineComponentRig("spine", self)
        spineComponent.loadData(data={
            'cogPosition': Vec3(0.0, 11.1351, -0.1382),
            'spine01Position': Vec3(0.0, 11.1351, -0.1382),
            'spine02Position': Vec3(0.0, 11.8013, -0.1995),
            'spine03Position': Vec3(0.0, 12.4496, -0.3649),
            'spine04Position': Vec3(0.0, 13.1051, -0.4821),
            'numDeformers': 4
        })

        neckComponentGuide = NeckComponentGuide("neck")
        neckComponentGuide.loadData({
            "location": "M",
            "neckXfo": Xfo(ori=Quat(Vec3(-0.371748030186, -0.601501047611, 0.371748059988), 0.601500988007), tr=Vec3(0.0, 16.0, -0.75), sc=Vec3(1.00000011921, 1.0, 1.00000011921)),
            "neckMidXfo": Xfo(ori=Quat(Vec3(-0.371748030186, -0.601501047611, 0.371748059988), 0.601500988007), tr=Vec3(0.0, 16.5, -0.5), sc=Vec3(1.00000011921, 1.0, 1.00000011921)),
            "neckEndXfo": Xfo(ori=Quat(Vec3(-0.371748030186, -0.601501047611, 0.371748059988), 0.601500988007), tr=Vec3(0.0, 17.0, -0.25), sc=Vec3(1.0, 1.0, 1.0))
        })

        neckComponent = NeckComponentRig("neck", self)
        neckComponent.loadData(neckComponentGuide.getRigBuildData())

        headComponent = HeadComponentRig("head", self)
        headComponent.loadData(data={
            "headXfo": Xfo(Vec3(0.0, 17.5, -0.5)),
            "eyeLeftXfo": Xfo(tr=Vec3(0.375, 18.5, 0.5), ori=Quat(Vec3(-0.0, -0.707106769085, -0.0), 0.707106769085)),
            "eyeRightXfo": Xfo(tr=Vec3(-0.375, 18.5, 0.5), ori=Quat(Vec3(-0.0, -0.707106769085, -0.0), 0.707106769085)),
            "jawXfo": Xfo(Vec3(0.0, 17.875, -0.275))
        })

        clavicleLeftComponentGuide = ClavicleComponentGuide("clavicle")
        clavicleLeftComponentGuide.loadData({
            "location": "L",
            "clavicleXfo": Xfo(Vec3(0.1322, 15.403, -0.5723)),
            "clavicleUpVXfo": Xfo(Vec3(0.0, 1.0, 0.0)),
            "clavicleEndXfo": Xfo(Vec3(2.27, 15.295, -0.753))
        })

        clavicleLeftComponent = ClavicleComponentRig("clavicle", self)
        clavicleLeftComponent.loadData(data=clavicleLeftComponentGuide.getRigBuildData())

        clavicleRightComponentGuide = ClavicleComponentGuide("clavicle")
        clavicleRightComponentGuide.loadData({
            "location": "R",
            "clavicleXfo": Xfo(Vec3(-0.1322, 15.403, -0.5723)),
            "clavicleUpVXfo": Xfo(Vec3(0.0, 1.0, 0.0)),
            "clavicleEndXfo": Xfo(Vec3(-2.27, 15.295, -0.753))
        })

        clavicleRightComponent = ClavicleComponentRig("clavicle", self)
        clavicleRightComponent.loadData(data=clavicleRightComponentGuide.getRigBuildData())

        armLeftComponentGuide = ArmComponentGuide("arm")
        armLeftComponentGuide.loadData({
            "location": "L",
            "bicepXfo": Xfo(Vec3(2.27, 15.295, -0.753)),
            "forearmXfo": Xfo(Vec3(5.039, 13.56, -0.859)),
            "wristXfo": Xfo(Vec3(7.1886, 12.2819, 0.4906)),
            "handXfo": Xfo(tr=Vec3(7.1886, 12.2819, 0.4906),
                           ori=Quat(Vec3(-0.0865, -0.2301, -0.2623), 0.9331)),
            "bicepFKCtrlSize": 1.75,
            "forearmFKCtrlSize": 1.5
        })

        armLeftComponent = ArmComponentRig("arm", self)
        armLeftComponent.loadData(data=armLeftComponentGuide.getRigBuildData())

        armRightComponentGuide = ArmComponentGuide("arm")
        armRightComponentGuide.loadData({
            "location": "R",
            "bicepXfo": Xfo(Vec3(-2.27, 15.295, -0.753)),
            "forearmXfo": Xfo(Vec3(-5.039, 13.56, -0.859)),
            "wristXfo": Xfo(Vec3(-7.1886, 12.2819, 0.4906)),
            "handXfo": Xfo(tr=Vec3(-7.1886, 12.2819, 0.4906),
                           ori=Quat(Vec3(-0.2301, -0.0865, -0.9331), 0.2623)),
            "bicepFKCtrlSize": 1.75,
            "forearmFKCtrlSize": 1.5
        })

        armRightComponent = ArmComponentRig("arm", self)
        armRightComponent.loadData(data=armRightComponentGuide.getRigBuildData())

        legLeftComponentGuide = LegComponentGuide("leg")
        legLeftComponentGuide.loadData({
            "name": "Leg",
            "location": "L",
            "femurXfo": Xfo(Vec3(0.9811, 9.769, -0.4572)),
            "kneeXfo": Xfo(Vec3(1.4488, 5.4418, -0.5348)),
            "ankleXfo": Xfo(Vec3(1.841, 1.1516, -1.237)),
            "toeXfo": Xfo(Vec3(1.85, 0.4, 0.25)),
            "toeTipXfo": Xfo(Vec3(1.85, 0.4, 1.5))
        })

        legLeftComponent = LegComponentRig("leg", self)
        legLeftComponent.loadData(data=legLeftComponentGuide.getRigBuildData())

        legRightComponentGuide = LegComponentGuide("leg")
        legRightComponentGuide.loadData({
            "name": "Leg",
            "location": "R",
            "femurXfo": Xfo(Vec3(-0.9811, 9.769, -0.4572)),
            "kneeXfo": Xfo(Vec3(-1.4488, 5.4418, -0.5348)),
            "ankleXfo": Xfo(Vec3(-1.85, 1.1516, -1.237)),
            "toeXfo": Xfo(Vec3(-1.85, 0.4, 0.25)),
            "toeTipXfo": Xfo(Vec3(-1.85, 0.4, 1.5))
        })

        legRightComponent = LegComponentRig("leg", self)
        legRightComponent.loadData(data=legRightComponentGuide.getRigBuildData())

        # ============
        # Connections
        # ============
        # Spine to Main SRT
        mainSrtRigScaleOutput = mainSrtComponent.getOutputByName('rigScale')
        mainSrtOffsetOutput = mainSrtComponent.getOutputByName('offset')

        spineGlobalSrtInput = spineComponent.getInputByName('globalSRT')
        spineGlobalSrtInput.setConnection(mainSrtOffsetOutput)

        spineRigScaleInput = spineComponent.getInputByName('rigScale')
        spineRigScaleInput.setConnection(mainSrtRigScaleOutput)

        # Neck to Main SRT
        neckGlobalSrtInput = neckComponent.getInputByName('globalSRT')
        neckGlobalSrtInput.setConnection(mainSrtOffsetOutput)

        # Neck to Spine
        spineEndOutput = spineComponent.getOutputByName('spineEnd')
        neckSpineEndInput = neckComponent.getInputByName('neckBase')
        neckSpineEndInput.setConnection(spineEndOutput)

        # Head to Main SRT
        headGlobalSrtInput = headComponent.getInputByName('globalSRT')
        headGlobalSrtInput.setConnection(mainSrtOffsetOutput)

        headBaseInput = headComponent.getInputByName('worldRef')
        headBaseInput.setConnection(mainSrtOffsetOutput)

        # Head to Neck
        neckEndOutput = neckComponent.getOutputByName('neckEnd')
        headBaseInput = headComponent.getInputByName('neckRef')
        headBaseInput.setConnection(neckEndOutput)

        # Clavicle to Spine
        spineEndOutput = spineComponent.getOutputByName('spineEnd')
        clavicleLeftSpineEndInput = clavicleLeftComponent.getInputByName('spineEnd')
        clavicleLeftSpineEndInput.setConnection(spineEndOutput)
        clavicleRightSpineEndInput = clavicleRightComponent.getInputByName('spineEnd')
        clavicleRightSpineEndInput.setConnection(spineEndOutput)

        # Arm to Global SRT
        mainSrtOffsetOutput = mainSrtComponent.getOutputByName('offset')
        armLeftGlobalSRTInput = armLeftComponent.getInputByName('globalSRT')
        armLeftGlobalSRTInput.setConnection(mainSrtOffsetOutput)

        armLeftRigScaleInput = armLeftComponent.getInputByName('rigScale')
        armLeftRigScaleInput.setConnection(mainSrtRigScaleOutput)

        armRightGlobalSRTInput = armRightComponent.getInputByName('globalSRT')
        armRightGlobalSRTInput.setConnection(mainSrtOffsetOutput)

        armRightRigScaleInput = armRightComponent.getInputByName('rigScale')
        armRightRigScaleInput.setConnection(mainSrtRigScaleOutput)

        # Arm To Clavicle Connections
        clavicleLeftEndOutput = clavicleLeftComponent.getOutputByName('clavicleEnd')
        armLeftClavicleEndInput = armLeftComponent.getInputByName('root')
        armLeftClavicleEndInput.setConnection(clavicleLeftEndOutput)
        clavicleRightEndOutput = clavicleRightComponent.getOutputByName('clavicleEnd')
        armRightClavicleEndInput = armRightComponent.getInputByName('root')
        armRightClavicleEndInput.setConnection(clavicleRightEndOutput)

        # Leg to Global SRT
        mainSrtOffsetOutput = mainSrtComponent.getOutputByName('offset')
        legLeftGlobalSRTInput = legLeftComponent.getInputByName('globalSRT')
        legLeftGlobalSRTInput.setConnection(mainSrtOffsetOutput)

        legLeftRigScaleInput = legLeftComponent.getInputByName('rigScale')
        legLeftRigScaleInput.setConnection(mainSrtRigScaleOutput)

        legRightGlobalSRTInput = legRightComponent.getInputByName('globalSRT')
        legRightGlobalSRTInput.setConnection(mainSrtOffsetOutput)

        legRightRigScaleInput = legRightComponent.getInputByName('rigScale')
        legRightRigScaleInput.setConnection(mainSrtRigScaleOutput)

        # Leg To Pelvis Connections
        spinePelvisOutput = spineComponent.getOutputByName('pelvis')
        legLeftPelvisInput = legLeftComponent.getInputByName('pelvisInput')
        legLeftPelvisInput.setConnection(spinePelvisOutput)
        legRightPelvisInput = legRightComponent.getInputByName('pelvisInput')
        legRightPelvisInput.setConnection(spinePelvisOutput)

        Profiler.getInstance().pop()
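
# Illustrative usage sketch (editorial addition, not part of the original
# file): constructing the rig is a single call; an actual build step (e.g.
# a Kraken builder for a specific DCC) would happen outside this file:
#
#     rig = BobRig("bob")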

# ===== trevor/calendarserver :: txdav/who/idirectory.py (apache-2.0) =====
# -*- test-case-name: txdav.who.test -*-
##
# Copyright (c) 2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from __future__ import print_function
from __future__ import absolute_import
"""
Calendar and contacts directory extensions to L{twext.who.idirectory}.
"""
__all__ = [
    "AutoScheduleMode",
    "RecordType",
    "FieldName",
]
from twisted.python.constants import Names, NamedConstant
from twext.who.idirectory import FieldName as BaseFieldName
#
# Data types
#
class AutoScheduleMode(Names):
    """
    Constants for automatic scheduling modes.

    @cvar none: Invitations are not automatically handled.

    @cvar accept: Accept all invitations.

    @cvar decline: Decline all invitations.

    @cvar acceptIfFree: Accept invitations that do not conflict with a busy
        time slot.  Other invitations are not automatically handled.

    @cvar declineIfBusy: Decline invitations that conflict with a busy time
        slot.  Other invitations are not automatically handled.

    @cvar acceptIfFreeDeclineIfBusy: Accept invitations that do not conflict
        with a busy time slot.  Decline invitations that conflict with a busy
        time slot.  Other invitations are not automatically handled.
    """

    none = NamedConstant()
    none.description = u"no action"

    accept = NamedConstant()
    accept.description = u"accept"

    decline = NamedConstant()
    decline.description = u"decline"

    acceptIfFree = NamedConstant()
    acceptIfFree.description = u"accept if free"

    declineIfBusy = NamedConstant()
    declineIfBusy.description = u"decline if busy"

    acceptIfFreeDeclineIfBusy = NamedConstant()
    acceptIfFreeDeclineIfBusy.description = u"accept if free, decline if busy"
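
# Illustrative usage sketch (editorial addition): twisted's Names containers
# support lookup by constant name, so code reading a stored preference string
# can resolve it with, e.g.:
#
#     mode = AutoScheduleMode.lookupByName("acceptIfFree")
#     assert mode.description == u"accept if free"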
class RecordType(Names):
    """
    Constants for calendar and contacts directory record types.

    @cvar location: Location record.
        Represents a schedulable location (eg. a meeting room).

    @cvar resource: Resource record.
        Represents a schedulable resource (eg. a projector, conference line,
        etc.).

    @cvar address: Address record.
        Represents a physical address (street address and/or geolocation).
    """

    location = NamedConstant()
    location.description = u"location"

    resource = NamedConstant()
    resource.description = u"resource"

    address = NamedConstant()
    address.description = u"physical address"


class FieldName(Names):
    """
    Constants for calendar and contacts directory record field names.

    Fields as associated with either a single value or an iterable of values.

    @cvar serviceNodeUID: For a calendar and contacts service with multiple
        nodes, this denotes the node that the user's data resides on.
        The associated value must be a L{unicode}.

    @cvar loginAllowed: Determines whether a record can log in.
        The associated value must be a L{bool}.

    @cvar hasCalendars: Determines whether a record has calendar data.
        The associated value must be a L{bool}.

    @cvar hasContacts: Determines whether a record has contact data.
        The associated value must be a L{bool}.

    @cvar autoScheduleMode: Determines the auto-schedule mode for a record.
        The associated value must be a L{NamedConstant}.

    @cvar autoAcceptGroup: Contains the UID for a group record which contains
        members for whom auto-accept will behave as "accept if free", even if
        auto-accept is set to "manual".
        The associated value must be a L{NamedConstant}.
    """

    serviceNodeUID = NamedConstant()
    serviceNodeUID.description = u"service node UID"

    loginAllowed = NamedConstant()
    loginAllowed.description = u"login permitted"
    loginAllowed.valueType = bool

    hasCalendars = NamedConstant()
    hasCalendars.description = u"has calendars"
    hasCalendars.valueType = bool

    hasContacts = NamedConstant()
    hasContacts.description = u"has contacts"
    hasContacts.valueType = bool

    autoScheduleMode = NamedConstant()
    autoScheduleMode.description = u"auto-schedule mode"
    autoScheduleMode.valueType = AutoScheduleMode

    autoAcceptGroup = NamedConstant()
    autoAcceptGroup.description = u"auto-accept group"
    autoAcceptGroup.valueType = BaseFieldName.valueType(BaseFieldName.uid)

    # For "locations", i.e., scheduled spaces:

    associatedAddress = NamedConstant()
    associatedAddress.description = u"associated address UID"

    capacity = NamedConstant()
    capacity.description = u"room capacity"
    capacity.valueType = int

    floor = NamedConstant()
    floor.description = u"building floor"

    # For "addresses", i.e., non-scheduled areas containing locations:

    abbreviatedName = NamedConstant()
    abbreviatedName.description = u"abbreviated name"

    geographicLocation = NamedConstant()
    geographicLocation.description = u"geographic location URI"

    streetAddress = NamedConstant()
    streetAddress.description = u"street address"

# ===== bparzella/secsgem :: src/secsgem/secs/functions/s01f00.py (lgpl-2.1) =====
#####################################################################
# s01f00.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""Class for stream 01 function 00."""
from secsgem.secs.functions.base import SecsStreamFunction
class SecsS01F00(SecsStreamFunction):
    """
    abort transaction stream 1.

    **Structure**::

        >>> import secsgem.secs
        >>> secsgem.secs.functions.SecsS01F00
        Header only

    **Example**::

        >>> import secsgem.secs
        >>> secsgem.secs.functions.SecsS01F00()
        S1F0 .

    :param value: function has no parameters
    :type value: None
    """

    _stream = 1
    _function = 0

    _data_format = None

    _to_host = True
    _to_equipment = True

    _has_reply = False
    _is_reply_required = False

    _is_multi_block = False

# ===== jittat/ku-eng-direct-admission :: result/migrations/0006_remove_order_index.py (agpl-3.0) =====
# -*- coding: utf-8 -*-
from south.db import db
from django.db import models
from adm.result.models import *
class Migration:

    def forwards(self, orm):
        db.delete_index('result_qualifiedapplicant', ['order'])

    def backwards(self, orm):
        db.create_index('result_qualifiedapplicant', ['order'], unique=True)

    models = {
        'application.applicant': {
            'activation_required': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'doc_submission_method': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'has_logged_in': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'has_related_model': ('IntegerListField', [], {'default': 'None'}),
            'hashed_password': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_offline': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_submitted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '10'})
        },
        'result.qualifiedapplicant': {
            'applicant': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['application.Applicant']"}),
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['result.ReportCategory']"}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'order': ('django.db.models.fields.IntegerField', [], {}),
            'ticket_number': ('django.db.models.fields.CharField', [], {'max_length': '15'})
        },
        'result.reportcategory': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
            'order': ('django.db.models.fields.IntegerField', [], {}),
            'result_set_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        }
    }

    complete_apps = ['result']

# ===== mangpo/cacheall-proxy-server :: httpmessage/_headers.py (bsd-2-clause) =====
import _setup
import inspect
import pprint
from httpmessage._multidict import MultiDict
def header_case(header_key):
    return "-".join([part.capitalize() for part in header_key.split("-")])
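
# Illustrative examples (editorial addition):
#     header_case("content-length")   ->  "Content-Length"
#     header_case("x-forwarded-for")  ->  "X-Forwarded-For"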
def _key_wrap(func):
    """creates a function where the value of the 'key' argument, if there
    is one, has the function 'header_case' run on it.
    """
    varnames = func.func_code.co_varnames

    def key_filter(kv):
        name, value = kv
        if name == 'key':
            return header_case(value)
        else:
            return value

    def wrapped(*args):
        if len(args) == len(varnames):
            args = [key_filter(kv) for kv in zip(varnames, args)]
        return func(*args)

    wrapped.func_name = func.func_name
    wrapped.func_doc = func.func_doc
    return wrapped


class Headers(MultiDict):

    for attrname in dir(MultiDict):
        attrvalue = getattr(MultiDict, attrname)
        if inspect.ismethod(attrvalue):
            attrvalue = attrvalue.im_func
        if inspect.isfunction(attrvalue) and \
           'key' in attrvalue.func_code.co_varnames:
            locals()[attrname] = _key_wrap(attrvalue)

    #---------------------------------------------------------------
    def iteritems(self):
        return iter(sorted(super(Headers,self).iteritems()))

    #---------------------------------------------------------------
    def __repr__(self):
        data = pprint.pformat(list(self.iteritems()))
        if '\n' in data:
            data = ''.join([data[0], '\n ', data[1:-1], '\n', data[-1]])
        return '<%s(%s)>' % (
            type(self).__name__, data
        )

    #---------------------------------------------------------------
    def __copy__(self):
        dup = Headers()
        for k,v in self.iteritems():
            dup.append_at(k,v)
        return dup


if __name__ == "__main__":
    h = Headers()
    h['foo'] = 'bar'
    h['content-lenGth'] = 5
    print h
    h['CONTENT-length'] = 10
    print h
    del h['foO']
    print h
    h['content-type'] = 'wack wack wackiness'
    h['rover-dookie'] = 'oh yah, lots'
    print h
print h | bsd-2-clause | -1,192,584,434,577,835,800 | 26.766234 | 74 | 0.504913 | false |
mikoro/pymazing | pymazing/level_loader.py | 1 | 4241 | """Load and generate world mesh data from files."""
# Copyright © 2014 Mikko Ronkainen <[email protected]>
# License: MIT, see the LICENSE file.
from pymazing import color, mesh
# http://en.wikipedia.org/wiki/Truevision_TGA
def generate_blocks_from_tga(file_name):
    """
    Generate block data from a TGA formatted image file - each pixel corresponds to one block.

    :param string file_name: A path to the image file.
    :return: A two dimensional array of colors representing the blocks.
    """
    blocks = None

    with open(file_name, "rb") as file:
        file.read(1)  # image ID length
        file.read(1)  # color map type

        # image type
        if file.read(1) != b"\x02":
            raise Exception("Invalid file format")

        # color map specification
        file.read(2)
        file.read(2)
        file.read(1)

        file.read(2)  # x-origin
        file.read(2)  # y-origin

        width = int.from_bytes(file.read(2), byteorder="little")
        height = int.from_bytes(file.read(2), byteorder="little")
        depth = file.read(1)[0]

        if width < 1 or height < 1 or depth != 32:
            raise Exception("Invalid file format")

        file.read(1)  # image descriptor

        blocks = [[None] * width for _ in range(height)]

        for y in range(0, height):
            for x in range(0, width):
                pixel_data = file.read(4)

                if len(pixel_data) != 4:
                    raise Exception("Invalid file format")

                r = pixel_data[2]
                g = pixel_data[1]
                b = pixel_data[0]
                a = pixel_data[3]

                if a > 0:
                    blocks[y][x] = color.from_int(r, g, b, a)

    return blocks
def generate_full_meshes(blocks):
    """
    Generate mesh data from the block data.

    :param blocks: A two dimensional array of colors.
    :return: A list of meshes.
    """
    meshes = []
    height = len(blocks)
    width = len(blocks[0])

    # add the floor plane
    mesh_ = mesh.create_partial_cube(color.from_int(80, 80, 80), mesh.TOP)
    mesh_.scale = [width / 2.0 + 2.0, 1.0, height / 2.0 + 2.0]
    mesh_.position = [width / 2.0, -1.0, -height / 2.0]
    meshes.append(mesh_)

    for y in range(height):
        for x in range(width):
            color_ = blocks[y][x]

            if color_ is not None:
                mesh_ = mesh.create_cube(color_)
                mesh_.scale = [0.5, 0.5, 0.5]
                mesh_.position[0] = 1.0 * x + 0.5
                mesh_.position[1] = 0.5
                mesh_.position[2] = -1.0 * y - 0.5
                meshes.append(mesh_)

    return meshes
def generate_partial_meshes(blocks):
    """
    Generate mesh data from the block data - but leave out sides that are not visible.

    :param blocks: A two dimensional array of colors.
    :return: A list of meshes.
    """
    meshes = []
    height = len(blocks)
    width = len(blocks[0])

    # add the floor plane
    mesh_ = mesh.create_partial_cube(color.from_int(80, 80, 80), mesh.TOP)
    mesh_.scale = [width / 2.0 + 2.0, 1.0, height / 2.0 + 2.0]
    mesh_.position = [width / 2.0, -1.0, -height / 2.0]
    meshes.append(mesh_)

    for y in range(height):
        for x in range(width):
            color_ = blocks[y][x]

            if color_ is not None:
                sides = mesh.TOP

                if x == 0 or (blocks[y][x - 1] is None):
                    sides |= mesh.LEFT

                if x == (width - 1) or (blocks[y][x + 1] is None):
                    sides |= mesh.RIGHT

                if y == 0 or (blocks[y - 1][x] is None):
                    sides |= mesh.FRONT

                if y == (height - 1) or (blocks[y + 1][x] is None):
                    sides |= mesh.BACK

                mesh_ = mesh.create_partial_cube(color_, sides)
                mesh_.scale = [0.5, 0.5, 0.5]
                mesh_.position[0] = 1.0 * x + 0.5
                mesh_.position[1] = 0.5
                mesh_.position[2] = -1.0 * y - 0.5
                meshes.append(mesh_)

    return meshes
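
# Illustrative usage sketch (editorial addition; 'level.tga' is a made-up
# path): a caller would chain the two steps above, e.g.
#
#     blocks = generate_blocks_from_tga("level.tga")
#     meshes = generate_partial_meshes(blocks)
#
# generate_partial_meshes() emits only the cube faces bordering empty space,
# so it produces fewer triangles than generate_full_meshes().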

# ===== foone/7gen :: code/obj.py =====
#!/usr/bin/env python
from error import LoadError
from bmdl import BRenderModel
import pygame
from colorsys import rgb_to_hsv,hsv_to_rgb
import os
COLORWIDTH=8
class WavefrontModel:
    def __init__(self,filename=None):
        if filename is not None:
            self.load(filename)

    def load(self,filename):
        fop=open(filename,'r')
        self.verts=[(0,0,0)] # Start from 1 my ass. IDIOTS.
        #self.tris=[]
        self.colors={'DefaultColor':(255,255,255)}
        self.color_order=['DefaultColor']
        self.trispiles={}
        current_pile_name='DefaultColor'
        self.trispiles[current_pile_name]=[]
        current_pile=self.trispiles[current_pile_name]
        for i,line in enumerate(fop):
            self.linenumber=i
            if line[0:1]=='#':
                continue
            stuff=line.strip().split(' ',1)
            if len(stuff)==2:
                command=stuff[0].lower()
                if command=='v':
                    x,y,z=[float(x) for x in stuff[1].split(' ')][:3] # ignore w
                    self.verts.append((x,y,z))
                elif command=='f':
                    pieces=stuff[1].split(' ')
                    if len(pieces)==3:
                        verts,tex,normals=self.faceref(pieces)
                        current_pile.append(verts)
                    elif len(pieces)==4:
                        verts,tex,normals=self.faceref(pieces)
                        current_pile.append((verts[0],verts[1],verts[2]))
                        current_pile.append((verts[0],verts[2],verts[3]))
                elif command=='usemtl':
                    current_pile_name=stuff[1].strip()
                    if current_pile_name not in self.trispiles:
                        self.trispiles[current_pile_name]=[]
                    current_pile=self.trispiles[current_pile_name]
                    if current_pile_name not in self.colors:
                        self.colors[current_pile_name]=(255,255,255) # default to white.
                    if current_pile_name not in self.color_order:
                        self.color_order.append(current_pile_name)
                elif command=='mtllib':
                    try:
                        self.loadMTL(stuff[1])
                    except IOError:
                        pass # Couldn't load colors/textures. OH WELL.

    def loadMTL(self,filename):
        current_name=None
        fop=open(filename,'r')
        for line in fop:
            if line[0:1]=='#':
                continue
            stuff=line.strip().split(' ',1)
            if len(stuff)==2:
                command=stuff[0].lower()
                if command=='newmtl':
                    current_name=stuff[1]
                elif command=='kd':
                    if current_name is not None:
                        r,g,b=[int(float(x)*255.0) for x in stuff[1].strip().split(' ')]
                        self.colors[current_name]=(r,g,b)
                        if current_name not in self.color_order:
                            self.color_order.append(current_name)

    def dump(self):
        print 'Verts:',len(self.verts)
        print 'Tris:',len(self.tris)

    def faceref(self,pieces):
        verts,tex,normal=[],[],[]
        for piece in pieces:
            parts=piece.split('/')
            if len(parts)>3:
                raise LoadError('Too many parts in faceref, line %i' % (self.linenumber))
            if len(parts)==0:
                raise LoadError('Too few parts in faceref, line %i' % (self.linenumber))
            if len(parts)==1:
                verts.append(self.vref(int(parts[0])))
                tex.append(None)
                normal.append(None)
            elif len(parts)==2:
                verts.append(self.vref(int(parts[0])))
                tex.append(None) # TODO: Fix. Create tref?
                normal.append(None)
            elif len(parts)==3:
                verts.append(self.vref(int(parts[0])))
                tex.append(None) # TODO: Fix. Create tref?
                normal.append(None) # TODO: Fix. Create nref?
        return verts,tex,normal

    def vref(self,v):
        if v<0:
            return len(self.verts)+v
        else:
            return v

    def makeBMDL(self,statusfunc=None):
        bmdl=BRenderModel()
        bmdl.tris_normals=[]
        bmdl.filename='<JKL>'
        bmdl.normals=True
        # for x,y,z in self.verts:
        #     bmdl.verts.append((x,y,z,0,0))
        width=float(len(self.color_order))
        for x,color in enumerate(self.color_order):
            u=(x+0.5)/width
            if color in self.trispiles:
                r,g,b=self.colors[color]
            else:
                r,g,b=(255,0,255) # default to white if we are missing this color.
            if statusfunc is not None:
                statusfunc('Converting %i verts in %s' % (len(self.trispiles[color]),color))
            for v1,v2,v3 in self.trispiles[color]:
                x,y,z=self.verts[v1]
                a=bmdl.happyVertex(x,y,z,u,0.5)
                x,y,z=self.verts[v2]
                b=bmdl.happyVertex(x,y,z,u,0.5)
                x,y,z=self.verts[v3]
                c=bmdl.happyVertex(x,y,z,u,0.5)
                bmdl.tris.append((a,b,c))
        if statusfunc is not None:
            statusstr='%i verts, %i tris' % (len(bmdl.verts),len(bmdl.tris))
            statusfunc(statusstr)
        return bmdl

    def makeTexture(self,palette_surf,enhance_color=True):
        size=(len(self.color_order)*COLORWIDTH,COLORWIDTH)
        surf=pygame.Surface(size,pygame.SWSURFACE,palette_surf)
        surf.set_palette(palette_surf.get_palette())
        for x,color in enumerate(self.color_order):
            r,g,b=self.colors[color]
            if enhance_color:
                h,s,v=rgb_to_hsv(r/255.0,g/255.0,b/255.0)
                s=min(1.0,s+0.1)
                r,g,b=[int(temp*255.0) for temp in hsv_to_rgb(h,s,v)]
            nearest=None
            ndiff=None
            for i,(nr,ng,nb) in enumerate(palette_surf.get_palette()):
                rdelta=r-nr
                gdelta=g-ng
                bdelta=b-nb
                diff=rdelta**2 + gdelta**2 + bdelta**2
                if nearest is None or diff<ndiff:
                    ndiff=diff
                    nearest=i
            surf.fill(nearest,(x*COLORWIDTH,0,COLORWIDTH,COLORWIDTH))
        return surf
class WavefrontModelTextured:
def __init__(self,filename=None):
if filename is not None:
self.load(filename)
def load(self,filename):
fop=open(filename,'r')
self.verts=[(0,0,0)] # Start from 1 my ass. IDIOTS.
self.texverts=[(0,0,0)]
self.colors={'DefaultColor':(255,255,255)}
self.color_order=['DefaultColor']
self.textures={}
self.trispiles={}
current_pile_name='DefaultColor'
self.trispiles[current_pile_name]=[]
current_pile=self.trispiles[current_pile_name]
for i,line in enumerate(fop):
self.linenumber=i
if line[0:1]=='#':
continue
stuff=line.strip().split(' ',1)
if len(stuff)==2:
command=stuff[0].lower()
if command=='v':
x,y,z=[float(x) for x in stuff[1].split(' ')][:3] # ignore w
self.verts.append((x,y,z))
elif command=='vt':
u,v=[float(x) for x in stuff[1].split(' ')]
self.texverts.append((u,v))
elif command=='usemtl':
current_pile_name=stuff[1].strip()
if current_pile_name not in self.trispiles:
self.trispiles[current_pile_name]=[]
current_pile=self.trispiles[current_pile_name]
if current_pile_name not in self.colors:
self.colors[current_pile_name]=(255,255,255) # default to white.
if current_pile_name not in self.color_order:
self.color_order.append(current_pile_name)
elif command=='f':
pieces=stuff[1].split(' ')
if len(pieces)==3:
verts,tex,normals=self.faceref(pieces)
current_pile.append(verts+tex)
elif len(pieces)==4:
verts,tex,normals=self.faceref(pieces)
current_pile.append((verts[0],verts[1],verts[2],tex[0],tex[1],tex[2]))
current_pile.append((verts[0],verts[2],verts[3],tex[0],tex[2],tex[3]))
elif command=='mtllib':
try:
self.loadMTL(stuff[1])
except IOError:
pass # Couldn't load colors/textures. OH WELL.
def loadMTL(self,filename):
current_name=None
fop=open(filename,'r')
for line in fop:
if line[0:1]=='#':
continue
stuff=line.strip().split(' ',1)
if len(stuff)==2:
command=stuff[0].lower()
if command=='newmtl':
current_name=stuff[1]
elif command=='kd':
if current_name is not None:
r,g,b=[int(float(x)*255.0) for x in stuff[1].strip().split(' ')]
self.colors[current_name]=(r,g,b)
if current_name not in self.color_order:
self.color_order.append(current_name)
elif command=='map_kd':
filename=stuff[1]
if not os.path.exists(filename):
raise LoadError('Texture Missing: ' +filename)
self.textures[current_name]=filename
def dump(self):
print 'Verts:',len(self.verts)
print 'Tris:',len(self.tris)
print 'Textures:'
for texname in self.textures:
r,g,b=self.colors[texname]
print ' %s:%s (%i,%i,%i)' % (texname,self.textures[texname],r,g,b)
def faceref(self,pieces):
verts,tex,normal=[],[],[]
for piece in pieces:
parts=piece.split('/')
if len(parts)>3:
raise LoadError('Too many parts in faceref, line %i' % (self.linenumber))
if len(parts)==0:
raise LoadError('Too few parts in faceref, line %i' % (self.linenumber))
if len(parts)==1:
verts.append(self.vref(int(parts[0])))
tex.append(None)
normal.append(None)
elif len(parts)==2:
verts.append(self.vref(int(parts[0])))
tex.append(self.tref(int(parts[1])))
tex.append(None) # TODO: Fix. Create tref?
normal.append(None)
elif len(parts)==3:
verts.append(self.vref(int(parts[0])))
tex.append(self.tref(int(parts[1])))
normal.append(None) # TODO: Fix. Create nref?
return verts,tex,normal
def vref(self,v):
if v<0:
return len(self.verts)+v
else:
return v
def tref(self,t):
if t<0:
return len(self.texterts)+t
else:
return t
def getTextureGroups(self):
out=[]
for key in self.trispiles:
if len(self.trispiles[key])>0:
out.append(key)
return out
def getTextureNames(self):
out={}
for key in self.trispiles:
if len(self.trispiles[key])>0:
out[self.textures[key]]=True
return out.keys()
def makeBMDL(self,pile,statusfunc=None):
bmdl=BRenderModel()
bmdl.tris_normals=[]
bmdl.filename='<JKL>'
bmdl.normals=True
# for x,y,z in self.verts:
# bmdl.verts.append((x,y,z,0,0))
width=float(len(self.color_order))
for pilename in self.trispiles:
if self.textures.has_key(pilename) and self.textures[pilename]==pile:
for v1,v2,v3,t1,t2,t3 in self.trispiles[pilename]:
vs=[]
for vi,ti in ((v1,t1),(v2,t2),(v3,t3)):
x,y,z=self.verts[vi]
u,v=self.texverts[ti]
vs.append(bmdl.happyVertex(x,y,z,u,v))
bmdl.tris.append(vs)
return bmdl
| gpl-2.0 | -6,269,479,059,025,849,000 | 31.455782 | 80 | 0.646301 | false |
jpardobl/naman | naman/core/pypelib/persistence/backends/rawfile/RAWFile.py | 1 | 1316 | try:
import cPickle as pickle
except:
import pickle
from threading import Lock
from naman.core.pypelib.resolver.Resolver import Resolver
'''
@author: lbergesio,omoya,cbermudo
@organization: i2CAT, OFELIA FP7
RAWFile
Implementes persistence engine to a raw file for RuleTables
'''
class RAWFile():
#XXX: lbergesio: Is it necessary to use a mutex here?
_mutex = Lock()
@staticmethod
def save(obj, parser, **kwargs):
if "fileName" not in kwargs:
raise Exception("FileName is required")
with RAWFile._mutex:
fileObj = open(kwargs["fileName"], "wb" )
try:
cObj = obj.clone()
except Exception,e:
print "Could not clone original obj %s\n%s" %(str(obj),str(e))
pickle.dump(cObj,fileObj)
fileObj.close()
@staticmethod
def load(tableName, resolverMappings, parser, **kwargs):
with RAWFile._mutex:
if not kwargs["fileName"]:
raise Exception("FileName is required")
fileObj = open(kwargs["fileName"], "r" )
table = pickle.load(fileObj)
table._mutex = Lock()
table._mappings = resolverMappings
table._resolver = Resolver(table._mappings)
fileObj.close()
if table.name != tableName:
raise Exception("Table name mismatch; did you specify the correct file?")
return table
| bsd-3-clause | 2,492,388,815,307,089,400 | 22.5 | 77 | 0.663374 | false |
bmya/tkobr-addons | tko_l10n_br_point_of_sale/res_company.py | 1 | 1548 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# ThinkOpen Solutions Brasil
# Copyright (C) Thinkopen Solutions <http://www.tkobr.com>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, fields, _
from openerp.osv import osv
class res_compamy(models.Model):
_inherit = 'res.company'
average_federal_tax = fields.Float(
'Average Federal Tax [%]',
company_dependent=True,
help='The average federal tax percentage [0..100]')
average_state_tax = fields.Float(
'Average State Tax Value [%]',
company_dependent=True,
help='The average state tax percentage [0..100]')
| agpl-3.0 | -3,421,115,839,997,290,000 | 38.692308 | 78 | 0.618217 | false |
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractHeyitsmehyupperstranslationsCom.py | 1 | 1096 | def extractHeyitsmehyupperstranslationsCom(item):
'''
Parser for 'heyitsmehyupperstranslations.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
if item['tags'] == ['Uncategorized']:
titlemap = [
('TIAOFM Chapcter ', 'This is an Obvious Fraudulent Marriage', 'translated'),
('TIAOFM Chapter ', 'This is an Obvious Fraudulent Marriage', 'translated'),
('Master of Dungeon', 'Master of Dungeon', 'oel'),
]
for titlecomponent, name, tl_type in titlemap:
if titlecomponent.lower() in item['title'].lower():
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False | bsd-3-clause | -5,802,952,744,336,902,000 | 33.28125 | 105 | 0.643248 | false |
aelkikhia/pyduel_engine | pyduel_engine/content/default_decks.py | 1 | 6633 |
from pyduel_engine.content import default_cards as cd
# from pyduel_engine.content.engine_states import Focus, Target
# ############################### RED DECKS ##################################
# Anakin Skywalker Deck
def anakin_deck():
return [cd.atk_disc(8), cd.atk_disc(8), cd.move_draw(8, 5),
cd.move_draw(8, 5), cd.def_dmg(20, 1), cd.def_dmg(20, 1),
cd.move_dmg(7), cd.move_dmg(7), cd.move_dmg(7)].append(red_deck())
# Luke Skywalker Deck
def luke_deck():
return [cd.move_draw(6, 2), cd.move_draw(6, 2), cd.move_draw(6, 2),
cd.discard(), cd.discard(), cd.discard(), cd.atk_alone(4),
cd.atk_alone(4)].append(red_deck())
# Darth Maul Deck
def maul_deck():
return [cd.atk_move(8, 6), cd.atk_move(8, 6), cd.atk_move(8, 6),
cd.atk_not_act(3), cd.atk_not_act(3), cd.atk_not_act(3),
cd.atk_not_act(4), cd.atk_not_act(4), cd.atk_not_act(4),
cd.def_dmg(0, 3), cd.def_dmg(0, 3), cd.def_draw(10)
].append(red_deck())
# Darth Vader Deck
def vader_deck():
return [cd.atk_unblocked(3, 20), cd.damage(6), cd.damage(6), cd.damage(6),
cd.atk_drain(3), cd.atk_drain(3), cd.damage(4), cd.damage(4),
cd.damage(2), cd.damage(2), cd.damage(2), cd.discard()
].append(red_deck())
# ############################### BLUE DECKS ##################################
# Mace Windu Deck
def mace_deck():
return [cd.atk_draw(5)] * 2 + \
[cd.combat_hand_size()] * 4 + \
[cd.damage(4)] * 2 + \
[cd.move_draw(5, 1)] * 4 + \
blue_deck()
# Obi-wan Kenobi Deck
def obi_wan_deck():
return [cd.discard_draw(3), cd.discard_draw(3), cd.discard_draw(3),
cd.move_draw(8, 1), cd.move_draw(8, 1), cd.atk_move(6, 6),
cd.atk_move(6, 6), cd.atk_move(6, 6), cd.def_draw(12),
cd.def_draw(12), cd.def_draw(12), cd.get_card('type', 'discard')
].append(blue_deck())
# Count Dooku Deck
def dooku_deck():
return [cd.atk_draw(7)] * 4 + \
[cd.push(1)] * 2 + \
[cd.draw(3)] * 2 + \
[cd.move_squad(4)] * 3 + \
[cd.discard(2)] + \
blue_deck()
# ############################### GREEN DECKS ################################
# Yoda Deck
def yoda_deck():
return [cd.lift(), cd.lift(), cd.lift(), cd.push(3), cd.push(3),
cd.def_dmg(20, 0), cd.atk_draw(6), cd.atk_draw(6), cd.discard(1),
cd.discard(1), cd.def_draw(15), cd.def_draw(15)
].append(green_deck())
# Emperor Deck
def emperor_deck():
return [cd.dmg_discard(3, 1), cd.dmg_discard(3, 1), cd.dmg_discard(3, 1),
cd.dmg_discard(3, 1), cd.reorder_draw(4, 1), cd.reorder_draw(4, 1),
cd.discard(2), cd.discard(2), cd.heal_no_draw(4),
cd.heal_no_draw(4), cd.swap_main_minor(), cd.discard()
].append(green_deck())
# ############################## Yellow(+) DECKS #############################
# Han Solo Deck
def han_deck():
return [cd.atk_disc(4, 1), cd.atk_disc(4, 1), cd.atk_disc(4, 1),
cd.atk_move(5, 5), cd.atk_move(5, 5), cd.dmg_shuffle_discard(2),
cd.dmg_shuffle_discard(2)].append(yellow_deck_plus())
# Boba Fett Deck
def boba_deck():
return [cd.atk_draw(7, 2), cd.atk_draw(9, 3), cd.atk_move(4),
cd.atk_move(4), cd.atk_move(4), cd.dmg_splash_dmg(4),
cd.dmg_splash_dmg(4), cd.dmg_less_act(2), cd.dmg_less_act(2)
].append(yellow_deck())
# Jango Fett Deck
def jango_deck():
return [cd.push(2), cd.atk_draw(7, 4), cd.atk_move(4), cd.atk_move(4),
cd.atk_move(4), cd.move_not_act, cd.dmg_less_act(2),
cd.dmg_less_act(2)].append(yellow_deck())
# ############################### STRONG DECKS ##############################
# Padme Amidala Deck
def padme_deck():
return [cd.atk_disc_draw, cd.heal(4), cd.atk_move(6, 6)
].append(strong_deck())
# Leia Skywalker Deck
def leia_deck():
return [cd.atk_draw(7, 1, 7), cd.atk_draw(7, 1, 7), cd.heal(3),
cd.heal(3)].append(strong_deck())
# Zam Wesell Deck
def zam_deck():
return [cd.atk_move(7), cd.atk_unblocked(3, 6), cd.atk_unblocked(3, 6),
cd.atk_unblocked(3, 6)].append(strong_deck())
# Greedo Deck
def greedo_deck():
return [cd.kill_or_die(7), cd.move_adj_to_not_act(), cd.move_adj_to_not_act
].append(strong_deck())
# ############################### STRONG+ DECKS ##############################
# Chewbacca Deck Deck
def chewbacca_deck():
return [cd.atk_draw(11), cd.push(3, 3), cd.push(3, 3),
cd.heal_and_move(3, 5), cd.get_card('wookie instinct', 'deck')
].append(strong_deck_plus())
# ############################### GENERIC DECKS ##############################
def blue_deck():
return [cd.combat(5, 1)] * 2 + \
[cd.combat(4, 2)] * 2 + \
[cd.combat(3, 3)] * 2 + \
[cd.combat(1, 4)] * 2 + \
[cd.combat(4, 1), cd.combat(2, 3)]
def red_deck():
return [cd.combat(5, 1)] * 4 + \
[cd.combat(4, 2)] * 2 + \
[cd.combat(4, 1), cd.combat(3, 2), cd.combat(2, 2), cd.combat(1, 4)]
def green_deck():
return [cd.combat(4, 2)] * 4 + \
[cd.combat(3, 3)] * 3 + \
[cd.combat(2, 4)] * 2 + \
[cd.combat(1, 5)]
def yellow_deck():
return [cd.combat(4, 1)] * 3 + \
[cd.combat(3, 1)] * 2 + \
[cd.combat(3, 2)] + \
[cd.combat(2, 2)] * 2 + \
[cd.combat(1, 4)] * 2
def yellow_deck_plus():
return [cd.combat(4, 1)] * 3 + \
[cd.combat(3, 1)] * 2 + \
[cd.combat(3, 2), cd.combat(2, 2), cd.combat(2, 3)] + \
[cd.combat(1, 4)] * 2
def weak_deck():
return [cd.combat(3, 1)] * 2 + \
[cd.combat(2, 1)] * 3 + \
[cd.combat(1, 2)] * 4
def strong_deck():
return [cd.combat(4, 1)] * 2 + \
[cd.combat(3, 1)] * 2 + \
[cd.combat(3, 2)] + \
[cd.combat(2, 3)] * 2 + \
[cd.combat(1, 4)] * 2
def strong_deck_plus():
return [cd.combat(5, 1), cd.combat(4, 1)] + \
[cd.combat(3, 1)] * 2 + \
[cd.combat(3, 2)] + \
[cd.combat(2, 3)] * 2 + \
[cd.combat(1, 4)] * 2
def strong_deck_plus_plus():
return [cd.combat(5, 1), cd.combat(4, 1)] + \
[cd.combat(3, 1)] * 2 + \
[cd.combat(3, 2)] + \
[cd.combat(2, 3)] * 2 + \
[cd.combat(2, 4)] + \
[cd.combat(1, 4)]
if __name__ == '__main__':
deck = weak_deck()
print(deck)
| apache-2.0 | 1,336,933,643,753,592,600 | 28.611607 | 79 | 0.475803 | false |
spierepf/mpf | tests/test_BallDeviceHoldCoil.py | 1 | 14866 | import unittest
from mpf.system.machine import MachineController
from MpfTestCase import MpfTestCase
from mock import MagicMock
import time
class TestBallDevicesHoldCoil(MpfTestCase):
def getConfigFile(self):
return 'test_hold_coil.yaml'
def getMachinePath(self):
return '../tests/machine_files/ball_device/'
def test_holdcoil_with_direct_release(self):
self.machine.coils['hold_coil'].enable = MagicMock()
self.machine.coils['hold_coil'].disable = MagicMock()
# after hold switch was posted it should enable the hold_coil
self.machine.events.post('test_hold_event')
self.advance_time_and_run(0.1)
self.machine.coils['hold_coil'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil'].disable.called
# wait some more. coil should stay active
self.advance_time_and_run(300)
self.machine.coils['hold_coil'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil'].disable.called
# we trigger entrance switch
self.assertEqual(0, self.machine.ball_devices['test'].balls)
self.machine.coils['hold_coil'].enable = MagicMock()
self.machine.coils['hold_coil'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_entrance',state=1);
self.machine.switch_controller.process_switch(name='s_entrance',state=0);
self.advance_time_and_run(300)
self.assertEqual(0, self.machine.ball_devices['test'].balls)
# the device should eject the ball right away because nobody claimed it
self.machine.coils['hold_coil'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil'].enable.called
def test_holdcoil_which_keeps_ball(self):
# add one ball
self.assertEqual(0, self.machine.ball_devices['test2'].balls)
self.machine.coils['hold_coil2'].enable = MagicMock()
self.machine.coils['hold_coil2'].disable = MagicMock()
self.machine.events.post('test_hold_event2')
self.machine.switch_controller.process_switch(name='s_entrance2',state=1);
self.machine.switch_controller.process_switch(name='s_entrance2',state=0);
self.advance_time_and_run(300)
self.machine.coils['hold_coil2'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil2'].disable.called
self.assertEqual(1, self.machine.ball_devices['test2'].balls)
# add a second ball
self.machine.coils['hold_coil2'].enable = MagicMock()
self.machine.coils['hold_coil2'].disable = MagicMock()
self.machine.events.post('test_hold_event2')
self.machine.switch_controller.process_switch(name='s_entrance2',state=1);
self.machine.switch_controller.process_switch(name='s_entrance2',state=0);
self.advance_time_and_run(300)
self.machine_run()
self.machine.coils['hold_coil2'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil2'].disable.called
self.assertEqual(2, self.machine.ball_devices['test2'].balls)
# eject one ball
self.machine.coils['hold_coil2'].enable = MagicMock()
self.machine.coils['hold_coil2'].disable = MagicMock()
self.machine.ball_devices['test2'].eject()
self.advance_time_and_run(0.2)
self.machine.coils['hold_coil2'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil2'].enable.called
# it should reenable the hold coil after 1s because there is a second ball
self.machine.coils['hold_coil2'].enable = MagicMock()
self.machine.coils['hold_coil2'].disable = MagicMock()
self.advance_time_and_run(2)
assert not self.machine.coils['hold_coil2'].disable.called
self.machine.coils['hold_coil2'].enable.assert_called_once_with()
self.assertEqual(1, self.machine.ball_devices['test2'].balls)
def test_holdcoil_which_keeps_ball_multiple_entries(self):
# add one ball
self.machine.ball_devices['test2'].balls = 1
self.machine.ball_devices['test2'].available_balls = 1
# eject one ball
self.machine.coils['hold_coil2'].enable = MagicMock()
self.machine.coils['hold_coil2'].disable = MagicMock()
self.machine.ball_devices['test2'].eject()
self.advance_time_and_run(0.2)
self.machine.coils['hold_coil2'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil2'].enable.called
# during the hold add another ball. it should not enable hold now
self.machine.coils['hold_coil2'].enable = MagicMock()
self.machine.coils['hold_coil2'].disable = MagicMock()
self.machine.events.post('test_hold_event2')
self.machine.switch_controller.process_switch(name='s_entrance2',state=1);
self.machine.switch_controller.process_switch(name='s_entrance2',state=0);
self.advance_time_and_run(0.2)
assert not self.machine.coils['hold_coil2'].disable.called
assert not self.machine.coils['hold_coil2'].enable.called
# it should reenable the hold coil after 1s because there is a second ball
self.machine.coils['hold_coil2'].enable = MagicMock()
self.machine.coils['hold_coil2'].disable = MagicMock()
self.advance_time_and_run(2)
assert not self.machine.coils['hold_coil2'].disable.called
self.machine.coils['hold_coil2'].enable.assert_called_once_with()
self.assertEqual(1, self.machine.ball_devices['test2'].balls)
# eject that ball. coil should stay off
self.machine.coils['hold_coil2'].enable = MagicMock()
self.machine.coils['hold_coil2'].disable = MagicMock()
self.machine.ball_devices['test2'].eject()
self.advance_time_and_run(300)
self.machine.coils['hold_coil2'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil2'].enable.called
self.assertEqual(0, self.machine.ball_devices['test2'].balls)
def test_holdcoil_with_hold_and_entry_switch(self):
# add one ball
self.assertEqual(0, self.machine.ball_devices['test3'].balls)
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=1);
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=0);
self.advance_time_and_run(300)
self.machine.coils['hold_coil3'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil3'].disable.called
self.assertEqual(1, self.machine.ball_devices['test3'].balls)
# add a second ball
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=1);
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=0);
self.advance_time_and_run(300)
self.machine_run()
self.machine.coils['hold_coil3'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil3'].disable.called
self.assertEqual(2, self.machine.ball_devices['test3'].balls)
# eject one ball
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.machine.ball_devices['test3'].eject()
self.advance_time_and_run(0.2)
self.machine.coils['hold_coil3'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil3'].enable.called
# it should reenable the hold coil after 1s because there is a second ball
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.advance_time_and_run(2)
assert not self.machine.coils['hold_coil3'].disable.called
self.machine.coils['hold_coil3'].enable.assert_called_once_with()
self.assertEqual(1, self.machine.ball_devices['test3'].balls)
# eject another ball
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.machine.ball_devices['test3'].eject()
self.advance_time_and_run(0.2)
self.machine.coils['hold_coil3'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil3'].enable.called
self.assertEqual(0, self.machine.ball_devices['test3'].balls)
# coil should not reenable
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.advance_time_and_run(30)
assert not self.machine.coils['hold_coil3'].enable.called
def test_holdcoil_with_ball_switches(self):
# add one ball
self.assertEqual(0, self.machine.ball_devices['test4'].balls)
self.machine.coils['hold_coil4'].enable = MagicMock()
self.machine.coils['hold_coil4'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_ball4_1',state=1);
self.advance_time_and_run(300)
self.machine.coils['hold_coil4'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil4'].disable.called
self.assertEqual(1, self.machine.ball_devices['test4'].balls)
# add a second ball
self.machine.coils['hold_coil4'].enable = MagicMock()
self.machine.coils['hold_coil4'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_ball4_2',state=1);
self.advance_time_and_run(300)
self.machine_run()
self.machine.coils['hold_coil4'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil4'].disable.called
self.assertEqual(2, self.machine.ball_devices['test4'].balls)
# eject one ball
self.machine.coils['hold_coil4'].enable = MagicMock()
self.machine.coils['hold_coil4'].disable = MagicMock()
self.machine.ball_devices['test4'].eject()
self.advance_time_and_run(0.2)
self.machine.coils['hold_coil4'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil4'].enable.called
self.machine.switch_controller.process_switch(name='s_ball4_1',state=0);
self.machine.switch_controller.process_switch(name='s_ball4_2',state=0);
self.machine.switch_controller.process_switch(name='s_ball4_1',state=1);
# it should reenable the hold coil after 1s because there is a second ball
self.machine.coils['hold_coil4'].enable = MagicMock()
self.machine.coils['hold_coil4'].disable = MagicMock()
self.advance_time_and_run(2)
assert not self.machine.coils['hold_coil4'].disable.called
self.machine.coils['hold_coil4'].enable.assert_called_once_with()
self.assertEqual(1, self.machine.ball_devices['test4'].balls)
def test_holdcoil_with_ball_switches_eject_fail(self):
# add one ball
self.assertEqual(0, self.machine.ball_devices['test4'].balls)
self.machine.coils['hold_coil4'].enable = MagicMock()
self.machine.coils['hold_coil4'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_ball4_1',state=1);
self.advance_time_and_run(300)
self.machine.coils['hold_coil4'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil4'].disable.called
self.assertEqual(1, self.machine.ball_devices['test4'].balls)
# eject one ball
self.machine.coils['hold_coil4'].enable = MagicMock()
self.machine.coils['hold_coil4'].disable = MagicMock()
self.machine.ball_devices['test4'].eject()
self.advance_time_and_run(0.2)
self.machine.coils['hold_coil4'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil4'].enable.called
self.machine.coils['hold_coil4'].disable = MagicMock()
# after 2s the coil should get enabled again because there is still a ball in the device
self.advance_time_and_run(2)
self.machine.coils['hold_coil4'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil4'].disable.called
self.machine.coils['hold_coil4'].enable = MagicMock()
# no ball switches change. eject should fail
self.advance_time_and_run(8)
# it ejects again
self.machine.coils['hold_coil4'].disable.assert_called_once_with()
assert not self.machine.coils['hold_coil4'].enable.called
# ball leaves
self.machine.switch_controller.process_switch(name='s_ball4_1',state=0);
self.advance_time_and_run(1)
self.assertEqual(0, self.machine.ball_devices['test4'].balls)
self.advance_time_and_run(10)
self.assertEqual("idle", self.machine.ball_devices['test4']._state)
assert not self.machine.coils['hold_coil4'].enable.called
def test_entrance_switch_and_device_is_full(self):
# add one ball
self.assertEqual(0, self.machine.ball_devices['test3'].balls)
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=1);
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=0);
self.advance_time_and_run(300)
self.machine.coils['hold_coil3'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil3'].disable.called
self.assertEqual(1, self.machine.ball_devices['test3'].balls)
self.assertFalse(self.machine.ball_devices['test3'].is_full())
# add a second ball
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=1);
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=0);
self.advance_time_and_run(300)
self.machine_run()
self.machine.coils['hold_coil3'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil3'].disable.called
self.assertEqual(2, self.machine.ball_devices['test3'].balls)
self.assertTrue(self.machine.ball_devices['test3'].is_full())
# add a third ball
self.machine.coils['hold_coil3'].enable = MagicMock()
self.machine.coils['hold_coil3'].disable = MagicMock()
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=1);
self.machine.switch_controller.process_switch(name='s_entrance_and_hold3',state=0);
self.advance_time_and_run(300)
self.machine_run()
self.machine.coils['hold_coil3'].enable.assert_called_once_with()
assert not self.machine.coils['hold_coil3'].disable.called
self.assertEqual(2, self.machine.ball_devices['test3'].balls)
self.assertTrue(self.machine.ball_devices['test3'].is_full())
| mit | -6,126,895,013,305,631,000 | 47.581699 | 94 | 0.692184 | false |
kgullikson88/General | StellarModel.py | 1 | 48138 | __author__ = 'Kevin Gullikson'
import os
import sys
import re
from collections import defaultdict
import warnings
from collections import OrderedDict
import itertools
import FittingUtilities
import logging
from astropy import units
from scipy.interpolate import InterpolatedUnivariateSpline as spline, LinearNDInterpolator, NearestNDInterpolator, \
interp1d
import pandas
import numpy as np
import h5py
import DataStructures
import HelperFunctions
import Broaden
"""
This module provides the GetModelList function, plus helpers for classifying,
reading, and interpolating stellar model grids.
It is used in GenericSearch.py and SensitivityAnalysis.py.
"""
if "darwin" in sys.platform:
modeldir = "/Volumes/DATADRIVE/Stellar_Models/Sorted/Stellar/Vband/"
HDF5_FILE = '/Volumes/DATADRIVE/PhoenixGrid/Search_Grid.hdf5'
elif "linux" in sys.platform:
modeldir = "/media/FreeAgent_Drive/SyntheticSpectra/Sorted/Stellar/Vband/"
HDF5_FILE = '/media/ExtraSpace/PhoenixGrid/Search_Grid.hdf5'
else:
modeldir = raw_input("sys.platform not recognized. Please enter model directory below: ")
if not modeldir.endswith("/"):
modeldir = modeldir + "/"
def GetModelList(type='phoenix',
metal=[-0.5, 0, 0.5],
logg=[4.5, ],
temperature=range(3000, 6900, 100),
alpha=[0, 0.2],
model_directory=modeldir,
hdf5_file=HDF5_FILE):
"""This function searches the model directory (hard coded in StellarModels.py) for stellar
models with the appropriate parameters
:param type: the type of models to get. Right now, only 'phoenix', 'kurucz', and 'hdf5' are implemented
:param metal: a list of the metallicities to include
:param logg: a list of the surface gravity values to include
:param temperature: a list of the temperatures to include
:param model_directory: The absolute path to the model directory (only used for type=phoenix or kurucz)
:param hdf5_file: The absolute path to the HDF5 file with the models (only used for type=hdf5)
:return: a list of filenames for the requested models
"""
    # Coerce the parameter inputs to 1-d arrays so scalars are also accepted
metal = np.atleast_1d(metal)
logg = np.atleast_1d(logg)
temperature = np.atleast_1d(temperature)
alpha = np.atleast_1d(alpha)
if type.lower() == 'phoenix':
all_models = sorted([f for f in os.listdir(model_directory) if 'phoenix' in f.lower()])
chosen_models = []
for model in all_models:
Teff, gravity, metallicity = ClassifyModel(model)
if Teff in temperature and gravity in logg and metallicity in metal:
chosen_models.append("{:s}{:s}".format(model_directory, model))
elif type.lower() == "kurucz":
        all_models = [f for f in os.listdir(model_directory) if f.startswith("t") and f.endswith(".dat.bin.asc")]
chosen_models = []
for model in all_models:
Teff, gravity, metallicity, a = ClassifyModel(model, type='kurucz')
if Teff in temperature and gravity in logg and metallicity in metal and a in alpha:
chosen_models.append("{:s}{:s}".format(model_directory, model))
elif type.lower() == 'hdf5':
hdf5_int = HDF5Interface(hdf5_file)
chosen_models = []
for par in hdf5_int.list_grid_points:
if par['temp'] in temperature and par['logg'] in logg and par['Z'] in metal and par['alpha'] in alpha:
chosen_models.append(par)
else:
raise NotImplementedError("Sorry, the model type ({:s}) is not available!".format(type))
return chosen_models
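# Example usage (a sketch; paths and parameter values are hypothetical and the
# grid files must actually exist on disk):
# >>> models = GetModelList(type='hdf5', temperature=range(5000, 6100, 100),
# ...                       logg=[4.5], metal=[0.0], hdf5_file=HDF5_FILE)
# For type='hdf5' this returns a list of parameter dictionaries (keys 'temp',
# 'logg', 'Z', 'alpha'); for 'phoenix' and 'kurucz' it returns filename strings.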
def ClassifyModel(filename, type='phoenix'):
"""Get the effective temperature, log(g), and [Fe/H] of a stellar model from the filename
:param filename:
:param type: Currently, only phoenix type files are supported
:return:
"""
if not isinstance(filename, basestring):
raise TypeError("Filename must be a string!")
if type.lower() == 'phoenix':
segments = re.split("-|\+", filename.split("/")[-1])
temp = float(segments[0].split("lte")[-1]) * 100
gravity = float(segments[1])
metallicity = float(segments[2][:3])
if not "+" in filename and metallicity > 0:
metallicity *= -1
return temp, gravity, metallicity
elif type.lower() == 'kurucz':
fname = filename.split('/')[-1]
temp = float(fname[1:6])
gravity = float(fname[8:12])
metallicity = float(fname[14:16]) / 10.0
alpha = float(fname[18:20]) / 10.0
if fname[13] == "m":
metallicity *= -1
if fname[17] == "m":
alpha *= -1
return temp, gravity, metallicity, alpha
else:
raise NotImplementedError("Sorry, the model type ({:s}) is not available!".format(type))
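# Worked example based on the 'phoenix' parsing rules above (the filename is
# made up, but follows the scheme the parser expects):
# >>> ClassifyModel('lte30-4.50-0.5.PHOENIX-model.dat', type='phoenix')
# (3000.0, 4.5, -0.5)    # 'lte30' -> 30*100 K; no '+' in the name, so [Fe/H] < 0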
def MakeModelDicts(model_list, vsini_values=[10, 20, 30, 40], type='phoenix',
vac2air=True, logspace=False, hdf5_file=HDF5_FILE, get_T_sens=False):
"""This will take a list of models, and output two dictionaries that are
used by GenericSearch.py and Sensitivity.py
:param model_list: A list of model filenames
:param vsini_values: a list of vsini values to broaden the spectrum by (we do that later!)
:param type: the type of models. Currently, phoenix, kurucz, and hdf5 are implemented
:param vac2air: If true, assumes the model is in vacuum wavelengths and converts to air
:param logspace: If true, it will rebin the data to a constant log-spacing
:param hdf5_file: The absolute path to the HDF5 file with the models. Only used if type=hdf5
    :param get_T_sens: Boolean flag for getting the temperature sensitivity.
                       If true, it estimates the derivative dF/dT at each pixel and stores its square
:return: A dictionary containing the model with keys of temperature, gravity, metallicity, and vsini,
and another one with a processed flag with the same keys
"""
vsini_values = np.atleast_1d(vsini_values)
if type.lower() == 'phoenix':
modeldict = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(DataStructures.xypoint))))
processed = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(bool))))
for fname in model_list:
temp, gravity, metallicity = ClassifyModel(fname)
print "Reading in file %s" % fname
data = pandas.read_csv(fname,
header=None,
names=["wave", "flux"],
usecols=(0, 1),
sep=' ',
skipinitialspace=True)
x, y = data['wave'].values, data['flux'].values
# x, y = np.loadtxt(fname, usecols=(0, 1), unpack=True)
if vac2air:
n = 1.0 + 2.735182e-4 + 131.4182 / x ** 2 + 2.76249e8 / x ** 4
x /= n
model = DataStructures.xypoint(x=x * units.angstrom.to(units.nm), y=10 ** y)
if logspace:
xgrid = np.logspace(np.log(model.x[0]), np.log(model.x[-1]), model.size(), base=np.e)
model = FittingUtilities.RebinData(model, xgrid)
for vsini in vsini_values:
modeldict[temp][gravity][metallicity][vsini] = model
processed[temp][gravity][metallicity][vsini] = False
elif type.lower() == 'kurucz':
modeldict = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(DataStructures.xypoint)))))
processed = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(bool)))))
for fname in model_list:
temp, gravity, metallicity, a = ClassifyModel(fname)
print "Reading in file %s" % fname
data = pandas.read_csv(fname,
header=None,
names=["wave", "flux"],
usecols=(0, 1),
sep=' ',
skipinitialspace=True)
x, y = data['wave'].values, data['flux'].values
# x, y = np.loadtxt(fname, usecols=(0, 1), unpack=True)
if vac2air:
n = 1.0 + 2.735182e-4 + 131.4182 / x ** 2 + 2.76249e8 / x ** 4
x /= n
model = DataStructures.xypoint(x=x * units.angstrom.to(units.nm), y=10 ** y)
if logspace:
xgrid = np.logspace(np.log(model.x[0]), np.log(model.x[-1]), model.size(), base=np.e)
model = FittingUtilities.RebinData(model, xgrid)
for vsini in vsini_values:
modeldict[temp][gravity][metallicity][a][vsini] = model
processed[temp][gravity][metallicity][a][vsini] = False
elif type.lower() == 'hdf5':
hdf5_int = HDF5Interface(hdf5_file)
x = hdf5_int.wl
wave_hdr = hdf5_int.wl_header
        if vac2air:
            if not wave_hdr['air']:
                n = 1.0 + 2.735182e-4 + 131.4182 / x ** 2 + 2.76249e8 / x ** 4
                x /= n
            else:
                raise GridError(
                    'HDF5 grid is already in air wavelengths, but vac2air=True assumes a vacuum grid. You need a new grid!')
modeldict = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(DataStructures.xypoint)))))
processed = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(bool)))))
for pars in model_list:
temp, gravity, metallicity, a = pars['temp'], pars['logg'], pars['Z'], pars['alpha']
y = hdf5_int.load_flux(pars)
model = DataStructures.xypoint(x=x * units.angstrom.to(units.nm), y=y)
for vsini in vsini_values:
modeldict[temp][gravity][metallicity][a][vsini] = model
processed[temp][gravity][metallicity][a][vsini] = False
else:
raise NotImplementedError("Sorry, the model type ({:s}) is not available!".format(type))
if get_T_sens:
# Get the temperature sensitivity. Warning! This assumes the wavelength grid is the same in all models.
sensitivity = defaultdict(
lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(DataStructures.xypoint)))))
Tvals = sorted(modeldict.keys())
for i, T in enumerate(Tvals):
gvals = sorted(modeldict[T].keys())
for gravity in gvals:
metal_vals = sorted(modeldict[T][gravity].keys())
for metal in metal_vals:
alpha_vals = sorted(modeldict[T][gravity][metal].keys())
for alpha in alpha_vals:
# get the temperature just under this one
lower, l_idx = get_model(modeldict, Tvals, i, gravity, metal, vsini_values[0], alpha, mode='lower')
upper, u_idx = get_model(modeldict, Tvals, i, gravity, metal, vsini_values[0], alpha, mode='upper')
T_low = Tvals[l_idx]
T_high = Tvals[u_idx]
slope = (upper.y - lower.y) / (T_high - T_low)
for vsini in vsini_values:
sensitivity[T][gravity][metal][alpha][vsini] = slope**2
return modeldict, processed, sensitivity
return modeldict, processed
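# Example usage (a sketch, assuming HDF5_FILE points at a valid grid):
# >>> model_list = GetModelList(type='hdf5', temperature=[5000, 5100])
# >>> modeldict, processed = MakeModelDicts(model_list, vsini_values=[10, 20], type='hdf5')
# >>> spec = modeldict[5000][4.5][0.0][0.0][10]   # keys: T, logg, [Fe/H], alpha, vsini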
def get_model(mdict, Tvals, i, logg, metal, vsini, alpha=None, mode='same'):
"""
Get the model with the requested parameters
:param mode: How to get the model. valid options:
- 'same': Get the model with the exact requested parameters.
- 'lower': Get model with the exact values of everything except temperature (find the next lowest temperature)
- 'upper': Get model with the exact values of everything except temperature (find the next highest temperature)
"""
    if mode == 'same':
        if alpha is None:
            return mdict[Tvals[i]][logg][metal][vsini]
        else:
            return mdict[Tvals[i]][logg][metal][alpha][vsini]
elif mode == 'lower':
done = False
idx = i - 1
idx = max(0, idx)
        idx = min(len(Tvals) - 1, idx)
while not done:
if idx == 0 or idx == len(Tvals) - 1:
return get_model(mdict, Tvals, idx, logg, metal, vsini, alpha, mode='same'), idx
try:
return get_model(mdict, Tvals, idx, logg, metal, vsini, alpha, mode='same'), idx
except KeyError:
idx -= 1
elif mode == 'upper':
done = False
idx = i +1
idx = max(0, idx)
idx = min(len(Tvals)-1, idx)
while not done:
if idx == 0 or idx == len(Tvals) - 1:
return get_model(mdict, Tvals, idx, logg, metal, vsini, alpha, mode='same'), idx
try:
return get_model(mdict, Tvals, idx, logg, metal, vsini, alpha, mode='same'), idx
except KeyError:
idx += 1
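# Example: with Tvals = [5000, 5100, 5200], mode='lower' starting from i=2 walks
# downward until it finds a temperature that actually exists in mdict, and
# returns both the model and the index it settled on:
# >>> model, idx = get_model(mdict, Tvals, 2, 4.5, 0.0, 10, alpha=0.0, mode='lower')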
class HDF5Interface:
'''
Connect to an HDF5 file that stores spectra. Stolen shamelessly from Ian Czekala's Starfish code
'''
def __init__(self, filename, ranges={"temp": (0, np.inf),
"logg": (-np.inf, np.inf),
"Z": (-np.inf, np.inf),
"alpha": (-np.inf, np.inf)}):
'''
:param filename: the name of the HDF5 file
:type param: string
:param ranges: optionally select a smaller part of the grid to use.
:type ranges: dict
'''
self.filename = filename
self.flux_name = "t{temp:.0f}g{logg:.1f}z{Z:.1f}a{alpha:.1f}"
grid_parameters = ("temp", "logg", "Z", "alpha") # Allowed grid parameters
grid_set = frozenset(grid_parameters)
with h5py.File(self.filename, "r") as hdf5:
self.wl = hdf5["wl"][:]
self.wl_header = dict(hdf5["wl"].attrs.items())
grid_points = []
for key in hdf5["flux"].keys():
# assemble all temp, logg, Z, alpha keywords into a giant list
hdr = hdf5['flux'][key].attrs
params = {k: hdr[k] for k in grid_set}
#Check whether the parameters are within the range
for kk, vv in params.items():
low, high = ranges[kk]
if (vv < low) or (vv > high):
break
else:
#If all parameters have passed successfully through the ranges, allow.
grid_points.append(params)
self.list_grid_points = grid_points
# determine the bounding regions of the grid by sorting the grid_points
temp, logg, Z, alpha = [], [], [], []
for param in self.list_grid_points:
temp.append(param['temp'])
logg.append(param['logg'])
Z.append(param['Z'])
alpha.append(param['alpha'])
self.bounds = {"temp": (min(temp), max(temp)),
"logg": (min(logg), max(logg)),
"Z": (min(Z), max(Z)),
"alpha": (min(alpha), max(alpha))}
self.points = {"temp": np.unique(temp),
"logg": np.unique(logg),
"Z": np.unique(Z),
"alpha": np.unique(alpha)}
self.ind = None #Overwritten by other methods using this as part of a ModelInterpolator
def load_flux(self, parameters):
'''
Load just the flux from the grid, with possibly an index truncation.
:param parameters: the stellar parameters
:type parameters: dict
:raises KeyError: if spectrum is not found in the HDF5 file.
:returns: flux array
'''
key = self.flux_name.format(**parameters)
with h5py.File(self.filename, "r") as hdf5:
try:
if self.ind is not None:
fl = hdf5['flux'][key][self.ind[0]:self.ind[1]]
else:
fl = hdf5['flux'][key][:]
except KeyError as e:
raise GridError(e)
# Note: will raise a KeyError if the file is not found.
return fl
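# Example usage (a sketch; the file path and the returned parameter set are assumptions):
# >>> interface = HDF5Interface('/path/to/grid.hdf5')
# >>> pars = interface.list_grid_points[0]   # e.g. {'temp': 5000, 'logg': 4.5, 'Z': 0.0, 'alpha': 0.0}
# >>> flux = interface.load_flux(pars)       # ndarray aligned with interface.wl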
grid_parameters = ("temp", "logg", "Z", "alpha") # Allowed grid parameters
grid_set = frozenset(grid_parameters)
var_default = {"temp": 5800, "logg": 4.5, "Z": 0.0, "alpha": 0.0,
"vsini": 0.0, "FWHM": 0.0, "vz": 0.0, "Av": 0.0, "logOmega": 0.0}
class IndexInterpolator:
'''
Object to return fractional distance between grid points of a single grid variable.
:param parameter_list: list of parameter values
:type parameter_list: 1-D list
'''
def __init__(self, parameter_list):
self.parameter_list = np.unique(parameter_list)
self.index_interpolator = interp1d(self.parameter_list, np.arange(len(self.parameter_list)), kind='linear')
pass
def __call__(self, value):
'''
Evaluate the interpolator at a parameter.
:param value:
:type value: float
        :raises InterpolationError: if *value* is out of bounds.
:returns: ((low_val, high_val), (frac_low, frac_high)), the lower and higher bounding points in the grid
and the fractional distance (0 - 1) between them and the value.
'''
try:
index = self.index_interpolator(value)
except ValueError as e:
raise InterpolationError("Requested value {} is out of bounds. {}".format(value, e))
        high = int(np.ceil(index))
        low = int(np.floor(index))
frac_index = index - low
return ((self.parameter_list[low], self.parameter_list[high]), ((1 - frac_index), frac_index))
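# Worked example of the fractional indexing above: with parameter_list = [5000, 5100],
# the value 5025 maps to fractional index 0.25, so (up to numpy scalar types)
# >>> IndexInterpolator([5000, 5100])(5025)
# ((5000, 5100), (0.75, 0.25))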
class Interpolator:
'''
Quickly and efficiently interpolate a synthetic spectrum for use in an MCMC simulation. Caches spectra for
easier memory load.
:param interface: :obj:`HDF5Interface` (recommended) or :obj:`RawGridInterface` to load spectra
:param DataSpectrum: data spectrum that you are trying to fit. Used for truncating the synthetic spectra to the relevant region for speed.
:type DataSpectrum: :obj:`spectrum.DataSpectrum`
:param cache_max: maximum number of spectra to hold in cache
:type cache_max: int
:param cache_dump: how many spectra to purge from the cache once :attr:`cache_max` is reached
:type cache_dump: int
:param trilinear: Should this interpolate in temp, logg, and [Fe/H] AND [alpha/Fe], or just the first three parameters.
:type trilinear: bool
Setting :attr:`trilinear` to **True** is useful for when you want to do a run with [Fe/H] > 0.0
'''
def __init__(self, interface, DataSpectrum, cache_max=256, cache_dump=64, trilinear=False, log=True):
'''
Param log decides how to chunk up the returned spectrum. If we are using a pre-instrument convolved grid,
then we want to use log=True.
If we are using the raw synthetic grid, then we want to use log=False.
'''
self.interface = interface
self.DataSpectrum = DataSpectrum
# If alpha only includes one value, then do trilinear interpolation
(alow, ahigh) = self.interface.bounds['alpha']
if (alow == ahigh) or trilinear:
self.parameters = grid_set - set(("alpha",))
else:
self.parameters = grid_set
self.wl = self.interface.wl
self.wl_dict = self.interface.wl_header
if log:
self._determine_chunk_log()
else:
self._determine_chunk()
self.setup_index_interpolators()
self.cache = OrderedDict([])
self.cache_max = cache_max
self.cache_dump = cache_dump #how many to clear once the maximum cache has been reached
def _determine_chunk_log(self, tol=50):
'''
Using the DataSpectrum, determine the minimum chunksize that we can use and then truncate the synthetic
wavelength grid and the returned spectra.
Assumes HDF5Interface is LogLambda spaced, because otherwise you shouldn't need a grid with 2^n points,
because you would need to interpolate in wl space after this anyway.
'''
wave_grid = self.interface.wl
wl_min, wl_max = np.min(self.DataSpectrum.wls) - tol, np.max(self.DataSpectrum.wls) + tol
# Length of the raw synthetic spectrum
len_wg = len(wave_grid)
#ind_wg = np.arange(len_wg) #Labels of pixels
#Length of the data
len_data = np.sum(
(self.wl > wl_min - tol) & (self.wl < wl_max + tol)) # How much of the synthetic spectrum do we need?
#Find the smallest length synthetic spectrum that is a power of 2 in length and larger than the data spectrum
chunk = len_wg
self.interface.ind = (0, chunk) #Set to be the full spectrum
while chunk > len_data:
if chunk / 2 > len_data:
chunk = chunk // 2
else:
break
assert type(chunk) == np.int, "Chunk is no longer integer!. Chunk is {}".format(chunk)
if chunk < len_wg:
# Now that we have determined the length of the chunk of the synthetic spectrum, determine indices
# that straddle the data spectrum.
# What index corresponds to the wl at the center of the data spectrum?
median_wl = np.median(self.DataSpectrum.wls)
median_ind = (np.abs(wave_grid - median_wl)).argmin()
#Take the chunk that straddles either side.
ind = [median_ind - chunk // 2, median_ind + chunk // 2]
if ind[0] < 0:
ind[1] -= ind[0]
ind[0] = 0
elif ind[1] >= len_wg:
ind[0] -= (ind[1] - len_wg - 1)
ind[1] -= (ind[1] - len_wg - 1)
ind = tuple(ind)
self.wl = self.wl[ind[0]:ind[1]]
assert min(self.wl) < wl_min and max(self.wl) > wl_max, "ModelInterpolator chunking ({:.2f}, {:.2f}) " \
"didn't encapsulate full DataSpectrum range ({:.2f}, {:.2f}).".format(
min(self.wl),
max(self.wl), wl_min, wl_max)
self.interface.ind = ind
print("Determine Chunk Log: Wl is {}".format(len(self.wl)))
def _determine_chunk(self):
'''
Using the DataSpectrum, set the bounds of the interpolator to +/- 50 Ang
'''
wave_grid = self.interface.wl
wl_min, wl_max = np.min(self.DataSpectrum.wls), np.max(self.DataSpectrum.wls)
ind_low = (np.abs(wave_grid - (wl_min - 50.))).argmin()
ind_high = (np.abs(wave_grid - (wl_max + 50.))).argmin()
self.wl = self.wl[ind_low:ind_high]
assert min(self.wl) < wl_min and max(self.wl) > wl_max, "ModelInterpolator chunking ({:.2f}, {:.2f}) " \
"didn't encapsulate full DataSpectrum range ({:.2f}, {:.2f}).".format(
min(self.wl),
max(self.wl), wl_min, wl_max)
self.interface.ind = (ind_low, ind_high)
print("Wl is {}".format(len(self.wl)))
def __call__(self, parameters):
'''
Interpolate a spectrum
:param parameters: stellar parameters
:type parameters: dict
Automatically pops :attr:`cache_dump` items from cache if full.
'''
if len(self.cache) > self.cache_max:
[self.cache.popitem(False) for i in range(self.cache_dump)]
self.cache_counter = 0
try:
return self.interpolate(parameters)
        except Exception:
logging.warning('Warning! Interpolation error found! Returning ones array!')
return np.ones_like(self.wl)
def setup_index_interpolators(self):
# create an interpolator between grid points indices. Given a temp, produce fractional index between two points
self.index_interpolators = {key: IndexInterpolator(self.interface.points[key]) for key in self.parameters}
lenF = self.interface.ind[1] - self.interface.ind[0]
self.fluxes = np.empty((2 ** len(self.parameters), lenF)) #8 rows, for temp, logg, Z
def interpolate(self, parameters):
'''
Interpolate a spectrum without clearing cache. Recommended to use :meth:`__call__` instead.
:param parameters: stellar parameters
:type parameters: dict
        :raises InterpolationError: if parameters are out of bounds.
        :returns: the interpolated flux array, built as a weighted sum of the bounding grid spectra
'''
# Here it really would be nice to return things in a predictable order
# (temp, logg, Z)
odict = OrderedDict()
for key in ("temp", "logg", "Z"):
odict[key] = parameters[key]
try:
edges = OrderedDict()
for key, value in odict.items():
edges[key] = self.index_interpolators[key](value)
except InterpolationError as e:
raise InterpolationError("Parameters {} are out of bounds. {}".format(parameters, e))
#Edges is a dictionary of {"temp": ((6000, 6100), (0.2, 0.8)), "logg": (())..}
names = [key for key in edges.keys()] #list of ["temp", "logg", "Z"],
params = [edges[key][0] for key in names] #[(6000, 6100), (4.0, 4.5), ...]
weights = [edges[key][1] for key in names] #[(0.2, 0.8), (0.4, 0.6), ...]
param_combos = itertools.product(*params) #Selects all the possible combinations of parameters
#[(6000, 4.0, 0.0), (6100, 4.0, 0.0), (6000, 4.5, 0.0), ...]
weight_combos = itertools.product(*weights)
#[(0.2, 0.4, 1.0), (0.8, 0.4, 1.0), ...]
parameter_list = [dict(zip(names, param)) for param in param_combos]
if "alpha" not in parameters.keys():
[param.update({"alpha": var_default["alpha"]}) for param in parameter_list]
key_list = [self.interface.flux_name.format(**param) for param in parameter_list]
weight_list = np.array([np.prod(weight) for weight in weight_combos])
assert np.allclose(np.sum(weight_list), np.array(1.0)), "Sum of weights must equal 1, {}".format(
np.sum(weight_list))
#Assemble flux vector from cache
for i, param in enumerate(parameter_list):
key = key_list[i]
if key not in self.cache.keys():
try:
fl = self.interface.load_flux(param) #This method allows loading only the relevant region from HDF5
                except (KeyError, GridError) as e:
                    raise InterpolationError("Parameters {} not in master HDF5 grid. {}".format(param, e))
                self.cache[key] = fl
                # Note: with a ragged grid, load_flux raises a GridError here because a Z=+1, alpha!=0 spectrum can't be found.
self.fluxes[i, :] = self.cache[key] * weight_list[i]
return np.sum(self.fluxes, axis=0)
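# Example usage (a sketch; assumes a DataSpectrum instance named `data` and a valid grid file):
# >>> interp = Interpolator(HDF5Interface('/path/to/grid.hdf5'), data)
# >>> flux = interp({'temp': 5750, 'logg': 4.3, 'Z': 0.1})   # trilinear blend of the 8 bounding spectra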
class DataSpectrum:
'''
Object to manipulate the data spectrum.
:param wls: wavelength (in AA)
:type wls: 1D or 2D np.array
:param fls: flux (in f_lam)
:type fls: 1D or 2D np.array
:param sigmas: Poisson noise (in f_lam)
:type sigmas: 1D or 2D np.array
:param masks: Mask to blot out bad pixels or emission regions.
:type masks: 1D or 2D np.array of boolean values
If the wl, fl, are provided as 1D arrays (say for a single order), they will be converted to 2D arrays with length 1
in the 0-axis.
.. note::
For now, the DataSpectrum wls, fls, sigmas, and masks must be a rectangular grid. No ragged Echelle orders allowed.
'''
def __init__(self, wls, fls, sigmas, masks=None, orders='all', name=None):
self.wls = np.atleast_2d(wls)
self.fls = np.atleast_2d(fls)
self.sigmas = np.atleast_2d(sigmas)
self.masks = np.atleast_2d(masks) if masks is not None else np.ones_like(self.wls, dtype='b')
self.shape = self.wls.shape
assert self.fls.shape == self.shape, "flux array incompatible shape."
assert self.sigmas.shape == self.shape, "sigma array incompatible shape."
assert self.masks.shape == self.shape, "mask array incompatible shape."
if orders != 'all':
# can either be a numpy array or a list
orders = np.array(orders) #just to make sure
self.wls = self.wls[orders]
self.fls = self.fls[orders]
self.sigmas = self.sigmas[orders]
self.masks = self.masks[orders]
self.shape = self.wls.shape
self.orders = orders
else:
self.orders = np.arange(self.shape[0])
self.name = name
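# Example (hypothetical arrays; a single order is promoted to shape (1, N)):
# >>> wls = np.linspace(500, 510, 1000)
# >>> data = DataSpectrum(wls, np.ones_like(wls), 0.01 * np.ones_like(wls))
# >>> data.shape
# (1, 1000)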
class GridError(Exception):
'''
Raised when a spectrum cannot be found in the grid.
'''
def __init__(self, msg):
self.msg = msg
class InterpolationError(Exception):
'''
Raised when the :obj:`Interpolator` or :obj:`IndexInterpolator` cannot properly interpolate a spectrum,
usually grid bounds.
'''
def __init__(self, msg):
self.msg = msg
class KuruczGetter():
def __init__(self, modeldir, rebin=True, T_min=7000, T_max=9000, logg_min=3.5, logg_max=4.5, metal_min=-0.5,
metal_max=0.5, alpha_min=0.0, alpha_max=0.4, wavemin=0, wavemax=np.inf, debug=False):
"""
This class will read in a directory with Kurucz models
The associated methods can be used to interpolate a model at any
temperature, gravity, metallicity, and [alpha/Fe] value that
falls within the grid
modeldir: The directory where the models are stored. Can be a list of model directories too!
rebin: If True, it will rebin the models to a constant x-spacing
other args: The minimum and maximum values for the parameters to search.
You need to keep this as small as possible to avoid memory issues!
The whole grid would take about 36 GB of RAM!
"""
self.rebin = rebin
self.debug = debug
# First, read in the grid
if HelperFunctions.IsListlike(modeldir):
# There are several directories to combine
Tvals = []
loggvals = []
metalvals = []
alphavals = []
for i, md in enumerate(modeldir):
if i == 0:
T, G, Z, A, S = self.read_grid(md, rebin=rebin, T_min=T_min, T_max=T_max, logg_min=logg_min,
logg_max=logg_max, metal_min=metal_min, metal_max=metal_max,
alpha_min=alpha_min, alpha_max=alpha_max, wavemin=wavemin,
wavemax=wavemax,
xaxis=None)
spectra = np.array(S)
else:
T, G, Z, A, S = self.read_grid(md, rebin=rebin, T_min=T_min, T_max=T_max, logg_min=logg_min,
logg_max=logg_max, metal_min=metal_min, metal_max=metal_max,
alpha_min=alpha_min, alpha_max=alpha_max, wavemin=wavemin,
wavemax=wavemax,
xaxis=self.xaxis)
S = np.array(S)
spectra = np.vstack((spectra, S))
Tvals = np.hstack((Tvals, T))
loggvals = np.hstack((loggvals, G))
metalvals = np.hstack((metalvals, Z))
alphavals = np.hstack((alphavals, A))
else:
Tvals, loggvals, metalvals, alphavals, spectra = self.read_grid(modeldir,
rebin=rebin,
T_min=T_min,
T_max=T_max,
logg_min=logg_min,
logg_max=logg_max,
metal_min=metal_min,
metal_max=metal_max,
alpha_min=alpha_min,
alpha_max=alpha_max,
wavemin=wavemin,
wavemax=wavemax,
xaxis=None)
# Check if there are actually two different values of alpha/Fe
        alpha_varies = max(alphavals) - min(alphavals) > 0.1
# Scale the variables so they all have about the same range
self.T_scale = ((max(Tvals) + min(Tvals)) / 2.0, max(Tvals) - min(Tvals))
self.metal_scale = ((max(metalvals) + min(metalvals)) / 2.0, max(metalvals) - min(metalvals))
self.logg_scale = ((max(loggvals) + min(loggvals)) / 2.0, max(loggvals) - min(loggvals))
if alpha_varies:
self.alpha_scale = ((max(alphavals) + min(alphavals)) / 2.0, max(alphavals) - min(alphavals))
Tvals = (np.array(Tvals) - self.T_scale[0]) / self.T_scale[1]
loggvals = (np.array(loggvals) - self.logg_scale[0]) / self.logg_scale[1]
metalvals = (np.array(metalvals) - self.metal_scale[0]) / self.metal_scale[1]
if alpha_varies:
alphavals = (np.array(alphavals) - self.alpha_scale[0]) / self.alpha_scale[1]
        if self.debug:
            print self.T_scale
            print self.metal_scale
            print self.logg_scale
            if alpha_varies:
                print self.alpha_scale
# Make the grid and interpolator instances
if alpha_varies:
self.grid = np.array((Tvals, loggvals, metalvals, alphavals)).T
else:
self.grid = np.array((Tvals, loggvals, metalvals)).T
self.spectra = np.array(spectra)
self.interpolator = LinearNDInterpolator(self.grid, self.spectra) # , rescale=True)
self.NN_interpolator = NearestNDInterpolator(self.grid, self.spectra) # , rescale=True)
self.alpha_varies = alpha_varies
def read_grid(self, modeldir, rebin=True, T_min=7000, T_max=9000, logg_min=3.5, logg_max=4.5, metal_min=-0.5,
metal_max=0.5, alpha_min=0.0, alpha_max=0.4, wavemin=0, wavemax=np.inf, xaxis=None):
Tvals = []
loggvals = []
metalvals = []
alphavals = []
spectra = []
firstkeeper = True
modelfiles = [f for f in os.listdir(modeldir) if f.startswith("t") and f.endswith(".dat.bin.asc")]
for i, fname in enumerate(modelfiles):
T = float(fname[1:6])
logg = float(fname[8:12])
metal = float(fname[14:16]) / 10.0
alpha = float(fname[18:20]) / 10.0
if fname[13] == "m":
metal *= -1
if fname[17] == "m":
alpha *= -1
# Read in and save file if it falls in the correct parameter range
if (T_min <= T <= T_max and
logg_min <= logg <= logg_max and
metal_min <= metal <= metal_max and
alpha_min <= alpha <= alpha_max):
if self.debug:
print "Reading in file {:s}".format(fname)
data = pandas.read_csv("{:s}/{:s}".format(modeldir, fname),
header=None,
names=["wave", "norm"],
usecols=(0, 3),
sep=' ',
skipinitialspace=True)
x, y = data['wave'].values, data['norm'].values
# x, y = np.loadtxt("{:s}/{:s}".format(modeldir, fname), usecols=(0, 3), unpack=True)
x *= units.angstrom.to(units.nm)
y[np.isnan(y)] = 0.0
left = np.searchsorted(x, wavemin)
right = np.searchsorted(x, wavemax)
x = x[left:right]
y = y[left:right]
if rebin:
if firstkeeper:
xgrid = np.logspace(np.log10(x[0]), np.log10(x[-1]), x.size)
else:
xgrid = self.xaxis
fcn = spline(x, y)
x = xgrid
y = fcn(xgrid)
if firstkeeper:
self.xaxis = x if xaxis is None else xaxis
firstkeeper = False
            elif np.max(np.abs(self.xaxis - x)) > 1e-4:
warnings.warn("x-axis for file {:s} is different from the master one! Not saving!".format(fname))
continue
Tvals.append(T)
loggvals.append(logg)
metalvals.append(metal)
alphavals.append(alpha)
spectra.append(y)
return Tvals, loggvals, metalvals, alphavals, spectra
def __call__(self, T, logg, metal, alpha, vsini=0.0, return_xypoint=True, **kwargs):
"""
        Given parameters, return an interpolated spectrum.
        If return_xypoint is False, then it will only return
        a numpy.ndarray with the spectrum.
        Before interpolating, we will do some error checking to make
        sure the requested values fall within the grid.
"""
# Scale the requested values
if self.debug:
print T, logg, metal, alpha, vsini
T = (T - self.T_scale[0]) / self.T_scale[1]
logg = (logg - self.logg_scale[0]) / self.logg_scale[1]
metal = (metal - self.metal_scale[0]) / self.metal_scale[1]
if self.alpha_varies:
alpha = (alpha - self.alpha_scale[0]) / self.alpha_scale[1]
# Get the minimum and maximum values in the grid
T_min = min(self.grid[:, 0])
T_max = max(self.grid[:, 0])
logg_min = min(self.grid[:, 1])
logg_max = max(self.grid[:, 1])
metal_min = min(self.grid[:, 2])
metal_max = max(self.grid[:, 2])
alpha_min = min(self.grid[:, 3]) if self.alpha_varies else 0.0
alpha_max = max(self.grid[:, 3]) if self.alpha_varies else 0.0
if self.alpha_varies:
input_list = (T, logg, metal, alpha)
else:
input_list = (T, logg, metal)
# Check to make sure the requested values fall within the grid
if (T_min <= T <= T_max and
logg_min <= logg <= logg_max and
metal_min <= metal <= metal_max and
(not self.alpha_varies or alpha_min <= alpha <= alpha_max)):
y = self.interpolator(input_list)
else:
if self.debug:
warnings.warn("The requested parameters fall outside the model grid. Results may be unreliable!")
# print T, T_min, T_max
# print logg, logg_min, logg_max
#print metal, metal_min, metal_max
#print alpha, alpha_min, alpha_max
y = self.NN_interpolator(input_list)
# Test to make sure the result is valid. If the requested point is
# outside the Delaunay triangulation, it will return NaN's
if np.any(np.isnan(y)):
if self.debug:
warnings.warn("Found NaNs in the interpolated spectrum! Falling back to Nearest Neighbor")
y = self.NN_interpolator(input_list)
model = DataStructures.xypoint(x=self.xaxis, y=y)
vsini *= units.km.to(units.cm)
model = Broaden.RotBroad(model, vsini, linear=self.rebin)
# Return the appropriate object
if return_xypoint:
return model
else:
return model.y
"""
=======================================================================
=======================================================================
=======================================================================
"""
class PhoenixGetter():
def __init__(self, modeldir, rebin=True, T_min=3000, T_max=6800, metal_min=-0.5,
metal_max=0.5, wavemin=0, wavemax=np.inf, debug=False):
"""
        This class will read in a directory with Phoenix models.
        The associated methods can be used to interpolate a model at any
        temperature and metallicity value that
        falls within the grid.
modeldir: The directory where the models are stored. Can be a list of model directories too!
rebin: If True, it will rebin the models to a constant x-spacing
other args: The minimum and maximum values for the parameters to search.
You need to keep this as small as possible to avoid memory issues!
"""
self.rebin = rebin
self.debug = debug
# First, read in the grid
if HelperFunctions.IsListlike(modeldir):
# There are several directories to combine
Tvals = []
metalvals = []
for i, md in enumerate(modeldir):
if i == 0:
T, Z, S = self.read_grid(md, rebin=rebin, T_min=T_min, T_max=T_max,
metal_min=metal_min, metal_max=metal_max,
wavemin=wavemin, wavemax=wavemax, xaxis=None)
spectra = np.array(S)
else:
T, Z, S = self.read_grid(md, rebin=rebin, T_min=T_min, T_max=T_max,
metal_min=metal_min, metal_max=metal_max,
wavemin=wavemin, wavemax=wavemax, xaxis=self.xaxis)
S = np.array(S)
spectra = np.vstack((spectra, S))
Tvals = np.hstack((Tvals, T))
metalvals = np.hstack((metalvals, Z))
else:
Tvals, metalvals, spectra = self.read_grid(modeldir, rebin=rebin,
T_min=T_min, T_max=T_max,
metal_min=metal_min, metal_max=metal_max,
wavemin=wavemin, wavemax=wavemax, xaxis=None)
# Scale the variables so they all have about the same range
self.T_scale = ((max(Tvals) + min(Tvals)) / 2.0, max(Tvals) - min(Tvals))
self.metal_scale = ((max(metalvals) + min(metalvals)) / 2.0, max(metalvals) - min(metalvals))
Tvals = (np.array(Tvals) - self.T_scale[0]) / self.T_scale[1]
metalvals = (np.array(metalvals) - self.metal_scale[0]) / self.metal_scale[1]
# Make the grid and interpolator instances
self.grid = np.array((Tvals, metalvals)).T
self.spectra = np.array(spectra)
self.interpolator = LinearNDInterpolator(self.grid, self.spectra) # , rescale=True)
self.NN_interpolator = NearestNDInterpolator(self.grid, self.spectra) # , rescale=True)
def read_grid(self, modeldir, rebin=True, T_min=3000, T_max=6800, metal_min=-0.5,
metal_max=0.5, wavemin=0, wavemax=np.inf, xaxis=None, debug=False):
Tvals = []
metalvals = []
spectra = []
firstkeeper = True
modelfiles = [f for f in os.listdir(modeldir) if
f.startswith("lte") and "PHOENIX" in f and f.endswith(".sorted")]
for i, fname in enumerate(modelfiles):
T, logg, metal = ClassifyModel(fname)
# Read in and save file if it falls in the correct parameter range
if (T_min <= T <= T_max and
metal_min <= metal <= metal_max and
logg == 4.5):
if self.debug:
print "Reading in file {:s}".format(fname)
data = pandas.read_csv("{:s}{:s}".format(modeldir, fname),
header=None,
names=["wave", "flux", "continuum"],
usecols=(0, 1, 2),
sep=' ',
skipinitialspace=True)
x, y, c = data['wave'].values, data['flux'].values, data['continuum'].values
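                # Hedged note: the line below appears to be the standard Edlen-style
                # index of refraction of air (wavelength in Angstroms), used to shift
                # the grid's vacuum wavelengths to air wavelengths before the nm
                # conversion.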
n = 1.0 + 2.735182e-4 + 131.4182 / x ** 2 + 2.76249e8 / x ** 4
x /= n
x *= units.angstrom.to(units.nm)
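                # flux and continuum appear to be stored as log10 values, so this
                # yields the continuum-normalized spectrum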
y = 10 ** y / 10 ** c
left = np.searchsorted(x, wavemin)
right = np.searchsorted(x, wavemax)
x = x[left:right]
y = y[left:right]
if rebin:
if firstkeeper:
xgrid = np.logspace(np.log10(x[0]), np.log10(x[-1]), x.size)
else:
xgrid = self.xaxis
fcn = spline(x, y)
x = xgrid
y = fcn(xgrid)
if firstkeeper:
self.xaxis = x if xaxis is None else xaxis
firstkeeper = False
            elif np.max(np.abs(self.xaxis - x)) > 1e-4:
warnings.warn("x-axis for file {:s} is different from the master one! Not saving!".format(fname))
continue
Tvals.append(T)
metalvals.append(metal)
spectra.append(y)
return Tvals, metalvals, spectra
def __call__(self, T, metal, vsini=0.0, return_xypoint=True, **kwargs):
"""
        Given parameters, return an interpolated spectrum.
        If return_xypoint is False, then it will only return
        a numpy.ndarray with the spectrum.
        Before interpolating, we will do some error checking to make
        sure the requested values fall within the grid.
"""
# Scale the requested values
T = (T - self.T_scale[0]) / self.T_scale[1]
metal = (metal - self.metal_scale[0]) / self.metal_scale[1]
# Get the minimum and maximum values in the grid
T_min = min(self.grid[:, 0])
T_max = max(self.grid[:, 0])
metal_min = min(self.grid[:, 1])
metal_max = max(self.grid[:, 1])
input_list = (T, metal)
# Check to make sure the requested values fall within the grid
if (T_min <= T <= T_max and
metal_min <= metal <= metal_max):
y = self.interpolator(input_list)
else:
if self.debug:
warnings.warn("The requested parameters fall outside the model grid. Results may be unreliable!")
print T, T_min, T_max
print metal, metal_min, metal_max
y = self.NN_interpolator(input_list)
# Test to make sure the result is valid. If the requested point is
# outside the Delaunay triangulation, it will return NaN's
if np.any(np.isnan(y)):
if self.debug:
warnings.warn("Found NaNs in the interpolated spectrum! Falling back to Nearest Neighbor")
y = self.NN_interpolator(input_list)
model = DataStructures.xypoint(x=self.xaxis, y=y)
vsini *= units.km.to(units.cm)
model = Broaden.RotBroad(model, vsini, linear=self.rebin)
# Return the appropriate object
if return_xypoint:
return model
else:
return model.y
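# Hedged usage sketch for the getter above (the grid directory and parameter
# values are hypothetical; real grids come from the PHOENIX library):
#
#     getter = PhoenixGetter('/path/to/phoenix/grid/', T_min=4000, T_max=6500)
#     model = getter(T=5500.0, metal=0.0, vsini=10.0)  # DataStructures.xypoint
#     flux = getter(T=5500.0, metal=0.0, return_xypoint=False)  # plain ndarray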
| gpl-3.0 | 7,087,829,652,987,181,000 | 42.563801 | 144 | 0.544809 | false |
Kriechi/mitmproxy | test/mitmproxy/net/test_socks.py | 1 | 6067 | import ipaddress
from io import BytesIO
import pytest
from mitmproxy.net import socks
from mitmproxy.test import tutils
# This is a temporary placeholder; the file-based API will be removed once socks proxying transitions to sans-io.
class tutils: # noqa
@staticmethod
def treader(data: bytes):
io = BytesIO(data)
io.safe_read = io.read
return io
def test_client_greeting():
raw = tutils.treader(b"\x05\x02\x00\xBE\xEF")
out = BytesIO()
msg = socks.ClientGreeting.from_file(raw)
msg.assert_socks5()
msg.to_file(out)
assert out.getvalue() == raw.getvalue()[:-1]
assert msg.ver == 5
assert len(msg.methods) == 2
assert 0xBE in msg.methods
assert 0xEF not in msg.methods
def test_client_greeting_assert_socks5():
raw = tutils.treader(b"\x00\x00")
msg = socks.ClientGreeting.from_file(raw)
with pytest.raises(socks.SocksError):
msg.assert_socks5()
raw = tutils.treader(b"HTTP/1.1 200 OK" + b" " * 100)
msg = socks.ClientGreeting.from_file(raw)
try:
msg.assert_socks5()
except socks.SocksError as e:
assert "Invalid SOCKS version" in str(e)
assert "HTTP" not in str(e)
else:
assert False
raw = tutils.treader(b"GET / HTTP/1.1" + b" " * 100)
msg = socks.ClientGreeting.from_file(raw)
try:
msg.assert_socks5()
except socks.SocksError as e:
assert "Invalid SOCKS version" in str(e)
assert "HTTP" in str(e)
else:
assert False
raw = tutils.treader(b"XX")
with pytest.raises(socks.SocksError):
socks.ClientGreeting.from_file(raw, fail_early=True)
def test_server_greeting():
raw = tutils.treader(b"\x05\x02")
out = BytesIO()
msg = socks.ServerGreeting.from_file(raw)
msg.assert_socks5()
msg.to_file(out)
assert out.getvalue() == raw.getvalue()
assert msg.ver == 5
assert msg.method == 0x02
def test_server_greeting_assert_socks5():
raw = tutils.treader(b"HTTP/1.1 200 OK" + b" " * 100)
msg = socks.ServerGreeting.from_file(raw)
try:
msg.assert_socks5()
except socks.SocksError as e:
assert "Invalid SOCKS version" in str(e)
assert "HTTP" in str(e)
else:
assert False
raw = tutils.treader(b"GET / HTTP/1.1" + b" " * 100)
msg = socks.ServerGreeting.from_file(raw)
try:
msg.assert_socks5()
except socks.SocksError as e:
assert "Invalid SOCKS version" in str(e)
assert "HTTP" not in str(e)
else:
assert False
def test_username_password_auth():
raw = tutils.treader(b"\x01\x03usr\x03psd\xBE\xEF")
out = BytesIO()
auth = socks.UsernamePasswordAuth.from_file(raw)
auth.assert_authver1()
assert raw.read(2) == b"\xBE\xEF"
auth.to_file(out)
assert out.getvalue() == raw.getvalue()[:-2]
assert auth.ver == socks.USERNAME_PASSWORD_VERSION.DEFAULT
assert auth.username == "usr"
assert auth.password == "psd"
def test_username_password_auth_assert_ver1():
raw = tutils.treader(b"\x02\x03usr\x03psd\xBE\xEF")
auth = socks.UsernamePasswordAuth.from_file(raw)
with pytest.raises(socks.SocksError):
auth.assert_authver1()
def test_username_password_auth_response():
raw = tutils.treader(b"\x01\x00\xBE\xEF")
out = BytesIO()
auth = socks.UsernamePasswordAuthResponse.from_file(raw)
auth.assert_authver1()
assert raw.read(2) == b"\xBE\xEF"
auth.to_file(out)
assert out.getvalue() == raw.getvalue()[:-2]
assert auth.ver == socks.USERNAME_PASSWORD_VERSION.DEFAULT
assert auth.status == 0
def test_username_password_auth_response_auth_assert_ver1():
raw = tutils.treader(b"\x02\x00\xBE\xEF")
auth = socks.UsernamePasswordAuthResponse.from_file(raw)
with pytest.raises(socks.SocksError):
auth.assert_authver1()
def test_message():
raw = tutils.treader(b"\x05\x01\x00\x03\x0bexample.com\xDE\xAD\xBE\xEF")
out = BytesIO()
msg = socks.Message.from_file(raw)
msg.assert_socks5()
assert raw.read(2) == b"\xBE\xEF"
msg.to_file(out)
assert out.getvalue() == raw.getvalue()[:-2]
assert msg.ver == 5
assert msg.msg == 0x01
assert msg.atyp == 0x03
assert msg.addr == ("example.com", 0xDEAD)
def test_message_assert_socks5():
raw = tutils.treader(b"\xEE\x01\x00\x03\x0bexample.com\xDE\xAD\xBE\xEF")
msg = socks.Message.from_file(raw)
with pytest.raises(socks.SocksError):
msg.assert_socks5()
def test_message_ipv4():
# Test ATYP=0x01 (IPV4)
raw = tutils.treader(b"\x05\x01\x00\x01\x7f\x00\x00\x01\xDE\xAD\xBE\xEF")
out = BytesIO()
msg = socks.Message.from_file(raw)
left = raw.read(2)
assert left == b"\xBE\xEF"
msg.to_file(out)
assert out.getvalue() == raw.getvalue()[:-2]
assert msg.addr == ("127.0.0.1", 0xDEAD)
def test_message_ipv6():
# Test ATYP=0x04 (IPV6)
ipv6_addr = "2001:db8:85a3:8d3:1319:8a2e:370:7344"
raw = tutils.treader(
b"\x05\x01\x00\x04" +
ipaddress.IPv6Address(ipv6_addr).packed +
b"\xDE\xAD\xBE\xEF")
out = BytesIO()
msg = socks.Message.from_file(raw)
assert raw.read(2) == b"\xBE\xEF"
msg.to_file(out)
assert out.getvalue() == raw.getvalue()[:-2]
assert msg.addr[0] == ipv6_addr
def test_message_invalid_host():
raw = tutils.treader(b"\xEE\x01\x00\x03\x0bexample@com\xDE\xAD\xBE\xEF")
with pytest.raises(socks.SocksError, match="Invalid hostname: b'example@com'"):
socks.Message.from_file(raw)
def test_message_invalid_rsv():
raw = tutils.treader(b"\x05\x01\xFF\x01\x7f\x00\x00\x01\xDE\xAD\xBE\xEF")
with pytest.raises(socks.SocksError):
socks.Message.from_file(raw)
def test_message_unknown_atyp():
raw = tutils.treader(b"\x05\x02\x00\x02\x7f\x00\x00\x01\xDE\xAD\xBE\xEF")
with pytest.raises(socks.SocksError):
socks.Message.from_file(raw)
m = socks.Message(5, 1, 0x02, ("example.com", 5050))
with pytest.raises(socks.SocksError):
m.to_file(BytesIO())
| mit | -4,691,458,891,179,515,000 | 27.890476 | 114 | 0.6448 | false |
kstilwell/tcex | tcex/threat_intelligence/mappings/victim.py | 1 | 14882 | """ThreatConnect TI Victim"""
from .mappings import Mappings
# import local modules for dynamic reference
module = __import__(__name__)
class Victim(Mappings):
"""Unique API calls for Victim API Endpoints"""
def __init__(self, tcex, **kwargs):
"""Initialize Class properties.
Args:
tcex (TcEx): An instantiated instance of TcEx object.
            owner (str, kwargs): The owner for this Victim. Defaults to the default Org when not provided.
name (str, kwargs): [Required for Create] The name for this Victim.
"""
super().__init__(tcex, 'Victim', 'victims', None, 'victim', None, kwargs.pop('owner', None))
self.name = None
for arg, value in kwargs.items():
self.add_key_value(arg, value)
def _set_unique_id(self, json_response):
"""Set the unique id of the Group."""
self.unique_id = json_response.get('id', '')
def add_asset(self, asset_type, body):
"""Add an asset to the Victim
Valid asset_type and optional identifier:
+ email
+ network
+ phone
+ social
+ web
Args:
asset_type: (str) Either email, network, phone, social, or web.
body: (dict) the body of the asset being added.
Return:
requests.Response: The response from the API call.
"""
if not self.can_update():
self._tcex.handle_error(910, [self.type])
if body is None:
body = {}
if asset_type is None:
self._tcex.handle_error(
925, ['asset_type', 'update_asset', 'asset_type', 'asset_type', asset_type]
)
return self.tc_requests.victim_add_asset(self.unique_id, asset_type, body)
def add_email_asset(self, address, address_type):
"""Add a email asset to the Victim
Args:
address: (str) The asset address.
address_type: (str) The asset address_type
Return:
requests.Response: The response from the API call.
"""
asset_data = {'address': address, 'addressType': address_type}
return self.add_asset('EMAIL', asset_data)
def add_key_value(self, key, value):
"""Add the key-value to the Victim. """
key = self._metadata_map.get(key, key)
if key in ['unique_id', 'id']:
self._unique_id = str(value)
else:
self._data[key] = value
@property
def _metadata_map(self):
"""Return metadata map for Group objects."""
return {'work_location': 'workLocation'}
def add_network_asset(self, account, network):
"""Add a network asset to the Victim
Args:
account: (str) The asset account.
network: (str) The asset network
Return:
requests.Response: The response from the API call.
"""
asset_data = {'account': account, 'network': network}
return self.add_asset('NETWORK', asset_data)
def add_phone_asset(self, phone_type):
"""Add a phone asset to the Victim
Args:
phone_type: (str) The asset phone type.
Return:
requests.Response: The response from the API call.
"""
asset_data = {'phoneType': phone_type}
return self.add_asset('PHONE', asset_data)
def add_social_asset(self, account, network):
"""Add a social asset to the Victim
Args:
account: (str) The asset account.
network: (str) The asset network
Return:
requests.Response: The response from the API call.
"""
asset_data = {'account': account, 'network': network}
return self.add_asset('SOCIAL', asset_data)
def add_web_asset(self, web_site):
"""Add a web asset to the Victim
Args:
web_site: (str) The asset account.
Return:
requests.Response: The response from the API call.
"""
asset_data = {'webSite': web_site}
return self.add_asset('WEB', asset_data)
@property
def as_entity(self):
"""Return the entity representation of the Victim."""
return {
'type': 'Victim',
'value': self.name,
'id': int(self.unique_id) if self.unique_id else None,
}
def assets(self, asset_type=None):
"""
Gets the assets of a Victim
Valid asset_type and optional identifier:
+ email
+ network
+ phone
+ social
+ web
Args:
asset_type: (str) The type of asset to be retrieved. Defaults to all of them.
Yield:
Json: The asset being retrieved.
"""
if not self.can_update():
self._tcex.handle_error(910, [self.type])
return self.tc_requests.victim_assets(
self.api_type, self.api_branch, self.unique_id, asset_type
)
def can_create(self):
"""Return True if victim can be create."""
return self.data.get('name') is not None
def delete_asset(self, asset_id, asset_type):
"""Delete an asset of the Victim
Valid asset_type and optional identifier:
+ email
+ network
+ phone
+ social
+ web
Args:
asset_id: (int) the id of the asset being deleted.
asset_type: (str) Either email, network, phone, social, or web.
Return:
requests.Response: The response from the API call.
"""
if not self.can_update():
self._tcex.handle_error(910, [self.type])
if asset_type is None:
self._tcex.handle_error(
925, ['asset_type', 'update_asset', 'asset_type', 'asset_type', asset_type]
)
return self.tc_requests.victim_delete_asset(self.unique_id, asset_type, asset_id)
def delete_email_asset(self, asset_id):
"""Delete an email asset of the Victim
Args:
asset_id: (int) the id of the asset being deleted.
Return:
requests.Response: The response from the API call.
"""
return self.delete_asset(asset_id, 'EMAIL')
def delete_network_asset(self, asset_id):
"""Delete an network asset of the Victim
Args:
asset_id: (int) the id of the asset being deleted.
Return:
requests.Response: The response from the API call.
"""
return self.delete_asset(asset_id, 'NETWORK')
def delete_phone_asset(self, asset_id):
"""Delete an phone asset of the Victim
Args:
asset_id: (int) the id of the asset being deleted.
Return:
requests.Response: The response from the API call.
"""
return self.delete_asset(asset_id, 'PHONE')
def delete_social_asset(self, asset_id):
"""Delete an social asset of the Victim
Args:
asset_id: (int) the id of the asset being deleted.
Return:
requests.Response: The response from the API call.
"""
return self.delete_asset(asset_id, 'SOCIAL')
def delete_web_asset(self, asset_id):
"""Delete an web asset of the Victim
Args:
asset_id: (int) the id of the asset being deleted.
Return:
requests.Response: The response from the API call.
"""
return self.delete_asset(asset_id, 'WEB')
def email_assets(self):
"""
Gets the email assets of a Victim
Yield:
Json: The asset being retrieved.
"""
return self.assets(asset_type='EMAIL')
def get_asset(self, asset_id, asset_type):
"""
        Gets a single asset of a Victim
Valid asset_type and optional identifier:
+ email
+ network
+ phone
+ social
+ web
Args:
            asset_id: (int) the id of the asset being retrieved.
asset_type: (str) The type of asset to be retrieved.
Return:
requests.Response: The response from the API call.
"""
if not self.can_update():
self._tcex.handle_error(910, [self.type])
if asset_type is None:
self._tcex.handle_error(
925, ['asset_type', 'update_asset', 'asset_type', 'asset_type', asset_type]
)
return self.tc_requests.victim_get_asset(self.unique_id, asset_type, asset_id)
def get_email_asset(self, asset_id):
"""Retrieve an email asset of the Victim
Args:
asset_id: (int) the id of the asset being retrieved.
Return:
requests.Response: The response from the API call.
"""
return self.get_asset(asset_id, 'EMAIL')
def get_network_asset(self, asset_id):
"""Retrieve an network asset of the Victim
Args:
asset_id: (int) the id of the asset being retrieved.
Return:
requests.Response: The response from the API call.
"""
return self.get_asset(asset_id, 'NETWORK')
def get_phone_asset(self, asset_id):
"""Retrieve an phone asset of the Victim
Args:
asset_id: (int) the id of the asset being retrieved.
Return:
requests.Response: The response from the API call.
"""
return self.get_asset(asset_id, 'PHONE')
def get_social_asset(self, asset_id):
"""Retrieve an social asset of the Victim
Args:
asset_id: (int) the id of the asset being retrieved.
Return:
requests.Response: The response from the API call.
"""
return self.get_asset(asset_id, 'SOCIAL')
def get_web_asset(self, asset_id):
"""Retrieve an web asset of the Victim
Args:
asset_id: (int) the id of the asset being retrieved.
Return:
requests.Response: The response from the API call.
"""
return self.get_asset(asset_id, 'WEB')
@staticmethod
def is_victim():
"""Return True if object is a victim."""
return True
@property
def name(self):
"""Return the Victim name."""
return self._data.get('name')
@name.setter
def name(self, name):
"""Set the Victim name."""
self._data['name'] = name
def network_assets(self):
"""
Gets the network assets of a Victim
Yield:
Json: The asset being retrieved.
"""
return self.assets(asset_type='NETWORK')
def social_assets(self):
"""
Gets the social assets of a Victim
Yield:
Json: The asset being retrieved.
"""
return self.assets(asset_type='SOCIAL')
def phone_assets(self):
"""
Gets the phone assets of a Victim
Yield:
Json: The asset being retrieved.
"""
return self.assets(asset_type='PHONE')
def update_asset(self, asset_type, asset_id, body=None):
"""
        Updates an asset of a Victim
Valid asset_type and optional identifier:
+ email
+ network
+ phone
+ social
+ web
Args:
            asset_id: (int) the id of the asset being updated.
asset_type: (str) The type of asset to be retrieved.
body: (dict) the body of the asset being updated.
Return:
requests.Response: The response from the API call.
"""
if body is None:
body = {}
if asset_type is None:
self._tcex.handle_error(
925, ['asset_type', 'update_asset', 'asset_type', 'asset_type', asset_type]
)
return self.tc_requests.victim_update_asset(self.unique_id, asset_type, asset_id, body)
def update_email_asset(self, asset_id, address=None, address_type=None):
"""Update a email asset of the Victim
Args:
asset_id: (int) the id of the asset being updated.
address: (str) The asset address.
address_type: (str) The asset address type
Return:
requests.Response: The response from the API call.
"""
asset_data = {}
if address:
asset_data['address'] = address
if address_type:
asset_data['addressType'] = address_type
return self.update_asset('EMAIL', asset_id, asset_data)
def update_network_asset(self, asset_id, account=None, network=None):
"""Update a network asset of the Victim
Args:
asset_id: (int) the id of the asset being updated.
account: (str) The asset account.
network: (str) The asset network
Return:
requests.Response: The response from the API call.
"""
asset_data = {}
if account:
asset_data['account'] = account
if network:
asset_data['network'] = network
return self.update_asset('NETWORK', asset_id, asset_data)
def update_phone_asset(self, asset_id, phone_type=None):
"""Update a phone asset of the Victim
Args:
asset_id: (int) the id of the asset being updated.
phone_type: (str) The phone type account.
Return:
requests.Response: The response from the API call.
"""
asset_data = {}
if phone_type:
asset_data['phoneType'] = phone_type
return self.update_asset('PHONE', asset_id, asset_data)
def update_social_asset(self, asset_id, account=None, network=None):
"""Update a social asset of the Victim
Args:
asset_id: (int) the id of the asset being updated.
account: (str) The asset account.
network: (str) The asset network
Return:
requests.Response: The response from the API call.
"""
asset_data = {}
if account:
asset_data['account'] = account
if network:
asset_data['network'] = network
return self.update_asset('SOCIAL', asset_id, asset_data)
def update_web_asset(self, asset_id, web_site=None):
"""Update a web asset of the Victim
Args:
asset_id: (int) the id of the asset being updated.
web_site: (str) The asset web_site.
Return:
requests.Response: The response from the API call.
"""
asset_data = {}
if web_site:
asset_data['webSite'] = web_site
return self.update_asset('WEB', asset_id, asset_data)
def web_assets(self):
"""
Gets the web assets of a Victim
Yield:
Json: The asset being retrieved.
"""
return self.assets(asset_type='WEB')
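# Hedged usage sketch (the TcEx instance, owner, and asset values are made up;
# a configured ThreatConnect connection is required):
#
#     victim = Victim(tcex, name='John Doe', owner='MyOrg')
#     victim.add_email_asset('jdoe@example.com', 'Corporate')
#     for asset in victim.email_assets():
#         ...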
| apache-2.0 | 6,182,735,286,011,776,000 | 28.237721 | 100 | 0.556511 | false |
wraithan/rplay | replayswithfriends/profiles/backends/urls.py | 1 | 2568 | """
URLconf for registration and activation, using django-registration's
default backend.
If the default behavior of these views is acceptable to you, simply
use a line like this in your root URLconf to set up the default URLs
for registration::
(r'^accounts/', include('registration.backends.default.urls')),
This will also automatically set up the views in
``django.contrib.auth`` at sensible default locations.
If you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead.
"""
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from registration.views import activate
from registration.views import register
from replayswithfriends.profiles.backends.forms import Sc2RegForm
urlpatterns = patterns('',
url(r'^activate/complete/$',
direct_to_template,
{'template': 'registration/activation_complete.html'},
name='registration_activation_complete'),
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad activation key should still get to the view;
# that way it can return a sensible "invalid key" message instead of a
# confusing 404.
url(r'^activate/(?P<activation_key>\w+)/$',
activate,
{'backend': 'registration.backends.default.DefaultBackend'},
name='registration_activate'),
url(r'^register/$',
register,
{
'backend': 'registration.backends.default.DefaultBackend',
},
name='registration_register'),
url(r'^register/complete/$',
direct_to_template,
{'template': 'registration/registration_complete.html'},
name='registration_complete'),
url(r'^register/closed/$',
direct_to_template,
{'template': 'registration/registration_closed.html'},
name='registration_disallowed'),
(r'', include('registration.auth_urls')),
)
| mit | -2,084,832,934,221,354,800 | 44.857143 | 99 | 0.551791 | false |
maximebf/budgettracker | budgettracker/categories.py | 1 | 3249 | from collections import namedtuple
from .data import filter_transactions_period
import re
class Category(namedtuple('Category', ['name', 'color', 'keywords', 'warning_threshold'])):
@classmethod
def from_dict(cls, dct):
return cls(name=dct['name'], color=dct.get('color'), keywords=dct.get('keywords', []),
warning_threshold=dct.get('warning_threshold'))
def to_dict(self):
return {
'name': self.name,
'color': self.color,
'keywords': self.keywords,
'warning_threshold': self.warning_threshold
}
class ComputedCategory(namedtuple('ComputedCategory', ['name', 'color', 'keywords', 'warning_threshold', 'amount', 'pct'])):
@classmethod
def from_category(cls, category, **kwargs):
warning_threshold_multiplier = kwargs.pop('warning_threshold_multiplier', 1)
warning_threshold = category.warning_threshold * warning_threshold_multiplier if category.warning_threshold else None
return cls(name=category.name, color=category.color, keywords=category.keywords,
warning_threshold=warning_threshold, **kwargs)
@property
def has_warning(self):
return self.warning_threshold and self.amount > self.warning_threshold
def to_str(self, famount):
return "%s = %s (%s%%)%s" % (self.name or 'Uncategorized', famount(self.amount), self.pct,
' /!\ %s' % (famount(self.warning_threshold)) if self.has_warning else '')
def compute_categories(transactions, categories=None, start_date=None, end_date=None, warning_threshold_multiplier=1):
categories = {c.name: c for c in categories or []}
amounts = {}
total = 0
for tx in filter_transactions_period(transactions, start_date, end_date):
if tx.amount >= 0:
continue
if not tx.categories:
total += abs(tx.amount)
continue
for name in sorted(tx.categories or []):
amounts.setdefault(name, 0)
amounts[name] += abs(tx.amount)
total += abs(tx.amount)
categorized_total = sum(amounts.values())
if total - categorized_total > 0:
amounts[None] = total - categorized_total
final = []
for name, amount in sorted(amounts.items(), key=lambda t: t[0]):
pct = round(amount * 100 / total, 0)
if name in categories:
final.append(ComputedCategory.from_category(categories[name], amount=amount, pct=pct,
warning_threshold_multiplier=warning_threshold_multiplier))
else:
final.append(ComputedCategory(name=name, color=None, keywords=[],
warning_threshold=None, amount=amount, pct=pct))
for category in categories.values():
if category.name not in amounts:
final.append(ComputedCategory.from_category(category, amount=0, pct=0,
warning_threshold_multiplier=warning_threshold_multiplier))
return final
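# Hedged usage sketch (transaction objects are assumed to expose ``amount`` and
# ``categories`` as used above; the category values are made up, and
# match_categories is defined just below):
#
#     food = Category(name='Food', color='#00ff00', keywords=['market'],
#                     warning_threshold=300.0)
#     match_categories([food], 'CARD PAYMENT - SUPER MARKET')  # -> ['Food']
#     for c in compute_categories(transactions, categories=[food]):
#         line = c.to_str(lambda amount: '%.2f EUR' % amount)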
def match_categories(categories, label):
matches = []
for category in categories:
for keyword in (category.keywords or []):
if re.search(r"\b%s\b" % keyword, label, re.I):
matches.append(category.name)
continue
return matches | mit | 3,897,346,843,493,143,000 | 40.139241 | 125 | 0.63558 | false |
tellapart/taba | src/taba/handlers/totals_counter.py | 1 | 2149 | # Copyright 2014 TellApart, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple Tab Handler that accepts single numeric inputs and tracks the count
and total values.
"""
from taba.handlers.tab_handler import TabHandler
class TotalsCounterState(object):
"""Wrapper class for TotalCounter States.
"""
def __init__(self, count, total):
self.count = count
self.total = total
class TotalsCounter(TabHandler):
"""Simple Tab Handler that accepts single numeric inputs and tracks the count
and total values.
"""
CURRENT_VERSION = 0
def NewState(self, client_id, name):
"""See base class definition."""
return TotalsCounterState(0, 0.0)
def FoldEvents(self, state, events):
"""See base class definition."""
count = 0
total = 0.0
for event in events:
count += 1
total += int(event.payload[0])
state.count += count
state.total += total
return state
def Reduce(self, states):
"""See base class definition."""
if len(states) == 0:
return None
elif len(states) == 1:
return states[0]
base = states[0]
for state in states[1:]:
base.count += state.count
base.total += state.total
return base
def Render(self, state, accept):
"""See base class definition."""
avg = state.total / state.count if state.count != 0 else 0
return '{"count": %d, "total": %.2f, "average": %.2f}' % (
state.count, state.total, avg)
def Upgrade(self, state, version):
"""See base class definition."""
return state
def ShouldPrune(self, state):
"""See base class definition."""
return (state.count == 0)
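# Minimal fold/render sketch (the bare event stub is hypothetical; real events
# come from the Taba client and expose a ``payload`` sequence):
#
#     class _Event(object):
#       payload = ('7',)
#     handler = TotalsCounter()
#     state = handler.FoldEvents(handler.NewState('client', 'requests'),
#                                [_Event(), _Event()])
#     handler.Render(state, None)
#     # -> '{"count": 2, "total": 14.00, "average": 7.00}'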
| apache-2.0 | -6,829,965,901,479,492,000 | 27.276316 | 79 | 0.669614 | false |
jrrpanix/master | examples/python/math/numpyExample.py | 1 | 6283 | import numpy as np
"""
by jrr
"""
#
# output array as csv
#
def printArray( A ) :
if len(A.shape) == 1 :
r=A.shape[0]
# if python3
# for i in range(0,r) : print( ( '%5.2f' ) % ( A[i] ) ,end=',')
# print('')
for i in range(0,r) : print( ( '%5.2f,' ) % ( A[i] ) ) ,
print('')
else :
r,c=A.shape
for i in range(0,r) :
for j in range(0,c) :print( ( '%5.2f,' ) % ( A[i,j] ) ) ,
print('')
#
# np.array sizes
#
def getNumRows(A) :
return A.shape[0]
def getNumCols(A) :
if len(A.shape) == 1 : return 1
return A.shape[1]
def getNumElements(A) :
return A.size
def getNumDim(A) :
return A.ndim
#
# different ways to create a numpy array
#
def simple(N=4) :
return np.arange(N)
def nbyN(N=4) :
return np.arange(N*N).reshape(N,N)
def nbyN_identity(N=4,dtype=float) :
return np.identity(N,dtype=dtype)
def nbyN_ones(N=4,dtype=float) :
return np.ones(N*N,dtype=dtype).reshape(N,N)
def nbyN_ones_alternate(N=4,dtype=float) :
return np.ones((N,N),dtype=dtype)
def nbyN_zeros(N=4,dtype=float) :
return np.zeros(N*N,dtype=dtype).reshape(N,N)
def random_uniform(rows,cols) :
return np.random.rand(rows,cols)
def fromList_Example0(dtype=float) :
myvec=[ [ 0 , 1 ] , # row 0
[ 1 , 0 ] # row 1
]
return np.array(myvec,dtype=dtype)
def fromList_Example1(dtype=float) :
return np.array([[1,2,3],[4,5,6],[7,8,9]],dtype=dtype)
def fromList_Example2(dtype=float) :
vec=[i for i in range(0,16)]
return np.array(vec).reshape(4,4)
def fromList_Example3(dtype=float) :
return np.array( range(16) , float ).reshape(4,4)
#
# Examples of math on rows or columns of 2D matrices
# Note this is done just by setting the axis parameter
# for a 2-D matrix:
#   axis=0 performs the operation column-wise
#   axis=1 performs the operation row-wise
# this extends to higher-dimensional matrices via the axis parameter
def meanCols(A) :
return np.mean(A,axis=0)
def maxInEachCol(A) :
return np.max(A,axis=0)
def minInEachCol(A) :
return np.min(A,axis=0)
def sumCols(A) :
return np.sum(A,axis=0)
def stdCols(A) :
return np.std(A,axis=0)
def sumRows(A) :
return np.sum(A,axis=1)
def minInEachRow(A) :
return np.min(A,axis=1)
def maxInEachRow(A) :
return np.max(A,axis=1)
#
# run creation examples
#
def creationExamples() :
print('one dimensional ')
printArray(simple())
print('n x n')
printArray(nbyN())
print('idenity ')
printArray(nbyN_identity())
print('nxn of 0\'s')
printArray(nbyN_zeros())
print('nxn of 1\'s')
printArray(nbyN_ones())
print('nxn of 1\'s alternative way of creating')
printArray(nbyN_ones_alternate())
print('random uniform 4x3 matrix')
printArray(random_uniform(4,3))
print('create from python list,example0')
printArray(fromList_Example0())
print('create from python list,example1')
printArray(fromList_Example1())
print('create from python list example2')
printArray(fromList_Example2())
print('create from python list example3')
printArray(fromList_Example3())
#
# run math Examples
#
def mathExamples(A) :
print('math examples input matrix')
printArray(A)
# math by columns
print('mean col')
printArray(meanCols(A))
print('std dev col')
printArray(stdCols(A))
print('sum col')
printArray(sumCols(A))
print('min col')
printArray(minInEachCol(A))
print('max col')
printArray(maxInEachCol(A))
# math by rows
print('sum rows')
printArray(sumRows(A))
print('min rows')
printArray(minInEachRow(A))
print('max rows')
printArray(maxInEachRow(A))
#
# size Examples
#
def sizeExamples(A) :
print('size examples')
printArray(A)
print('rows =',getNumRows(A))
print('cols =',getNumCols(A))
print('size =',getNumElements(A))
print('dim =',getNumDim(A))
#
# slice examples
#
# for a 2-dimensional np.array
# suppose X is a 2-dimensional array
# all the rows -> X[:,some_col] , use ':'
# example :
#   X[:,1] -> get everything from col 1 (all the rows, just col 1)
#
# n0:n1 -> interpreted as range(n0,n1): start at n0, stop at n1 but don't include n1
# example :
#   X[1:3,:] -> get rows 1 and 2 and all the columns
#
# An advantage of a numpy array is that one can get rows or columns via random access
# example: get first and last column of a matrix (this can't be done with a python list)
#   X[:,[0,-1]]
def sliceExamples(X) :
print('slice examples')
printArray(X)
print('get all elements in col 0')
printArray(X[:,0])
print('get all elements in row 1')
printArray(X[1,:])
print('get last column')
printArray(X[:,-1])
print('get last row')
printArray(X[-1,:])
print('get first 2 elements in row 0')
printArray(X[0,0:2])
print('get last 2 elements in row 2')
cols=getNumCols(X)
printArray(X[2,-2:cols])
print('get column 1 values in 1st and 2nd rows')
printArray(X[1:3,1])
    print('get column 0 and last column')
printArray(X[:,[0,-1]])
print('get 1st,3rd rows')
printArray(X[[1,3],:])
#
# conditional examples
#
def conditionalExamples(X,cutoff=5) :
    # return tuples of where elements are < cutoff in X
    print('conditional examples input matrix X,cutoff value=',cutoff)
printArray(X)
a=np.where(X < cutoff)
print('np.where(X < cutoff)')
print(a)
# get all the elements that are less than X
print('X[np.where(X < cutoff)]')
a=X[np.where(X < cutoff)]
print(a)
# for the matrix X if the value is < cutoff put in value from X
# otherwise put in 99
print('np.where(X < cutoff,X,99)')
a=np.where(X < cutoff,X,99)
print(a)
#
    # let's change uniform 0-1 data to be either -1 if < .5 or 1 if >= .5
#
U=random_uniform(5,5)
print('random 5 x 5 matrix')
printArray(U)
print('change matrix for -1 or 1 if < .5 -1 otherwise 1')
print('U2=np.where(U<.5,-1,1)')
U2=np.where(U<.5,-1,1)
printArray(U2)
if __name__ == '__main__' :
creationExamples()
mathExamples(nbyN())
sizeExamples(nbyN(3))
sliceExamples(nbyN(5))
conditionalExamples(nbyN(4),cutoff=6)
| gpl-3.0 | 1,206,430,020,656,617,500 | 20.891986 | 82 | 0.614515 | false |
axsemantics/rohrpost | tests/test_ping.py | 1 | 1151 | from rohrpost.handlers import handle_ping
def test_ping(consumer):
handle_ping(consumer=consumer, request={"id": 123})
assert consumer.closed is False
assert len(consumer.data) == 1
data = consumer.data[-1]
assert data["id"] == 123
assert data["type"] == "pong"
assert "data" not in data
def test_ping_additional_data(consumer):
handle_ping(
consumer=consumer,
request={
"id": 123,
"type": "ping",
"data": {"some": "data", "other": "data", "handler": "foo"},
},
)
assert consumer.closed is False
assert len(consumer.data) == 1
data = consumer.data[-1]
assert data["id"] == 123
assert data["type"] == "pong"
assert data["data"]["some"] == "data"
assert data["data"]["handler"] == "foo"
def test_ping_additional_non_dict_data(consumer):
handle_ping(consumer=consumer, request={"id": 123, "type": "ping", "data": 1})
assert consumer.closed is False
assert len(consumer.data) == 1
data = consumer.data[-1]
assert data["id"] == 123
assert data["type"] == "pong"
assert data["data"]["data"] == 1
| mit | 487,192,422,104,183,300 | 26.404762 | 82 | 0.586447 | false |
project-oak/hafnium-verification | experiments/ownership-inference/infer/infer/lib/python/inferlib/capture/ant.py | 1 | 2742 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import os
from . import util
from inferlib import jwlib
MODULE_NAME = __name__
MODULE_DESCRIPTION = '''Run analysis of code built with a command like:
ant [options] [target]
Analysis examples:
infer -- ant compile'''
LANG = ['java']
def gen_instance(*args):
return AntCapture(*args)
# This creates an empty argparser for the module, which provides only
# description/usage information and no arguments.
create_argparser = util.base_argparser(MODULE_DESCRIPTION, MODULE_NAME)
class AntCapture:
def __init__(self, args, cmd):
self.args = args
util.log_java_version()
logging.info(util.run_cmd_ignore_fail([cmd[0], '-version']))
# TODO: make the extraction of targets smarter
self.build_cmd = [cmd[0], '-verbose'] + cmd[1:]
def is_interesting(self, content):
return self.is_quoted(content) or content.endswith('.java')
def is_quoted(self, argument):
quote = '\''
return len(argument) > 2 and argument[0] == quote\
and argument[-1] == quote
def remove_quotes(self, argument):
if self.is_quoted(argument):
return argument[1:-1]
else:
return argument
def get_infer_commands(self, verbose_output):
javac_pattern = '[javac]'
argument_start_pattern = 'Compilation arguments'
calls = []
javac_arguments = []
collect = False
for line in verbose_output.split('\n'):
if javac_pattern in line:
if argument_start_pattern in line:
collect = True
if javac_arguments != []:
capture = jwlib.create_infer_command(javac_arguments)
calls.append(capture)
javac_arguments = []
if collect:
pos = line.index(javac_pattern) + len(javac_pattern)
content = line[pos:].strip()
if self.is_interesting(content):
arg = self.remove_quotes(content)
javac_arguments.append(arg)
if javac_arguments != []:
capture = jwlib.create_infer_command(javac_arguments)
calls.append(capture)
javac_arguments = []
return calls
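    # Hedged illustration of the parsing above (synthetic ``ant -verbose`` lines,
    # not captured from a real build):
    #
    #     [javac] Compilation arguments:
    #     [javac] '-d'
    #     [javac] '/tmp/classes'
    #     [javac] Foo.java
    #
    # would yield a single infer command wrapping ['-d', '/tmp/classes', 'Foo.java'].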
def capture(self):
(code, (verbose_out, _)) = util.get_build_output(self.build_cmd)
if code != os.EX_OK:
return code
cmds = self.get_infer_commands(verbose_out)
return util.run_compilation_commands(cmds)
| apache-2.0 | 8,061,196,830,654,131,000 | 31.642857 | 77 | 0.588257 | false |
Yelp/paasta | tests/test_paasta_maintenance.py | 1 | 8242 | # Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from paasta_tools import paasta_maintenance
@mock.patch("paasta_tools.mesos_maintenance.is_host_drained", autospec=True)
@mock.patch(
"paasta_tools.mesos_maintenance.get_hosts_past_maintenance_start", autospec=True
)
def test_is_safe_to_kill(mock_get_hosts_past_maintenance_start, mock_is_host_drained):
mock_is_host_drained.return_value = False
mock_get_hosts_past_maintenance_start.return_value = []
assert not paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = False
mock_get_hosts_past_maintenance_start.return_value = ["blah"]
assert paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = True
mock_get_hosts_past_maintenance_start.return_value = ["blah"]
assert paasta_maintenance.is_safe_to_kill("blah")
mock_is_host_drained.return_value = True
mock_get_hosts_past_maintenance_start.return_value = []
assert paasta_maintenance.is_safe_to_kill("blah")
@mock.patch("paasta_tools.paasta_maintenance.is_hostname_local", autospec=True)
def test_is_safe_to_drain_rejects_non_localhosts(mock_is_hostname_local,):
mock_is_hostname_local.return_value = False
assert paasta_maintenance.is_safe_to_drain("non-localhost") is False
@mock.patch("paasta_tools.paasta_maintenance.getfqdn", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.gethostname", autospec=True)
def test_is_hostname_local_works(mock_gethostname, mock_getfqdn):
mock_gethostname.return_value = "foo"
mock_getfqdn.return_value = "foo.bar"
assert paasta_maintenance.is_hostname_local("localhost") is True
assert paasta_maintenance.is_hostname_local("foo") is True
assert paasta_maintenance.is_hostname_local("foo.bar") is True
assert paasta_maintenance.is_hostname_local("something_different") is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
def test_are_local_tasks_in_danger_fails_safe_with_false(
mock_load_system_paasta_config,
):
"""If something unexpected happens that we don't know how to
interpret, we make sure that we fail with "False" so that processes
move on and don't deadlock. In general the answer to "is it safe to drain"
is "yes" if mesos can't be reached, etc"""
mock_load_system_paasta_config.side_effect = Exception
assert paasta_maintenance.are_local_tasks_in_danger() is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
def test_are_local_tasks_in_danger_is_false_with_nothing_running(
mock_marathon_services_running_here, mock_load_system_paasta_config
):
mock_marathon_services_running_here.return_value = []
assert paasta_maintenance.are_local_tasks_in_danger() is False
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
@mock.patch("paasta_tools.paasta_maintenance.get_backends", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy", autospec=True)
def test_are_local_tasks_in_danger_is_false_with_an_unhealthy_service(
mock_is_healthy_in_haproxy,
mock_get_backends,
mock_marathon_services_running_here,
mock_load_system_paasta_config,
):
mock_is_healthy_in_haproxy.return_value = False
mock_marathon_services_running_here.return_value = [("service", "instance", 42)]
assert paasta_maintenance.are_local_tasks_in_danger() is False
mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)
@mock.patch(
"paasta_tools.paasta_maintenance.utils.load_system_paasta_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.marathon_services_running_here", autospec=True
)
@mock.patch("paasta_tools.paasta_maintenance.get_backends", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.is_healthy_in_haproxy", autospec=True)
@mock.patch("paasta_tools.paasta_maintenance.synapse_replication_is_low", autospec=True)
def test_are_local_tasks_in_danger_is_true_with_an_healthy_service_in_danger(
mock_synapse_replication_is_low,
mock_is_healthy_in_haproxy,
mock_get_backends,
mock_marathon_services_running_here,
mock_load_system_paasta_config,
):
mock_is_healthy_in_haproxy.return_value = True
mock_synapse_replication_is_low.return_value = True
mock_marathon_services_running_here.return_value = [("service", "instance", 42)]
assert paasta_maintenance.are_local_tasks_in_danger() is True
mock_is_healthy_in_haproxy.assert_called_once_with(42, mock.ANY)
assert mock_synapse_replication_is_low.call_count == 1
@mock.patch(
"paasta_tools.paasta_maintenance.load_marathon_service_config", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.load_smartstack_info_for_service", autospec=True
)
@mock.patch(
"paasta_tools.paasta_maintenance.get_expected_instance_count_for_namespace",
autospec=True,
)
@mock.patch(
"paasta_tools.paasta_maintenance.get_replication_for_services", autospec=True
)
def test_synapse_replication_is_low_understands_underreplicated_services(
mock_get_replication_for_services,
mock_get_expected_instance_count_for_namespace,
mock_load_smartstack_info_for_service,
mock_load_marathon_service_config,
):
mock_load_marathon_service_config.return_value.get_registrations.return_value = (
"service.main"
)
mock_get_expected_instance_count_for_namespace.return_value = 3
mock_load_smartstack_info_for_service.return_value = {
"local_region": {"service.main": "up"}
}
mock_get_replication_for_services.return_value = {"service.main": 1}
local_backends = ["foo"]
system_paasta_config = mock.MagicMock()
assert (
paasta_maintenance.synapse_replication_is_low(
service="service",
instance="instance",
system_paasta_config=system_paasta_config,
local_backends=local_backends,
)
is True
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_haproxy_healthy_path(mock_gethostbyname,):
mock_gethostbyname.return_value = "192.0.2.1"
local_port = 42
backends = [
{"status": "UP", "pxname": "service.main", "svname": "192.0.2.1:42_hostname"}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is True
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_haproxy_unhealthy_path(mock_gethostbyname,):
mock_gethostbyname.return_value = "192.0.2.1"
local_port = 42
backends = [
{"status": "DOWN", "pxname": "service.main", "svname": "192.0.2.1:42_hostname"}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is False
)
@mock.patch("paasta_tools.paasta_maintenance.gethostbyname", autospec=True)
def test_is_healthy_in_haproxy_missing_backend_entirely(mock_gethostbyname,):
mock_gethostbyname.return_value = "192.0.2.1"
local_port = 42
backends = [
{
"status": "DOWN",
"pxname": "service.main",
"svname": "192.0.2.4:666_otherhostname",
}
]
assert (
paasta_maintenance.is_healthy_in_haproxy(
local_port=local_port, backends=backends
)
is False
)
| apache-2.0 | -2,317,963,221,787,612,700 | 37.334884 | 88 | 0.715846 | false |
thom-at-redhat/cfme_tests | cfme/tests/infrastructure/test_advanced_search_host.py | 1 | 10380 | # -*- coding: utf-8 -*-
"""This testing module tests the behaviour of the search box in the Hosts section"""
import fauxfactory
import pytest
from itertools import dropwhile
from cfme.fixtures import pytest_selenium as sel
from cfme.infrastructure import host
from utils.providers import setup_a_provider
from cfme.web_ui import search
from cfme.web_ui.cfme_exception import (assert_no_cfme_exception,
is_cfme_exception, cfme_exception_text)
from utils.version import current_version
@pytest.fixture(scope="module")
def hosts():
"""Ensure the infra providers are set up and get list of hosts"""
try:
setup_a_provider(prov_type="infra")
except Exception:
pytest.skip("It's not possible to set up any providers, therefore skipping")
sel.force_navigate("infrastructure_hosts")
search.ensure_no_filter_applied()
return host.get_all_hosts()
@pytest.fixture(scope="module")
def hosts_with_vm_count(hosts):
"""Returns a list of tuples (hostname, vm_count)"""
hosts_with_vm_count = []
for host_name in hosts:
hosts_with_vm_count.append((host_name, int(host.find_quadicon(host_name, True).no_vm)))
return sorted(hosts_with_vm_count, key=lambda tup: tup[1])
@pytest.yield_fixture(scope="function")
def close_search():
"""We must do this otherwise it's not possible to navigate after test!"""
yield
search.ensure_advanced_search_closed()
def get_expression(user_input=False, op=">"):
if current_version() >= "5.4":
expression = "fill_count(Host / Node.VMs, %s" % op
else:
expression = "fill_count(Host.VMs, %s" % op
if user_input:
return expression + ")"
else:
return expression + ", %d)"
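# For illustration, on a 5.4+ appliance the helper above yields, e.g.:
#     get_expression(False) % 3    # -> "fill_count(Host / Node.VMs, >, 3)"
#     get_expression(True, ">=")   # -> "fill_count(Host / Node.VMs, >=)"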
pytestmark = [pytest.mark.usefixtures("close_search")]
@pytest.fixture(scope="module")
def host_with_median_vm(hosts_with_vm_count):
"""We'll pick a host with median number of vms"""
return hosts_with_vm_count[len(hosts_with_vm_count) // 2]
def test_can_do_advanced_search():
sel.force_navigate("infrastructure_hosts")
assert search.is_advanced_search_possible(), "Cannot do advanced search here!"
@pytest.mark.requires("test_can_do_advanced_search")
def test_can_open_advanced_search():
sel.force_navigate("infrastructure_hosts")
search.ensure_advanced_search_open()
@pytest.mark.requires("test_can_open_advanced_search")
def test_filter_without_user_input(hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
median_host, median_vm_count = host_with_median_vm
# We will filter out hosts with less than median VMs
more_than_median_hosts = list(dropwhile(lambda h: h[1] <= median_vm_count, hosts_with_vm_count))
# Set up the filter
search.fill_and_apply_filter(get_expression(False) % median_vm_count)
assert_no_cfme_exception()
assert len(more_than_median_hosts) == len(host.get_all_hosts(do_not_navigate=True))
@pytest.mark.requires("test_can_open_advanced_search")
@pytest.mark.meta(blockers=["GH#ManageIQ/manageiq:2322"])
def test_filter_with_user_input(hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
median_host, median_vm_count = host_with_median_vm
# We will filter out hosts with less than median VMs
more_than_median_hosts = list(dropwhile(lambda h: h[1] <= median_vm_count, hosts_with_vm_count))
# Set up the filter
search.fill_and_apply_filter(get_expression(True), {"COUNT": median_vm_count})
assert_no_cfme_exception()
assert len(more_than_median_hosts) == len(host.get_all_hosts(do_not_navigate=True))
@pytest.mark.requires("test_can_open_advanced_search")
@pytest.mark.meta(blockers=["GH#ManageIQ/manageiq:2322"])
def test_filter_with_user_input_and_cancellation(hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
median_host, median_vm_count = host_with_median_vm
# Set up the filter
search.fill_and_apply_filter(
get_expression(True),
{"COUNT": median_vm_count},
cancel_on_user_filling=True
)
assert_no_cfme_exception()
@pytest.mark.requires("test_can_open_advanced_search")
def test_filter_save_cancel(hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
median_host, median_vm_count = host_with_median_vm
filter_name = fauxfactory.gen_alphanumeric()
# Try save filter
search.save_filter(get_expression(True), filter_name, cancel=True)
assert_no_cfme_exception()
with pytest.raises(sel.NoSuchElementException):
search.load_filter(filter_name) # does not exist
@pytest.mark.requires("test_can_open_advanced_search")
def test_filter_save_and_load(request, hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
median_host, median_vm_count = host_with_median_vm
# We will filter out hosts with less than median VMs
more_than_median_hosts = list(dropwhile(lambda h: h[1] <= median_vm_count, hosts_with_vm_count))
filter_name = fauxfactory.gen_alphanumeric()
# Try save filter
search.save_filter(get_expression(True), filter_name)
assert_no_cfme_exception()
search.reset_filter()
search.load_and_apply_filter(filter_name, fill_callback={"COUNT": median_vm_count})
assert_no_cfme_exception()
request.addfinalizer(search.delete_filter)
assert len(more_than_median_hosts) == len(host.get_all_hosts(do_not_navigate=True))
@pytest.mark.requires("test_can_open_advanced_search")
def test_filter_save_and_cancel_load(request, hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
median_host, median_vm_count = host_with_median_vm
filter_name = fauxfactory.gen_alphanumeric()
# Try save filter
search.save_filter(get_expression(True), filter_name)
def cleanup():
sel.force_navigate("infrastructure_hosts")
search.load_filter(filter_name)
search.delete_filter()
request.addfinalizer(cleanup)
assert_no_cfme_exception()
search.reset_filter()
search.load_filter(filter_name, cancel=True)
assert_no_cfme_exception()
@pytest.mark.requires("test_can_open_advanced_search")
def test_filter_save_and_load_cancel(request, hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
median_host, median_vm_count = host_with_median_vm
filter_name = fauxfactory.gen_alphanumeric()
# Try save filter
search.save_filter(get_expression(True), filter_name)
def cleanup():
sel.force_navigate("infrastructure_hosts")
search.load_filter(filter_name)
search.delete_filter()
request.addfinalizer(cleanup)
assert_no_cfme_exception()
search.reset_filter()
search.load_and_apply_filter(
filter_name,
fill_callback={"COUNT": median_vm_count},
cancel_on_user_filling=True
)
assert_no_cfme_exception()
def test_quick_search_without_filter(request, hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
search.ensure_no_filter_applied()
assert_no_cfme_exception()
median_host, median_vm_count = host_with_median_vm
# Make sure that we empty the regular search field after the test
request.addfinalizer(search.ensure_normal_search_empty)
# Filter this host only
search.normal_search(median_host)
assert_no_cfme_exception()
# Check it is there
all_hosts_visible = host.get_all_hosts(do_not_navigate=True)
assert len(all_hosts_visible) == 1 and median_host in all_hosts_visible
def test_quick_search_with_filter(request, hosts, hosts_with_vm_count, host_with_median_vm):
sel.force_navigate("infrastructure_hosts")
median_host, median_vm_count = host_with_median_vm
search.fill_and_apply_filter(
get_expression(False, ">=") % median_vm_count
)
assert_no_cfme_exception()
# Make sure that we empty the regular search field after the test
request.addfinalizer(search.ensure_normal_search_empty)
# Filter this host only
search.normal_search(median_host)
assert_no_cfme_exception()
# Check it is there
all_hosts_visible = host.get_all_hosts(do_not_navigate=True)
assert len(all_hosts_visible) == 1 and median_host in all_hosts_visible
def test_can_delete_filter():
sel.force_navigate("infrastructure_hosts")
filter_name = fauxfactory.gen_alphanumeric()
search.save_filter(get_expression(False) % 0, filter_name)
assert_no_cfme_exception()
search.reset_filter()
assert_no_cfme_exception()
search.load_filter(filter_name)
assert_no_cfme_exception()
if not search.delete_filter():
raise pytest.fail("Cannot delete filter! Probably the delete button is not present!")
assert_no_cfme_exception()
@pytest.mark.meta(blockers=[1097150])
def test_delete_button_should_appear_after_save(request):
"""Delete button appears only after load, not after save"""
sel.force_navigate("infrastructure_hosts")
filter_name = fauxfactory.gen_alphanumeric()
search.save_filter(get_expression(False) % 0, filter_name)
def cleanup():
sel.force_navigate("infrastructure_hosts")
search.load_filter(filter_name)
search.delete_filter()
request.addfinalizer(cleanup)
if not search.delete_filter(): # Returns False if the button is not present
pytest.fail("Could not delete filter right after saving!")
@pytest.mark.meta(blockers=[1097150])
def test_cannot_delete_more_than_once(request):
"""When Delete button appars, it does not want to go away"""
sel.force_navigate("infrastructure_hosts")
filter_name = fauxfactory.gen_alphanumeric()
search.save_filter(get_expression(False) % 0, filter_name)
search.load_filter(filter_name) # circumvent the thing happening in previous test
# Delete once
if not search.delete_filter():
pytest.fail("Could not delete the filter even first time!")
assert_no_cfme_exception()
# Try it second time
if search.delete_filter(): # If the button is there, it says True
# This should not happen
msg = "Delete twice accepted!"
if is_cfme_exception():
msg += " CFME Exception text: `{}`".format(cfme_exception_text())
pytest.fail(msg)
| gpl-2.0 | 7,486,006,980,132,398,000 | 36.608696 | 100 | 0.703757 | false |
us-ignite/us_ignite | us_ignite/testbeds/views.py | 1 | 2088 | from django.http import Http404
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from us_ignite.common import pagination
from us_ignite.common.response import json_response
from us_ignite.maps.utils import get_location_dict
from us_ignite.testbeds.models import Testbed
from us_ignite.testbeds.forms import TestbedFilterForm
def testbed_detail(request, slug):
"""Detail of a ``testbed``."""
instance = get_object_or_404(
Testbed.objects.select_related('contact'), slug__exact=slug)
if not instance.is_visible_by(request.user):
raise Http404
context = {
'object': instance,
'is_editable': instance.is_editable_by(request.user),
'app_list': instance.applications.all(),
}
return TemplateResponse(request, 'testbed/object_detail.html', context)
def get_testbed_query(data):
"""Transform cleaned data in Testbed."""
query = {}
for key, value in data.items():
if key.startswith('passes_'):
key = '%s__gte' % key
if value:
query[key] = value
return query
def testbed_list(request):
"""List of all the testbeds."""
testbed_query = {}
if request.GET:
form = TestbedFilterForm(request.GET)
if form.is_valid():
testbed_query = get_testbed_query(form.cleaned_data)
else:
form = TestbedFilterForm()
page_no = pagination.get_page_no(request.GET)
object_list = Testbed.active.filter(**testbed_query)
page = pagination.get_page(object_list, page_no)
context = {
'page': page,
'form': form,
}
return TemplateResponse(request, 'testbed/object_list.html', context)
def get_app_list(testbed):
return [get_location_dict(a, 'app') for a in testbed.applications.all()]
def testbed_locations_json(request, slug):
testbed = get_object_or_404(Testbed.active, slug__exact=slug)
item_list =[get_location_dict(testbed, 'testbed')]
item_list += get_app_list(testbed)
return json_response(item_list, callback='map.render')
| bsd-3-clause | 2,731,798,187,636,176,400 | 31.625 | 76 | 0.670019 | false |
team23/django_backend | django_backend/backend/form_tabs.py | 1 | 7071 | from .renderable import Renderable
class BaseFormElement(Renderable):
def __init__(self, template_name=None, position=0):
self.position = position
super(BaseFormElement, self).__init__(template_name=template_name)
def resolve_help_text(self, context):
return None
@property
def states(self):
"""
A helper so that you can call in the template::
{% render tab.states %}
"""
tab = self
class RenderableStates(object):
def render(self, context=None):
return ' '.join(tab.get_states(context))
return RenderableStates()
def get_states(self, context):
"""
Return a list of states that this element is in. This could be ``error``
for example if a containing field has an error. Those states can be
added as css classes to the template. You can then use those to style it
accordingly.
Where and whether the css classes are added to the template is up the
the subclass like tabs, rows, etc.
"""
return []
class FormTab(BaseFormElement):
template_name = 'django_backend/formlayout/table.html'
def __init__(self, name, rows, *args, **kwargs):
self.name = name
self._rows = map(self._initialize_row, rows)
super(FormTab, self).__init__(*args, **kwargs)
def add_row(self, row):
self._rows.append(self._initialize_row(row))
# Make calls chainable.
return self
def _initialize_row(self, row):
if isinstance(row, dict):
return FormRow(row.get('label', ''), row.get('fields', []))
# TODO: Add possibility to just add field list directly
# (row should be created on the fly, using the first field label)
if isinstance(row, list):
return FormRow(None, row)
return row
def resolve_has_error(self, context):
"""
Return ``True`` if one of the containing rows contains an form
validation error.
"""
return any(
row.resolve_has_error(context)
for row in self._rows
if hasattr(row, 'resolve_has_error'))
def get_states(self, context):
states = list(super(FormTab, self).get_states(context))
if self.resolve_has_error(context):
states += ['has-error']
return states
def get_context_data(self, context, **kwargs):
kwargs.update({
'tab': self,
'tab_rows': self.rows,
})
return super(FormTab, self).get_context_data(context, **kwargs)
@property
def rows(self):
return list(sorted(self._rows, cmp=lambda x,y: cmp(x.position, y.position)))
@property
def fields(self):
fields = []
for row in self.rows:
fields = fields + row.fields
return fields
class FormRow(BaseFormElement):
template_name = 'django_backend/formlayout/tr.html'
def __init__(self, label, fields, help_text=None, *args, **kwargs):
self.label = label
self._fields = map(self._initialize_field, fields)
self.help_text = help_text
super(FormRow, self).__init__(*args, **kwargs)
def add_field(self, field):
self._fields.append(self._initialize_field(field))
# Make calls chainable.
return self
def _initialize_field(self, field):
if isinstance(field, basestring):
return FormField(field)
return field
def resolve_has_error(self, context):
"""
Return ``True`` if one of the containing fields contains an form
validation error.
"""
return any(
field.resolve_has_error(context)
for field in self._fields
if hasattr(field, 'resolve_has_error'))
def get_states(self, context):
states = list(super(FormRow, self).get_states(context))
if self.resolve_has_error(context):
states += ['has-error']
return states
def resolve_default_label(self, context):
if self.label:
return self.label
if len(self.fields) == 1:
return self.fields[0].resolve_label(context)
return ''
def resolve_help_text(self, context):
if self.help_text:
return self.help_text
if len(self.fields) == 1:
return self.fields[0].resolve_help_text(context)
return ''
def resolve_required(self, context):
return any(f.resolve_required(context) for f in self._fields)
def get_context_data(self, context, **kwargs):
kwargs.update({
'row': self,
'row_label': self.resolve_default_label(context),
'row_fields': self.fields,
# I think that's not required anymore.
#'row_form_fields': [f.resolve_field(context) for f in self.fields],
'row_help_text': self.resolve_help_text(context),
'row_required': self.resolve_required(context),
})
return kwargs
@property
def fields(self):
return list(sorted(self._fields,
cmp=lambda x, y: cmp(x.position, y.position)))
def field_names(self):
return [field.field for field in self.fields]
class FormField(BaseFormElement):
template_name = 'django_backend/formlayout/field.html'
def __init__(self, field, *args, **kwargs):
self.field = field
super(FormField, self).__init__(*args, **kwargs)
def get_states(self, context):
states = list(super(FormField, self).get_states(context))
if self.resolve_has_error(context):
states += ['has-error']
return states
def resolve_has_error(self, context):
field = self.resolve_field(context)
if field and hasattr(field, 'errors'):
return bool(field.errors)
return False
def resolve_form(self, context):
if 'form' in context:
return context['form']
def resolve_field(self, context):
form = self.resolve_form(context)
if form is None:
return # we need the form to exists
try:
return form[self.field]
except KeyError:
return
def resolve_label(self, context):
return self.resolve_field(context).label
def resolve_help_text(self, context):
return self.resolve_field(context).help_text
def resolve_required(self, context):
return self.resolve_field(context).field.required
def get_context_data(self, context, **kwargs):
form_field = self.resolve_field(context)
kwargs.update({
'field': self,
'field_name': self.field,
'field_form_field': form_field,
})
return super(FormField, self).get_context_data(context, **kwargs)
def render(self, context):
form_field = self.resolve_field(context)
if form_field is None:
return ''
return super(FormField, self).render(context)
| bsd-3-clause | -1,468,785,885,214,893,300 | 30.426667 | 84 | 0.589733 | false |
edx/credentials | credentials/apps/credentials/models.py | 1 | 12660 | """
Models for the credentials service.
"""
import logging
import uuid
import bleach
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.db import models
from django.urls import reverse
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from django_extensions.db.models import TimeStampedModel
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from simple_history.models import HistoricalRecords
from credentials.apps.catalog.api import get_program_details_by_uuid
from credentials.apps.catalog.models import CourseRun, Program
from credentials.apps.core.utils import _choices
from credentials.apps.credentials import constants
from credentials.apps.credentials.exceptions import NoMatchingProgramException
log = logging.getLogger(__name__)
def signatory_assets_path(instance, filename):
"""
Returns path for signatory assets.
Arguments:
instance(Signatory): Signatory object
filename(str): file to upload
Returns:
Path to asset.
"""
return f"signatories/{instance.id}/{filename}"
def validate_image(image):
"""
Validates that a particular image is small enough.
"""
if image.size > (250 * 1024):
raise ValidationError(_("The image file size must be less than 250KB."))
def validate_course_key(course_key):
"""
Validate the course_key is correct.
"""
try:
CourseKey.from_string(course_key)
except InvalidKeyError:
raise ValidationError(_("Invalid course key."))
class AbstractCredential(TimeStampedModel):
"""
Abstract Credentials configuration model.
.. no_pii: This model has no PII.
"""
site = models.ForeignKey(Site, on_delete=models.CASCADE)
is_active = models.BooleanField(default=False)
class Meta:
abstract = True
class Signatory(TimeStampedModel):
"""
Signatory model to add certificate signatories.
.. no_pii: This model has no learner PII. The name used here is the name of the professor who signed the
certificate.
"""
name = models.CharField(max_length=255)
title = models.CharField(max_length=255)
organization_name_override = models.CharField(
max_length=255,
null=True,
blank=True,
help_text=_("Signatory organization name if its different from issuing organization."),
)
image = models.ImageField(
help_text=_("Image must be square PNG files. The file size should be under 250KB."),
upload_to=signatory_assets_path,
validators=[validate_image],
)
class Meta:
verbose_name_plural = "Signatories"
def __str__(self):
return f"{self.name}, {self.title}"
def save(self, *args, **kwargs):
"""
A primary key/ID will not be assigned until the model is written to
the database. Given that our file path relies on this ID, save the
model initially with no file. After the initial save, update the file
and save again. All subsequent saves will write to the database only
once.
"""
if self.pk is None:
temp_image = self.image
self.image = None
super().save(*args, **kwargs)
self.image = temp_image
super().save(force_update=True)
class AbstractCertificate(AbstractCredential):
"""
Abstract Certificate configuration to support multiple type of certificates
i.e. Programs, Courses.
.. no_pii: This model has no PII.
"""
signatories = models.ManyToManyField(Signatory)
title = models.CharField(
max_length=255,
null=True,
blank=True,
help_text="Custom certificate title to override default display_name for a course/program.",
)
class Meta:
abstract = True
class UserCredential(TimeStampedModel):
"""
Credentials issued to a learner.
.. pii: Stores username for a user.
pii values: username
.. pii_types: username
.. pii_retirement: retained
"""
AWARDED, REVOKED = (
"awarded",
"revoked",
)
STATUSES_CHOICES = (
(AWARDED, _("awarded")),
(REVOKED, _("revoked")),
)
credential_content_type = models.ForeignKey(
ContentType,
limit_choices_to={"model__in": ("coursecertificate", "programcertificate")},
on_delete=models.CASCADE,
)
credential_id = models.PositiveIntegerField()
credential = GenericForeignKey("credential_content_type", "credential_id")
username = models.CharField(max_length=255, db_index=True)
status = models.CharField(
max_length=255,
choices=_choices(constants.UserCredentialStatus.AWARDED, constants.UserCredentialStatus.REVOKED),
default=constants.UserCredentialStatus.AWARDED,
)
download_url = models.CharField(
max_length=255, blank=True, null=True, help_text=_("URL at which the credential can be downloaded")
)
uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
class Meta:
unique_together = (("username", "credential_content_type", "credential_id"),)
def get_absolute_url(self):
return reverse("credentials:render", kwargs={"uuid": self.uuid.hex})
def revoke(self):
"""Sets the status to revoked, and saves this instance."""
self.status = UserCredential.REVOKED
self.save()
class CourseCertificate(AbstractCertificate):
"""
Configuration for Course Certificates.
.. no_pii: This model has no PII.
"""
course_id = models.CharField(max_length=255, validators=[validate_course_key])
course_run = models.OneToOneField(CourseRun, null=True, on_delete=models.PROTECT)
certificate_available_date = models.DateTimeField(
null=True,
blank=True,
help_text=_(
"The certificate available date and time that is set in Studio and copied to Credentials. "
"This should be edited in Studio."
),
)
certificate_type = models.CharField(
max_length=255,
choices=_choices(
constants.CertificateType.HONOR,
constants.CertificateType.PROFESSIONAL,
constants.CertificateType.VERIFIED,
constants.CertificateType.NO_ID_PROFESSIONAL,
constants.CertificateType.MASTERS,
),
)
user_credentials = GenericRelation(
UserCredential,
content_type_field="credential_content_type",
object_id_field="credential_id",
related_query_name="course_credentials",
)
class Meta:
unique_together = (("course_id", "certificate_type", "site"),)
verbose_name = "Course certificate configuration"
@cached_property
def course_key(self):
return CourseKey.from_string(self.course_id)
class ProgramCertificate(AbstractCertificate):
"""
Configuration for Program Certificates.
.. no_pii: This model has no PII.
"""
program_uuid = models.UUIDField(db_index=True, null=False, blank=False, verbose_name=_("Program UUID"))
# PROTECT prevents the Program from being delete if it's being used for a program cert. This allows copy_catalog
# to be safer when deleting
program = models.OneToOneField(Program, null=True, on_delete=models.PROTECT)
user_credentials = GenericRelation(
UserCredential,
content_type_field="credential_content_type",
object_id_field="credential_id",
related_query_name="program_credentials",
)
use_org_name = models.BooleanField(
default=False,
help_text=_(
"Display the associated organization's name (e.g. ACME University) "
"instead of its short name (e.g. ACMEx)"
),
verbose_name=_("Use organization name"),
)
include_hours_of_effort = models.BooleanField(
default=False,
help_text="Display the estimated total number of hours needed to complete all courses in the program. This "
"feature will only be displayed in the certificate if the attribute 'Total hours of effort' has "
"been set for the program in Discovery.",
)
language = models.CharField(
max_length=8, null=True, help_text="Locale in which certificates for this program will be rendered"
)
def __str__(self):
return f"ProgramCertificate: {self.program_uuid}"
class Meta:
verbose_name = "Program certificate configuration"
unique_together = (("site", "program_uuid"),)
@cached_property
def program_details(self):
"""Returns details about the program associated with this certificate."""
program_details = get_program_details_by_uuid(uuid=self.program_uuid, site=self.site)
if not program_details:
msg = f"No matching program with UUID [{self.program_uuid}] in credentials catalog for program certificate"
raise NoMatchingProgramException(msg)
if self.use_org_name:
for org in program_details.organizations:
org.display_name = org.name
if not self.include_hours_of_effort:
program_details.hours_of_effort = None
program_details.credential_title = self.title
return program_details
class UserCredentialAttribute(TimeStampedModel):
"""
Different attributes of User's Credential such as white list, grade etc.
.. no_pii: This model has no PII.
"""
user_credential = models.ForeignKey(UserCredential, related_name="attributes", on_delete=models.CASCADE)
name = models.CharField(max_length=255)
value = models.CharField(max_length=255)
class Meta:
unique_together = (("user_credential", "name"),)
class ProgramCompletionEmailConfiguration(TimeStampedModel):
"""
Template to add additional content into the program completion emails.
identifier should either be a:
- UUID <string> (for a specific program)
- program type <string> (for a program type)
- or "default" (the DEFAULT_TEMPLATE_IDENTIFIER) to be the global template used for all programs
html_template should be the HTML version of the email
plaintext_template should be the plaintext version of the email
enabled is what determines if we send the emails at all
.. no_pii: This model has no PII.
"""
DEFAULT_TEMPLATE_IDENTIFIER = "default"
# identifier will either be a:
# - UUID <string> (for a specific program)
# - program type <string> (for a program type)
# - or "default" (the DEFAULT_TEMPLATE_IDENTIFIER) to be the global template used for all programs
identifier = models.CharField(
max_length=50,
unique=True,
help_text=(
"""Should be either "default" to affect all programs, the program type slug, or the UUID of the program. """
"""Values are unique."""
),
)
html_template = models.TextField(
help_text=("For HTML emails." "Allows tags include (a, b, blockquote, div, em, i, li, ol, span, strong, ul)")
)
plaintext_template = models.TextField(help_text="For plaintext emails. No formatting tags. Text will send as is.")
enabled = models.BooleanField(default=False)
history = HistoricalRecords()
def save(self, **kwargs):
self.html_template = bleach.clean(self.html_template, tags=settings.ALLOWED_EMAIL_HTML_TAGS)
super().save(**kwargs)
@classmethod
def get_email_config_for_program(cls, program_uuid, program_type_slug):
"""
Gets the email config for the program, with the most specific match being returned,
or None of there are no matches
Because the UUID of the program will have hyphens, but we want to make it easy on PCs copying values,
we will check both the hyphenated version, and an unhyphenated version (.hex)
"""
# By converting the uuid parameter to a string then back to a UUID we can guarantee it will be a UUID later on
converted_program_uuid = uuid.UUID(str(program_uuid))
return (
cls.objects.filter(identifier=converted_program_uuid).first()
or cls.objects.filter(identifier=converted_program_uuid.hex).first()
or cls.objects.filter(identifier=program_type_slug).first()
or cls.objects.filter(identifier=cls.DEFAULT_TEMPLATE_IDENTIFIER).first()
)
| agpl-3.0 | 5,030,639,838,359,889,000 | 32.76 | 120 | 0.670379 | false |
Dziolas/inspire-next | inspire/dojson/common/base.py | 1 | 8263 | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014, 2015 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""MARC 21 model definition."""
from dojson import utils
from ..hep.model import hep, hep2marc
from ..conferences.model import conferences
from ..institutions.model import institutions
from ..experiments.model import experiments
from ..journals.model import journals
from ..hepnames.model import hepnames, hepnames2marc
from ..jobs.model import jobs
@institutions.over('control_number', '^001')
@hep.over('control_number', '^001')
@conferences.over('control_number', '^001')
@experiments.over('control_number', '^001')
@journals.over('control_number', '^001')
@hepnames.over('control_number', '^001')
@jobs.over('control_number', '^001')
def control_number(self, key, value):
"""Record Identifier."""
return value[0]
@hep2marc.over('001', 'control_number')
@hepnames2marc.over('001', 'control_number')
def control_number2marc(self, key, value):
"""Record Identifier."""
return value
@institutions.over('agency_code', '^003')
@hep.over('agency_code', '^003')
@conferences.over('agency_code', '^003')
@experiments.over('agency_code', '^003')
@journals.over('agency_code', '^003')
@hepnames.over('agency_code', '^003')
@jobs.over('agency_code', '^003')
def agency_code(self, key, value):
"""Control Number Identifier."""
return value[0]
@hep2marc.over('003', 'agency_code')
@hepnames2marc.over('003', 'agency_code')
def agency_code2marc(self, key, value):
"""Control Number Identifier."""
return value
@institutions.over('date_and_time_of_latest_transaction', '^005')
@hep.over('date_and_time_of_latest_transaction', '^005')
@conferences.over('date_and_time_of_latest_transaction', '^005')
@experiments.over('date_and_time_of_latest_transaction', '^005')
@journals.over('date_and_time_of_latest_transaction', '^005')
@hepnames.over('date_and_time_of_latest_transaction', '^005')
@jobs.over('date_and_time_of_latest_transaction', '^005')
def date_and_time_of_latest_transaction(self, key, value):
"""Date and Time of Latest Transaction."""
return value[0]
@hep2marc.over('005', 'date_and_time_of_latest_transaction')
@hepnames2marc.over('005', 'date_and_time_of_latest_transaction')
def date_and_time_of_latest_transaction2marc(self, key, value):
"""Date and Time of Latest Transaction."""
return value
@hep.over('oai_pmh', '^909CO')
@conferences.over('oai_pmh', '^909CO')
@institutions.over('oai_pmh', '^909CO')
@experiments.over('oai_pmh', '^909CO')
@journals.over('oai_pmh', '^909CO')
@hepnames.over('oai_pmh', '^909CO')
@jobs.over('oai_pmh', '^909CO')
@utils.for_each_value
@utils.filter_values
def oai_pmh(self, key, value):
"""Local OAI-PMH record information."""
return {
'id': value.get('o'),
'set': value.get('p'),
'previous_set': value.get('q'),
}
@hep2marc.over('909CO', 'oai_pmh')
@hepnames2marc.over('909CO', 'oai_pmh')
@utils.for_each_value
@utils.filter_values
def oai_pmh2marc(self, key, value):
"""Local OAI-PMH record information."""
return {
'o': value.get('id'),
'p': value.get('set'),
'q': value.get('previous_set')
}
@hep.over('creation_modification_date', '^961..')
@conferences.over('creation_modification_date', '^961..')
@institutions.over('creation_modification_date', '^961..')
@experiments.over('creation_modification_date', '^961..')
@journals.over('creation_modification_date', '^961..')
@hepnames.over('creation_modification_date', '^961..')
@jobs.over('creation_modification_date', '^961..')
@utils.for_each_value
@utils.filter_values
def creation_modification_date(self, key, value):
"""Original creation and modification date."""
return {
'modification_date': value.get('c'),
'creation_date': value.get('x'),
}
@hep2marc.over('961', 'creation_modification_date')
@hepnames2marc.over('961', 'creation_modification_date')
@utils.for_each_value
@utils.filter_values
def creation_modification_date2marc(self, key, value):
"""Original creation and modification date."""
return {
'c': value.get('modification_date'),
'x': value.get('creation_date')
}
@hep.over('spires_sysno', '^970..')
@conferences.over('spires_sysno', '^970..')
@institutions.over('spires_sysno', '^970..')
@experiments.over('spires_sysno', '^970..')
@journals.over('spires_sysno', '^970..')
@hepnames.over('spires_sysno', '^970..')
@jobs.over('spires_sysno', '^970..')
@utils.for_each_value
@utils.filter_values
def spires_sysno(self, key, value):
"""Old SPIRES number."""
return {
'spires_sysno': value.get('a')
}
@hep2marc.over('970', 'spires_sysno')
@hepnames2marc.over('970', 'spires_sysno')
@utils.for_each_value
@utils.filter_values
def spires_sysno2marc(self, key, value):
"""Old SPIRES number."""
return {
'a': value.get('spires_sysno')
}
@hep.over('collections', '^980..')
@conferences.over('collections', '^980..')
@institutions.over('collections', '^980..')
@experiments.over('collections', '^980..')
@journals.over('collections', '^980..')
@hepnames.over('collections', '^980..')
@jobs.over('collections', '^980..')
@utils.for_each_value
@utils.filter_values
def collections(self, key, value):
"""Collection this record belongs to."""
return {
'primary': value.get('a'),
'secondary': value.get('b'),
'deleted': value.get('c'),
}
@hep2marc.over('980', 'collections')
@hepnames2marc.over('980', 'collections')
@utils.for_each_value
@utils.filter_values
def collections2marc(self, key, value):
"""Collection this record belongs to."""
return {
'a': value.get('primary'),
'b': value.get('secondary'),
'c': value.get('deleted')
}
@hep.over('deleted_recid', '^981..')
@conferences.over('deleted_recid', '^981..')
@institutions.over('deleted_recid', '^981..')
@experiments.over('deleted_recid', '^981..')
@journals.over('deleted_recid', '^981..')
@hepnames.over('deleted_recid', '^981..')
@jobs.over('deleted_recid', '^981..')
@utils.for_each_value
@utils.filter_values
def deleted_recid(self, key, value):
"""Collection this record belongs to."""
return {
'deleted_recid': value.get('a'),
}
@hep.over('fft', '^FFT..')
@conferences.over('fft', '^FFT..')
@institutions.over('fft', '^FFT..')
@experiments.over('fft', '^FFT..')
@journals.over('fft', '^FFT..')
@utils.for_each_value
@utils.filter_values
def fft(self, key, value):
"""Collection this record belongs to."""
return {
'url': value.get('a'),
'docfile_type': value.get('t'),
'flag': value.get('o'),
'description': value.get('d'),
'filename': value.get('n'),
}
@hep.over('FFT', 'fft')
@conferences.over('FFT', 'fft')
@institutions.over('FFT', 'fft')
@experiments.over('FFT', 'fft')
@journals.over('FFT', 'fft')
@utils.for_each_value
@utils.filter_values
def fft2marc(self, key, value):
"""Collection this record belongs to."""
return {
'a': value.get('url'),
't': value.get('docfile_type'),
'o': value.get('flag'),
'd': value.get('description'),
'n': value.get('filename'),
}
@hep2marc.over('981', 'deleted_recid')
@hepnames2marc.over('981', 'deleted_recid')
@utils.for_each_value
@utils.filter_values
def deleted_recid2marc(self, key, value):
"""Collection this record belongs to."""
return {
'a': value.get('deleted_recid'),
}
| gpl-2.0 | 5,170,990,250,936,193,000 | 29.490775 | 77 | 0.65993 | false |
sharoonthomas/trytond-pos | tests/test_sale.py | 1 | 73026 | # -*- coding: utf-8 -*-
"""
tests/test_sale.py
"""
import sys
import os
import unittest
import datetime
from decimal import Decimal
from dateutil.relativedelta import relativedelta
import trytond.tests.test_tryton
from trytond.tests.test_tryton import POOL, USER, DB_NAME, CONTEXT
from trytond.transaction import Transaction
from trytond.exceptions import UserError
DIR = os.path.abspath(os.path.normpath(os.path.join(
__file__, '..', '..', '..', '..', '..', 'trytond'
)))
if os.path.isdir(DIR):
sys.path.insert(0, os.path.dirname(DIR))
class TestSale(unittest.TestCase):
'''
    Sale Test Case for the pos module.
'''
def setUp(self):
"""
Set up data used in the tests.
        This method is called before each test function execution.
"""
trytond.tests.test_tryton.install_module('pos')
self.Company = POOL.get('company.company')
self.Party = POOL.get('party.party')
self.Address = POOL.get('party.address')
self.Currency = POOL.get('currency.currency')
self.User = POOL.get('res.user')
self.Location = POOL.get('stock.location')
self.PriceList = POOL.get('product.price_list')
self.PaymentTerm = POOL.get('account.invoice.payment_term')
self.Sequence = POOL.get('ir.sequence')
self.Sale = POOL.get('sale.sale')
self.SaleLine = POOL.get('sale.line')
self.Channel = POOL.get('sale.channel')
self.Product = POOL.get('product.template')
self.SaleConfiguration = POOL.get('sale.configuration')
self.Invoice = POOL.get('account.invoice')
self.InvoiceLine = POOL.get('account.invoice.line')
def _create_product_category(self, name):
"""
        Creates a product category with the given name and returns the
        created records.
:param name: Name of the product category
"""
Category = POOL.get('product.category')
return Category.create([{
'name': name,
}])
def _create_product_template(self, name, vlist, uom=u'Unit'):
"""
        Create a product template with products and return the created
        template records
:param name: Name of the product
:param vlist: List of dictionaries of values to create
:param uom: Note it is the name of UOM (not symbol or code)
"""
ProductTemplate = POOL.get('product.template')
Uom = POOL.get('product.uom')
for values in vlist:
values['name'] = name
values['default_uom'], = Uom.search([('name', '=', uom)], limit=1)
values['sale_uom'], = Uom.search([('name', '=', uom)], limit=1)
values['products'] = [
('create', [{}])
]
return ProductTemplate.create(vlist)
def _create_fiscal_year(self, date=None, company=None):
"""
        Creates a fiscal year and required sequences
"""
FiscalYear = POOL.get('account.fiscalyear')
Sequence = POOL.get('ir.sequence')
SequenceStrict = POOL.get('ir.sequence.strict')
Company = POOL.get('company.company')
if date is None:
date = datetime.date.today()
if company is None:
company, = Company.search([], limit=1)
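        # A single strict (gapless) sequence is shared by all four invoice
        # and credit note sequences on the fiscal year below.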
invoice_sequence, = SequenceStrict.create([{
'name': '%s' % date.year,
'code': 'account.invoice',
'company': company,
}])
fiscal_year, = FiscalYear.create([{
'name': '%s' % date.year,
'start_date': date + relativedelta(month=1, day=1),
'end_date': date + relativedelta(month=12, day=31),
'company': company,
'post_move_sequence': Sequence.create([{
'name': '%s' % date.year,
'code': 'account.move',
'company': company,
}])[0],
'out_invoice_sequence': invoice_sequence,
'in_invoice_sequence': invoice_sequence,
'out_credit_note_sequence': invoice_sequence,
'in_credit_note_sequence': invoice_sequence,
}])
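        # Generate the accounting periods (monthly by default) for the year.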
FiscalYear.create_period([fiscal_year])
return fiscal_year
def _create_coa_minimal(self, company):
"""Create a minimal chart of accounts
"""
AccountTemplate = POOL.get('account.account.template')
Account = POOL.get('account.account')
account_create_chart = POOL.get(
'account.create_chart', type="wizard")
account_template, = AccountTemplate.search(
[('parent', '=', None)]
)
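        # Drive the create_chart wizard programmatically: open a session,
        # fill each step's fields and run its transitions, just as the
        # client would.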
session_id, _, _ = account_create_chart.create()
create_chart = account_create_chart(session_id)
create_chart.account.account_template = account_template
create_chart.account.company = company
create_chart.transition_create_account()
receivable, = Account.search([
('kind', '=', 'receivable'),
('company', '=', company),
])
payable, = Account.search([
('kind', '=', 'payable'),
('company', '=', company),
])
create_chart.properties.company = company
create_chart.properties.account_receivable = receivable
create_chart.properties.account_payable = payable
create_chart.transition_create_properties()
def _get_account_by_kind(self, kind, company=None, silent=True):
"""Returns an account with given spec
:param kind: receivable/payable/expense/revenue
        :param silent: don't raise an error if the account is not found
"""
Account = POOL.get('account.account')
Company = POOL.get('company.company')
if company is None:
company, = Company.search([], limit=1)
accounts = Account.search([
('kind', '=', kind),
('company', '=', company)
], limit=1)
if not accounts and not silent:
raise Exception("Account not found")
return accounts[0] if accounts else None
def _create_payment_term(self):
"""Create a simple payment term with all advance
"""
PaymentTerm = POOL.get('account.invoice.payment_term')
return PaymentTerm.create([{
'name': 'Direct',
'lines': [('create', [{'type': 'remainder'}])]
}])
def _create_pricelists(self):
"""
Create the pricelists
"""
# Setup the pricelists
self.party_pl_margin = Decimal('1.10')
self.guest_pl_margin = Decimal('1.20')
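        # Each price list formula scales the product's unit price by its
        # margin: 10% markup for known parties, 20% for guests.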
user_price_list, = self.PriceList.create([{
'name': 'PL 1',
'company': self.company.id,
'lines': [
('create', [{
'formula': 'unit_price * %s' % self.party_pl_margin
}])
],
}])
guest_price_list, = self.PriceList.create([{
'name': 'PL 2',
'company': self.company.id,
'lines': [
('create', [{
'formula': 'unit_price * %s' % self.guest_pl_margin
}])
],
}])
return guest_price_list.id, user_price_list.id
def setup_defaults(self):
"""
Setup Defaults
"""
Uom = POOL.get('product.uom')
AccountTax = POOL.get('account.tax')
Account = POOL.get('account.account')
Inventory = POOL.get('stock.inventory')
self.usd, = self.Currency.create([{
'name': 'US Dollar',
'code': 'USD',
'symbol': '$',
}])
Country = POOL.get('country.country')
self.country, = Country.create([{
'name': 'United States of America',
'code': 'US',
}])
Subdivision = POOL.get('country.subdivision')
self.subdivision, = Subdivision.create([{
'country': self.country.id,
'name': 'California',
'code': 'CA',
'type': 'state',
}])
self.uom, = Uom.search([('symbol', '=', 'u')], limit=1)
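        # Create the base records as the root user so the test does not
        # depend on the current user's access rights.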
with Transaction().set_user(0):
self.party, = self.Party.create([{
'name': 'Openlabs',
'addresses': [('create', [{
'name': 'Lie Nielsen',
'city': 'Los Angeles',
'country': self.country.id,
'subdivision': self.subdivision.id,
}])],
}])
self.anonymous_customer, = self.Party.create([{
'name': 'Anonymous Customer Party'
}])
self.address, = self.Address.create([{
'party': self.anonymous_customer,
'name': 'Jon Doe\'s Address'
}])
self.company, = self.Company.create([{
'party': self.party.id,
'currency': self.usd
}])
user = self.User(USER)
self.User.write([user], {
'main_company': self.company.id,
'company': self.company.id,
})
with Transaction().set_context(company=self.company.id):
# Create Fiscal Year
self._create_fiscal_year(company=self.company.id)
# Create Chart of Accounts
self._create_coa_minimal(company=self.company.id)
# Create a payment term
self._create_payment_term()
sequence, = self.Sequence.search([
('code', '=', 'sale.sale'),
], limit=1)
warehouse, = self.Location.search([
('code', '=', 'WH'),
])
self.payment_term, = self.PaymentTerm.create([{
'name': 'Payment term',
'lines': [
('create', [{
'sequence': 0,
'type': 'remainder',
'days': 0,
'months': 0,
'weeks': 0,
}])]
}])
price_list, = self.PriceList.create([{
'name': 'PL 1',
'company': self.company.id,
'lines': [
('create', [{
'formula': 'unit_price'
}])
],
}])
self.channel, = self.Channel.create([{
'name': 'Channel',
'company': self.company.id,
'source': 'pos',
'currency': self.usd.id,
'anonymous_customer': self.anonymous_customer.id,
'warehouse': warehouse.id,
'backorder_warehouse': warehouse.id,
'price_list': price_list.id,
'payment_term': self.payment_term.id,
'invoice_method': 'order',
'shipment_method': 'manual',
}])
self.channel1, = self.Channel.create([{
'name': 'Channel 1',
'company': self.company.id,
'source': 'pos',
'currency': self.usd.id,
'anonymous_customer': self.anonymous_customer.id,
'warehouse': warehouse.id,
'backorder_warehouse': warehouse.id,
'price_list': price_list.id,
'payment_term': self.payment_term.id,
'invoice_method': 'order',
'shipment_method': 'manual',
}])
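            # Attach both channels to the test user and make the first one
            # the current channel.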
user = self.User(USER)
self.User.write([user], {
'create_channels': [('add', [self.channel, self.channel1])],
'current_channel': self.channel.id,
})
account = Account.search([('name', '=', 'Main Tax')])[0]
tax, = AccountTax.create([{
'name': 'Test Tax',
'description': 'Test Tax',
'rate': Decimal('0.10'),
'invoice_account': account,
'credit_note_account': account,
}])
self.category, = self._create_product_category(
'Category'
)
# Create product templates with products
self.template1, = self._create_product_template(
'product-1',
[{
'category': self.category.id,
'type': 'goods',
'salable': True,
'list_price': Decimal('10'),
'cost_price': Decimal('5'),
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
}]
)
self.template2, = self._create_product_template(
'product-2',
[{
'category': self.category.id,
'type': 'goods',
'salable': True,
'list_price': Decimal('15'),
'cost_price': Decimal('5'),
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
}]
)
self.template3, = self._create_product_template(
'product-3',
[{
'category': self.category.id,
'type': 'goods',
'salable': True,
'list_price': Decimal('15'),
'cost_price': Decimal('5'),
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
'customer_taxes': [('add', [tax])]
}]
)
self.template4, = self._create_product_template(
'product-4',
[{
'category': self.category.id,
'type': 'service',
'salable': True,
'list_price': Decimal('10'),
'cost_price': Decimal('5'),
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
}]
)
self.product1 = self.template1.products[0]
self.product2 = self.template2.products[0]
self.product3 = self.template3.products[0]
self.product4 = self.template4.products[0]
inventory, = Inventory.create([{
'location': warehouse.storage_location,
'company': self.company.id,
'lines': [('create', [{
'product': self.product1,
'quantity': 20,
}])]
}])
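        # Confirming the inventory creates the stock moves that place 20
        # units of product1 in the warehouse.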
Inventory.confirm([inventory])
def test_0010_test_sale(self):
"""
        Ensure pos_add_product adds and updates sale lines correctly
"""
with Transaction().start(DB_NAME, USER, CONTEXT):
self.setup_defaults()
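            # use_anonymous_customer makes the new sale default its party
            # to the channel's anonymous customer.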
with Transaction().set_context(use_anonymous_customer=True):
sale, = self.Sale.create([{
'currency': self.usd.id,
}])
with Transaction().set_context(
company=self.company.id, channel=self.channel.id
):
sale.pos_add_product([self.product1.id], 1)
self.assertEqual(len(sale.lines), 1)
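                # Re-adding the same product updates the existing line's
                # quantity and unit price instead of creating a new line.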
sale.pos_add_product([self.product1.id], 2, 20)
self.assertEqual(len(sale.lines), 1)
self.assertEqual(sale.lines[0].quantity, 2)
self.assertEqual(sale.lines[0].amount, 40)
rv = sale.pos_add_product([self.product2.id], 2)
self.assertEqual(len(sale.lines), 2)
self.assertEqual(len(rv['sale']['lines']), 2)
def test_0020_test_delivery_mode_on_adding(self):
"""
Ensure that delivery mode is respected when added to cart
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
with Transaction().set_context(use_anonymous_customer=True):
sale, = self.Sale.create([{
'currency': self.usd.id,
}])
with Transaction().set_context(
company=self.company.id, channel=self.channel.id
):
rv = sale.pos_add_product([self.product1.id], 1)
# By default the lines are picked
self.assertEqual(len(rv['sale']['lines']), 1)
self.assertEqual(
rv['sale']['lines'][0]['delivery_mode'], 'pick_up'
)
self.assertEqual(rv['sale']['lines'][0]['quantity'], 1)
# Add another line, but with explicit delivery_mode
with Transaction().set_context(delivery_mode='pick_up'):
rv = sale.pos_add_product([self.product1.id], 2)
self.assertEqual(len(rv['sale']['lines']), 1)
self.assertEqual(
rv['sale']['lines'][0]['delivery_mode'], 'pick_up'
)
self.assertEqual(rv['sale']['lines'][0]['quantity'], 2)
# Add a ship line of same product
with Transaction().set_context(delivery_mode='ship'):
rv = sale.pos_add_product([self.product1.id], 1)
self.assertEqual(len(rv['sale']['lines']), 2)
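                # for/else: the else clause runs, failing the test, only if
                # the loop finds no matching line.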
for pick_line in filter(
lambda l: l['delivery_mode'] == 'pick_up',
rv['sale']['lines']):
# From the previous addition
self.assertEqual(pick_line['delivery_mode'], 'pick_up')
self.assertEqual(pick_line['quantity'], 2)
break
else:
self.fail('Expected to find pick up line, but did not')
for ship_line in filter(
lambda l: l['delivery_mode'] == 'ship',
rv['sale']['lines']):
self.assertEqual(ship_line['delivery_mode'], 'ship')
self.assertEqual(ship_line['quantity'], 1)
break
else:
self.fail('Expected to find line, but did not')
def test_0022_test_update_delivery_mode(self):
"""
Update delivery mode of saleLine
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
with Transaction().set_context(use_anonymous_customer=True):
sale, = self.Sale.create([{
'currency': self.usd.id,
}])
with Transaction().set_context(
company=self.company.id, channel=self.channel.id
):
rv = sale.pos_add_product([self.product1.id], 1)
# By default the lines are picked
self.assertEqual(len(rv['sale']['lines']), 1)
self.assertEqual(
rv['sale']['lines'][0]['delivery_mode'], 'pick_up'
)
self.assertEqual(rv['sale']['lines'][0]['quantity'], 1)
# Update delivery_mode in sale line
with Transaction().set_context(
delivery_mode='ship',
sale_line=rv['updated_lines'][0]
):
rv = sale.pos_add_product([self.product1.id], 2)
self.assertEqual(len(rv['sale']['lines']), 1)
self.assertEqual(rv['sale']['lines'][0]['delivery_mode'], 'ship')
self.assertEqual(rv['sale']['lines'][0]['quantity'], 2)
# Change product and provide saleLine
with Transaction().set_context(
delivery_mode='ship',
sale_line=rv['updated_lines'][0]
):
rv = sale.pos_add_product([self.product2.id], 2)
self.assertEqual(len(rv['sale']['lines']), 1)
# Product should not change
self.assertEqual(
rv['sale']['lines'][0]['product']['id'], self.product1.id
)
self.assertEqual(rv['sale']['lines'][0]['delivery_mode'], 'ship')
self.assertEqual(rv['sale']['lines'][0]['quantity'], 2)
def test_0025_add_taxes_on_line(self):
"""
        Add a line that would add taxes and check that it works
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
with Transaction().set_context(use_anonymous_customer=True):
sale, = self.Sale.create([{
'currency': self.usd.id,
}])
with Transaction().set_context(
company=self.company.id, channel=self.channel.id
):
rv = sale.pos_add_product([self.product1.id], 1)
# add a product which does not have taxes
self.assertEqual(len(rv['sale']['lines']), 1)
sale_line = self.SaleLine(rv['updated_lines'][0])
self.assertFalse(sale_line.taxes)
self.assertEqual(rv['sale']['tax_amount'], 0)
rv = sale.pos_add_product([self.product3.id], 1)
                # add a product which has taxes attached
self.assertEqual(len(rv['sale']['lines']), 2)
sale_line = self.SaleLine(rv['updated_lines'][0])
self.assertEqual(rv['sale']['tax_amount'], Decimal('1.5'))
# Please make that two ;)
rv = sale.pos_add_product([self.product3.id], 2)
                # the quantity update on the taxed line doubles the tax
self.assertEqual(len(rv['sale']['lines']), 2)
sale_line = self.SaleLine(rv['updated_lines'][0])
self.assertEqual(rv['sale']['tax_amount'], Decimal('3'))
def test_0030_serialization_fallback(self):
"""
Ensure that serialization for other purposes still work
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
with Transaction().set_context(use_anonymous_customer=True):
sale, = self.Sale.create([{
'currency': self.usd.id,
}])
with Transaction().set_context(
company=self.company.id, channel=self.channel.id):
rv = sale.pos_add_product([self.product1.id], 1)
sale_line = self.SaleLine(rv['updated_lines'][0])
# Serialize sale
sale.serialize()
sale_line.serialize()
def test_0035_sale_pos_serialization(self):
"""
Serialize sale for pos
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
with Transaction().set_context(use_anonymous_customer=True):
sale, = self.Sale.create([{
'currency': self.usd.id,
'invoice_address': self.address,
'shipment_address': self.address,
}])
with Transaction().set_context(
company=self.company.id, channel=self.channel.id):
sale.pos_add_product([self.product1.id], 1)
# Serialize sale for pos
rv = sale.pos_serialize()
self.assertEqual(rv['total_amount'], sale.total_amount)
self.assertEqual(rv['tax_amount'], sale.tax_amount)
self.assertEqual(len(rv['lines']), 1)
def test_0040_default_delivery_mode(self):
"""
Test default delivery_mode for saleLine
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
with Transaction().set_context(
use_anonymous_customer=True, channel=self.channel.id
):
sale, = self.Sale.create([{
'currency': self.usd.id,
}])
self.assertEqual(sale.channel.delivery_mode, 'ship')
with Transaction().set_context(
company=self.company.id, channel=self.channel.id,
current_channel=self.channel.id
):
sale_line, = self.SaleLine.create([{
'sale': sale.id,
'product': self.product1.id,
'description': 'test product',
'quantity': 1,
'unit': self.product1.default_uom.id,
'unit_price': Decimal('10'),
}])
self.assertEqual(
sale_line.delivery_mode, self.channel.delivery_mode
)
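            # With no channel in context and a service product the new line
            # gets no default delivery mode.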
with Transaction().set_user(0):
with Transaction().set_context(
company=self.company.id, channel=None
):
new_sale_line, = self.SaleLine.create([{
'sale': sale.id,
'product': self.product4.id,
'description': 'test service product',
'quantity': 1,
'unit': self.product4.default_uom.id,
'unit_price': Decimal('10'),
}])
self.assertIsNone(new_sale_line.delivery_mode)
# Test if sale line's product type is goods
self.assertTrue(sale_line.product_type_is_goods)
self.assertFalse(new_sale_line.product_type_is_goods)
def test_0120_ship_pick_diff_warehouse(self):
"""
Ensure that backorder_warehouse is used for back orders while orders
are picked from the channel's warehouse
"""
Location = POOL.get('stock.location')
Channel = POOL.get('sale.channel')
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
backorder_warehouse, = Location.copy([self.channel.warehouse])
# Set that as the new backorder warehouse
Channel.write([self.channel], {
'backorder_warehouse': backorder_warehouse.id}
)
with Transaction().set_context({
'company': self.company.id,
'channel': self.channel.id,
'channels': [self.channel.id], }):
# Now create an order
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'manual',
'shipment_method': 'order',
# Explicitly specify the channel
'channel': Channel(self.channel).id,
}])
line1, = self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}])
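                # Write back the changed values returned by the delivery
                # mode on_change, as the client would after editing it.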
self.SaleLine.write([line1], line1.on_change_delivery_mode())
line2, = self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'ship',
'unit': self.uom,
'unit_price': 20000,
'description': 'Shipped Item',
'product': self.product1.id
}])
self.SaleLine.write([line2], line2.on_change_delivery_mode())
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
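                # Picked-up goods ship immediately from the channel's own
                # warehouse; shipped goods wait in the backorder warehouse.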
self.assertEqual(len(sale.shipments), 2)
for shipment in sale.shipments:
if shipment.delivery_mode == 'pick_up':
self.assertEqual(shipment.state, 'done')
self.assertEqual(
shipment.warehouse, self.channel.warehouse
)
elif shipment.delivery_mode == 'ship':
self.assertEqual(shipment.state, 'waiting')
self.assertEqual(
shipment.warehouse, self.channel.backorder_warehouse
)
else:
self.fail('Invalid delivery mode')
def test_1010_delivery_method_2shipping_case_1(self):
"""
Ensure shipment method is respected by sale order processing
Case 1: Ship only order
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'manual',
'shipment_method': 'order',
}])
sale_line, = self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'ship',
'unit': self.uom,
'unit_price': 20000,
'description': 'Test description',
'product': self.product1.id
}])
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
self.assertEqual(len(sale.shipments), 1)
self.assertEqual(sale.shipments[0].delivery_mode, 'ship')
self.assertEqual(sale.shipments[0].state, 'waiting')
def test_1020_delivery_method_2shipping_case_2(self):
"""
Ensure shipment method is respected by sale order processing
Case 2: Pick up only order
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'manual',
'shipment_method': 'order',
}])
sale_line, = self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Test description',
'product': self.product1.id
}])
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
self.assertEqual(len(sale.shipments), 1)
self.assertEqual(sale.shipments[0].delivery_mode, 'pick_up')
self.assertEqual(sale.shipments[0].state, 'done')
def test_1030_delivery_method_2shipping_case_3(self):
"""
Ensure shipment method is respected by sale order processing
        Case 3: Pick up + Ship order for same item
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'manual',
'shipment_method': 'order',
}])
self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}, {
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'ship',
'unit': self.uom,
'unit_price': 20000,
'description': 'Shipped Item',
'product': self.product1.id
}])
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
self.assertEqual(len(sale.shipments), 2)
for shipment in sale.shipments:
if shipment.delivery_mode == 'pick_up':
self.assertEqual(shipment.state, 'done')
elif shipment.delivery_mode == 'ship':
self.assertEqual(shipment.state, 'waiting')
else:
self.fail('Invalid delivery mode')
def test_1040_delivery_method_2shipping_case_4(self):
"""
        A manual shipment method should bypass the delivery-mode shipment
        workflow: processing the sale creates no shipments
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'manual',
'shipment_method': 'manual',
}])
self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}, {
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'ship',
'unit': self.uom,
'unit_price': 20000,
'description': 'Shipped Item',
'product': self.product1.id
}])
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
self.assertEqual(len(sale.shipments), 0)
def test_1050_delivery_method_2shipping_case_5(self):
"""
        Negative quantity lines should create a return shipment
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'manual',
'shipment_method': 'order',
}])
self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': -2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}])
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
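            # A negative quantity produces a customer return shipment
            # instead of an outgoing shipment.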
self.assertEqual(len(sale.shipments), 0)
self.assertEqual(len(sale.shipment_returns), 1)
def test_1090_default_delivery_methods(self):
"""
Defaults should be to ship products
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
ShipmentOut = POOL.get('stock.shipment.out')
ShipmentOutReturn = POOL.get('stock.shipment.out.return')
with Transaction().set_context({'company': self.company.id}):
shipment, = ShipmentOut.create([{
'customer': self.party.id,
'delivery_address': self.party.addresses[0].id,
}])
self.assertEqual(shipment.delivery_mode, 'ship')
shipment_return, = ShipmentOutReturn.create([{
'customer': self.party.id,
'delivery_address': self.party.addresses[0].id,
}])
self.assertEqual(shipment_return.delivery_mode, 'ship')
def test_1110_shipment_invoice_case_1(self):
"""
Ensure that a posted invoice is created when a picked up order is
processed.
For the ship order, since nothing has been shipped, there should be
no invoices
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'shipment',
'shipment_method': 'order',
}])
self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'ship',
'unit': self.uom,
'unit_price': 20000,
'description': 'Test description',
'product': self.product1.id
}, {
'sale': sale,
'type': 'line',
'quantity': -2,
'delivery_mode': 'ship',
'unit': self.uom,
'unit_price': 20000,
'description': 'Test description',
'product': self.product1.id
}])
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
self.assertEqual(len(sale.shipments), 1)
self.assertEqual(sale.shipments[0].delivery_mode, 'ship')
self.assertEqual(sale.shipments[0].state, 'waiting')
self.assertEqual(len(sale.shipment_returns), 1)
self.assertEqual(sale.shipment_returns[0].delivery_mode, 'ship')
# On processing return sale, the shipment is in draft.
# It's weird, but that is how tryton does it
self.assertEqual(sale.shipment_returns[0].state, 'draft')
self.assertEqual(len(sale.invoices), 0)
def test_1120_shipment_invoice_case_2(self):
"""
        Ensure that a posted invoice is created when a picked-up order is
        processed.
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'shipment',
'shipment_method': 'order',
}])
self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Test description',
'product': self.product1.id
}, {
'sale': sale,
'type': 'line',
'quantity': -2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Test description',
'product': self.product1.id
}])
with Transaction().set_context({'company': self.company.id}):
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
self.assertEqual(len(sale.shipments), 1)
self.assertEqual(sale.shipments[0].delivery_mode, 'pick_up')
self.assertEqual(sale.shipments[0].state, 'done')
self.assertEqual(len(sale.shipment_returns), 1)
self.assertEqual(sale.shipment_returns[0].delivery_mode, 'pick_up')
self.assertEqual(sale.shipment_returns[0].state, 'done')
self.assertEqual(len(sale.invoices), 2)
self.assertEqual(sale.invoices[0].state, 'posted')
self.assertEqual(sale.invoices[1].state, 'posted')
def test_1130_delivery_method_2shipping_case_3(self):
"""
Ensure shipment method is respected by sale order processing.
        Ensure that only one invoice is created and that it is posted.
        Case 3: Pick up + Ship order for the same item
"""
Date = POOL.get('ir.date')
Shipment = POOL.get('stock.shipment.out')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'reference': 'Test Sale',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
# keep invoicing out of the way for this test's sake
'invoice_method': 'shipment',
'shipment_method': 'order',
}])
self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}, {
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'ship',
'unit': self.uom,
'unit_price': 20000,
'description': 'Shipped Item',
'product': self.product1.id
}])
with Transaction().set_context({'company': self.company.id}):
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
self.assertEqual(len(sale.shipments), 2)
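            # One shipment per delivery mode: the pick_up one is done at once, the ship one waits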
for shipment in sale.shipments:
if shipment.delivery_mode == 'pick_up':
self.assertEqual(shipment.state, 'done')
elif shipment.delivery_mode == 'ship':
self.assertEqual(shipment.state, 'waiting')
delivery_shipment = shipment # used later in test
else:
self.fail('Invalid delivery mode')
self.assertEqual(len(sale.invoices), 1)
self.assertEqual(sale.invoices[0].state, 'posted')
with Transaction().set_context({'company': self.company.id}):
# Now process the delivered shipment as if its been shipped
Shipment.assign_force([delivery_shipment])
Shipment.pack([delivery_shipment])
Shipment.done([delivery_shipment])
self.assertEqual(len(sale.invoices), 2)
self.assertEqual(sale.invoices[0].state, 'posted')
self.assertEqual(sale.invoices[1].state, 'posted')
def test_1140_serialize_recent_sales(self):
"""
        Test that sale orders which were recently updated or created are on top.
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
with Transaction().set_context(current_channel=self.channel.id):
sale1, = self.Sale.create([{
'reference': 'Test Sale 1',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
}])
sale2, = self.Sale.create([{
'reference': 'Test Sale 2',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
}])
sale3, = self.Sale.create([{
'reference': 'Test Sale 3',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
}])
sale4, = self.Sale.create([{
'reference': 'Test Sale 4',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
}])
saleLine1, = self.SaleLine.create([{
'sale': sale1,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}])
saleLine2, = self.SaleLine.create([{
'sale': sale2,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}])
saleLine3, = self.SaleLine.create([{
'sale': sale3,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}])
saleLine4, = self.SaleLine.create([{
'sale': sale4,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': 20000,
'description': 'Picked Item',
'product': self.product1.id
}])
rv = self.Sale.get_recent_sales()
self.assertEqual(len(rv), 4)
# Test serialized data
self.assertIn('id', rv[0])
self.assertIn('party', rv[0])
self.assertIn('total_amount', rv[0])
self.assertIn('create_date', rv[0])
def test_1150_round_off_case_1(self):
"""
Test round off in sale and invoice.
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
product, = self.Product.create([{
'name': 'Test product',
'list_price': 200,
'cost_price': 200,
'default_uom': self.uom,
'salable': True,
'sale_uom': self.uom,
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
'products': [('create', [
{}
])]
}])
sale, = self.Sale.create([{
'reference': 'Test Sale 1',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'company': self.company.id,
'lines': [('create', [
{
'type': 'line',
'quantity': 1,
'product': product.products[0].id,
'unit': self.uom,
                        'unit_price': Decimal('200.25'),
'description': 'sale line',
}
])]
}])
with Transaction().set_context(company=self.company.id):
self.Sale.round_down_total([sale])
self.assertEqual(len(sale.lines), 2)
round_off_line, = self.SaleLine.search([
('is_round_off', '=', True)
])
# There should be a new line of type 'roundoff'
self.assertIsNotNone(round_off_line)
# Total order price 200.25 should have been rounded down to 200
self.assertEqual(sale.total_amount, 200)
# Difference after rounding down should be created as
# roundoff line.
self.assertEqual(round_off_line.unit_price, 0.25)
self.assertEqual(round_off_line.quantity, -1)
self.assertEqual(round_off_line.amount, -0.25)
self.SaleLine.create([
{
'sale': sale,
'type': 'line',
'quantity': 1,
'product': product.products[0].id,
'unit': self.uom,
'unit_price': Decimal('50.95'),
'description': 'sale line',
}
])
self.Sale.round_down_total([sale])
# Previous roundoff line should be deleted.
round_off_lines = self.SaleLine.search_count([
('id', '=', round_off_line.id)
])
self.assertEqual(round_off_lines, 0)
# There should be a new roundoff line created
round_off_lines = self.SaleLine.search([
('is_round_off', '=', True)
])
# There should only be one roundoff line.
self.assertEqual(len(round_off_lines), 1)
self.assertEqual(round_off_lines[0].amount, Decimal('-0.20'))
self.assertEqual(sale.total_amount, 251)
# Process sale
self.Sale.quote([sale])
self.Sale.confirm([sale])
# Processing sale which doesn't have round off account and
# has a roundoff line, raises UserError.
self.assertRaises(UserError, self.Sale.process, [sale])
# Set round down account.
self.saleConfiguration = self.SaleConfiguration.create([{
'round_down_account':
self._get_account_by_kind('revenue').id,
}])
self.Sale.process([sale])
invoice, = self.Invoice.search([
('sales', 'in', [sale.id])
])
# There should be an invoice created from the processed sale
self.assertEqual(invoice.total_amount, 251)
def test_1155_round_off_case_2(self):
"""
        Process the sale multiple times and ensure only one invoice is created.
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
product, = self.Product.create([{
'name': 'Test product',
'list_price': 200,
'cost_price': 200,
'default_uom': self.uom,
'salable': True,
'sale_uom': self.uom,
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
'products': [('create', [
{}
])]
}])
sale, = self.Sale.create([{
'reference': 'Test Sale 1',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'company': self.company.id,
'lines': [('create', [
{
'type': 'line',
'quantity': 1,
'product': product.products[0].id,
'unit': self.uom,
                        'unit_price': Decimal('200.25'),
'description': 'sale line',
}
])]
}])
with Transaction().set_context(company=self.company.id):
self.Sale.round_down_total([sale])
self.assertEqual(len(sale.lines), 2)
round_off_line, = self.SaleLine.search([
('is_round_off', '=', True)
])
# There should be a new line of type 'roundoff'
self.assertIsNotNone(round_off_line)
# Total order price 200.25 should have been rounded down to 200
self.assertEqual(sale.total_amount, 200)
# Difference after rounding down should be created as
# roundoff line.
self.assertEqual(round_off_line.unit_price, 0.25)
self.assertEqual(round_off_line.quantity, -1)
self.assertEqual(round_off_line.amount, -0.25)
self.SaleLine.create([
{
'sale': sale,
'type': 'line',
'quantity': 1,
'product': product.products[0].id,
'unit': self.uom,
'unit_price': Decimal('50.95'),
'description': 'sale line',
}
])
self.Sale.round_down_total([sale])
# Previous roundoff line should be deleted.
round_off_lines = self.SaleLine.search_count([
('id', '=', round_off_line.id)
])
self.assertEqual(round_off_lines, 0)
# There should be a new roundoff line created
round_off_lines = self.SaleLine.search([
('is_round_off', '=', True)
])
# There should only be one roundoff line.
self.assertEqual(len(round_off_lines), 1)
self.assertEqual(round_off_lines[0].amount, Decimal('-0.20'))
self.assertEqual(sale.total_amount, 251)
# Process sale
self.Sale.quote([sale])
self.Sale.confirm([sale])
# Processing sale which doesn't have round off account and
# has a roundoff line, raises UserError.
self.assertRaises(UserError, self.Sale.process, [sale])
# Set round down account.
self.saleConfiguration = self.SaleConfiguration.create([{
'round_down_account':
self._get_account_by_kind('revenue').id,
}])
self.Sale.process([sale])
self.Sale.process([sale])
self.Sale.process([sale])
self.Sale.process([sale])
invoices = self.Invoice.search([])
self.assertEqual(len(invoices), 1)
self.assertEqual(invoices[0].total_amount, 251)
def test_1156_round_off_case_3(self):
"""
        Process the sale and cancel its invoice. Then process the sale again
        and ensure that a new invoice is created.
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
product, = self.Product.create([{
'name': 'Test product',
'list_price': 200,
'cost_price': 200,
'default_uom': self.uom,
'salable': True,
'sale_uom': self.uom,
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
'products': [('create', [
{}
])]
}])
sale, = self.Sale.create([{
'reference': 'Test Sale 1',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'company': self.company.id,
'lines': [('create', [
{
'type': 'line',
'quantity': 1,
'product': product.products[0].id,
'unit': self.uom,
                        'unit_price': Decimal('200.25'),
'description': 'sale line',
}
])]
}])
with Transaction().set_context(company=self.company.id):
self.Sale.round_down_total([sale])
self.assertEqual(len(sale.lines), 2)
round_off_line, = self.SaleLine.search([
('is_round_off', '=', True)
])
# There should be a new line of type 'roundoff'
self.assertIsNotNone(round_off_line)
# Total order price 200.25 should have been rounded down to 200
self.assertEqual(sale.total_amount, 200)
# Difference after rounding down should be created as
# roundoff line.
self.assertEqual(round_off_line.unit_price, 0.25)
self.assertEqual(round_off_line.quantity, -1)
self.assertEqual(round_off_line.amount, -0.25)
self.SaleLine.create([
{
'sale': sale,
'type': 'line',
'quantity': 1,
'product': product.products[0].id,
'unit': self.uom,
'unit_price': Decimal('50.95'),
'description': 'sale line',
}
])
self.Sale.round_down_total([sale])
# Previous roundoff line should be deleted.
round_off_lines = self.SaleLine.search_count([
('id', '=', round_off_line.id)
])
self.assertEqual(round_off_lines, 0)
# There should be a new roundoff line created
round_off_lines = self.SaleLine.search([
('is_round_off', '=', True)
])
# There should only be one roundoff line.
self.assertEqual(len(round_off_lines), 1)
self.assertEqual(round_off_lines[0].amount, Decimal('-0.20'))
self.assertEqual(sale.total_amount, 251)
# Process sale
self.Sale.quote([sale])
self.Sale.confirm([sale])
# Processing sale which doesn't have round off account and
# has a roundoff line, raises UserError.
self.assertRaises(UserError, self.Sale.process, [sale])
# Set round down account.
self.saleConfiguration = self.SaleConfiguration.create([{
'round_down_account':
self._get_account_by_kind('revenue').id,
}])
self.Sale.process([sale])
self.Sale.process([sale])
self.Sale.process([sale])
self.Sale.process([sale])
invoices = self.Invoice.search([])
self.assertEqual(len(invoices), 1)
self.assertEqual(invoices[0].total_amount, 251)
self.Invoice.cancel(invoices)
self.Sale.process([sale])
invoices = self.Invoice.search([])
self.assertEqual(len(invoices), 1)
self.assertEqual(invoices[0].total_amount, 251)
self.Sale.process([sale])
self.Sale.process([sale])
self.Sale.process([sale])
invoices = self.Invoice.search([])
self.assertEqual(len(invoices), 1)
self.assertEqual(invoices[0].total_amount, 251)
# Manually call get invoice line for round off line and
# check if credit note is not created
round_off_lines[0].invoice_lines = None
round_off_lines[0].save()
invoice_line = round_off_lines[0].get_invoice_line(
'out_credit_note')
self.assertEqual(invoice_line, [])
def test_1157_round_off_case_4(self):
"""
Negative amount on sale
"""
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
product, = self.Product.create([{
'name': 'Test product',
'list_price': 200,
'cost_price': 200,
'default_uom': self.uom,
'salable': True,
'sale_uom': self.uom,
'account_expense': self._get_account_by_kind('expense').id,
'account_revenue': self._get_account_by_kind('revenue').id,
'products': [('create', [
{}
])]
}])
sale, = self.Sale.create([{
'reference': 'Test Sale 1',
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'company': self.company.id,
'lines': [('create', [
{
'type': 'line',
'quantity': -1,
'product': product.products[0].id,
'unit': self.uom,
                        'unit_price': Decimal('200.25'),
'description': 'sale line',
}
])]
}])
with Transaction().set_context(company=self.company.id):
self.Sale.round_down_total([sale])
self.assertEqual(len(sale.lines), 2)
round_off_line, = self.SaleLine.search([
('is_round_off', '=', True)
])
# There should be a new line of type 'roundoff'
self.assertIsNotNone(round_off_line)
                # Total order price -200.25 should have been rounded down to -201
self.assertEqual(sale.total_amount, -201)
# Difference after rounding down should be created as
# roundoff line.
self.assertEqual(round_off_line.unit_price, 0.75)
self.assertEqual(round_off_line.quantity, -1)
self.assertEqual(round_off_line.amount, -0.75)
# Process sale
self.Sale.quote([sale])
self.Sale.confirm([sale])
# Set round down account.
self.saleConfiguration = self.SaleConfiguration.create([{
'round_down_account':
self._get_account_by_kind('revenue').id,
}])
self.Sale.process([sale])
invoices = self.Invoice.search([])
self.assertEqual(len(invoices), 1)
self.assertEqual(invoices[0].type, 'out_credit_note')
self.assertEqual(invoices[0].total_amount, 201)
def test_1160_sale_stays_in_confirm_state_forever(self):
"""
        Regression test: a pick-up line with a zero total must not leave the
        sale stuck in the confirmed state; it should complete as 'done'.
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
'invoice_method': 'shipment',
'shipment_method': 'order',
}])
self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 2,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': Decimal('0'),
'description': 'Picked Item',
'product': self.product1.id
}])
with Transaction().set_context({'company': self.company.id}):
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
self.Sale.process([sale])
self.assertEqual(len(sale.shipments), 1)
self.assertEqual(len(sale.invoices), 1)
self.assertEqual(sale.invoice_state, 'paid')
self.assertEqual(sale.shipment_state, 'sent')
self.assertEqual(sale.state, 'done')
def test_1170_test_assign_pick_up_shipments(self):
"""
        Test that a UserError is raised while processing a sale when any of
        the picked-up products is out of stock.
"""
Date = POOL.get('ir.date')
with Transaction().start(DB_NAME, USER, context=CONTEXT):
self.setup_defaults()
sale, = self.Sale.create([{
'payment_term': self.payment_term,
'currency': self.company.currency.id,
'party': self.party.id,
'invoice_address': self.party.addresses[0].id,
'shipment_address': self.party.addresses[0].id,
'sale_date': Date.today(),
'company': self.company.id,
'invoice_method': 'shipment',
'shipment_method': 'order',
}])
self.SaleLine.create([{
'sale': sale,
'type': 'line',
'quantity': 10,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': Decimal('100'),
'description': 'Picked Item',
'product': self.product1.id
}, {
'sale': sale,
'type': 'line',
'quantity': 10,
'delivery_mode': 'pick_up',
'unit': self.uom,
'unit_price': Decimal('15'),
'description': 'Picked Item',
'product': self.product2.id
}])
with Transaction().set_context({'company': self.company.id}):
# Quote, Confirm and Process the order
self.Sale.quote([sale])
self.Sale.confirm([sale])
with self.assertRaises(UserError):
try:
self.Sale.process([sale])
                except UserError as exc:
self.assertNotEqual(
exc.message.find("out of stock"), -1)
# Product 2 must be in error message as it is out of
# stock
self.assertNotEqual(
exc.message.find(self.product2.rec_name), -1)
# Product 1 must not be in error message as it is in
# stock
self.assertEqual(
exc.message.find(self.product1.rec_name), -1)
raise
def suite():
"""
Define suite
"""
test_suite = trytond.tests.test_tryton.suite()
test_suite.addTests(
unittest.TestLoader().loadTestsFromTestCase(TestSale)
)
return test_suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
| bsd-3-clause | -362,725,666,256,599,700 | 38.156032 | 83 | 0.477857 | false |
jmosky12/huxley | scripts/assignment_db.py | 1 | 1357 | # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import os
from os import environ
from os.path import abspath, dirname
import sys
sys.path.append(abspath(dirname(dirname(__file__))))
os.environ['DJANGO_SETTINGS_MODULE'] = 'huxley.settings'
from huxley.core.models import Country, Committee, Assignment
from xlrd import open_workbook
s = open_workbook('Country Matrix.xlsx').sheet_by_index(0)
country_range = s.nrows-2
committee_range = 22
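# Matrix layout: row 0 flags SINGLE/DOUBLE delegations, rows 1-2 hold committee names, countries start at row 3 in column 0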
for row in range(3, country_range):
Country.objects.get_or_create(name=s.cell(row, 0).value, special=(True if row > 204 else False))
for col in range(1, committee_range):
Committee.objects.get_or_create(name=s.cell(1, col).value, full_name=s.cell(2, col).value, delegation_size=(1 if s.cell(0, col).value == 'SINGLE' else 2), special=(True if col > 7 else False))
for row in range(3, country_range):
for col in range(1, committee_range):
if s.cell(row, col).value:
print s.cell(1, col).value
print s.cell(2, col).value
print s.cell(row, 0).value
print s.cell(row,col).value
print
country = Country.objects.get(name=s.cell(row, 0).value)
committee = Committee.objects.get(name=s.cell(1, col).value)
assignment = Assignment(committee=committee, country=country)
assignment.save()
| bsd-3-clause | 3,738,094,941,541,147,600 | 34.710526 | 193 | 0.725866 | false |
Naught0/qtbot | cogs/osrs.py | 1 | 15138 | import json
from urllib.parse import quote_plus
from typing import Union
import discord
from discord.ext import commands
from utils import aiohttp_wrap as aw
from utils import dict_manip as dm
from utils.user_funcs import PGDB
class OSRS(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.db = PGDB(bot.pg_con)
self.aio_session = bot.aio_session
self.redis_client = bot.redis_client
self.items_uri = "https://rsbuddy.com/exchange/names.json"
# self.api_uri = 'https://api.rsbuddy.com/grandExchange?a=guidePrice&i={}'
self.prices_uri = "https://storage.googleapis.com/osb-exchange/summary.json"
self.player_uri = (
"http://services.runescape.com/m=hiscore_oldschool/index_lite.ws?player={}"
)
self.player_click_uri = "http://services.runescape.com/m=hiscore_oldschool/hiscorepersonal.ws?user1={}"
self.skills = [
"Overall",
"Attack",
"Defense",
"Strength",
"Hitpoints",
"Ranged",
"Prayer",
"Magic",
"Cooking",
"Woodcutting",
"Fletching",
"Fishing",
"Firemaking",
"Crafting",
"Smithing",
"Mining",
"Herblore",
"Agility",
"Thieving",
"Slayer",
"Farming",
"Runecrafting",
"Hunter",
"Construction",
"Clue (Easy)",
"Clue (Medium)",
"Clue (All)",
"Bounty Hunter: Rogue",
"Bounty Hunter: Hunter",
"Clue (Hard)",
"LMS",
"Clue (Elite)",
"Clue (Master)",
]
self.statmoji = {
"attack": ":dagger:",
"strength": ":fist:",
"defense": ":shield:",
"ranged": ":bow_and_arrow:",
"prayer": ":pray:",
"magic": ":sparkles:",
"runecrafting": ":crystal_ball:",
"construction": ":house:",
"hitpoints": ":heart:",
"agility": ":runner:",
"herblore": ":herb:",
"thieving": ":spy:",
"crafting": ":hammer_pick:",
"fletching": ":cupid:",
"slayer": ":skull_crossbones:",
"hunter": ":feet:",
"mining": ":pick:",
"fishing": ":fish:",
"cooking": ":cooking:",
"firemaking": ":fire:",
"woodcutting": ":deciduous_tree:",
"farming": ":corn:",
}
        self.user_missing = "Please supply a username or save one first with the user command."
self.user_not_exist = "Couldn't find a user matching {}"
self.color = discord.Color.dark_gold()
with open("data/item-data.json") as f:
self.item_data = json.load(f)
@staticmethod
def get_level(stat: str) -> int:
"""Helps parse player level from strings that look like 0,0,0"""
return int(stat.split(",")[1])
def calc_combat(self, user_info: dict) -> str:
"""Helper method which returns the player's combat level
Formula here: http://oldschoolrunescape.wikia.com/wiki/Combat_level"""
at = self.get_level(user_info["Attack"])
st = self.get_level(user_info["Strength"])
de = self.get_level(user_info["Defense"])
hp = self.get_level(user_info["Hitpoints"])
rn = self.get_level(user_info["Ranged"])
mg = self.get_level(user_info["Magic"])
pr = self.get_level(user_info["Prayer"])
base = 0.25 * (de + hp + (pr // 2))
melee = 0.325 * (at + st)
        ranged = 0.325 * ((rn // 2) + rn)
        mage = 0.325 * ((mg // 2) + mg)
        return str(int(base + max(melee, ranged, mage)))
async def get_user_info(self, username: str) -> Union[dict, None]:
"""Helper method to see whether a user exists, if so, retrieves the data and formats it in a dict
returns None otherwise"""
user_info = await aw.aio_get_text(
self.aio_session, self.player_uri.format(quote_plus(username))
)
if user_info is None:
return None
# Player data is returned like so:
# Rank, Level, XP
# For clues, LMS, and Bounty Hunter it's:
# Rank, Score
# -1's denote no rank or xp
return dict(zip(self.skills, user_info.split()))
@commands.group(
name="osrs", aliases=["hiscores", "hiscore", "rs"], invoke_without_command=True
)
async def _osrs(self, ctx, *, username: str = None):
"""Get information about your OSRS stats"""
image = None
if username is None:
username = await self.db.fetch_user_info(ctx.author.id, "osrs_name")
image = await self.db.fetch_user_info(ctx.author.id, "osrs_pic")
# No users found
if not username:
return await ctx.error(self.user_missing)
# User doesn't exist
user_info = await self.get_user_info(username)
if user_info is None:
return await ctx.error(self.user_not_exist.format(username))
# Create embed
em = discord.Embed(
title=f":bar_chart: {username}",
url=self.player_click_uri.format(quote_plus(username)),
color=self.color,
)
# See get_user_info for why things are wonky and split like this
overall = user_info["Overall"].split(",")
em.add_field(
name="Combat Level", value=self.calc_combat(user_info), inline=False
)
em.add_field(name="Total Level", value=f"{int(overall[1]):,}")
em.add_field(name="Overall Rank", value=f"{int(overall[0]):,}")
# Set image if one exists & if the player == the author
if image:
em.set_image(url=image)
await ctx.send(embed=em)
@_osrs.command()
async def user(self, ctx, *, username: str):
"""Save your OSRS username so that you don't have to supply it later"""
await self.db.insert_user_info(ctx.author.id, "osrs_name", username)
await ctx.success(f"Added {username} ({ctx.author.display_name}) to database!")
@_osrs.command()
async def rmuser(self, ctx):
"""Remove your OSRS username from the database"""
await self.db.remove_user_info(ctx.author.id, "osrs_name")
await ctx.success(f"Removed username from the database.")
@_osrs.command(aliases=["avatar", "pic"])
async def picture(self, ctx, *, url: str):
"""Add a custom picture of your OSRS character to appear in the osrs command
(Only when called by you)"""
await self.db.insert_user_info(ctx.author.id, "osrs_pic", url)
await ctx.success(f"Added picture successfully")
@_osrs.command(aliases=["rmavatar", "rmpic"])
async def rmpicture(self, ctx):
"""Remove your custom OSRS picture from the database"""
await self.db.remove_user_info(ctx.author.id, "osrs_pic")
await ctx.success(f"Removed picture.")
@_osrs.command(aliases=["clues", "clu", "cluescroll", "cluescrolls"])
async def clue(self, ctx, *, username: str = None):
"""Get your clue scroll counts & ranks"""
if username is None:
username = await self.db.fetch_user_info(ctx.author.id, "osrs_name")
if not username:
return await ctx.error(self.user_missing)
user_info = await self.get_user_info(username)
if user_info is None:
return await ctx.error(self.user_not_exist.format(username))
em = discord.Embed(
title=f":scroll: {username}'s clues",
url=self.player_click_uri.format(quote_plus(username)),
color=self.color,
)
for item in user_info:
if {"clue"} & set(item.lower().split()):
v = user_info[item].split(",")
# Handle no rank
if v == ["-1", "-1"]:
v = ["n/a", "0"]
em.add_field(name=item, value=f"Rank: {v[0]} ({v[1]} clues)")
# Cast to int for str formatting otherwise
else:
v = [int(x) for x in v]
em.add_field(name=item, value=f"Rank: {v[0]:,} ({v[1]:,} clues)")
# Now to swap Clue (All) to the first field
overall = em._fields.pop(2)
em._fields.insert(0, overall)
await ctx.send(embed=em)
@_osrs.command(aliases=["cb"])
async def combat(self, ctx, *, username: str = None):
"""Check the combat stats of yourself or someone else"""
if username is None:
username = await self.db.fetch_user_info(ctx.author.id, "osrs_name")
if not username:
return await ctx.error(self.user_missing)
user_info = await self.get_user_info(username)
if user_info is None:
return await ctx.error(self.user_not_exist.format(username))
em = discord.Embed(
title=f":right_facing_fist::left_facing_fist: {username}'s Combat Stats",
url=self.player_click_uri.format(quote_plus(username)),
color=self.color,
)
col1 = [
f":crossed_swords: Combat `{self.calc_combat(user_info)}`",
f':heart: Hitpoints `{self.get_level(user_info["Hitpoints"])}`',
f':dagger: Attack `{self.get_level(user_info["Attack"])}`',
f':fist: Strength `{self.get_level(user_info["Strength"])}`',
]
col2 = [
f':shield: Defence `{self.get_level(user_info["Defense"])}`',
f':bow_and_arrow: Range `{self.get_level(user_info["Ranged"])}`',
f':sparkles: Magic `{self.get_level(user_info["Magic"])}`',
f':pray: Prayer `{self.get_level(user_info["Prayer"])}`',
]
em.add_field(name="\u200B", value="\n".join(col1))
em.add_field(name="\u200B", value="\n".join(col2))
await ctx.send(embed=em)
@_osrs.command(aliases=["stats"])
async def stat(self, ctx, username: str, stat_name: str):
"""Get a specific stat for a user
Note:
Be sure to wrap the username in quotation marks if it has spaces
Username is required here per the limitations of Discord, sorry"""
user_info = await self.get_user_info(username)
if user_info is None:
return await ctx.error(self.user_not_exist.format(username))
# If input doesn't match exactly
# Hopefully this handles common abbreviations (but I'm nearly sure it won't)
if stat_name.lower() not in self.statmoji:
stat_name = dm.get_closest(self.statmoji, stat_name)
em = discord.Embed(
title=f"{self.statmoji[stat_name.lower()]} {stat_name.title()} - {username}",
url=self.player_click_uri.format(quote_plus(username)),
color=self.color,
)
labels = ["Rank", "Level", "XP"]
stat_list = user_info[stat_name.title()].split(",")
for idx, label in enumerate(labels):
em.add_field(name=label, value=f"{int(stat_list[idx]):,}")
await ctx.send(embed=em)
@_osrs.command(name="ge", invoke_without_command=True)
async def ge_search(self, ctx, *, query):
""" Get the buying/selling price and quantity of an OSRS item """
# All items in the JSON are lowercase
item = query.lower()
# Checks whether item in json file
if item in self.item_data:
item_id = self.item_data[item]["id"]
# Uses closest match to said item if no exact match
else:
item = dm.get_closest(self.item_data, item)
item_id = self.item_data[item]["id"]
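        # Guide prices are cached in Redis for five minutes to limit calls to the RSBuddy API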
if await self.redis_client.exists("osrs_prices"):
item_prices = json.loads((await self.redis_client.get("osrs_prices")))
else:
item_prices = await aw.aio_get_json(self.aio_session, self.prices_uri)
if not item_prices:
return await ctx.error(
"The RSBuddy API is dead yet again. Try again in a bit."
)
await self.redis_client.set(
"osrs_prices", json.dumps(item_prices), ex=(5 * 60)
)
# Create pretty embed
em = discord.Embed(title=item.capitalize(), color=self.color)
em.url = f"https://rsbuddy.com/exchange?id={item_id}"
em.set_thumbnail(
url=f"https://services.runescape.com/m=itemdb_oldschool/obj_big.gif?id={item_id}"
)
em.add_field(
name="Buying Price", value=f'{item_prices[item_id]["buy_average"]:,}gp'
)
em.add_field(
name="Selling Price", value=f'{item_prices[item_id]["sell_average"]:,}gp'
)
em.add_field(
name="Buying Quantity", value=f'{item_prices[item_id]["buy_quantity"]:,}/hr'
)
em.add_field(
name="Selling Quantity",
value=f'{item_prices[item_id]["sell_quantity"]:,}/hr',
)
await ctx.send(embed=em)
@commands.command(name="geupdate")
@commands.is_owner()
async def _update(self, ctx):
"""A command to update the OSRS GE item list"""
new_items = await aw.aio_get_json(self.aio_session, self.items_uri)
# This 503's a lot, if not every time, not sure yet
if new_items is None:
em = discord.Embed(
title=":no_entry_sign: RS buddy is serving up a 503!",
color=discord.Color.dark_red(),
)
return await ctx.send(embed=em)
if len(new_items) == len(self.item_data):
em = discord.Embed(
title=":no_entry_sign: Items already up-to-date boss!",
color=discord.Color.dark_red(),
)
return await ctx.send(embed=em)
filtered_items = {}
for item in new_items:
filtered_items[new_items[item]["name"].lower()] = {
"id": item,
"name": new_items[item]["name"],
}
with open("data/item-data.json", "w") as f:
json.dump(filtered_items, f, indent=2)
        num_updated = len(filtered_items) - len(self.item_data)
        self.item_data = filtered_items
await ctx.success(f"Updated `{num_updated}` item(s).")
# The osbuddy api just 503s every time, keeping this commented in the hopes that it works in the future
# em = discord.Embed(title=':white_check_mark: Check here',
# url='https://rsbuddy.com/exchange/names.json',
# color=self.color)
# em.description = ("```py\n"
# "data = requests.get('https://rsbuddy.com/exchange/names.json').json() d = {}\n\n"
# "for item in data:\n"
# "\td[data[item]['name'].lower()] = {'id': item, 'name': data[item]['name']}"
# "```")
# await ctx.send(embed=em)
def setup(bot):
bot.add_cog(OSRS(bot))
| mit | 6,672,994,157,585,110,000 | 36.93985 | 111 | 0.544854 | false |
ndp-systemes/odoo-addons | account_invoice_dunning/models/dunning.py | 1 | 9802 | # -*- coding: utf8 -*-
#
# Copyright (C) 2017 NDP Systèmes (<http://www.ndp-systemes.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
#
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from openerp import fields, models, api, _, osv
class AccountInvoiceRelanceConfig(models.Model):
_name = 'account.invoice.dunning.type'
def _get_domain_mail_template(self):
return [('model_id', '=', self.env.ref('account_invoice_dunning.model_account_invoice_dunning').id)]
name = fields.Char(required=True, string=u"Name")
number = fields.Integer(u"Dunning number", default=1, required=True)
sequence_id = fields.Many2one('ir.sequence', string=u"Sequence")
report_id = fields.Many2one('ir.actions.report.xml', u"Report", domain=[('model', '=', 'account.invoice.dunning')],
required=True)
mail_template_id = fields.Many2one('email.template', u"Mail Template", domain=_get_domain_mail_template,
required=True)
company_id = fields.Many2one('res.company', u"Company", groups='base.group_multi_company',
default=lambda self: self.env.user.company_id)
@api.multi
def _get_dunning_name(self):
return self.ensure_one().sequence_id and self.sequence_id._next() or ""
_sql_constraints = [
('dunning_number_unique', 'unique (number, company_id)', u"The Dunning number must be unique per company !"),
]
class AccountInvoiceRelance(models.Model):
_name = 'account.invoice.dunning'
name = fields.Char(u"Name")
date_done = fields.Date(u"Dunning date done", readonly=True)
state = fields.Selection([
('draft', u"Draft"),
        ('send', u"Sent"),
('cancel', u"Cancel"),
('done', u"Done")], string=u"State", readonly=True, default='draft')
partner_id = fields.Many2one('res.partner', u"Partner")
company_id = fields.Many2one('res.company', u"Company", groups='base.group_multi_company',
default=lambda self: self.env.user.company_id)
dunning_type_id = fields.Many2one('account.invoice.dunning.type', string=u"Dunning Type")
report_id = fields.Many2one('ir.actions.report.xml', u"Report", related='dunning_type_id.report_id', readonly=True)
sequence_id = fields.Many2one('ir.sequence', related='dunning_type_id.sequence_id', readonly=True)
mail_template_id = fields.Many2one('email.template', u"Mail Template",
related='dunning_type_id.mail_template_id', readonly=True)
invoice_ids = fields.Many2many('account.invoice', string=u"Invoices")
amount_total_signed = fields.Float(u"Total", compute='_compute_amounts')
residual_signed = fields.Float(u"Residual", compute='_compute_amounts')
@api.multi
def _compute_amounts(self):
for rec in self:
amount = 0
residual = 0
for invoice in rec.invoice_ids:
amount += invoice.amount_total_signed
residual += invoice.residual_signed
rec.amount_total_signed = amount
rec.residual_signed = residual
@api.model
def _get_existing_dunning(self, invoice_id, dunning_config_id):
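        # Reuse an open draft dunning of the same type/partner/company so invoices get grouped instead of duplicated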
return self.search(self._get_existing_dunning_domain(invoice_id, dunning_config_id))
@api.multi
def action_done(self):
self.write({'state': 'done'})
@api.multi
def action_cancel(self):
self.write({'state': 'cancel'})
@api.model
def _get_existing_dunning_domain(self, invoice_id, dunning_type_id):
return [('partner_id', '=', invoice_id.partner_id.id),
('dunning_type_id', '=', dunning_type_id.id),
('company_id', '=', invoice_id.company_id.id),
('state', '=', 'draft')
]
@api.multi
def action_print_dunning(self):
self.ensure_one()
res = self.env['report'].with_context(active_ids=self.ids).get_action(self, self.report_id.report_name)
self.write({
'state': 'send',
'date_done': fields.Date.today(),
})
return res
@api.multi
def action_send_mail(self):
self.ensure_one()
compose_form = self.env.ref('mail.email_compose_message_wizard_form', False)
ctx = dict(
default_model=self._name,
default_res_id=self.id,
default_composition_mode='comment',
default_template_id=self.mail_template_id.ensure_one().id,
)
ctx.update(self._default_dict_send_mail_action())
return {
'name': _(u"Send a message"),
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'mail.compose.message',
'views': [(compose_form.id, 'form')],
'view_id': compose_form.id,
'target': 'new',
'context': ctx,
}
def _default_dict_send_mail_action(self):
return {'final_dunning_state': 'send'}
@api.multi
def _get_action_view(self):
if len(self.ids) > 1:
ctx = dict(self.env.context,
search_default_group_partner_id=True,
search_default_group_dunning_type_id=True)
res = {
'name': _(u"Dunning"),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'account.invoice.dunning',
'type': 'ir.actions.act_window',
'context': ctx,
'domain': [('id', 'in', self.ids)]
}
else:
res = {
'name': self.name,
'view_type': 'form',
'view_mode': 'form',
'view_id': self.env.ref("account_invoice_dunning.invoice_dunning_form_view").id,
'res_model': 'account.invoice.dunning',
'res_id': self.id,
'type': 'ir.actions.act_window',
}
return res
class AccountInvoice(models.Model):
_inherit = 'account.invoice'
invoice_dunning_ids = fields.Many2many('account.invoice.dunning', string=u"Dunnings")
    dunning_number = fields.Integer(u"Number of dunnings sent", compute='_compute_dunning_number')
@api.multi
def _compute_dunning_number(self):
for rec in self:
rec.dunning_number = len(rec.invoice_dunning_ids.filtered(lambda it: it.state == 'send'))
@api.multi
def action_create_dunning(self):
result = self.env['account.invoice.dunning']
for rec in self:
rec._validate_to_create_dunning()
next_dunning_type = rec._get_next_dunning_type()
if next_dunning_type:
existing_dunning = self.env['account.invoice.dunning']._get_existing_dunning(rec, next_dunning_type)
if existing_dunning:
existing_dunning.invoice_ids = [(4, rec.id, {})]
else:
existing_dunning = self.env['account.invoice.dunning'].create(
rec._prepare_invoice_dunning(next_dunning_type))
result |= existing_dunning
else:
rec._no_next_dunning()
return result._get_action_view()
@api.multi
def _no_next_dunning(self):
raise osv.osv.except_orm(_(u"Error !"), _(u"No next Dunning Type for the invoice %s" % self.number))
@api.multi
def _validate_to_create_dunning(self):
if self.state != 'open':
            raise osv.osv.except_orm(_(u"Error !"), _(u"You can only create a Dunning on an open invoice"))
if self.type != 'out_invoice':
raise osv.osv.except_orm(_(u"Error !"), _(u"You can only create a Dunning on an Sale Invoice"))
@api.multi
def _get_next_dunning_type(self):
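        # The next dunning is the lowest-numbered type of this company not yet sent for this invoice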
dunning_type_ids = self.invoice_dunning_ids.filtered(lambda it: it.state == 'send').mapped('dunning_type_id')
return self.env['account.invoice.dunning.type'].search(
[('id', 'not in', dunning_type_ids.ids), ('company_id', '=', self.company_id.id)],
order='number asc', limit=1)
@api.multi
def _prepare_invoice_dunning(self, dunning_type_id):
self.ensure_one()
return {
'dunning_type_id': dunning_type_id.id,
'invoice_ids': [(4, self.id, {})],
'partner_id': self.partner_id.id,
'company_id': self.company_id.id,
'name': dunning_type_id._get_dunning_name()
}
class MailComposeMessage(models.TransientModel):
_inherit = 'mail.compose.message'
@api.multi
def send_mail(self):
context = self.env.context or {}
if context.get('default_model') == 'account.invoice.dunning' \
and context.get('default_res_id', -1) > 0 \
and context.get('final_dunning_state'):
self.env['account.invoice.dunning'].browse(context['default_res_id']).write({
'state': context.get('final_dunning_state'),
'date_done': fields.Date.today(),
})
return super(MailComposeMessage, self).send_mail()
| agpl-3.0 | -1,621,267,915,577,783,600 | 40.180672 | 120 | 0.584124 | false |
jleivaizq/freesquare | freesquare/geo/feeds/gadm.py | 1 | 4921 | # -*- coding: utf-8 -*-
import logging
import os
import zipfile
from collections import defaultdict
from django.contrib.gis.gdal import DataSource
from .utils import download
from ..models import Region
logger = logging.getLogger(__name__)
LEVELS = [r[1] for r in Region.REGION_LEVELS]
LOCAL_DICT = {
'Catalu\xf1a': 'Catalonia'
}
REGIONS_TO_IGNORE = ('las palmas', 'ceuta y melilla')
class Gadm():
"""
Download and import Region Borders from Global Administrative Areas site
"""
# Static attributes
app_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + '/../..')
url = 'http://data.biogeo.ucdavis.edu/data/gadm2/shp/'
feature_cache = defaultdict(dict)
def __init__(self, country_code, max_level=0):
if country_code:
self.country_code = country_code.upper()
self.max_level = int(max_level)
def find_region(self, iso_code, raw_name, level, parent_names):
logger.debug('Searching for region: raw_name => {} iso_code => {} level => {}'
.format(raw_name, iso_code, level))
result = None
if raw_name in LOCAL_DICT:
name = LOCAL_DICT[raw_name].lower()
else:
name = raw_name.lower()
if level == 3:
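            # No level 3 lookup is done: fall back to the level 2 parent region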
return self.find_region(iso_code,
parent_names[-1],
level - 1,
parent_names[:-1])
if name in REGIONS_TO_IGNORE:
return None
        if name in self.feature_cache[level]:
            result = self.feature_cache[level][name]
else:
if parent_names:
parent = self.find_region(iso_code, parent_names[-1], level - 1, parent_names[:-1])
candidates = Region.objects.filter(level=min(level, Region.CITY))
if parent:
                    candidates = candidates.filter(parent=parent)
candidates = candidates.filter(name__iexact=name)
else:
candidates = Region.objects.filter(level=Region.COUNTRY)\
.filter(iso3=iso_code).filter(name__iexact=name)
if candidates:
result = candidates[0]
self.feature_cache[level][name] = result
return result
def _load_country_level_border(self, data_dir, name, level):
try:
datasource = DataSource(os.path.join(data_dir, '{0}{1}.shp'.format(name, level)))
layer = datasource[0]
logger.info('Loading boundaries for {} ({})'.format(name, LEVELS[level]))
for feature in layer:
code = feature.get('ISO')
if level:
region_names = [feature.get('NAME_{0}'.format(l)) for l in range(level + 1)]
else:
region_names = [feature.get('NAME_ISO')]
if 'n.a.' not in region_names[-1]:
region = self.find_region(code, region_names[-1], level, region_names[:-1])
if feature.geom:
if region:
region.geom = feature.geom
region.save()
#logger.debug('Saved border for region: {}'.format(region))
else:
others = Region.objects.filter(name__iexact=region_names[-1])
if others:
logger.warning('Region not found for {} but {}'
.format(region_names, others))
except:
logger.exception('Could not load border of '
'level {} for country {}'.format(level, name))
def load(self, country_code):
try:
data_dir = os.path.join(self.app_dir, 'geo/data/gadm/{}'.format(country_code))
filename = "{}_adm.zip".format(country_code)
name, _ = filename.rsplit('.', 1)
# Download the zip file from thematicmapping
download(data_dir, filename, self.url + filename)
file_ = open(os.path.join(data_dir, filename), mode='rb')
zipfile_ = zipfile.ZipFile(file_)
zipfile_.extractall(data_dir)
zipfile_.close()
for level in range(self.max_level + 1):
self._load_country_level_border(data_dir, name, level)
except:
logger.exception('Could not load borders of country {}'.format(country_code))
def run(self):
if hasattr(self, 'country_code'):
for code in self.country_code.split(','):
self.load(code)
else:
for country in Region.objects.filter(level=Region.COUNTRY):
self.load(country.iso3)
| mit | 6,486,755,235,228,144,000 | 33.412587 | 99 | 0.527535 | false |
tensorflow/tfx | tfx/tools/cli/testdata/test_pipeline_local_1.py | 1 | 2688 | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Chicago taxi example using TFX on Local orchestrator."""
import os
from typing import Text
from absl import logging
from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen
from tfx.components.schema_gen.component import SchemaGen
from tfx.components.statistics_gen.component import StatisticsGen
from tfx.orchestration import metadata
from tfx.orchestration import pipeline
from tfx.orchestration.local import local_dag_runner
_pipeline_name = 'chicago_taxi_local'
_taxi_root = os.path.join(os.environ['HOME'], 'taxi')
_data_root = os.path.join(_taxi_root, 'data', 'simple')
_tfx_root = os.path.join(os.environ['HOME'], 'tfx')
_pipeline_root = os.path.join(_tfx_root, 'pipelines', _pipeline_name)
# Sqlite ML-metadata db path.
_metadata_path = os.path.join(_tfx_root, 'metadata', _pipeline_name,
'metadata.db')
def _create_pipeline(pipeline_name: Text, pipeline_root: Text, data_root: Text,
metadata_path: Text) -> pipeline.Pipeline:
"""Implements the chicago taxi pipeline with TFX."""
# Brings data into the pipeline or otherwise joins/converts training data.
example_gen = CsvExampleGen(input_base=data_root)
# Computes statistics over data for visualization and example validation.
statistics_gen = StatisticsGen(examples=example_gen.outputs['examples'])
# Generates schema based on statistics files.
infer_schema = SchemaGen(statistics=statistics_gen.outputs['statistics'])
return pipeline.Pipeline(
pipeline_name=pipeline_name,
pipeline_root=pipeline_root,
components=[example_gen, statistics_gen, infer_schema],
enable_cache=True,
metadata_connection_config=metadata.sqlite_metadata_connection_config(
metadata_path),
additional_pipeline_args={},
)
if __name__ == '__main__':
logging.set_verbosity(logging.INFO)
local_dag_runner.LocalDagRunner().run(
_create_pipeline(
pipeline_name=_pipeline_name,
pipeline_root=_pipeline_root,
data_root=_data_root,
metadata_path=_metadata_path))
| apache-2.0 | 2,677,516,085,262,274,600 | 37.956522 | 79 | 0.725446 | false |
zoni/Rocket | tests/test_monitor.py | 1 | 4233 | # -*- coding: utf-8 -*-
# This file is part of the Rocket Web Server
# Copyright (c) 2010 Timothy Farrell
#
# See the included LICENSE.txt file for licensing details.
# Import System Modules
import time
import types
import socket
import unittest
import threading
try:
from queue import Queue
except ImportError:
from Queue import Queue
# Import Custom Modules
from rocket import monitor, listener, connection, threadpool, worker
# Constants
SERVER_PORT = 45454
# Define Tests
class MonitorTest(unittest.TestCase):
def setUp(self):
global SERVER_PORT
SERVER_PORT += 1
self.active_queue = Queue()
self.monitor_queue = Queue()
self.timeout = 10
self.interface = ("127.0.0.1", SERVER_PORT)
self.min_threads = 10
self.max_threads = 20
w = worker.Worker
self.tp = threadpool.ThreadPool(w,
dict(),
self.active_queue,
self.monitor_queue,
self.min_threads,
self.max_threads)
def _waitForEqual(self, a, b):
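        # Poll for up to ~5 seconds (20 attempts, 0.25s apart); a and b may be plain values or callables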
attempts = 20
while attempts > 0:
if isinstance(a, (types.FunctionType, types.MethodType)):
_a = a()
else:
_a = a
if isinstance(b, (types.FunctionType, types.MethodType)):
_b = b()
else:
_b = b
if _a == _b:
return True
time.sleep(0.25)
attempts -= 1
return False
def testNotActive(self):
self.monitor = monitor.Monitor(self.monitor_queue,
self.active_queue,
self.timeout,
self.tp)
self.assert_(not self.monitor.active)
def testMonitor(self):
self.testNotActive() # create self.monitor
# Start the listener
self.listener = listener.Listener(self.interface,
5,
self.active_queue)
self.listener.start()
# Create a socket connecting to listener's port
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(15)
sock.connect(self.interface)
# Verify that listener put it in the active queue
self._waitForEqual(self.active_queue.qsize, 1)
self.assertEqual(self.active_queue.qsize(), 1)
self.monitor.start()
# Put it in the monitor queue
conn = self.active_queue.get()
conn = connection.Connection(*conn)
self.monitor_queue.put(conn)
self._waitForEqual(self.monitor_queue.qsize, 1)
self.assertEqual(self.monitor_queue.qsize(), 1)
# Wait for the monitor queue to see it
self._waitForEqual(self.monitor_queue.qsize, 0)
self.assertEqual(self.monitor_queue.qsize(), 0)
# Send something to the socket to see if it gets put back on the active
# queue.
sock.send("test data")
sock.close()
# Give monitor a chance to see it
self._waitForEqual(self.active_queue.qsize, 1)
# Finally check to make sure that it's on the active queue
self.assertEqual(self.active_queue.qsize(), 1)
conn2 = self.active_queue.get()
self.assert_(conn is conn2)
def tearDown(self):
try:
self.listener.ready = False
self.listener.join(5)
self.assert_(not self.listener.isAlive())
except:
pass
try:
del self.listener
except:
pass
try:
self.monitor.stop()
except:
pass
try:
del self.monitor
except:
pass
if __name__ == '__main__':
#import logging
#log = logging.getLogger('Rocket')
#log.setLevel(logging.DEBUG)
#fmt = logging.Formatter('%(levelname)s:%(name)s:%(message)s')
#h = logging.StreamHandler()
#h.setFormatter(fmt)
#log.addHandler(h)
unittest.main()
| mit | -7,623,677,626,790,119,000 | 27.993151 | 79 | 0.53768 | false |
mverwe/UserCode | UserCode/PixelTrackletAnalyzer/test/WGM20150527Setup/processRecoHlt0T-EPOS.py | 1 | 4718 | import FWCore.ParameterSet.Config as cms
process = cms.Process("myRECO")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.load("Configuration.StandardSequences.Services_cff")
process.load("Configuration.StandardSequences.MagneticField_0T_cff")
process.load("Configuration.StandardSequences.GeometryDB_cff")
process.load("Configuration.StandardSequences.Reconstruction_cff")
process.load("SimGeneral.HepPDTESSource.pythiapdt_cfi")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("RecoVertex.BeamSpotProducer.BeamSpot_cfi")
# Timing service
process.Timing = cms.Service("Timing")
# MC Globaltag for 2015 dN/deta analysis
process.GlobalTag.globaltag = 'MCRUN2_74_V8::All'
process.pixelVertexFromClusters = cms.EDProducer('PixelVertexProducerClusters')
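# Reconstruct the primary vertex from pixel clusters (standard tracking is not run in this 0 T setup)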
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.source = cms.Source("PoolSource",
# replace 'myfile.root' with the source file you want to use
fileNames = cms.untracked.vstring(
# 'file:RelVal_MinBias_13TeV_28478DD9-99A9-E411-891C-0025905B861C.root'
#'file:step3_RAW2DIGI_L1Reco_RECO.root'
'file:step3_EPOS_100k.root'
#'RelValMinBias_314_STARTUP31X_V2-v1-Reco.root'
)
)
# Centrality
process.load("RecoHI.HiCentralityAlgos.pACentrality_cfi")
process.pACentrality.producePixelTracks = cms.bool(False)
#process.pACentrality.produceTracks = cms.bool(False)
# Centrality Binning
process.load("RecoHI.HiCentralityAlgos.CentralityBin_cfi")
process.centralityBin.Centrality = cms.InputTag("pACentrality")
process.centralityBin.centralityVariable = cms.string("HFtowers")
process.centralityBin.nonDefaultGlauberModel = cms.string("HydjetDrum5")
# Add the HeavyIon Record: it is for PbPb cent binning, so we should not
# trust the centrality bin and only use the variables from the centrality
# provider
process.GlobalTag.toGet.extend([
cms.PSet(record = cms.string("HeavyIonRcd"),
tag = cms.string("CentralityTable_HFtowers200_HydjetDrum5_v740x01_mc"),
connect = cms.untracked.string("frontier://FrontierProd/CMS_COND_31X_PHYSICSTOOLS"),
label = cms.untracked.string("HFtowersHydjetDrum5")
),
])
process.ana = cms.EDAnalyzer('PixelHitAnalyzer',
vertexSrc = cms.vstring('pixelVertexFromClusters'),
trackSrc = cms.untracked.InputTag('generalTracks'),
doTracking = cms.untracked.bool(False),
doCentrality = cms.untracked.bool(True)
)
process.anaStrip = cms.EDAnalyzer('StripHitAnalyzer',
vertexSrc = cms.vstring('pixelVertexFromClusters'),
trackSrc = cms.untracked.InputTag('generalTracks'),
doTracking = cms.untracked.bool(False),
doCentrality = cms.untracked.bool(True),
RecHitCollections = cms.VInputTag(
# cms.InputTag('siStripMatchedRecHits','rphiRecHit'),
cms.InputTag('siStripMatchedRecHits','matchedRecHit')
)
)
#process.SiStripRecHitsAnalyzer = cms.EDAnalyzer('SiStripRecHitsAnalyzer',
# RecHitCollections = cms.VInputTag( cms.InputTag('siStripMatchedRecHits','rphiRecHit'),
# cms.InputTag('siStripMatchedRecHits','stereoRecHit')
# )
#)
#process.load("HeavyIonsAnalysis.EventAnalysis.hievtanalyzer_mc_cfi")
process.load("HLTrigger.HLTanalyzers.HLTBitAnalyser_cfi")
process.hltbitanalysis.UseTFileService = cms.untracked.bool(True)
process.hltanalysis = process.hltbitanalysis.clone(
l1GtReadoutRecord = cms.InputTag("gtDigis"),
l1GctHFBitCounts = cms.InputTag("gctDigis"),
l1GctHFRingSums = cms.InputTag("gctDigis"),
l1extramu = cms.string('l1extraParticles'),
l1extramc = cms.string('l1extraParticles'),
hltresults = cms.InputTag("TriggerResults","","HLT"),
)
process.TFileService = cms.Service('TFileService',
fileName = cms.string('PixelTree-EPOS.root')
)
process.analyze = cms.Path(
process.siPixelRecHits*
process.siStripMatchedRecHits*
process.pixelVertexFromClusters*
# process.hiSelectedVertex*
process.pACentrality*
process.centralityBin*
process.hltanalysis*
# process.hiEvtAnalyzer*
process.ana*
process.anaStrip
# process.SiStripRecHitsAnalyzer
)
| cc0-1.0 | 142,290,323,717,301,280 | 40.752212 | 98 | 0.665536 | false |
danirus/django-comments-xtd | django_comments_xtd/tests/test_api_views.py | 1 | 5084 | from __future__ import unicode_literals
from datetime import datetime
import json
try:
from unittest.mock import patch
except ImportError:
from mock import patch
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.test import TestCase
from django_comments_xtd import django_comments
from django_comments_xtd import get_model
from django_comments_xtd.conf import settings
from django_comments_xtd.tests.models import Article
from django_comments_xtd.tests.utils import post_comment
app_model_options_mock = {
'tests.article': {
'who_can_post': 'users'
}
}
XtdComment = get_model()
class CommentCreateTestCase(TestCase):
def setUp(self):
patcher = patch('django_comments_xtd.views.send_mail')
self.mock_mailer = patcher.start()
self.article = Article.objects.create(
title="October", slug="october", body="What I did on October...")
self.form = django_comments.get_form()(self.article)
@patch.multiple('django_comments_xtd.conf.settings',
COMMENTS_XTD_CONFIRM_EMAIL=False)
def test_post_returns_201_response(self):
data = {"name": "Bob", "email": "[email protected]",
"followup": True, "reply_to": 0, "level": 1, "order": 1,
"comment": "Es war einmal eine kleine...",
"honeypot": ""}
data.update(self.form.initial)
response = post_comment(data)
self.assertEqual(response.status_code, 201)
data = json.loads(response.rendered_content)
self.assertTrue('id' in data)
self.assertEqual(data['id'], 1) # id of the new created comment.
def test_post_returns_2xx_response(self):
data = {"name": "Bob", "email": "[email protected]",
"followup": True, "reply_to": 0, "level": 1, "order": 1,
"comment": "Es war einmal eine kleine...",
"honeypot": ""}
data.update(self.form.initial)
response = post_comment(data)
self.assertEqual(response.status_code, 204)
self.assertEqual(self.mock_mailer.call_count, 1)
def test_post_returns_4xx_response(self):
# It uses an authenticated user, but the user has no mail address.
self.user = User.objects.create_user("bob", "", "pwd")
data = {"name": "", "email": "",
"followup": True, "reply_to": 0, "level": 1, "order": 1,
"comment": "Es war einmal eine kleine...",
"honeypot": ""}
data.update(self.form.initial)
response = post_comment(data, auth_user=self.user)
self.assertEqual(response.status_code, 400)
self.assertTrue('name' in response.data)
self.assertTrue('email' in response.data)
self.assertEqual(self.mock_mailer.call_count, 0)
@patch.multiple('django_comments_xtd.conf.settings',
COMMENTS_XTD_APP_MODEL_OPTIONS=app_model_options_mock)
def test_post_returns_unauthorize_response(self):
data = {"name": "Bob", "email": "[email protected]",
"followup": True, "reply_to": 0, "level": 1, "order": 1,
"comment": "Es war einmal eine kleine...",
"honeypot": ""}
data.update(self.form.initial)
response = post_comment(data)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.rendered_content, b'"User not authenticated"')
self.assertEqual(self.mock_mailer.call_count, 0)
def post_parent_comment(self):
article_ct = ContentType.objects.get(app_label="tests", model="article")
site1 = Site.objects.get(pk=1)
self.cm = XtdComment.objects.create(content_type=article_ct,
object_pk=self.article.id,
content_object=self.article,
site=site1,
comment="just a testing comment",
submit_date=datetime.now())
@patch.multiple('django_comments_xtd.conf.settings',
COMMENTS_XTD_MAX_THREAD_LEVEL=0,
COMMENTS_XTD_CONFIRM_EMAIL=False)
def test_post_reply_to_exceeds_max_thread_level_returns_400_code(self):
self.assertEqual(settings.COMMENTS_XTD_MAX_THREAD_LEVEL, 0)
self.assertEqual(XtdComment.objects.count(), 0)
self.post_parent_comment()
self.assertEqual(XtdComment.objects.count(), 1)
data = {"name": "Bob", "email": "[email protected]",
"followup": True,
"reply_to": self.cm.id, # This exceeds max thread level.
"comment": "Es war einmal eine kleine...",
"honeypot": ""}
data.update(self.form.initial)
response = post_comment(data)
self.assertEqual(XtdComment.objects.count(), 1) # Comment not added.
self.assertEqual(response.status_code, 400)
| bsd-2-clause | 7,305,543,762,599,095,000 | 42.827586 | 80 | 0.601298 | false |
jericksanjuan/lab-student-draft | lab_student_draft/students/models.py | 1 | 3490 | from django.conf import settings
from django.db import models
from django.db.models.signals import pre_save
from model_utils.models import TimeStampedModel
from model_utils import Choices
from labs.models import Lab
_PICKVAL = 100
# TODO: Get minimum_groups from settings
# TODO: Get maximum groups from settings
class Batch(TimeStampedModel):
minimum_groups = models.IntegerField(default=1)
maximum_groups = models.IntegerField(default=10)
class Meta:
verbose_name = "Batch"
verbose_name_plural = "Batches"
def __unicode__(self):
return u'{}-{}'.format(self.created.month, self.created.year)
class StudentGroup(TimeStampedModel):
batch = models.ForeignKey('Batch')
user = models.OneToOneField(settings.AUTH_USER_MODEL)
lab = models.ForeignKey(Lab, null=True, blank=True, related_name="assigned_set")
group_preferences = models.ManyToManyField(
Lab, through='GroupPreference', null=True, blank=True)
has_preference = models.BooleanField('Has Submitted Preference', default=False)
class Meta:
verbose_name = "Student Group"
verbose_name_plural = "Student Groups"
def __unicode__(self):
return u'{} group'.format(self.user)
def students(self):
return ', '.join([unicode(x) for x in self.student_set.all()])
class Student(models.Model):
student_group = models.ForeignKey('StudentGroup')
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
class Meta:
verbose_name = "Student"
verbose_name_plural = "Students"
def __unicode__(self):
return u'{} {}'.format(self.first_name, self.last_name)
class GroupPreference(models.Model):
student_group = models.ForeignKey('StudentGroup')
lab = models.ForeignKey(Lab)
preference = models.IntegerField(default=0)
class Meta:
verbose_name = "Group Preference"
verbose_name_plural = "Group Preferences"
unique_together = ('student_group', 'lab')
def __unicode__(self):
return u'{}={}-{}'.format(self.preference, self.student_group, self.lab)
class Selection(models.Model):
ITERATIONS = Choices('1', '2', '3')
lab = models.ForeignKey(Lab)
student_group = models.ForeignKey('StudentGroup')
phase = models.CharField(max_length=1, choices=ITERATIONS)
is_selected = models.BooleanField(default=False)
selection_score = models.IntegerField(default=0)
class Meta:
verbose_name = "Selection"
verbose_name_plural = "Selections"
unique_together = ('lab', 'student_group', 'phase')
def __unicode__(self):
return u'{}: {}<>{}, Phase {}'.format(
self.selection_score, self.lab,
self.student_group, self.phase)
@property
def score_color(self):
base_score = self.selection_score % 100
if base_score > 5:
return 'green'
else:
return 'yellow'
def update_selection_score(sender, instance, raw, *args, **kwargs):
if raw:
return
self = instance
if not (self.lab and self.student_group):
return
obj, _ = GroupPreference.objects.get_or_create(
lab=self.lab, student_group=self.student_group)
if self.is_selected:
score = _PICKVAL + obj.preference
else:
score = obj.preference
self.selection_score = score
pre_save.connect(update_selection_score, Selection, dispatch_uid='students.Selection')
| bsd-3-clause | -1,875,968,631,157,454,600 | 28.82906 | 86 | 0.659885 | false |
dceoy/fract | fract/model/base.py | 1 | 23500 | #!/usr/bin/env python
import json
import logging
import os
import signal
import time
from abc import ABCMeta, abstractmethod
from datetime import datetime
from math import ceil
from pathlib import Path
from pprint import pformat
import numpy as np
import pandas as pd
import yaml
from oandacli.util.logger import log_response
from v20 import Context, V20ConnectionError, V20Timeout
from .bet import BettingSystem
from .ewma import Ewma
from .kalman import Kalman
class APIResponseError(RuntimeError):
pass
class TraderCore(object):
def __init__(self, config_dict, instruments, log_dir_path=None,
quiet=False, dry_run=False):
self.__logger = logging.getLogger(__name__)
self.cf = config_dict
self.__api = Context(
hostname='api-fx{}.oanda.com'.format(
self.cf['oanda']['environment']
),
token=self.cf['oanda']['token']
)
self.__account_id = self.cf['oanda']['account_id']
self.instruments = (instruments or self.cf['instruments'])
self.__bs = BettingSystem(strategy=self.cf['position']['bet'])
self.__quiet = quiet
self.__dry_run = dry_run
if log_dir_path:
log_dir = Path(log_dir_path).resolve()
self.__log_dir_path = str(log_dir)
os.makedirs(self.__log_dir_path, exist_ok=True)
self.__order_log_path = str(log_dir.joinpath('order.json.txt'))
self.__txn_log_path = str(log_dir.joinpath('txn.json.txt'))
self._write_data(
yaml.dump(
{
'instrument': self.instruments,
'position': self.cf['position'],
'feature': self.cf['feature'],
'model': self.cf['model']
},
default_flow_style=False
).strip(),
path=str(log_dir.joinpath('parameter.yml')),
mode='w', append_linesep=False
)
else:
self.__log_dir_path = None
self.__order_log_path = None
self.__txn_log_path = None
self.__last_txn_id = None
self.pos_dict = dict()
self.balance = None
self.margin_avail = None
self.__account_currency = None
self.txn_list = list()
self.__inst_dict = dict()
self.price_dict = dict()
self.unit_costs = dict()
def _refresh_account_dicts(self):
res = self.__api.account.get(accountID=self.__account_id)
# log_response(res, logger=self.__logger)
if 'account' in res.body:
acc = res.body['account']
else:
raise APIResponseError(
'unexpected response:' + os.linesep + pformat(res.body)
)
self.balance = float(acc.balance)
self.margin_avail = float(acc.marginAvailable)
self.__account_currency = acc.currency
pos_dict0 = self.pos_dict
self.pos_dict = {
p.instrument: (
{'side': 'long', 'units': int(p.long.units)} if p.long.tradeIDs
else {'side': 'short', 'units': int(p.short.units)}
) for p in acc.positions if p.long.tradeIDs or p.short.tradeIDs
}
for i, d in self.pos_dict.items():
p0 = pos_dict0.get(i)
if p0 and all([p0[k] == d[k] for k in ['side', 'units']]):
self.pos_dict[i]['dt'] = p0['dt']
else:
self.pos_dict[i]['dt'] = datetime.now()
def _place_order(self, closing=False, **kwargs):
if closing:
p = self.pos_dict.get(kwargs['instrument'])
f_args = {
'accountID': self.__account_id, **kwargs,
**{
f'{k}Units': ('ALL' if p and p['side'] == k else 'NONE')
for k in ['long', 'short']
}
}
else:
f_args = {'accountID': self.__account_id, **kwargs}
if self.__dry_run:
self.__logger.info(
os.linesep + pformat({
'func': ('position.close' if closing else 'order.create'),
'args': f_args
})
)
else:
if closing:
res = self.__api.position.close(**f_args)
else:
res = self.__api.order.create(**f_args)
log_response(res, logger=self.__logger)
if not (100 <= res.status <= 399):
raise APIResponseError(
'unexpected response:' + os.linesep + pformat(res.body)
)
elif self.__order_log_path:
self._write_data(res.raw_body, path=self.__order_log_path)
else:
time.sleep(0.5)
def refresh_oanda_dicts(self):
t0 = datetime.now()
self._refresh_account_dicts()
self._sleep(last=t0, sec=0.5)
self._refresh_txn_list()
self._sleep(last=t0, sec=1)
self._refresh_inst_dict()
self._sleep(last=t0, sec=1.5)
self._refresh_price_dict()
self._refresh_unit_costs()
def _refresh_txn_list(self):
res = (
self.__api.transaction.since(
accountID=self.__account_id, id=self.__last_txn_id
) if self.__last_txn_id
else self.__api.transaction.list(accountID=self.__account_id)
)
# log_response(res, logger=self.__logger)
if 'lastTransactionID' in res.body:
self.__last_txn_id = res.body['lastTransactionID']
else:
raise APIResponseError(
'unexpected response:' + os.linesep + pformat(res.body)
)
if res.body.get('transactions'):
t_new = [t.dict() for t in res.body['transactions']]
self.print_log(yaml.dump(t_new, default_flow_style=False).strip())
self.txn_list = self.txn_list + t_new
if self.__txn_log_path:
self._write_data(json.dumps(t_new), path=self.__txn_log_path)
def _refresh_inst_dict(self):
res = self.__api.account.instruments(accountID=self.__account_id)
# log_response(res, logger=self.__logger)
if 'instruments' in res.body:
self.__inst_dict = {
c.name: vars(c) for c in res.body['instruments']
}
else:
raise APIResponseError(
'unexpected response:' + os.linesep + pformat(res.body)
)
def _refresh_price_dict(self):
res = self.__api.pricing.get(
accountID=self.__account_id,
instruments=','.join(self.__inst_dict.keys())
)
# log_response(res, logger=self.__logger)
if 'prices' in res.body:
self.price_dict = {
p.instrument: {
'bid': p.closeoutBid, 'ask': p.closeoutAsk,
'tradeable': p.tradeable
} for p in res.body['prices']
}
else:
raise APIResponseError(
'unexpected response:' + os.linesep + pformat(res.body)
)
def _refresh_unit_costs(self):
self.unit_costs = {
i: self._calculate_bp_value(instrument=i) * float(e['marginRate'])
for i, e in self.__inst_dict.items() if i in self.instruments
}
def _calculate_bp_value(self, instrument):
cur_pair = instrument.split('_')
if cur_pair[0] == self.__account_currency:
bpv = 1 / self.price_dict[instrument]['ask']
elif cur_pair[1] == self.__account_currency:
bpv = self.price_dict[instrument]['ask']
else:
bpv = None
for i in self.__inst_dict.keys():
if bpv:
break
elif i == cur_pair[0] + '_' + self.__account_currency:
bpv = self.price_dict[i]['ask']
elif i == self.__account_currency + '_' + cur_pair[0]:
bpv = 1 / self.price_dict[i]['ask']
elif i == cur_pair[1] + '_' + self.__account_currency:
bpv = (
self.price_dict[instrument]['ask']
* self.price_dict[i]['ask']
)
elif i == self.__account_currency + '_' + cur_pair[1]:
bpv = (
self.price_dict[instrument]['ask']
/ self.price_dict[i]['ask']
)
assert bpv, f'bp value calculatiton failed:\t{instrument}'
return bpv
def design_and_place_order(self, instrument, act):
pos = self.pos_dict.get(instrument)
if pos and act and (act == 'closing' or act != pos['side']):
self.__logger.info('Close a position:\t{}'.format(pos['side']))
self._place_order(closing=True, instrument=instrument)
self._refresh_txn_list()
if act in ['long', 'short']:
limits = self._design_order_limits(instrument=instrument, side=act)
self.__logger.debug(f'limits:\t{limits}')
units = self._design_order_units(instrument=instrument, side=act)
self.__logger.debug(f'units:\t{units}')
self.__logger.info(f'Open a order:\t{act}')
self._place_order(
order={
'type': 'MARKET', 'instrument': instrument, 'units': units,
'timeInForce': 'FOK', 'positionFill': 'DEFAULT', **limits
}
)
def _design_order_limits(self, instrument, side):
ie = self.__inst_dict[instrument]
r = self.price_dict[instrument][{'long': 'ask', 'short': 'bid'}[side]]
ts_range = [
float(ie['minimumTrailingStopDistance']),
float(ie['maximumTrailingStopDistance'])
]
ts_dist_ratio = int(
r * self.cf['position']['limit_price_ratio']['trailing_stop'] /
ts_range[0]
)
if ts_dist_ratio <= 1:
trailing_stop = ie['minimumTrailingStopDistance']
else:
ts_dist = np.float16(ts_range[0] * ts_dist_ratio)
if ts_dist >= ts_range[1]:
trailing_stop = ie['maximumTrailingStopDistance']
else:
trailing_stop = str(ts_dist)
tp = {
k: str(
np.float16(
r + r * v * {
'take_profit': {'long': 1, 'short': -1}[side],
'stop_loss': {'long': -1, 'short': 1}[side]
}[k]
)
) for k, v in self.cf['position']['limit_price_ratio'].items()
if k in ['take_profit', 'stop_loss']
}
tif = {'timeInForce': 'GTC'}
return {
'takeProfitOnFill': {'price': tp['take_profit'], **tif},
'stopLossOnFill': {'price': tp['stop_loss'], **tif},
'trailingStopLossOnFill': {'distance': trailing_stop, **tif}
}
def _design_order_units(self, instrument, side):
max_size = int(self.__inst_dict[instrument]['maximumOrderUnits'])
avail_size = max(
ceil(
(
self.margin_avail - self.balance *
self.cf['position']['margin_nav_ratio']['preserve']
) / self.unit_costs[instrument]
), 0
)
self.__logger.debug(f'avail_size:\t{avail_size}')
sizes = {
k: ceil(self.balance * v / self.unit_costs[instrument])
for k, v in self.cf['position']['margin_nav_ratio'].items()
if k in ['unit', 'init']
}
self.__logger.debug(f'sizes:\t{sizes}')
bet_size = self.__bs.calculate_size_by_pl(
unit_size=sizes['unit'],
inst_pl_txns=[
t for t in self.txn_list if (
t.get('instrument') == instrument and t.get('pl') and
t.get('units')
)
],
init_size=sizes['init']
)
self.__logger.debug(f'bet_size:\t{bet_size}')
return str(
int(min(bet_size, avail_size, max_size)) *
{'long': 1, 'short': -1}[side]
)
@staticmethod
def _sleep(last, sec=0.5):
rest = sec - (datetime.now() - last).total_seconds()
if rest > 0:
time.sleep(rest)
def print_log(self, data):
if self.__quiet:
self.__logger.info(data)
else:
print(data, flush=True)
def print_state_line(self, df_rate, add_str):
i = df_rate['instrument'].iloc[-1]
net_pl = sum([
float(t['pl']) for t in self.txn_list
if t.get('instrument') == i and t.get('pl')
])
self.print_log(
'|{0:^11}|{1:^29}|{2:^15}|'.format(
i,
'{0:>3}:{1:>21}'.format(
'B/A',
np.array2string(
df_rate[['bid', 'ask']].iloc[-1].values,
formatter={'float_kind': lambda f: f'{f:8g}'}
)
),
'PL:{:>8}'.format(f'{net_pl:.1g}')
) + (add_str or '')
)
def _write_data(self, data, path, mode='a', append_linesep=True):
with open(path, mode) as f:
f.write(str(data) + (os.linesep if append_linesep else ''))
def write_turn_log(self, df_rate, **kwargs):
i = df_rate['instrument'].iloc[-1]
df_r = df_rate.drop(columns=['instrument'])
self._write_log_df(name=f'rate.{i}', df=df_r)
if kwargs:
self._write_log_df(
name=f'sig.{i}', df=df_r.tail(n=1).assign(**kwargs)
)
def _write_log_df(self, name, df):
if self.__log_dir_path and df.size:
self.__logger.debug(f'{name} df:{os.linesep}{df}')
p = str(Path(self.__log_dir_path).joinpath(f'{name}.tsv'))
self.__logger.info(f'Write TSV log:\t{p}')
self._write_df(df=df, path=p)
def _write_df(self, df, path, mode='a'):
df.to_csv(
path, mode=mode, sep=(',' if path.endswith('.csv') else '\t'),
header=(not Path(path).is_file())
)
def fetch_candle_df(self, instrument, granularity='S5', count=5000):
res = self.__api.instrument.candles(
instrument=instrument, price='BA', granularity=granularity,
count=int(count)
)
# log_response(res, logger=self.__logger)
if 'candles' in res.body:
return pd.DataFrame([
{
'time': c.time, 'bid': c.bid.c, 'ask': c.ask.c,
'volume': c.volume
} for c in res.body['candles'] if c.complete
]).assign(
time=lambda d: pd.to_datetime(d['time']), instrument=instrument
).set_index('time', drop=True)
else:
raise APIResponseError(
'unexpected response:' + os.linesep + pformat(res.body)
)
def fetch_latest_price_df(self, instrument):
res = self.__api.pricing.get(
accountID=self.__account_id, instruments=instrument
)
# log_response(res, logger=self.__logger)
if 'prices' in res.body:
return pd.DataFrame([
{'time': r.time, 'bid': r.closeoutBid, 'ask': r.closeoutAsk}
for r in res.body['prices']
]).assign(
time=lambda d: pd.to_datetime(d['time']), instrument=instrument
).set_index('time')
else:
raise APIResponseError(
'unexpected response:' + os.linesep + pformat(res.body)
)
class BaseTrader(TraderCore, metaclass=ABCMeta):
def __init__(self, model, standalone=True, ignore_api_error=False,
**kwargs):
super().__init__(**kwargs)
self.__logger = logging.getLogger(__name__)
self.__ignore_api_error = ignore_api_error
self.__n_cache = self.cf['feature']['cache']
self.__use_tick = (
'TICK' in self.cf['feature']['granularities'] and not standalone
)
self.__granularities = [
a for a in self.cf['feature']['granularities'] if a != 'TICK'
]
self.__cache_dfs = {i: pd.DataFrame() for i in self.instruments}
if model == 'ewma':
self.__ai = Ewma(config_dict=self.cf)
elif model == 'kalman':
self.__ai = Kalman(config_dict=self.cf)
else:
raise ValueError(f'invalid model name:\t{model}')
self.__volatility_states = dict()
self.__granularity_lock = dict()
def invoke(self):
self.print_log('!!! OPEN DEALS !!!')
signal.signal(signal.SIGINT, signal.SIG_DFL)
while self.check_health():
try:
self._update_volatility_states()
for i in self.instruments:
self.refresh_oanda_dicts()
self.make_decision(instrument=i)
except (V20ConnectionError, V20Timeout, APIResponseError) as e:
if self.__ignore_api_error:
self.__logger.error(e)
else:
raise e
@abstractmethod
def check_health(self):
return True
def _update_volatility_states(self):
if not self.cf['volatility']['sleeping']:
self.__volatility_states = {i: True for i in self.instruments}
else:
self.__volatility_states = {
i: self.fetch_candle_df(
instrument=i,
granularity=self.cf['volatility']['granularity'],
count=self.cf['volatility']['cache']
).pipe(
lambda d: (
np.log(d[['ask', 'bid']].mean(axis=1)).diff().rolling(
window=int(self.cf['volatility']['window'])
).std(ddof=0) * d['volume']
)
).dropna().pipe(
lambda v: (
v.iloc[-1]
> v.quantile(self.cf['volatility']['sleeping'])
)
) for i in set(self.instruments)
}
@abstractmethod
def make_decision(self, instrument):
pass
def update_caches(self, df_rate):
self.__logger.info(f'Rate:{os.linesep}{df_rate}')
i = df_rate['instrument'].iloc[-1]
df_c = self.__cache_dfs[i].append(df_rate).tail(n=self.__n_cache)
self.__logger.info('Cache length:\t{}'.format(len(df_c)))
self.__cache_dfs[i] = df_c
def determine_sig_state(self, df_rate):
i = df_rate['instrument'].iloc[-1]
pos = self.pos_dict.get(i)
pos_pct = (
round(
abs(pos['units'] * self.unit_costs[i] * 100 / self.balance), 1
) if pos else 0
)
history_dict = self._fetch_history_dict(instrument=i)
if not history_dict:
sig = {
'sig_act': None, 'granularity': None, 'sig_log_str': (' ' * 40)
}
else:
if self.cf['position']['side'] == 'auto':
inst_pls = [
t['pl'] for t in self.txn_list
if t.get('instrument') == i and t.get('pl')
]
contrary = bool(inst_pls and float(inst_pls[-1]) < 0)
else:
contrary = (self.cf['position']['side'] == 'contrarian')
sig = self.__ai.detect_signal(
history_dict=(
{
k: v for k, v in history_dict.items()
if k == self.__granularity_lock[i]
} if self.__granularity_lock.get(i) else history_dict
),
pos=pos, contrary=contrary
)
if self.cf['feature']['granularity_lock']:
self.__granularity_lock[i] = (
sig['granularity']
if pos or sig['sig_act'] in {'long', 'short'} else None
)
if pos and sig['sig_act'] and sig['sig_act'] == pos['side']:
self.pos_dict[i]['dt'] = datetime.now()
if not sig['granularity']:
act = None
state = 'LOADING'
elif not self.price_dict[i]['tradeable']:
act = None
state = 'TRADING HALTED'
elif (pos and sig['sig_act']
and (sig['sig_act'] == 'closing'
or (not self.__volatility_states[i]
and sig['sig_act'] != pos['side']))):
act = 'closing'
state = 'CLOSING'
elif (pos and not sig['sig_act']
and ((datetime.now() - pos['dt']).total_seconds()
> self.cf['position']['ttl_sec'])):
act = 'closing'
state = 'POSITION EXPIRED'
elif int(self.balance) == 0:
act = None
state = 'NO FUND'
elif (pos
and ((sig['sig_act'] and sig['sig_act'] == pos['side'])
or not sig['sig_act'])):
act = None
state = '{0:.1f}% {1}'.format(pos_pct, pos['side'].upper())
elif self._is_margin_lack(instrument=i):
act = None
state = 'LACK OF FUNDS'
elif self._is_over_spread(df_rate=df_rate):
act = None
state = 'OVER-SPREAD'
elif not self.__volatility_states[i]:
act = None
state = 'SLEEPING'
elif not sig['sig_act']:
act = None
state = '-'
elif pos:
act = sig['sig_act']
state = '{0} -> {1}'.format(
pos['side'].upper(), sig['sig_act'].upper()
)
else:
act = sig['sig_act']
state = '-> {}'.format(sig['sig_act'].upper())
return {
'act': act, 'state': state,
'log_str': (
(
'{:^14}|'.format('TICK:{:>5}'.format(len(df_rate)))
if self.__use_tick else ''
) + sig['sig_log_str'] + f'{state:^18}|'
),
**sig
}
def _fetch_history_dict(self, instrument):
df_c = self.__cache_dfs[instrument]
return {
**(
{'TICK': df_c.assign(volume=1)}
if self.__use_tick and len(df_c) == self.__n_cache else dict()
),
**{
g: self.fetch_candle_df(
instrument=instrument, granularity=g, count=self.__n_cache
).rename(
columns={'closeAsk': 'ask', 'closeBid': 'bid'}
)[['ask', 'bid', 'volume']] for g in self.__granularities
}
}
def _is_margin_lack(self, instrument):
return (
not self.pos_dict.get(instrument) and
self.balance * self.cf['position']['margin_nav_ratio']['preserve']
>= self.margin_avail
)
def _is_over_spread(self, df_rate):
return (
df_rate.tail(n=1).pipe(
lambda d: (d['ask'] - d['bid']) / (d['ask'] + d['bid']) * 2
).values[0]
>= self.cf['position']['limit_price_ratio']['max_spread']
)
| gpl-3.0 | 4,704,809,548,591,564,000 | 37.02589 | 79 | 0.481787 | false |
buwx/logger | Logger.py | 1 | 2078 | #!/usr/bin/python
# -*- coding: iso-8859-15 -*-
'''
Created on 10.03.2015
@author: micha
'''
import logging
import serial
import time
import MySQLdb as mdb
from util import formatData, description, sensor
# the main procedure
logging.basicConfig(format='%(asctime)s\t%(levelname)s\t%(message)s', level=logging.INFO)
logging.info("Starting weather station sensor logging")
con = None
port = None
try:
con = mdb.connect('localhost', 'davis', 'davis', 'davis');
cur = con.cursor()
port = serial.Serial(
port='/dev/ttyUSB0',\
baudrate=115200,\
parity=serial.PARITY_NONE,\
stopbits=serial.STOPBITS_ONE,\
bytesize=serial.EIGHTBITS,\
timeout=310)
stage = 0;
# main loop
while True:
ts_old = int(time.time())
line = port.readline().strip()
ts = int(time.time())
if ts - ts_old > 300:
logging.critical("Timeout!")
break
if stage == 0 and line[0] == '?':
stage = 1
elif stage == 1 and line[0] == '#':
port.write("x200\n") # Threshold set to -100db
stage = 2
elif stage == 2 and line[0] == '#':
port.write("t1\n") # Tansmitter 1
stage = 3
elif stage == 3 and line[0] == '#':
port.write("f1\n") # Filter on
stage = 4
elif stage == 4 and line[0] == '#':
port.write("o0\n") # Output original data
stage = 5
elif stage == 5 and line[0] == '#':
port.write("m1\n") # Frequency band 1
stage = 6
elif stage == 6 and len(line) > 3:
sid = line[0]
if sid == 'B' or sid == 'I' or sid == 'W' or sid == 'T' or sid == 'R' or sid == 'P':
cur.execute("INSERT INTO logger(dateTime,sensor,data,description) VALUES(%s,%s,%s,%s)", (ts,sensor(line),formatData(line),description(line)))
con.commit()
except Exception, e:
logging.critical(str(e))
finally:
if con:
con.close()
if port:
port.close()
| gpl-3.0 | 4,441,962,244,224,371,000 | 25.641026 | 157 | 0.532243 | false |
bgr/quadpy | demo/demo.py | 1 | 6825 | try:
import tkinter
except ImportError:
import Tkinter as tkinter
# hsmpy is Hierarchical State Machine implementation for Python
# it's used here to implement GUI logic
import quadpy
from quadpy.rectangle import Rectangle
from hsmpy import HSM, State, T, Initial, Internal, Choice, EventBus, Event
# you can enable logging to see what's going on under the hood of HSM
#import logging
#logging.basicConfig(level=logging.DEBUG)
# tool aliases
Selection_tool, Drawing_tool = ('Select', 'Draw')
# eventbus will be used for all events
# Tkinter events will also be routed through it
eb = EventBus()
# HSM events
class Tool_Changed(Event): pass
class Mouse_Event(Event):
def __init__(self, x, y):
self.x = x
self.y = y
self.data = (x, y)
class Canvas_Up(Mouse_Event): pass
class Canvas_Down(Mouse_Event): pass
class Canvas_Move(Mouse_Event): pass
# create Tkinter GUI
root = tkinter.Tk()
canvas = tkinter.Canvas(width=700, height=700,
highlightthickness=0, background='white')
canvas.pack(fill='both', expand=True, padx=6, pady=6)
frame = tkinter.Frame()
labels = []
for i, tool in enumerate([Selection_tool, Drawing_tool]):
lbl = tkinter.Label(frame, text=tool, width=8, relief='raised')
wtf = tool
def get_closure(for_tool):
return lambda _: eb.dispatch(Tool_Changed(for_tool))
lbl.bind('<Button-1>', get_closure(tool))
lbl.pack(padx=6, pady=6 * (i % 2))
labels.append(lbl)
frame.pack(side='left', fill='y', expand=True, pady=6)
canvas.bind('<Button-1>', lambda e: eb.dispatch(Canvas_Down(e.x, e.y)))
canvas.bind('<B1-Motion>', lambda e: eb.dispatch(Canvas_Move(e.x, e.y)))
canvas.bind('<ButtonRelease-1>', lambda e: eb.dispatch(Canvas_Up(e.x, e.y)))
# I'll just put these here and reference them directly later, for simplicity
quad = quadpy.Node(0, 0, 700, 700, max_depth=9)
selected_elems = []
canvas_grid = {} # quadtree grid, mapping: bounds -> tkinter rectangle id
##### HSM state and transition actions #####
def update_chosen_tool(evt, hsm):
for lbl in labels:
lbl['relief'] = 'sunken' if evt.data == lbl['text'] else 'raised'
hsm.data.canvas_tool = evt.data
# quadtree grid visualization:
def update_grid():
updated_bounds = set(quad._get_grid_bounds())
current_bounds = set(canvas_grid.keys())
deleted = current_bounds.difference(updated_bounds)
added = updated_bounds.difference(current_bounds)
for d in deleted:
canvas.delete(canvas_grid[d])
del canvas_grid[d]
for a in added:
added_id = canvas.create_rectangle(a, outline='grey')
canvas_grid[a] = added_id
# drawing new rectangle:
def initialize_rectangle(evt, hsm):
x, y = evt.data
bounds = (x, y, x + 1, y + 1)
rect = Rectangle(*bounds)
rect.canvas_id = canvas.create_rectangle(bounds, outline='blue')
hsm.data.canvas_temp_data = (x, y, rect)
quad.insert(rect)
update_grid()
def draw_rectangle(evt, hsm):
x, y, rect = hsm.data.canvas_temp_data
bounds = (x, y, evt.x, evt.y)
rect.bounds = bounds
canvas.coords(rect.canvas_id, bounds)
quad.reinsert(rect)
update_grid()
# selecting and moving:
def elems_under_cursor(evt, hsm):
return quad.get_children_under_point(evt.x, evt.y)
def select_elems(elems):
global selected_elems
[canvas.itemconfig(e.canvas_id, outline='blue') for e, _ in selected_elems]
selected_elems = [(el, el.bounds) for el in elems]
[canvas.itemconfig(e.canvas_id, outline='red') for e, _ in selected_elems]
def select_under_cursor(evt, hsm):
hsm.data.moving_start = (evt.x, evt.y)
elems = elems_under_cursor(evt, hsm)
if not elems:
assert False, "this cannot happen"
just_elems = set(el for el, _ in selected_elems)
if not any(el in just_elems for el in elems):
# clicked non-selected element, select it
select_elems([elems[0]])
else:
# hack to refresh initial bounds for each tuple in selected_elems
select_elems([el for el, _ in selected_elems])
def move_elements(evt, hsm):
x, y = hsm.data.moving_start
off_x, off_y = evt.x - x, evt.y - y
for el, original_bounds in selected_elems:
x1, y1, x2, y2 = original_bounds
el.bounds = (x1 + off_x, y1 + off_y, x2 + off_x, y2 + off_y)
canvas.coords(el.canvas_id, el.bounds)
quad.reinsert(el)
update_grid()
# selection marquee
def create_marquee_rect(evt, hsm):
rect_id = canvas.create_rectangle((evt.x, evt.y, evt.x, evt.y),
outline='orange')
hsm.data.canvas_marquee = (evt.x, evt.y, rect_id)
select_elems([])
def drag_marquee_rect(evt, hsm):
x, y, rect_id = hsm.data.canvas_marquee
bounds = (x, y, evt.x, evt.y)
select_elems(quad.get_overlapped_children(bounds))
canvas.coords(rect_id, bounds)
def clear_marquee_rect(evt, hsm):
_, _, rect_id = hsm.data.canvas_marquee
canvas.delete(rect_id)
# define HSM state structure and transitions between states:
states = {
'app': State({
'select_tool_chosen': State({
'select_tool_hovering': State(),
'dragging_marquee': State(on_enter=create_marquee_rect,
on_exit=clear_marquee_rect),
'moving_elements': State(on_enter=select_under_cursor),
}),
'draw_tool_chosen': State({
'draw_tool_hovering': State(),
'drawing': State(),
})
})
}
transitions = {
'app': {
Initial: T('draw_tool_chosen'),
Tool_Changed: Choice({
Selection_tool: 'select_tool_chosen',
Drawing_tool: 'draw_tool_chosen' },
default='select_tool_chosen',
action=update_chosen_tool)
},
'select_tool_chosen': {
Initial: T('select_tool_hovering'),
Canvas_Up: T('select_tool_hovering'),
},
####
'select_tool_hovering': {
Canvas_Down: Choice({
False: 'dragging_marquee',
True: 'moving_elements', },
default='dragging_marquee',
key=lambda e, h: len(elems_under_cursor(e, h)) > 0),
},
'dragging_marquee': {
Canvas_Move: Internal(action=drag_marquee_rect),
},
'moving_elements': {
Canvas_Move: Internal(action=move_elements),
},
###
'draw_tool_chosen': {
Initial: T('draw_tool_hovering'),
Canvas_Up: T('draw_tool_hovering'),
},
'draw_tool_hovering': {
Canvas_Down: T('drawing', action=initialize_rectangle),
},
'drawing': {
Canvas_Move: Internal(action=draw_rectangle),
},
}
# initialize HSM with defined states and transitions and run
hsm = HSM(states, transitions)
hsm.start(eb)
eb.dispatch(Tool_Changed(Drawing_tool))
root.mainloop()
| mit | 7,489,607,430,619,016,000 | 27.676471 | 79 | 0.626081 | false |
colour-science/colour-analysis | colour_analysis/visuals/__init__.py | 1 | 1700 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .primitive import Primitive, PrimitiveVisual
from .symbol import Symbol
from .axis import axis_visual
from .box import Box, BoxVisual
from .diagrams import (CIE_1931_chromaticity_diagram,
CIE_1960_UCS_chromaticity_diagram,
CIE_1976_UCS_chromaticity_diagram)
from .image import image_visual
from .plane import Plane, PlaneVisual
from .pointer_gamut import (pointer_gamut_boundaries_visual,
pointer_gamut_hull_visual, pointer_gamut_visual)
from .rgb_colourspace import (RGB_identity_cube, RGB_colourspace_volume_visual,
RGB_colourspace_whitepoint_axis_visual,
RGB_colourspace_triangle_visual)
from .rgb_scatter import RGB_scatter_visual
from .spectral_locus import (spectral_locus_visual,
chromaticity_diagram_construction_visual)
__all__ = []
__all__ += ['Primitive', 'PrimitiveVisual']
__all__ += ['Symbol']
__all__ += ['Axis', 'AxisVisual', 'axis_visual']
__all__ += ['Box', 'BoxVisual']
__all__ += [
'CIE_1931_chromaticity_diagram', 'CIE_1960_UCS_chromaticity_diagram',
'CIE_1976_UCS_chromaticity_diagram'
]
__all__ += ['image_visual']
__all__ += ['Plane', 'PlaneVisual']
__all__ += [
'pointer_gamut_boundaries_visual', 'pointer_gamut_hull_visual',
'pointer_gamut_visual'
]
__all__ += [
'RGB_identity_cube', 'RGB_colourspace_volume_visual',
'RGB_colourspace_whitepoint_axis_visual', 'RGB_colourspace_triangle_visual'
]
__all__ += ['RGB_scatter_visual']
__all__ += [
'spectral_locus_visual', 'chromaticity_diagram_construction_visual'
]
| bsd-3-clause | 7,652,326,354,322,501,000 | 36.777778 | 79 | 0.647647 | false |
jgagneastro/FireHose_OLD | 3-XIDL/23-XIDL/idlspec2d/bin/putils.py | 2 | 2033 | #!/usr/bin/env python
import sys, os, os, subprocess
import string, imp, time, shlex
import gzip
"""
putils is a set of miscellaneous python tools.
Written by Gary Kushner (LBL). Nov 2009. Latest update April 2010.
"""
def searchPath(name, paths):
"""Search a path for a name (file, direcory, link, etc). Return the absolute
path to the found file or None"""
for path in paths:
if os.path.exists(os.path.join(path, name)):
return os.path.abspath(os.path.join(path, name))
return None
def loadModuleRaw(module):
"""import a python module using a raw file name (doesn't need to end in .py)"""
path = searchPath(module, sys.path)
if path == None:
raise ImportError("No module named " + module)
return imp.load_source(module, path)
def runCommand(cmd, echo=False, logCmd=None, prefix="", shell=False):
"""Run a command with the option to asynchronously display or log output.
If shell=False, the cmd needs to be a list, but if you pass in a string
it will be parsed into a list.
echo will echo output to stdout.
logCmd is a function pointer to use to put the output into a log.
Returns (return code, output)."""
output = ""
# Handle the command parsing
if isinstance(cmd, str) and not shell:
cmd = [c for c in shlex.split(cmd)]
# Call the process
p = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr = subprocess.STDOUT,
shell=shell)
# Process output until process dies
while True:
l = p.stdout.readline()
if not l: break
output += l
l = l[:-1] # yea, only safe on unix...
if echo:
print prefix + l
if logCmd != None:
logCmd(prefix + l)
return (p.wait(), output)
def openRead(filename, mode = "r"):
"""Open a gzip or normal file for text reading. Valid modes are 'r' and 'rb'"""
gzSig = '\x1f\x8b'
if mode != 'r' and mode != 'rb':
raise ValueError("Illegal mode: " + mode)
f = open(filename, mode)
try:
if (f.read(2) == gzSig):
f = gzip.open(filename, mode)
finally:
f.seek(0)
return f
| gpl-2.0 | -3,833,361,437,633,648,600 | 23.493976 | 81 | 0.661584 | false |
tensorflow/federated | tensorflow_federated/python/core/impl/context_stack/symbol_binding_context.py | 1 | 1297 | # Copyright 2020, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines interface for contexts which can bind symbols."""
import abc
from typing import Any, List, Tuple
from tensorflow_federated.python.core.impl.context_stack import context_base
class SymbolBindingContext(context_base.Context, metaclass=abc.ABCMeta):
"""Interface for contexts which handle binding and tracking of references."""
@abc.abstractmethod
def bind_computation_to_reference(self, comp: Any) -> Any:
"""Binds a computation to a symbol, returns a reference to this binding."""
raise NotImplementedError
@abc.abstractproperty
def symbol_bindings(self) -> List[Tuple[str, Any]]:
"""Returns all symbols bound in this context."""
raise NotImplementedError
| apache-2.0 | -2,697,478,550,297,152,000 | 38.30303 | 79 | 0.758674 | false |
tiagocoutinho/bliss | bliss/controllers/motors/carnac.py | 1 | 2689 | # -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2016 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.
"""
ID31 motion hook for the carnac motors.
"""
import logging
import gevent
from bliss.common.hook import MotionHook
class CarnacHook(MotionHook):
"""
Motion hook specific for ID31 carnac motors.
Configuration example:
.. code-block:: yaml
hooks:
- name: carnac_hook
class: CarnacHook
module: motors.hooks
plugin: bliss
controllers:
- name: ice313
class: IcePAP
host: iceid313
plugin: emotion
axes:
- name: cncx
motion_hooks:
- $carnac_hook
- name: cncy
motion_hooks:
- $carnac_hook
- name: cncz
motion_hooks:
- $carnac_hook
"""
def __init__(self, name, config):
self._log = logging.getLogger('{0}({1})'.format(self.__class__.__name__,
name))
self.debug = self._log.debug
self.config = config
self.name = name
super(CarnacHook, self).__init__()
def _wait_ready(self, axes):
with gevent.Timeout(1, RuntimeError('not all motors ready after timeout')):
while True:
ready = [axis for axis in axes if axis.state(read_hw=True).READY]
if len(ready) == len(axes):
break
self._log.debug('All motors ready!')
def pre_move(self, motion_list):
axes = [motion.axis for motion in motion_list]
axes_names = ', '.join([axis.name for axis in axes])
self._log.debug('Start power ON for %s', axes_names)
tasks = [gevent.spawn(axis.controller.set_on, axis) for axis in axes]
gevent.joinall(tasks, timeout=1, raise_error=True)
self._log.debug('Finished power ON for %s', axes_names)
# we know empirically that the carnac takes ~1.3s to reply it is
# ready after a power on
gevent.sleep(1.2)
self._wait_ready(axes)
def post_move(self, motion_list):
axes = [motion.axis for motion in motion_list]
axes_names = ', '.join([axis.name for axis in axes])
self._log.debug('Start power OFF for %s', axes_names)
tasks = [gevent.spawn(axis.controller.set_off, axis) for axis in axes]
gevent.joinall(tasks, timeout=1, raise_error=True)
self._log.debug('Finished power OFF for %s', axes_names)
self._wait_ready(axes)
| lgpl-3.0 | -3,396,081,586,227,965,400 | 31.39759 | 83 | 0.554481 | false |
amaurywalbert/twitter | communities_detection/infomap/hashmap_infomap_method_v1.0.py | 2 | 6588 | # -*- coding: latin1 -*-
################################################################################################
import snap,datetime, sys, time, json, os, os.path, shutil, time, struct, random
import subprocess
import networkx as nx
import matplotlib.pyplot as plt
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
######################################################################################################################################################################
## Status - Versão 1 - INFOMAP - http://www.mapequation.org/code.html
##
##
## # INPUT: Grafos
##
## # OUTPUT:
## Communities
######################################################################################################################################################################
######################################################################################################################################################################
#
# Cálculos iniciais sobre o conjunto de dados lidos.
#
######################################################################################################################################################################
def calculate_alg(output,net,uw,ud,g_type,alg,graphs):
if not os.path.exists(graphs):
print ("\nDiretório com grafos não encontrado: "+str(graphs)+"\n")
else:
print
print("######################################################################")
print ("Os arquivos serão armazenados em: "+str(output))
print("######################################################################")
if not os.path.exists(output):
os.makedirs(output)
i=0
for file in os.listdir(graphs):
ego_id = file.split(".edge_list")
ego_id = long(ego_id[0])
i+=1
print("Detectando comunidades: "+str(g_type)+" - "+str(alg)+" - Rede: "+str(net)+" - ego("+str(i)+"): "+str(file))
try:
if ud is False: # Para grafo Directed
execute = subprocess.Popen(["/home/amaury/algoritmos/Infomap/Infomap","-i link-list", str(graphs)+str(file), str(output), "--out-name "+str(ego_id), "-N 10", "--directed", "--two-level", "--map"], stdout=subprocess.PIPE)
else: # Para grafos Undirected
execute = subprocess.Popen(["/home/amaury/algoritmos/Infomap/Infomap","-i link-list", str(graphs)+str(file), str(output), "--out-name "+str(ego_id), "-N 10", "--undirected", "--two-level", "--map"], stdout=subprocess.PIPE)
value = execute.communicate()[0]
print value
except Exception as e:
print e
print("######################################################################")
######################################################################################################################################################################
#
# Método principal do programa.
# Realiza teste e coleta dos dados de cada user especificado no arquivo.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
os.system('clear')
print "################################################################################"
print" "
print" Detecção de Comunidades - INFOMAP Method "
print" "
print"#################################################################################"
print
print
print" 1 - Follow"
print" 9 - Follwowers"
print" 2 - Retweets"
print" 3 - Likes"
print" 4 - Mentions"
print " "
print" 5 - Co-Follow"
print" 10 - Co-Followers"
print" 6 - Co-Retweets"
print" 7 - Co-Likes"
print" 8 - Co-Mentions"
print
op = int(raw_input("Escolha uma opção acima: "))
if op in (5,6,7,8,10): # Testar se é um grafo direcionado ou não
ud = True
elif op in (1,2,3,4,9):
ud = False
else:
print("Opção inválida! Saindo...")
sys.exit()
if op == 1 or op == 9: # Testar se é um grafo direcionado ou não
uw = True
else:
uw = False
######################################################################
net = "n"+str(op)
######################################################################################################################
g_type1 = "graphs_with_ego"
g_type2 = "graphs_without_ego"
alg = "infomap"
######################################################################################################################
output = "/home/amaury/communities_hashmap/"+str(g_type1)+"/"+str(alg)+"/raw/"+str(net)+"/10/"
graphs = "/home/amaury/graphs_hashmap_infomap/"+str(net)+"/"+str(g_type1)+"/"
print ("Calculando Comunidades para a rede: "+str(net)+" - COM o ego")
calculate_alg(output,net,uw,ud,g_type1,alg,graphs)
######################################################################################################################
######################################################################################################################
output = "/home/amaury/communities_hashmap/"+str(g_type2)+"/"+str(alg)+"/raw/"+str(net)+"/10/"
graphs = "/home/amaury/graphs_hashmap_infomap/"+str(net)+"/"+str(g_type2)+"/"
print ("Calculando Comunidades para a rede: "+str(net)+" - SEM o ego")
calculate_alg(output,net,uw,ud,g_type2,alg,graphs)
######################################################################################################################
print("######################################################################")
print
print("######################################################################")
print("Script finalizado!")
print("######################################################################\n")
######################################################################################################################################################################
#
# INÍCIO DO PROGRAMA
#
######################################################################################################################################################################
######################################################################################################################
if __name__ == "__main__": main()
| gpl-3.0 | -3,754,045,302,539,895,300 | 43.09396 | 227 | 0.318569 | false |
shodimaggio/SaivDr | appendix/pytorch/nsoltBlockDct2dLayer.py | 1 | 2466 | import torch
import torch.nn as nn
import torch_dct as dct
import math
from nsoltUtility import Direction
class NsoltBlockDct2dLayer(nn.Module):
"""
NSOLTBLOCKDCT2DLAYER
ベクトル配列をブロック配列を入力:
nSamples x nComponents x (Stride(1)xnRows) x (Stride(2)xnCols)
コンポーネント別に出力(nComponents):
nSamples x nDecs x nRows x nCols
Requirements: Python 3.7.x, PyTorch 1.7.x
Copyright (c) 2020-2021, Shogo MURAMATSU
All rights reserved.
Contact address: Shogo MURAMATSU,
Faculty of Engineering, Niigata University,
8050 2-no-cho Ikarashi, Nishi-ku,
Niigata, 950-2181, JAPAN
http://msiplab.eng.niigata-u.ac.jp/
"""
def __init__(self,
name='',
decimation_factor=[],
number_of_components=1
):
super(NsoltBlockDct2dLayer, self).__init__()
self.decimation_factor = decimation_factor
self.name = name
self.description = "Block DCT of size " \
+ str(self.decimation_factor[Direction.VERTICAL]) + "x" \
+ str(self.decimation_factor[Direction.HORIZONTAL])
#self.type = ''
self.num_outputs = number_of_components
#self.num_inputs = 1
def forward(self,X):
nComponents = self.num_outputs
nSamples = X.size(0)
height = X.size(2)
width = X.size(3)
stride = self.decimation_factor
nrows = int(math.ceil(height/stride[Direction.VERTICAL]))
ncols = int(math.ceil(width/stride[Direction.HORIZONTAL]))
ndecs = stride[0]*stride[1] #math.prod(stride)
# Block DCT (nSamples x nComponents x nrows x ncols) x decV x decH
arrayshape = stride.copy()
arrayshape.insert(0,-1)
Y = dct.dct_2d(X.view(arrayshape),norm='ortho')
# Rearrange the DCT Coefs. (nSamples x nComponents x nrows x ncols) x (decV x decH)
cee = Y[:,0::2,0::2].reshape(Y.size(0),-1)
coo = Y[:,1::2,1::2].reshape(Y.size(0),-1)
coe = Y[:,1::2,0::2].reshape(Y.size(0),-1)
ceo = Y[:,0::2,1::2].reshape(Y.size(0),-1)
A = torch.cat((cee,coo,coe,ceo),dim=-1)
Z = A.view(nSamples,nComponents,nrows,ncols,ndecs)
if nComponents<2:
return torch.squeeze(Z,dim=1)
else:
return map(lambda x: torch.squeeze(x,dim=1),torch.chunk(Z,nComponents,dim=1))
| bsd-2-clause | -1,141,814,849,013,926,000 | 33.457143 | 91 | 0.59204 | false |
mrahim/adni_fdg_pet_analysis | learn_voxels_norm_baseline_fdg_pet_adni.py | 1 | 4688 | """
A script that :
- computes a Masker from FDG PET (baseline uniform)
- cross-validates a linear SVM classifier
- computes a ROC curve and AUC
"""
import os, glob
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import nibabel as nib
from sklearn import svm
from sklearn import cross_validation
from sklearn.metrics import roc_curve, auc
from nilearn.input_data import NiftiMasker
from collections import OrderedDict
def plot_shufflesplit(score, pairwise_groups):
"""Boxplot of the accuracies
"""
bp = plt.boxplot(score, labels=['/'.join(pg) for pg in pairwise_groups])
for key in bp.keys():
for box in bp[key]:
box.set(linewidth=2)
plt.grid(axis='y')
plt.xticks([1, 1.9, 2.8, 3.8, 5, 6.3])
plt.ylabel('Accuracy')
plt.ylim([0.4, 1.0])
plt.title('ADNI baseline accuracies (voxels)')
plt.legend(loc="lower right")
for ext in ['png', 'pdf', 'svg']:
fname = '.'.join(['boxplot_adni_baseline_voxels_norm', ext])
plt.savefig(os.path.join('figures', fname), transparent=True)
def plot_roc(cv_dict):
"""Plot roc curves for each pairwise groupe
"""
for pg in cv_dict.keys():
plt.plot(crossval[pg]['fpr'],crossval[pg]['tpr'],
linewidth=2,
label='{0} (auc = {1:0.2f})'
''.format(pg, crossval[pg]['auc']))
plt.plot([0, 1], [0, 1], 'k--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.grid(True)
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('ADNI baseline ROC curves (voxels)')
plt.legend(loc="lower right")
for ext in ['png', 'pdf', 'svg']:
fname = '.'.join(['roc_adni_baseline_voxels_norm', ext])
plt.savefig(os.path.join('figures', fname), transparent=True)
BASE_DIR = '/disk4t/mehdi/data/ADNI_baseline_fdg_pet'
data = pd.read_csv(os.path.join(BASE_DIR, 'description_file.csv'))
if os.path.exists('features/features_voxels_norm.npy'):
X = np.load('features/features_voxels_norm.npy')
else:
pet_files = []
pet_img = []
for idx, row in data.iterrows():
pet_file = glob.glob(os.path.join(BASE_DIR,
'I' + str(row.Image_ID), 'wI*.nii'))
if len(pet_file) > 0:
pet_files.append(pet_file[0])
img = nib.load(pet_file[0])
pet_img.append(img)
masker = NiftiMasker(mask_strategy='epi',
mask_args=dict(opening=1))
masker.fit(pet_files)
pet_masked = masker.transform_niimgs(pet_files, n_jobs=4)
X = np.vstack(pet_masked)
np.save('features/features_voxels_norm', X)
# Pairwise group comparison
pairwise_groups = [['AD', 'Normal'], ['AD', 'EMCI'], ['AD', 'LMCI'],
['LMCI', 'Normal'], ['LMCI', 'EMCI'], ['EMCI', 'Normal']]
nb_iter = 10
score = np.zeros((nb_iter, len(pairwise_groups)))
crossval = OrderedDict()
pg_counter = 0
for pg in pairwise_groups:
gr1_idx = data[data.DX_Group == pg[0]].index.values
gr2_idx = data[data.DX_Group == pg[1]].index.values
x = X[np.concatenate((gr1_idx, gr2_idx))]
y = np.ones(len(x))
y[len(y) - len(gr2_idx):] = 0
estim = svm.SVC(kernel='linear')
sss = cross_validation.StratifiedShuffleSplit(y, n_iter=nb_iter, test_size=0.2)
# 1000 runs with randoms 80% / 20% : StratifiedShuffleSplit
counter = 0
for train, test in sss:
Xtrain, Xtest = x[train], x[test]
Ytrain, Ytest = y[train], y[test]
Yscore = estim.fit(Xtrain,Ytrain)
print pg_counter, counter
score[counter, pg_counter] = estim.score(Xtest, Ytest)
counter += 1
# Cross-validation
kf = cross_validation.StratifiedKFold(y,4)
estim = svm.SVC(kernel='linear', probability=True)
yproba = np.zeros((len(y), 2))
for train, test in kf:
xtrain, xtest = x[train], x[test]
ytrain, ytest = y[train], y[test]
yproba[test] = estim.fit(xtrain, ytrain).predict_proba(xtest)
fpr, tpr, thresholds = roc_curve(1-y, yproba[:,0])
a = auc(fpr,tpr)
if a<.5:
fpr, tpr, thresholds = roc_curve(y, yproba[:,0])
a = auc(fpr,tpr)
crossval['/'.join(pg)] = {'fpr' : fpr,
'tpr' : tpr,
'thresholds' : thresholds,
'yproba' : yproba,
'auc' : a}
pg_counter += 1
plot_roc(crossval)
plt.figure()
plot_shufflesplit(score, pairwise_groups)
plt.figure()
np.save('features/score_voxels_norm', score)
np.save('features/crossval_voxels_norm', crossval) | gpl-2.0 | -6,988,921,474,710,584,000 | 32.255319 | 83 | 0.581911 | false |
ctag/cpe453 | JMRI/jython/xAPadapter.py | 1 | 7516 | # Adapter to xAP automation protocol
#
# Uses xAPlib to listen to the network, creating and
# maintaining internal Turnout and Sensor objects that
# reflect what is seen.
#
# The Turnouts' commanded state is updated, not the
# known state, so feedback needs to be considered in
# any more permanent implementation. Note that
# this does not yet send anything on modification,
# due to race conditions.
#
# Author: Bob Jacobsen, copyright 2010
# Part of the JMRI distribution
# Ver 1.2 01/11/2011 NW Changes to the input code
# Ver 1.3 07/11/2011 NW Added a "Type" to the BSC message format
# Ver 1.4 07/12/2011 NW Changes to xAP Tx Message area
#
#
#
#
# The next line is maintained by CVS, please don't change it
# $Revision: 27263 $
import jarray
import jmri
import xAPlib
# create the network
print "opening "
global myNetwork
myNetwork = xAPlib.xAPNetwork("listener.xap")
# display some info
properties = myNetwork.getProperties()
print "getBroadcastIP()", properties.getBroadcastIP()
print "getHeartbeatInterval()", properties.getHeartbeatInterval()
print "getInstance() ", properties.getInstance()
print "getPort() ", properties.getPort()
print "getSource() ", properties.getSource()
print "getUID() ", properties.getUID()
print "getVendor() ", properties.getVendor()
print "getxAPAddress() ", properties.getxAPAddress()
print
# Define thexAPRxEventListener: Print some
# information when event arrives
class InputListener(xAPlib.xAPRxEventListener):
def myEventOccurred(self, event, message):
print "==== rcvd ===="
print message
print "--------------"
# try parsing and printing
fmtMsg = xAPlib.xAPParser(message)
print "source: ", fmtMsg.getSource()
print "target: ", fmtMsg.getTarget()
print "class: ", fmtMsg.getClassName()
print "uid: ", fmtMsg.getUID()
if (fmtMsg.getClassName() == "xAPBSC.info" or fmtMsg.getClassName() == "xAPBSC.event") :
print " --- Acting on "+fmtMsg.getClassName()+" ---"
if (fmtMsg.getNameValuePair("output.state","Type") != None) :
print " --- Acting on output.state ---"
pair = fmtMsg.getNameValuePair("output.state","Type")
if (pair == None) :
print "No Type, ending"
return
type = pair.getValue().upper()
print "NWE Type:", type,":"
if (type == "TURNOUT" or type == "SIGNAL") :
print "NWE Turnout/Signal"
self.processTurnout(fmtMsg, message)
if (fmtMsg.getNameValuePair("input.state","Type") != None) :
pair = fmtMsg.getNameValuePair("input.state","Type")
type = pair.getValue().upper()
if (type == "SENSOR") :
print "NWE Sensor"
print " --- Acting on input.state ---"
self.processSensor(fmtMsg, message)
print "=============="
return
# Process Turnout
def processTurnout(self, fmtMsg, message) :
pair = fmtMsg.getNameValuePair("output.state","Name")
if (pair == None) :
print "No Name"
name = None
else :
name = pair.getValue()
print " Name:", name
pair = fmtMsg.getNameValuePair("output.state","Location")
if (pair == None) :
print "No Location"
location = None
else :
location = pair.getValue()
print " Location: ", location
pair = fmtMsg.getNameValuePair("output.state","State")
if (pair == None) :
print "No State, ending"
return
state = pair.getValue().upper()
print " State: ", state
# now create a Turnout and set
value = CLOSED
if (state == "ON") :
value = THROWN
turnout = turnouts.getTurnout("IT:XAP:XAPBSC:"+fmtMsg.getSource())
if (turnout == None) :
print " create x turnout IT:XAP:XAPBSC:"+fmtMsg.getSource()
turnout = turnouts.provideTurnout("IT:XAP:XAPBSC:"+fmtMsg.getSource())
if (name != None) :
turnout.setUserName(name)
turnout.setCommandedState(value)
print " set turnout IT:XAP:XAPBSC:"+fmtMsg.getSource()+" to", value
return
# Process Sensor
def processSensor(self, fmtMsg, message) :
pair = fmtMsg.getNameValuePair("input.state","Name")
if (pair == None) :
print "No Name"
name = None
else :
name = pair.getValue()
print " Name:", name
pair = fmtMsg.getNameValuePair("input.state","Location")
if (pair == None) :
print "No Location"
location = None
else :
location = pair.getValue()
print " Location: ", location
pair = fmtMsg.getNameValuePair("input.state","State")
if (pair == None) :
print "No State, ending"
return
state = pair.getValue().upper()
print " State: ", state
# now create a Sensor and set
value = INACTIVE
if (state == "ON") :
value = ACTIVE
sensor = sensors.getSensor("IS:XAP:XAPBSC:"+fmtMsg.getSource())
if (sensor == None) :
print " create x sensor IS:XAP:XAPBSC:"+fmtMsg.getSource()
sensor = sensors.provideSensor("IS:XAP:XAPBSC:"+fmtMsg.getSource())
if (name != None) :
sensor.setUserName(name)
sensor.setState(value)
print " set sensor IS:XAP:XAPBSC:"+fmtMsg.getSource()+" to ", value
return
# Define the turnout listener class, which drives output messages
class TurnoutListener(java.beans.PropertyChangeListener):
def propertyChange(self, event):
global myNetwork
print " ************** Sending xAP Message **************"
print "change",event.propertyName
print "from", event.oldValue, "to", event.newValue
print "source systemName", event.source.systemName
print "source userName", event.source.userName
# format and send the message
# the final message will look like this on the wire:
#
# xap-header
# {
# v=12
# hop=1
# uid=FFFF0000
# class=xAPBSC.cmd
# source=JMRI.DecoderPro.1
# destination=NWE.EVA485.DEFAULT:08
# }
# output.state.1
# {
# ID=08
# State=ON
# }
# *
myProperties = myNetwork.getProperties()
myMessage = xAPlib.xAPMessage("xAPBSC.cmd", myProperties.getxAPAddress())
myMessage.setUID(self.uid)
myMessage.setTarget(self.target)
if (event.newValue == CLOSED) :
myMessage.addNameValuePair( "output.state.1", "ID", self.id)
myMessage.addNameValuePair( "output.state.1", "State", "OFF")
myMessage.addNameValuePair( "output.state.1", "Text", "CLOSED") # Optional
else :
myMessage.addNameValuePair( "output.state.1", "ID", self.id)
myMessage.addNameValuePair( "output.state.1", "State", "ON")
myMessage.addNameValuePair( "output.state.1", "Text", "THROWN") # Optional
myNetwork.sendMessage(myMessage)
print myMessage.toString()
return
def defineTurnout(name, uid, id, target) :
t = turnouts.provideTurnout(name)
m = TurnoutListener()
m.uid = uid
m.id = id
m.target = target
t.addPropertyChangeListener(m)
return
# register xAPRxEvents listener
print "register"
myNetwork.addMyEventListener(InputListener())
# define the turnouts
defineTurnout("IT:XAP:XAPBSC:NWE.EVA485.DEFAULT:99", "FF010100", "99", "NWE.EVA485.DEFAULT")
print "End of Script"
| gpl-2.0 | 9,160,168,916,993,520 | 33.319635 | 110 | 0.612826 | false |
cjgrady/pamRandomization | convertBackToCsv.py | 1 | 3520 | """
@summary: Converts the random PAMs back into CSVs and add back in headers
"""
import concurrent.futures
import csv
import os
import numpy as np
# .............................................................................
def writeCSVfromNpy(outFn, mtxFn, headerRow, metaCols):
"""
@summary: Converts a numpy array back into a CSV file
@param outFn: The filename where to write the data
@param mtx: The numpy matrix
@param headerRow: The headers for the file
@param metaCols: Meta columns
"""
mtx = np.load(mtxFn)
# Sanity checks
numRows, numCols = mtx.shape
#print numRows, numCols, len(metaCols), len(headerRow), len(metaCols[0])
#assert numRows == len(metaCols)
#assert numCols == len(headerRow) - len(metaCols[0])
with open(outFn, 'w') as outF:
writer = csv.writer(outF, delimiter=',')
# Write header row
writer.writerow(headerRow)
# Write each row
for i in range(numRows):
row = []
row.extend(metaCols[i])
#row = metaCols[i]
row.extend(mtx[i])
writer.writerow(row)
# .............................................................................
def getHeaderRowAndMetaCols(csvFn):
"""
@summary: Extracts the header row and the meta columns
@note: Assumes that both are present
"""
headerRow = []
metaCols = []
with open(csvFn) as inF:
reader = csv.reader(inF)
headerRow = reader.next()
for row in reader:
metaCols.append(row[0:3])
return headerRow, metaCols
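# Illustrative sketch of the CSV layout this script assumes (the column names
# below are hypothetical, not taken from the original data): a header row
# followed by data rows whose first three columns are site metadata and whose
# remaining columns are the PAM values, e.g.
#   siteId,x,y,sp1,sp2,...
#   17,-104.5,39.7,1,0,...
# getHeaderRowAndMetaCols() recovers the header and the metadata triples;
# writeCSVfromNpy() re-attaches both around the randomized matrix.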
# .............................................................................
if __name__ == "__main__":
import glob
globFns = [
('/home/cjgrady/ecosim/rand/pam2650-*.npy', '/home/cjgrady/ecosim/csvs/pam_2650_reclass3.csv'),
('/home/cjgrady/ecosim/rand/pam2670-*.npy', '/home/cjgrady/ecosim/csvs/pam_2670_reclass3.csv'),
('/home/cjgrady/ecosim/rand/pam8550-*.npy', '/home/cjgrady/ecosim/csvs/pam_8550_reclass3.csv'),
('/home/cjgrady/ecosim/rand/pam8570-*.npy', '/home/cjgrady/ecosim/csvs/pam_8570_reclass3.csv')
]
with concurrent.futures.ProcessPoolExecutor(max_workers=5) as executor:
for inFn, origCsv in globFns:
fns = glob.glob(inFn)
print len(fns)
#origCsv = "/home/cjgrady/ecosim/csvs/pam_presentM.csv"
headerRow, metaCols = getHeaderRowAndMetaCols(origCsv)
for fn in fns:
baseFn = os.path.splitext(os.path.basename(fn))[0]
outFn = os.path.join('/home/cjgrady/ecosim/randCsvs/', '%s.csv' % baseFn)
if not os.path.exists(outFn):
executor.submit(writeCSVfromNpy, outFn, fn, headerRow, metaCols)
#writeCSVfromNpy(outFn, fn, headerRow, metaCols)
#for fn in fns:
# print fn
# mtx = np.load(fn)
# baseFn = os.path.splitext(os.path.basename(fn))[0]
# outFn = os.path.join('/home/cjgrady/ecosim/randCsvs/', '%s.csv' % baseFn)
# print "Writing out to:", outFn
# #outFn = '/home/cjgrady/ecosim/randCsvs/pam_presentM-200.csv'
# writeCSVfromNpy(outFn, mtx, headerRow, metaCols)
# mtx = None
#def runMultiprocess(myArgs):
# with concurrent.futures.ProcessPoolExecutor(max_workers=4) as executor:
# #for url, cl in myArgs:
# # executor.submit(testGetUrl, url, cl)
# for e in executor.map(pushJobData, myArgs):
# print e
| gpl-2.0 | 6,710,682,274,103,524,000 | 33.174757 | 109 | 0.571591 | false |
ilkermanap/esos-manager | src/tools/tftp.py | 1 | 3685 | import glob
import os
def read_file(fname):
temp = []
lines = open(fname, "r").readlines()
for line in lines:
if line.strip().startswith("#"):
continue
else:
if len(line.strip()) > 0:
temp.append(line)
return temp
class TftpAppend:
def __init__(self, line="append"):
self.content = line.strip()
self.parts = self.content.split()
        if self.parts[0].lower() != "append":
            # NOTE: __init__ cannot return a value to signal failure; raise instead
            raise ValueError("not an append line: %r" % line)
self.values = {}
if len(self.parts) > 1:
for vals in self.parts[1:]:
key, val = vals.split("=")
self.values[key] = val
def add_value(self, key, value):
self.values[key] = value
def append_str(self):
temp = " append "
for k,v in self.values.items():
temp += " %s=%s" % (k,v)
return temp
def control(self, basedir):
ans = True
msg = "OK"
for k, v in self.values.items():
if k in ["initrd", "ROOTIMAGE_FILE"]:
if not os.path.isfile("%s/%s" % (basedir, v)):
ans = False
msg = "%s/%s missing" % (basedir, v)
return (ans, msg)
if k == "TFTPSERVER_IP_ADDR":
resp = os.system("ping -c 1 %s" % v)
                if resp != 0:  # 'is not 0' tests identity, not equality
ans = False
msg = "tftp server %s cannot be reached" % v
return (ans, msg)
return (ans, msg)
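# Minimal usage sketch for TftpAppend (the option names and values below are
# illustrative assumptions, not taken from a real PXE config):
#   a = TftpAppend("append initrd=initrd.img TFTPSERVER_IP_ADDR=10.0.0.1")
#   a.add_value("ROOTIMAGE_FILE", "esos.img")
#   print a.append_str()
#   print a.control("/tftpboot")   # -> (True, "OK") if the files/host check out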
class TftpEntry:
def __init__(self):
self.label = None
self.menu = None
self.menulabel = None
self.kernel = None
self.append = None
def add_line(self, line):
pass
class TftpConfig:
def __init__(self, fname=None):
labelfound = False
self.header = []
self.entries = {}
if fname is None:
self.header = ['DEFAULT menu.c32','prompt 0','timeout 5']
else:
print "ttfpconfig icinde ", fname
self.lines = read_file(fname)
temp_entry = None
for line in self.lines:
if line.lower().startswith("label"):
if labelfound == True:
self.entries[temp_entry.label] = temp_entry
labelfound = True
lbl = line.split()[1]
temp_entry = TftpEntry()
temp_entry.label = lbl
if not labelfound:
self.header.append(line)
else:
if line.lower().startswith("label"):
pass
else:
if line.strip().lower().startswith("menu label"):
temp_entry.menulabel = line.split()[2]
elif line.strip().lower().startswith("menu"):
temp_entry.menu = line.split()[1]
if line.strip().lower().startswith("kernel"):
temp_entry.kernel = line.split()[1]
if line.strip().lower().startswith("append"):
                            temp_entry.append = TftpAppend(line = line.strip())
            # store the final entry once the loop ends; without this the last
            # label in the file would be silently dropped
            if temp_entry is not None:
                self.entries[temp_entry.label] = temp_entry
def add_entry(self, new_entry):
self.entries[new_entry.label] = new_entry
class Tftpboot:
def __init__(self, basedir="/tftpboot"):
self.basedir = basedir
self.config_files = glob.glob("%s/pxelinux.cfg/*" % self.basedir)
self.configs = {}
for f in self.config_files:
self.configs[f] = TftpConfig(f)
if __name__ == "__main__":
tft = Tftpboot()
| bsd-3-clause | -8,245,553,311,031,813,000 | 30.767241 | 79 | 0.464315 | false |
hvnsweeting/GitPython | git/objects/submodule/base.py | 1 | 49306 | from . import util
from .util import (
mkhead,
sm_name,
sm_section,
unbare_repo,
SubmoduleConfigParser,
find_first_remote_branch
)
from git.objects.util import Traversable
from io import BytesIO # need a dict to set bloody .name field
from git.util import (
Iterable,
join_path_native,
to_native_path_linux,
RemoteProgress,
rmtree
)
from git.config import (
SectionConstraint,
GitConfigParser,
cp
)
from git.exc import (
InvalidGitRepositoryError,
NoSuchPathError
)
from git.compat import (
string_types,
defenc
)
import stat
import git
import os
import logging
__all__ = ["Submodule", "UpdateProgress"]
log = logging.getLogger('git.objects.submodule.base')
class UpdateProgress(RemoteProgress):
"""Class providing detailed progress information to the caller who should
derive from it and implement the ``update(...)`` message"""
CLONE, FETCH, UPDWKTREE = [1 << x for x in range(RemoteProgress._num_op_codes, RemoteProgress._num_op_codes + 3)]
_num_op_codes = RemoteProgress._num_op_codes + 3
__slots__ = tuple()
BEGIN = UpdateProgress.BEGIN
END = UpdateProgress.END
CLONE = UpdateProgress.CLONE
FETCH = UpdateProgress.FETCH
UPDWKTREE = UpdateProgress.UPDWKTREE
# IndexObject comes via the util module; it's a 'hacky' fix thanks to Python's
# import mechanism, which causes plenty of trouble. The only reason for packages
# and modules is refactoring - subpackages shouldn't depend on parent packages
class Submodule(util.IndexObject, Iterable, Traversable):
"""Implements access to a git submodule. They are special in that their sha
represents a commit in the submodule's repository which is to be checked out
at the path of this instance.
The submodule type does not have a string type associated with it, as it exists
solely as a marker in the tree and index.
All methods work in bare and non-bare repositories."""
_id_attribute_ = "name"
k_modules_file = '.gitmodules'
k_head_option = 'branch'
k_head_default = 'master'
k_default_mode = stat.S_IFDIR | stat.S_IFLNK # submodules are directories with link-status
    # this is a bogus type for base class compatibility
type = 'submodule'
__slots__ = ('_parent_commit', '_url', '_branch_path', '_name', '__weakref__')
_cache_attrs = ('path', '_url', '_branch_path')
def __init__(self, repo, binsha, mode=None, path=None, name=None, parent_commit=None, url=None, branch_path=None):
"""Initialize this instance with its attributes. We only document the ones
that differ from ``IndexObject``
:param repo: Our parent repository
:param binsha: binary sha referring to a commit in the remote repository, see url parameter
:param parent_commit: see set_parent_commit()
:param url: The url to the remote repository which is the submodule
:param branch_path: full (relative) path to ref to checkout when cloning the remote repository"""
super(Submodule, self).__init__(repo, binsha, mode, path)
self.size = 0
if parent_commit is not None:
self._parent_commit = parent_commit
if url is not None:
self._url = url
if branch_path is not None:
assert isinstance(branch_path, string_types)
self._branch_path = branch_path
if name is not None:
self._name = name
def _set_cache_(self, attr):
if attr == '_parent_commit':
# set a default value, which is the root tree of the current head
try:
self._parent_commit = self.repo.commit()
except ValueError:
# This fails in an empty repository.
self._parent_commit = None
# end exception handling
elif attr in ('path', '_url', '_branch_path'):
reader = self.config_reader()
# default submodule values
self.path = reader.get_value('path')
self._url = reader.get_value('url')
# git-python extension values - optional
self._branch_path = reader.get_value(self.k_head_option, git.Head.to_full_path(self.k_head_default))
elif attr == '_name':
raise AttributeError("Cannot retrieve the name of a submodule if it was not set initially")
else:
super(Submodule, self)._set_cache_(attr)
# END handle attribute name
def _get_intermediate_items(self, item):
""":return: all the submodules of our module repository"""
try:
return type(self).list_items(item.module())
except InvalidGitRepositoryError:
return list()
# END handle intermeditate items
@classmethod
def _need_gitfile_submodules(cls, git):
return git.version_info[:3] >= (1, 8, 0)
def __eq__(self, other):
"""Compare with another submodule"""
# we may only compare by name as this should be the ID they are hashed with
# Otherwise this type wouldn't be hashable
# return self.path == other.path and self.url == other.url and super(Submodule, self).__eq__(other)
return self._name == other._name
def __ne__(self, other):
"""Compare with another submodule for inequality"""
return not (self == other)
def __hash__(self):
"""Hash this instance using its logical id, not the sha"""
return hash(self._name)
def __str__(self):
return self._name
def __repr__(self):
return "git.%s(name=%s, path=%s, url=%s, branch_path=%s)"\
% (type(self).__name__, self._name, self.path, self.url, self.branch_path)
@classmethod
def _config_parser(cls, repo, parent_commit, read_only):
""":return: Config Parser constrained to our submodule in read or write mode
:raise IOError: If the .gitmodules file cannot be found, either locally or in the repository
at the given parent commit. Otherwise the exception would be delayed until the first
access of the config parser"""
try:
parent_matches_head = repo.head.commit == parent_commit
except ValueError:
# We are most likely in an empty repository, so the HEAD doesn't point to a valid ref
parent_matches_head = True
# end
if not repo.bare and parent_matches_head:
fp_module = os.path.join(repo.working_tree_dir, cls.k_modules_file)
else:
try:
fp_module = cls._sio_modules(parent_commit)
except KeyError:
raise IOError("Could not find %s file in the tree of parent commit %s" %
(cls.k_modules_file, parent_commit))
# END handle exceptions
# END handle non-bare working tree
if not read_only and (repo.bare or not parent_matches_head):
raise ValueError("Cannot write blobs of 'historical' submodule configurations")
# END handle writes of historical submodules
return SubmoduleConfigParser(fp_module, read_only=read_only)
def _clear_cache(self):
# clear the possibly changed values
for name in self._cache_attrs:
try:
delattr(self, name)
except AttributeError:
pass
# END try attr deletion
# END for each name to delete
@classmethod
def _sio_modules(cls, parent_commit):
""":return: Configuration file as BytesIO - we only access it through the respective blob's data"""
sio = BytesIO(parent_commit.tree[cls.k_modules_file].data_stream.read())
sio.name = cls.k_modules_file
return sio
def _config_parser_constrained(self, read_only):
""":return: Config Parser constrained to our submodule in read or write mode"""
parser = self._config_parser(self.repo, self._parent_commit, read_only)
parser.set_submodule(self)
return SectionConstraint(parser, sm_section(self.name))
@classmethod
def _module_abspath(cls, parent_repo, path, name):
if cls._need_gitfile_submodules(parent_repo.git):
return os.path.join(parent_repo.git_dir, 'modules', name)
else:
return os.path.join(parent_repo.working_tree_dir, path)
# end
@classmethod
def _clone_repo(cls, repo, url, path, name, **kwargs):
""":return: Repo instance of newly cloned repository
:param repo: our parent repository
:param url: url to clone from
:param path: repository-relative path to the submodule checkout location
        :param name: canonical name of the submodule
        :param kwargs: additional arguments given to git.clone"""
module_abspath = cls._module_abspath(repo, path, name)
module_checkout_path = module_abspath
if cls._need_gitfile_submodules(repo.git):
kwargs['separate_git_dir'] = module_abspath
module_abspath_dir = os.path.dirname(module_abspath)
if not os.path.isdir(module_abspath_dir):
os.makedirs(module_abspath_dir)
module_checkout_path = os.path.join(repo.working_tree_dir, path)
# end
clone = git.Repo.clone_from(url, module_checkout_path, **kwargs)
if cls._need_gitfile_submodules(repo.git):
cls._write_git_file_and_module_config(module_checkout_path, module_abspath)
# end
return clone
@classmethod
def _to_relative_path(cls, parent_repo, path):
""":return: a path guaranteed to be relative to the given parent-repository
:raise ValueError: if path is not contained in the parent repository's working tree"""
path = to_native_path_linux(path)
if path.endswith('/'):
path = path[:-1]
# END handle trailing slash
if os.path.isabs(path):
working_tree_linux = to_native_path_linux(parent_repo.working_tree_dir)
if not path.startswith(working_tree_linux):
raise ValueError("Submodule checkout path '%s' needs to be within the parents repository at '%s'"
% (working_tree_linux, path))
path = path[len(working_tree_linux) + 1:]
if not path:
raise ValueError("Absolute submodule path '%s' didn't yield a valid relative path" % path)
# end verify converted relative path makes sense
# end convert to a relative path
return path
@classmethod
def _write_git_file_and_module_config(cls, working_tree_dir, module_abspath):
"""Writes a .git file containing a (preferably) relative path to the actual git module repository.
It is an error if the module_abspath cannot be made into a relative path, relative to the working_tree_dir
:note: will overwrite existing files !
:note: as we rewrite both the git file as well as the module configuration, we might fail on the configuration
and will not roll back changes done to the git file. This should be a non-issue, but may easily be fixed
if it becomes one
:param working_tree_dir: directory to write the .git file into
:param module_abspath: absolute path to the bare repository
"""
git_file = os.path.join(working_tree_dir, '.git')
rela_path = os.path.relpath(module_abspath, start=working_tree_dir)
fp = open(git_file, 'wb')
fp.write(("gitdir: %s" % rela_path).encode(defenc))
fp.close()
writer = GitConfigParser(os.path.join(module_abspath, 'config'), read_only=False, merge_includes=False)
writer.set_value('core', 'worktree', os.path.relpath(working_tree_dir, start=module_abspath))
writer.release()
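    # Illustrative outcome (the paths are assumptions): for a checkout at
    # <repo>/lib/mylib whose repository lives at <repo>/.git/modules/mylib,
    # the written .git file contains the single line
    #   gitdir: ../../.git/modules/mylib
    # and the module's config gains core.worktree = ../../../lib/mylib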
#{ Edit Interface
@classmethod
def add(cls, repo, name, path, url=None, branch=None, no_checkout=False):
"""Add a new submodule to the given repository. This will alter the index
as well as the .gitmodules file, but will not create a new commit.
If the submodule already exists, no matter if the configuration differs
from the one provided, the existing submodule will be returned.
:param repo: Repository instance which should receive the submodule
:param name: The name/identifier for the submodule
:param path: repository-relative or absolute path at which the submodule
should be located
It will be created as required during the repository initialization.
:param url: git-clone compatible URL, see git-clone reference for more information
If None, the repository is assumed to exist, and the url of the first
remote is taken instead. This is useful if you want to make an existing
            repository a submodule of another one.
:param branch: name of branch at which the submodule should (later) be checked out.
The given branch must exist in the remote repository, and will be checked
out locally as a tracking branch.
It will only be written into the configuration if it not None, which is
when the checked out branch will be the one the remote HEAD pointed to.
The result you get in these situation is somewhat fuzzy, and it is recommended
to specify at least 'master' here.
Examples are 'master' or 'feature/new'
:param no_checkout: if True, and if the repository has to be cloned manually,
no checkout will be performed
:return: The newly created submodule instance
:note: works atomically, such that no change will be done if the repository
update fails for instance"""
if repo.bare:
raise InvalidGitRepositoryError("Cannot add submodules to bare repositories")
# END handle bare repos
path = cls._to_relative_path(repo, path)
# assure we never put backslashes into the url, as some operating systems
# like it ...
if url is not None:
url = to_native_path_linux(url)
# END assure url correctness
# INSTANTIATE INTERMEDIATE SM
sm = cls(repo, cls.NULL_BIN_SHA, cls.k_default_mode, path, name, url='invalid-temporary')
if sm.exists():
# reretrieve submodule from tree
try:
return repo.head.commit.tree[path]
except KeyError:
# could only be in index
index = repo.index
entry = index.entries[index.entry_key(path, 0)]
sm.binsha = entry.binsha
return sm
# END handle exceptions
# END handle existing
# fake-repo - we only need the functionality on the branch instance
        # note: 'str(branch) or default' would never fall back for branch=None,
        # as str(None) == 'None' is truthy; apply the default before str()
        br = git.Head(repo, git.Head.to_full_path(str(branch or cls.k_head_default)))
has_module = sm.module_exists()
branch_is_default = branch is None
if has_module and url is not None:
if url not in [r.url for r in sm.module().remotes]:
raise ValueError(
"Specified URL '%s' does not match any remote url of the repository at '%s'" % (url, sm.abspath))
# END check url
# END verify urls match
mrepo = None
if url is None:
if not has_module:
raise ValueError("A URL was not given and existing repository did not exsit at %s" % path)
# END check url
mrepo = sm.module()
urls = [r.url for r in mrepo.remotes]
if not urls:
raise ValueError("Didn't find any remote url in repository at %s" % sm.abspath)
# END verify we have url
url = urls[0]
else:
# clone new repo
kwargs = {'n': no_checkout}
if not branch_is_default:
kwargs['b'] = br.name
# END setup checkout-branch
# _clone_repo(cls, repo, url, path, name, **kwargs):
mrepo = cls._clone_repo(repo, url, path, name, **kwargs)
# END verify url
        # It's important to add the URL to the parent config, to let `git submodule` know;
        # otherwise there is a '-' character in front of the submodule listing
# a38efa84daef914e4de58d1905a500d8d14aaf45 mymodule (v0.9.0-1-ga38efa8)
# -a38efa84daef914e4de58d1905a500d8d14aaf45 submodules/intermediate/one
writer = sm.repo.config_writer()
writer.set_value(sm_section(name), 'url', url)
writer.release()
# update configuration and index
index = sm.repo.index
writer = sm.config_writer(index=index, write=False)
writer.set_value('url', url)
writer.set_value('path', path)
sm._url = url
if not branch_is_default:
# store full path
writer.set_value(cls.k_head_option, br.path)
sm._branch_path = br.path
# END handle path
writer.release()
del(writer)
        # we deliberately assume that our head matches our index!
parent_repo_is_empty = False
try:
sm._parent_commit = repo.head.commit
except ValueError:
parent_repo_is_empty = True
# Can't set this yet, if the parent repo is empty.
# end
sm.binsha = mrepo.head.commit.binsha
index.add([sm], write=True)
if parent_repo_is_empty:
# The user is expected to make a commit, and this submodule will initialize itself when
# _parent_commit is required
del sm._parent_commit
log.debug("Will not set _parent_commit now as the parent repository has no commit yet.")
# end
return sm
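    # Usage sketch for add() (repository layout and URL are assumptions):
    #   repo = git.Repo('.')
    #   sm = Submodule.add(repo, 'mylib', 'lib/mylib',
    #                      url='https://example.com/mylib.git', branch='master')
    #   repo.index.commit("added submodule 'mylib'")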
def update(self, recursive=False, init=True, to_latest_revision=False, progress=None,
dry_run=False):
"""Update the repository of this submodule to point to the checkout
we point at with the binsha of this instance.
:param recursive: if True, we will operate recursively and update child-
modules as well.
:param init: if True, the module repository will be cloned into place if necessary
:param to_latest_revision: if True, the submodule's sha will be ignored during checkout.
Instead, the remote will be fetched, and the local tracking branch updated.
This only works if we have a local tracking branch, which is the case
if the remote repository had a master branch, or of the 'branch' option
was specified for this submodule and the branch existed remotely
:param progress: UpdateProgress instance or None of no progress should be shown
:param dry_run: if True, the operation will only be simulated, but not performed.
All performed operations are read-only
:note: does nothing in bare repositories
        :note: method is definitely not atomic if recursive is True
:return: self"""
if self.repo.bare:
return self
# END pass in bare mode
if progress is None:
progress = UpdateProgress()
# END handle progress
prefix = ''
if dry_run:
prefix = "DRY-RUN: "
# END handle prefix
# to keep things plausible in dry-run mode
if dry_run:
mrepo = None
# END init mrepo
# ASSURE REPO IS PRESENT AND UPTODATE
#####################################
try:
mrepo = self.module()
rmts = mrepo.remotes
len_rmts = len(rmts)
for i, remote in enumerate(rmts):
op = FETCH
if i == 0:
op |= BEGIN
# END handle start
progress.update(op, i, len_rmts, prefix + "Fetching remote %s of submodule %r" % (remote, self.name))
#===============================
if not dry_run:
remote.fetch(progress=progress)
# END handle dry-run
#===============================
if i == len_rmts - 1:
op |= END
# END handle end
progress.update(op, i, len_rmts, prefix + "Done fetching remote of submodule %r" % self.name)
# END fetch new data
except InvalidGitRepositoryError:
if not init:
return self
# END early abort if init is not allowed
# there is no git-repository yet - but delete empty paths
checkout_module_abspath = self.abspath
if not dry_run and os.path.isdir(checkout_module_abspath):
try:
os.rmdir(checkout_module_abspath)
except OSError:
raise OSError("Module directory at %r does already exist and is non-empty"
% checkout_module_abspath)
# END handle OSError
# END handle directory removal
# don't check it out at first - nonetheless it will create a local
# branch according to the remote-HEAD if possible
progress.update(BEGIN | CLONE, 0, 1, prefix + "Cloning %s to %s in submodule %r" %
(self.url, checkout_module_abspath, self.name))
if not dry_run:
mrepo = self._clone_repo(self.repo, self.url, self.path, self.name, n=True)
# END handle dry-run
progress.update(END | CLONE, 0, 1, prefix + "Done cloning to %s" % checkout_module_abspath)
if not dry_run:
# see whether we have a valid branch to checkout
try:
# find a remote which has our branch - we try to be flexible
remote_branch = find_first_remote_branch(mrepo.remotes, self.branch_name)
local_branch = mkhead(mrepo, self.branch_path)
# have a valid branch, but no checkout - make sure we can figure
# that out by marking the commit with a null_sha
local_branch.set_object(util.Object(mrepo, self.NULL_BIN_SHA))
# END initial checkout + branch creation
# make sure HEAD is not detached
mrepo.head.set_reference(local_branch, logmsg="submodule: attaching head to %s" % local_branch)
mrepo.head.ref.set_tracking_branch(remote_branch)
except IndexError:
log.warn("Failed to checkout tracking branch %s", self.branch_path)
# END handle tracking branch
# NOTE: Have to write the repo config file as well, otherwise
# the default implementation will be offended and not update the repository
# Maybe this is a good way to assure it doesn't get into our way, but
                # we want to stay backwards compatible too... It's so redundant!
writer = self.repo.config_writer()
writer.set_value(sm_section(self.name), 'url', self.url)
writer.release()
# END handle dry_run
# END handle initalization
# DETERMINE SHAS TO CHECKOUT
############################
binsha = self.binsha
hexsha = self.hexsha
if mrepo is not None:
# mrepo is only set if we are not in dry-run mode or if the module existed
is_detached = mrepo.head.is_detached
# END handle dry_run
if mrepo is not None and to_latest_revision:
msg_base = "Cannot update to latest revision in repository at %r as " % mrepo.working_dir
if not is_detached:
rref = mrepo.head.ref.tracking_branch()
if rref is not None:
rcommit = rref.commit
binsha = rcommit.binsha
hexsha = rcommit.hexsha
else:
log.error("%s a tracking branch was not set for local branch '%s'", msg_base, mrepo.head.ref)
# END handle remote ref
else:
log.error("%s there was no local tracking branch", msg_base)
# END handle detached head
# END handle to_latest_revision option
# update the working tree
# handles dry_run
if mrepo is not None and mrepo.head.commit.binsha != binsha:
progress.update(BEGIN | UPDWKTREE, 0, 1, prefix +
"Updating working tree at %s for submodule %r to revision %s"
% (self.path, self.name, hexsha))
if not dry_run:
if is_detached:
                    # NOTE: for now we force; the user is not supposed to change detached
# submodules anyway. Maybe at some point this becomes an option, to
# properly handle user modifications - see below for future options
# regarding rebase and merge.
mrepo.git.checkout(hexsha, force=True)
else:
# TODO: allow to specify a rebase, merge, or reset
# TODO: Warn if the hexsha forces the tracking branch off the remote
# branch - this should be prevented when setting the branch option
mrepo.head.reset(hexsha, index=True, working_tree=True)
# END handle checkout
# END handle dry_run
progress.update(END | UPDWKTREE, 0, 1, prefix + "Done updating working tree for submodule %r" % self.name)
# END update to new commit only if needed
# HANDLE RECURSION
##################
if recursive:
# in dry_run mode, the module might not exist
if mrepo is not None:
for submodule in self.iter_items(self.module()):
submodule.update(recursive, init, to_latest_revision, progress=progress, dry_run=dry_run)
# END handle recursive update
# END handle dry run
# END for each submodule
return self
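    # Illustrative call (assuming an initialized 'repo' object): clone and
    # check out every registered submodule, descending into nested ones:
    #   for sm in repo.submodules:
    #       sm.update(recursive=True, init=True)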
@unbare_repo
def move(self, module_path, configuration=True, module=True):
"""Move the submodule to a another module path. This involves physically moving
the repository at our current path, changing the configuration, as well as
adjusting our index entry accordingly.
:param module_path: the path to which to move our module in the parent repostory's working tree,
given as repository-relative or absolute path. Intermediate directories will be created
accordingly. If the path already exists, it must be empty.
Trailing (back)slashes are removed automatically
:param configuration: if True, the configuration will be adjusted to let
the submodule point to the given path.
:param module: if True, the repository managed by this submodule
will be moved as well. If False, we don't move the submodule's checkout, which may leave
the parent repository in an inconsistent state.
:return: self
:raise ValueError: if the module path existed and was not empty, or was a file
:note: Currently the method is not atomic, and it could leave the repository
in an inconsistent state if a sub-step fails for some reason
"""
if module + configuration < 1:
raise ValueError("You must specify to move at least the module or the configuration of the submodule")
# END handle input
module_checkout_path = self._to_relative_path(self.repo, module_path)
# VERIFY DESTINATION
if module_checkout_path == self.path:
return self
# END handle no change
module_checkout_abspath = join_path_native(self.repo.working_tree_dir, module_checkout_path)
if os.path.isfile(module_checkout_abspath):
raise ValueError("Cannot move repository onto a file: %s" % module_checkout_abspath)
# END handle target files
index = self.repo.index
tekey = index.entry_key(module_checkout_path, 0)
# if the target item already exists, fail
if configuration and tekey in index.entries:
raise ValueError("Index entry for target path did already exist")
# END handle index key already there
# remove existing destination
if module:
if os.path.exists(module_checkout_abspath):
if len(os.listdir(module_checkout_abspath)):
raise ValueError("Destination module directory was not empty")
# END handle non-emptiness
if os.path.islink(module_checkout_abspath):
os.remove(module_checkout_abspath)
else:
os.rmdir(module_checkout_abspath)
# END handle link
else:
# recreate parent directories
# NOTE: renames() does that now
pass
# END handle existence
# END handle module
# move the module into place if possible
cur_path = self.abspath
renamed_module = False
if module and os.path.exists(cur_path):
os.renames(cur_path, module_checkout_abspath)
renamed_module = True
if os.path.isfile(os.path.join(module_checkout_abspath, '.git')):
module_abspath = self._module_abspath(self.repo, self.path, self.name)
self._write_git_file_and_module_config(module_checkout_abspath, module_abspath)
# end handle git file rewrite
# END move physical module
# rename the index entry - have to manipulate the index directly as
# git-mv cannot be used on submodules ... yeah
previous_sm_path = self.path
try:
if configuration:
try:
ekey = index.entry_key(self.path, 0)
entry = index.entries[ekey]
del(index.entries[ekey])
nentry = git.IndexEntry(entry[:3] + (module_checkout_path,) + entry[4:])
index.entries[tekey] = nentry
except KeyError:
raise InvalidGitRepositoryError("Submodule's entry at %r did not exist" % (self.path))
# END handle submodule doesn't exist
# update configuration
writer = self.config_writer(index=index) # auto-write
writer.set_value('path', module_checkout_path)
self.path = module_checkout_path
writer.release()
del(writer)
# END handle configuration flag
except Exception:
if renamed_module:
os.renames(module_checkout_abspath, cur_path)
# END undo module renaming
raise
# END handle undo rename
# Auto-rename submodule if it's name was 'default', that is, the checkout directory
if previous_sm_path == self.name:
self.rename(module_checkout_path)
# end
return self
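    # Illustrative: relocate a submodule checkout within the parent tree
    # (the target path is an assumption):
    #   sm.move('vendor/mylib')   # adjusts .gitmodules, the index and the files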
@unbare_repo
def remove(self, module=True, force=False, configuration=True, dry_run=False):
"""Remove this submodule from the repository. This will remove our entry
from the .gitmodules file and the entry in the .git/config file.
:param module: If True, the module checkout we point to will be deleted
as well. If the module is currently on a commit which is not part
            of any branch in the remote, if the currently checked out branch
            is ahead of its tracking branch, or if you have modifications in
            the working tree or untracked files, the removal will fail.
            In case the removal of the repository fails for these reasons, the
submodule status will not have been altered.
If this submodule has child-modules on its own, these will be deleted
prior to touching the own module.
:param force: Enforces the deletion of the module even though it contains
modifications. This basically enforces a brute-force file system based
deletion.
:param configuration: if True, the submodule is deleted from the configuration,
otherwise it isn't. Although this should be enabled most of the times,
this flag enables you to safely delete the repository of your submodule.
:param dry_run: if True, we will not actually do anything, but throw the errors
we would usually throw
:return: self
:note: doesn't work in bare repositories
:note: doesn't work atomically, as failure to remove any part of the submodule will leave
an inconsistent state
:raise InvalidGitRepositoryError: thrown if the repository cannot be deleted
:raise OSError: if directories or files could not be removed"""
if not (module + configuration):
raise ValueError("Need to specify to delete at least the module, or the configuration")
# END handle parameters
# Recursively remove children of this submodule
nc = 0
for csm in self.children():
nc += 1
csm.remove(module, force, configuration, dry_run)
del(csm)
# end
if nc > 0:
# Assure we don't leave the parent repository in a dirty state, and commit our changes
# It's important for recursive, unforced, deletions to work as expected
self.module().index.commit("Removed submodule '%s'" % self.name)
# end handle recursion
# DELETE REPOSITORY WORKING TREE
################################
if module and self.module_exists():
mod = self.module()
git_dir = mod.git_dir
if force:
# take the fast lane and just delete everything in our module path
# TODO: If we run into permission problems, we have a highly inconsistent
# state. Delete the .git folders last, start with the submodules first
mp = self.abspath
method = None
if os.path.islink(mp):
method = os.remove
elif os.path.isdir(mp):
method = rmtree
elif os.path.exists(mp):
raise AssertionError("Cannot forcibly delete repository as it was neither a link, nor a directory")
# END handle brutal deletion
if not dry_run:
assert method
method(mp)
# END apply deletion method
else:
# verify we may delete our module
if mod.is_dirty(index=True, working_tree=True, untracked_files=True):
raise InvalidGitRepositoryError(
"Cannot delete module at %s with any modifications, unless force is specified"
% mod.working_tree_dir)
# END check for dirt
# figure out whether we have new commits compared to the remotes
# NOTE: If the user pulled all the time, the remote heads might
# not have been updated, so commits coming from the remote look
# as if they come from us. But we stay strictly read-only and
# don't fetch beforehand.
for remote in mod.remotes:
num_branches_with_new_commits = 0
rrefs = remote.refs
for rref in rrefs:
num_branches_with_new_commits += len(mod.git.cherry(rref)) != 0
# END for each remote ref
# not a single remote branch contained all our commits
if num_branches_with_new_commits == len(rrefs):
raise InvalidGitRepositoryError(
"Cannot delete module at %s as there are new commits" % mod.working_tree_dir)
# END handle new commits
# have to manually delete references as python's scoping is
# not existing, they could keep handles open ( on windows this is a problem )
if len(rrefs):
del(rref)
# END handle remotes
del(rrefs)
del(remote)
# END for each remote
# finally delete our own submodule
if not dry_run:
wtd = mod.working_tree_dir
del(mod) # release file-handles (windows)
rmtree(wtd)
# END delete tree if possible
# END handle force
if not dry_run and os.path.isdir(git_dir):
rmtree(git_dir)
# end handle separate bare repository
# END handle module deletion
# void our data not to delay invalid access
if not dry_run:
self._clear_cache()
# DELETE CONFIGURATION
######################
if configuration and not dry_run:
# first the index-entry
parent_index = self.repo.index
try:
del(parent_index.entries[parent_index.entry_key(self.path, 0)])
except KeyError:
pass
# END delete entry
parent_index.write()
# now git config - need the config intact, otherwise we can't query
# information anymore
writer = self.repo.config_writer()
writer.remove_section(sm_section(self.name))
writer.release()
writer = self.config_writer()
writer.remove_section()
writer.release()
# END delete configuration
return self
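    # Illustrative: drop a submodule together with its checkout, even if dirty:
    #   sm.remove(module=True, force=True, configuration=True)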
def set_parent_commit(self, commit, check=True):
"""Set this instance to use the given commit whose tree is supposed to
contain the .gitmodules blob.
:param commit: Commit'ish reference pointing at the root_tree
:param check: if True, relatively expensive checks will be performed to verify
validity of the submodule.
:raise ValueError: if the commit's tree didn't contain the .gitmodules blob.
:raise ValueError: if the parent commit didn't store this submodule under the
current path
:return: self"""
pcommit = self.repo.commit(commit)
pctree = pcommit.tree
if self.k_modules_file not in pctree:
raise ValueError("Tree of commit %s did not contain the %s file" % (commit, self.k_modules_file))
# END handle exceptions
prev_pc = self._parent_commit
self._parent_commit = pcommit
if check:
parser = self._config_parser(self.repo, self._parent_commit, read_only=True)
if not parser.has_section(sm_section(self.name)):
self._parent_commit = prev_pc
raise ValueError("Submodule at path %r did not exist in parent commit %s" % (self.path, commit))
# END handle submodule did not exist
# END handle checking mode
# update our sha, it could have changed
# If check is False, we might see a parent-commit that doens't even contain the submodule anymore.
# in that case, mark our sha as being NULL
try:
self.binsha = pctree[self.path].binsha
except KeyError:
self.binsha = self.NULL_BIN_SHA
# end
self._clear_cache()
return self
@unbare_repo
def config_writer(self, index=None, write=True):
""":return: a config writer instance allowing you to read and write the data
belonging to this submodule into the .gitmodules file.
:param index: if not None, an IndexFile instance which should be written.
defaults to the index of the Submodule's parent repository.
:param write: if True, the index will be written each time a configuration
value changes.
:note: the parameters allow for a more efficient writing of the index,
as you can pass in a modified index on your own, prevent automatic writing,
and write yourself once the whole operation is complete
:raise ValueError: if trying to get a writer on a parent_commit which does not
match the current head commit
:raise IOError: If the .gitmodules file/blob could not be read"""
writer = self._config_parser_constrained(read_only=False)
if index is not None:
writer.config._index = index
writer.config._auto_write = write
return writer
@unbare_repo
def rename(self, new_name):
"""Rename this submodule
:note: This method takes care of renaming the submodule in various places, such as
* $parent_git_dir/config
* $working_tree_dir/.gitmodules
* (git >=v1.8.0: move submodule repository to new name)
As .gitmodules will be changed, you would need to make a commit afterwards. The changed .gitmodules file
will already be added to the index
:return: this submodule instance
"""
if self.name == new_name:
return self
# .git/config
pw = self.repo.config_writer()
# As we ourselves didn't write anything about submodules into the parent .git/config, we will not require
# it to exist, and just ignore missing entries
if pw.has_section(sm_section(self.name)):
pw.rename_section(sm_section(self.name), sm_section(new_name))
# end
pw.release()
# .gitmodules
cw = self.config_writer().config
cw.rename_section(sm_section(self.name), sm_section(new_name))
cw.release()
self._name = new_name
# .git/modules
mod = self.module()
if mod.has_separate_working_tree():
module_abspath = self._module_abspath(self.repo, self.path, new_name)
os.renames(mod.git_dir, module_abspath)
self._write_git_file_and_module_config(mod.working_tree_dir, module_abspath)
# end move separate git repository
return self
#} END edit interface
#{ Query Interface
@unbare_repo
def module(self):
""":return: Repo instance initialized from the repository at our submodule path
:raise InvalidGitRepositoryError: if a repository was not available. This could
also mean that it was not yet initialized"""
# late import to workaround circular dependencies
module_checkout_abspath = self.abspath
try:
repo = git.Repo(module_checkout_abspath)
if repo != self.repo:
return repo
# END handle repo uninitialized
except (InvalidGitRepositoryError, NoSuchPathError):
raise InvalidGitRepositoryError("No valid repository at %s" % module_checkout_abspath)
else:
raise InvalidGitRepositoryError("Repository at %r was not yet checked out" % module_checkout_abspath)
# END handle exceptions
def module_exists(self):
""":return: True if our module exists and is a valid git repository. See module() method"""
try:
self.module()
return True
except Exception:
return False
# END handle exception
def exists(self):
"""
:return: True if the submodule exists, False otherwise. Please note that
a submodule may exist (in the .gitmodules file) even though its module
doesn't exist on disk"""
# keep attributes for later, and restore them if we have no valid data
# this way we do not actually alter the state of the object
loc = locals()
for attr in self._cache_attrs:
try:
if hasattr(self, attr):
loc[attr] = getattr(self, attr)
# END if we have the attribute cache
except cp.NoSectionError:
# on PY3, this can happen apparently ... don't know why this doesn't happen on PY2
pass
# END for each attr
self._clear_cache()
try:
try:
self.path
return True
except Exception:
return False
# END handle exceptions
finally:
for attr in self._cache_attrs:
if attr in loc:
setattr(self, attr, loc[attr])
# END if we have a cache
# END reapply each attribute
# END handle object state consistency
@property
def branch(self):
""":return: The branch instance that we are to checkout
:raise InvalidGitRepositoryError: if our module is not yet checked out"""
return mkhead(self.module(), self._branch_path)
@property
def branch_path(self):
"""
:return: full (relative) path as string to the branch we would checkout
from the remote and track"""
return self._branch_path
@property
def branch_name(self):
""":return: the name of the branch, which is the shortest possible branch name"""
# use an instance method, for this we create a temporary Head instance
# which uses a repository that is available at least ( it makes no difference )
return git.Head(self.repo, self._branch_path).name
@property
def url(self):
""":return: The url to the repository which our module-repository refers to"""
return self._url
@property
def parent_commit(self):
""":return: Commit instance with the tree containing the .gitmodules file
:note: will always point to the current head's commit if it was not set explicitly"""
return self._parent_commit
@property
def name(self):
""":return: The name of this submodule. It is used to identify it within the
.gitmodules file.
:note: by default, the name is the path at which to find the submodule, but
in git-python it should be a unique identifier similar to the identifiers
used for remotes, which allows to change the path of the submodule
easily
"""
return self._name
def config_reader(self):
"""
:return: ConfigReader instance which allows you to qurey the configuration values
of this submodule, as provided by the .gitmodules file
:note: The config reader will actually read the data directly from the repository
and thus does not need nor care about your working tree.
:note: Should be cached by the caller and only kept as long as needed
:raise IOError: If the .gitmodules file/blob could not be read"""
return self._config_parser_constrained(read_only=True)
def children(self):
"""
:return: IterableList(Submodule, ...) an iterable list of submodules instances
which are children of this submodule or 0 if the submodule is not checked out"""
return self._get_intermediate_items(self)
#} END query interface
#{ Iterable Interface
@classmethod
def iter_items(cls, repo, parent_commit='HEAD'):
""":return: iterator yielding Submodule instances available in the given repository"""
pc = repo.commit(parent_commit) # parent commit instance
try:
parser = cls._config_parser(repo, pc, read_only=True)
except IOError:
raise StopIteration
# END handle empty iterator
rt = pc.tree # root tree
for sms in parser.sections():
n = sm_name(sms)
p = parser.get_value(sms, 'path')
u = parser.get_value(sms, 'url')
b = cls.k_head_default
if parser.has_option(sms, cls.k_head_option):
b = str(parser.get_value(sms, cls.k_head_option))
# END handle optional information
# get the binsha
index = repo.index
try:
sm = rt[p]
except KeyError:
# try the index, maybe it was just added
try:
entry = index.entries[index.entry_key(p, 0)]
sm = Submodule(repo, entry.binsha, entry.mode, entry.path)
except KeyError:
raise InvalidGitRepositoryError(
"Gitmodule path %r did not exist in revision of parent commit %s" % (p, parent_commit))
# END handle keyerror
# END handle critical error
# fill in remaining info - saves time as it doesn't have to be parsed again
sm._name = n
sm._parent_commit = pc
sm._branch_path = git.Head.to_full_path(b)
sm._url = u
yield sm
# END for each section
#} END iterable interface
| bsd-3-clause | -2,055,360,205,848,469,500 | 42.633628 | 119 | 0.595648 | false |
annoviko/pyclustering | pyclustering/cluster/examples/birch_examples.py | 1 | 5638 | """!
@brief Examples of usage and demonstration of abilities of BIRCH algorithm in cluster analysis.
@authors Andrei Novikov ([email protected])
@date 2014-2020
@copyright BSD-3-Clause
"""
from pyclustering.cluster import cluster_visualizer
from pyclustering.cluster.birch import birch
from pyclustering.container.cftree import measurement_type
from pyclustering.utils import read_sample
from pyclustering.samples.definitions import SIMPLE_SAMPLES, FCPS_SAMPLES
def template_clustering(number_clusters, path, branching_factor=50, max_node_entries=100, initial_diameter=0.5, type_measurement=measurement_type.CENTROID_EUCLIDEAN_DISTANCE, entry_size_limit=200, diameter_multiplier=1.5, show_result=True):
print("Sample: ", path)
sample = read_sample(path)
birch_instance = birch(sample, number_clusters, branching_factor, max_node_entries, initial_diameter,
type_measurement, entry_size_limit, diameter_multiplier)
birch_instance.process()
clusters = birch_instance.get_clusters()
if show_result is True:
visualizer = cluster_visualizer()
visualizer.append_clusters(clusters, sample)
visualizer.show()
return sample, clusters
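# Illustrative direct call (the sample constant comes from pyclustering itself):
#   sample, clusters = template_clustering(2, FCPS_SAMPLES.SAMPLE_TWO_DIAMONDS)
# reads the Two Diamonds data set, runs BIRCH for two clusters and renders the
# result with cluster_visualizer.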
def cluster_sample1():
template_clustering(2, SIMPLE_SAMPLES.SAMPLE_SIMPLE1)
template_clustering(2, SIMPLE_SAMPLES.SAMPLE_SIMPLE1, 5, 5, 0.1, measurement_type.CENTROID_EUCLIDEAN_DISTANCE, 2) # only two entries available
def cluster_sample2():
template_clustering(3, SIMPLE_SAMPLES.SAMPLE_SIMPLE2)
def cluster_sample3():
template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE3)
def cluster_sample4():
template_clustering(5, SIMPLE_SAMPLES.SAMPLE_SIMPLE4)
def cluster_sample5():
template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE5)
def cluster_sample7():
template_clustering(2, SIMPLE_SAMPLES.SAMPLE_SIMPLE7)
def cluster_sample8():
template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE8)
def cluster_elongate():
template_clustering(2, SIMPLE_SAMPLES.SAMPLE_ELONGATE)
def cluster_lsun():
template_clustering(3, FCPS_SAMPLES.SAMPLE_LSUN)
def cluster_lsun_rebuilt():
template_clustering(3, FCPS_SAMPLES.SAMPLE_LSUN, entry_size_limit=20, diameter_multiplier=1.5)
def cluster_target():
template_clustering(6, FCPS_SAMPLES.SAMPLE_TARGET)
def cluster_two_diamonds():
template_clustering(2, FCPS_SAMPLES.SAMPLE_TWO_DIAMONDS)
def cluster_wing_nut():
template_clustering(2, FCPS_SAMPLES.SAMPLE_WING_NUT)
def cluster_chainlink():
template_clustering(2, FCPS_SAMPLES.SAMPLE_CHAINLINK)
def cluster_hepta():
template_clustering(7, FCPS_SAMPLES.SAMPLE_HEPTA)
def cluster_tetra():
template_clustering(4, FCPS_SAMPLES.SAMPLE_TETRA)
def cluster_engy_time():
template_clustering(2, FCPS_SAMPLES.SAMPLE_ENGY_TIME)
def experiment_execution_time(ccore=False):
template_clustering(2, SIMPLE_SAMPLES.SAMPLE_SIMPLE1)
template_clustering(3, SIMPLE_SAMPLES.SAMPLE_SIMPLE2)
template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE3)
template_clustering(5, SIMPLE_SAMPLES.SAMPLE_SIMPLE4)
template_clustering(4, SIMPLE_SAMPLES.SAMPLE_SIMPLE5)
template_clustering(2, SIMPLE_SAMPLES.SAMPLE_ELONGATE)
template_clustering(3, FCPS_SAMPLES.SAMPLE_LSUN)
template_clustering(6, FCPS_SAMPLES.SAMPLE_TARGET)
template_clustering(2, FCPS_SAMPLES.SAMPLE_TWO_DIAMONDS)
template_clustering(2, FCPS_SAMPLES.SAMPLE_WING_NUT)
template_clustering(2, FCPS_SAMPLES.SAMPLE_CHAINLINK)
template_clustering(7, FCPS_SAMPLES.SAMPLE_HEPTA)
template_clustering(4, FCPS_SAMPLES.SAMPLE_TETRA)
template_clustering(2, FCPS_SAMPLES.SAMPLE_ATOM)
def display_fcps_clustering_results():
(lsun, lsun_clusters) = template_clustering(3, FCPS_SAMPLES.SAMPLE_LSUN, show_result=False)
(target, target_clusters) = template_clustering(6, FCPS_SAMPLES.SAMPLE_TARGET, show_result=False)
(two_diamonds, two_diamonds_clusters) = template_clustering(2, FCPS_SAMPLES.SAMPLE_TWO_DIAMONDS, show_result=False)
(wing_nut, wing_nut_clusters) = template_clustering(2, FCPS_SAMPLES.SAMPLE_WING_NUT, show_result=False)
(chainlink, chainlink_clusters) = template_clustering(2, FCPS_SAMPLES.SAMPLE_CHAINLINK, show_result=False)
(hepta, hepta_clusters) = template_clustering(7, FCPS_SAMPLES.SAMPLE_HEPTA, show_result=False)
(tetra, tetra_clusters) = template_clustering(4, FCPS_SAMPLES.SAMPLE_TETRA, show_result=False)
(atom, atom_clusters) = template_clustering(2, FCPS_SAMPLES.SAMPLE_ATOM, show_result=False)
visualizer = cluster_visualizer(8, 4)
visualizer.append_clusters(lsun_clusters, lsun, 0)
visualizer.append_clusters(target_clusters, target, 1)
visualizer.append_clusters(two_diamonds_clusters, two_diamonds, 2)
visualizer.append_clusters(wing_nut_clusters, wing_nut, 3)
visualizer.append_clusters(chainlink_clusters, chainlink, 4)
visualizer.append_clusters(hepta_clusters, hepta, 5)
visualizer.append_clusters(tetra_clusters, tetra, 6)
visualizer.append_clusters(atom_clusters, atom, 7)
visualizer.show()
cluster_sample1()
cluster_sample2()
cluster_sample3()
cluster_sample4()
cluster_sample5()
cluster_sample7()
cluster_sample8()
cluster_elongate()
cluster_lsun()
cluster_lsun_rebuilt()
cluster_target()
cluster_two_diamonds()
cluster_wing_nut()
cluster_chainlink()
cluster_hepta()
cluster_tetra()
cluster_engy_time()
experiment_execution_time(True) # C++ code + Python env.
display_fcps_clustering_results()
| gpl-3.0 | 559,214,982,133,781,200 | 35.337748 | 240 | 0.73182 | false |
plantigrade/geni-tools | src/gcf/geni/util/urn_util.py | 1 | 10380 | #----------------------------------------------------------------------
# Copyright (c) 2010-2015 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
'''
URN creation and verification utilities.
'''
import re
from ...sfa.util.xrn import Xrn # for URN_PREFIX
class URN(object):
"""
A class that creates and extracts values from URNs
URN Convention:
urn:publicid:IDN+<authority>+<type>+<name>
Authority, type, and name are public ids transcribed into URN format
By convention a CH's name should be "ch" and an AM's should be "am"
The authority of the CH should be the prefix for all of your AM and user authorities
For instance: CH authority = "gcf//gpo//bbn", AM authority = "gcf//gpo/bbn//am1", user authority = "gcf//gpo//bbn"
EXAMPLES:
ch_urn = URN("gcf//gpo//bbn", "authority", "sa").urn_string() for a clearinghouse URN
am1_urn = URN("gcf//gpo//bbn//site1", "authority", "am").urn_string() for an AM at this authority
Looks like urn:publicid:IDN+gcf:gpo:bbn:site1+authority+am
am2_urn = URN("gcf//gpo//bbn//site2", "authority", "am").urn_string() for a second AM at this authority
Looks like urn:publicid:IDN+gcf:gpo:bbn:site2+authority+am
user_urn = URN("gcf//gpo//bbn", "user", "jane").urn_string() for a user made by the clearinghouse
Looks like urn:publicid:IDN+gcf:gpo:bbn+user+jane
slice_urn = URN("gcf//gpo//bbn", "slice", "my-great-experiment").urn_string()
Looks like urn:publicid:IDN+gcf:gpo:bbn+slice+my-great-experiment
resource_at_am1_urn = URN("gcf//gpo//bbn/site1", "node", "LinuxBox23").urn_string() for Linux Machine 23 managed by AM1 (at site 1)
Looks like urn:publicid:IDN+gcf:gpo:bbn:site1+node+LinuxBox23
"""
def __init__(self, authority=None, type=None, name=None, urn=None):
if not urn is None:
if not is_valid_urn(urn):
raise ValueError("Invalid URN %s" % urn)
spl = urn.split('+')
if len(spl) < 4:
raise ValueError("Invalid URN %s" % urn)
self.authority = urn_to_string_format(spl[1])
self.type = urn_to_string_format(spl[2])
self.name = urn_to_string_format('+'.join(spl[3:]))
self.urn = urn
else:
if not authority or not type or not name:
raise ValueError("Must provide either all of authority, type, and name, or a urn must be provided")
for i in [authority, type, name]:
if i.strip() == '':
raise ValueError("Parameter to create_urn was empty string")
self.authority = authority
self.type = type
self.name = name
# FIXME: check these are valid more?
if not is_valid_urn_string(authority):
authority = string_to_urn_format(authority)
if not is_valid_urn_string(type):
type = string_to_urn_format(type)
if not is_valid_urn_string(name):
name = string_to_urn_format(name)
self.urn = '%s+%s+%s+%s' % (Xrn.URN_PREFIX, authority, type, name)
if not is_valid_urn(self.urn):
raise ValueError("Failed to create valid URN from args %s, %s, %s" % (self.authority, self.type, self.name))
def __str__(self):
return self.urn_string()
def urn_string(self):
return self.urn
def getAuthority(self):
'''Get the authority in un-escaped publicid format'''
return self.authority
def getType(self):
'''Get the URN type in un-escaped publicid format'''
return self.type
def getName(self):
'''Get the name in un-escaped publicid format'''
return self.name
# Translate publicids to URN format.
# The order of these rules matters
# because we want to catch things like double colons before we
# translate single colons. This is only a subset of the rules.
# See the GENI Wiki: GAPI_Identifiers
# See http://www.faqs.org/rfcs/rfc3151.html
publicid_xforms = [('%', '%25'),
(';', '%3B'),
('+', '%2B'),
(' ', '+' ), # note you must first collapse WS
('#', '%23'),
('?', '%3F'),
("'", '%27'),
('::', ';' ),
(':', '%3A'),
('//', ':' ),
('/', '%2F')]
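# Illustrative effect of the transform table (example values):
#   string_to_urn_format('gcf//gpo//bbn')   -> 'gcf:gpo:bbn'
#   string_to_urn_format('has space+plus')  -> 'has+space%2Bplus'
# urn_to_string_format() applies the same rules in reverse to undo this.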
# FIXME: See sfa/util/xrn/Xrn.URN_PREFIX which is ...:IDN
publicid_urn_prefix = 'urn:publicid:'
def nameFromURN(instr):
'''Get the name from the given URN, or empty if not a valid URN'''
if not instr:
return ""
try:
urn = URN(urn=instr)
return urn.getName()
except Exception, e:
# print 'exception parsing urn: %s' % e
return ""
# validate urn
# Note that this is not sufficient but it is necessary
def is_valid_urn_string(instr):
'''Could this string be part of a URN'''
if instr is None or not (isinstance(instr, str) or
isinstance(instr, unicode)):
return False
#No whitespace
# no # or ? or /
if isinstance(instr, unicode):
instr = instr.encode('utf8')
if re.search("[\s|\?\/\#]", instr) is None:
return True
return False
# Note that this is not sufficient but it is necessary
def is_valid_urn(inurn):
''' Check that this string is a valid URN'''
# FIXME: This could pull out the type and do the type specific
# checks that are currently below
# FIXME: This should check for non empty authority and name pieces
return is_valid_urn_string(inurn) and \
inurn.startswith(publicid_urn_prefix) and \
len(inurn.split('+')) > 3
def is_valid_urn_bytype(inurn, urntype, logger=None):
if not is_valid_urn(inurn):
return False
urnObj = URN(urn=inurn)
if not urntype:
urntype = ""
urntype = urntype.lower()
if not urnObj.getType().lower() == urntype:
if logger:
logger.warn("URN %s not of right type: %s, not %s", inurn, urnObj.getType().lower(), urntype)
return False
if len(urnObj.getAuthority()) == 0:
if logger:
logger.warn("URN %s has empty authority", inurn)
return False
name = urnObj.getName()
if urntype == 'slice':
# Slice names are <=19 characters, only alphanumeric plus hyphen (no hyphen in first character): '^[a-zA-Z0-9][-a-zA-Z0-9]{0,18}$'
if len(name) > 19:
if logger:
logger.warn("URN %s too long. Slice names are max 19 characters", inurn)
return False
if not re.match("^[a-zA-Z0-9][-a-zA-Z0-9]{0,18}$", name):
if logger:
logger.warn("Slice names may only be alphanumeric plus hyphen (no leading hyphen): %s", name)
return False
elif urntype == 'sliver':
# May use only alphanumeric characters plus hyphen
# Note that EG uses a ':' as well.
if not re.match("^[-a-zA-Z0-9_\.]+$", name):
if logger:
logger.warn("Sliver names may only be alphanumeric plus hyphen, underscore, or period: %s", name)
return False
elif urntype == 'user':
# Usernames should begin with a letter and be alphanumeric or underscores; no hyphen or '.': ('^[a-zA-Z][\w]{0,7}$').
# Usernames are limited to 8 characters.
if len(name) > 8:
if logger:
logger.warn("URN %s too long. User names are max 8 characters", inurn)
return False
if not re.match("^[a-zA-Z][\w]{0,7}$", name):
if logger:
logger.warn("User names may only be alphanumeric plus underscore, beginning with a letter: %s", name)
return False
elif len(name) == 0:
if logger:
logger.warn("Empty name in URN %s", inurn)
return False
return True
def urn_to_publicid(urn):
'''Convert a URN like urn:publicid:... to a publicid'''
# Remove prefix
if urn is None or not is_valid_urn(urn):
# Erroneous urn for conversion
raise ValueError('Invalid urn: ' + urn)
publicid = urn[len(publicid_urn_prefix):]
# return the un-escaped string
return urn_to_string_format(publicid)
def publicid_to_urn(id):
'''Convert a publicid to a urn like urn:publicid:.....'''
# prefix with 'urn:publicid:' and escape chars
return publicid_urn_prefix + string_to_urn_format(id)
def string_to_urn_format(instr):
'''Make a string URN compatible, collapsing whitespace and escaping chars'''
if instr is None or instr.strip() == '':
raise ValueError("Empty string cant be in a URN")
# Collapse whitespace
instr = ' '.join(instr.strip().split())
for a, b in publicid_xforms:
instr = instr.replace(a, b)
return instr
def urn_to_string_format(urnstr):
'''Turn a part of a URN into publicid format, undoing transforms'''
if urnstr is None or urnstr.strip() == '':
return urnstr
publicid = urnstr
# Validate it is reasonable URN string?
for a, b in reversed(publicid_xforms):
publicid = publicid.replace(b, a)
return publicid
| mit | 3,436,441,151,345,414,700 | 40.854839 | 138 | 0.593353 | false |
infobloxopen/infoblox-netmri | infoblox_netmri/api/remote/models/vlan_network_explorer_summaries_summary_grid_remote.py | 1 | 2052 | from ..remote import RemoteModel
class VlanNetworkExplorerSummariesSummaryGridRemote(RemoteModel):
"""
| ``VlanID:`` none
| ``attribute type:`` string
| ``VlanMemberID:`` none
| ``attribute type:`` string
| ``BridgeMemberInd:`` none
| ``attribute type:`` string
| ``RootVlanMemberID:`` none
| ``attribute type:`` string
| ``DeviceID:`` none
| ``attribute type:`` string
| ``DeviceIPDotted:`` none
| ``attribute type:`` string
| ``DeviceIPNumeric:`` none
| ``attribute type:`` string
| ``VirtualNetworkID:`` none
| ``attribute type:`` string
| ``Network:`` none
| ``attribute type:`` string
| ``DeviceName:`` none
| ``attribute type:`` string
| ``VlanIndex:`` none
| ``attribute type:`` string
| ``VlanName:`` none
| ``attribute type:`` string
| ``VlanMemberTimestamp:`` none
| ``attribute type:`` string
| ``StpPriority:`` none
| ``attribute type:`` string
| ``BaseBridgeAddress:`` none
| ``attribute type:`` string
| ``StpBridgeMaxAge:`` none
| ``attribute type:`` string
| ``StpBridgeHelloTime:`` none
| ``attribute type:`` string
| ``StpBridgeForwardDelay:`` none
| ``attribute type:`` string
| ``Timers:`` none
| ``attribute type:`` string
"""
properties = ("VlanID",
"VlanMemberID",
"BridgeMemberInd",
"RootVlanMemberID",
"DeviceID",
"DeviceIPDotted",
"DeviceIPNumeric",
"VirtualNetworkID",
"Network",
"DeviceName",
"VlanIndex",
"VlanName",
"VlanMemberTimestamp",
"StpPriority",
"BaseBridgeAddress",
"StpBridgeMaxAge",
"StpBridgeHelloTime",
"StpBridgeForwardDelay",
"Timers",
)
| apache-2.0 | 589,517,927,399,518,200 | 22.586207 | 65 | 0.497563 | false |
amfarrell/pickhost | src/pickhost/pickhost/settings.py | 1 | 4605 | """
Django settings for pickhost project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
import dj_database_url
import json
DEBUG = os.environ.get('DEBUG', False) in ['True', 'TRUE', 'true']
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
WEBPACK_BASE = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = (
os.path.join(WEBPACK_BASE, 'assets'),
)
WEBPACK_LOADER = {
'DEFAULT': {
'BUNDLE_DIR_NAME': 'bundles/', #os.path.join(os.path.join(WEBPACK_BASE, 'assets'), 'bundles/'),
'STATS_FILE': os.path.join(WEBPACK_BASE, 'webpack-stats.json'),
'POLL_INTERVAL': 0.1,
'IGNORE': ['.+\.hot-update.js', '.+\.map']
}
}
if not DEBUG:
WEBPACK_LOADER['DEFAULT'].update({
'BUNDLE_DIR_NAME': 'dist/',
'STATS_FILE': os.path.join(WEBPACK_BASE, 'webpack-stats-prod.json')
})
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY', 'XXX')
# SECURITY WARNING: don't run with debug turned on in production!
ALLOWED_HOSTS = json.loads(os.environ.get('DOMAINS', '["0.0.0.0"]'))
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'webpack_loader',
'party'
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'pickhost.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pickhost.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
if os.environ.get('DATABASE_URL'):
DATABASES = {
'default': dj_database_url.config(),
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = os.environ.get('STATIC_URL', '/static/')
STATIC_ROOT = os.environ.get('STATIC_ROOT', './static_root/')
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO'),
},
},
}
CITYMAPPER_API_KEY = os.environ['CITYMAPPER_API_KEY']
CITYMAPPER_URL = os.environ.get('CITYMAPPER_URL', 'https://developer.citymapper.com')
| mit | -8,103,281,417,396,609,000 | 26.740964 | 103 | 0.6519 | false |
hbradleyiii/ww | ww/main.py | 1 | 1722 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# name: main.py
# author: Harold Bradley III
# email: [email protected]
# date: 12/11/2015
#
# description: A program for managing websites
#
from __future__ import absolute_import, print_function
try:
from ext_pylib.prompt import prompt, prompt_str, warn_prompt
except ImportError:
raise ImportError('ext_pylib must be installed to run ww')
import platform
import sys
from ww import Website, WebsiteDomain, Vhost, WPWebsite
__author__ = 'Harold Bradley III'
__copyright__ = 'Copyright (c) 2015-2016 Harold Bradley III'
__license__ = 'MIT'
def display_help():
"""Displays script help."""
print('Help not yet implemented.')
def main():
"""Main entry point for the script."""
if platform.system() != 'Linux':
raise SysError('ERROR: ww cannot be run from ' + platform.system() + '.')
try:
script = sys.argv.pop(0)
except IndexError: # Unknown Error
raise UnknownError('ERROR: sys.argv was not set in main()')
try:
command = sys.argv.pop(0)
except IndexError: # No arguments given
display_help() # If no argmuments are given, display help
return
if command not in ['install', 'remove', 'pack', 'unpack', 'verify', 'repair']:
print('ERROR: Command "' + command + '" not understood.')
return 1
wp = False
if sys.argv and sys.argv[0] == 'wp':
sys.argv.pop(0)
wp = True
domain = ''
if sys.argv:
domain = sys.argv.pop(0)
website = WPWebsite(domain) if wp else Website(domain)
getattr(website, command)()
if __name__ == '__main__':
sys.exit(main())
| mit | 5,718,139,895,927,511,000 | 23.956522 | 82 | 0.608595 | false |
ajpina/uffema | uffema/slots/type1.py | 1 | 7525 | #!/usr/bin/python
# -*- coding: iso-8859-15 -*-
# ==========================================================================
# Copyright (C) 2016 Dr. Alejandro Pina Ortega
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==========================================================================
"""
Class for slots of type 1
"""
# ==========================================================================
# Program: type1.py
# Author: ajpina
# Date: 12/23/16
# Version: 0.1.1
#
# Revision History:
# Date Version Author Description
# - 12/23/16: 0.1.1 ajpina Defines mandatory methods and properties
#
# ==========================================================================
__author__ = 'ajpina'
import numpy as np
from uffema.slots import Slot
from uffema.misc.constants import *
class Type1(Slot):
@property
def h0(self):
return self._h0
@h0.setter
def h0(self, value):
self._h0 = value
@property
def h2(self):
return self._h2
@h2.setter
def h2(self, value):
self._h2 = value
@property
def w0(self):
return self._w0
@w0.setter
def w0(self, value):
self._w0 = value
@property
def w1(self):
return self._w1
@w1.setter
def w1(self, value):
self._w1 = value
@property
def w2(self):
return self._w2
@w2.setter
def w2(self, value):
self._w2 = value
@property
def so_position(self):
return self._so_position
@so_position.setter
def so_position(self, value):
self._so_position = value
@property
def s_position(self):
return self._s_position
@s_position.setter
def s_position(self, value):
self._s_position = value
@property
def liner_thickness(self):
return self._liner_thickness
@liner_thickness.setter
def liner_thickness(self, value):
self._liner_thickness = value
@property
def type(self):
return self._type
@type.setter
def type(self, value):
self._type = value
def __init__(self, slot_settings, stator_mode):
super(Type1, self).__init__(slot_settings)
self.h0 = slot_settings['h0']
self.h2 = slot_settings['h2']
self.w0 = slot_settings['w0']
self.w1 = slot_settings['w1']
self.w2 = slot_settings['w2']
self.so_position = slot_settings['SOpos']
self.s_position = slot_settings['Spos']
# It is assumed an insulation liner of 0.5mm thickness
self.liner_thickness = 0.5e-3
self.type = self.type + 'Type1'
def get_slot_center(self):
return self.h0 + (2.0/5.0)*self.h2
def get_type(self):
return 'Type1'
def get_area(self):
return 0
def get_slot_total_height(self):
return self.h0 + self.h2
def get_conductor_area_width(self):
return (self.w1 + self.w2) / 2.0
def get_conductor_area_height(self):
return self.h2
def get_coil_area_base_point(self, inner_radius):
return inner_radius + self.h0
def get_slot_opening_geometry(self, inner_radius):
angle_slot_opening_bottom = np.arcsin(-(self.w0/2.0)/ inner_radius + self.h0 )
angle_slot_opening_top = np.arcsin(-(self.w0 / 2.0) / inner_radius )
points = {
'2': [inner_radius, 0, 0],
'3': [inner_radius + self.h0, 0, 0],
'4': [(inner_radius + self.h0)*np.cos(angle_slot_opening_bottom), (inner_radius + self.h0)*np.sin(angle_slot_opening_bottom) , 0],
'5': [(inner_radius)*np.cos(angle_slot_opening_bottom), (inner_radius)*np.sin(angle_slot_opening_bottom) , 0]
}
lines = {
'1': [2, 3],
'2': [3, 4],
'3': [4, 5],
'4': [5, 2]
}
return points, lines
def get_slot_wedge_geometry(self, inner_radius):
points = None
lines = None
return points, lines
def get_backiron_geometry(self, inner_radius, outer_radius, slot_number):
slot_pitch = 360 * DEG2RAD / slot_number
angle_slot_base = np.arcsin(-(self.w2 / 2.0) / (inner_radius + self.h2))
points = {
'6': [inner_radius + self.h2, 0, 0],
'7': [outer_radius, 0, 0],
'8': [outer_radius * np.cos( -slot_pitch/2.0 ), outer_radius * np.sin( -slot_pitch/2.0 ), 0],
'9': [(inner_radius + self.h0 + self.h2) * np.cos( -slot_pitch/2.0 ),
(inner_radius + self.h0 + self.h2) * np.sin( -slot_pitch/2.0 ) , 0],
'10': [(inner_radius + self.h2) * np.cos(angle_slot_base),
(inner_radius + self.h2) * np.sin(angle_slot_base), 0]
}
lines = {
'5': [6, 7],
'6': [7, 1, 8],
'7': [8, 9],
'8': [9, 10],
'9': [10, 1, 6]
}
return points, lines
def get_tooth_geometry(self, inner_radius, slot_number):
slot_pitch = 360 * DEG2RAD / slot_number
angle_slot_top = np.arcsin(-(self.w1 / 2.0) / (inner_radius + self.h0))
points = {
'11': [(inner_radius + self.h0 ) * np.cos( -slot_pitch/2.0 ),
(inner_radius + self.h0 ) * np.sin( -slot_pitch/2.0 ) , 0],
'12': [(inner_radius + self.h0) * np.cos(angle_slot_top), (inner_radius + self.h0) * np.sin(angle_slot_top),
0]
}
lines = {
'10': [9, 11],
'11': [11, 1, 12],
'12': [12, 10],
'-8': [0]
}
return points, lines
def get_coil_area_geometry(self, inner_radius):
points = None
lines = {
'13': [12, 1, 4],
'-2': [0],
'14': [3, 6],
'-9': [0],
'-12': [0]
}
return points, lines
def get_toothtip_geometry(self, inner_radius, slot_number):
slot_pitch = 360 * DEG2RAD / slot_number
points = {
'14': [inner_radius * np.cos( -slot_pitch/2.0 ), inner_radius * np.sin( -slot_pitch/2.0 ) , 0]
}
lines = {
'15': [11, 14],
'16': [14, 1, 5],
'-3': [0],
'-13': [0],
'-11': [0]
}
return points, lines
def get_stator_airgap_geometry(self, airgap_radius, slot_number):
slot_pitch = 360 * DEG2RAD / slot_number
points = {
'15': [airgap_radius * np.cos( -slot_pitch/2.0 ), airgap_radius * np.sin( -slot_pitch/2.0 ) , 0],
'16': [airgap_radius, 0, 0]
}
lines = {
'17': [14, 15],
'18': [15, 1, 16],
'19': [16, 2],
'-4': [0],
'-16': [0]
}
return points, lines
def get_stator_airgap_boundary(self):
return {'18': [15, 1, 16]}
def get_outer_stator_boundary(self):
return [6]
def get_master_boundary(self):
return [7, 10, 15, 17]
| apache-2.0 | 8,788,168,946,986,012,000 | 27.612167 | 142 | 0.505249 | false |
aptivate/django-registration | registration/models.py | 1 | 10475 | import datetime
import hashlib
import random
import re
from django.conf import settings
from django.db import models
from django.db import transaction
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import get_user_model
try:
from django.utils.timezone import now as datetime_now
except ImportError:
datetime_now = datetime.datetime.now
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
"""
Custom manager for the ``RegistrationProfile`` model.
The methods defined here provide shortcuts for account creation
and activation (including generation and emailing of activation
keys), and for cleaning out expired inactive accounts.
"""
def activate_user(self, activation_key):
"""
Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
To prevent reactivation of an account which has been
deactivated by site administrators, the activation key is
reset to the string constant ``RegistrationProfile.ACTIVATED``
after successful activation.
"""
# Make sure the key we're trying conforms to the pattern of a
# SHA1 hash; if it doesn't, no point trying to look it up in
# the database.
if SHA1_RE.search(activation_key):
try:
profile = self.get(activation_key=activation_key)
except self.model.DoesNotExist:
return False
if not profile.activation_key_expired():
user = profile.user
user.is_active = True
user.save()
profile.activation_key = self.model.ACTIVATED
profile.save()
return user
return False
def create_inactive_user(self, username, email, password,
site, send_email=True):
"""
Create a new, inactive ``User``, generate a
``RegistrationProfile`` and email its activation key to the
``User``, returning the new ``User``.
By default, an activation email will be sent to the new
user. To disable this, pass ``send_email=False``.
"""
new_user = get_user_model().objects.create_user(username, email,
password)
new_user.is_active = False
new_user.save()
registration_profile = self.create_profile(new_user)
if send_email:
registration_profile.send_activation_email(site)
return new_user
create_inactive_user = transaction.commit_on_success(create_inactive_user)
def create_profile(self, user):
"""
Create a ``RegistrationProfile`` for a given
``User``, and return the ``RegistrationProfile``.
The activation key for the ``RegistrationProfile`` will be a
SHA1 hash, generated from a combination of the ``User``'s
username and a random salt.
"""
salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
username = user.username
if isinstance(username, unicode):
username = username.encode('utf-8')
activation_key = hashlib.sha1(salt+username).hexdigest()
return self.create(user=user,
activation_key=activation_key)
def delete_expired_users(self):
"""
Remove expired instances of ``RegistrationProfile`` and their
associated ``User``s.
Accounts to be deleted are identified by searching for
instances of ``RegistrationProfile`` with expired activation
keys, and then checking to see if their associated ``User``
instances have the field ``is_active`` set to ``False``; any
``User`` who is both inactive and has an expired activation
key will be deleted.
It is recommended that this method be executed regularly as
part of your routine site maintenance; this application
provides a custom management command which will call this
method, accessible as ``manage.py cleanupregistration``.
Regularly clearing out accounts which have never been
activated serves two useful purposes:
1. It alleviates the ocasional need to reset a
``RegistrationProfile`` and/or re-send an activation email
when a user does not receive or does not act upon the
initial activation email; since the account will be
deleted, the user will be able to simply re-register and
receive a new activation key.
2. It prevents the possibility of a malicious user registering
one or more accounts and never activating them (thus
denying the use of those usernames to anyone else); since
those accounts will be deleted, the usernames will become
available for use again.
If you have a troublesome ``User`` and wish to disable their
account while keeping it in the database, simply delete the
associated ``RegistrationProfile``; an inactive ``User`` which
does not have an associated ``RegistrationProfile`` will not
be deleted.
"""
for profile in self.all():
try:
if profile.activation_key_expired():
user = profile.user
if not user.is_active:
user.delete()
profile.delete()
except get_user_model().DoesNotExist:
profile.delete()
class RegistrationProfile(models.Model):
"""
A simple profile which stores an activation key for use during
user account registration.
Generally, you will not want to interact directly with instances
of this model; the provided manager includes methods
for creating and activating new accounts, as well as for cleaning
out accounts which have never been activated.
While it is possible to use this model as the value of the
``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
so. This model's sole purpose is to store data temporarily during
account registration and activation.
"""
ACTIVATED = u"ALREADY_ACTIVATED"
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True,
verbose_name=_('user'))
activation_key = models.CharField(_('activation key'), max_length=40)
objects = RegistrationManager()
class Meta:
verbose_name = _('registration profile')
verbose_name_plural = _('registration profiles')
def __unicode__(self):
return u"Registration information for %s" % self.user
def activation_key_expired(self):
"""
Determine whether this ``RegistrationProfile``'s activation
key has expired, returning a boolean -- ``True`` if the key
has expired.
Key expiration is determined by a two-step process:
1. If the user has already activated, the key will have been
reset to the string constant ``ACTIVATED``. Re-activating
is not permitted, and so this method returns ``True`` in
this case.
2. Otherwise, the date the user signed up is incremented by
the number of days specified in the setting
``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
days after signup during which a user is allowed to
activate their account); if the result is less than or
equal to the current date, the key has expired and this
method returns ``True``.
"""
expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
return self.activation_key == self.ACTIVATED or \
(self.user.date_joined + expiration_date <= datetime_now())
activation_key_expired.boolean = True
def send_activation_email(self, site):
"""
Send an activation email to the user associated with this
``RegistrationProfile``.
The activation email will make use of two templates:
``registration/activation_email_subject.txt``
This template will be used for the subject line of the
email. Because it is used as the subject line of an email,
this template's output **must** be only a single line of
text; output longer than one line will be forcibly joined
into only a single line.
``registration/activation_email.txt``
This template will be used for the body of the email.
These templates will each receive the following context
variables:
``activation_key``
The activation key for the new account.
``expiration_days``
The number of days remaining during which the account may
be activated.
``site``
An object representing the site on which the user
registered; depending on whether ``django.contrib.sites``
is installed, this may be an instance of either
``django.contrib.sites.models.Site`` (if the sites
application is installed) or
``django.contrib.sites.models.RequestSite`` (if
not). Consult the documentation for the Django sites
framework for details regarding these objects' interfaces.
"""
ctx_dict = {'activation_key': self.activation_key,
'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
'site': site}
subject = render_to_string('registration/activation_email_subject.txt',
ctx_dict)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
message = render_to_string('registration/activation_email.txt',
ctx_dict)
self.user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
| bsd-3-clause | -6,932,972,600,714,886,000 | 38.23221 | 83 | 0.621289 | false |
pvvx/RTL00_WEB | USDK/flasher/add_sample_bat/runram.py | 1 | 1340 | #!/usr/bin/env python3
# -*- coding: cp1251 -*-
import jlinkarm as jl
import os, sys, time, struct
dllfilename = 'D:/MCU/SEGGER/JLink_V612i/JLinkARM.dll'
if __name__ == '__main__':
if len(sys.argv) >= 2:
if sys.argv[1] == '-h':
print 'Usage: ldram.py ram_all.bin'
exit(0)
imgfilename = 'build/bin/ram_all.bin'
if len(sys.argv) > 1:
if sys.argv[1]:
imgfilename = sys.argv[1]
try:
ff = open(imgfilename, "rb")
except:
print "Error file open " + imgfilename
exit(1)
jl.loadJLinkARMdll(dllfilename)
jl.open()
if jl.is_connected == 0:
raise RuntimeError('Jlink not connected')
jl.exec_command('device Cortex-M3', 0, 0)
jl.exec_command('endian little', 0, 0)
jl.tif_select(1)
jl.set_speed(1000)
jl.clear_RESET()
jl.clear_TRST()
time.sleep(0.01)
jl.set_RESET()
jl.set_TRST()
jl.reset()
jl.halt()
jl.set_speed(3500)
bin = ff.read()
get_dword = lambda address: struct.unpack('<L', bin[address:address + 4])[0]
i1_address = 0x0
i1_length = get_dword(0x10)
jl.write_mem(0x10000bc8, bin[i1_address + 0x20: i1_address + 0x20 + i1_length])
i2_address = (get_dword(0x18) & 0xffff) * 1024
i2_length = get_dword(i2_address) + 0x10
jl.write_mem(0x10006000, bin[i2_address + 0x10: i2_address + 0x10 + i2_length])
jl.reset()
jl.write_u32(0x40000210, 0x20111113)
jl.go()
jl.close()
ff.close()
exit(0)
| unlicense | -5,581,535,721,488,196,000 | 24.283019 | 80 | 0.657463 | false |
vpetersson/docker-py | docker/models/services.py | 1 | 11138 | import copy
from docker.errors import create_unexpected_kwargs_error
from docker.types import TaskTemplate, ContainerSpec
from .resource import Model, Collection
class Service(Model):
"""A service."""
id_attribute = 'ID'
@property
def name(self):
"""The service's name."""
return self.attrs['Spec']['Name']
@property
def version(self):
"""
The version number of the service. If this is not the same as the
server, the :py:meth:`update` function will not work and you will
need to call :py:meth:`reload` before calling it again.
"""
return self.attrs.get('Version').get('Index')
def remove(self):
"""
Stop and remove the service.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
return self.client.api.remove_service(self.id)
def tasks(self, filters=None):
"""
List the tasks in this service.
Args:
filters (dict): A map of filters to process on the tasks list.
Valid filters: ``id``, ``name``, ``node``,
``label``, and ``desired-state``.
Returns:
(:py:class:`list`): List of task dictionaries.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
if filters is None:
filters = {}
filters['service'] = self.id
return self.client.api.tasks(filters=filters)
def update(self, **kwargs):
"""
Update a service's configuration. Similar to the ``docker service
update`` command.
Takes the same parameters as :py:meth:`~ServiceCollection.create`.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
# Image is required, so if it hasn't been set, use current image
if 'image' not in kwargs:
spec = self.attrs['Spec']['TaskTemplate']['ContainerSpec']
kwargs['image'] = spec['Image']
create_kwargs = _get_create_service_kwargs('update', kwargs)
return self.client.api.update_service(
self.id,
self.version,
**create_kwargs
)
def logs(self, **kwargs):
"""
Get log stream for the service.
Note: This method works only for services with the ``json-file``
or ``journald`` logging drivers.
Args:
details (bool): Show extra details provided to logs.
Default: ``False``
follow (bool): Keep connection open to read logs as they are
sent by the Engine. Default: ``False``
stdout (bool): Return logs from ``stdout``. Default: ``False``
stderr (bool): Return logs from ``stderr``. Default: ``False``
since (int): UNIX timestamp for the logs staring point.
Default: 0
timestamps (bool): Add timestamps to every log line.
tail (string or int): Number of log lines to be returned,
counting from the current end of the logs. Specify an
integer or ``'all'`` to output all log lines.
Default: ``all``
Returns (generator): Logs for the service.
"""
is_tty = self.attrs['Spec']['TaskTemplate']['ContainerSpec'].get(
'TTY', False
)
return self.client.api.service_logs(self.id, is_tty=is_tty, **kwargs)
class ServiceCollection(Collection):
"""Services on the Docker server."""
model = Service
def create(self, image, command=None, **kwargs):
"""
Create a service. Similar to the ``docker service create`` command.
Args:
image (str): The image name to use for the containers.
command (list of str or str): Command to run.
args (list of str): Arguments to the command.
constraints (list of str): Placement constraints.
container_labels (dict): Labels to apply to the container.
endpoint_spec (EndpointSpec): Properties that can be configured to
access and load balance a service. Default: ``None``.
env (list of str): Environment variables, in the form
``KEY=val``.
hostname (string): Hostname to set on the container.
labels (dict): Labels to apply to the service.
log_driver (str): Log driver to use for containers.
log_driver_options (dict): Log driver options.
mode (ServiceMode): Scheduling mode for the service.
Default:``None``
mounts (list of str): Mounts for the containers, in the form
``source:target:options``, where options is either
``ro`` or ``rw``.
name (str): Name to give to the service.
networks (list of str): List of network names or IDs to attach
the service to. Default: ``None``.
resources (Resources): Resource limits and reservations.
restart_policy (RestartPolicy): Restart policy for containers.
secrets (list of :py:class:`docker.types.SecretReference`): List
of secrets accessible to containers for this service.
stop_grace_period (int): Amount of time to wait for
containers to terminate before forcefully killing them.
update_config (UpdateConfig): Specification for the update strategy
of the service. Default: ``None``
user (str): User to run commands as.
workdir (str): Working directory for commands to run.
tty (boolean): Whether a pseudo-TTY should be allocated.
groups (:py:class:`list`): A list of additional groups that the
container process will run as.
open_stdin (boolean): Open ``stdin``
read_only (boolean): Mount the container's root filesystem as read
only.
stop_signal (string): Set signal to stop the service's containers
healthcheck (Healthcheck): Healthcheck
configuration for this service.
hosts (:py:class:`dict`): A set of host to IP mappings to add to
the container's `hosts` file.
dns_config (DNSConfig): Specification for DNS
related configurations in resolver configuration file.
configs (:py:class:`list`): List of :py:class:`ConfigReference`
that will be exposed to the service.
privileges (Privileges): Security options for the service's
containers.
Returns:
(:py:class:`Service`) The created service.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
kwargs['image'] = image
kwargs['command'] = command
create_kwargs = _get_create_service_kwargs('create', kwargs)
service_id = self.client.api.create_service(**create_kwargs)
return self.get(service_id)
def get(self, service_id, insert_defaults=None):
"""
Get a service.
Args:
service_id (str): The ID of the service.
insert_defaults (boolean): If true, default values will be merged
into the output.
Returns:
(:py:class:`Service`): The service.
Raises:
:py:class:`docker.errors.NotFound`
If the service does not exist.
:py:class:`docker.errors.APIError`
If the server returns an error.
:py:class:`docker.errors.InvalidVersion`
If one of the arguments is not supported with the current
API version.
"""
return self.prepare_model(
self.client.api.inspect_service(service_id, insert_defaults)
)
def list(self, **kwargs):
"""
List services.
Args:
filters (dict): Filters to process on the nodes list. Valid
filters: ``id``, ``name`` , ``label`` and ``mode``.
Default: ``None``.
Returns:
(list of :py:class:`Service`): The services.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
return [
self.prepare_model(s)
for s in self.client.api.services(**kwargs)
]
# kwargs to copy straight over to ContainerSpec
CONTAINER_SPEC_KWARGS = [
'args',
'command',
'configs',
'dns_config',
'env',
'groups',
'healthcheck',
'hostname',
'hosts',
'image',
'labels',
'mounts',
'open_stdin',
'privileges'
'read_only',
'secrets',
'stop_grace_period',
'stop_signal',
'tty',
'user',
'workdir',
]
# kwargs to copy straight over to TaskTemplate
TASK_TEMPLATE_KWARGS = [
'networks',
'resources',
'restart_policy',
]
# kwargs to copy straight over to create_service
CREATE_SERVICE_KWARGS = [
'name',
'labels',
'mode',
'update_config',
'endpoint_spec',
]
def _get_create_service_kwargs(func_name, kwargs):
# Copy over things which can be copied directly
create_kwargs = {}
for key in copy.copy(kwargs):
if key in CREATE_SERVICE_KWARGS:
create_kwargs[key] = kwargs.pop(key)
container_spec_kwargs = {}
for key in copy.copy(kwargs):
if key in CONTAINER_SPEC_KWARGS:
container_spec_kwargs[key] = kwargs.pop(key)
task_template_kwargs = {}
for key in copy.copy(kwargs):
if key in TASK_TEMPLATE_KWARGS:
task_template_kwargs[key] = kwargs.pop(key)
if 'container_labels' in kwargs:
container_spec_kwargs['labels'] = kwargs.pop('container_labels')
if 'constraints' in kwargs:
task_template_kwargs['placement'] = {
'Constraints': kwargs.pop('constraints')
}
if 'log_driver' in kwargs:
task_template_kwargs['log_driver'] = {
'Name': kwargs.pop('log_driver'),
'Options': kwargs.pop('log_driver_options', {})
}
if func_name == 'update':
if 'force_update' in kwargs:
task_template_kwargs['force_update'] = kwargs.pop('force_update')
# fetch the current spec by default if updating the service
# through the model
fetch_current_spec = kwargs.pop('fetch_current_spec', True)
create_kwargs['fetch_current_spec'] = fetch_current_spec
# All kwargs should have been consumed by this point, so raise
# error if any are left
if kwargs:
raise create_unexpected_kwargs_error(func_name, kwargs)
container_spec = ContainerSpec(**container_spec_kwargs)
task_template_kwargs['container_spec'] = container_spec
create_kwargs['task_template'] = TaskTemplate(**task_template_kwargs)
return create_kwargs
| apache-2.0 | -1,413,702,806,940,108,000 | 34.35873 | 79 | 0.574879 | false |
EUDAT-B2SHARE/invenio-old | modules/miscutil/lib/plotextractor_regression_tests.py | 1 | 2138 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2010, 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Regression tests for the plotextract script."""
__revision__ = "$Id$"
import os
from invenio.config import CFG_TMPDIR, CFG_SITE_URL
from invenio.testutils import make_test_suite, run_test_suite, InvenioTestCase
class GetDefaultsTest(InvenioTestCase):
"""Test function to get default values."""
def setUp(self):
self.arXiv_id = "arXiv:astro-ph_0104076"
self.tarball = "%s/2001/04/arXiv:astro-ph_0104076/arXiv:astro-ph_0104076" % (CFG_TMPDIR,)
def test_get_defaults(self):
"""plotextractor - get defaults"""
from invenio.shellutils import run_shell_command
from invenio.plotextractor import get_defaults
sdir_should_be = os.path.join(CFG_TMPDIR, self.arXiv_id + '_plots')
refno_should_be = "15" # Note: For ATLANTIS DEMO site
sdir, refno = get_defaults(tarball=self.tarball, sdir=None, refno_url=CFG_SITE_URL)
if sdir != None:
run_shell_command("rm -rf %s" % (sdir,))
self.assertTrue(sdir == sdir_should_be, \
"didn\'t get correct default scratch dir")
self.assertTrue(refno == refno_should_be, \
'didn\'t get correct default reference number')
TEST_SUITE = make_test_suite(GetDefaultsTest)
if __name__ == "__main__":
run_test_suite(TEST_SUITE, warn_user=True)
| gpl-2.0 | -7,628,279,441,769,755,000 | 39.339623 | 97 | 0.675398 | false |
umitproject/tease-o-matic | mediagenerator/filters/yuicompressor.py | 1 | 1533 | from django.conf import settings
from django.utils.encoding import smart_str
from mediagenerator.generators.bundles.base import Filter
class YUICompressor(Filter):
def __init__(self, **kwargs):
super(YUICompressor, self).__init__(**kwargs)
assert self.filetype in ('css', 'js'), (
'YUICompressor only supports compilation to css and js. '
'The parent filter expects "%s".' % self.filetype)
def get_output(self, variation):
# We import this here, so App Engine Helper users don't get import
# errors.
from subprocess import Popen, PIPE
for input in self.get_input(variation):
try:
compressor = settings.YUICOMPRESSOR_PATH
cmd = Popen(['java', '-jar', compressor,
'--charset', 'utf-8', '--type', self.filetype],
stdin=PIPE, stdout=PIPE, stderr=PIPE,
universal_newlines=True)
output, error = cmd.communicate(smart_str(input))
assert cmd.wait() == 0, 'Command returned bad result:\n%s' % error
yield output
except Exception, e:
raise ValueError("Failed to execute Java VM or yuicompressor. "
"Please make sure that you have installed Java "
"and that it's in your PATH and that you've configured "
"YUICOMPRESSOR_PATH in your settings correctly.\n"
"Error was: %s" % e)
| bsd-3-clause | -8,061,247,068,897,401,000 | 48.451613 | 82 | 0.566862 | false |
manaschaturvedi/oscarbuddy | main.py | 1 | 22607 | import os
import re
import random
import hashlib
import hmac
from string import letters
import mimetypes
import webapp2
import jinja2
from google.appengine.ext import db
import webbrowser
from urllib2 import urlopen
import requests
from bs4 import BeautifulSoup
import json
import html5lib
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),autoescape = True)
secret = 'fart'
def render_str(template, **params):
t = jinja_env.get_template(template)
return t.render(params)
def make_secure_val(val):
return '%s|%s' % (val, hmac.new(secret, val).hexdigest())
def check_secure_val(secure_val):
val = secure_val.split('|')[0]
if secure_val == make_secure_val(val):
return val
class Handler(webapp2.RequestHandler):
def write(self, *a, **kw):
self.response.out.write(*a, **kw)
def render_str(self, template, **params):
params['user'] = self.user
return render_str(template, **params)
def render(self, template, **kw):
self.write(self.render_str(template, **kw))
def set_secure_cookie(self, name, val):
cookie_val = make_secure_val(val)
self.response.headers.add_header('Set-Cookie','%s=%s; Path=/' % (name, cookie_val))
def read_secure_cookie(self, name):
cookie_val = self.request.cookies.get(name)
return cookie_val and check_secure_val(cookie_val)
def login(self, user):
self.set_secure_cookie('user_id', str(user.key().id()))
def logout(self):
self.response.headers.add_header('Set-Cookie', 'user_id=; Path=/')
def render_json(self, d):
json_txt = json.dumps(d)
self.response.headers['Content-Type'] = 'application/json; charset=UTF-8'
self.write(json_txt)
def initialize(self, *a, **kw):
webapp2.RequestHandler.initialize(self, *a, **kw)
uid = self.read_secure_cookie('user_id')
self.user = uid and User.by_id(int(uid))
class MainPage(Handler):
def get(self):
self.render("home.html")
def make_salt(length = 5):
return ''.join(random.choice(letters) for x in xrange(length))
def make_pw_hash(name, pw, salt = None):
if not salt:
salt = make_salt()
h = hashlib.sha256(name + pw + salt).hexdigest()
return '%s,%s' % (salt, h)
def valid_pw(name, password, h):
salt = h.split(',')[0]
return h == make_pw_hash(name, password, salt)
def users_key(group = 'default'):
return db.Key.from_path('users', group)
class User(db.Model):
name = db.StringProperty(required = True)
pw_hash = db.StringProperty(required = True)
email = db.StringProperty()
@classmethod
def by_id(cls, uid):
return User.get_by_id(uid, parent = users_key())
@classmethod
def by_name(cls, name):
u = User.all().filter('name =', name).get()
return u
@classmethod
def register(cls, name, pw, email = None):
pw_hash = make_pw_hash(name, pw)
return User(parent = users_key(),name = name,pw_hash = pw_hash,email = email)
@classmethod
def login(cls, name, pw):
u = cls.by_name(name)
if u and valid_pw(name, pw, u.pw_hash):
return u
USER_RE = re.compile(r"^[a-zA-Z0-9_-]{3,20}$")
def valid_username(username):
return username and USER_RE.match(username)
PASS_RE = re.compile(r"^.{3,20}$")
def valid_password(password):
return password and PASS_RE.match(password)
EMAIL_RE = re.compile(r'^[\S]+@[\S]+\.[\S]+$')
def valid_email(email):
return not email or EMAIL_RE.match(email)
class Signup(Handler):
def post(self):
have_error = False
self.username = self.request.get('username')
self.password = self.request.get('password')
self.verify = self.request.get('verify')
self.email = self.request.get('email')
params = dict(username = self.username,email = self.email)
if not valid_username(self.username):
params['error_username'] = "That's not a valid username."
have_error = True
if not valid_password(self.password):
params['error_password'] = "That wasn't a valid password."
have_error = True
elif self.password != self.verify:
params['error_verify'] = "Your passwords didn't match."
have_error = True
if not valid_email(self.email):
params['error_email'] = "That's not a valid email."
have_error = True
if have_error:
self.render('home.html', **params)
else:
self.done()
def done(self, *a, **kw):
raise NotImplementedError
class Register(Signup):
def done(self):
#make sure the user doesn't already exist
u = User.by_name(self.username)
if u:
msg = 'That user already exists.'
self.render('home.html', error_username = msg)
else:
u = User.register(self.username, self.password, self.email)
u.put()
self.login(u)
self.redirect('/')
class Login(Handler):
def post(self):
username = self.request.get('username')
password = self.request.get('password')
u = User.login(username, password)
if u:
self.login(u)
frontuser = username
self.redirect('/')
else:
msg = 'Invalid login'
self.render('home.html', error = msg)
class Logout(Handler):
def get(self):
self.logout()
self.redirect('/')
class NewBooks(Handler):
def get(self):
self.render("newbooks.html")
def post(self):
branch = self.request.get("branch")
semester = self.request.get("semester")
publications = self.request.get("publications")
subject = self.request.get("subject")
if semester:
yo = int(semester)
if(branch and semester and publications and subject):
disp = Books.all().filter("branch =", branch).filter("publisher =", publications).filter("name =", subject).filter("semester =", yo).fetch(10)
self.render("newbooks.html", disp = disp)
elif(branch and semester and publications):
disp = Books.all().filter("branch =", branch).filter("publisher =", publications).filter("semester =", yo).fetch(10)
self.render("newbooks.html", disp = disp)
elif(branch and semester and subject):
disp = Books.all().filter("branch =", branch).filter("name =", subject).filter("semester =", yo).fetch(10)
self.render("newbooks.html", disp = disp)
elif(branch and publications and subject):
disp = Books.all().filter("branch =", branch).filter("publisher =", publications).filter("name =", subject).fetch(10)
self.render("newbooks.html", disp = disp)
elif(semester and publications and subject):
disp = Books.all().filter("publisher =", publications).filter("name =", subject).filter("semester =", yo).fetch(10)
self.render("newbooks.html", disp = disp)
elif(branch and semester):
disp = Books.all().filter("branch =", branch).filter("semester =", yo).fetch(10)
self.render("newbooks.html", disp = disp)
elif(semester and publications):
disp = Books.all().filter("publisher =", publications).filter("semester =", yo).fetch(10)
self.render("newbooks.html", disp = disp)
elif(publications and subject):
disp = Books.all().filter("publisher =", publications).filter("name =", subject).fetch(10)
self.render("newbooks.html", disp = disp)
elif(branch and subject):
disp = Books.all().filter("branch =", branch).filter("name =", subject).fetch(10)
self.render("newbooks.html", disp = disp)
elif(branch and publications):
disp = Books.all().filter("branch =", branch).filter("publisher =", publications).fetch(10)
self.render("newbooks.html", disp = disp)
elif(semester and subject):
disp = Books.all().filter("name =", subject).filter("semester =", yo).fetch(10)
self.render("newbooks.html", disp = disp)
elif(branch):
disp = Books.all().filter("branch =", branch).fetch(10)
self.render("newbooks.html", disp = disp)
elif(semester):
disp = Books.all().filter("semester =", yo).fetch(10)
self.render("newbooks.html", disp = disp)
elif(publications):
disp = Books.all().filter("publisher =", publications).fetch(10)
self.render("newbooks.html", disp = disp)
elif(subject):
disp = Books.all().filter("name =", subject).fetch(10)
self.render("newbooks.html", disp = disp)
else:
self.render("newbooks.html")
class OldBooks(Handler):
def get(self):
self.render("oldbooks.html")
def post(self):
branch = self.request.get("branch")
semester = self.request.get("semester")
publications = self.request.get("publications")
subject = self.request.get("subject")
if semester:
yo = int(semester)
if(branch and semester and publications and subject):
disp = Books.all().filter("branch =", branch).filter("publisher =", publications).filter("name =", subject).filter("semester =", yo).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(branch and semester and publications):
disp = Books.all().filter("branch =", branch).filter("publisher =", publications).filter("semester =", yo).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(branch and semester and subject):
disp = Books.all().filter("branch =", branch).filter("name =", subject).filter("semester =", yo).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(branch and publications and subject):
disp = Books.all().filter("branch =", branch).filter("publisher =", publications).filter("name =", subject).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(semester and publications and subject):
disp = Books.all().filter("publisher =", publications).filter("name =", subject).filter("semester =", yo).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(branch and semester):
disp = Books.all().filter("branch =", branch).filter("semester =", yo).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(semester and publications):
disp = Books.all().filter("publisher =", publications).filter("semester =", yo).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(publications and subject):
disp = Books.all().filter("publisher =", publications).filter("name =", subject).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(branch and subject):
disp = Books.all().filter("branch =", branch).filter("name =", subject).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(branch and publications):
disp = Books.all().filter("branch =", branch).filter("publisher =", publications).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(semester and subject):
disp = Books.all().filter("name =", subject).filter("semester =", yo).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(branch):
disp = Books.all().filter("branch =", branch).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(semester):
disp = Books.all().filter("semester =", yo).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(publications):
disp = Books.all().filter("publisher =", publications).fetch(10)
self.render("oldbooks.html", disp = disp)
elif(subject):
disp = Books.all().filter("name =", subject).fetch(10)
self.render("oldbooks.html", disp = disp)
else:
self.render("oldbooks.html")
class Books(db.Model):
prod_id = db.StringProperty()
name = db.StringProperty()
semester = db.IntegerProperty()
author = db.StringProperty()
stock = db.IntegerProperty()
actual_price = db.IntegerProperty()
discount_price = db.IntegerProperty()
branch = db.StringProperty()
publisher = db.StringProperty()
publishing_date = db.StringProperty()
edition = db.StringProperty()
def as_dict(self):
d = {'name': self.name,
'author': self.author,
'actual_price': self.actual_price,
'publisher': self.publisher}
return d
class Orders(db.Model):
cust_name = db.StringProperty()
address = db.PostalAddressProperty()
college = db.StringProperty()
book_name = db.StringProperty()
quantity = db.IntegerProperty()
total_amount = db.IntegerProperty()
contact_no = db.IntegerProperty()
book_id = db.StringProperty()
email_id = db.EmailProperty()
"""
d = Orders(cust_name = "Manas Chaturvedi", address = "Borivali", college = "TCET",
book_name = "OOSE", quantity = 1, total_amount = 325, contact_no = 9022380436,
book_id = "oose.jpeg", email_id = "[email protected]")
d.put()
a = Books(prod_id = "oose.jpeg", name = "Object Oriented Software Engineering",
semester = 6, author = "Bernard Bruegge, Allen H. Dutoit", stock = 5,
actual_price = 325, discount_price = 275, branch = "Computers",
publisher = "Pearson", publishing_date = "2010", edition = "2013")
a.put()
a2 = Books(prod_id = "dwm.png", name = "Data Warehouse and Data Mining",
semester = 6, author = "Aarti Deshpande", stock = 5,
actual_price = 315, discount_price = 260, branch = "Computers",
publisher = "Techmax", publishing_date = "2010", edition = "2013")
a2.put()
a3 = Books(prod_id = "cg.jpeg", name = "Computer Graphics",
semester = 4, author = "A.P. Godse, D.A. Godse", stock = 2,
actual_price = 330, discount_price = 280, branch = "Computers",
publisher = "Techmax", publishing_date = "2010", edition = "2013")
a3.put()
a4 = Books(prod_id = "spccjohn.jpeg", name = "System Programming and Compiler Construction",
semester = 6, author = "John Donovan", stock = 2,
actual_price = 410, discount_price = 355, branch = "Computers",
publisher = "Tata McGraw Hill", publishing_date = "2010", edition = "2013")
a4.put()
a5 = Books(prod_id = "1.jpg", name = "Advanced Microprocessors",
semester = 6, author = "J. S. Katre", stock = 2,
actual_price = 320, discount_price = 290, branch = "Computers",
publisher = "Techmax", publishing_date = "2010", edition = "2013")
a5.put()
a6 = Books(prod_id = "ampburchandi.gif", name = "Advanced Microprocessors",
semester = 6, author = "K.M. Burchandi, A.K. Ray", stock = 2,
actual_price = 390, discount_price = 355, branch = "Computers",
publisher = "Tata McGraw Hill", publishing_date = "2010", edition = "2013")
a6.put()
a7 = Books(prod_id = "CN.jpg", name = "Computer Networks",
semester = 5, author = "Andrew Tenenbaum", stock = 1,
actual_price = 390, discount_price = 355, branch = "Computers",
publisher = "Tata McGraw Hill", publishing_date = "2010", edition = "2013")
a7.put()
a8 = Books(prod_id = "mp.jpeg", name = "Microprocessors and Interfacing",
semester = 5, author = "J. S. Katre", stock = 2,
actual_price = 320, discount_price = 290, branch = "Computers",
publisher = "Techmax", publishing_date = "2010", edition = "2013")
a8.put()
"""
class Template(Handler):
def get(self):
k = self.request.get("key")
disp=Books.all().filter("prod_id =", k).get()
self.render("template.html", disp = disp)
def post(self):
if self.user:
qua = self.request.get("quantity")
if not qua:
k = self.request.get("key")
disp=Books.all().filter("prod_id =", k).get()
params = dict()
params['error_quantity'] = "That's not a valid quantity."
self.render("template.html", disp = disp, **params)
else:
quantity = int(qua)
k = self.request.get("key")
disp=Books.all().filter("prod_id =", k).get()
self.redirect('/cart?quantity=%s&key=%s' % (quantity,k))
else:
k = self.request.get("key")
disp=Books.all().filter("prod_id =", k).get()
params = dict()
params['error_user'] = "please login to procced"
self.render("template.html", disp = disp, **params)
class Cart(Handler):
def get(self):
qu = self.request.get("quantity")
quantity = int(qu)
k = self.request.get("key")
disp = Books.all().filter("prod_id =", k).get()
self.render("cart.html", disp = disp, quantity = quantity)
def post(self):
if self.user:
qua = self.request.get("quantity")
quantity = int(qua)
k = self.request.get("key")
disp=Books.all().filter("prod_id =", k).get()
self.redirect('/order?quantity=%s&key=%s' % (quantity,k))
else:
k = self.request.get("key")
qu = self.request.get("quantity")
quantity = int(qu)
disp=Books.all().filter("prod_id =", k).get()
params = dict()
params['error_user'] = "please login to procced"
self.render("cart.html", disp = disp, quantity = quantity, **params)
class Order(Handler):
def get(self):
qu = self.request.get("quantity")
quantity = int(qu)
k = self.request.get("key")
disp = Books.all().filter("prod_id =", k).get()
self.render("order1.html", disp = disp, quantity = quantity)
def post(self):
if self.user:
cust_name = self.request.get("cusname")
address = self.request.get("address")
college = self.request.get("college")
book_name = self.request.get("book_name")
qua = self.request.get("quant")
tot_amount = self.request.get("tot_amount")
cont = self.request.get("mobile")
book_id = self.request.get("book_id")
email_id = self.request.get("email")
if(cust_name and address and college and book_name and qua and tot_amount and cont and book_id and email_id):
quantity = int(qua)
total_amount = int(tot_amount)
contact_no = int(cont)
ordered = Orders(cust_name = cust_name, address = address, college = college,
book_name = book_name, quantity = quantity, total_amount = total_amount,
contact_no = contact_no, book_id = book_id, email_id = email_id)
ordered.put()
self.redirect("/successful_order")
else:
k = self.request.get("key")
qu = self.request.get("quantity")
quantity = int(qu)
disp=Books.all().filter("prod_id =", k).get()
params = dict()
params['error_form'] = "please fill all the order details"
self.render("order1.html", disp = disp, quantity = quantity, **params)
else:
k = self.request.get("key")
qu = self.request.get("quantity")
quantity = int(qu)
disp=Books.all().filter("prod_id =", k).get()
params = dict()
params['error_user'] = "please login to procced"
self.render("order1.html", disp = disp, quantity = quantity, **params)
class ContactUs(Handler):
def get(self):
self.render("cont.html")
class AboutUs(Handler):
def get(self):
self.render("aboutus.html")
class SuccessOrder(Handler):
def get(self):
self.render("successorder.html")
class AjaxHandler(Handler):
def get(self):
self.response.write("If you can read this message, do know that Ajax is working tirelessly behind the scenes to load this data dynamically ! ")
class GGHandler(Handler):
def get(self):
self.render("gg.html")
class SearchHandler(Handler):
def get(self):
self.render("search.html")
def post(self):
keey = self.request.get('keey')
url = "http://www.amazon.in/s/ref=nb_sb_noss?url=search-alias%3Daps&field-keywords=" + str(keey)
url1 = "http://books.rediff.com/" + str(keey) + "?sc_cid=books_inhomesrch"
ss = requests.get(url)
ss1 = requests.get(url1)
src = ss.text
src1 = ss1.text
obj = BeautifulSoup(src, 'html5lib')
obj1 = BeautifulSoup(src1, 'html5lib')
li = []
ai = []
for e in obj.findAll("span", {'class' : 'lrg bold'}):
title = e.string
li.append(title)
for e in obj1.findAll("a", {'class' : 'bold'}):
title = e.string
ai.append(title)
self.render("searchresult.html", li = li, ai = ai, keey = keey)
class RSSHandler(Handler):
def get(self):
rsss = Books.all().fetch(1000)
rss = list(rsss)
return self.render_json([p.as_dict() for p in rss])
class EbayHandler(Handler):
def get(self):
app = webapp2.WSGIApplication([('/', MainPage),
('/signup', Register),
('/login', Login),
('/logout', Logout),
('/template', Template),
('/newbooks', NewBooks),
('/contactus', ContactUs),
('/aboutus', AboutUs),
('/oldbooks',OldBooks),
('/order',Order),
('/successful_order', SuccessOrder),
('/cart',Cart),
('/ajax', AjaxHandler),
('/tcet', GGHandler),
('/search', SearchHandler),
('/rss', RSSHandler),
('/ebay', EbayHandler)], debug=True) | mit | 342,617,429,224,100,900 | 41.41651 | 154 | 0.572035 | false |
Percona-QA/package-testing | molecule/pdmysql/pdps-setup/molecule/tests/test_ps.py | 1 | 6751 | import os
import pytest
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
DEBPACKAGES = ['percona-server-server', 'percona-server-test',
'percona-server-dbg', 'percona-server-source',
'percona-server-client', 'percona-server-tokudb',
'percona-server-rocksdb', 'percona-mysql-router',
'percona-mysql-shell']
RPMPACKAGES = ['percona-server-server', 'percona-server-client',
'percona-server-test', 'percona-server-debuginfo',
'percona-server-devel', 'percona-server-tokudb',
'percona-server-rocksdb', 'percona-mysql-router',
'percona-mysql-shell']
PLUGIN_COMMANDS = ["mysql -e \"CREATE FUNCTION"
" fnv1a_64 RETURNS INTEGER SONAME 'libfnv1a_udf.so';\"",
"mysql -e \"CREATE FUNCTION"
" fnv_64 RETURNS INTEGER SONAME 'libfnv_udf.so';\"",
"mysql -e \"CREATE FUNCTION"
" murmur_hash RETURNS INTEGER SONAME 'libmurmur_udf.so';\"",
"mysql -e \"CREATE FUNCTION"
" version_tokens_set RETURNS STRING SONAME 'version_token.so';\"",
"mysql -e \"CREATE FUNCTION"
" version_tokens_show RETURNS STRING SONAME 'version_token.so';\"",
"mysql -e \"CREATE FUNCTION"
" version_tokens_edit RETURNS STRING SONAME 'version_token.so';\"",
"mysql -e \"CREATE FUNCTION"
" version_tokens_delete RETURNS STRING SONAME 'version_token.so';\"",
"mysql -e \"CREATE FUNCTION"
" version_tokens_lock_shared RETURNS INT SONAME 'version_token.so';\"",
"mysql -e \"CREATE FUNCTION"
" version_tokens_lock_exclusive RETURNS INT SONAME 'version_token.so';\"",
"mysql -e \"CREATE FUNCTION"
" version_tokens_unlock RETURNS INT SONAME 'version_token.so';\"",
"mysql -e \"INSTALL PLUGIN"
" mysql_no_login SONAME 'mysql_no_login.so';\"",
"mysql -e \"CREATE FUNCTION"
" service_get_read_locks RETURNS INT SONAME 'locking_service.so';\"",
"mysql -e \"CREATE FUNCTION"
" service_get_write_locks RETURNS INT SONAME 'locking_service.so';\"",
"mysql -e \"CREATE FUNCTION"
" service_release_locks RETURNS INT SONAME 'locking_service.so';\"",
"mysql -e \"INSTALL PLUGIN"
" validate_password SONAME 'validate_password.so';\"",
"mysql -e \"INSTALL PLUGIN"
" version_tokens SONAME 'version_token.so';\"",
"mysql -e \"INSTALL PLUGIN"
" rpl_semi_sync_master SONAME 'semisync_master.so';\"",
"mysql -e \"INSTALL PLUGIN"
" rpl_semi_sync_slave SONAME 'semisync_slave.so';\"",
"mysql -e \"INSTALL PLUGIN"
" connection_control SONAME 'connection_control.so';\"",
"mysql -e \"INSTALL PLUGIN"
" connection_control_failed_login_attempts SONAME 'connection_control.so';\""]
COMPONENTS = ['component_validate_password', 'component_log_sink_syseventlog',
'component_log_sink_json', 'component_log_filter_dragnet',
'component_audit_api_message_emit']
VERSION = os.environ.get("VERSION")
def is_running(host):
cmd = 'ps auxww| grep -v grep | grep -c "mysql"'
result = host.run(cmd)
print(result.stdout)
stdout = int(result.stdout)
if stdout == 0:
return True
return False
@pytest.mark.parametrize("package", DEBPACKAGES)
def test_check_deb_package(host, package):
dist = host.system_info.distribution
if dist.lower() in ["redhat", "centos", 'rhel']:
pytest.skip("This test only for Debian based platforms")
pkg = host.package(package)
assert pkg.is_installed
assert VERSION in pkg.version, pkg.version
@pytest.mark.parametrize("package", RPMPACKAGES)
def test_check_rpm_package(host, package):
dist = host.system_info.distribution
if dist.lower() in ["debian", "ubuntu"]:
pytest.skip("This test only for RHEL based platforms")
pkg = host.package(package)
assert pkg.is_installed
assert VERSION in pkg.version, pkg.version
@pytest.mark.parametrize("binary", ['mysqlsh', 'mysql', 'mysqlrouter'])
def test_binary_version(host, binary):
cmd = "{} --version".format(binary)
result = host.run(cmd)
print(result.stdout)
assert result.rc == 0, result.stderr
assert VERSION in result.stdout, result.stdout
@pytest.mark.parametrize('component', ['@@INNODB_VERSION', '@@VERSION'])
def test_mysql_version(host, component):
with host.sudo("root"):
cmd = "mysql -e \"SELECT {}; \"| grep -c \"{}\"".format(component, VERSION)
result = host.run(cmd)
print(result.stdout)
assert result.rc == 0, result.stderr
assert int(result.stdout) == 1, result.stdout
@pytest.mark.parametrize('plugin_command', PLUGIN_COMMANDS)
def test_plugins(host, plugin_command):
with host.sudo("root"):
result = host.run(plugin_command)
print(result.stdout)
assert result.rc == 0, result.stderr
@pytest.mark.parametrize("component", COMPONENTS)
def test_components(component, host):
with host.sudo("root"):
cmd = 'mysql -Ns -e "select count(*) from mysql.component where component_urn=\"file://{}\";"'.format(component)
check_component = host.run(cmd)
if check_component.rc == 0:
inst_cmd = 'mysql -e "INSTALL COMPONENT \"file://{}\";"'.format(component)
inst_res = host.run(inst_cmd)
assert inst_res.rc == 0, inst_res.stderr
check_cmd = 'mysql -Ns -e "select count(*) from mysql.component where component_urn=\"file://{}\";"'.format(
component)
check_result = host.run(check_cmd)
assert check_result.rc == 1, (check_result.rc, check_result.stderr, check_result.stdout)
def test_madmin(host):
with host.sudo("root"):
mysql = host.service("mysql")
assert mysql.is_running
cmd = 'mysqladmin shutdown'
shutdown = host.run(cmd)
assert shutdown.rc == 0, shutdown.stdout
mysql = host.service("mysql")
assert not mysql.is_running
cmd = 'service mysql start'
start = host.run(cmd)
assert start.rc == 0, start.stdout
mysql = host.service("mysql")
assert mysql.is_running
| gpl-2.0 | -2,438,026,188,252,262,400 | 42.837662 | 120 | 0.58969 | false |
JarbasAI/JarbasAI | jarbas_skills/skill_wiki/__init__.py | 1 | 3331 | # Copyright 2016 Mycroft AI, Inc.
#
# This file is part of Mycroft Core.
#
# Mycroft Core is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycroft Core is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycroft Core. If not, see <http://www.gnu.org/licenses/>.
from random import randrange
import re
import wikipedia as wiki
from adapt.intent import IntentBuilder
from os.path import join, dirname
from mycroft.skills.core import MycroftSkill
from mycroft.util import read_stripped_lines
from mycroft.util.log import getLogger
__author__ = 'jdorleans'
LOGGER = getLogger(__name__)
class WikipediaSkill(MycroftSkill):
def __init__(self):
super(WikipediaSkill, self).__init__(name="WikipediaSkill")
self.max_results = self.config.get('max_results', 3)
self.max_phrases = self.config.get('max_phrases', 3)
self.question = 'Would you like to know more about ' # TODO - i10n
self.feedback_prefix = read_stripped_lines(
join(dirname(__file__), 'dialog', self.lang,
'FeedbackPrefix.dialog'))
self.feedback_search = read_stripped_lines(
join(dirname(__file__), 'dialog', self.lang,
'FeedbackSearch.dialog'))
def initialize(self):
intent = IntentBuilder("WikipediaIntent").require(
"WikipediaKeyword").require("ArticleTitle").build()
self.register_intent(intent, self.handle_intent)
def handle_intent(self, message):
try:
title = message.data.get("ArticleTitle")
self.__feedback_search(title)
results = wiki.search(title, self.max_results)
summary = re.sub(
r'\([^)]*\)|/[^/]*/', '',
wiki.summary(results[0], self.max_phrases))
self.speak(summary)
except wiki.exceptions.DisambiguationError as e:
options = e.options[:self.max_results]
LOGGER.debug("Multiple options found: " + ', '.join(options))
self.__ask_more_about(options)
except Exception as e:
LOGGER.error("Error: {0}".format(e))
def __feedback_search(self, title):
prefix = self.feedback_prefix[randrange(len(self.feedback_prefix))]
feedback = self.feedback_search[randrange(len(self.feedback_search))]
sentence = feedback.replace('<prefix>', prefix).replace(
'<title>', title)
self.speak(sentence, metadata={"more_speech": True})
def __ask_more_about(self, opts):
sentence = self.question
size = len(opts)
for idx, opt in enumerate(opts):
sentence += opt
if idx < size - 2:
sentence += ', '
elif idx < size - 1:
sentence += ' or ' # TODO - i10n
self.speak(sentence)
def stop(self):
pass
def create_skill():
return WikipediaSkill()
| gpl-3.0 | -5,092,535,073,440,309,000 | 33.340206 | 77 | 0.631042 | false |
ha1fpint/PeepingTom-Modified | peepingtom2.py | 1 | 11299 | #!/usr/bin/env python
import sys
import urllib2
import subprocess
import re
import time
import os
import hashlib
import random
import requests
import urllib2
#=================================================
# MAIN FUNCTION
#=================================================
def main():
# depenency check
if not all([os.path.exists('phantomjs'), os.path.exists('/usr/bin/curl')]):
print '[!] PhantomJS and cURL required.'
return
# parse options
import argparse
usage = """
PeepingTom - Tim Tomes (@LaNMaSteR53) (www.lanmaster53.com)
Dependencies:
- PhantomJS
- cURL
$ python ./%(prog)s <mode> <path>"""
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument('-l', help='list input mode. path to list file.', dest='list_file', action='store')
parser.add_argument('-x', help='xml input mode. path to Nessus/Nmap XML file.', dest='xml_file', action='store')
parser.add_argument('-s', help='single input mode. path to target, remote URL or local path.', dest='target', action='store')
parser.add_argument('-o', help='output directory', dest='output', action='store')
parser.add_argument('-t', help='socket timeout in seconds. default is 8 seconds.', dest='timeout', type=int, action='store')
parser.add_argument('-v', help='verbose mode', dest='verbose', action='store_true', default=False)
parser.add_argument('-b', help='open results in browser', dest='browser', action='store_true', default=False)
opts = parser.parse_args()
# process options
# input source
if opts.list_file:
try:
targets = open(opts.list_file).read().split()
except IOError:
print '[!] Invalid path to list file: \'%s\'' % opts.list_file
return
elif opts.xml_file:
# optimized portion of Peeper (https://github.com/invisiblethreat/peeper) by Scott Walsh (@blacktip)
import xml.etree.ElementTree as ET
try: tree = ET.parse(opts.xml_file)
except IOError:
print '[!] Invalid path to XML file: \'%s\'' % opts.xml_file
return
except ET.ParseError:
print '[!] Not a valid XML file: \'%s\'' % opts.xml_file
return
root = tree.getroot()
if root.tag.lower() == 'nmaprun':
# parse nmap file
targets = parseNmap(root)
elif root.tag.lower() == 'nessusclientdata_v2':
# parse nessus file
targets = parseNessus(root)
print '[*] Parsed targets:'
for x in targets: print x
elif opts.target:
targets = [opts.target]
else:
print '[!] Input mode required.'
return
# storage location
if opts.output:
directory = opts.output
if os.path.isdir(directory):
print '[!] Output directory already exists: \'%s\'' % directory
return
else:
random.seed()
directory = time.strftime('%y%m%d_%H%M%S', time.localtime()) + '_%04d' % random.randint(1, 10000)
# connection timeout
timeout = opts.timeout if opts.timeout else 8
print '[*] Analyzing %d targets.' % (len(targets))
print '[*] Storing data in \'%s/\'' % (directory)
os.mkdir(directory)
report = 'peepingtom.html'
outfile = '%s/%s' % (directory, report)
# logic to gather screenshots and headers for the given targets
db = {'targets': []}
cnt = 0
tot = len(targets) * 2
previouslen = 0
try:
for target in targets:
# Displays the target name to the right of the progress bar
if opts.verbose:
printProgress(cnt, tot, target, previouslen)
else:
printProgress(cnt, tot)
imgname = '%s.png' % re.sub('\W','',target)
srcname = '%s.txt' % re.sub('\W','',target)
imgpath = '%s/%s' % (directory, imgname)
srcpath = '%s/%s' % (directory, srcname)
getCapture(target, imgpath, timeout)
cnt += 1
previouslen = len(target)
target_data = {}
target_data['url'] = target
target_data['imgpath'] = imgname
target_data['srcpath'] = srcname
target_data['hash'] = hashlib.md5(open(imgpath).read()).hexdigest() if os.path.exists(imgpath) else 'z'*32
target_data['headers'] = getHeaders(target, srcpath, timeout)
#SJ edit
if get_status(target + '/robots.txt') == 200:
try:
robots = requests.get(target + "/robots.txt", verify=False)
print robots.headers['content-type'].split(';',2)
if robots.headers['content-type'].split(';',2)[0] == "text/plain":
robotText = robots.text.encode('utf-8')
#robots2 = robotText.splitlines()
target_data['robots'] = robotText
else:
target_data['robots'] = "empty robots.txt"
except Exception:
target_data['robots'] = "exception empty robots.txt"
else:
robots = 'no robots file'
target_data['robots'] = robots
db['targets'].append(target_data)
cnt += 1
print printProgress(1,1)
except Exception as e:
print '[!] %s' % (e.__str__())
# build the report and exit
buildReport(db, outfile)
if opts.browser:
import webbrowser
path = os.getcwd()
w = webbrowser.get()
w.open('file://%s/%s/%s' % (path, directory, report))
print '[*] Done.'
#=================================================
# SUPPORT FUNCTIONS
#=================================================
#SJ edit - check up
def get_status(target):
try:
conn = urllib2.urlopen(target, timeout = 2)
print target
print conn.code
return conn.code
except urllib2.URLError as e:
return 123
except Exception:
return 123
def parseNmap(root):
http_ports = [80,81,8000,8080,8081,8082]
https_ports = [443,444,8443]
targets = []
# iterate through all host nodes
for host in root.iter('host'):
hostname = host.find('address').get('addr')
# hostname node doesn't always exist. when it does, overwrite address previously assigned to hostanme
hostname_node = host.find('hostnames').find('hostname')
if hostname_node is not None: hostname = hostname_node.get('name')
# iterate through all port nodes reported for the current host
for item in host.iter('port'):
state = item.find('state').get('state')
if state.lower() == 'open':
# service node doesn't always exist when a port is open
service = item.find('service').get('name') if item.find('service') is not None else ''
port = item.get('portid')
if 'http' in service.lower() or int(port) in (http_ports + https_ports):
proto = 'http'
if 'https' in service.lower() or int(port) in https_ports:
proto = 'https'
url = '%s://%s:%s' % (proto, hostname, port)
if not url in targets:
targets.append(url)
elif not service:
# show the host and port for unknown services
print '[-] Unknown service: %s:%s' % (hostname, port)
return targets
def parseNessus(root):
targets = []
for host in root.iter('ReportHost'):
name = host.get('name')
for item in host.iter('ReportItem'):
svc = item.get('svc_name')
plugname = item.get('pluginName')
if (svc in ['www','http?','https?'] and plugname.lower().startswith('service detection')):
port = item.get('port')
output = item.find('plugin_output').text.strip()
proto = guessProto(output)
url = '%s://%s:%s' % (proto, name, port)
if not url in targets:
targets.append(url)
return targets
def guessProto(output):
# optimized portion of Peeper (https://github.com/invisiblethreat/peeper) by Scott Walsh (@blacktip)
secure = re.search('TLS|SSL', output)
if secure:
return "https"
return "http"
def getCapture(url, imgpath, timeout):
cookie_file = 'cookies'
cmd = './phantomjs --ssl-protocol=any --ignore-ssl-errors=yes --cookies-file="%s" ./capture.js "%s" "%s" %d' % (cookie_file, url, imgpath, timeout*1000)
returncode, response = runCommand(cmd)
# delete cookie file
#os.remove(cookie_file)
return returncode
def getHeaders(url, srcpath, timeout):
#cmd = 'curl -sILk %s --connect-timeout %d' % (url, timeout)
cmd = 'curl -sLkD - %s -o %s --max-time %d' % (url, srcpath, timeout)
returncode, response = runCommand(cmd)
return response
def runCommand(cmd):
proc = subprocess.Popen([cmd], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
stdout, stderr = proc.communicate()
response = ''
if stdout: response += str(stdout)
if stderr: response += str(stderr)
return proc.returncode, response.strip()
def printProgress(cnt, tot, target='', previouslen=0):
percent = 100 * float(cnt) / float(tot)
if target and previouslen > len(target):
target = target + ' ' * (previouslen - len(target))
sys.stdout.write('[%-40s] %d%% %s\r' % ('='*int(float(percent)/100*40), percent, target))
sys.stdout.flush()
return ''
def buildReport(db, outfile):
live_markup = ''
error_markup = ''
dead_markup = ''
# process markup for live targets
for live in sorted(db['targets'], key=lambda k: k['hash']):
live_markup += "<tr><td class='tg-0ord'><a href='{0}' target='_blank'><img src='{0}' onerror=\"this.parentNode.parentNode.innerHTML='No image available.';\" /></a></td><td class='tg-0ord'><a href='{1}' target='_blank'>{1}</a> (<a href='{2}' target='_blank'>source</a>)<br/><pre>{3}</pre><pre><p>{4}</p></pre></td></tr>\n".format(live['imgpath'],live['url'],live['srcpath'],live['headers'],live['robots']) #addded robots
# add markup to the report
file = open(outfile, 'w')
file.write("""
<!doctype html>
<head>
<style type="text/css">
.tg {border-collapse:collapse;border-spacing:0;border-color:#ccc;}
.tg td{font-family:Arial, sans-serif;font-size:14px;padding:10px 5px;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#ccc;color:#333;background-color:#fff;}
.tg th{font-family:Arial, sans-serif;font-size:14px;font-weight:normal;padding:10px 5px;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#ccc;color:#333;background-color:#f0f0f0;}
.tg .tg-0ord{text-align:left;background-color:#f0f0f0;}
.tg .tg-s6z2{text-align:center;background-color:#c0c0c0;}
</style>
</head>
<body>
<table class="tg">
<tr>
<th class="tg-s6z2">Screenshot</th>
<th class="tg-s6z2">Details</th>
</tr>
%s
</table>
</body>
</html>""" % (live_markup))
file.close()
#=================================================
# START
#=================================================
if __name__ == "__main__": main()
| gpl-3.0 | -3,573,121,943,859,222,000 | 38.645614 | 427 | 0.570139 | false |
snsokolov/contests | practice/envelopes.py | 1 | 2343 | #!/usr/bin/env python3
# envelopes.py - Envelopes program by Sergey 2017
import unittest
import random
import collections
import operator
###############################################################################
# Envelopes Class (Main Program)
###############################################################################
class Envelopes:
""" Envelopes representation """
def __init__(self, max, max_series):
""" Default constructor """
self.max = max
self.max_series = max_series
self.series = [self.gen() for _ in range(max_series)]
self.matches = collections.defaultdict(list)
for envelopes in self.series:
self.matches[envelopes[0]].append(envelopes[1])
self.matches[envelopes[1]].append(envelopes[0])
def gen(self):
rnd = random.randrange(self.max)
envelopes = [rnd, rnd*2]
random.shuffle(envelopes)
return tuple(envelopes)
###############################################################################
# Unit Tests
###############################################################################
class unitTests(unittest.TestCase):
def test_Envelopes_basic(self):
""" Envelopes class testing """
d = Envelopes(max=10, max_series=5)
# Check that series are generated
self.assertEqual(len(d.series), 5)
# Make sure envelopes are generated correctly
envelopes = d.gen()
self.assertTrue(
envelopes[0] == 2 * envelopes[1] or
envelopes[1] == 2 * envelopes[0])
# Check that array of matches is generated correctly
self.assertTrue(d.series[0][1] in d.matches[d.series[0][0]])
def test_Envelopes_series(self):
for _ in range(5):
d = Envelopes(max=100, max_series=10000)
# Equity when not opening envelopes
eno = (0, 0)
# Equity when opening first envelope
eo = (0, 0)
num = 20
for envelopes in d.series:
eno = tuple(map(operator.add, eno, envelopes))
if envelopes[0] == num:
eo = tuple(map(operator.add, eo, envelopes))
print("Not opening: ", eno)
print("Opening: ", eo)
if __name__ == "__main__":
unittest.main(argv=[" "])
| unlicense | -7,317,045,707,159,549,000 | 29.038462 | 79 | 0.498506 | false |
dgarrett622/ObsDist | ObsDist/Population.py | 1 | 5657 | # -*- coding: utf-8 -*-
"""
v1: Created on November 28, 2016
author: [email protected]
"""
import numpy as np
import astropy.units as u
class Population(object):
"""This class contains all the planetary parameters necessary for sampling
or finding probability distribution functions
Args:
a_min (Quantity or float):
minimum population semi-major axis with unit (Quantity) attached or
in AU (float)
a_max (Quantity or float):
maximum population semi-major axis with unit (Quantity) attached or
in AU (float)
e_min (float):
minimum population eccentricity
e_max (float):
maximum population eccentricity
R_min (Quantity or float):
minimum population planetary radius with unit (Quantity) attached
or in AU (float)
R_max (Quantity or float):
maximum population planetary radius with unit (Quantity) attached
or in AU (float)
p_min (float):
minimum population geometric albedo
p_max (float):
maximum population geometric albedo
Attributes:
arange (ndarray):
1D numpy ndarray containing minimum and maximum semi-major axis in
AU
erange (ndarray):
1D numpy ndarray containing minimum and maximum eccentricity
Rrange (ndarray):
1D numpy ndarray containing minimum and maximum planetary radius in
AU
prange (ndarray):
1D numpy ndarray containing minimum and maximum geometric albedo
Phi (callable):
phase function
"""
def __init__(self, a_min=None, a_max=None, e_min=None, e_max=None, \
R_min=None, R_max=None, p_min=None, p_max=None):
unittest = u.quantity.Quantity
# minimum semi-major axis (AU)
if a_min == None:
a_min = 0.5
elif type(a_min) == unittest:
a_min = a_min.to('AU').value
# maximum semi-major axis (AU)
if a_max == None:
a_max = 5.0
elif type(a_max) == unittest:
a_max = a_max.to('AU').value
# semi-major axis range
self.arange = np.array([a_min, a_max])
# minimum eccentricity
if e_min == None:
e_min = np.finfo(float).eps*100.0
# maximum eccentricity
if e_max == None:
e_max = 0.35
# eccentricity range
self.erange = np.array([e_min, e_max])
# minimum planetary radius
if R_min == None:
R_min = 6000*u.km
R_min = R_min.to('AU').value
elif type(R_min) == unittest:
R_min = R_min.to('AU').value
# maximum planetary radius
if R_max == None:
R_max = 30000*u.km
R_max = R_max.to('AU').value
elif type(R_max) == unittest:
R_max = R_max.to('AU').value
self.Rrange = np.array([R_min, R_max]) # in AU
# minimum albedo
if p_min == None:
p_min = 0.2
# maximum albedo
if p_max == None:
p_max = 0.3
self.prange = np.array([p_min, p_max])
# phase function
self.Phi = lambda b: (1.0/np.pi)*(np.sin(b) + (np.pi-b)*np.cos(b))
def f_a(self, a):
"""Probability density function for semi-major axis in AU
Args:
a (float or ndarray):
Semi-major axis value(s) in AU
Returns:
f (ndarray):
Probability density (units of 1/AU)
"""
a = np.array(a, ndmin=1, copy=False)
# uniform
# f = ((a >= self.arange[0]) & (a <= self.arange[1])).astype(int)/(self.arange[1]-self.arange[0])
# log-uniform
f = ((a >= self.arange[0]) & (a <= self.arange[1])).astype(int)/(a*np.log(self.arange[1]/self.arange[0]))
return f
def f_e(self, e):
"""Probability density function for eccentricity
Args:
e (float or ndarray):
eccentricity value(s)
Returns:
f (ndarray):
probability density
"""
e = np.array(e, ndmin=1, copy=False)
# uniform
f = ((e >= self.erange[0]) & (e <= self.erange[1])).astype(int)/(self.erange[1]-self.erange[0])
return f
def f_R(self, R):
"""Probability density function for planet radius (AU)
Args:
R (float or ndarray):
planet radius in AU
Returns:
f (ndarray):
probability density function value
"""
R = np.array(R, ndmin=1, copy=False)
# uniform
# f = ((R >= self.Rrange[0]) & (R <= self.Rrange[1])).astype(int)/(self.Rrange[1]-self.Rrange[0])
# log-uniform
f = ((R >= self.Rrange[0]) & (R <= self.Rrange[1])).astype(int)/(R*np.log(self.Rrange[1]/self.Rrange[0]))
return f
def f_p(self, p):
"""Probability density function for geometric albedo
Args:
x (float or ndarray):
geometric albedo
Returns:
f (ndarray):
probability density function value
"""
p = np.array(p, ndmin=1, copy=False)
# uniform
f = ((p >= self.prange[0]) & (p <= self.prange[1])).astype(int)/(self.prange[1]-self.prange[0])
return f | mit | -879,130,761,740,373,000 | 31.331429 | 113 | 0.505745 | false |
tbphu/fachkurs_2016_project | model.py | 1 | 5801 | import modeldata
import molecules as mol
import translation
import replication as rep
import transcription
class Output:
"""
class for handling the simulation results of the different species types
"""
def __init__(self, model):
self.meta = {}
self.model = model
self.timecourses = {state: SimulationResult(model.states[state]) for state in model.states}
def add_timepoint(self, species):
"""
add a simulation time point for one species
@param species: mol.BioMolecule
@return: None
"""
if isinstance(self.model.states[species], mol.Polymer):
pass # TODO: implement a useful method for Polymers
elif isinstance(self.model.states[species], mol.BioMoleculeCount):
self.timecourses[species].add_timepoint(self.model.states[species].count, self.model.timestep)
class SimulationResult:
"""
handles and stores a simulation result for one species
"""
def __init__(self, species):
self.name = species.name
self.value = []
self.time = []
def add_timepoint(self, time, value):
self.value.append(value)
self.time.append(time)
class Model:
"""
Initializes the states and processes for the model and lets the processes update their corresponding states.
"""
def __init__(self):
self.states = {} #dictionary with all molecules {Rib_name: Rib_object, mrna_ids: mrna_object, mrna2_id: ...}
self.processes = {} #dictionary filled with all active processes
self.timestep = 0
self.mrnas = {} # all selfs should be initialized in the constructor
self.ribosomes = {} #dictionary will be filled with 10 Ribosomes
self.helicases = {}
self.polymerases = {}
self.chromosomes = {}
self.volume = 1
self.db = modeldata.ModelData()
# self.chromosomes=modeldata.ModelData.createchromosomes() #list with chromosomes
self.genes=modeldata.ModelData.creategenes() #dictionary with genes
self.__initialize_macromolecules()
self.__initialize_states()
self.__initialize_processes()
#self.results = Output(self)
def __initialize_macromolecules(self):
self.ribosomes = {'Ribosomes': mol.Ribosome('Ribos', 'Ribosomes', 187000)}
self.polymerase2= mol.RNAPolymeraseII('Pol2', 'Polymerase2', 100000000)
self.nucleotides= mol.NucleotidPool('Nucs','Nucleotides', 1000000000000)
self.helicases = {'DnaB': rep.Helicase("Helicase", "DnaB", 100)}
self.polymerases = {'Polymerase3' :rep.Polymerase("Polymerase", "Polymerase3", 100)}
self.chromosomes = {x.id:x for x in modeldata.ModelData.createchromosomes()}
#for i, mrna in enumerate(self.db.get_states(mol.MRNA)):
# mid, name, sequence = mrna
# sequence=list(sequence)
# sequence[0:3]='AUG'
#sequence[12:15]='UGA'
#sequence=''.join(sequence)
#self.mrnas[mid] = [mol.MRNA(mid, name, sequence)]
def __initialize_states(self):
"""
initialize the different states
"""
self.states.update(self.ribosomes) #adding dictionaries to self.states
self.states.update(self.helicases)
self.states.update(self.polymerases)
self.states.update(self.chromosomes)
self.states.update(self.mrnas)
self.states["Nucleotides"] = self.nucleotides
def __initialize_processes(self):
"""
initialize processes
"""
# transcription
trsc = transcription.Transcription(0, 'Transcription')
trsc.set_states(self.genes.keys(), self.polymerase2)
self.processes["Transkription"] = trsc
# translation
trsl = translation.Translation(1, "Translation")
trsl.set_states(self.mrnas.keys(), self.ribosomes.keys()) #states in Process are keys: Rib_name, mrna_name?!
self.processes["Translation"] = trsl
# replication
repl =rep.Replication(2, "Replication")
replication_enzyme_ids= list(self.helicases.keys()).extend(list(self.polymerases.keys()))
repl.set_states(list(self.chromosomes.keys()), replication_enzyme_ids)
self.processes.update({"Replication":repl})
def step(self):
"""
Do one update step for each process.
"""
for p in self.processes:
self.processes[p].update(self)
#for state in self.states:
# self.results.add_timepoint(state)
self.timestep += 1
def simulate(self, steps, log=True):
"""
Simulate the model for some time.
"""
for s in range(steps):
self.step()
if log: # This could be an entry point for further logging
#print all states
print(self.states.keys())
a = 0
for i in self.states.keys():
if str(i)[0].isdigit():
a = 1+a
print("Die Anzahl der Chromosomen nach " + str(s) + " update Schritten beträgt " + str(a))
keylist = self.states.keys()
keylist = [str(x) for x in keylist]
mrnalist = [x for x in keylist if "mRNA" in x]
print("Die Anzahl der mRNAs nach " + str(s) + " update Schritten beträgt " + str(len(mrnalist)))
print("Folgende mRNAs wurden kreiert: " + str([x for x in keylist if "mRNA" in x]))
print("es sind noch " + str(self.states["Nucleotides"].count) + " freie vorhanden")
if __name__ == "__main__":
c = Model()
c.simulate(300, log=True)
| mit | -8,622,250,622,444,500,000 | 34.359756 | 126 | 0.594068 | false |
dziobas/ChangesCheckstyle | checkstyle.py | 1 | 3398 | #!/usr/bin/python
import os
import re
import sys
import getopt
def run_checkstyle(file_name, checkstyle, project_dir):
output = os.popen("(cd " + project_dir + "; \
java -jar checkstyle.jar \
-c " + checkstyle + " \
" + file_name + ")").read()
output = output.split("\n")
length = len(output)
return output[1:length - 2] # remove first and last line
def find_changed_lines(git_diff): # returns changed line numbers
changed_lines_pattern = "@@ [0-9\-+,]+ ([0-9\-+,]+) @@"
lines = []
for change in re.findall(changed_lines_pattern, git_diff):
value = change.split(",")
if len(value) == 1: # one line changed
line_number = (value[0])
lines.append(int(line_number))
elif len(value) == 2: # more lines changed
line_number = int(value[0])
count = int(value[1])
for i in range(line_number, line_number + count):
lines.append(i)
return lines
def filter_out(processed_line): # True when file should be filtered out
column = processed_line.split("\t")
if len(column) != 3:
return True
added_lines = column[0]
name = column[2]
return not name.endswith(".java") or not added_lines.isdigit() or not int(added_lines) > 0
def get_file_name(processed_line):
return processed_line.split("\t")[2]
def introduced_error(error_message, changed_lines, out):
line_pattern = ":(\d+):"
number = re.search(line_pattern, error_message).group(1)
number = int(number)
if number in changed_lines:
print "Introduced Error: " + error_message
out.append(error_message)
else:
print "Warning: " + error_message
def usage():
return "checkstyle -c <checkstyle.xml> -d <project directory> -h <help>"
def main(argv):
try:
opts, args = getopt.getopt(argv, "c:d:hx")
except getopt.GetoptError:
print usage()
sys.exit(2)
checkstyle_rules = "checkstyle-rules.xml"
project_dir = "."
debug = False
for opt, arg in opts:
if opt == "-c":
checkstyle_rules = arg
elif opt == "-d":
project_dir = arg
elif opt == "-h":
print usage()
elif opt == "-x":
debug = True
if debug:
print "dir: " + project_dir + " rules: " + checkstyle_rules
diff_command = "(cd " + project_dir + "; git diff HEAD^ --numstat)"
print "Processing"
errors = []
list_of_files = os.popen(diff_command).read().split("\n")
for file_line in list_of_files:
if filter_out(file_line):
# skip non java files and without added lines
continue
file_name = get_file_name(file_line)
if debug:
print "check " + file_name
# get changed lines
changes = os.popen("(cd " + project_dir + "; git diff -U0 HEAD^ " + file_name + ")").read()
lines = find_changed_lines(changes)
checkstyle = run_checkstyle(file_name, checkstyle_rules, project_dir)
for item in checkstyle:
# extract errors introduced in added lines and append errors list
introduced_error(item, lines, errors)
if errors:
print "Errors in added lines:"
for item in errors:
print item
sys.exit(1)
if __name__ == "__main__":
main(sys.argv[1:]) | apache-2.0 | -1,082,372,101,561,743,700 | 28.051282 | 99 | 0.572396 | false |
mattwilliamson/webhookr | webhookr/sockets.py | 1 | 3157 | import logging
from socketio.namespace import BaseNamespace
from socketio.mixins import RoomsMixin, BroadcastMixin
class WebhookrChannelMixin(object):
room_key = 'rooms'
def __init__(self, *args, **kwargs):
super(WebhookrChannelMixin, self).__init__(*args, **kwargs)
if self.room_key not in self.session:
self.session[self.room_key] = set() # a set of simple strings
def join(self, room):
"""Lets a user join a room on a specific Namespace."""
self.session[self.room_key].add(self._get_room_name(room))
def leave(self, room):
"""Lets a user leave a room on a specific Namespace."""
self.session[self.room_key].remove(self._get_room_name(room))
def _get_room_name(self, room):
return self.ns_name + '_' + room
def room_subscribers(self, room, include_self=False):
room_name = self._get_room_name(room)
for sessid, socket in self.socket.server.sockets.iteritems():
if self.room_key not in socket.session:
continue
if room_name in socket.session[self.room_key] and (include_self or self.socket != socket):
yield (sessid, socket)
def all_rooms(self):
return (x[len(self.ns_name):] for x in self.session.get(self.room_key, []))
def _emit_to_channel(self, room, event, include_self, *args):
"""This is sent to all in the room (in this particular Namespace)"""
message = dict(type="event", name=event, args=args, endpoint=self.ns_name)
for sessid, socket in self.room_subscribers(room, include_self=include_self):
socket.send_packet(message)
def emit_to_channel(self, room, event, *args):
self._emit_to_channel(room, event, False, *args)
def emit_to_channel_and_me(self, room, event, *args):
self._emit_to_channel(room, event, True, *args)
class WebhookNamespace(BaseNamespace, WebhookrChannelMixin, BroadcastMixin):
def initialize(self):
self.logger = logging.getLogger("socketio.webhook")
self.log("WebhookNamespace socketio session started: %s" % self.socket)
def log(self, message):
self.logger.info("[{0}] {1}".format(self.socket.sessid, message))
def emit_subscriber_count(self, room):
# Enumerate to get length of subscribers while being lazy
i = 0
for i, x in enumerate(self.room_subscribers(room, include_self=True)):
self.logger.debug('[emit_subscriber_count] i= {}'.format(i))
total_subscribers = i + 1
self.log('Emitting totalSubscribers for {}: {}'.format(room, total_subscribers))
self.emit_to_channel_and_me(room, 'subscriber_joined', {'totalSubscribers': total_subscribers})
def on_join(self, room):
self.log('Connected')
self.room = room
self.join(room)
self.emit_subscriber_count(room)
return True
def recv_disconnect(self):
# Remove nickname from the list.
self.log('Disconnected')
for room in self.all_rooms():
self.emit_subscriber_count(room)
self.disconnect(silent=True)
return True
| mit | -3,892,924,647,368,414,700 | 36.583333 | 103 | 0.638264 | false |
Modular-Life-Assistant/MoLA | helpers/modules/BaseModule.py | 1 | 1329 | from helpers.modules.InternalBaseModule import InternalBaseModule
class BaseModule(InternalBaseModule):
"""This class is a template of methods to be implemented by modules."""
module_path = ''
def cron_day(self):
"""This method has been called one time by day"""
pass
def cron_hour(self):
"""This method has been called one time by day."""
pass
def cron_min(self):
"""This method has been called one time by min."""
pass
def cron_month(self):
"""This method has been called one time by month."""
pass
def cron_week(self):
"""This method has been called one time by week."""
pass
def cron_year(self):
"""This method has been called one time by year."""
pass
def is_available(self):
"""This module is available ?"""
return True
def init(self):
"""This module has been initialized."""
pass
def load_config(self):
"""Load module config."""
pass
def run(self):
"""This module loop running."""
pass
def started(self):
"""This module has been started."""
pass
def stopped(self):
"""This module has been stopped."""
pass
| gpl-2.0 | -7,246,877,211,243,007,000 | 22.611111 | 75 | 0.550038 | false |
yubbie/googleapps-message-recall | message_recall/frontend_views.py | 1 | 19041 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Frontend view implementations that handle user requests."""
import os
import re
import socket
import time
import jinja2
import log_utils
from models import domain_user
from models import error_reason
from models import recall_task
from models import sharded_counter
import recall_errors
import user_retriever
import view_utils
import webapp2
import wtforms
from wtforms import validators
import xsrf_helper
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.api.taskqueue import Error as TaskQueueError
from google.appengine.api.taskqueue import Task
from google.appengine.runtime import apiproxy_errors
_APPLICATION_DIR = os.path.dirname(__file__)
_CREATE_TASK_ACTION = 'CreateTask#ns'
_GET_USER_MAX_RETRIES = 2
_LOG = log_utils.GetLogger('messagerecall.views')
_MESSAGE_ID_REGEX = re.compile(r'^[\w+-=.]+@[\w.]+$')
_MESSAGE_ID_MAX_LEN = 100
_USER_ADMIN_CACHE_NAMESPACE = 'messagerecall_useradmin#ns'
_USER_ADMIN_CACHE_TIMEOUT_S = 60 * 60 * 2 # 2 hours
_USER_BILLING_CACHE_TIMEOUT_S = 60 * 60 * 24 # 24 hours
_APPLICATION_BILLING_CACHE_NAMESPACE = 'messagerecall_billing#ns'
def _CacheUserEmailBillingEnabled(user_email):
"""Cache the user_email to avoid billing-check rountrips.
Wrapped in a separate method to aid error handling.
Args:
user_email: String email address of the form [email protected].
Raises:
MessageRecallMemcacheError: If the add fails so cache issues can be noticed.
"""
if not memcache.add(user_email, True, time=_USER_BILLING_CACHE_TIMEOUT_S,
namespace=_APPLICATION_BILLING_CACHE_NAMESPACE):
raise recall_errors.MessageRecallMemcacheError(
'Unexpectedly unable to add application billing information to '
'memcache. Please try again.')
def _CacheUserEmailAsAdmin(user_email):
"""Cache the admin user_email to avoid rountrips.
Wrapped in a separate method to aid error handling.
Args:
user_email: String email address of the form [email protected].
Raises:
MessageRecallMemcacheError: If the add fails so cache issues can be noticed.
"""
if not memcache.add(user_email, True, time=_USER_ADMIN_CACHE_TIMEOUT_S,
namespace=_USER_ADMIN_CACHE_NAMESPACE):
raise recall_errors.MessageRecallMemcacheError(
'Unexpectedly unable to add admin user information to memcache. '
'Please try again.')
def _SafelyGetCurrentUserEmail():
"""Retrieve the current user's email or raise an exception.
We set 'login: required' in app.yaml so all users should be logged-in.
But, is has been observed that users.get_current_user() *can* return None.
Therefore, this must be checked.
Returns:
String email address of the currently logged-in user.
Raises:
MessageRecallAuthenticationError: If current user is noticed as None.
"""
user = None
get_user_attempts = 0
while not user and get_user_attempts < _GET_USER_MAX_RETRIES:
user = users.get_current_user()
get_user_attempts += 1
if not user:
raise recall_errors.MessageRecallAuthenticationError(
'A logged-in user was not retrieved. Please try again.')
return user.email()
def _FailIfBillingNotEnabled(user_email):
"""Ensure Google Apps Domain has billing enabled.
Billing-enabled is required to use sockets in AppEngine.
The IMAP mail api uses sockets. So this application requires billing
to be enabled.
If billing not enabled, this is observed:
FeatureNotEnabledError: The Socket API will be enabled for this application
once billing has been enabled in the admin console.
Args:
user_email: String email address of the form [email protected].
Raises:
MessageRecallAuthenticationError: If user is not properly authorized.
"""
if memcache.get(user_email, namespace=_APPLICATION_BILLING_CACHE_NAMESPACE):
return
imap_host = 'imap.gmail.com'
imap_port = 993
# The socket is discarded after 2min of no use.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((imap_host, imap_port))
except apiproxy_errors.FeatureNotEnabledError as e:
raise recall_errors.MessageRecallError(
'This AppEngine application requires billing status: '
'"Billing Enabled". Please choose "Enable Billing" in the AppEngine '
'admin console for this application (%s).' % e)
except Exception as e:
# Expect "[Errno 13] Permission denied" once billing enabled.
if str(e) != '[Errno 13] Permission denied':
raise
_CacheUserEmailBillingEnabled(user_email)
def _FailIfNonAdminUser(user_email):
"""Ensure user possesses adequate Admin authority.
This AppEngine application should set Authentication Type to:
'Google Accounts API'.
Per documentation, isAdmin is True if the user is a member of the
Google Apps System: Role = Super Admin.
https://developers.google.com/admin-sdk/directory/v1/reference/
If the user is found to be a properly authorized admin-user of this
application, then cache that fact to avoid roundtrips to the Admin SDK
for a little while.
Args:
user_email: String email address of the form [email protected].
Raises:
MessageRecallAuthenticationError: If user is not properly authorized.
"""
if memcache.get(user_email, namespace=_USER_ADMIN_CACHE_NAMESPACE):
return
retriever = user_retriever.DomainUserRetriever(
owner_email=user_email,
user_domain=view_utils.GetUserDomain(user_email),
search_query='email:%s' % user_email)
if not retriever.GetUserAttribute(user_email, 'isAdmin'):
# User is not a super-admin...
raise recall_errors.MessageRecallAuthenticationError(
'User %s is not authorized for Message Recall in this domain.'
% user_email)
_CacheUserEmailAsAdmin(user_email)
def _PreventUnauthorizedAccess():
"""Ensure user possesses adequate Admin authority."""
current_user_email = _SafelyGetCurrentUserEmail()
_FailIfNonAdminUser(current_user_email)
_FailIfBillingNotEnabled(current_user_email)
class UIBasePageHandler(webapp2.RequestHandler):
"""Setup common template handling for derived handlers."""
def __init__(self, request, response):
"""RequestHandler initialization requires base class initialization."""
self.initialize(request, response)
self.init_time = time.time()
template_dir = os.path.join(_APPLICATION_DIR, 'templates')
self._jinja_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(template_dir),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
def __del__(self):
_LOG.debug('Handler for %s took %.2f seconds',
self.request.url, time.time() - self.init_time)
def handle_exception(self, exception, debug): # pylint: disable=g-bad-name
"""Common exception handler for webapp2."""
_LOG.exception(exception)
_LOG.debug('Is the web application in debug mode? %s.', debug)
self._WriteTemplate(
template_file='error',
tpl_exception=exception,
tpl_unauthorized=isinstance(
exception, recall_errors.MessageRecallAuthenticationError))
def _WriteTemplate(self, template_file, **kwargs):
"""Common method to write from a template.
Args:
template_file: String name of a file that exists within the template
folder. For subdirectories the name may be 'sub/file'.
**kwargs: A dictionary of key-value pairs that will be available
within the template.
"""
kwargs['tpl_logout_url'] = users.create_logout_url('/')
kwargs['tpl_user_name'] = _SafelyGetCurrentUserEmail()
if '.' not in template_file:
template_file = '%s.html' % template_file
self.response.headers['X-Frame-Options'] = 'DENY' # Prevent clickjacking.
self.response.write(
self._jinja_env.get_template(template_file).render(kwargs))
class AboutPageHandler(UIBasePageHandler):
"""Handle '/about' requests to show app about info."""
def get(self): # pylint: disable=g-bad-name
"""Handler for /about get requests."""
_PreventUnauthorizedAccess()
self._WriteTemplate('about')
class CreateTaskForm(wtforms.Form):
"""Wrap and validate the form that ingests user input for a recall task.
Uses Regexp for xss protection to ensure no html tag characters are allowed.
"""
message_criteria = wtforms.TextField(
label='Message-ID', default='', validators=[
validators.Length(min=1, max=_MESSAGE_ID_MAX_LEN,
message=(u'message-id must be 1-%s characters.' %
_MESSAGE_ID_MAX_LEN)),
validators.Regexp(_MESSAGE_ID_REGEX,
message=(u'message-id format is: local-part@domain.'
'com (no spaces allowed).'))])
@property
def sanitized_message_criteria(self):
"""Helper to ensure message-id field has no extra junk.
Returns:
String as a safely scrubbed searchable message-id.
"""
return self.message_criteria.data.strip()
class CreateTaskPageHandler(UIBasePageHandler, xsrf_helper.XsrfHelper):
"""Handle '/create_task' to show default page."""
def get(self): # pylint: disable=g-bad-name
"""Handler for /create_task get requests."""
_PreventUnauthorizedAccess()
self._WriteTemplate(
template_file='create_task',
tpl_create_task_form=CreateTaskForm(self.request.GET),
xsrf_token=self.GetXsrfToken(user_email=_SafelyGetCurrentUserEmail(),
action_id=_CREATE_TASK_ACTION))
def _EnqueueMasterRecallTask(self, owner_email, message_criteria,
task_key_id):
"""Add master recall task with error handling.
Args:
owner_email: String email address of user running this recall.
message_criteria: String criteria (message-id) to recall.
task_key_id: Int unique id of the parent task.
Raises:
re-raises any task queue errors.
"""
task_name = '%s_%s' % (
view_utils.CreateSafeUserEmailForTaskName(owner_email),
view_utils.GetCurrentDateTimeForTaskName())
master_task = Task(name=task_name,
params={'owner_email': owner_email,
'task_key_id': task_key_id,
'message_criteria': message_criteria},
target='0.recall-backend',
url='/backend/recall_messages')
try:
master_task.add(queue_name='recall-messages-queue')
except TaskQueueError:
view_utils.FailRecallTask(task_key_id=task_key_id,
reason_string='Failed to enqueue master task.')
raise
def _CreateNewTask(self, owner_email, message_criteria):
"""Helper to create new task db entity and related Task for the backend.
If the master task fails creation in the db, the error will be raised
for the user to view.
If the master task fails to be enqueued, the task state is updated to
ABORTED.
Args:
owner_email: String email address of the user. Used in authorization.
message_criteria: String criteria used to find message(s) to recall.
Returns:
Urlsafe (String) key for the RecallTaskModel entity that was created.
"""
recall_task_entity = recall_task.RecallTaskModel(
owner_email=owner_email,
message_criteria=message_criteria)
recall_task_key = recall_task_entity.put()
self._EnqueueMasterRecallTask(owner_email=owner_email,
message_criteria=message_criteria,
task_key_id=recall_task_key.id())
return recall_task_key.urlsafe()
def post(self): # pylint: disable=g-bad-name
"""Handler for /create_task post requests."""
_PreventUnauthorizedAccess()
current_user_email = _SafelyGetCurrentUserEmail()
create_task_form = CreateTaskForm(self.request.POST)
if not self.IsXsrfTokenValid(
user_email=current_user_email,
action_id=_CREATE_TASK_ACTION,
submitted_xsrf_token=self.request.get('xsrf_token')):
raise recall_errors.MessageRecallXSRFError(
'[%s] Cross Site Request Forgery Checks Failed!' % current_user_email)
if not create_task_form.validate():
self._WriteTemplate(
template_file='create_task',
tpl_create_task_form=create_task_form,
xsrf_token=self.GetXsrfToken(user_email=current_user_email,
action_id=_CREATE_TASK_ACTION))
return
self.redirect('/task/%s' % self._CreateNewTask(
owner_email=current_user_email,
message_criteria=create_task_form.sanitized_message_criteria))
class DebugTaskPageHandler(UIBasePageHandler):
"""Handle '/task/debug' requests to show app debug info."""
def get(self, task_key_urlsafe): # pylint: disable=g-bad-name
"""Handler for /task/debug get requests.
Args:
task_key_urlsafe: String representation of task key safe for urls.
"""
_PreventUnauthorizedAccess()
task = recall_task.RecallTaskModel.FetchTaskFromSafeId(
user_domain=view_utils.GetUserDomain(_SafelyGetCurrentUserEmail()),
task_key_urlsafe=task_key_urlsafe)
task_key_id = task.key.id() if task else 0
counter_tuples = [
('User Retrieval Tasks Started (Expected)',
sharded_counter.GetCounterCount(
view_utils.MakeRetrievalStartedCounterName(task_key_id))),
('User Retrieval Tasks Ended (Actual)',
sharded_counter.GetCounterCount(
view_utils.MakeRetrievalEndedCounterName(task_key_id))),
('Task Backend Errors (Automatically Retried)',
sharded_counter.GetCounterCount(
view_utils.MakeBackendErrorCounterName(task_key_id)))]
self._WriteTemplate(template_file='debug_task',
tpl_counter_tuples=counter_tuples, tpl_task=task)
class HistoryPageHandler(UIBasePageHandler):
"""Handle '/history' to show default page."""
def get(self): # pylint: disable=g-bad-name
"""Handler for /history get requests."""
_PreventUnauthorizedAccess()
previous_cursor = self.request.get('task_cursor')
results, cursor, more = (
recall_task.RecallTaskModel.FetchOneUIPageOfTasksForDomain(
user_domain=view_utils.GetUserDomain(_SafelyGetCurrentUserEmail()),
urlsafe_cursor=previous_cursor))
self._WriteTemplate(template_file='history', tpl_tasks=results,
tpl_previous_cursor=previous_cursor, tpl_cursor=cursor,
tpl_more=more)
class LandingPageHandler(UIBasePageHandler):
"""Handle '/' to show default page."""
def get(self): # pylint: disable=g-bad-name
"""Handler for / get requests."""
_PreventUnauthorizedAccess()
self._WriteTemplate('landing')
class TaskDetailsPageHandler(UIBasePageHandler):
"""Handle '/task' requests to show task details.
This page will show model fields such as task_state and calculated items
such as 'elapsed time' for a single task.
"""
def get(self, task_key_urlsafe): # pylint: disable=g-bad-name
"""Handler for /task get requests.
Args:
task_key_urlsafe: String representation of task key safe for urls.
"""
_PreventUnauthorizedAccess()
self._WriteTemplate(
template_file='task',
tpl_task=recall_task.RecallTaskModel.FetchTaskFromSafeId(
user_domain=view_utils.GetUserDomain(_SafelyGetCurrentUserEmail()),
task_key_urlsafe=task_key_urlsafe))
class TaskProblemsPageHandler(UIBasePageHandler):
"""Handle '/task/problems' requests to show user details.
This page will show a list of errors encountered during a recall.
"""
def get(self, task_key_urlsafe): # pylint: disable=g-bad-name
"""Handler for /task/errors get requests.
Args:
task_key_urlsafe: String representation of task key safe for urls.
"""
_PreventUnauthorizedAccess()
previous_cursor = self.request.get('error_cursor')
results, cursor, more = (
error_reason.ErrorReasonModel.FetchOneUIPageOfErrorsForTask(
task_key_urlsafe=task_key_urlsafe,
urlsafe_cursor=previous_cursor))
self._WriteTemplate(template_file='task_error_reasons', tpl_errors=results,
tpl_previous_cursor=previous_cursor, tpl_cursor=cursor,
tpl_more=more, tpl_task_key_urlsafe=task_key_urlsafe)
class TaskReportPageHandler(UIBasePageHandler):
"""Handle '/task/report' requests to show user details.
This page will show summary results from a recall task including
categories of user_state with counts and user email lists.
"""
def get(self, task_key_urlsafe): # pylint: disable=g-bad-name
"""Handler for /task/report get requests.
Args:
task_key_urlsafe: String representation of task key safe for urls.
"""
_PreventUnauthorizedAccess()
self._WriteTemplate(
template_file='task_report',
tpl_task=recall_task.RecallTaskModel.FetchTaskFromSafeId(
user_domain=view_utils.GetUserDomain(_SafelyGetCurrentUserEmail()),
task_key_urlsafe=task_key_urlsafe),
tpl_user_states=domain_user.USER_STATES,
tpl_message_states=domain_user.MESSAGE_STATES,
tpl_task_key_urlsafe=task_key_urlsafe)
class TaskUsersPageHandler(UIBasePageHandler):
"""Handle '/task/users' requests to show user details.
This page will show full lists of users to compare against previous runs.
"""
def get(self, task_key_urlsafe): # pylint: disable=g-bad-name
"""Handler for /task/users/debug get requests.
Args:
task_key_urlsafe: String representation of task key safe for urls.
"""
_PreventUnauthorizedAccess()
previous_cursor = self.request.get('user_cursor')
results, cursor, more = (
domain_user.DomainUserToCheckModel.FetchOneUIPageOfUsersForTask(
task_key_urlsafe=task_key_urlsafe,
urlsafe_cursor=previous_cursor,
user_state_filters=self.request.params.getall('user_state'),
message_state_filters=self.request.params.getall('message_state')))
self._WriteTemplate(template_file='task_users', tpl_users=results,
tpl_previous_cursor=previous_cursor, tpl_cursor=cursor,
tpl_more=more, tpl_task_key_urlsafe=task_key_urlsafe)
| apache-2.0 | 1,862,183,678,027,718,700 | 36.70495 | 80 | 0.686308 | false |
globocom/database-as-a-service | dbaas/maintenance/migrations/0035_auto__add_field_databasemigrate_origin_environment.py | 1 | 51269 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
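    """Add the ``origin_environment`` foreign key to ``DatabaseMigrate``.

    Auto-generated with South's ``schemamigration`` command; the ``models``
    dictionary below is the frozen ORM snapshot taken at generation time.
    """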
def forwards(self, orm):
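        # Note: South stores the FK under its attname, so the actual database
        # column is created as ``origin_environment_id`` -- which is why
        # ``backwards`` below drops that column name rather than the field name.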
# Adding field 'DatabaseMigrate.origin_environment'
db.add_column(u'maintenance_databasemigrate', 'origin_environment',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['physical.Environment']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'DatabaseMigrate.origin_environment'
db.delete_column(u'maintenance_databasemigrate', 'origin_environment_id')
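
    # South's frozen ORM: a snapshot of every model (and its dependencies)
    # visible to this migration, so it runs against the schema definitions
    # that existed when the migration was generated, not the current models.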
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseconfiguressl': {
'Meta': {'object_name': 'DatabaseConfigureSSL'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'configure_ssl'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_configure_ssl'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasedestroy': {
'Meta': {'object_name': 'DatabaseDestroy'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_destroy'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_destroy'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasemigrate': {
'Meta': {'object_name': 'DatabaseMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['logical.Database']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'origin_environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Environment']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'source_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'target_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmigrate': {
'Meta': {'object_name': 'HostMigrate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database_migrate': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'hosts'", 'null': 'True', 'to': u"orm['maintenance.DatabaseMigrate']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_migrate'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'migrate'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_migrate'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'zone': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'disable_alarms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'relevance': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '1'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'ssl_configured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'total_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'physical.offering': {
'Meta': {'object_name': 'Offering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'offerings'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'stronger_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'main_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'weaker_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_setup_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'metric_collector': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | bsd-3-clause | 6,568,704,805,031,850,000 | 97.40691 | 227 | 0.566034 | false |
RiceMunk/omnifit | omnifit/spectrum/tests/test_spectrumplotting.py | 1 | 1494 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from astropy.tests.helper import pytest
import numpy as np
import os
from ..spectrum import *
import matplotlib.pyplot as plt
from ...tests.helpers import *
class TestSpectrumPlotting:
def test_plotbasic(self):
"""
Make sure that basic spectrum plotting works as expected
"""
testspec = generate_spectrum()
fig = plt.figure()
ax = fig.add_subplot(111)
testspec.plot(ax)
testspec.plot(ax,drawstyle='steps-mid')
plt.close()
def test_plotwrong(self):
"""
Make sure that plotting fails when it should
"""
testspec = generate_spectrum()
fig = plt.figure()
ax = fig.add_subplot(111)
with pytest.raises(Exception):
testspec.plot(ax,plotstyle='non-existent style')
with pytest.raises(Exception):
testspec.plot(ax,x='baselined')
def test_plotnk(self):
"""
Make sure that n and k spectrum plotting works as expected
"""
testspec = generate_cdespectrum()
fig = plt.figure()
ax1 = fig.add_subplot(211)
ax2 = fig.add_subplot(212)
fig = testspec.plotnk(ax1,ax2)
plt.close()
def test_plotabs(self):
"""
Make sure that OD spectrum plotting works as expected
"""
testspec = generate_absspectrum()
fig = plt.figure()
ax = fig.add_subplot(111)
testspec.plotod(ax,in_wl=False)
plt.close()
fig = plt.figure()
ax = fig.add_subplot(111)
testspec.plotod(ax,in_wl=True)
plt.close()
| bsd-3-clause | -2,019,736,266,411,463,200 | 27.188679 | 63 | 0.659304 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/v2016_04_30_preview/models/snapshot_update_py3.py | 1 | 2814 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource_update import ResourceUpdate
class SnapshotUpdate(ResourceUpdate):
"""Snapshot update resource.
:param tags: Resource tags
:type tags: dict[str, str]
:param account_type: the storage account type of the disk. Possible values
include: 'Standard_LRS', 'Premium_LRS'
:type account_type: str or
~azure.mgmt.compute.v2016_04_30_preview.models.StorageAccountTypes
:param os_type: the Operating System type. Possible values include:
'Windows', 'Linux'
:type os_type: str or
~azure.mgmt.compute.v2016_04_30_preview.models.OperatingSystemTypes
:param creation_data: disk source information. CreationData information
cannot be changed after the disk has been created.
:type creation_data:
~azure.mgmt.compute.v2016_04_30_preview.models.CreationData
:param disk_size_gb: If creationData.createOption is Empty, this field is
mandatory and it indicates the size of the VHD to create. If this field is
present for updates or creation with other options, it indicates a resize.
Resizes are only allowed if the disk is not attached to a running VM, and
can only increase the disk's size.
:type disk_size_gb: int
:param encryption_settings: Encryption settings for disk or snapshot
:type encryption_settings:
~azure.mgmt.compute.v2016_04_30_preview.models.EncryptionSettings
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'account_type': {'key': 'properties.accountType', 'type': 'StorageAccountTypes'},
'os_type': {'key': 'properties.osType', 'type': 'OperatingSystemTypes'},
'creation_data': {'key': 'properties.creationData', 'type': 'CreationData'},
'disk_size_gb': {'key': 'properties.diskSizeGB', 'type': 'int'},
'encryption_settings': {'key': 'properties.encryptionSettings', 'type': 'EncryptionSettings'},
}
def __init__(self, *, tags=None, account_type=None, os_type=None, creation_data=None, disk_size_gb: int=None, encryption_settings=None, **kwargs) -> None:
super(SnapshotUpdate, self).__init__(tags=tags, **kwargs)
self.account_type = account_type
self.os_type = os_type
self.creation_data = creation_data
self.disk_size_gb = disk_size_gb
self.encryption_settings = encryption_settings
| mit | -6,854,801,527,586,538,000 | 47.517241 | 158 | 0.658138 | false |
sistason/pa3 | src/pa3_frontend/pa3_django/pa3/migrations/0017_auto_20180413_0948.py | 1 | 1619 | # Generated by Django 2.0.4 on 2018-04-13 07:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pa3', '0016_auto_20180413_0948'),
]
operations = [
migrations.AlterField(
model_name='newestnumberbatch',
name='src',
field=models.CharField(choices=[('pa_10', 'H 10'), ('pa_13', 'H 13'), ('pa_23', 'H 23'), ('pa_02', 'H 02')], max_length=50),
),
migrations.AlterField(
model_name='statisticaldata',
name='src',
field=models.CharField(choices=[(['H 10'], ['H 10']), (['Schalter 1/2', 'Schalter 3/4', 'Schalter 5/6', 'Schalter 7/8/9', 'Schalter 10/11'], ['Schalter 1/2', 'Schalter 3/4', 'Schalter 5/6', 'Schalter 7/8/9', 'Schalter 10/11']), (['H 19', 'H 23', 'H 25'], ['H 19', 'H 23', 'H 25']), (['H 02'], ['H 02'])], max_length=50),
),
migrations.AlterField(
model_name='waitingnumber',
name='src',
field=models.CharField(choices=[(['H 10'], ['H 10']), (['Schalter 1/2', 'Schalter 3/4', 'Schalter 5/6', 'Schalter 7/8/9', 'Schalter 10/11'], ['Schalter 1/2', 'Schalter 3/4', 'Schalter 5/6', 'Schalter 7/8/9', 'Schalter 10/11']), (['H 19', 'H 23', 'H 25'], ['H 19', 'H 23', 'H 25']), (['H 02'], ['H 02'])], max_length=50),
),
migrations.AlterField(
model_name='waitingnumberbatch',
name='src',
field=models.CharField(choices=[('pa_10', 'H 10'), ('pa_13', 'H 13'), ('pa_23', 'H 23'), ('pa_02', 'H 02')], db_index=True, max_length=50),
),
]
| gpl-3.0 | -1,901,392,672,714,096,400 | 48.060606 | 332 | 0.521309 | false |
semussan/pythfinder | popup_menu.py | 1 | 23014 | """popup_menu.py - A low-fuss, infinitely nested popup menu with simple blocking
behavior, and more advanced non-blocking behavior.
Classes:
PopupMenu -> A blocking menu.
NonBlockingPopupMenu -> A non-blocking menu.
Menu -> The graphics and geometry for a menu panel. Note: You'll typically
want to use PopupMenu or NonBlockingPopupMenu instead.
MenuItem -> The graphics and geometry for a menu item. Note: You'll
typically want to use PopupMenu or NonBlockingPopupMenu instead.
SubmenuLabel -> A helper class for strong-typing of submenu labels. Note:
You'll typically want to use PopupMenu or NonBlockingPopupMenu instead.
Module data (can be changed after importing the module):
font -> pygame.font.Font object used to render menus.
bg_color -> pygame.Color object used for the menu panel background.
hi_color -> pygame.Color object used for the highlighted item background.
text_color -> pygame.Color object used for the text.
glint_color -> pygame.Color object used for bright beveled edge.
shadow_color -> pygame.Color object used for dark beveled edge.
margin -> int used for menu and item padding.
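Example changing module data (a sketch: the names are the module attributes
listed above; any pygame font and colors you prefer will do):
import pygame
import popup_menu
popup_menu.font = pygame.font.SysFont(None, 24)
popup_menu.bg_color = pygame.Color('white')
popup_menu.margin = 4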
Example blocking menu:
menu_data = ['Main', 'Item 0', ['Submenu', 'Item 0'], 'Quit']
while 1:
# game stuff...
for e in pygame.event.get():
if e.type == MOUSEBUTTONUP and e.button == 3:
PopupMenu(menu_data)
elif e.type == USEREVENT and e.code == 'MENU':
print 'menu event: %s.%d: %s' % (e.name,e.item_id,e.text)
if (e.name,e.text) == ('Main','Quit'):
quit()
else:
# handle all game events normally
pass
Example non-blocking menu:
menu_data = ['Main', 'Item 0', ['Submenu', 'Item 0'], 'Quit']
menu = NonBlockingPopupMenu(menu_data)
while 1:
# update game
# clear screen
# draw game
menu.draw()
# update/flip screen
for e in menu.handle_events(pygame.event.get()):
if e.type == MOUSEBUTTONUP and e.button == 3:
menu.show()
elif e.type == USEREVENT and e.code == 'MENU':
if e.name is None:
menu.hide()
elif (e.name,e.text) == ('Main','Quit'):
quit()
else:
# handle all game events normally
pass
"""
# PopupMenu
# Version: v1.2.1
# Description: A low-fuss, infinitely nested popup menu for pygame.
# Author: Gummbum
# Home: http://code.google.com/p/simple-pygame-menu/
# Source: See home.
import pygame
from pygame import Color, Rect, MOUSEBUTTONDOWN, MOUSEBUTTONUP, MOUSEMOTION, USEREVENT
# pygame must be initialized before we can create a Font.
pygame.init()
try:
# "data.py" is a skellington-ism. The included custom version supports
# subdirectories by type.
import data
except ImportError:
print 'warning: no data.py in module path: proceeding without it'
finally:
try:
font = pygame.font.Font(data.filepath('font', 'Vera.ttf'), 14)
    except Exception:
print 'warning: cannot load font Vera.ttf: using system default'
font = pygame.font.SysFont(None, 20)
bg_color = Color('grey')
hi_color = Color(155,155,155)
text_color = Color('black')
glint_color = Color(220,220,220)
shadow_color = Color(105,105,105)
margin = 2
class PopupMenu(object):
"""popup_menu.PopupMenu
    PopupMenu(data, pos=None, block=True) : return menu
data -> list; the list of strings and nested lists.
pos -> tuple; the xy screen coordinate for the topleft of the main menu; if
None, the mouse position is used.
block -> boolean; when True popup_menu will run its own event loop, blocking
        your main loop until it exits; when False popup_menu.handle_events() will
intercept events it cares about and return unhandled events to the
caller.
Note: For a non-blocking menu, use the NonBlockingPopupMenu instead. This
class supports non-blocking, but it is more cumbersome to use than the
NonBlockingPopupMenu class.
The first string in the data list is taken as the menu title. The remaining
strings are menu items. A nested list becomes a submenu. Submenu lists must
also contain strings for menu title and menu items. Submenus can be
theoretically infinitely nested.
The menu runs a mini event loop. This will block the caller until it exits.
Upon exiting, the screen is restored to its prior state.
Left-clicking outside the topmost menu will quit the entire menu. Right-
clicking anywhere will close the topmost submenu; if only the main menu
remains the menu will exit. Left-clicking a menu item in the topmost menu
will post a USEREVENT for the caller to process.
The USEREVENT will have attributes: code='MENU', name=popup_menu.name,
    item_id=menu_item.item_id, text=menu_item.text. name is the first element
    in a menu data list. item_id corresponds to the Nth element in a menu data
    list, incremented from 0; submenu items count as one item_id even though
    they are never posted in an event. text is the string value of the Nth
    element in the menu data list. Thus, combinations of name and item_id or
    name and text can be used to uniquely identify menu selections.
Example menu data and resulting event data:
    ['Main',                            # main menu title
     'Item 0',                          # name='Main', item_id=0, text='Item 0'
     ['Submenu',                        # submenu title
      'Item 0',                         # name='Submenu', item_id=0, text='Item 0'
      'Item 1',                         # name='Submenu', item_id=1, text='Item 1'
      ],
     'Item 2',                          # name='Main', item_id=2, text='Item 2'
    ]
High-level steps for a blocking menu:
1. Fashion a nested list of strings for the PopupMenu constructor.
2. Upon creation, the menu runs its own loop.
3. Upon exit, control is returned to the caller.
4. Handle the resulting USEREVENT event in the caller where
event.name=='your menu title', event.item_id holds the selected item
number, and event.text holds the item label.
High-level steps for a non-blocking menu:
Note: This usage exists to support the NonBlockingPopupMenu class and
custom non-blocking implementations; for typical use NonBlockingPopupMenu
is recommended.
1. Fashion a nested list of strings for the PopupMenu constructor.
2. Store the menu object in a variable.
3. Devise a means for the main loop to choose whether to draw the menu and pass
it events.
4. Call menu.draw() to draw the menu.
5. Pass pygame events to menu.handle_events() and process the unhandled events
that are returned as you would pygame's events.
6. Upon menu exit, one or two USEREVENTs are posted via pygame. Retrieve
them and recognize they are menu events (event.code=='MENU').
a. The menu-exit event signals the main loop it has exited, with or
without a menu selection. Recognize this by event.name==None. Upon
receiving this event the main loop should stop using the menu's
           draw() and handle_events() (until the next time it wants to post the
menu to the user).
b. The menu-selection event signals the main loop that a menu item was
selected. Recognize this by event.name=='your menu title'.
event.menu_id holds the selected item number, and event.text holds
the item label.
7. Destroying the menu is not necessary. But creating and destroying it may
be a convenient means to manage the menu state (i.e. to post it or not).
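    Note (a sketch of behavior specific to this implementation): when
    block=True, the constructor also stores the outcome on the instance, so
    the caller can read it directly after construction:
    menu = PopupMenu(menu_data)
    # menu.selection is a list of (menu, menu_item) picks, or an empty
    # list if the menu was dismissed without a selection.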
"""
def __init__(self, data, pos=None, block=True):
# list of open Menu() objects
self.menus = []
# key to main menu data
self.top = data[0]
# dict of menus, keyed by menu title
self.data = {self.top:[]}
# walk the nested list, creating the data dict for easy lookup
self._walk(self.top, list(data))
# make the main menu
self._make_menu(self.data[self.top], pos)
# Save the display surface; use to clear screen
self.screen = pygame.display.get_surface()
self.clear_screen = self.screen.copy()
        # In blocking mode run the menu's event loop now; the outcome is
        # stored in self.selection (see handle_events for the return values).
        self.selection = None
        if block:
            self.selection = self._run(block)
def handle_events(self, events, block=False):
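        """Process a batch of pygame events for the menu.
        Return protocol (as implemented here): a list [(menu, menu_item)]
        when an item is picked, an empty list when the menu is dismissed
        without a pick, and None while the menu remains open.
        """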
unhandled = []
for e in events:
if e.type == MOUSEBUTTONUP:
if e.button == 1:
menu = self.menus[-1]
item = menu.menu_item
if item:
if isinstance(item.text, SubmenuLabel):
# open submenu
key = item.text[:-3]
self._make_menu(self.data[key])
else:
# pick item (post event)
self._quit(block)
return [(menu, item)]
else:
# close menu
self._quit(block)
return []
elif e.button == 3:
# close menu
if len(self.menus) == 1:
self._quit(block)
return []
else:
self._del_menu()
elif e.type == MOUSEMOTION:
self.mouse_pos = e.pos
self.menus[-1].check_collision(self.mouse_pos)
unhandled.append(e)
elif e.type == MOUSEBUTTONDOWN:
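                # ignore; menu actions trigger on button-up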
pass
else:
unhandled.append(e)
return None
def draw(self):
for menu in self.menus:
menu.draw()
def _pick_event(self, menu, item):
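        # Build the 'menu pick' USEREVENT described in the class docstring.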
event = pygame.event.Event(USEREVENT, code='MENU',
name=menu.name, item_id=item.item_id, text=item.text)
return event
def _quit_event(self):
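        # Build the 'menu exit' USEREVENT described in the class docstring.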
event = pygame.event.Event(USEREVENT, code='MENU',
name=None, item_id=-1, text='_MENU_EXIT_')
return event
    def _run(self, block=True):
        clock = pygame.time.Clock()
        self.mouse_pos = pygame.mouse.get_pos()
        self.running = True
        while self.running:
            self.screen.blit(self.clear_screen, (0,0))
            self.draw()
            pygame.display.flip()
            # Pass block through so _quit() can restore the screen when a
            # blocking menu exits.
            ret = self.handle_events(pygame.event.get(), block)
            if ret is not None:
                return ret
            clock.tick(60)
def _walk(self, key, data):
# Recursively walk the nested data lists, building the data dict for
# easy lookup.
for i,ent in enumerate(data):
if isinstance(ent, str):
self.data[key].append(ent)
else:
ent = list(ent)
new_key = ent[0]
ent[0] = SubmenuLabel(new_key)
self.data[key].append(ent[0])
self.data[new_key] = []
self._walk(new_key, list(ent))
def _make_menu(self, data, pos=None):
# Make a menu from data list and add it to the menu stack.
if self.menus:
# position submenu relative to parent
parent = self.menus[-1]
rect = parent.menu_item.rect
pos = rect.right,rect.top
# unset the parent's menu_item (for appearance)
parent.menu_item = None
else:
# position main menu at mouse
if pos is None:
pos = pygame.mouse.get_pos()
name = data[0]
items = data[1:]
self.menus.append(Menu(pos, name, items))
def _del_menu(self):
# Remove the topmost menu from the menu stack.
self.menus.pop()
    def _quit(self, block):
        # Post the menu-exit event and put the original screen contents back.
        pygame.event.post(self._quit_event())
        if block:
            self.screen.blit(self.clear_screen, (0,0))
            pygame.display.flip()
        self.running = False
class NonBlockingPopupMenu(PopupMenu):
"""popup_menu.NonBlockingPopupMenu
NonBlockingPopupMenu(data, pos=None, show=False) : return menu
data -> list; the list of strings and nested lists.
pos -> tuple; the xy screen coordinate for the topleft of the main menu; if
None, the mouse position is used.
show -> boolean; make the menu visible in the constructor.
visible is a read-write property that sets and gets the boolean value
representing the state. The show() and hide() methods are equivalent
alternatives to using the property.
Note that the constructor does not copy the data argument. Changes to the
contents will result in changes to the menus once show() is called or
visible is set to True. In addition, data can be entirely replaced by
setting menu.init_data.
High-level steps for a non-blocking menu:
1. Fashion a nested list of strings for the NonBlockingPopupMenu constructor.
2. Store the menu object in a variable.
3. Construct the NonBlockingPopupMenu object.
4. Detect the condition that triggers the menu to post, and call menu.show()
(or set menu.visible=True).
5. Call menu.draw() to draw the menu. If it is visible, it will be drawn.
6. Pass pygame events to menu.handle_events() and process the unhandled events
that are returned as you would pygame's events. If the menu is not visible
the method will immediately return the list passed in, unchanged.
7. Upon menu exit, one or two USEREVENTs are posted via pygame. Retrieve them
and recognize they are menu events (i.e., event.code=='MENU').
a. A menu-exit event signals the menu has detected an exit condition, which
        may or may not be accompanied by a menu selection. Recognize this by
        event.name==None or event.item_id==-1. Upon receiving this event the
main loop should call menu.hide() (or set menu.visible=False).
b. A menu-selection event signals the main loop that a menu item was
        selected. Recognize this by event.name=='your menu title'. event.item_id
holds the selected item number, and event.text holds the item label.
8. Destroying the menu is optional.
9. Assigning to menu.init_data, or changing its contents or that of the
original list variable, will result in a modified menu the next time
menu.show() is called (or menu.visible is set to True).
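
    Example (abridged sketch of the event-loop wiring; the __main__ block at
    the bottom of this module is a complete, runnable version):

        menu = NonBlockingPopupMenu(menu_data)
        while running:
            for e in menu.handle_events(pygame.event.get()):
                ...  # ordinary pygame events the menu did not consume
            screen.fill(bg_color)
            menu.draw()  # draws nothing while menu.visible is False
            pygame.display.flip()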
"""
def __init__(self, data, pos=None, show=False):
self.init_data = data
self._init_pos = pos
if show:
self.show()
else:
self.hide()
def show(self):
"""generate the menu geometry and graphics, and makes the menu visible"""
super(NonBlockingPopupMenu, self).__init__(
self.init_data, pos=self._init_pos, block=False)
self._show = True
def hide(self):
"""destroy the menu geometry and grpahics, and hides the menu"""
if hasattr(self, 'menus'):
del self.menus[:]
self._show = False
@property
def visible(self):
return self._show
@visible.setter
def visible(self, val):
if val:
self.show()
else:
self.hide()
def handle_events(self, events):
"""preemptively return if the menu is not visible; else, call the
superclass's method.
"""
if self._show:
return super(NonBlockingPopupMenu, self).handle_events(events)
else:
return events
def draw(self):
"""preemptively return if the menu is not visible; else, call the
superclass's method.
"""
if self._show:
super(NonBlockingPopupMenu, self).draw()
class SubmenuLabel(str):
"""popup_menu.SubmenuLabel
SubmenuLabel(s) : return label
s -> str; the label text
This is a helper class for strong-typing of submenu labels.
This class is not intended to be used directly. See PopupMenu or
NonBlockingPopupMenu.
"""
def __new__(cls, s):
return str.__new__(cls, s+'...')
class MenuItem(object):
"""popup_menu.MenuItem
MenuItem(text, item_id) : return menu_item
text -> str; the display text.
item_id -> int; the numeric ID; also the item_id attribute returned in the
pygame event.
This class is not intended to be used directly. Use PopupMenu or
NonBlockingPopupMenu instead, unless designing your own subclass.
"""
def __init__(self, text, item_id):
self.text = text
self.item_id = item_id
self.image = font.render(text, True, text_color)
self.rect = self.image.get_rect()
class Menu(object):
"""popup_menu.Menu
Menu(pos, name, items) : return menu
pos -> (x,y); topleft coordinates of the menu.
name -> str; the name of the menu.
items -> list; a list containing strings for menu items labels.
This class is not intended to be used directly. Use PopupMenu or
NonBlockingPopupMenu instead, unless designing your own subclass.
"""
def __init__(self, pos, name, items):
screen = pygame.display.get_surface()
screen_rect = screen.get_rect()
self.name = name
self.items = []
self.menu_item = None
# Make the frame rect
x,y = pos
self.rect = Rect(x,y,0,0)
self.rect.width += margin * 2
self.rect.height += margin * 2
# Make the title image and rect, and grow the frame rect
self.title_image = font.render(name, True, text_color)
self.title_rect = self.title_image.get_rect(topleft=(x+margin,y+margin))
self.rect.width = margin*2 + self.title_rect.width
self.rect.height = margin + self.title_rect.height
# Make the item highlight rect
self.hi_rect = Rect(0,0,0,0)
# Make menu items
n = 0
for item in items:
menu_item = MenuItem(item, n)
self.items.append(menu_item)
self.rect.width = max(self.rect.width, menu_item.rect.width+margin*2)
self.rect.height += menu_item.rect.height + margin
n += 1
self.rect.height += margin
# Position menu fully within view
        if not screen_rect.contains(self.rect):
            self.rect.clamp_ip(screen_rect)
self.title_rect.top = self.rect.top + margin
self.title_rect.left = self.rect.left + margin
# Position menu items within menu frame
y = self.title_rect.bottom + margin
for item in self.items:
item.rect.x = self.rect.x + margin
item.rect.y = y
y = item.rect.bottom + margin
item.rect.width = self.rect.width - margin*2
# Calculate highlight rect's left-alignment and size
self.hi_rect.left = menu_item.rect.left
self.hi_rect.width = self.rect.width - margin*2
self.hi_rect.height = menu_item.rect.height
# Create the menu frame and highlight frame images
self.bg_image = pygame.surface.Surface(self.rect.size)
self.hi_image = pygame.surface.Surface(self.hi_rect.size)
self.bg_image.fill(bg_color)
self.hi_image.fill(hi_color)
# Draw menu border
rect = self.bg_image.get_rect()
pygame.draw.rect(self.bg_image, glint_color, rect, 1)
t,l,b,r = rect.top,rect.left,rect.bottom,rect.right
pygame.draw.line(self.bg_image, shadow_color, (l,b-1), (r,b-1), 1)
pygame.draw.line(self.bg_image, shadow_color, (r-1,t), (r-1,b), 1)
# Draw title divider in menu frame
left = margin
right = self.rect.width - margin*2
y = self.title_rect.height + 1
pygame.draw.line(self.bg_image, shadow_color, (left,y), (right,y))
def draw(self):
# Draw the menu on the main display.
screen = pygame.display.get_surface()
screen.blit(self.bg_image, self.rect)
screen.blit(self.title_image, self.title_rect)
for item in self.items:
if item is self.menu_item:
self.hi_rect.top = item.rect.top
screen.blit(self.hi_image, self.hi_rect)
screen.blit(item.image, item.rect)
def check_collision(self, mouse_pos):
# Set self.menu_item if the mouse is hovering over one.
self.menu_item = None
if self.rect.collidepoint(mouse_pos):
for item in self.items:
if item.rect.collidepoint(mouse_pos):
self.menu_item = item
break
if __name__ == '__main__':
# Test non-blocking.
screen = pygame.display.set_mode((600,600), RESIZABLE)
clock = pygame.time.Clock()
menu_data = (
'Main',
'Item 0',
'Item 1',
(
'More Things',
'Item 0',
'Item 1',
),
'Quit',
)
class Cursor(object):
def __init__(self):
self.image = pygame.surface.Surface((13,13))
pygame.draw.line(self.image, Color('yellow'), (6,0), (6,12), 5)
pygame.draw.line(self.image, Color('yellow'), (0,6), (12,6), 5)
pygame.draw.line(self.image, Color(0,0,99), (6,0), (6,12), 3)
pygame.draw.line(self.image, Color(0,0,99), (0,6), (12,6), 3)
            pygame.draw.line(self.image, Color('black'), (6,0), (6,12), 1)
pygame.draw.line(self.image, Color('black'), (0,6), (12,6), 1)
self.image.set_colorkey(Color('black'))
self.rect = self.image.get_rect(center=(0,0))
pygame.mouse.set_visible(False)
def draw(self):
pygame.display.get_surface().blit(self.image, self.rect)
cursor = Cursor()
def handle_menu(e):
print 'menu event: %s.%d: %s' % (e.name,e.item_id,e.text)
if e.name == 'Main':
if e.text == 'Quit':
quit()
menu = NonBlockingPopupMenu(menu_data)
while 1:
clock.tick(60)
for e in menu.handle_events(pygame.event.get()):
if e.type == MOUSEBUTTONUP:
menu.show()
elif e.type == MOUSEMOTION:
cursor.rect.center = e.pos
elif e.type == USEREVENT:
if e.code == 'MENU':
if e.name is None:
menu.hide()
else:
handle_menu(e)
screen.fill(Color('darkblue'))
menu.draw()
cursor.draw()
pygame.display.flip()
# sunhwan/NAMD-replica: wham/myptwham_pt_grand.py
from StringIO import StringIO
import sys, os
import numpy as np
os.environ["CC"] = "gcc-4.9"
os.environ["CXX"] = "g++-4.9"
debug = False
n_max = False
if len(sys.argv) > 1: n_max = int(sys.argv[1])
input = sys.stdin
pmf_filename = input.readline().strip() # stores pmf
rho_filename = input.readline().strip() # stores average density
bia_filename = input.readline().strip() # stores biased distribution
fff_filename = input.readline().strip() # stores F(i)
temperature = float(input.readline().strip())
xmin, xmax, deltax, is_x_periodic = map(float, input.readline().strip().split())
umin, umax, deltau, ntemp = map(float, input.readline().strip().split())
vmin, vmax, deltav = map(float, input.readline().strip().split())
nwin, niter, fifreq = map(int, input.readline().strip().split())
tol = float(input.readline().strip())
is_x_periodic = bool(is_x_periodic)
nbinx = int((xmax - xmin) / deltax + 0.5)
nbinu = int(abs(umax - umin) / deltau + 0.5)
nbinv = int(abs(vmax - vmin) / deltav + 0.5)
ntemp = int(ntemp)
kb = 0.0019872
kbt = kb * temperature
beta0 = 1.0/kbt
if debug:
temperature = 283.15
kbt = kb * temperature
beta0 = 1.0/kbt
k1 = np.zeros(nwin)
cx1 = np.zeros(nwin)
temp = np.zeros(ntemp)
beta = np.zeros((nwin, ntemp))
tseries = np.empty(nwin, dtype='S')
hist = np.zeros((nwin, ntemp, nbinx, nbinu, nbinv), dtype=np.int)
nb_data = np.zeros((nwin, ntemp), dtype=np.int)
x1 = lambda j: xmin + (j+1)*deltax - 0.5*deltax
u1 = lambda j: (j+1)*deltau - 0.5*deltau
v1 = lambda j: (j+1)*deltav - 0.5*deltav
energy = np.zeros((nbinx, nbinu))
press = 1.01325 * 1.4383 * 10**-5
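# 1 atm in kcal/(mol*A^3): 1.01325 bar times ~1.4383e-5 kcal/(mol*A^3) per bar
# (assumed interpretation of this conversion constant).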
data_range = [[None, None], [None, None], [None, None]]
for j in range(ntemp):
for i in range(nwin):
fname = input.readline().strip()
tseries[i] = fname
line = input.readline().strip()
cx1[i], k1[i], temp[j] = map(float, line.split()[:3])
beta[i,j] = 1 / (kb * temp[j])
def mkhist(fname, xmin, xmax, ymin, ymax, deltax, deltay, ihist, jtemp, k, cx):
xdata = []
udata = []
vdata = []
count = 0
for line in open(fname):
time, x, u, v = map(float, line.strip().split()[:4])
xdata.append(x)
udata.append(u)
vdata.append(v)
if debug and len(xdata) > 10000: break
if n_max and len(xdata) > n_max: break
x = np.array(xdata)
u = np.array(udata)
v = np.array(vdata)
u = u - k*(x-cx)**2 #+ press * v
xbins = [xmin+i*deltax for i in range(nbinx+1)]
ubins = [umin+i*deltau for i in range(nbinu+1)]
vbins = [vmin+i*deltav for i in range(nbinv+1)]
data = np.array((x,u,v)).transpose()
hist[ihist, jtemp], edges = np.histogramdd(data, bins=(xbins, ubins, vbins), range=((xmin, xmax), (umin, umax), (vmin, vmax)))
nb_data[ihist, jtemp] = np.sum(hist[ihist,jtemp])
if data_range[0][0] is None or np.min(x) < data_range[0][0]: data_range[0][0] = np.min(x)
if data_range[0][1] is None or np.max(x) > data_range[0][1]: data_range[0][1] = np.max(x)
if data_range[1][0] is None or np.min(u) < data_range[1][0]: data_range[1][0] = np.min(u)
if data_range[1][1] is None or np.max(u) > data_range[1][1]: data_range[1][1] = np.max(u)
if data_range[2][0] is None or np.min(v) < data_range[2][0]: data_range[2][0] = np.min(v)
if data_range[2][1] is None or np.max(v) > data_range[2][1]: data_range[2][1] = np.max(v)
xedges = edges[0]
uedges = edges[1]
print 'statistics for timeseries # ', ihist
print 'minx:', '%8.3f' % np.min(x), 'maxx:', '%8.3f' % np.max(x)
print 'average x', '%8.3f' % np.average(x), 'rms x', '%8.3f' % np.std(x)
print 'minu:', '%8.3f' % np.min(u), 'maxu:', '%8.3f' % np.max(u)
print 'average u', '%8.3f' % np.average(u), 'rms u', '%8.3f' % np.std(u)
print 'statistics for histogram # ', ihist
print int(np.sum(hist[ihist,jtemp])), 'points in the histogram x'
print 'average x', '%8.3f' % (np.sum([hist[ihist,jtemp,i,:]*(xedges[i]+xedges[i+1])/2 for i in range(nbinx)])/np.sum(hist[ihist,jtemp]))
print 'average u', '%8.3f' % (np.sum([hist[ihist,jtemp,:,i]*(uedges[i]+uedges[i+1])/2 for i in range(nbinu)])/np.sum(hist[ihist,jtemp]))
print
mkhist(fname, xmin, xmax, umin, umax, deltax, deltau, i, j, k1[i], cx1[i])
print 'minx:', '%8.3f' % data_range[0][0], 'maxx:', '%8.3f' % data_range[0][1]
print 'minu:', '%8.3f' % data_range[1][0], 'maxu:', '%8.3f' % data_range[1][1]
print 'minv:', '%8.3f' % data_range[2][0], 'maxu:', '%8.3f' % data_range[2][1]
print hist.shape
# write biased distribution
f = open(bia_filename, 'w')
for j in range(nbinx):
for k in range(nbinu):
f.write("%8d\n" % np.sum(hist[:,:,j,k]))
# iterate wham equation to unbias and recombine the histogram
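# For reference, the loop below iterates the WHAM self-consistency equations
# (notation matched to the arrays built next; W = umbrella bias, U = unbiased
# potential energy, V = pV term):
#   rho(x,u,v) = sum_ij h_ij(x,u,v)
#                / sum_ij N_ij exp(F_ij - beta_ij (W+U+V) + beta0 (U+V))
#   exp(-F_ij) = sum_{x,u,v} rho(x,u,v) exp(-beta_ij (W+U+V) + beta0 (U+V))
# iterated until the per-window offsets F_ij change by less than tol.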
TOP = np.zeros((nbinx, nbinu, nbinv), dtype=np.int32)
BOT = np.zeros((nbinx, nbinu, nbinv))
W1 = np.zeros((nwin, ntemp, nbinx))
U1 = np.zeros((nwin, ntemp, nbinu))
V1 = np.zeros((nwin, ntemp, nbinv))
for i in range(nwin):
for j in range(ntemp):
for k in range(nbinx):
W1[i,j,k] = k1[i]*(x1(k) - cx1[i])**2
for l in range(nbinu):
U1[i,j,l] = u1(l)
for m in range(nbinv):
V1[i,j,m] = v1(m) * press
for k in range(nbinx):
for l in range(nbinu):
for m in range(nbinv):
TOP[k,l,m] = np.sum(hist[:,:,k,l,m])
np.set_printoptions(linewidth=200)
from scipy import weave
from scipy.weave import converters
def wham2d(nb_data, TOP, nbinx, nbinu, nbinv, W1, V1, U1, beta, beta0, F=None):
icycle = 1
rho = np.zeros((nbinx, nbinu, nbinv), np.double)
if F is None: F = np.zeros((nwin, ntemp))
F2 = np.zeros((nwin, ntemp), np.double)
while icycle < niter:
code_pragma = """
double beta1;
beta1 = beta0;
#pragma omp parallel num_threads(nthreads)
{
#pragma omp for collapse(3)
for (int k=0; k<nbinx; k++) {
for (int l=0; l<nbinu; l++) {
for (int m=0; m<nbinv; m++) {
double BOT = 0.0;
for (int i=0; i<nwin; i++) {
for (int j=0; j<ntemp; j++) {
BOT += nb_data(i,j)*exp(F(i,j)-beta(i,j)*(W1(i,j,k)+U1(i,j,l)+V1(i,j,m)) +beta1*(U1(i,j,l)+V1(i,j,m)));
}
}
if (BOT < 1e-100 || TOP(k,l,m) == 0) continue;
rho(k,l,m) = TOP(k,l,m) / BOT;
}
}
}
#pragma omp for collapse(2)
for (int i=0; i<nwin; i++) {
for (int j=0; j<ntemp; j++) {
for (int k=0; k<nbinx; k++) {
for (int l=0; l<nbinu; l++) {
for (int m=0; m<nbinv; m++) {
F2(i,j) += rho(k,l,m)*exp(-beta(i,j)*(W1(i,j,k)+U1(i,j,l)+V1(i,j,m)) + beta1*(U1(i,j,l)+V1(i,j,m)));
}
}
}
}
}
}
"""
nthreads = 4
weave.inline(code_pragma, ['F', 'F2', 'rho', 'nb_data', 'beta', 'W1', 'U1', 'V1', 'beta0', 'TOP', 'nbinx', 'nbinu', 'nbinv', 'nwin', 'ntemp', 'nthreads'], type_converters=converters.blitz, extra_compile_args=['-O3 -fopenmp'], extra_link_args=['-O3 -fopenmp'], headers=['<omp.h>'])#, library_dirs=['/Users/sunhwan/local/python/lib'])
converged = True
F2 = -np.log(F2)
        F2 = F2 - np.min(F2)
diff = np.max(np.abs(F2 - F))
if diff > tol: converged = False
print 'round = ', icycle, 'diff = ', diff
icycle += 1
if ( fifreq != 0 and icycle % fifreq == 0 ) or ( icycle == niter or converged ):
print F2
#open(fff_filename, 'w').write("%8i %s\n" % (icycle, " ".join(["%8.3f" % f for f in F2])))
if icycle == niter or converged: break
F = F2
F2 = np.zeros((nwin, ntemp))
return F2, rho
F = np.zeros((nwin, ntemp))
for i in range(ntemp):
temperature = temp[i]
kbt = kb * temperature
beta0 = 1.0/kbt
fff = "%s.%d" % (fff_filename, i)
if i == 0 and os.path.exists(fff):
F = np.loadtxt(fff)
F, rho = wham2d(nb_data, TOP, nbinx, nbinu, nbinv, W1, V1, U1, beta, beta0, F)
np.savetxt(fff, F)
# jacobian
for j in range(nbinx):
rho[j] = rho[j] / x1(j)**2
# average energy
avgur = np.zeros(nbinx)
avgur2 = np.zeros(nbinx)
avgvr = np.zeros(nbinx)
rho = rho / np.sum(rho)
for k in range(nbinx):
for l in range(nbinu):
for m in range(nbinv):
if not (TOP[k,l,m] > 0): continue
avgur[k] += rho[k,l,m]/np.sum(rho[k]) * u1(l)
avgur2[k] += rho[k,l,m]/np.sum(rho[k]) * u1(l) * u1(l)
avgvr[k] += rho[k,l,m]/np.sum(rho[k]) * v1(m)
# find maximum rho
rho = np.sum(rho, axis=(1,2))
jmin = np.argmax(rho)
rhomax = rho[jmin]
#print 'maximum density at: x = ', x1(jmin)
x0 = int(( 10.55 - xmin ) / deltax)
rhomax = np.sum(rho[x0-5:x0+5])/10
avgu = np.sum(avgur[nbinx-10:])/10
avgv = np.sum(avgvr[nbinx-10:])/10
#cv = ( avgur2 - avgur**2 ) / kbt / temperature
#avgcv = np.average(cv)
print temperature, avgu, avgv
# make PMF from the rho
np.seterr(divide='ignore')
pmf = -kbt * np.log(rho/rhomax)
open("%s.%d" % (pmf_filename, i), 'w').write("\n".join(["%8.3f %12.8f %12.8f %12.8f" % (x1(j), pmf[j], avgvr[j]-avgv, avgur[j]-avgu) for j in range(nbinx)]))
open("%s.%d" % (rho_filename, i), 'w').write("\n".join(["%8.3f %12.8f" % (x1(j), rho[j]) for j in range(nbinx)]))
# zackzachariah/feelslike: weather.py
# Created 2014 by Zack Sheppard. Licensed under the MIT License (see LICENSE file)
"""
Main handlers for the two pages that make up the clear-weather app
"""
from flask import Flask, request, url_for
import api_endpoint, os, web_endpoints
app = Flask(__name__)
if (not app.debug):
import logging
from logging import StreamHandler
app.logger.setLevel(logging.INFO)
app.logger.addHandler(StreamHandler())
def path_to_style(lessPath):
return url_for('static', filename='styles/css/' + lessPath + '.css')
def path_to_image(imagePath):
return url_for('static', filename='img/' + imagePath)
def path_to_script(scriptPath):
return url_for('static', filename='scripts/' + scriptPath + '.js')
def google_analytics():
return os.environ.get('GOOGLE_ANALYTICS', None)
app.jinja_env.globals.update(path_to_style = path_to_style)
app.jinja_env.globals.update(path_to_image = path_to_image)
app.jinja_env.globals.update(path_to_script = path_to_script)
app.jinja_env.globals.update(google_analytics = google_analytics)
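
# The helpers registered above become callable inside Jinja templates, e.g.
# (illustrative template usage, not a real template in this repo):
#   <link rel="stylesheet" href="{{ path_to_style('main') }}">
#   <script src="{{ path_to_script('app') }}"></script>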
@app.before_request
def before_request():
app.logger.info('Handling base: ' + request.base_url)
web_endpoints.setupWebRoutes(app)
api_endpoint.setupApiRoute(app)
# unidesigner/unidesign: spatial/container.py
# TODO: add Group and/or Selection
class Region(object):
""" Base class for a spatial Region container
A Region can contain simple and advanced geometry.
A Region can be thought of as a 3D analogy to a sheet of
paper in 2D. A Region defines its own coordinate system,
dimension and resolution.
"""
def __init__(self, **attr):
""" Initialize a region
Parameters
----------
        dimension : array-like, integer
The number of units in each spatial direction
e.g. [1000,2000,400]
resolution : array-like, double
The resolution in each spatial direction
e.g. [2.5, 3.0, 10.23]
resolution_unit : array-like
The unit of resolution in each spatial direction
e.g. ['nm', 'nm', 'nm']
origo : array-like
The locus of the origo of the coordinate system
XXX: in relation to some global coordinate system (?)
this could be implied in the affine
axes_orientation : array-like
The orthogonal orientation of the x-, y- and z-axes
of a cartesian coordinate system as a 3x3 array.
coordinate_system : ['left-handed', 'right-handed']
(default='left-handed')
affine : array-like
origo and axes_orientation can be grouped in a 4x4 affine
array. TODO: correct?
extension : ['bounded', 'unbounded']
Defining a dimension implies a bounded Region
name : string, optional (default='')
An optional name for the Region.
attr : keyword arguments, optional (default=no attributes)
            Attributes to add to the Region as key-value pairs.
"""
pass
def __str__(self):
"""Return the Region name.
Returns
-------
name : string
The name of the Region.
"""
        return self.name
# valmynd/MediaFetcher: src/plugins/youtube_dl/youtube_dl/downloader/common.py
from __future__ import division, unicode_literals
import os
import re
import sys
import time
import random
from ..compat import compat_os_name
from ..utils import (
decodeArgument,
encodeFilename,
error_to_compat_str,
format_bytes,
shell_quote,
timeconvert,
)
class FileDownloader(object):
"""File Downloader class.
File downloader objects are the ones responsible of downloading the
actual video file and writing it to disk.
File downloaders accept a lot of parameters. In order not to saturate
the object constructor with arguments, it receives a dictionary of
options instead.
Available options:
verbose: Print additional info to stdout.
quiet: Do not print messages to stdout.
ratelimit: Download speed limit, in bytes/sec.
retries: Number of times to retry for HTTP error 5xx
buffersize: Size of download buffer in bytes.
noresizebuffer: Do not automatically resize the download buffer.
continuedl: Try to continue downloads if possible.
noprogress: Do not print the progress bar.
logtostderr: Log messages to stderr instead of stdout.
consoletitle: Display progress in console window's titlebar.
nopart: Do not use temporary .part files.
updatetime: Use the Last-modified header to set output file timestamps.
test: Download only first bytes to test the downloader.
min_filesize: Skip files smaller than this size
max_filesize: Skip files larger than this size
xattr_set_filesize: Set ytdl.filesize user xattribute with expected size.
external_downloader_args: A list of additional command-line arguments for the
external downloader.
hls_use_mpegts: Use the mpegts container for HLS videos.
http_chunk_size: Size of a chunk for chunk-based HTTP downloading. May be
useful for bypassing bandwidth throttling imposed by
a webserver (experimental)
Subclasses of this one must re-define the real_download method.
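
    Example (illustrative only; the subclass name and parameter values are
    made up):

        params = {
            'ratelimit': 512 * 1024,  # bytes/sec
            'retries': 10,
            'continuedl': True,
            'noprogress': False,
        }
        fd = SomeDownloader(ydl, params)  # a concrete subclass
        fd.download(filename, info_dict)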
"""
_TEST_FILE_SIZE = 10241
params = None
def __init__(self, ydl, params):
"""Create a FileDownloader object with the given options."""
self.ydl = ydl
self._progress_hooks = []
self.params = params
self.add_progress_hook(self.report_progress)
@staticmethod
def format_seconds(seconds):
(mins, secs) = divmod(seconds, 60)
(hours, mins) = divmod(mins, 60)
if hours > 99:
return '--:--:--'
if hours == 0:
return '%02d:%02d' % (mins, secs)
else:
return '%02d:%02d:%02d' % (hours, mins, secs)
@staticmethod
def calc_percent(byte_counter, data_len):
if data_len is None:
return None
return float(byte_counter) / float(data_len) * 100.0
@staticmethod
def format_percent(percent):
if percent is None:
return '---.-%'
return '%6s' % ('%3.1f%%' % percent)
@staticmethod
def calc_eta(start, now, total, current):
if total is None:
return None
if now is None:
now = time.time()
dif = now - start
if current == 0 or dif < 0.001: # One millisecond
return None
rate = float(current) / dif
return int((float(total) - float(current)) / rate)
@staticmethod
def format_eta(eta):
if eta is None:
return '--:--'
return FileDownloader.format_seconds(eta)
@staticmethod
def calc_speed(start, now, bytes):
dif = now - start
if bytes == 0 or dif < 0.001: # One millisecond
return None
return float(bytes) / dif
@staticmethod
def format_speed(speed):
if speed is None:
return '%10s' % '---b/s'
return '%10s' % ('%s/s' % format_bytes(speed))
@staticmethod
def format_retries(retries):
return 'inf' if retries == float('inf') else '%.0f' % retries
@staticmethod
def best_block_size(elapsed_time, bytes):
new_min = max(bytes / 2.0, 1.0)
new_max = min(max(bytes * 2.0, 1.0), 4194304) # Do not surpass 4 MB
if elapsed_time < 0.001:
return int(new_max)
rate = bytes / elapsed_time
if rate > new_max:
return int(new_max)
if rate < new_min:
return int(new_min)
return int(rate)
@staticmethod
def parse_bytes(bytestr):
"""Parse a string indicating a byte quantity into an integer."""
matchobj = re.match(r'(?i)^(\d+(?:\.\d+)?)([kMGTPEZY]?)$', bytestr)
if matchobj is None:
return None
number = float(matchobj.group(1))
multiplier = 1024.0 ** 'bkmgtpezy'.index(matchobj.group(2).lower())
return int(round(number * multiplier))
def to_screen(self, *args, **kargs):
self.ydl.to_screen(*args, **kargs)
def to_stderr(self, message):
self.ydl.to_screen(message)
def to_console_title(self, message):
self.ydl.to_console_title(message)
def trouble(self, *args, **kargs):
self.ydl.trouble(*args, **kargs)
def report_warning(self, *args, **kargs):
self.ydl.report_warning(*args, **kargs)
def report_error(self, *args, **kargs):
self.ydl.report_error(*args, **kargs)
def slow_down(self, start_time, now, byte_counter):
"""Sleep if the download speed is over the rate limit."""
rate_limit = self.params.get('ratelimit')
if rate_limit is None or byte_counter == 0:
return
if now is None:
now = time.time()
elapsed = now - start_time
if elapsed <= 0.0:
return
speed = float(byte_counter) / elapsed
if speed > rate_limit:
time.sleep(max((byte_counter // rate_limit) - elapsed, 0))
def temp_name(self, filename):
"""Returns a temporary filename for the given filename."""
if self.params.get('nopart', False) or filename == '-' or \
(os.path.exists(encodeFilename(filename)) and not os.path.isfile(encodeFilename(filename))):
return filename
return filename + '.part'
def undo_temp_name(self, filename):
if filename.endswith('.part'):
return filename[:-len('.part')]
return filename
def ytdl_filename(self, filename):
return filename + '.ytdl'
def try_rename(self, old_filename, new_filename):
try:
if old_filename == new_filename:
return
os.rename(encodeFilename(old_filename), encodeFilename(new_filename))
except (IOError, OSError) as err:
self.report_error('unable to rename file: %s' % error_to_compat_str(err))
def try_utime(self, filename, last_modified_hdr):
"""Try to set the last-modified time of the given file."""
if last_modified_hdr is None:
return
if not os.path.isfile(encodeFilename(filename)):
return
timestr = last_modified_hdr
if timestr is None:
return
filetime = timeconvert(timestr)
if filetime is None:
return filetime
# Ignore obviously invalid dates
if filetime == 0:
return
try:
os.utime(filename, (time.time(), filetime))
except Exception:
pass
return filetime
def report_destination(self, filename):
"""Report destination filename."""
self.to_screen('[download] Destination: ' + filename)
def _report_progress_status(self, msg, is_last_line=False):
fullmsg = '[download] ' + msg
if self.params.get('progress_with_newline', False):
self.to_screen(fullmsg)
else:
if compat_os_name == 'nt':
prev_len = getattr(self, '_report_progress_prev_line_length',
0)
if prev_len > len(fullmsg):
fullmsg += ' ' * (prev_len - len(fullmsg))
self._report_progress_prev_line_length = len(fullmsg)
clear_line = '\r'
else:
clear_line = ('\r\x1b[K' if sys.stderr.isatty() else '\r')
self.to_screen(clear_line + fullmsg, skip_eol=not is_last_line)
self.to_console_title('youtube-dl ' + msg)
def report_progress(self, s):
if s['status'] == 'finished':
if self.params.get('noprogress', False):
self.to_screen('[download] Download completed')
else:
msg_template = '100%%'
if s.get('total_bytes') is not None:
s['_total_bytes_str'] = format_bytes(s['total_bytes'])
msg_template += ' of %(_total_bytes_str)s'
if s.get('elapsed') is not None:
s['_elapsed_str'] = self.format_seconds(s['elapsed'])
msg_template += ' in %(_elapsed_str)s'
self._report_progress_status(
msg_template % s, is_last_line=True)
if self.params.get('noprogress'):
return
if s['status'] != 'downloading':
return
if s.get('eta') is not None:
s['_eta_str'] = self.format_eta(s['eta'])
else:
s['_eta_str'] = 'Unknown ETA'
if s.get('total_bytes') and s.get('downloaded_bytes') is not None:
s['_percent_str'] = self.format_percent(100 * s['downloaded_bytes'] / s['total_bytes'])
elif s.get('total_bytes_estimate') and s.get('downloaded_bytes') is not None:
s['_percent_str'] = self.format_percent(100 * s['downloaded_bytes'] / s['total_bytes_estimate'])
else:
if s.get('downloaded_bytes') == 0:
s['_percent_str'] = self.format_percent(0)
else:
s['_percent_str'] = 'Unknown %'
if s.get('speed') is not None:
s['_speed_str'] = self.format_speed(s['speed'])
else:
s['_speed_str'] = 'Unknown speed'
if s.get('total_bytes') is not None:
s['_total_bytes_str'] = format_bytes(s['total_bytes'])
msg_template = '%(_percent_str)s of %(_total_bytes_str)s at %(_speed_str)s ETA %(_eta_str)s'
elif s.get('total_bytes_estimate') is not None:
s['_total_bytes_estimate_str'] = format_bytes(s['total_bytes_estimate'])
msg_template = '%(_percent_str)s of ~%(_total_bytes_estimate_str)s at %(_speed_str)s ETA %(_eta_str)s'
else:
if s.get('downloaded_bytes') is not None:
s['_downloaded_bytes_str'] = format_bytes(s['downloaded_bytes'])
if s.get('elapsed'):
s['_elapsed_str'] = self.format_seconds(s['elapsed'])
msg_template = '%(_downloaded_bytes_str)s at %(_speed_str)s (%(_elapsed_str)s)'
else:
msg_template = '%(_downloaded_bytes_str)s at %(_speed_str)s'
else:
                msg_template = '%(_percent_str)s at %(_speed_str)s ETA %(_eta_str)s'
self._report_progress_status(msg_template % s)
def report_resuming_byte(self, resume_len):
"""Report attempt to resume at given byte."""
self.to_screen('[download] Resuming download at byte %s' % resume_len)
def report_retry(self, err, count, retries):
"""Report retry in case of HTTP error 5xx"""
self.to_screen(
'[download] Got server HTTP error: %s. Retrying (attempt %d of %s)...'
% (error_to_compat_str(err), count, self.format_retries(retries)))
def report_file_already_downloaded(self, file_name):
"""Report file has already been fully downloaded."""
try:
self.to_screen('[download] %s has already been downloaded' % file_name)
except UnicodeEncodeError:
self.to_screen('[download] The file has already been downloaded')
def report_unable_to_resume(self):
"""Report it was impossible to resume download."""
self.to_screen('[download] Unable to resume')
def download(self, filename, info_dict):
"""Download to a filename using the info from info_dict
Return True on success and False otherwise
"""
nooverwrites_and_exists = (
self.params.get('nooverwrites', False) and
os.path.exists(encodeFilename(filename))
)
if not hasattr(filename, 'write'):
continuedl_and_exists = (
self.params.get('continuedl', True) and
os.path.isfile(encodeFilename(filename)) and
not self.params.get('nopart', False)
)
# Check file already present
if filename != '-' and (nooverwrites_and_exists or continuedl_and_exists):
self.report_file_already_downloaded(filename)
self._hook_progress({
'filename': filename,
'status': 'finished',
'total_bytes': os.path.getsize(encodeFilename(filename)),
})
return True
min_sleep_interval = self.params.get('sleep_interval')
if min_sleep_interval:
max_sleep_interval = self.params.get('max_sleep_interval', min_sleep_interval)
sleep_interval = random.uniform(min_sleep_interval, max_sleep_interval)
self.to_screen(
'[download] Sleeping %s seconds...' % (
int(sleep_interval) if sleep_interval.is_integer()
else '%.2f' % sleep_interval))
time.sleep(sleep_interval)
return self.real_download(filename, info_dict)
def real_download(self, filename, info_dict):
"""Real download process. Redefine in subclasses."""
raise NotImplementedError('This method must be implemented by subclasses')
def _hook_progress(self, status):
for ph in self._progress_hooks:
ph(status)
def add_progress_hook(self, ph):
# See YoutubeDl.py (search for progress_hooks) for a description of
# this interface
self._progress_hooks.append(ph)
def _debug_cmd(self, args, exe=None):
if not self.params.get('verbose', False):
return
str_args = [decodeArgument(a) for a in args]
if exe is None:
exe = os.path.basename(str_args[0])
self.to_screen('[debug] %s command line: %s' % (
exe, shell_quote(str_args)))
| gpl-3.0 | 557,000,624,049,092,600 | 31.308483 | 105 | 0.667489 | false |
# ahara/kaggle_otto: otto/model/model_11_xgboost_poly/xgboost_poly.py
"""
5-fold cv - log loss 0.468809065953
"""
import graphlab as gl
import numpy as np
import logging
import os
from hyperopt import fmin, hp, tpe
from sklearn.base import BaseEstimator
from sklearn.svm import LinearSVC
from sklearn import preprocessing
from otto_utils import consts, utils
MODEL_NAME = 'model_11_xgboost_poly'
MODE = 'cv' # cv|submission|holdout|tune
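# MODE selects the branch at the bottom of this file:
#   cv         - write out-of-fold predictions for blending (make_blender_cv)
#   submission - fit on all training data and write test-set predictions
#   holdout    - single hold-out log-loss evaluation
#   tune       - hyperopt search over the boosted-tree parameters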
logging.disable(logging.INFO)
class XGBoost(BaseEstimator):
def __init__(self, max_iterations=50, max_depth=9, min_child_weight=4, row_subsample=.75,
min_loss_reduction=1., column_subsample=.8, step_size=.3, verbose=True):
self.n_classes_ = 9
self.max_iterations = max_iterations
self.max_depth = max_depth
self.min_child_weight = min_child_weight
self.row_subsample = row_subsample
self.min_loss_reduction = min_loss_reduction
self.column_subsample = column_subsample
self.step_size = step_size
self.verbose = verbose
self.model = None
def fit(self, X, y, sample_weight=None):
sf = self._array_to_sframe(X, y)
self.model = gl.boosted_trees_classifier.create(sf, target='target',
max_iterations=self.max_iterations,
max_depth=self.max_depth,
min_child_weight=self.min_child_weight,
row_subsample=self.row_subsample,
min_loss_reduction=self.min_loss_reduction,
column_subsample=self.column_subsample,
step_size=self.step_size,
validation_set=None,
verbose=self.verbose)
return self
def predict(self, X):
preds = self.predict_proba(X)
return np.argmax(preds, axis=1)
def predict_proba(self, X):
sf = self._array_to_sframe(X)
preds = self.model.predict_topk(sf, output_type='probability', k=self.n_classes_)
return self._preds_to_array(preds)
# Private methods
def _array_to_sframe(self, data, targets=None):
d = dict()
for i in xrange(data.shape[1]):
d['feat_%d' % (i + 1)] = gl.SArray(data[:, i])
if targets is not None:
d['target'] = gl.SArray(targets)
return gl.SFrame(d)
def _preds_to_array(self, preds):
p = preds.unstack(['class', 'probability'], 'probs').unpack('probs', '')
p['id'] = p['id'].astype(int) + 1
p = p.sort('id')
del p['id']
preds_array = np.array(p.to_dataframe(), dtype=float)
return preds_array
if __name__ == '__main__':
train, labels, test, _, _ = utils.load_data()
# polynomial features
poly_feat = preprocessing.PolynomialFeatures(degree=2, interaction_only=False, include_bias=True)
train = poly_feat.fit_transform(train, labels)
test = poly_feat.transform(test)
print train.shape
# feature selection
feat_selector = LinearSVC(C=0.0001, penalty='l1', dual=False)
train = feat_selector.fit_transform(train, labels)
test = feat_selector.transform(test)
print train.shape
clf = XGBoost(max_iterations=4800, max_depth=12, min_child_weight=4.9208250938262745, row_subsample=.9134478530382129,
min_loss_reduction=.5132278416508804, column_subsample=.730128689911957, step_size=.009)
if MODE == 'cv':
scores, predictions = utils.make_blender_cv(clf, train, labels, calibrate=False)
print 'CV:', scores, 'Mean log loss:', np.mean(scores)
utils.write_blender_data(consts.BLEND_PATH, MODEL_NAME + '.csv', predictions)
elif MODE == 'submission':
clf.fit(train, labels)
predictions = clf.predict_proba(test)
utils.save_submission(consts.DATA_SAMPLE_SUBMISSION_PATH,
os.path.join(consts.ENSEMBLE_PATH, MODEL_NAME + '.csv'),
predictions)
elif MODE == 'holdout':
score = utils.hold_out_evaluation(clf, train, labels, calibrate=False)
print 'Log loss:', score
elif MODE == 'tune':
# Objective function
def objective(args):
max_depth, min_child_weight, row_subsample, min_loss_reduction, column_subsample = args
clf = XGBoost(max_depth=max_depth, min_child_weight=min_child_weight,
row_subsample=row_subsample, min_loss_reduction=min_loss_reduction,
column_subsample=column_subsample, verbose=False)
score = utils.hold_out_evaluation(clf, train, labels, calibrate=False)
print 'max_depth, min_child_weight, row_subsample, min_loss_reduction, column_subsample, logloss'
print args, score
return score
# Searching space
space = (
hp.quniform('max_depth', 2, 14, 1),
hp.uniform('min_child_weight', .5, 10.),
hp.uniform('row_subsample', .3, 1.),
hp.uniform('min_loss_reduction', .1, 3.),
hp.uniform('column_subsample', .1, 1.),
)
best_sln = fmin(objective, space, algo=tpe.suggest, max_evals=500)
print 'Best solution:', best_sln
else:
print 'Unknown mode'
| bsd-3-clause | -715,724,797,070,731,300 | 38.371429 | 122 | 0.570573 | false |
# drvinceknight/Nashpy: tests/unit/test_replicator_dynamics.py
"""
Tests for Replicator Dynamics
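
The expected derivatives and trajectories below follow the replicator
equation

    dx_i/dt = x_i * ((A x)_i - x . A x)

for a strategy distribution x and fitness matrix A (the asymmetric tests
use the analogous two-population form); stated here for reference.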
"""
import numpy as np
import pytest
from hypothesis import given, settings
from hypothesis.strategies import integers
from hypothesis.extra.numpy import arrays
from nashpy.learning.replicator_dynamics import (
get_derivative_of_fitness,
replicator_dynamics,
get_derivative_of_asymmetric_fitness,
asymmetric_replicator_dynamics,
)
@given(M=arrays(np.int8, (3, 3)))
def test_property_get_derivative_of_fitness(M):
t = 0
x = np.zeros(M.shape[1])
derivative_of_fitness = get_derivative_of_fitness(x, t, M)
assert len(derivative_of_fitness) == len(x)
def test_get_derivative_of_fitness():
M = np.array([[3, 2, 3], [4, 1, 1], [2, 3, 1]])
x_values = (
np.array([1, 0, 0]),
np.array([1 / 2, 1 / 2, 0]),
np.array([0, 1 / 4, 3 / 4]),
np.array([1 / 5, 2 / 5, 2 / 5]),
np.array([1 / 2, 0, 1 / 2]),
np.array([2 / 4, 1 / 4, 1 / 4]),
)
derivative_values = (
np.array([0, 0, 0]),
np.array([0, 0, 0]),
np.array([0.0, -0.09375, 0.09375]),
np.array([0.128, -0.144, 0.016]),
np.array([0.375, 0.0, -0.375]),
np.array([0.125, 0.0, -0.125]),
)
for x_value, expected_derivative in zip(x_values, derivative_values):
derivative = get_derivative_of_fitness(x=x_value, t=0, A=M)
assert np.allclose(derivative, expected_derivative), x_value
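    # Hand check of one case above: for x = (1/2, 0, 1/2) the fitnesses are
    # A @ x = (3, 2.5, 1.5), the mean fitness is x . (A @ x) = 2.25, and
    # x * (A @ x - 2.25) = (0.375, 0, -0.375), matching the expected value.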
@given(M=arrays(np.int8, (3, 3)))
def test_property_of_output_dimension_for_games_of_size_3(M):
xs = replicator_dynamics(M)
assert all(len(x) == 3 for x in xs)
@given(M=arrays(np.int8, (4, 4)))
def test_property_of_output_dimension_for_games_of_size_4(M):
xs = replicator_dynamics(M)
assert all(len(x) == 4 for x in xs)
def test_replicator_dynamics_example_1():
M = np.array([[3, 2], [4, 1]])
y0 = np.array([0.9, 0.1])
timepoints = np.linspace(0, 10, 100)
expected_xs_over_time = np.array(
[
[0.9, 0.1],
[0.89256013, 0.10743987],
[0.88479436, 0.11520564],
[0.87671801, 0.12328199],
[0.86834987, 0.13165013],
[0.8597121, 0.1402879],
[0.8508299, 0.1491701],
[0.8417312, 0.1582688],
[0.83244622, 0.16755378],
[0.82300701, 0.17699299],
[0.81344687, 0.18655313],
[0.80379989, 0.19620011],
[0.79410031, 0.20589969],
[0.78438204, 0.21561796],
[0.77467816, 0.22532184],
[0.76502043, 0.23497957],
[0.75543894, 0.24456106],
[0.74596174, 0.25403826],
[0.73661466, 0.26338534],
[0.72742107, 0.27257893],
[0.7184018, 0.2815982],
[0.70957507, 0.29042493],
[0.70095654, 0.29904346],
[0.69255932, 0.30744068],
[0.6843941, 0.3156059],
[0.67646927, 0.32353073],
[0.66879107, 0.33120893],
[0.66136378, 0.33863622],
[0.65418987, 0.34581013],
[0.64727021, 0.35272979],
[0.64060422, 0.35939578],
[0.63419006, 0.36580994],
[0.62802483, 0.37197517],
[0.62210466, 0.37789534],
[0.61642492, 0.38357508],
[0.61098033, 0.38901967],
[0.60576506, 0.39423494],
[0.60077288, 0.39922712],
[0.59599723, 0.40400277],
[0.59143133, 0.40856867],
[0.58706824, 0.41293176],
[0.5829009, 0.4170991],
[0.57892225, 0.42107775],
[0.57512523, 0.42487477],
[0.57150283, 0.42849717],
[0.56804814, 0.43195186],
[0.56475435, 0.43524565],
[0.56161481, 0.43838519],
[0.558623, 0.441377],
[0.55577261, 0.44422739],
[0.55305749, 0.44694251],
[0.55047167, 0.44952833],
[0.5480094, 0.4519906],
[0.54566512, 0.45433488],
[0.54343348, 0.45656652],
[0.54130932, 0.45869068],
[0.53928768, 0.46071232],
[0.53736381, 0.46263619],
[0.53553315, 0.46446685],
[0.53379131, 0.46620869],
[0.53213411, 0.46786589],
[0.53055754, 0.46944246],
[0.52905777, 0.47094223],
[0.52763113, 0.47236887],
[0.52627413, 0.47372587],
[0.52498342, 0.47501658],
[0.52375581, 0.47624419],
[0.52258827, 0.47741173],
[0.52147788, 0.47852212],
[0.52042188, 0.47957812],
[0.51941764, 0.48058236],
[0.51846265, 0.48153735],
[0.51755449, 0.48244551],
[0.51669091, 0.48330909],
[0.51586971, 0.48413029],
[0.51508885, 0.48491115],
[0.51434634, 0.48565366],
[0.51364031, 0.48635969],
[0.51296897, 0.48703103],
[0.51233064, 0.48766936],
[0.51172369, 0.48827631],
[0.51114658, 0.48885342],
[0.51059785, 0.48940215],
[0.51007612, 0.48992388],
[0.50958005, 0.49041995],
[0.50910838, 0.49089162],
[0.50865992, 0.49134008],
[0.50823353, 0.49176647],
[0.50782813, 0.49217187],
[0.50744267, 0.49255733],
[0.50707619, 0.49292381],
[0.50672775, 0.49327225],
[0.50639645, 0.49360355],
[0.50608147, 0.49391853],
[0.50578199, 0.49421801],
[0.50549726, 0.49450274],
[0.50522655, 0.49477345],
[0.50496916, 0.49503084],
[0.50472445, 0.49527555],
[0.50449178, 0.49550822],
],
)
xs = replicator_dynamics(y0=y0, timepoints=timepoints, A=M)
assert np.allclose(xs, expected_xs_over_time)
def test_replicator_dynamics_example_2():
M = np.array([[3, 2], [4, 1]])
y0 = np.array([0.65, 0.35])
timepoints = np.linspace(0, 10, 100)
expected_xs_over_time = np.array(
[
[0.65, 0.35],
[0.64323298, 0.35676702],
[0.63671861, 0.36328139],
[0.63045449, 0.36954551],
[0.62443708, 0.37556292],
[0.61866205, 0.38133795],
[0.61312433, 0.38687567],
[0.60781832, 0.39218168],
[0.60273792, 0.39726208],
[0.5978767, 0.4021233],
[0.59322796, 0.40677204],
[0.5887848, 0.4112152],
[0.58454021, 0.41545979],
[0.58048714, 0.41951286],
[0.57661851, 0.42338149],
[0.57292729, 0.42707271],
[0.56940652, 0.43059348],
[0.56604936, 0.43395064],
[0.56284908, 0.43715092],
[0.55979911, 0.44020089],
[0.55689305, 0.44310695],
[0.5541247, 0.4458753],
[0.551488, 0.448512],
[0.54897713, 0.45102287],
[0.54658644, 0.45341356],
[0.5443105, 0.4556895],
[0.54214408, 0.45785592],
[0.54008213, 0.45991787],
[0.53811982, 0.46188018],
[0.53625251, 0.46374749],
[0.53447575, 0.46552425],
[0.53278527, 0.46721473],
[0.53117701, 0.46882299],
[0.52964705, 0.47035295],
[0.52819167, 0.47180833],
[0.5268073, 0.4731927],
[0.52549054, 0.47450946],
[0.52423814, 0.47576186],
[0.52304699, 0.47695301],
[0.52191414, 0.47808586],
[0.52083678, 0.47916322],
[0.5198122, 0.4801878],
[0.51883785, 0.48116215],
[0.51791129, 0.48208871],
[0.5170302, 0.4829698],
[0.51619235, 0.48380765],
[0.51539564, 0.48460436],
[0.51463806, 0.48536194],
[0.51391769, 0.48608231],
[0.51323273, 0.48676727],
[0.51258143, 0.48741857],
[0.51196214, 0.48803786],
[0.51137331, 0.48862669],
[0.51081343, 0.48918657],
[0.51028109, 0.48971891],
[0.50977494, 0.49022506],
[0.50929368, 0.49070632],
[0.50883611, 0.49116389],
[0.50840105, 0.49159895],
[0.5079874, 0.4920126],
[0.5075941, 0.4924059],
[0.50722017, 0.49277983],
[0.50686464, 0.49313536],
[0.5065266, 0.4934734],
[0.50620521, 0.49379479],
[0.50589964, 0.49410036],
[0.50560912, 0.49439088],
[0.5053329, 0.4946671],
[0.50507027, 0.49492973],
[0.50482058, 0.49517942],
[0.50458318, 0.49541682],
[0.50435748, 0.49564252],
[0.50414288, 0.49585712],
[0.50393885, 0.49606115],
[0.50374487, 0.49625513],
[0.50356044, 0.49643956],
[0.5033851, 0.4966149],
[0.50321838, 0.49678162],
[0.50305988, 0.49694012],
[0.50290918, 0.49709082],
[0.50276591, 0.49723409],
[0.50262969, 0.49737031],
[0.50250018, 0.49749982],
[0.50237704, 0.49762296],
[0.50225997, 0.49774003],
[0.50214867, 0.49785133],
[0.50204285, 0.49795715],
[0.50194224, 0.49805776],
[0.50184658, 0.49815342],
[0.50175564, 0.49824436],
[0.50166917, 0.49833083],
[0.50158696, 0.49841304],
[0.50150881, 0.49849119],
[0.5014345, 0.4985655],
[0.50136385, 0.49863615],
[0.50129668, 0.49870332],
[0.50123281, 0.49876719],
[0.5011721, 0.4988279],
[0.50111437, 0.49888563],
[0.50105949, 0.49894051],
]
)
xs = replicator_dynamics(y0=y0, timepoints=timepoints, A=M)
assert np.allclose(xs, expected_xs_over_time)
def test_replicator_dynamics_example_3_default_y0():
M = np.array([[8, 2], [5, 3]])
timepoints = np.linspace(0, 10, 100)
expected_xs_over_time = np.array(
[
[5.00000000e-01, 5.00000000e-01],
[5.26546322e-01, 4.73453678e-01],
[5.55724197e-01, 4.44275803e-01],
[5.87511410e-01, 4.12488590e-01],
[6.21728489e-01, 3.78271511e-01],
[6.57990146e-01, 3.42009854e-01],
[6.95672143e-01, 3.04327857e-01],
[7.33912742e-01, 2.66087258e-01],
[7.71667086e-01, 2.28332914e-01],
[8.07818764e-01, 1.92181236e-01],
[8.41329566e-01, 1.58670434e-01],
[8.71386505e-01, 1.28613495e-01],
[8.97500124e-01, 1.02499876e-01],
[9.19528237e-01, 8.04717629e-02],
[9.37629038e-01, 6.23709623e-02],
[9.52172012e-01, 4.78279882e-02],
[9.63640143e-01, 3.63598573e-02],
[9.72547714e-01, 2.74522863e-02],
[9.79383883e-01, 2.06161168e-02],
[9.84581519e-01, 1.54184812e-02],
[9.88504989e-01, 1.14950108e-02],
[9.91450448e-01, 8.54955249e-03],
[9.93652534e-01, 6.34746643e-03],
[9.95293742e-01, 4.70625842e-03],
[9.96514084e-01, 3.48591598e-03],
[9.97419907e-01, 2.58009292e-03],
[9.98091404e-01, 1.90859574e-03],
[9.98588714e-01, 1.41128551e-03],
[9.98956761e-01, 1.04323949e-03],
[9.99228997e-01, 7.71002583e-04],
[9.99430288e-01, 5.69712057e-04],
[9.99579078e-01, 4.20922069e-04],
[9.99689037e-01, 3.10962897e-04],
[9.99770287e-01, 2.29713463e-04],
[9.99830315e-01, 1.69684731e-04],
[9.99874662e-01, 1.25338089e-04],
[9.99907422e-01, 9.25784618e-05],
[9.99931619e-01, 6.83805402e-05],
[9.99949493e-01, 5.05073861e-05],
[9.99962695e-01, 3.73049637e-05],
[9.99972447e-01, 2.75532065e-05],
[9.99979649e-01, 2.03505730e-05],
[9.99984969e-01, 1.50308175e-05],
[9.99988898e-01, 1.11015649e-05],
[9.99991801e-01, 8.19919968e-06],
[9.99993944e-01, 6.05565505e-06],
[9.99995527e-01, 4.47259259e-06],
[9.99996697e-01, 3.30326369e-06],
[9.99997560e-01, 2.43964910e-06],
[9.99998198e-01, 1.80194467e-06],
[9.99998669e-01, 1.33084681e-06],
[9.99999018e-01, 9.82166934e-07],
[9.99999275e-01, 7.25194883e-07],
[9.99999464e-01, 5.35772717e-07],
[9.99999604e-01, 3.96066315e-07],
[9.99999707e-01, 2.92673377e-07],
[9.99999784e-01, 2.16251201e-07],
[9.99999840e-01, 1.59970163e-07],
[9.99999882e-01, 1.18489534e-07],
[9.99999912e-01, 8.79198227e-08],
[9.99999935e-01, 6.47958220e-08],
[9.99999952e-01, 4.76360370e-08],
[9.99999965e-01, 3.46731961e-08],
[9.99999974e-01, 2.55665790e-08],
[9.99999981e-01, 1.86146820e-08],
[9.99999986e-01, 1.36425118e-08],
[9.99999990e-01, 1.00816167e-08],
[9.99999992e-01, 7.55059409e-09],
[9.99999994e-01, 5.67732943e-09],
[9.99999996e-01, 4.28158816e-09],
[9.99999997e-01, 3.20976917e-09],
[9.99999998e-01, 2.40345206e-09],
[9.99999998e-01, 1.78669125e-09],
[9.99999999e-01, 1.33286584e-09],
[9.99999999e-01, 9.89591714e-10],
[9.99999999e-01, 7.40089560e-10],
[9.99999999e-01, 5.51209294e-10],
[1.00000000e00, 3.28925518e-10],
[1.00000000e00, 1.11214696e-10],
[1.00000000e00, -7.15664780e-11],
[1.00000000e00, -2.19418003e-10],
[1.00000000e00, -3.32339878e-10],
[1.00000000e00, -4.10332104e-10],
[1.00000000e00, -4.53394682e-10],
[1.00000000e00, -4.61527610e-10],
[9.99999999e-01, -4.34730889e-10],
[9.99999999e-01, -3.73004520e-10],
[9.99999999e-01, -3.39039249e-10],
[9.99999999e-01, -3.64704692e-10],
[9.99999999e-01, -3.81253172e-10],
[9.99999999e-01, -3.88684691e-10],
[9.99999999e-01, -3.86999249e-10],
[9.99999999e-01, -3.76196845e-10],
[9.99999999e-01, -3.56277479e-10],
[9.99999999e-01, -3.27241152e-10],
[9.99999999e-01, -2.89087864e-10],
[9.99999999e-01, -2.41817614e-10],
[9.99999998e-01, -2.02072563e-10],
[9.99999998e-01, -1.86998011e-10],
[9.99999998e-01, -1.71923460e-10],
]
)
xs = replicator_dynamics(timepoints=timepoints, A=M)
assert np.allclose(xs, expected_xs_over_time)
def test_replicator_dynamics_game_size_3_example_1():
M = np.array([[3, 2, 3], [4, 1, 1], [2, 3, 1]])
y0 = np.array([0.2, 0.1, 0.7])
timepoints = np.linspace(0, 20, 100)
expected_xs_over_time = np.array(
[
[0.2, 0.1, 0.7],
[0.25084045, 0.09789735, 0.6512622],
[0.30680235, 0.096512, 0.59668565],
[0.36555987, 0.09596192, 0.53847822],
[0.4244435, 0.09636333, 0.47919317],
[0.48091315, 0.097819, 0.42126785],
[0.53292835, 0.10041113, 0.36666052],
[0.57912, 0.10419922, 0.31668078],
[0.61877662, 0.10922098, 0.2720024],
[0.6517203, 0.11549383, 0.23278586],
[0.67814881, 0.12301537, 0.19883582],
[0.69849303, 0.13176241, 0.16974455],
[0.71330936, 0.14168906, 0.14500158],
[0.72320749, 0.15272473, 0.12406778],
[0.72880646, 0.16477305, 0.10642048],
[0.73070961, 0.17771243, 0.09157796],
[0.72949106, 0.19139871, 0.07911023],
[0.72568835, 0.2056699, 0.06864175],
[0.719798, 0.22035258, 0.05984942],
[0.71227239, 0.23526931, 0.0524583],
[0.70351757, 0.25024617, 0.04623626],
[0.69389181, 0.26511966, 0.04098853],
[0.68370534, 0.27974226, 0.03655241],
[0.67322108, 0.29398645, 0.03279247],
[0.66265685, 0.30774686, 0.02959629],
[0.65218849, 0.32094081, 0.0268707],
[0.64195385, 0.33350758, 0.02453857],
[0.63205732, 0.34540661, 0.02253606],
[0.62257449, 0.35661514, 0.02081038],
[0.61355672, 0.36712552, 0.01931776],
[0.60503556, 0.37694249, 0.01802195],
[0.59702658, 0.38608059, 0.01689283],
[0.58953289, 0.39456178, 0.01590533],
[0.58254803, 0.40241341, 0.01503856],
[0.57605847, 0.40966651, 0.01427502],
[0.5700456, 0.41635432, 0.01360008],
[0.5644874, 0.42251121, 0.01300139],
[0.55935967, 0.42817177, 0.01246856],
[0.55463707, 0.43337015, 0.01199279],
[0.55029384, 0.43813956, 0.0115666],
[0.54630443, 0.44251196, 0.01118362],
[0.54264384, 0.44651774, 0.01083841],
[0.53928799, 0.4501857, 0.01052632],
[0.53621383, 0.45354285, 0.01024332],
[0.53339955, 0.45661449, 0.00998596],
[0.5308246, 0.45942417, 0.00975123],
[0.52846971, 0.46199374, 0.00953656],
[0.5263169, 0.46434344, 0.00933966],
[0.52434948, 0.46649195, 0.00915857],
[0.52255195, 0.46845648, 0.00899157],
[0.52091, 0.47025287, 0.00883714],
[0.51941039, 0.47189566, 0.00869395],
[0.51804096, 0.47339821, 0.00856083],
[0.51679051, 0.47477274, 0.00843675],
[0.51564876, 0.47603045, 0.0083208],
[0.51460626, 0.47718159, 0.00821216],
[0.51365436, 0.47823552, 0.00811011],
[0.51278515, 0.47920082, 0.00801403],
[0.51199138, 0.48008529, 0.00792334],
[0.5112664, 0.48089607, 0.00783753],
[0.51060417, 0.48163968, 0.00775615],
[0.50999914, 0.48232207, 0.0076788],
[0.50944625, 0.48294864, 0.00760511],
[0.50894089, 0.48352435, 0.00753476],
[0.50847885, 0.48405369, 0.00746746],
[0.50805628, 0.48454077, 0.00740295],
[0.50766968, 0.48498934, 0.00734099],
[0.50731584, 0.48540279, 0.00728137],
[0.50699186, 0.48578424, 0.0072239],
[0.50669508, 0.4861365, 0.00716841],
[0.50642309, 0.48646217, 0.00711475],
[0.50617366, 0.48676357, 0.00706277],
[0.50594481, 0.48704285, 0.00701234],
[0.50573469, 0.48730195, 0.00696337],
[0.50554164, 0.48754263, 0.00691573],
[0.50536414, 0.48776652, 0.00686933],
[0.50520082, 0.48797508, 0.0068241],
[0.5050504, 0.48816965, 0.00677995],
[0.50491176, 0.48835143, 0.00673681],
[0.50478384, 0.48852154, 0.00669463],
[0.50466569, 0.48868099, 0.00665333],
[0.50455645, 0.48883068, 0.00661287],
[0.50445532, 0.48897146, 0.00657321],
[0.50436161, 0.48910409, 0.00653429],
[0.50427466, 0.48922926, 0.00649607],
[0.50419387, 0.4893476, 0.00645853],
[0.5041187, 0.48945968, 0.00642162],
[0.50404866, 0.48956602, 0.00638531],
[0.50398331, 0.48966711, 0.00634958],
[0.50392223, 0.48976337, 0.0063144],
[0.50386505, 0.48985519, 0.00627975],
[0.50381145, 0.48994295, 0.0062456],
[0.50376111, 0.49002695, 0.00621194],
[0.50371375, 0.49010751, 0.00617874],
[0.50366912, 0.49018489, 0.00614599],
[0.50362698, 0.49025935, 0.00611367],
[0.50358714, 0.4903311, 0.00608177],
[0.50354938, 0.49040035, 0.00605027],
[0.50351355, 0.49046729, 0.00601915],
[0.50347948, 0.4905321, 0.00598842],
]
)
xs = replicator_dynamics(y0=y0, A=M, timepoints=timepoints)
assert np.allclose(xs, expected_xs_over_time)
def test_replicator_dynamics_game_size_3_example_default_timepoints():
M = np.array([[3, 2, 3], [4, 1, 1], [2, 3, 1]])
y0 = np.array([0.2, 0.1, 0.7])
expected_x_1 = np.array([[0.20237066, 0.09988063, 0.69774871]])
expected_x_1000 = np.array([[0.52171238, 0.46937475, 0.00891287]])
xs = replicator_dynamics(y0=y0, A=M)
assert np.allclose(xs[1], expected_x_1)
assert np.allclose(xs[-1], expected_x_1000)
assert len(xs) == 1000
def test_replicator_dynamics_game_size_3_example_2():
M = np.array([[3, 2, 3], [4, 1, 1], [2, 3, 1]])
y0 = np.array([0.5, 0.1, 0.4])
timepoints = np.linspace(0, 10, 100)
expected_xs_over_time = np.array(
[
[0.5, 0.1, 0.4],
[0.52559968, 0.10135984, 0.37304048],
[0.5497745, 0.10301946, 0.34720604],
[0.57240754, 0.10498432, 0.32260814],
[0.59342086, 0.10725857, 0.29932057],
[0.61277116, 0.10984506, 0.27738379],
[0.63044466, 0.11274538, 0.25680996],
[0.64645211, 0.11595993, 0.23758796],
[0.66082391, 0.11948781, 0.21968828],
[0.67360575, 0.12332687, 0.20306738],
[0.68485481, 0.1274736, 0.18767159],
[0.69463657, 0.13192312, 0.17344031],
[0.70302219, 0.13666906, 0.16030874],
[0.71008648, 0.14170354, 0.14820998],
[0.71590619, 0.14701708, 0.13707673],
[0.72055883, 0.15259863, 0.12684255],
[0.7241217, 0.1584355, 0.1174428],
[0.72667121, 0.16451346, 0.10881533],
[0.72828236, 0.17081672, 0.10090091],
[0.72902836, 0.17732812, 0.09364353],
[0.72898035, 0.18402915, 0.08699051],
[0.72820721, 0.19090019, 0.08089259],
[0.72677541, 0.19792069, 0.0753039],
[0.72474884, 0.20506931, 0.07018184],
[0.72218875, 0.21232425, 0.06548701],
[0.71915361, 0.21966337, 0.06118302],
[0.71569909, 0.22706454, 0.05723638],
[0.71187792, 0.23450578, 0.05361629],
[0.70773992, 0.24196557, 0.0502945],
[0.7033319, 0.24942298, 0.04724512],
[0.69869765, 0.25685792, 0.04444443],
[0.69387795, 0.26425127, 0.04187078],
[0.68891058, 0.27158505, 0.03950437],
[0.68383036, 0.27884249, 0.03732715],
[0.67866917, 0.28600818, 0.03532264],
[0.67345609, 0.29306806, 0.03347585],
[0.66821741, 0.3000095, 0.03177308],
[0.66297682, 0.30682127, 0.03020191],
[0.65775544, 0.31349356, 0.02875099],
[0.65257204, 0.32001794, 0.02741002],
[0.64744309, 0.3263873, 0.02616961],
[0.64238298, 0.3325958, 0.02502123],
[0.63740411, 0.33863879, 0.0239571],
[0.63251708, 0.34451275, 0.02297018],
[0.62773079, 0.35021519, 0.02205402],
[0.62305263, 0.35574459, 0.02120278],
[0.61848858, 0.36110029, 0.02041112],
[0.6140434, 0.36628239, 0.01967421],
[0.60972067, 0.37129171, 0.01898762],
[0.60552299, 0.37612969, 0.01834732],
[0.60145203, 0.38079834, 0.01774963],
[0.59750868, 0.3853001, 0.01719122],
[0.59369315, 0.38963783, 0.01666902],
[0.59000503, 0.39381472, 0.01618024],
[0.58644338, 0.39783429, 0.01572233],
[0.5830068, 0.40170025, 0.01529295],
[0.57969351, 0.40541652, 0.01488997],
[0.57650141, 0.40898716, 0.01451143],
[0.57342812, 0.41241633, 0.01415554],
[0.57047106, 0.41570828, 0.01382066],
[0.56762744, 0.41886729, 0.01350527],
[0.56489435, 0.42189765, 0.01320799],
[0.56226876, 0.42480368, 0.01292755],
[0.55974758, 0.42758964, 0.01266278],
[0.55732765, 0.43025976, 0.01241259],
[0.5550058, 0.43281821, 0.01217599],
[0.55277882, 0.43526911, 0.01195207],
[0.55064354, 0.43761648, 0.01173997],
[0.54859681, 0.43986427, 0.01153892],
[0.54663548, 0.44201632, 0.0113482],
[0.54475648, 0.44407639, 0.01116712],
[0.54295678, 0.44604813, 0.01099509],
[0.54123342, 0.44793507, 0.01083152],
[0.53958348, 0.44974065, 0.01067587],
[0.53800412, 0.45146821, 0.01052767],
[0.53649261, 0.45312096, 0.01038644],
[0.53504623, 0.45470201, 0.01025176],
[0.5336624, 0.45621436, 0.01012324],
[0.53233858, 0.45766092, 0.0100005],
[0.53107233, 0.45904447, 0.0098832],
[0.52986128, 0.46036769, 0.00977103],
[0.52870314, 0.46163318, 0.00966368],
[0.52759571, 0.46284341, 0.00956088],
[0.52653686, 0.46400078, 0.00946235],
[0.52552454, 0.46510759, 0.00936787],
[0.52455677, 0.46616604, 0.0092772],
[0.52363164, 0.46717823, 0.00919013],
[0.52274734, 0.46814621, 0.00910645],
[0.5219021, 0.46907191, 0.00902599],
[0.52109424, 0.4699572, 0.00894857],
[0.52032212, 0.47080386, 0.00887402],
[0.5195842, 0.47161362, 0.00880218],
[0.51887898, 0.47238809, 0.00873292],
[0.51820503, 0.47312887, 0.0086661],
[0.51756097, 0.47383744, 0.00860159],
[0.51694549, 0.47451525, 0.00853926],
[0.51635732, 0.47516367, 0.00847902],
[0.51579525, 0.47578401, 0.00842074],
[0.51525813, 0.47637754, 0.00836433],
[0.51474485, 0.47694545, 0.0083097],
]
)
xs = replicator_dynamics(y0=y0, timepoints=timepoints, A=M)
assert np.allclose(xs, expected_xs_over_time)
def test_replicator_dynamics_game_size_3_example_1_default_y0():
M = np.array([[3, 2, 3], [4, 1, 1], [2, 3, 1]])
timepoints = np.linspace(0, 10, 100)
expected_xs_over_time = np.array(
[
[0.33333333, 0.33333333, 0.33333333],
[0.34828459, 0.3262229, 0.32549251],
[0.36315697, 0.31983211, 0.31701092],
[0.37787298, 0.31412581, 0.30800121],
[0.3923581, 0.30907001, 0.29857189],
[0.40654213, 0.30463189, 0.28882598],
[0.42036029, 0.30077978, 0.27885992],
[0.43375425, 0.29748314, 0.26876261],
[0.44667261, 0.29471251, 0.25861488],
[0.45907142, 0.29243945, 0.24848913],
[0.47091426, 0.29063654, 0.23844919],
[0.48217224, 0.2892773, 0.22855046],
[0.49282377, 0.28833613, 0.2188401],
[0.50285411, 0.28778834, 0.20935755],
[0.51225504, 0.28761002, 0.20013494],
[0.52102418, 0.28777809, 0.19119773],
[0.52916446, 0.28827022, 0.18256531],
[0.53668352, 0.28906482, 0.17425166],
[0.54359302, 0.29014101, 0.16626597],
[0.54990812, 0.2914786, 0.15861327],
[0.55564688, 0.2930581, 0.15129501],
[0.5608297, 0.29486067, 0.14430963],
[0.56547887, 0.29686812, 0.13765301],
[0.56961809, 0.29906291, 0.131319],
[0.57327211, 0.30142816, 0.12529973],
[0.57646634, 0.30394761, 0.11958605],
[0.57922655, 0.30660565, 0.1141678],
[0.58157859, 0.30938732, 0.1090341],
[0.58354816, 0.31227828, 0.10417356],
[0.58516064, 0.31526484, 0.09957451],
[0.58644089, 0.31833398, 0.09522513],
[0.58741312, 0.32147329, 0.09111359],
[0.58810078, 0.32467101, 0.08722821],
[0.58852649, 0.32791604, 0.08355748],
[0.58871195, 0.33119789, 0.08009016],
[0.58867791, 0.33450672, 0.07681537],
[0.58844412, 0.33783332, 0.07372256],
[0.58802931, 0.34116907, 0.07080162],
[0.5874512, 0.34450598, 0.06804282],
[0.58672648, 0.34783662, 0.0654369],
[0.58587083, 0.35115417, 0.06297501],
[0.58489893, 0.35445233, 0.06064874],
[0.58382451, 0.35772538, 0.05845012],
[0.58266033, 0.36096807, 0.05637159],
[0.58141828, 0.3641757, 0.05440602],
[0.58010932, 0.36734401, 0.05254667],
[0.57874361, 0.37046921, 0.05078718],
[0.57733051, 0.37354793, 0.04912156],
[0.57587859, 0.37657722, 0.04754419],
[0.57439572, 0.37955451, 0.04604977],
[0.57288908, 0.38247759, 0.04463333],
[0.57136522, 0.38534459, 0.04329019],
[0.56983008, 0.38815395, 0.04201597],
[0.56828904, 0.3909044, 0.04080656],
[0.56674693, 0.39359496, 0.03965812],
[0.56520812, 0.39622487, 0.03856701],
[0.56367651, 0.39879364, 0.03752985],
[0.56215558, 0.40130095, 0.03654348],
[0.5606484, 0.40374669, 0.03560491],
[0.5591577, 0.40613095, 0.03471135],
[0.55768587, 0.40845392, 0.03386021],
[0.55623498, 0.410716, 0.03304902],
[0.55480684, 0.41291766, 0.0322755],
[0.55340299, 0.41505951, 0.0315375],
[0.55202472, 0.41714228, 0.030833],
[0.55067314, 0.41916674, 0.03016012],
[0.54934913, 0.42113379, 0.02951708],
[0.5480534, 0.42304437, 0.02890223],
[0.54678652, 0.42489947, 0.02831401],
[0.5455489, 0.42670015, 0.02775096],
[0.54434081, 0.42844749, 0.0272117],
[0.54316241, 0.43014264, 0.02669495],
[0.54201376, 0.43178673, 0.02619951],
[0.54089483, 0.43338094, 0.02572423],
[0.53980549, 0.43492646, 0.02526805],
[0.53874555, 0.43642449, 0.02482996],
[0.53771475, 0.43787623, 0.02440903],
[0.53671276, 0.43928288, 0.02400436],
[0.53573922, 0.44064566, 0.02361512],
[0.53479372, 0.44196575, 0.02324053],
[0.53387581, 0.44324434, 0.02287984],
[0.53298501, 0.44448262, 0.02253237],
[0.53212081, 0.44568175, 0.02219745],
[0.53128267, 0.44684287, 0.02187446],
[0.53047007, 0.44796711, 0.02156282],
[0.52968241, 0.4490556, 0.02126199],
[0.52891915, 0.45010942, 0.02097143],
[0.52817969, 0.45112965, 0.02069066],
[0.52746344, 0.45211733, 0.02041923],
[0.52676982, 0.4530735, 0.02015668],
[0.52609823, 0.45399916, 0.01990261],
[0.52544809, 0.45489529, 0.01965662],
[0.5248188, 0.45576285, 0.01941835],
[0.52420978, 0.45660278, 0.01918744],
[0.52362045, 0.45741598, 0.01896357],
[0.52305024, 0.45820334, 0.01874642],
[0.52249859, 0.45896572, 0.0185357],
[0.52196493, 0.45970395, 0.01833112],
[0.52144873, 0.46041886, 0.01813241],
[0.52094945, 0.46111123, 0.01793933],
]
)
xs = replicator_dynamics(timepoints=timepoints, A=M)
assert np.allclose(xs, expected_xs_over_time)
def test_replicator_dynamics_game_size_4_example_1():
M = np.array([[3, 2, 4, 2], [5, 1, 1, 3], [6, 2, 3, 2], [1, 3, 4, 7]])
y0 = np.array([0.2, 0.2, 0.5, 0.1])
timepoints = np.linspace(0, 10, 100)
expected_xs_over_time = np.array(
[
[2.00000000e-01, 2.00000000e-01, 5.00000000e-01, 1.00000000e-01],
[2.03014607e-01, 1.79775683e-01, 5.12598077e-01, 1.04611633e-01],
[2.05602634e-01, 1.61119562e-01, 5.24116145e-01, 1.09161659e-01],
[2.07780154e-01, 1.44008247e-01, 5.34544791e-01, 1.13666808e-01],
[2.09565963e-01, 1.28397079e-01, 5.43887293e-01, 1.18149665e-01],
[2.10980399e-01, 1.14224398e-01, 5.52156911e-01, 1.22638292e-01],
[2.12044290e-01, 1.01415511e-01, 5.59374243e-01, 1.27165955e-01],
[2.12777911e-01, 8.98868138e-02, 5.65564304e-01, 1.31770970e-01],
[2.13200213e-01, 7.95489630e-02, 5.70754057e-01, 1.36496767e-01],
[2.13328015e-01, 7.03100693e-02, 5.74969767e-01, 1.41392148e-01],
[2.13175377e-01, 6.20781199e-02, 5.78234667e-01, 1.46511836e-01],
[2.12753003e-01, 5.47629634e-02, 5.80566684e-01, 1.51917349e-01],
[2.12067694e-01, 4.82778457e-02, 5.81976254e-01, 1.57678207e-01],
[2.11121790e-01, 4.25404241e-02, 5.82464120e-01, 1.63873667e-01],
[2.09912560e-01, 3.74735370e-02, 5.82018990e-01, 1.70594913e-01],
[2.08431491e-01, 3.30056010e-02, 5.80614947e-01, 1.77947961e-01],
[2.06663415e-01, 2.90707653e-02, 5.78208424e-01, 1.86057396e-01],
[2.04585400e-01, 2.56088838e-02, 5.74734550e-01, 1.95071166e-01],
[2.02165324e-01, 2.25653431e-02, 5.70102601e-01, 2.05166732e-01],
[1.99360030e-01, 1.98907774e-02, 5.64190280e-01, 2.16558913e-01],
[1.96112947e-01, 1.75407096e-02, 5.56836486e-01, 2.29509857e-01],
[1.92351073e-01, 1.54751355e-02, 5.47832225e-01, 2.44341566e-01],
[1.87981233e-01, 1.36580686e-02, 5.36909436e-01, 2.61451263e-01],
[1.82885632e-01, 1.20570529e-02, 5.23727800e-01, 2.81329515e-01],
[1.76917104e-01, 1.06426524e-02, 5.07860610e-01, 3.04579633e-01],
[1.69895030e-01, 9.38792086e-03, 4.88782663e-01, 3.31934386e-01],
[1.61604457e-01, 8.26788100e-03, 4.65867494e-01, 3.64260168e-01],
[1.51803622e-01, 7.25907672e-03, 4.38409213e-01, 4.02528089e-01],
[1.40249665e-01, 6.33936318e-03, 4.05697635e-01, 4.47713338e-01],
[1.26757836e-01, 5.48826132e-03, 3.67191501e-01, 5.00562402e-01],
[1.11310569e-01, 4.68839622e-03, 3.22837907e-01, 5.61163128e-01],
[9.42142047e-02, 3.92847992e-03, 2.73531647e-01, 6.28325668e-01],
[7.62423767e-02, 3.20736701e-03, 2.21536256e-01, 6.99014000e-01],
[5.86232788e-02, 2.53656143e-03, 1.70448974e-01, 7.68391186e-01],
[4.27407988e-02, 1.93705559e-03, 1.24328358e-01, 8.30993787e-01],
[2.96454250e-02, 1.42958466e-03, 8.62637052e-02, 8.82661285e-01],
[1.97150138e-02, 1.02426416e-03, 5.73804221e-02, 9.21880300e-01],
[1.26941686e-02, 7.17049061e-04, 3.69516241e-02, 9.49637158e-01],
[7.98780274e-03, 4.93669057e-04, 2.32539272e-02, 9.68264601e-01],
[4.94920545e-03, 3.36053575e-04, 1.44088459e-02, 9.80305895e-01],
[3.03598214e-03, 2.27083826e-04, 8.83910216e-03, 9.87897832e-01],
[1.85064498e-03, 1.52736531e-04, 5.38817086e-03, 9.92608448e-01],
[1.12369181e-03, 1.02433840e-04, 3.27168224e-03, 9.95502192e-01],
[6.80655561e-04, 6.85759243e-05, 1.98177675e-03, 9.97268992e-01],
[4.11689813e-04, 4.58593611e-05, 1.19866970e-03, 9.98343781e-01],
[2.48785662e-04, 3.06476572e-05, 7.24362584e-04, 9.98996204e-01],
[1.50260753e-04, 2.04735127e-05, 4.37498917e-04, 9.99391767e-01],
[9.07242335e-05, 1.36735679e-05, 2.64152780e-04, 9.99631449e-01],
[5.47664709e-05, 9.13076963e-06, 1.59458220e-04, 9.99776645e-01],
[3.30562786e-05, 6.09669114e-06, 9.62467961e-05, 9.99864600e-01],
[1.99507373e-05, 4.07058812e-06, 5.80886620e-05, 9.99917890e-01],
[1.20411070e-05, 2.71775634e-06, 3.50589495e-05, 9.99950182e-01],
[7.26674553e-06, 1.81448136e-06, 2.11578956e-05, 9.99969761e-01],
[4.38544006e-06, 1.21140807e-06, 1.27686717e-05, 9.99981634e-01],
[2.64635182e-06, 8.08751772e-07, 7.70513293e-06, 9.99988840e-01],
[1.59658805e-06, 5.39894155e-07, 4.64863489e-06, 9.99993215e-01],
[9.63663665e-07, 3.60440823e-07, 2.80580866e-06, 9.99995870e-01],
[5.81458028e-07, 2.40616233e-07, 1.69297654e-06, 9.99997485e-01],
[3.50999339e-07, 1.60627592e-07, 1.02197170e-06, 9.99998466e-01],
[2.12105795e-07, 1.07247667e-07, 6.17568459e-07, 9.99999063e-01],
[1.28067075e-07, 7.16010679e-08, 3.72880837e-07, 9.99999427e-01],
[7.73743336e-08, 4.78056299e-08, 2.25283557e-07, 9.99999650e-01],
[4.68788643e-08, 3.19497919e-08, 1.36492772e-07, 9.99999785e-01],
[2.84194486e-08, 2.13634439e-08, 8.27462311e-08, 9.99999867e-01],
[1.70359632e-08, 1.42530879e-08, 4.96020091e-08, 9.99999919e-01],
[1.01359124e-08, 9.49314320e-09, 2.95117812e-08, 9.99999951e-01],
[6.01592329e-09, 6.30408466e-09, 1.75159971e-08, 9.99999970e-01],
[3.60293196e-09, 4.18726641e-09, 1.04903175e-08, 9.99999982e-01],
[2.21909257e-09, 2.79929916e-09, 6.46112278e-09, 9.99999989e-01],
[1.38252426e-09, 1.87597724e-09, 4.02536563e-09, 9.99999993e-01],
[8.55072319e-10, 1.25591890e-09, 2.48963351e-09, 9.99999995e-01],
[5.23161237e-10, 8.39645178e-10, 1.52323929e-09, 9.99999997e-01],
[3.19635397e-10, 5.61528892e-10, 9.30652271e-10, 9.99999998e-01],
[1.96274641e-10, 3.75788327e-10, 5.71474380e-10, 9.99999999e-01],
[7.90089904e-11, 2.01486586e-10, 2.30043033e-10, 9.99999999e-01],
[-2.75223088e-11, 4.43008850e-11, -8.01341155e-11, 1.00000000e00],
[-1.02459537e-10, -7.11494645e-11, -2.98321786e-10, 1.00000000e00],
[-1.45802696e-10, -1.44864462e-10, -4.24519978e-10, 1.00000000e00],
[-1.57551783e-10, -1.76844109e-10, -4.58728693e-10, 1.00000000e00],
[-1.37706801e-10, -1.67088404e-10, -4.00947928e-10, 1.00000000e00],
[-1.15830837e-10, -1.52277395e-10, -3.37253748e-10, 1.00000000e00],
[-1.27082255e-10, -1.76033398e-10, -3.70013445e-10, 1.00000000e00],
[-1.30203828e-10, -1.87167004e-10, -3.79102236e-10, 9.99999999e-01],
[-1.25195555e-10, -1.85678212e-10, -3.64520121e-10, 9.99999999e-01],
[-1.12057437e-10, -1.71567024e-10, -3.26267100e-10, 9.99999999e-01],
[-9.07894722e-11, -1.44833438e-10, -2.64343172e-10, 9.99999999e-01],
[-6.58177915e-11, -1.29826786e-10, -1.91635490e-10, 9.99999998e-01],
[-5.77524326e-11, -1.14827540e-10, -1.68152342e-10, 9.99999999e-01],
[-4.96870738e-11, -9.98282933e-11, -1.44669194e-10, 9.99999999e-01],
[-4.16217149e-11, -8.48290467e-11, -1.21186046e-10, 9.99999999e-01],
[-3.35563561e-11, -6.98298001e-11, -9.77028982e-11, 9.99999999e-01],
[-2.54909973e-11, -5.48305535e-11, -7.42197503e-11, 9.99999999e-01],
[-1.74256384e-11, -3.98313069e-11, -5.07366023e-11, 9.99999999e-01],
[-1.56239261e-11, -3.63303742e-11, -4.54907255e-11, 9.99999999e-01],
[-1.52023171e-11, -3.53629278e-11, -4.42631659e-11, 9.99999999e-01],
[-1.47807080e-11, -3.43954813e-11, -4.30356063e-11, 9.99999999e-01],
[-1.43590990e-11, -3.34280348e-11, -4.18080466e-11, 9.99999999e-01],
[-1.39374900e-11, -3.24605883e-11, -4.05804870e-11, 9.99999999e-01],
[-1.35158810e-11, -3.14931418e-11, -3.93529274e-11, 9.99999999e-01],
[-1.30942720e-11, -3.05256954e-11, -3.81253678e-11, 9.99999999e-01],
]
)
xs = replicator_dynamics(y0=y0, timepoints=timepoints, A=M)
assert np.allclose(xs, expected_xs_over_time)
def test_replicator_dynamics_game_size_4_example_2():
M = np.array([[3, 2, 4, 2], [5, 1, 1, 3], [6, 2, 3, 2], [1, 3, 4, 7]])
y0 = np.array([0.6, 0.1, 0.2, 0.1])
timepoints = np.linspace(0, 10, 100)
expected_xs_over_time = np.array(
[
[6.00000000e-01, 1.00000000e-01, 2.00000000e-01, 1.00000000e-01],
[5.80420179e-01, 1.02112104e-01, 2.26438063e-01, 9.10296545e-02],
[5.60703224e-01, 1.02764556e-01, 2.53803256e-01, 8.27289637e-02],
[5.41167086e-01, 1.01977465e-01, 2.81742410e-01, 7.51130396e-02],
[5.22074545e-01, 9.98438659e-02, 3.09908715e-01, 6.81728742e-02],
[5.03628230e-01, 9.65147322e-02, 3.37975461e-01, 6.18815768e-02],
[4.85972882e-01, 9.21809929e-02, 3.65645769e-01, 5.62003554e-02],
[4.69202136e-01, 8.70552790e-02, 3.92658949e-01, 5.10836358e-02],
[4.53367471e-01, 8.13554217e-02, 4.18794068e-01, 4.64830393e-02],
[4.38487551e-01, 7.52909495e-02, 4.43871296e-01, 4.23502030e-02],
[4.24556757e-01, 6.90530159e-02, 4.67751586e-01, 3.86386415e-02],
[4.11552390e-01, 6.28078193e-02, 4.90334955e-01, 3.53048365e-02],
[3.99440406e-01, 5.66931176e-02, 5.11557695e-01, 3.23087817e-02],
[3.88179692e-01, 5.08174781e-02, 5.31388695e-01, 2.96141349e-02],
[3.77725273e-01, 4.52612790e-02, 5.49825265e-01, 2.71881840e-02],
[3.68030476e-01, 4.00793316e-02, 5.66888559e-01, 2.50016336e-02],
[3.59048444e-01, 3.53042210e-02, 5.82618984e-01, 2.30283506e-02],
[3.50733127e-01, 3.09500459e-02, 5.97071747e-01, 2.12450802e-02],
[3.43039913e-01, 2.70161764e-02, 6.10312748e-01, 1.96311622e-02],
[3.35926019e-01, 2.34907933e-02, 6.22414927e-01, 1.81682605e-02],
[3.29350717e-01, 2.03540183e-02, 6.33455151e-01, 1.68401144e-02],
[3.23275448e-01, 1.75806009e-02, 6.43511642e-01, 1.56323094e-02],
[3.17663866e-01, 1.51421124e-02, 6.52661949e-01, 1.45320720e-02],
[3.12481831e-01, 1.30086774e-02, 6.60981407e-01, 1.35280845e-02],
[3.07697368e-01, 1.11502875e-02, 6.68542025e-01, 1.26103192e-02],
[3.03280608e-01, 9.53775700e-03, 6.75411745e-01, 1.17698907e-02],
[2.99203707e-01, 8.14336365e-03, 6.81654005e-01, 1.09989243e-02],
[2.95440763e-01, 6.94128233e-03, 6.87327517e-01, 1.02904379e-02],
[2.91967717e-01, 5.90780631e-03, 6.92486238e-01, 9.63823834e-03],
[2.88762258e-01, 5.02143667e-03, 6.97179477e-01, 9.03682838e-03],
[2.85803721e-01, 4.26287314e-03, 7.01452081e-01, 8.48132511e-03],
[2.83072986e-01, 3.61493852e-03, 7.05344689e-01, 7.96738728e-03],
[2.80552380e-01, 3.06245306e-03, 7.08894015e-01, 7.49115128e-03],
[2.78225583e-01, 2.59208318e-03, 7.12133159e-01, 7.04917476e-03],
[2.76077528e-01, 2.19217981e-03, 7.15091905e-01, 6.63838682e-03],
[2.74094319e-01, 1.85260717e-03, 7.17797029e-01, 6.25604450e-03],
[2.72263140e-01, 1.56458788e-03, 7.20272578e-01, 5.89969385e-03],
[2.70572180e-01, 1.32054238e-03, 7.22540141e-01, 5.56713642e-03],
[2.69010552e-01, 1.11394543e-03, 7.24619103e-01, 5.25639953e-03],
[2.67568225e-01, 9.39195127e-04, 7.26526869e-01, 4.96571001e-03],
[2.66235959e-01, 7.91492823e-04, 7.28279077e-01, 4.69347140e-03],
[2.65005238e-01, 6.66737179e-04, 7.29889781e-01, 4.43824386e-03],
[2.63868220e-01, 5.61428279e-04, 7.31371625e-01, 4.19872636e-03],
[2.62817677e-01, 4.72584699e-04, 7.32735997e-01, 3.97374121e-03],
[2.61846950e-01, 3.97670564e-04, 7.33993159e-01, 3.76222045e-03],
[2.60949904e-01, 3.34531655e-04, 7.35152371e-01, 3.56319374e-03],
[2.60120883e-01, 2.81339801e-04, 7.36222000e-01, 3.37577783e-03],
[2.59354674e-01, 2.36545608e-04, 7.37209613e-01, 3.19916730e-03],
[2.58646473e-01, 1.98836743e-04, 7.38122064e-01, 3.03262621e-03],
[2.57991847e-01, 1.67102937e-04, 7.38965569e-01, 2.87548099e-03],
[2.57386712e-01, 1.40406606e-04, 7.39745767e-01, 2.72711416e-03],
[2.56827297e-01, 1.17953508e-04, 7.40467791e-01, 2.58695819e-03],
[2.56310127e-01, 9.90745170e-05, 7.41136308e-01, 2.45449105e-03],
[2.55831991e-01, 8.32032628e-05, 7.41755574e-01, 2.32923122e-03],
[2.55389930e-01, 6.98644087e-05, 7.42329471e-01, 2.21073444e-03],
[2.54981210e-01, 5.86559924e-05, 7.42861544e-01, 2.09858959e-03],
[2.54603308e-01, 4.92394766e-05, 7.43355037e-01, 1.99241593e-03],
[2.54253894e-01, 4.13300061e-05, 7.43812916e-01, 1.89186027e-03],
[2.53930817e-01, 3.46872608e-05, 7.44237902e-01, 1.79659435e-03],
[2.53632090e-01, 2.91099509e-05, 7.44632487e-01, 1.70631297e-03],
[2.53355875e-01, 2.44261534e-05, 7.44998968e-01, 1.62073123e-03],
[2.53100479e-01, 2.04943791e-05, 7.45339443e-01, 1.53958396e-03],
[2.52864331e-01, 1.71936064e-05, 7.45655852e-01, 1.46262303e-03],
[2.52645987e-01, 1.44237080e-05, 7.45949973e-01, 1.38961674e-03],
[2.52444104e-01, 1.20990188e-05, 7.46223449e-01, 1.32034782e-03],
[2.52257447e-01, 1.01486469e-05, 7.46477791e-01, 1.25461296e-03],
[2.52084872e-01, 8.51182213e-06, 7.46714395e-01, 1.19222110e-03],
[2.51925320e-01, 7.13869064e-06, 7.46934548e-01, 1.13299311e-03],
[2.51777814e-01, 5.98662919e-06, 7.47139439e-01, 1.07676038e-03],
[2.51641450e-01, 5.02038939e-06, 7.47330165e-01, 1.02336449e-03],
[2.51515390e-01, 4.20987155e-06, 7.47507744e-01, 9.72656120e-04],
[2.51398863e-01, 3.53027631e-06, 7.47673112e-01, 9.24494781e-04],
[2.51291152e-01, 2.96001271e-06, 7.47827141e-01, 8.78747738e-04],
[2.51191595e-01, 2.48172331e-06, 7.47970634e-01, 8.35290020e-04],
[2.51099580e-01, 2.08057621e-06, 7.48104336e-01, 7.94003505e-04],
[2.51014541e-01, 1.74422498e-06, 7.48228938e-01, 7.54776690e-04],
[2.50935955e-01, 1.46226491e-06, 7.48345078e-01, 7.17504179e-04],
[2.50863336e-01, 1.22585116e-06, 7.48453351e-01, 6.82086278e-04],
[2.50796237e-01, 1.02769432e-06, 7.48554306e-01, 6.48428765e-04],
[2.50734243e-01, 8.61444851e-07, 7.48648453e-01, 6.16442360e-04],
[2.50676969e-01, 7.22075292e-07, 7.48736266e-01, 5.86042659e-04],
[2.50624061e-01, 6.05224449e-07, 7.48818184e-01, 5.57149616e-04],
[2.50575191e-01, 5.07304773e-07, 7.48894614e-01, 5.29687483e-04],
[2.50530054e-01, 4.25231575e-07, 7.48965936e-01, 5.03584414e-04],
[2.50488370e-01, 3.56536646e-07, 7.49032501e-01, 4.78772412e-04],
[2.50449879e-01, 2.99072838e-07, 7.49094635e-01, 4.55186947e-04],
[2.50414339e-01, 2.50787707e-07, 7.49152643e-01, 4.32766625e-04],
[2.50381527e-01, 2.09990839e-07, 7.49206810e-01, 4.11453246e-04],
[2.50351237e-01, 1.75740049e-07, 7.49257395e-01, 3.91191937e-04],
[2.50323280e-01, 1.46960312e-07, 7.49304642e-01, 3.71930334e-04],
[2.50297478e-01, 1.22681547e-07, 7.49348781e-01, 3.53618629e-04],
[2.50273668e-01, 1.02313601e-07, 7.49390020e-01, 3.36209829e-04],
[2.50251700e-01, 8.53321199e-08, 7.49428555e-01, 3.19659221e-04],
[2.50231434e-01, 7.12334339e-08, 7.49464570e-01, 3.03924199e-04],
[2.50212740e-01, 5.95800286e-08, 7.49498236e-01, 2.88964386e-04],
[2.50195501e-01, 4.99771822e-08, 7.49529708e-01, 2.74741555e-04],
[2.50179604e-01, 4.19815889e-08, 7.49559135e-01, 2.61219233e-04],
[2.50164947e-01, 3.51538946e-08, 7.49586655e-01, 2.48362728e-04],
[2.50151436e-01, 2.93685983e-08, 7.49612395e-01, 2.36139268e-04],
[2.50138985e-01, 2.44612938e-08, 7.49636473e-01, 2.24517619e-04],
]
)
xs = replicator_dynamics(y0=y0, timepoints=timepoints, A=M)
assert np.allclose(xs, expected_xs_over_time)
def test_replicator_dynamics_game_size_4_default_y0_example_1():
M = np.array([[3, 2, 4, 2], [5, 1, 1, 3], [6, 2, 3, 2], [1, 3, 4, 7]])
timepoints = np.linspace(0, 10, 100)
expected_xs_over_time = np.array(
[
[2.50000000e-01, 2.50000000e-01, 2.50000000e-01, 2.50000000e-01],
[2.41719166e-01, 2.35735156e-01, 2.53873636e-01, 2.68672042e-01],
[2.32546984e-01, 2.21360179e-01, 2.55769777e-01, 2.90323060e-01],
[2.22291827e-01, 2.06871196e-01, 2.55261786e-01, 3.15575190e-01],
[2.10731484e-01, 1.92233270e-01, 2.51864185e-01, 3.45171061e-01],
[1.97619659e-01, 1.77375409e-01, 2.45043522e-01, 3.79961410e-01],
[1.82707977e-01, 1.62190468e-01, 2.34255381e-01, 4.20846174e-01],
[1.65796132e-01, 1.46547147e-01, 2.19029301e-01, 4.68627420e-01],
[1.46825571e-01, 1.30325960e-01, 1.99128685e-01, 5.23719784e-01],
[1.26021893e-01, 1.13492486e-01, 1.74796565e-01, 5.85689056e-01],
[1.04051013e-01, 9.62074113e-02, 1.47031763e-01, 6.52709813e-01],
[8.20816740e-02, 7.89301100e-02, 1.17717959e-01, 7.21270257e-01],
[6.16068147e-02, 6.24180199e-02, 8.93556151e-02, 7.86619550e-01],
[4.39995702e-02, 4.75413069e-02, 6.43425051e-02, 8.44116618e-01],
[3.00443142e-02, 3.49797125e-02, 4.41856313e-02, 8.90790342e-01],
[1.97703777e-02, 2.50057355e-02, 2.91865133e-02, 9.26037374e-01],
[1.26494877e-02, 1.74854076e-02, 1.87200451e-02, 9.51145060e-01],
[7.93309603e-03, 1.20349937e-02, 1.17584783e-02, 9.68273432e-01],
[4.90797650e-03, 8.19467468e-03, 7.28166293e-03, 9.79615686e-01],
[3.00928572e-03, 5.54018743e-03, 4.46734830e-03, 9.86983179e-01],
[1.83444440e-03, 3.72835899e-03, 2.72426145e-03, 9.91712935e-01],
[1.11413336e-03, 2.50170133e-03, 1.65492179e-03, 9.94729244e-01],
[6.75070430e-04, 1.67550183e-03, 1.00287691e-03, 9.96646551e-01],
[4.08428511e-04, 1.12084196e-03, 6.06805960e-04, 9.97863924e-01],
[2.46874544e-04, 7.49242911e-04, 3.66801756e-04, 9.98637081e-01],
[1.49135183e-04, 5.00608887e-04, 2.21588946e-04, 9.99128667e-01],
[9.00578701e-05, 3.34385059e-04, 1.33812730e-04, 9.99441744e-01],
[5.43699910e-05, 2.23313206e-04, 8.07866707e-05, 9.99641530e-01],
[3.28200257e-05, 1.49118817e-04, 4.87665696e-05, 9.99769295e-01],
[1.98098290e-05, 9.95680219e-05, 2.94351134e-05, 9.99851187e-01],
[1.19560257e-05, 6.64791103e-05, 1.77653131e-05, 9.99903800e-01],
[7.21588878e-06, 4.43855409e-05, 1.07220167e-05, 9.99937677e-01],
[4.35460889e-06, 2.96335830e-05, 6.47047541e-06, 9.99959541e-01],
[2.62795277e-06, 1.97844865e-05, 3.90485418e-06, 9.99973683e-01],
[1.58581836e-06, 1.32085926e-05, 2.35635571e-06, 9.99982849e-01],
[9.57098845e-07, 8.81857690e-06, 1.42214629e-06, 9.99988802e-01],
[5.77181917e-07, 5.88651878e-06, 8.57630601e-07, 9.99992679e-01],
[3.48017960e-07, 3.92912327e-06, 5.17117549e-07, 9.99995206e-01],
[2.10432900e-07, 2.62354883e-06, 3.12680828e-07, 9.99996853e-01],
[1.27160084e-07, 1.75168432e-06, 1.88946318e-07, 9.99997932e-01],
[7.69138599e-08, 1.17045061e-06, 1.14285800e-07, 9.99998638e-01],
[4.67857994e-08, 7.82807429e-07, 6.95187050e-08, 9.99999101e-01],
[2.81940356e-08, 5.22518381e-07, 4.18933223e-08, 9.99999407e-01],
[1.66353720e-08, 3.46827263e-07, 2.47183847e-08, 9.99999612e-01],
[9.92969426e-09, 2.31604309e-07, 1.47544662e-08, 9.99999744e-01],
[5.86008245e-09, 1.52926389e-07, 8.70745685e-09, 9.99999833e-01],
[3.61260541e-09, 1.01921785e-07, 5.36794489e-09, 9.99999889e-01],
[2.35820495e-09, 6.89433682e-08, 3.50403969e-09, 9.99999925e-01],
[1.54512934e-09, 4.67336874e-08, 2.29589660e-09, 9.99999949e-01],
[9.91566874e-10, 3.15653821e-08, 1.47336228e-09, 9.99999966e-01],
[6.23213391e-10, 2.13755295e-08, 9.26028545e-10, 9.99999977e-01],
[3.89169337e-10, 1.45088069e-08, 5.78264090e-10, 9.99999985e-01],
[2.44232046e-10, 9.82444655e-09, 3.62902722e-10, 9.99999990e-01],
[1.55958046e-10, 6.64441716e-09, 2.31736960e-10, 9.99999993e-01],
[1.01833748e-10, 4.50886059e-09, 1.51314038e-10, 9.99999995e-01],
[6.61625623e-11, 3.05585415e-09, 9.83104808e-11, 9.99999997e-01],
[4.22829112e-11, 2.06460089e-09, 6.28278830e-11, 9.99999998e-01],
[2.66805156e-11, 1.40000880e-09, 3.96443958e-11, 9.99999999e-01],
[1.67673488e-11, 9.49807666e-10, 2.49144894e-11, 9.99999999e-01],
[1.05528316e-11, 6.42239872e-10, 1.56803796e-11, 9.99999999e-01],
[2.03838562e-12, 2.61718511e-10, 3.02882291e-12, 1.00000000e00],
[-4.66662938e-12, -4.40298241e-11, -6.93411260e-12, 1.00000000e00],
[-9.48563948e-12, -2.72252392e-10, -1.40946462e-11, 1.00000000e00],
[-1.24186447e-11, -4.22949193e-10, -1.84527780e-11, 1.00000000e00],
[-1.34656449e-11, -4.96120226e-10, -2.00085079e-11, 1.00000000e00],
[-1.26266403e-11, -4.91765491e-10, -1.87618360e-11, 1.00000000e00],
[-9.90163080e-12, -4.09884990e-10, -1.47127621e-11, 1.00000000e00],
[-9.10831756e-12, -3.98815250e-10, -1.35339828e-11, 9.99999999e-01],
[-9.61446910e-12, -4.33990960e-10, -1.42860696e-11, 9.99999999e-01],
[-9.68711978e-12, -4.48077852e-10, -1.43940204e-11, 9.99999999e-01],
[-9.32626962e-12, -4.41075926e-10, -1.38578353e-11, 9.99999999e-01],
[-8.53191859e-12, -4.12985183e-10, -1.26775142e-11, 9.99999999e-01],
[-7.30406672e-12, -3.63805622e-10, -1.08530573e-11, 9.99999999e-01],
[-5.64271399e-12, -2.93537243e-10, -8.38446446e-12, 9.99999999e-01],
[-3.76042064e-12, -2.46757395e-10, -5.58757547e-12, 9.99999998e-01],
[-3.34138439e-12, -2.20697822e-10, -4.96493302e-12, 9.99999999e-01],
[-2.92234814e-12, -1.94638250e-10, -4.34229056e-12, 9.99999999e-01],
[-2.50331189e-12, -1.68578677e-10, -3.71964811e-12, 9.99999999e-01],
[-2.08427564e-12, -1.42519105e-10, -3.09700565e-12, 9.99999999e-01],
[-1.66523938e-12, -1.16459532e-10, -2.47436319e-12, 9.99999999e-01],
[-1.24620313e-12, -9.03999598e-11, -1.85172074e-12, 9.99999999e-01],
[-8.29582639e-13, -6.44890104e-11, -1.23266784e-12, 9.99999999e-01],
[-8.11410998e-13, -6.30915593e-11, -1.20566680e-12, 9.99999999e-01],
[-7.93239357e-13, -6.16941082e-11, -1.17866576e-12, 9.99999999e-01],
[-7.75067715e-13, -6.02966571e-11, -1.15166472e-12, 9.99999999e-01],
[-7.56896074e-13, -5.88992060e-11, -1.12466368e-12, 9.99999999e-01],
[-7.38724433e-13, -5.75017549e-11, -1.09766264e-12, 9.99999999e-01],
[-7.20552791e-13, -5.61043038e-11, -1.07066160e-12, 9.99999999e-01],
[-7.02381150e-13, -5.47068527e-11, -1.04366056e-12, 9.99999999e-01],
[-6.84209509e-13, -5.33094016e-11, -1.01665952e-12, 9.99999999e-01],
[-6.66037867e-13, -5.19119505e-11, -9.89658484e-13, 9.99999999e-01],
[-6.47866226e-13, -5.05144994e-11, -9.62657445e-13, 9.99999999e-01],
[-6.29694585e-13, -4.91170483e-11, -9.35656406e-13, 9.99999999e-01],
[-6.11522943e-13, -4.77195972e-11, -9.08655366e-13, 9.99999999e-01],
[-5.93351302e-13, -4.63221461e-11, -8.81654327e-13, 9.99999999e-01],
[-5.75179661e-13, -4.49246950e-11, -8.54653288e-13, 9.99999999e-01],
[-5.57008019e-13, -4.35272439e-11, -8.27652249e-13, 9.99999999e-01],
[-5.38836378e-13, -4.21297928e-11, -8.00651210e-13, 9.99999999e-01],
[-5.20664737e-13, -4.07323417e-11, -7.73650170e-13, 9.99999999e-01],
[-5.02493095e-13, -3.93348906e-11, -7.46649131e-13, 9.99999999e-01],
]
)
xs = replicator_dynamics(timepoints=timepoints, A=M)
assert np.allclose(xs, expected_xs_over_time)
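# In the size-4 examples above the dynamics converge to the pure strategy
# (0, 0, 0, 1); the tiny negative entries late in each trajectory are
# integration noise from the ODE solver, which np.allclose tolerates.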
def test_replicator_dynamics_with_incorrect_inputs():
"""
Test that if an incorrect starting value is given, an error is raised
"""
M = np.array([[3, 2, 4, 2], [5, 1, 1, 3], [6, 2, 3, 2], [1, 3, 4, 7]])
y0 = np.array([1, 0, 0])
with pytest.raises(ValueError):
replicator_dynamics(y0=y0, A=M)
@given(A=arrays(np.int8, (3, 2)), B=arrays(np.int8, (3, 2)))
def test_property_get_derivative_of_asymmetric_fitness(A, B):
"""
Property-based test of get_derivative_of_asymmetric_fitness for a 3x2 game
"""
t = 0
x = np.ones(A.shape[1] + A.shape[0])
derivative_of_fitness = get_derivative_of_asymmetric_fitness(x, t, A, B)
assert len(derivative_of_fitness) == len(x)
def test_get_derivative_of_asymmetric_fitness_example():
"""
Test for the asymmetric derivative of fitness function
"""
M = np.array([[3, 2, 3], [4, 1, 1], [2, 3, 1]])
N = np.array([[1, 2, 3], [3, 2, 1], [2, 1, 3]])
x_values = (
np.array([1, 0, 0, 1, 0, 0]),
np.array([1 / 2, 1 / 2, 0, 1 / 2, 1 / 2, 0]),
np.array([0, 1 / 4, 3 / 4, 0, 1 / 4, 3 / 4]),
np.array([1 / 5, 2 / 5, 2 / 5, 1 / 5, 2 / 5, 2 / 5]),
np.array([1 / 2, 0, 1 / 2, 1 / 2, 0, 1 / 2]),
np.array([2 / 4, 1 / 4, 1 / 4, 2 / 4, 1 / 4, 1 / 4]),
)
derivative_values = (
np.array([0, 0, 0, 0, 0, 0]),
np.array([0, 0, 0, 0, 0, 0]),
np.array([0.0, -0.09375, 0.09375, 0.0, -0.234375, 0.234375]),
np.array([0.128, -0.144, 0.016, 0.048, -0.144, 0.096]),
np.array([0.375, 0.0, -0.375, -0.375, 0.0, 0.375]),
np.array([0.125, 0.0, -0.125, -0.09375, -0.046875, 0.140625]),
)
for x_value, expected_derivative in zip(x_values, derivative_values):
derivative = get_derivative_of_asymmetric_fitness(x=x_value, t=0, A=M, B=N)
assert np.allclose(derivative, expected_derivative), x_value
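# A sketch of the coupled dynamics assumed by the function above: with the
# state split into x (row population) and y (column population),
#   dx_i/dt = x_i * ((A y)_i - x . A y)
#   dy_j/dt = y_j * ((x^T B)_j - x . B y)
# and the two derivative vectors are concatenated back into one state vector.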
@settings(max_examples=10)
@given(
A=arrays(np.int8, (4, 2), elements=integers(0, 100)),
B=arrays(np.int8, (4, 2), elements=integers(0, 100)),
)
def test_property_of_output_dimension_for_asymmetric_games_of_size_4_2(A, B):
"""
Property-based test of asymmetric_replicator_dynamics for a 4x2 game
"""
xs1, xs2 = asymmetric_replicator_dynamics(A, B)
assert all(len(x) == 4 for x in xs1)
assert all(len(x) == 2 for x in xs2)
@given(A=arrays(np.int8, shape=(2, 2), elements=integers(1, 5)))
def test_equivalence_between_symmetric_and_asymmetric_replicator_dynamics(A):
"""
Tests that when we have two populations with identical strategies then the
output of the asymmetric_replicator_dynamics for both populations is the
same as using just one population in replicator_dynamics. The test is
carried out for 2x2 matrices with elements from 1-5
    Note that hypothesis can find cases where this test fails for larger
    elements or larger matrix sizes. One potential reason for this might be
    that scipy's odeint() is a legacy integrator with limited accuracy.
"""
B = A.transpose()
symmetric_xs = replicator_dynamics(A)
asymmetric_row_xs, asymmetric_col_xs = asymmetric_replicator_dynamics(A, B)
assert np.allclose(asymmetric_row_xs, asymmetric_col_xs, atol=1e-3)
assert np.allclose(symmetric_xs, asymmetric_row_xs, atol=1e-3)
assert np.allclose(symmetric_xs, asymmetric_col_xs, atol=1e-3)
def test_asymmetric_replicator_dynamics_size_2_3_default_values():
"""
Test the asymmetric replicator dynamics function for a 2x3 game by using
the default values
"""
A = np.array([[1, 2, 3], [4, 5, 6]])
B = np.array([[7, 8, 9], [10, 11, 12]])
xs_A, xs_B = asymmetric_replicator_dynamics(A, B)
assert np.allclose(xs_A[1], np.array([0.49249308, 0.50750692]), atol=1e-5)
assert np.allclose(xs_A[-1], np.array([9.33624531e-14, 1]), atol=1e-5)
assert np.allclose(
xs_B[1], np.array([0.33000229, 0.3333222, 0.33667551]), atol=1e-5
)
assert np.allclose(
xs_B[-1],
np.array([2.04812640e-09, 4.53898590e-05, 9.99954607e-01]),
atol=1e-5,
)
def test_asymmetric_replicator_dynamics_size_2_3_given_timepoints():
"""
Test the asymmetric replicator dynamics function for a 2x3 game and not
using the default timepoints
"""
timepoints = np.linspace(0, 100, 100)
A = np.array([[1, 1, 2], [2, 3, 2]])
B = np.array([[1, 2, 2], [2, 1, 3]])
xs_A, xs_B = asymmetric_replicator_dynamics(A, B, timepoints=timepoints)
assert np.allclose(xs_A[1], np.array([0.30904906, 0.69095094]))
assert np.allclose(xs_B[1], np.array([0.2196786, 0.1771107, 0.6032107]))
assert np.allclose(xs_A[-1], np.array([0.2, 0.8]))
assert np.allclose(xs_B[-1], np.array([-6.57013390e-14, 2.92761632e-17, 1]))
def test_asymmetric_replicator_dynamics_size_4_6_given_x0_y0():
"""
Test the asymmetric replicator dynamics function for a 4x6 game by
specifying values for x0 and y0
"""
A = np.array(
[
[1, 20, 23, 21, 15, 4],
[9, 29, 0, 14, 19, 27],
[22, 28, 30, 12, 3, 25],
[5, 16, 8, 17, 11, 18],
]
)
B = np.array(
[
[11, 39, 27, 15, 36, 35],
[1, 31, 2, 18, 10, 19],
[21, 38, 8, 24, 40, 32],
[22, 37, 25, 7, 30, 0],
]
)
x0 = np.array([0.5, 0.2, 0.2, 0.1])
y0 = np.array([0.4, 0.1, 0.1, 0.1, 0.2, 0.1])
xs_A, xs_B = asymmetric_replicator_dynamics(A, B, x0=x0, y0=y0)
assert np.allclose(
xs_A[1], np.array([0.48729326, 0.20349646, 0.21191178, 0.0972985])
)
assert np.allclose(
xs_A[-1],
np.array([-2.50483397e-15, 9.99977992e-01, 2.20078313e-05, 1.18367977e-17]),
)
assert np.allclose(
xs_B[1],
np.array(
[
0.36455939,
0.11688505,
0.096508,
0.09537898,
0.22015362,
0.10651496,
]
),
)
assert np.allclose(
xs_B[-1],
np.array(
[
4.58211507e-12,
1.00000000e00,
8.73932312e-12,
1.58763628e-18,
-1.22965529e-14,
-9.91094095e-17,
]
),
)
| mit | 8,769,032,210,956,131,000 | 47.587363 | 84 | 0.561769 | false |
polyaxon/polyaxon | sdks/python/http_client/v1/polyaxon_sdk/models/v1_early_stopping.py | 1 | 6790 | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.10.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1EarlyStopping(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'median': 'V1MedianStoppingPolicy',
'diff': 'V1DiffStoppingPolicy',
'truncation': 'V1TruncationStoppingPolicy',
'metric': 'V1MetricEarlyStopping',
'failure': 'V1FailureEarlyStopping'
}
attribute_map = {
'median': 'median',
'diff': 'diff',
'truncation': 'truncation',
'metric': 'metric',
'failure': 'failure'
}
def __init__(self, median=None, diff=None, truncation=None, metric=None, failure=None, local_vars_configuration=None): # noqa: E501
"""V1EarlyStopping - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._median = None
self._diff = None
self._truncation = None
self._metric = None
self._failure = None
self.discriminator = None
if median is not None:
self.median = median
if diff is not None:
self.diff = diff
if truncation is not None:
self.truncation = truncation
if metric is not None:
self.metric = metric
if failure is not None:
self.failure = failure
@property
def median(self):
"""Gets the median of this V1EarlyStopping. # noqa: E501
:return: The median of this V1EarlyStopping. # noqa: E501
:rtype: V1MedianStoppingPolicy
"""
return self._median
@median.setter
def median(self, median):
"""Sets the median of this V1EarlyStopping.
:param median: The median of this V1EarlyStopping. # noqa: E501
:type: V1MedianStoppingPolicy
"""
self._median = median
@property
def diff(self):
"""Gets the diff of this V1EarlyStopping. # noqa: E501
:return: The diff of this V1EarlyStopping. # noqa: E501
:rtype: V1DiffStoppingPolicy
"""
return self._diff
@diff.setter
def diff(self, diff):
"""Sets the diff of this V1EarlyStopping.
:param diff: The diff of this V1EarlyStopping. # noqa: E501
:type: V1DiffStoppingPolicy
"""
self._diff = diff
@property
def truncation(self):
"""Gets the truncation of this V1EarlyStopping. # noqa: E501
:return: The truncation of this V1EarlyStopping. # noqa: E501
:rtype: V1TruncationStoppingPolicy
"""
return self._truncation
@truncation.setter
def truncation(self, truncation):
"""Sets the truncation of this V1EarlyStopping.
:param truncation: The truncation of this V1EarlyStopping. # noqa: E501
:type: V1TruncationStoppingPolicy
"""
self._truncation = truncation
@property
def metric(self):
"""Gets the metric of this V1EarlyStopping. # noqa: E501
:return: The metric of this V1EarlyStopping. # noqa: E501
:rtype: V1MetricEarlyStopping
"""
return self._metric
@metric.setter
def metric(self, metric):
"""Sets the metric of this V1EarlyStopping.
:param metric: The metric of this V1EarlyStopping. # noqa: E501
:type: V1MetricEarlyStopping
"""
self._metric = metric
@property
def failure(self):
"""Gets the failure of this V1EarlyStopping. # noqa: E501
:return: The failure of this V1EarlyStopping. # noqa: E501
:rtype: V1FailureEarlyStopping
"""
return self._failure
@failure.setter
def failure(self, failure):
"""Sets the failure of this V1EarlyStopping.
:param failure: The failure of this V1EarlyStopping. # noqa: E501
:type: V1FailureEarlyStopping
"""
self._failure = failure
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1EarlyStopping):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1EarlyStopping):
return True
return self.to_dict() != other.to_dict()
| apache-2.0 | -2,446,098,202,475,685,000 | 27.174274 | 136 | 0.595582 | false |
scaramallion/pynetdicom | pynetdicom/apps/storescu/storescu.py | 1 | 9028 | #!/usr/bin/env python
"""A Storage SCU application.
Used for transferring DICOM SOP Instances to a Storage SCP.
"""
import argparse
import os
from pathlib import Path
import sys
from pydicom import dcmread
from pydicom.errors import InvalidDicomError
from pydicom.uid import (
ExplicitVRLittleEndian, ImplicitVRLittleEndian,
ExplicitVRBigEndian, DeflatedExplicitVRLittleEndian
)
from pynetdicom import AE, StoragePresentationContexts
from pynetdicom.apps.common import setup_logging, get_files
from pynetdicom._globals import DEFAULT_MAX_LENGTH
from pynetdicom.status import STORAGE_SERVICE_CLASS_STATUS
__version__ = '0.3.0'
def _setup_argparser():
"""Setup the command line arguments"""
# Description
parser = argparse.ArgumentParser(
description=(
"The storescu application implements a Service Class User "
"(SCU) for the Storage Service Class. For each DICOM "
"file on the command line it sends a C-STORE message to a "
"Storage Service Class Provider (SCP) and waits for a response."
),
usage="storescu [options] addr port path"
)
# Parameters
req_opts = parser.add_argument_group('Parameters')
req_opts.add_argument(
"addr", help="TCP/IP address or hostname of DICOM peer", type=str
)
req_opts.add_argument("port", help="TCP/IP port number of peer", type=int)
req_opts.add_argument(
"path", metavar="path", nargs='+',
help="DICOM file or directory to be transmitted",
type=str
)
# General Options
gen_opts = parser.add_argument_group('General Options')
gen_opts.add_argument(
"--version",
help="print version information and exit",
action="store_true"
)
output = gen_opts.add_mutually_exclusive_group()
output.add_argument(
"-q", "--quiet",
help="quiet mode, print no warnings and errors",
action="store_const",
dest='log_type', const='q'
)
output.add_argument(
"-v", "--verbose",
help="verbose mode, print processing details",
action="store_const",
dest='log_type', const='v'
)
output.add_argument(
"-d", "--debug",
help="debug mode, print debug information",
action="store_const",
dest='log_type', const='d'
)
gen_opts.add_argument(
"-ll", "--log-level", metavar='[l]',
help=(
"use level l for the logger (critical, error, warn, info, debug)"
),
type=str,
choices=['critical', 'error', 'warn', 'info', 'debug']
)
# Input Options
in_opts = parser.add_argument_group('Input Options')
in_opts.add_argument(
'-r', '--recurse',
help="recursively search the given directory",
action="store_true"
)
# Network Options
net_opts = parser.add_argument_group('Network Options')
net_opts.add_argument(
"-aet", "--calling-aet", metavar='[a]etitle',
help="set my calling AE title (default: STORESCU)",
type=str,
default='STORESCU'
)
net_opts.add_argument(
"-aec", "--called-aet", metavar='[a]etitle',
help="set called AE title of peer (default: ANY-SCP)",
type=str,
default='ANY-SCP'
)
net_opts.add_argument(
"-ta", "--acse-timeout", metavar='[s]econds',
help="timeout for ACSE messages (default: 30 s)",
type=float,
default=30
)
net_opts.add_argument(
"-td", "--dimse-timeout", metavar='[s]econds',
help="timeout for DIMSE messages (default: 30 s)",
type=float,
default=30
)
net_opts.add_argument(
"-tn", "--network-timeout", metavar='[s]econds',
help="timeout for the network (default: 30 s)",
type=float,
default=30
)
net_opts.add_argument(
"-pdu", "--max-pdu", metavar='[n]umber of bytes',
help=(
f"set max receive pdu to n bytes (0 for unlimited, "
f"default: {DEFAULT_MAX_LENGTH})"
),
type=int,
default=DEFAULT_MAX_LENGTH
)
# Transfer Syntaxes
ts_opts = parser.add_argument_group("Transfer Syntax Options")
syntax = ts_opts.add_mutually_exclusive_group()
syntax.add_argument(
"-xe", "--request-little",
help="request explicit VR little endian TS only",
action="store_true"
)
syntax.add_argument(
"-xb", "--request-big",
help="request explicit VR big endian TS only",
action="store_true"
)
syntax.add_argument(
"-xi", "--request-implicit",
help="request implicit VR little endian TS only",
action="store_true"
)
# Misc Options
misc_opts = parser.add_argument_group('Miscellaneous Options')
misc_opts.add_argument(
"-cx", "--required-contexts",
help=(
"only request the presentation contexts required for the "
"input DICOM file(s)"
),
action="store_true",
)
return parser.parse_args()
def get_contexts(fpaths, app_logger):
"""Return the valid DICOM files and their context values.
Parameters
----------
fpaths : list of str
A list of paths to the files to try and get data from.
Returns
-------
list of str, dict
A list of paths to valid DICOM files and the {SOP Class UID :
[Transfer Syntax UIDs]} that can be used to create the required
presentation contexts.
"""
good, bad = [], []
contexts = {}
for fpath in fpaths:
path = os.fspath(Path(fpath).resolve())
try:
ds = dcmread(path)
except Exception as exc:
bad.append(('Bad DICOM file', path))
continue
try:
sop_class = ds.SOPClassUID
tsyntax = ds.file_meta.TransferSyntaxUID
except Exception as exc:
bad.append(('Unknown SOP Class or Transfer Syntax UID', path))
continue
tsyntaxes = contexts.setdefault(sop_class, [])
if tsyntax not in tsyntaxes:
tsyntaxes.append(tsyntax)
good.append(path)
for (reason, path) in bad:
app_logger.error(f"{reason}: {path}")
return good, contexts
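# Example of the mapping built above: a single CT Image Storage instance
# encoded as Explicit VR Little Endian would yield
#   {'1.2.840.10008.5.1.4.1.1.2': ['1.2.840.10008.1.2.1']}
# so only that one presentation context needs to be requested.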
def main(args=None):
"""Run the application."""
if args is not None:
sys.argv = args
args = _setup_argparser()
if args.version:
print(f'storescu.py v{__version__}')
sys.exit()
APP_LOGGER = setup_logging(args, 'storescu')
APP_LOGGER.debug(f'storescu.py v{__version__}')
APP_LOGGER.debug('')
lfiles, badfiles = get_files(args.path, args.recurse)
for bad in badfiles:
APP_LOGGER.error(f"Cannot access path: {bad}")
ae = AE(ae_title=args.calling_aet)
ae.acse_timeout = args.acse_timeout
ae.dimse_timeout = args.dimse_timeout
ae.network_timeout = args.network_timeout
if args.required_contexts:
# Only propose required presentation contexts
lfiles, contexts = get_contexts(lfiles, APP_LOGGER)
try:
for abstract, transfer in contexts.items():
for tsyntax in transfer:
ae.add_requested_context(abstract, tsyntax)
except ValueError:
raise ValueError(
"More than 128 presentation contexts required with "
"the '--required-contexts' flag, please try again "
"without it or with fewer files"
)
else:
# Propose the default presentation contexts
if args.request_little:
transfer_syntax = [ExplicitVRLittleEndian]
elif args.request_big:
transfer_syntax = [ExplicitVRBigEndian]
elif args.request_implicit:
transfer_syntax = [ImplicitVRLittleEndian]
else:
transfer_syntax = [
ExplicitVRLittleEndian,
ImplicitVRLittleEndian,
DeflatedExplicitVRLittleEndian,
ExplicitVRBigEndian
]
for cx in StoragePresentationContexts:
ae.add_requested_context(cx.abstract_syntax, transfer_syntax)
if not lfiles:
APP_LOGGER.warning("No suitable DICOM files found")
sys.exit()
# Request association with remote
assoc = ae.associate(
args.addr, args.port, ae_title=args.called_aet, max_pdu=args.max_pdu
)
if assoc.is_established:
ii = 1
for fpath in lfiles:
APP_LOGGER.info(f'Sending file: {fpath}')
try:
ds = dcmread(fpath)
status = assoc.send_c_store(ds, ii)
ii += 1
except InvalidDicomError:
APP_LOGGER.error(f'Bad DICOM file: {fpath}')
except Exception as exc:
APP_LOGGER.error(f"Store failed: {fpath}")
APP_LOGGER.exception(exc)
assoc.release()
else:
sys.exit(1)
if __name__ == "__main__":
main()
| mit | 6,390,212,979,904,259,000 | 29.093333 | 78 | 0.588835 | false |
nave91/teak-nbtree | src/rank.py | 1 | 4248 | from lib import *
import re
from globfilerank import *
def obs(f,alli):
now = alli
line = f.readline()
    while line:
lst = line.split()
for i in lst:
            isitnum = re.match(r'^([^0-9]|\.)', i)
if isitnum:
now = i
else:
v = float(i)
inc(v,now)
inc(v,alli)
        line = f.readline()
    # build "order" once, after all data is read, sorted by treatment mean
    for i in sorted(set(name), key=lambda t: mu[t]):
        if i != alli:
            temp = {}
            temp["="] = i
            temp["x"] = mu[i]
            order.append(temp)
def inc(v,k):
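    # Welford-style online update of count, sum, mean and M2 for key k;
    # var[k] then holds the running sample variance.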
print v,"vvvvvvvvvvvvvvvvvvv"
print k,"kkkkkkkkkkkkk"
name.append(k)
label[k] = 0
try:
n[k] += 1
except KeyError:
n[k] = 1
alli = n[k]
try:
x[k][alli] = v
except KeyError:
x[k] = {}
x[k][alli] = v
try:
sumi[k] += v
except KeyError:
sumi[k] = v
try:
delta = v - mu[k]
except KeyError:
mu[k] = 0
delta = v - mu[k]
try:
mu[k] += delta/alli
except KeyError:
mu[k] = delta/alli
try:
m2[k] += delta*(v - mu[k])
except KeyError:
m2[k] = delta*(v - mu[k])
var[k] = m2[k]/(alli - 1 + PINCH)
def rank(alli,cohen,mittas,a12):
cohen = cohen*(var[alli])**0.5
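    # e.g. cohen=0.3 scales the split threshold to 0.3 standard deviations:
    # adjacent groups whose means differ by less are never separated.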
level = 0
total = n[alli]
rdiv(0,len(order)-1,1,cohen,mittas,a12,level)
def rdiv(low,high,c,cohen,mittas,a12,level):
cut = div(low,high,cohen,mittas,a12)
if cut:
print "in cut",cut
level += 1
c = rdiv(low,cut-1,c,cohen,mittas,a12,level) + 1
c = rdiv(cut,high,c,cohen,mittas,a12,level)
else:
        for i in range(low,high+1):
print order[i]["="],"orderrrrrrrrrr",c
label[order[i]["="]] = c
return c
def div(low,high,cohen,mittas,a12):
n0 = [0 for i in range(0,len(order))]
n1 = [0 for i in range(0,len(order))]
sum0 = [0 for i in range(0,len(order))]
sum1 = [0 for i in range(0,len(order))]
muAll = divInits(low,high,n0,n1,sum0,sum1)
maxi = -1
cut = 0
for i in range(low,high):
b = order[i]["="]
n0[i] = n0[i-1] + n[b]
sum0[i] = sum0[i-1] + sumi[b]
left = n0[i]
muLeft = sum0[i] / left
        right = n1[i+1]
        muRight = sum1[i+1] / right
e = errDiff(muAll,left,muLeft,right,muRight)
if cohen:
if abs(muLeft - muRight) <= cohen:
continue
if mittas:
if e < maxi:
continue
if a12:
            if bigger(low,i+1,high) < a12:
continue
maxi = e
        cut = i + 1
print cut,"cutttt"
return cut
def errDiff(mu,n0,mu0,n1,mu1):
return n0*(mu - mu0)**2 + n1*(mu - mu1)**2
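# errDiff is the between-group sum of squares: e.g. mu=5 with groups
# (n0=2, mu0=4) and (n1=2, mu1=6) gives 2*1 + 2*1 = 4.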
def divInits(low,high,n0,n1,sum0,sum1):
b= order[low]["="]
n0[low]= n[b]
sum0[low]= sumi[b]
b= order[high]["="]
n1[high]= n[b]
sum1[high]= sumi[b]
for i in range(high-1,low-1,-1):
b = order[i]["="]
n1[i] = n1[i+1] + n[b]
sum1[i] = sum1[i+1] + sumi[b]
    return sum1[low]/n1[low]
def bigger(low,mid,high):
below = []
above = []
below = values(low,mid-1)
above = values(mid,high)
return a12statistic(below,above)
def a12statistic(below,above):
    # Vargha-Delaney A12: probability that a value from 'above' beats one
    # from 'below', counting ties as half a win.
    more = 0
    same = 0
    comparisons = 0
    for j in range(0,len(above)):
        for i in range(0,len(below)):
            comparisons += 1
            if above[j] > below[i]:
                more += 1
            elif above[j] == below[i]:
                same += 1
    return (more + 0.5*same)/max(comparisons,1)
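# Example: a12statistic([1, 2], [3, 4]) == 1.0 (every 'above' value beats
# every 'below' value), while two identical lists give 0.5.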
def values(i,j):
out = []
    for k in range(i,j+1):
        b = order[k]["="]
        for key in x[b]:
            out.append(x[b][key])
return out
def ranks(f,cohens,mittas,a12):
print "\n----|,",f.name,"|------------------"
obs(f,0)
rank(0,cohens,mittas,a12)
maxi = len(order)
    for i in range(0,maxi):
        k = order[i]["="]
        print k,":mu",mu[k],":rank",label[k]
    print name,"nameee"
    print mu,"muuu"
    print label,"rank"
f = open('../data/ska.txt','r')
ranks(f,0.3,1,0.6)
f.close()
| gpl-2.0 | -2,870,331,426,623,933,000 | 23.988235 | 66 | 0.476695 | false |
yephper/django | tests/template_tests/filter_tests/test_truncatewords_html.py | 1 | 1651 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.template.defaultfilters import truncatewords_html
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
def test_truncate_zero(self):
self.assertEqual(truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 0), '')
def test_truncate(self):
self.assertEqual(
truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 2),
'<p>one <a href="#">two ...</a></p>',
)
def test_truncate2(self):
self.assertEqual(
truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 4),
'<p>one <a href="#">two - three <br>four ...</a></p>',
)
def test_truncate3(self):
self.assertEqual(
truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 5),
'<p>one <a href="#">two - three <br>four</a> five</p>',
)
def test_truncate4(self):
self.assertEqual(
truncatewords_html('<p>one <a href="#">two - three <br>four</a> five</p>', 100),
'<p>one <a href="#">two - three <br>four</a> five</p>',
)
def test_truncate_unicode(self):
self.assertEqual(truncatewords_html('\xc5ngstr\xf6m was here', 1), '\xc5ngstr\xf6m ...')
def test_truncate_complex(self):
self.assertEqual(
truncatewords_html('<i>Buenos días! ¿Cómo está?</i>', 3),
'<i>Buenos días! ¿Cómo ...</i>',
)
| bsd-3-clause | 7,660,796,380,695,282,000 | 35.522727 | 107 | 0.540884 | false |
cobbler/cobbler | tests/xmlrpcapi/image_test.py | 1 | 2527 | import pytest
# TODO: Create fixture where image is created
@pytest.fixture(scope="function")
def remove_item(remote, token):
"""
Remove an item with the given name.
:param token: The fixture to have the token for authenticated strings available.
:param remote: The fixture to have the base xmlrpc connection.
"""
def _remove_item(itemtype, name):
yield
remote.remove_item(itemtype, name, token)
return _remove_item
@pytest.mark.usefixtures("cobbler_xmlrpc_base")
class TestImage:
def test_create_image(self, remote, token):
"""
Test: create/edit of an image object"""
# Arrange
# Act
images = remote.get_images(token)
image = remote.new_image(token)
# Assert
assert remote.modify_image(image, "name", "testimage0", token)
assert remote.save_image(image, token)
new_images = remote.get_images(token)
assert len(new_images) == len(images) + 1
def test_get_images(self, remote):
"""
Test: get images
"""
# Arrange
# Act
remote.get_images()
# Assert
def test_get_image(self, remote):
"""
Test: Get an image object
"""
        # Arrange
        # Act
        image = remote.get_image("testimage0")
        # Assert
        assert image
def test_find_image(self, remote, token):
"""
Test: Find an image object
"""
# Arrange
# Act
result = remote.find_image({"name": "testimage0"}, token)
# Assert
assert result
def test_copy_image(self, remote, token):
"""
Test: Copy an image object
"""
# Arrange
# Act
image = remote.get_item_handle("image", "testimage0", token)
# Assert
assert remote.copy_image(image, "testimagecopy", token)
def test_rename_image(self, remote, token, remove_item):
"""
Test: Rename an image object
"""
# Arrange
name = "testimage1"
image = remote.get_item_handle("image", "testimagecopy", token)
# Act
result = remote.rename_image(image, name, token)
# Cleanup
remote.remove_item("image", name, token)
# Assert
assert result
def test_remove_image(self, remote, token):
"""
Test: remove an image object
"""
# Arrange
# Act
# Assert
assert remote.remove_image("testimage0", token)
| gpl-2.0 | -5,530,098,281,154,148,000 | 20.973913 | 84 | 0.557974 | false |
pmatigakis/Huginn | huginn/instruments.py | 1 | 6186 | """
The huginn.instruments module contains classes that simulate the aircraft's
instruments
"""
import math
from math import sqrt, log
from huginn.fdm import Position, Velocities, Atmosphere, Orientation
from huginn.constants import a0, T0, g, M, R
from huginn.unit_conversions import convert_jsbsim_pressure, ur
def true_airspeed(total_pressure, static_pressure, temperature):
"""Calculate the true airspeed
Arguments:
total_pressure: the total pressure in Pascal
static_pressure: the static pressure in Pascal
temperature: the temperature in kelvin
returns the airspeed in knots
"""
impact_pressure = total_pressure - static_pressure
t_t0 = temperature / T0
q_p = impact_pressure / static_pressure
return a0 * sqrt(5.0 * (math.pow(q_p + 1.0, 2.0/7.0) - 1.0) * t_t0)
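# Rough sanity check (assumed standard-atmosphere constants): with
# total_pressure=101925 Pa, static_pressure=101325 Pa and
# temperature=288.15 K the formula gives about 60 knots, matching the
# incompressible limit for a 600 Pa impact pressure.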
def pressure_altitude(sea_level_pressure, pressure, temperature):
"""Calculate the pressure altitude
Arguments:
sea_level_pressure: the pressure at sea level in Pascal
pressure: the pressure at the current altitude in Pascal
temperature: the temperature in Kelvin
"""
return log(sea_level_pressure/pressure) * ((R * temperature) / (g * M))
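# Rough sanity check (assumed standard constants): sea_level_pressure=101325
# Pa, pressure=90000 Pa and temperature=288.15 K give roughly 1000 m.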
class GPS(object):
"""The GPS class simulates the aircraft's GPS system."""
def __init__(self, fdmexec):
self.fdmexec = fdmexec
self._position = Position(fdmexec)
self._velocities = Velocities(fdmexec)
@property
def latitude(self):
"""Returns the latitude in degrees"""
return self._position.latitude
@property
def longitude(self):
"""Returns the longitude in degrees"""
return self._position.longitude
@property
def altitude(self):
"""Returns the altitude in meters"""
return self._position.altitude
@property
def airspeed(self):
"""Returns the airspeed in meters per second"""
return self._velocities.true_airspeed
@property
def heading(self):
"""Returns the heading in degrees"""
return self._position.heading
class AirspeedIndicator(object):
"""The AirspeedIndicator class simulates the aircraft airspeed
indicator"""
def __init__(self, fdmexec):
"""Create a new AirspeedIndicator object
Arguments:
fdmexec: a JSBSim FGFDMExec object
"""
self.fdmexec = fdmexec
self._atmosphere = Atmosphere(fdmexec)
@property
def airspeed(self):
"""Returns the airspeed in knots"""
total_pressure = self.fdmexec.GetAuxiliary().GetTotalPressure()
total_pressure = convert_jsbsim_pressure(total_pressure)
return true_airspeed(total_pressure,
self._atmosphere.pressure,
self._atmosphere.temperature)
class Altimeter(object):
"""The Altimeter class simulates the aircraft altimeter"""
def __init__(self, fdmexec):
"""Create a new Altimeter object
Arguments:
fdmexec: A JSBSim FGFDMExec object
"""
self.fdmexec = fdmexec
self._atmosphere = Atmosphere(fdmexec)
self._pressure = 29.92130302799185 * ur.in_Hg
@property
def altitude(self):
"""Return the altitude in feet"""
sea_level_pressure = self._pressure.to(ur.pascal)
altitude = pressure_altitude(sea_level_pressure.magnitude,
self._atmosphere.pressure,
self._atmosphere.temperature)
altitude = altitude * ur.meter
altitude.ito(ur.foot)
return altitude.magnitude
@property
def pressure(self):
"""Return the instrument's pressure setting in inHg"""
return self._pressure.magnitude
@pressure.setter
def pressure(self, value):
"""Set the instrument's pressure setting
Arguments:
value: the pressure in inHg
"""
self._pressure = value * ur.in_Hg
class AttitudeIndicator(object):
"""The AttitudeIndicator class simulates the attitude indicator
instrument"""
def __init__(self, fdmexec):
"""Create a new AttitudeIndicator object
Arguments:
fdmexec: a JSBSim FGFDMExec object
"""
self.fdmexec = fdmexec
self._orientation = Orientation(fdmexec)
@property
def roll(self):
"""Return the roll angle ikn degrees"""
return self._orientation.phi
@property
def pitch(self):
"""Return the pitch angle in degrees"""
return self._orientation.theta
class HeadingIndicator(object):
"""The HeadingIndicator class simulates the heading indicator
instrument"""
def __init__(self, fdmexec):
"""Create a new HeadingIndicator object
Arguments:
fdmexec: a JSBSim FGFDMExec object
"""
self.fdmexec = fdmexec
self._orientation = Orientation(fdmexec)
@property
def heading(self):
"""Return the heading in degrees"""
return self._orientation.psi
class VerticalSpeedIndicator(object):
"""The VerticalSpeedIndicator simulates the aircraft's vertical speed
indicator instrument"""
def __init__(self, fdmexec):
"""Create a new VerticalSpeedIndicator object
Arguments:
fdmexec: a JSBSim FGFDMExec object
"""
self.fdmexec = fdmexec
self._velocities = Velocities(fdmexec)
@property
def climb_rate(self):
"""Return the climb rate in feet per minutes"""
climb_rate = self._velocities.climb_rate * ur.meters_per_second
climb_rate.ito(ur.feet_per_minute)
return climb_rate.magnitude
class Instruments(object):
"""The Instruments class contains the instances of the aircraft's
instruments"""
def __init__(self, fdmexec):
self.fdmexec = fdmexec
self.gps = GPS(fdmexec)
self.airspeed_indicator = AirspeedIndicator(fdmexec)
self.altimeter = Altimeter(fdmexec)
self.attitude_indicator = AttitudeIndicator(fdmexec)
self.heading_indicator = HeadingIndicator(fdmexec)
self.vertical_speed_indicator = VerticalSpeedIndicator(fdmexec)
| bsd-3-clause | 6,210,240,152,551,229,000 | 27.506912 | 75 | 0.642903 | false |
rcatwood/Savu | savu/plugins/loaders/i12_tomo_loader.py | 1 | 4933 | # Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: i12_tomo_loader
:platform: Unix
:synopsis: A class for loading i12 mutli-scan tomography data
.. moduleauthor:: Nicola Wadeson <[email protected]>
"""
import h5py
import logging
import numpy as np
import os
import savu.data.data_structures as ds
from savu.plugins.base_loader import BaseLoader
import savu.test.test_utils as tu
from savu.plugins.utils import register_plugin
@register_plugin
class I12TomoLoader(BaseLoader):
"""
A class to load i12 tomography data from a hdf5 file
:param angular_spacing: Angular spacing between successive \
projections. Default: 0.2.
:param data_path: Path to the data inside the \
file. Default: 'entry1/tomo_entry/data/data'.
:param dark: Path to the dark field data \
file. Default: 'Savu/test_data/data/i12_test_data/45657.nxs'.
:param flat: Path to the flat field data \
file. Default: 'Savu/test_data/data/i12_test_data/45658.nxs'.
:param flat_dark_path: Path to the data inside the \
file. Default: 'entry1/data/pco4000_dio_hdf/data'
"""
def __init__(self, name='I12TomoLoader'):
super(I12TomoLoader, self).__init__(name)
def setup(self):
exp = self.exp
data_obj = exp.create_data_object('in_data', 'tomo')
ds.data_add_ons.TomoRaw(data_obj)
        # axis ordering of the raw data: frame number, detector y, detector x
        # (rotation angles are generated later, in data_mapping)
frame = 0
detY = 1
detX = 2
data_obj.set_axis_labels('frame.number',
'detector_y.pixel',
'detector_x.pixel')
data_obj.add_pattern('PROJECTION', core_dir=(detX, detY),
slice_dir=(frame,))
expInfo = exp.meta_data
data_obj.backing_file = \
h5py.File(expInfo.get_meta_data("data_file"), 'r')
logging.debug("Opened file '%s' '%s'", 'tomo_entry',
data_obj.backing_file.filename)
logging.debug("Getting the path to the data")
data_obj.data = \
data_obj.backing_file[self.parameters['data_path']]
logging.debug("Getting the path to the dark data")
dark_file = h5py.File(self.get_file_path('dark'), 'r')
dark = dark_file[self.parameters['flat_dark_path']]
expInfo.set_meta_data('dark', dark[:].mean(0))
logging.debug("Getting the path to the flat data")
flat_file = h5py.File(self.get_file_path('flat'), 'r')
flat = flat_file[self.parameters['flat_dark_path']]
expInfo.set_meta_data('flat', flat[:].mean(0))
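        # (the dark/flat frame stacks are averaged over the frame axis
        # above, leaving a single 2D correction image for each)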
data_obj.set_shape(data_obj.data.shape)
self.set_data_reduction_params(data_obj)
def data_mapping(self):
exp = self.exp
data_obj = exp.index['in_data']['tomo']
data_obj.mapping = True
mapping_obj = exp.create_data_object('mapping', 'tomo')
angular_spacing = self.parameters['angular_spacing']
        # use this if scanning [0, 180]
n_angles = int(np.ceil((180+angular_spacing)/float(angular_spacing)))
        # use this if scanning [0, 180)
# n_angles = int(np.ceil((180)/float(angular_spacing)))
rotation_angle = np.linspace(0, 180, n_angles)
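        # Worked example with the default angular_spacing of 0.2:
        # n_angles = int(np.ceil((180 + 0.2) / 0.2)) = 901, and
        # np.linspace(0, 180, 901) reproduces the 0.2-degree spacing
        # with both endpoints included.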
mapping_obj.set_axis_labels('rotation_angle.degrees',
'detector_y.pixel',
'detector_x.pixel',
'scan.number')
rot = 0
detY = 1
detX = 2
scan = 3
mapping_obj.meta_data.set_meta_data('rotation_angle', rotation_angle)
mapping_obj.add_pattern('PROJECTION', core_dir=(detX, detY),
slice_dir=(rot, scan))
mapping_obj.add_pattern('SINOGRAM', core_dir=(detX, rot),
slice_dir=(detY, scan))
loaded_shape = data_obj.get_shape()
n_scans = loaded_shape[0]/len(rotation_angle)
shape = (rotation_angle.shape + loaded_shape[1:3] + (n_scans,))
mapping_obj.set_shape(shape)
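        # Worked example (hypothetical numbers): 4505 loaded frames at
        # 901 angles per scan gives n_scans = 5, so the mapped shape is
        # (901, detY, detX, 5).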
def get_file_path(self, name):
path = self.parameters[name]
if path.split(os.sep)[0] == 'Savu':
path = tu.get_test_data_path(path.split('/test_data/data')[1])
self.parameters['flat_dark_path'] = 'entry/final_result_tomo/data'
return path
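# Usage note (an assumption about the surrounding framework): in Savu this
# loader is selected in a process list, and the parameters documented in the
# class docstring above (angular_spacing, data_path, dark, flat,
# flat_dark_path) are the values a process list would override.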
| gpl-3.0 | 537,823,949,443,047,940 | 33.256944 | 78 | 0.604703 | false |
satishgoda/programmingusingpython | sandbox/rhomStub/randd/uv_master.py | 1 | 6009 | import bpy
# Get a handle to the active object
object = bpy.context.active_object
# If the object is in edit mode, come out of it
if object.mode == 'EDIT':
bpy.ops.object.mode_set(mode='OBJECT', toggle=True)
# Get a handle to the active object's mesh data
bmesh = object.data
#########################################################################################
class ProcessBMeshForrhom(object):
    '''This class takes as input a Blender Mesh and creates an IR
    (Intermediate Representation) to be used by rhom. Processing
    includes figuring out group membership of polygons and removing
redundant data in Blender's uv point tables. Apart from the
processing, this class provides a clean API to be used by the
exporter.'''
def __init__(self, mesh):
self._export_mesh = {}
# MasterDict
export_mesh = {}
# This is used to hash the uvs while processing the polygons one by one.
# Information from here is used to update 'polygons' and 'uv_layers';
# it can be deleted once pre-processing is done.
export_mesh['vertices'] = { v.index: {
uvlayer.name: {} for uvlayer in bmesh.uv_layers }
for v in bmesh.vertices
}
# Unique uvs as a result of pre-processing the polygons
export_mesh['uv_layers'] = { uvlayer.name: {
'uvindex': 0, 'table': []}
for uvlayer in bmesh.uv_layers
}
# This will hold the vertices and uv indices for all the polygons
# as part of the pre-processing
export_mesh['polygons'] = { p.index: {
'vertices': {'no': -1, 'array': ''},
'uvlayers': {uvlayer.name: '' for uvlayer in bmesh.uv_layers},
'groups': [],
}
for p in bmesh.polygons
}
# This data is used by the per-polygon pre-processing step to figure out
# each polygon's group membership (see process_group_membership below)
export_mesh['groups'] = {'names': [group_name for group_name in sorted(bmesh.polygon_layers_int.keys())],
'table': {p.index: [] for p in bmesh.polygons}
}
for polygon in bmesh.polygons:
for group_name in export_mesh['groups']['names']:
export_mesh['groups']['table'][polygon.index].append(bool(bmesh.polygon_layers_int[group_name].data[polygon.index].value))
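# Worked example (hypothetical group names): with groups ['front', 'side']
# and polygon 0 tagged only in 'front', the loop above leaves
# export_mesh['groups']['table'][0] == [True, False].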
####################### Start Pre-Processing ########################
def process_uv_layer(polygon, layer, export_mesh):
uvtag = layer.name
uvdata = layer.data
uv_layer = export_mesh['uv_layers'][uvtag]
uvindices = []
for vindex in polygon.vertices:
# Get the uv value corresponding to this vertex
uv = uvdata[uv_layer['uvindex']].uv.to_tuple()
# Is this a new uv coming in
if export_mesh['vertices'][vindex][uvtag].get(uv) is None:
# Get the index from master uv table
index = len(export_mesh['uv_layers'][uvtag]['table'])
# Insert into the master uv table
export_mesh['uv_layers'][uvtag]['table'].append(uv)
# Log the uv in the vertices hash, so that when a shared uv comes by, we can just use this
export_mesh['vertices'][vindex][uvtag][uv] = index
else:
# This uv is shared, so reuse the index
index = export_mesh['vertices'][vindex][uvtag][uv]
# Add to the polygons master uv index
uvindices.append(index)
# Ready to fetch the next raw uv
uv_layer['uvindex'] += 1
# Store the uv index loop as a ready to use list for rhom
export_mesh['polygons'][polygon.index]['uvlayers'][uvtag] = '{0}'.format(str(uvindices))
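# Worked example of the uv de-duplication above: if vertex 3 occurs in two
# polygons with the same uv (0.5, 0.5), the first occurrence appends the
# value to the master table and records its index; the second occurrence
# finds the hash hit in export_mesh['vertices'][3][uvtag] and reuses it.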
# Group membership data for each polygon
def process_group_membership(polygon, export_mesh):
polygon_groups = export_mesh['polygons'][polygon.index]['groups']
groups_table = export_mesh['groups']['table']
groups_names = export_mesh['groups']['names']
for (group_index, is_present) in enumerate(groups_table[polygon.index]):
if is_present:
polygon_groups.append(groups_names[group_index])
# PRE PROCESSING OF THE MESH
for polygon in bmesh.polygons:
# This data will be used for generating the Vertex Table
vertices = export_mesh['polygons'][polygon.index]['vertices']
vertices['no'] = len(polygon.vertices)
vertices['array'] = '{0}'.format(str(list(polygon.vertices)))
for layer in bmesh.uv_layers:
process_uv_layer(polygon, layer, export_mesh)
process_group_membership(polygon, export_mesh)
####################### End Pre-Processing ###########################
#from pprint import pprint
#pprint(export_mesh)
#################### Use rhom Stub to Create Obn ##################
if True:  # always-on toggle; set to False to skip the rhom export
import sys
sys.path.append('/muse/satishg/learning/soc/obnblender/blender/io_scene_obn')
import rhomstub as rhom
mesh = rhom.Mesh()
for vertex in bmesh.vertices:
x, y, z = vertex.co
mesh.addPoint(rhom.Point(x, y, z))
for uvtag in export_mesh['uv_layers'].keys():
for uv in export_mesh['uv_layers'][uvtag]['table']:
mesh.addTexCoord(uvtag, rhom.TexCoord(uv[0], uv[1]))
for group_name in export_mesh['groups']['names']:
mesh.addGroup(group_name)
for polygon_index, polygon in export_mesh['polygons'].items():
element = mesh.addElement(polygon['vertices']['no'])
mesh.setPointIndices(element, polygon['vertices']['array'])
for uvtag in export_mesh['uv_layers']:
mesh.setTexCoordIndices(uvtag, element, polygon['uvlayers'][uvtag])
for group_name in polygon['groups']:
mesh.addElementToGroup(mesh.getElement(polygon_index), group_name)
if group_name.endswith('Mtl'):
mesh.setMaterial(mesh.getElement(polygon_index), 'xxx_' + group_name)
rhom.writeMesh('Foo.obn', mesh)
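# Usage note (an assumption about the intended workflow): this script reads
# bpy.context.active_object, so it is meant to be run from inside Blender
# (Text Editor or Python console) with a mesh object active, and with the
# rhomstub module importable via the sys.path entry added above.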
| gpl-2.0 | 9,153,331,899,270,852,000 | 39.601351 | 130 | 0.59128 | false |