repo_name (stringlengths 5–92) | path (stringlengths 4–232) | copies (stringclasses, 19 values) | size (stringlengths 4–7) | content (stringlengths 721–1.04M) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 15–997) | alpha_frac (float64, 0.25–0.97) | autogenerated (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|
mpkato/interleaving | tests/test_team_draft.py | 1 | 3306 | import interleaving as il
from interleaving import TeamRanking
import json
from .test_methods import TestMethods
class TestTeamDraft(TestMethods):
def test_interleave(self):
self.interleave(il.TeamDraft, [[1, 2], [2, 3]], 2, [(1, 2), (2, 1)])
self.interleave(il.TeamDraft, [[1, 2], [2, 3]], 3, [(1, 2, 3), (2, 1, 3)])
self.interleave(il.TeamDraft, [[1, 2], [2, 3]], 4, [(1, 2, 3), (2, 1, 3)])
self.interleave(il.TeamDraft, [[1, 2], [3, 4]], 2, [(1, 3), (3, 1)])
self.interleave(il.TeamDraft, [[1, 2], [3, 4]], 3,
[(1, 3, 2), (1, 3, 4), (3, 1, 2), (3, 1, 4)])
# check teams
td = il.TeamDraft([[1, 2], [2, 3]])
res = td.interleave()
assert set(res.teams[0]) == set([1])
assert set(res.teams[1]) == set([2])
td = il.TeamDraft([[1, 2], [3, 4]])
res = td.interleave()
assert set(res.teams[0]) == set([1])
assert set(res.teams[1]) == set([3])
def test_team_draft_ranking(self):
td = il.TeamDraft([[1, 2, 3], [2, 3, 1]], sample_num=100)
rankings, distributions = zip(*td.ranking_distribution)
assert len(rankings) == 4
def test_dump(self, tmpdir):
tmpfile = str(tmpdir) + '/team_draft.json'
td = il.TeamDraft([[1, 2, 3], [2, 3, 1]], sample_num=100)
td.dump_rankings(tmpfile)
with open(tmpfile, 'r') as f:
obj = json.load(f)
# Test keys
s = {str(hash(r)) for r in td._rankings}
assert s == set(obj.keys())
# Test rankings
l1 = sorted(td._rankings)
l2 = sorted([v['ranking']['ranking_list'] for v in obj.values()])
assert l1 == l2
# Test teams
f = lambda d: {str(k): sorted(list(s)) for k, s in d.items()}
l1 = [sorted(f(r.teams).items()) for r in td._rankings]
l2 = [sorted(v['ranking']['teams'].items()) for v in obj.values()]
assert sorted(l1) == sorted(l2)
def test_multileave(self):
self.interleave(il.TeamDraft, [[1, 2], [2, 3], [3, 4]], 2,
[(1, 2), (1, 3), (2, 1), (2, 3), (3, 1), (3, 2)])
self.interleave(il.TeamDraft, [[1, 2], [3, 4], [5, 6]], 2,
[(1, 3), (1, 5), (3, 1), (3, 5), (5, 1), (5, 3)])
def test_evaluate(self):
ranking = TeamRanking(team_indices=[0, 1], contents=[1, 2])
ranking.teams = {}
ranking.teams[0] = [1]
ranking.teams[1] = [2]
self.evaluate(il.TeamDraft, ranking, [0, 1], [])
self.evaluate(il.TeamDraft, ranking, [0], [(0, 1)])
self.evaluate(il.TeamDraft, ranking, [1], [(1, 0)])
self.evaluate(il.TeamDraft, ranking, [], [])
ranking = TeamRanking(team_indices=[0, 1], contents=[1, 3, 4])
ranking.teams = {}
ranking.teams[0] = [1]
ranking.teams[1] = [3, 4]
self.evaluate(il.TeamDraft, ranking, [0, 1, 2], [(1, 0)])
self.evaluate(il.TeamDraft, ranking, [0, 1], [])
self.evaluate(il.TeamDraft, ranking, [0, 2], [])
self.evaluate(il.TeamDraft, ranking, [1, 2], [(1, 0)])
self.evaluate(il.TeamDraft, ranking, [0], [(0, 1)])
self.evaluate(il.TeamDraft, ranking, [1], [(1, 0)])
self.evaluate(il.TeamDraft, ranking, [2], [(1, 0)])
self.evaluate(il.TeamDraft, ranking, [], [])
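# To run this suite (assumes pytest and the interleaving package are installed):
#   pytest tests/test_team_draft.py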
| mit | 3,198,218,194,433,597,400 | 40.848101 | 82 | 0.515124 | false |
pedropena/iteexe | exe/engine/jsidevice.py | 1 | 16044 | # ===========================================================================
# eXe
# Copyright 2004-2006, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
A JavaScript iDevice definition
"""
from exe.engine.idevice import Idevice
# For backward compatibility Jelly expects to find a Field class
from exe.engine.field import Field, TextField, TextAreaField, FeedbackField
from exe.engine.field import ImageField, AttachmentField
from exe.engine.path import Path
from exe.engine.exceptions.invalidconfigjsidevice import InvalidConfigJsIdevice
from xml.dom import minidom
import chardet  # used below to detect the config.xml encoding when plain decode fails
import re
import logging
import os
# Initialize logger
log = logging.getLogger(__name__)
# ===========================================================================
class JsIdevice(Idevice):
"""
A JavaScript Idevice definition
"""
persistenceVersion = 1
def __init__(self, iDeviceDir=None):
"""
Initialize JS IDevice
"""
self._iDeviceDir = str(iDeviceDir)
self._valid = False
self._checkValid()
if self._valid:
# Get XML values from the config file
xmlValues = self.__loadIdevice()
# Add the values not present on the XML
xmlValues = self.__fillIdeviceDefaultValues(xmlValues)
self._attributes = xmlValues
# xml node : [ label , 0=textfield 1=textarea , order into form]
_attributespre ={
'title': ['Title',0,0],
'category': ['Category',0,1],
'css-class': ['CSS class',0,2],
'icon': ['Icon',0,3]
}
self._attributes= sorted(_attributespre.items(), key=lambda t: t[1][2])
# Initialize the IDevice instance
Idevice.__init__(self, xmlValues['title'], xmlValues['author'], xmlValues['purpose'], xmlValues['tip'], xmlValues['icon'])
# CSS class
self.class_ = xmlValues['css-class']
if 'category' in xmlValues:
self.ideviceCategory = xmlValues['category']
if 'icon' in xmlValues:
self.icon = xmlValues['icon']
# Initialize resources list
self._editionResources = []
self._exportResources = []
# Initialize field arrays
self.fields = []
self.nextFieldId = 0
# Set IDevice emphasis
self.emphasis = Idevice.SomeEmphasis
# Add default JS Idevice fields
self.__addDefaultFields()
self.__getFolderResources()
def __loadIdevice(self):
"""
Load IDevice configuration from its config.xml file
"""
try:
if self._valid:
# Check if the folder has a config.xml file
configFile = Path(self._iDeviceDir + '/config.xml')
if configFile.exists():
# Get config data
configData = open(configFile).read()
try:
newConfigData = configData.decode()
except UnicodeDecodeError:
configCharset = chardet.detect(configData)
newConfigData = configData.decode(configCharset['encoding'], 'replace')
# Parse the XML file
xmlConfig = minidom.parseString(newConfigData)
# Get main element
xmlIdevice = xmlConfig.getElementsByTagName('idevice')
# Initialize results variable
result = dict()
# If there is a main element tag
if (len(xmlIdevice) > 0):
# Go over all the child nodes
for tag in xmlIdevice[0].childNodes:
# Only process the node if it is an Element
# This means only tags get processed
if(isinstance(tag, minidom.Element)):
# Add the tag name and value to the result dictionary
result.update({tag.tagName: tag.firstChild.nodeValue})
if 'title' in result and 'css-class' in result:
return result
else:
raise InvalidConfigJsIdevice(Path(self._iDeviceDir).basename(), 'Mandatory fields not found.')
else:
raise InvalidConfigJsIdevice(Path(self._iDeviceDir).basename(), 'config.xml file doesn\'t exist.')
except IOError as ioerror:
# If we can't load an iDevice, we simply continue with the rest (and log it)
log.debug("iDevice " + Path(self._iDeviceDir).basename() + " doesn't appear to have a valid \"config.xml\" file")
raise InvalidConfigJsIdevice(Path(self._iDeviceDir).basename(), ioerror.message)
def _checkValid(self):
config = Path(self._iDeviceDir)/'config.xml'
edition = Path(self._iDeviceDir)/'edition'
export = Path(self._iDeviceDir)/'export'
if config.exists() and edition.exists() and export.exists():
self._valid = True
else:
self._valid = False
def isValid(self):
return self._valid
def clone(self):
"""
Clone a JS iDevice just like this one
"""
miniMe = Idevice.clone(self)
for field in miniMe.fields:
field.idevice = miniMe
return miniMe
# Add the iDevice default fields
def __addDefaultFields(self):
""" A JS iDevice only has a Textarea with no instructions """
self.addField(TextAreaField(""))
def __getFolderResources(self):
self._editionFolder = str(Path(self._iDeviceDir).basename() + '/edition/')
self._exportFolder = str(Path(self._iDeviceDir).basename() + '/export/')
for editionFile in os.listdir(self._iDeviceDir + '/edition'):
self._editionResources.append(editionFile)
for exportFile in os.listdir(self._iDeviceDir + '/export'):
self._exportResources.append(exportFile)
def addField(self, field):
"""
Add a new field to this iDevice. Fields are indexed by their id.
"""
if field.idevice:
log.error(u"Field already belonging to %s added to %s" %
(field.idevice.title, self.title))
field.idevice = self
self.fields.append(field)
def getUniqueFieldId(self):
"""
Returns a unique id (within this idevice) for a field
of the form ii_ff where ii is the idevice and ff the field
"""
self.calcNextFieldId()
result = self.id + '_' + unicode(self.nextFieldId)
self.nextFieldId += 1
log.debug(u"UniqueFieldId: %s" % (result))
return result
def calcNextFieldId(self):
"""
initializes nextFieldId if it is still 0
"""
if not hasattr(self, 'nextFieldId'):
self.nextFieldId = 0
if self.nextFieldId == 0:
log.debug(u"nextFieldId==0 for self.class_ %s" % (self.class_))
maxId = 0
for field in self.fields:
if isinstance(field.id, unicode):
log.debug(u"** field.id = u: %s" % (field.id))
# only look at the part after the (last) underbar
c = field.id.split('_')
if int(c[-1]) > maxId:
maxId = int(c[-1])
else:
log.error(u"** field.id is not unicode= %d" % (field.id))
if field.id > maxId:
maxId = field.id
self.nextFieldId = maxId + 1
def __iter__(self):
return iter(self.fields)
def getResourcesField(self, this_resource):
"""
Implement the specific resource finding mechanism for these JS iDevices
TODO: This is an exact copy from Generic iDevice
"""
from exe.engine.field import FieldWithResources
if hasattr(self, 'fields'):
# check through each of this idevice's fields,
# to see if it is supposed to be there.
for this_field in self.fields:
if isinstance(this_field, FieldWithResources) \
and hasattr(this_field, 'images') :
# field is capable of embedding resources
for this_image in this_field.images:
if hasattr(this_image, '_imageResource') \
and this_resource == this_image._imageResource:
return this_field
# else, not found in the above loop:
return None
def getRichTextFields(self):
"""
Like getResourcesField(), a general helper to allow nodes to search
through all of their fields without having to know the specifics of each
iDevice type.
TODO: This is an exact copy from Generic iDevice
"""
# All of Generic iDevice's rich-text fields are in... fields!
# Some of the fields may NOT be rich-text, though,
# so this needs a bit more parsing:
fields_list = []
from exe.engine.field import FieldWithResources
if hasattr(self, 'fields'):
for this_field in self.fields:
if isinstance(this_field, FieldWithResources):
fields_list.append(this_field)
return fields_list
def burstHTML(self, i):
"""
takes a BeautifulSoup fragment (i) and bursts its contents to
import this idevice from a CommonCartridge export
TODO: This is an exact copy from Generic iDevice
"""
# Generic Idevice, with content in fields[]:
title = i.find(name='h2',
attrs={'class' : 'iDeviceTitle' })
self.title = title.renderContents().decode('utf-8')
if self.class_ in ("objectives", "activity", "preknowledge", "generic"):
inner = i.find(name='div',
attrs={'class' : 'iDevice_inner' })
inner_content = inner.find(name='div',
attrs={'id' : re.compile('^ta') })
self.fields[0].content_wo_resourcePaths = \
inner_content.renderContents().decode('utf-8')
# and add the LOCAL resources back in:
self.fields[0].content_w_resourcePaths = \
self.fields[0].MassageResourceDirsIntoContent( \
self.fields[0].content_wo_resourcePaths)
self.fields[0].content = self.fields[0].content_w_resourcePaths
elif self.class_ == "reading":
readings = i.findAll(name='div', attrs={'id' : re.compile('^ta') })
# should be exactly two of these:
# 1st = field[0] == What to Read
if len(readings) >= 1:
self.fields[0].content_wo_resourcePaths = \
readings[0].renderContents().decode('utf-8')
# and add the LOCAL resource paths back in:
self.fields[0].content_w_resourcePaths = \
self.fields[0].MassageResourceDirsIntoContent( \
self.fields[0].content_wo_resourcePaths)
self.fields[0].content = \
self.fields[0].content_w_resourcePaths
# 2nd = field[1] == Activity
if len(readings) >= 2:
self.fields[1].content_wo_resourcePaths = \
readings[1].renderContents().decode('utf-8')
# and add the LOCAL resource paths back in:
self.fields[1].content_w_resourcePaths = \
self.fields[1].MassageResourceDirsIntoContent(\
self.fields[1].content_wo_resourcePaths)
self.fields[1].content = \
self.fields[1].content_w_resourcePaths
# if available, feedback is the 3rd field:
feedback = i.find(name='div', attrs={'class' : 'feedback' , \
'id' : re.compile('^fb') })
if feedback is not None:
self.fields[2].content_wo_resourcePaths = \
feedback.renderContents().decode('utf-8')
# and add the LOCAL resource paths back in:
self.fields[2].content_w_resourcePaths = \
self.fields[2].MassageResourceDirsIntoContent( \
self.fields[2].content_wo_resourcePaths)
self.fields[2].content = \
self.fields[2].content_w_resourcePaths
def __fillIdeviceDefaultValues(self, values):
"""
        Fill the values required for the IDevice constructor but not
present on the config.xml
"""
keys = ['title', 'author', 'purpose', 'tip', 'icon']
for key in keys:
if key not in values:
values[key] = ''
return values
def getResourcesList(self, editMode = False, ordered = True, appendPath = True):
""" Get a list of the iDevice resources """
resources = list()
# Export resources (always included)
for res in self._exportResources:
if appendPath:
resources.append(str(self._exportFolder + res))
else:
resources.append(str(res))
# Edition resources
if(editMode):
for res in self._editionResources:
if appendPath:
resources.append(str(self._editionFolder + res))
else:
resources.append(str(res))
# Order the list
if(ordered):
resources.sort(key = lambda x: x.split('/')[-1])
return resources
def get_export_folder(self):
return self._exportFolder
def get_dirname(self):
return Path(self._iDeviceDir).basename()
def get_jsidevice_dir(self):
return Path(self._iDeviceDir)
def renderProperties(self):
properties = []
for attribute in self._attributes:
if attribute[0] == 'css-class':
value = self.class_
elif attribute[0] == 'category':
value = self.ideviceCategory
else:
value = getattr(self, attribute[0])
name = attribute[1][0]
properties.append({'name': _(name), 'value': value})
return properties
# ===========================================================================
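# A minimal usage sketch, not part of the original module: the folder path below
# is hypothetical and would need to contain a config.xml plus edition/ and
# export/ subfolders, inside a working eXe runtime.
#   idevice = JsIdevice('/path/to/my_idevice')
#   if idevice.isValid():
#       print idevice.getResourcesList(editMode=True)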
| gpl-2.0 | 7,648,947,192,443,165,000 | 39.824427 | 134 | 0.526053 | false |
MOOCworkbench/MOOCworkbench | git_manager/migrations/0001_initial.py | 1 | 1495 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-02 09:52
from __future__ import unicode_literals
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('user_manager', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='GitRepository',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('name', models.CharField(max_length=200)),
('has_issues', models.BooleanField(default=True)),
('has_wiki', models.BooleanField(default=True)),
('github_url', models.URLField()),
('private', models.BooleanField(default=False)),
('hooks_url', models.CharField(max_length=100, null=True)),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='user_manager.WorkbenchUser')),
],
options={
'abstract': False,
},
),
]
| mit | 1,464,028,200,771,823,400 | 38.342105 | 147 | 0.60602 | false |
phil65/script.maps.browser | resources/lib/googleplaces.py | 1 | 7062 | # -*- coding: utf8 -*-
# Copyright (C) 2015 - Philipp Temminghoff <[email protected]>
# This program is Free Software see LICENSE file for details
from __future__ import absolute_import
from __future__ import unicode_literals
import urllib
import xbmcgui
from kodi65 import utils
from kodi65 import addon
from kodi65 import VideoItem
from kodi65 import ItemList
GOOGLE_PLACES_KEY = 'AIzaSyCgfpm7hE_ufKMoiSUhoH75bRmQqV8b7P4'
BASE_URL = 'https://maps.googleapis.com/maps/api/place/'
CATEGORIES = {"accounting": addon.LANG(32000),
"airport": addon.LANG(32035),
"amusement_park": addon.LANG(32036),
"aquarium": addon.LANG(32037),
"art_gallery": addon.LANG(32038),
"atm": addon.LANG(32039),
"bakery": addon.LANG(32040),
"bank": addon.LANG(32041),
"bar": addon.LANG(32042),
"beauty_salon": addon.LANG(32016),
"bicycle_store": addon.LANG(32017),
"book_store": addon.LANG(32018),
"bowling_alley": addon.LANG(32023),
"bus_station": addon.LANG(32033),
"cafe": addon.LANG(32043),
"campground": addon.LANG(32044),
"car_dealer": addon.LANG(32045),
"car_rental": addon.LANG(32046),
"car_repair": addon.LANG(32047),
"car_wash": addon.LANG(32048),
"casino": addon.LANG(32049),
"cemetery": addon.LANG(32050),
"church": addon.LANG(32051),
"city_hall": addon.LANG(32052),
"clothing_store": addon.LANG(32053),
"convenience_store": addon.LANG(32054),
"courthouse": addon.LANG(32055),
"dentist": addon.LANG(32056),
"department_store": addon.LANG(32057),
"doctor": addon.LANG(32058),
"electrician": addon.LANG(32059),
"electronics_store": addon.LANG(32060),
"embassy": addon.LANG(32061),
"establishment": addon.LANG(32062),
"finance": addon.LANG(29957),
"fire_station": addon.LANG(32063),
"florist": addon.LANG(32064),
"food": addon.LANG(32006),
"funeral_home": addon.LANG(32065),
"furniture_store": addon.LANG(32066),
"gas_station": addon.LANG(32067),
"general_contractor": addon.LANG(32068),
"grocery_or_supermarket": addon.LANG(32069),
"gym": addon.LANG(32070),
"hair_care": addon.LANG(32071),
"hardware_store": addon.LANG(32072),
"health": addon.LANG(32073),
"hindu_temple": addon.LANG(32074),
"home_goods_store": addon.LANG(32075),
"hospital": addon.LANG(32076),
"insurance_agency": addon.LANG(32077),
"jewelry_store": addon.LANG(32078),
"laundry": addon.LANG(32079),
"lawyer": addon.LANG(32080),
"library": addon.LANG(14022),
"liquor_store": addon.LANG(32081),
"local_government_office": addon.LANG(32082),
"locksmith": addon.LANG(32083),
"lodging": addon.LANG(32084),
"meal_delivery": addon.LANG(32085),
"meal_takeaway": addon.LANG(32086),
"mosque": addon.LANG(32087),
"movie_rental": addon.LANG(32088),
"movie_theater": addon.LANG(32089),
"moving_company": addon.LANG(32090),
"museum": addon.LANG(32091),
"night_club": addon.LANG(32092),
"painter": addon.LANG(32093),
"park": addon.LANG(32094),
"parking": addon.LANG(32095),
"pet_store": addon.LANG(32096),
"pharmacy": addon.LANG(32097),
"physiotherapist": addon.LANG(32098),
"place_of_worship": addon.LANG(32099),
"plumber": addon.LANG(32100),
"police": addon.LANG(32101),
"post_office": addon.LANG(32102),
"real_estate_agency": addon.LANG(32103),
"restaurant": addon.LANG(32104),
"roofing_contractor": addon.LANG(32105),
"rv_park": addon.LANG(32106),
"school": addon.LANG(32107),
"shoe_store": addon.LANG(32108),
"spa": addon.LANG(32109),
"stadium": addon.LANG(32110),
"storage": addon.LANG(154),
"store": addon.LANG(32111),
"subway_station": addon.LANG(32112),
"synagogue": addon.LANG(32113),
"taxi_stand": addon.LANG(32114),
"train_station": addon.LANG(32115),
"travel_agency": addon.LANG(32116),
"university": addon.LANG(32117),
"veterinary_care": addon.LANG(32118),
"zoo": addon.LANG(32119)
}
class GooglePlaces():
def __init__(self):
pass
def select_category(self):
modeselect = [addon.LANG(32120)]
modeselect += [value for value in CATEGORIES.itervalues()]
index = xbmcgui.Dialog().select(addon.LANG(32121), modeselect)
if index > 0:
return CATEGORIES.keys()[index - 1]
elif index > -1:
return ""
else:
return None
def get_locations(self, lat, lon, radius, locationtype):
params = {"key": GOOGLE_PLACES_KEY,
"radius": min(30000, radius),
"location": "%s,%s" % (lat, lon),
"types": locationtype}
base_url = BASE_URL + 'nearbysearch/json?'
results = utils.get_JSON_response(base_url + urllib.urlencode(params))
places = ItemList()
        if "meta" in results and results['meta']['code'] == 400:
            utils.log("LIMIT EXCEEDED")
            return places  # keep the return type consistent with the success path
        if "results" not in results:
            return places
for place in results['results']:
try:
params = {"maxwidth": 400,
"photoreference": place['photos'][0]['photo_reference'],
"key": GOOGLE_PLACES_KEY}
photo = BASE_URL + 'photo?' + urllib.urlencode(params)
except Exception:
photo = ""
description = place['vicinity'] if "vicinity" in place else place.get('formatted_address', "")
item = VideoItem(label=place['name'],
label2=" / ".join(place['types']))
item.set_artwork({"thumb": photo,
"icon": place['icon']})
item.set_info("rating", place['rating'] * 2.0 if "rating" in place else "")
item.set_properties({'description': description,
"lat": place['geometry']['location']['lat'],
"lon": place['geometry']['location']['lng']})
places.append(item)
return places
GP = GooglePlaces()
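# A minimal usage sketch, not part of the original module: it assumes the Kodi
# runtime (for the xbmcgui dialog), network access, and a valid Google Places
# key; the coordinates and radius below are made up.
if __name__ == '__main__':
    chosen = GP.select_category()
    if chosen:
        for place in GP.get_locations(52.52, 13.40, 5000, chosen):
            print(place)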
| gpl-2.0 | 1,950,442,622,420,908,300 | 40.786982 | 106 | 0.517417 | false |
phiedulxp/tweet_search | spider/got/manager/TweetManager.py | 1 | 10058 | import urllib,urllib2,json,re,datetime,sys,cookielib
from .. import models
from pyquery import PyQuery
import requests
import random
random.seed(1)
def fetch_activities(tweet_id):
retusers = []
favorusers = []
re_url = 'https://twitter.com/i/activity/retweeted_popup?id=%s'%(tweet_id)
favor_url = 'https://twitter.com/i/activity/favorited_popup?id=%s'%(tweet_id)
headers = {
'Host':"twitter.com",
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.%s'%(random.randint(0,999)),
'Accept':"application/json, text/javascript, */*; q=0.01",
'Accept-Language':"de,en-US;q=0.7,en;q=0.3",
'X-Requested-With':"XMLHttpRequest",
'Referer':'https://twitter.com/',
'Connection':"keep-alive",
}
re_users = PyQuery(requests.get(re_url,headers=headers).json()['htmlUsers'])('ol.activity-popup-users')
for re_user in re_users('div.account'):
userPQ = PyQuery(re_user)
userd = {
'screen_name':userPQ.attr('data-screen-name'),
'user_id':userPQ.attr('data-user-id'),
'data_name':userPQ.attr('data-name'),
'avatar_src':userPQ('img.avatar').attr('src'),
'userbadges':userPQ('span.UserBadges').text(),
'bio':userPQ('p.bio').text(),
}
retusers.append({userd['screen_name']:userd})
favor_users = PyQuery(requests.get(favor_url,headers=headers).json()['htmlUsers'])('ol.activity-popup-users')
for favor_user in favor_users('div.account'):
userPQ = PyQuery(favor_user)
userd = {
'screen_name':userPQ.attr('data-screen-name'),
'user_id':userPQ.attr('data-user-id'),
'data_name':userPQ.attr('data-name'),
'avatar_src':userPQ('img.avatar').attr('src'),
'userbadges':userPQ('span.UserBadges').text(),
'bio':userPQ('p.bio').text(),
}
favorusers.append({userd['screen_name']:userd})
return retusers,favorusers
def fetch_entities(tweetPQ):
hashtags = []
urls = []
for url in tweetPQ('p.js-tweet-text a'):
d = dict(url.items())
if d.has_key('data-expanded-url'): #d['class'] == 'twitter-timeline-link'
#pdb.set_trace()
urls.append({'href':d['href'],'expanded_url':d['data-expanded-url']})
if d['href'].startswith('/hashtag/'):
hashtags.append(d['href'].split('?')[0].split('/')[-1])
tweetPQ('p.js-tweet-text a.twitter-timeline-link').remove()
return hashtags,urls
def getTweet(tweetHTML):
tweetPQ = PyQuery(tweetHTML)
tweet = models.Tweet()
#base info
id = tweetPQ.attr("data-tweet-id")
conversation_id = tweetPQ.attr('data-conversation-id')
dateSec = int(tweetPQ("small.time span.js-short-timestamp").attr("data-time"))
#permalink = tweetPQ.attr("data-permalink-path")
#user
screen_name = tweetPQ.attr('data-screen-name')
user_id = tweetPQ.attr('data-user-id')
data_name = tweetPQ.attr('data-name')
avatar_src = tweetPQ('img.avatar').attr('src')
userbadges = tweetPQ('span.UserBadges').text()
#text
hashtags,urls = fetch_entities(tweetPQ)
mentions = tweetPQ.attr("data-mentions")
lang = tweetPQ("p.js-tweet-text").attr('lang')
raw_text = re.sub(r"\s+", " ", tweetPQ("p.js-tweet-text").text().replace('# ', '#').replace('@ ', '@'))
standard_text = re.sub(r"\s+", " ", tweetPQ("p.js-tweet-text").text().replace('# ', '').replace('@ ', ''))
tweetPQ('p.js-tweet-text')('a').remove()
tweetPQ('p.js-tweet-text')('img').remove()
clean_text = tweetPQ("p.js-tweet-text").text()
#media
quote_id = tweetPQ('div.QuoteTweet a.QuoteTweet-link').attr('data-conversation-id')
has_cards = tweetPQ.attr('data-has-cards')
card_url = tweetPQ('div.js-macaw-cards-iframe-container').attr('data-card-url')
img_src = tweetPQ('div.AdaptiveMedia-container img').attr('src')
video_src = tweetPQ('div.AdaptiveMedia-container video').attr('src')
geo = ''
geoSpan = tweetPQ('span.Tweet-geo')
if len(geoSpan) > 0:
geo = geoSpan.attr('title')
#action
retweet_id = tweetPQ.attr('data-retweet-id')
retweeter = tweetPQ.attr('data-retweeter')
#retusers,favorusers = fetch_activities(id)
replies = int(tweetPQ("span.ProfileTweet-action--reply span.ProfileTweet-actionCount").attr("data-tweet-stat-count").replace(",", ""))
retweets = int(tweetPQ("span.ProfileTweet-action--retweet span.ProfileTweet-actionCount").attr("data-tweet-stat-count").replace(",", ""))
favorites = int(tweetPQ("span.ProfileTweet-action--favorite span.ProfileTweet-actionCount").attr("data-tweet-stat-count").replace(",", ""))
## tweet model
tweet.id = id
tweet.conversation_id = conversation_id
tweet.is_reply = tweet.id != tweet.conversation_id
tweet.created_at = datetime.datetime.fromtimestamp(dateSec)
#tweet.permalink = 'https://twitter.com' + permalink
#user
tweet.user = {
'screen_name':screen_name,
'user_id':user_id,
'data_name':data_name,
'avatar_src':avatar_src,
'userbadges':userbadges,
}
#media
tweet.media = {
'quote_id':quote_id,
'has_cards':has_cards,
'card_url':card_url,
'img_src':img_src,
'video_src':video_src,
'geo':geo,
}
#text
tweet.hashtags = hashtags
tweet.urls = urls
tweet.mentions = mentions.split(' ') if mentions != None else None
tweet.lang = lang
tweet.raw_text = raw_text
tweet.standard_text = standard_text
#tweet.clean_text = clean_text
#action
tweet.action = {
#'retusers':retusers,
#'favorusers':favorusers,
'replies':replies,
'retweets':retweets,
'favorites':favorites,
'retweet_id':retweet_id,
'retweeter':retweeter,
'is_retweet':True if retweet_id != None else False,
}
return tweet
class TweetManager:
def __init__(self):
pass
@staticmethod
def getTweetsById(tweet_id):
url = 'https://twitter.com/xxx/status/%s'%(tweet_id)
headers = {
'Host':"twitter.com",
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.%s'%(random.randint(0,999)),
'Accept':"application/json, text/javascript, */*; q=0.01",
'Accept-Language':"de,en-US;q=0.7,en;q=0.3",
'X-Requested-With':"XMLHttpRequest",
'Referer':'https://twitter.com/',
'Connection':"keep-alive",
}
tweets = PyQuery(requests.get(url,headers=headers).content)('div.js-original-tweet')
for tweetHTML in tweets:
return getTweet(tweetHTML)
@staticmethod
def getTweets(tweetCriteria, refreshCursor='', receiveBuffer=None, bufferLength=100, proxy=None):
results = []
resultsAux = []
cookieJar = cookielib.CookieJar()
if hasattr(tweetCriteria, 'username') and (tweetCriteria.username.startswith("\'") or tweetCriteria.username.startswith("\"")) and (tweetCriteria.username.endswith("\'") or tweetCriteria.username.endswith("\"")):
tweetCriteria.username = tweetCriteria.username[1:-1]
active = True
while active:
            # a distinct local name avoids shadowing the stdlib json module
            response_json = TweetManager.getJsonResponse(tweetCriteria, refreshCursor, cookieJar, proxy)
            if len(response_json['items_html'].strip()) == 0:
                break
            if not response_json.has_key('min_position'):
                break
            refreshCursor = response_json['min_position']
            if refreshCursor == None:
                break
            tweets = PyQuery(response_json['items_html'])('div.js-stream-tweet')
if len(tweets) == 0:
break
for tweetHTML in tweets:
tweet = getTweet(tweetHTML)
if hasattr(tweetCriteria, 'sinceTimeStamp'):
if tweet.created_at < tweetCriteria.sinceTimeStamp:
active = False
break
if hasattr(tweetCriteria, 'untilTimeStamp'):
if tweet.created_at <= tweetCriteria.untilTimeStamp:
results.append(tweet.__dict__)
else:
results.append(tweet.__dict__)
#resultsAux.append(tweet)
if receiveBuffer and len(resultsAux) >= bufferLength:
receiveBuffer(resultsAux)
resultsAux = []
if tweetCriteria.maxTweets > 0 and len(results) >= tweetCriteria.maxTweets:
active = False
break
if receiveBuffer and len(resultsAux) > 0:
receiveBuffer(resultsAux)
return results
@staticmethod
    def getJsonResponse(tweetCriteria, refreshCursor, cookieJar, proxy):
url = "https://twitter.com/i/search/timeline?q=%s&src=typd&max_position=%s"
urlGetData = ''
if hasattr(tweetCriteria, 'username'):
urlGetData += ' from:' + tweetCriteria.username
if hasattr(tweetCriteria, 'querySearch'):
urlGetData += ' ' + tweetCriteria.querySearch
if hasattr(tweetCriteria, 'near'):
urlGetData += "&near:" + tweetCriteria.near + " within:" + tweetCriteria.within
if hasattr(tweetCriteria, 'since'):
urlGetData += ' since:' + tweetCriteria.since
if hasattr(tweetCriteria, 'until'):
urlGetData += ' until:' + tweetCriteria.until
if hasattr(tweetCriteria, 'topTweets'):
if tweetCriteria.topTweets:
url = "https://twitter.com/i/search/timeline?q=%s&src=typd&max_position=%s"
if hasattr(tweetCriteria, 'tweetType'):
url = url + tweetCriteria.tweetType
url = url % (urllib.quote(urlGetData), refreshCursor)
ua = 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.%s'%(random.randint(0,999))
headers = [
('Host', "twitter.com"),
('User-Agent', ua),
# Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36
#Mozilla/5.0 (Windows NT 6.1; Win64; x64)
('Accept', "application/json, text/javascript, */*; q=0.01"),
('Accept-Language', "de,en-US;q=0.7,en;q=0.3"),
('X-Requested-With', "XMLHttpRequest"),
('Referer', url),
('Connection', "keep-alive")
]
if proxy:
opener = urllib2.build_opener(urllib2.ProxyHandler({'http': proxy, 'https': proxy}), urllib2.HTTPCookieProcessor(cookieJar))
else:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookieJar))
opener.addheaders = headers
try:
response = opener.open(url)
jsonResponse = response.read()
except Exception,e:
print "Twitter weird response. Try to see on browser: https://twitter.com/search?q=%s&src=typd" % urllib.quote(urlGetData)
raise Exception(e.message)
#sys.exit()
#return None
dataJson = json.loads(jsonResponse)
return dataJson
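# A minimal usage sketch, not part of the original module: the tweet id below is
# hypothetical, and the request needs network access to twitter.com, whose page
# markup may have changed since this scraper was written.
if __name__ == '__main__':
    tweet = TweetManager.getTweetsById('850007368138018817')
    if tweet is not None:
        print tweet.raw_text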
| mit | 1,266,755,237,738,428,000 | 33.210884 | 214 | 0.669616 | false |
timwoj/ctrprogress | main.py | 1 | 1649 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import Flask, request, Response
from werkzeug.routing import BaseConverter
import ctrpmodels
import display
import ranker
import rostermgmt
app = Flask(__name__)
app.debug = True
class RegexConverter(BaseConverter):
def __init__(self, url_map, *items):
super(RegexConverter, self).__init__(url_map)
self.regex = items[0]
app.url_map.converters['regex'] = RegexConverter
app.jinja_env.filters['normalize'] = display.normalize
@app.route('/')
def root():
return display.display()
@app.route('/history')
def history():
return display.display_history(request)
@app.route('/loadgroups')
def load_groups():
return rostermgmt.load_groups()
@app.route('/rank', methods=['GET', 'POST'])
def rank():
if request.method == 'GET':
return ranker.rank()
return ranker.start_ranking()
# This is used by the cron job to start ranking automatically. We call the ranker
# but we don't care about the redirect that it responds with. Instead just return
# a 200 so the cron job doesn't fail.
@app.route('/startrank')
def startrank():
ranker.start_ranking()
return '', 200
@app.route('/builder', methods=['POST'])
def builder():
return ranker.run_builder(request)
@app.route('/migrate')
def migrate():
return ctrpmodels.migrate()
@app.route('/<regex("tier(\d+)"):tier>')
def display_tier(tier):
return display.display_tier(tier)
@app.route('/tooltips.js')
def tooltips():
return Response(display.build_tooltips(), content_type='application/javascript')
@app.route('/loadone')
def load_one():
return ranker.loadone(request)
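# A minimal local-run sketch, not part of the original module; in deployment this
# app is served by a WSGI host (e.g. App Engine), so the dev server below is only
# for quick local testing.
if __name__ == '__main__':
    app.run(host='127.0.0.1', port=8080)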
| mit | -2,181,812,599,848,570,400 | 23.61194 | 84 | 0.691935 | false |
trmznt/genaf | genaf/scripts/facmd.py | 1 | 1173 | # genaf dbmgr script will override both rhombus' and fatools' dbmgr
import transaction, sys
from rhombus.lib.utils import cout, cerr, cexit, get_dbhandler
from rhombus.scripts import setup_settings, arg_parser
from fatools.scripts.facmd import ( init_argparser as fatools_init_argparser,
do_facmd as fatools_do_facmd )
def init_argparser( parser=None ):
if parser is None:
p = arg_parser('facmd - genaf')
else:
p = parser
p = fatools_init_argparser( p )
return p
def main(args):
cerr('genaf facmd main()')
settings = setup_settings( args )
if args.commit:
with transaction.manager:
do_facmd( args, settings )
cerr('** COMMIT database **')
else:
cerr('** WARNING -- running without database COMMIT **')
if not args.test:
            keys = input('Do you want to continue? ')
            if not keys or keys.lower()[0] != 'y':
                sys.exit(1)
do_facmd( args, settings )
def do_facmd(args, settings, dbh=None):
if dbh is None:
dbh = get_dbhandler(settings)
print(dbh)
fatools_do_facmd(args, dbh)
| lgpl-3.0 | 4,359,145,294,513,377,300 | 21.557692 | 77 | 0.595055 | false |
Clinical-Genomics/scout | scripts/genelist_to_panel.py | 1 | 5499 | from pprint import pprint as pp
import click
@click.command()
@click.argument("gene-list", type=click.File("r"))
@click.option("--panel-name", "-n", required=True)
@click.pass_context
def cli(ctx, gene_list, panel_name):
# Dictionary with panel_id as key
panel_metadata = {}
header = []
panel_genes = {}
for line in gene_list:
line = line.rstrip()
if line.startswith("#"):
if line.startswith("##"):
# These headers include metainformation about the panels or
# contig information which we are not interested in.
                # They always start with Database=<ID=<some_name> and are
# ',' separated.
if not "contig" in line:
panel_info = {}
line = line.split(",")
panel_info["institute"] = line[0][15:22]
for entry in line[1:]:
splitted_entry = entry.split("=")
key = splitted_entry[0]
value = splitted_entry[1]
if key == "Version":
panel_info["version"] = float(value)
elif key == "Date":
year = value[0:4]
month = value[4:6]
day = value[6:]
panel_info["date"] = "{0}-{1}-{2}".format(year, month, day)
elif key == "Acronym":
panel_id = value
panel_info["panel_id"] = panel_id
elif key == "Complete_name":
panel_info["display_name"] = value
panel_metadata[panel_id] = panel_info
else:
# The header line starts with only one '#'
header = line[1:].split("\t")
# Check if the given panel name was found
if not panel_name in panel_metadata:
click.echo("Panel {0} could not be found in gene list".format(panel_name))
ctx.abort()
# These lines hold information about genes
else:
# Dictionary with information in gene list
gene_info = dict(zip(header, line.split("\t")))
# Dictionary with information in correct format:
panel_gene_info = {}
# Check if the gene belongs to the panel of interest
panels = gene_info.get("Clinical_db_gene_annotation", "").split(",")
if panel_name in panels:
# Get the hgnc symbol
hgnc_symbol = gene_info["HGNC_symbol"]
panel_gene_info["hgnc_symbol"] = hgnc_symbol
# Parse the manually annotated disease associated transcripts
transcripts_info = gene_info.get("Disease_associated_transcript")
transcripts = set()
if transcripts_info:
for entry in transcripts_info.split(","):
transcripts.add(entry.split(":")[1])
panel_gene_info["transcripts"] = ",".join(transcripts)
# Check manually annotated reduced penetrance
penetrance = gene_info.get("Reduced_penetrance")
panel_gene_info["penetrance"] = ""
if penetrance:
panel_gene_info["penetrance"] = "Yes"
# Check manually annotated mosaicism
mosaicism = gene_info.get("Mosaicism")
panel_gene_info["mosaicism"] = ""
if mosaicism:
panel_gene_info["mosaicism"] = "Yes"
# Check manually annotated disease models
panel_gene_info["inheritance"] = gene_info.get("Genetic_disease_model", "")
# Parse database entry version
panel_gene_info["entry_version"] = gene_info.get("Database_entry_version", "")
if hgnc_symbol in panel_genes:
# If we have multiple entries we update the information
pass
else:
panel_genes[hgnc_symbol] = panel_gene_info
# Print the headers
click.echo("##panel_id={}".format(panel_metadata[panel_name]["panel_id"]))
click.echo("##institute={}".format(panel_metadata[panel_name]["institute"]))
click.echo("##version={}".format(panel_metadata[panel_name]["version"]))
click.echo("##date={}".format(panel_metadata[panel_name]["date"]))
click.echo("##institute={}".format(panel_metadata[panel_name]["institute"]))
click.echo("##display_name={}".format(panel_metadata[panel_name]["display_name"]))
new_headers = [
"hgnc_symbol",
"disease_associated_transcripts",
"reduced_penetrance",
"genetic_disease_models",
"mosaicism",
"database_entry_version",
]
click.echo("#" + "\t".join(new_headers))
for hgnc_symbol in panel_genes:
panel_gene_info = panel_genes[hgnc_symbol]
click.echo(
"{0}\t{1}\t{2}\t{3}\t{4}\t{5}".format(
panel_gene_info["hgnc_symbol"],
panel_gene_info["transcripts"],
panel_gene_info["penetrance"],
panel_gene_info["inheritance"],
panel_gene_info["mosaicism"],
panel_gene_info["entry_version"],
)
)
if __name__ == "__main__":
cli()
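# A minimal invocation sketch, not part of the original script; the gene list
# file name and panel acronym below are hypothetical:
#   python genelist_to_panel.py cust000-Clinical_master_list.txt -n IEM > IEM.panel.txt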
| bsd-3-clause | 3,692,014,378,686,962,700 | 41.3 | 94 | 0.507183 | false |
jcpeterson/Dallinger | tests/test_api.py | 1 | 2085 | """Test python experiment API"""
import random
from uuid import UUID
import pytest
import dallinger
class TestAPI(object):
def test_uuid(self):
from dallinger.experiment import Experiment
exp_uuid = Experiment.make_uuid()
assert isinstance(UUID(exp_uuid, version=4), UUID)
def test_uuid_instance(self):
from dallinger.experiment import Experiment
exp = Experiment()
exp_uuid = exp.make_uuid()
assert isinstance(UUID(exp_uuid, version=4), UUID)
def test_uuid_reproducibility(self):
from dallinger.experiment import Experiment
exp = Experiment()
random.seed(1)
exp_uuid1 = exp.make_uuid()
exp_uuid2 = exp.make_uuid()
random.seed(1)
exp_uuid3 = exp.make_uuid()
assert exp_uuid1 != exp_uuid2
assert exp_uuid1 == exp_uuid3
def test_collect(self):
from dallinger.experiments import Bartlett1932
exp = Bartlett1932()
existing_uuid = "12345-12345-12345-12345"
data = exp.collect(existing_uuid, recruiter='bots')
assert isinstance(data, dallinger.data.Data)
dataless_uuid = "ed9e7ddd-3e97-452d-9e34-fee5d432258e"
dallinger.data.register(dataless_uuid, 'https://bogus-url.com/something')
try:
data = exp.collect(dataless_uuid, recruiter='bots')
except RuntimeError:
# This is expected for an already registered UUID with no accessible data
pass
else:
pytest.fail('Did not raise RuntimeError for existing UUID')
# In debug mode an unknown UUID fails
unknown_uuid = "c85d5086-2fa7-4baf-9103-e142b9170cca"
with pytest.raises(RuntimeError):
data = exp.collect(unknown_uuid, mode='debug', recruiter='bots')
def test_collect_requires_valid_uuid(self):
from dallinger.experiments import Bartlett1932
exp = Bartlett1932()
existing_uuid = "totally-bogus-id"
with pytest.raises(ValueError):
exp.collect(existing_uuid, recruiter='bots')
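# To run this suite (assumes pytest and a working dallinger install):
#   pytest tests/test_api.py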
| mit | 4,388,388,590,610,283,500 | 32.095238 | 85 | 0.642686 | false |
Anatoscope/sofa | applications/plugins/Compliant/examples/bielle_manivelle/scene_sml.py | 1 | 1289 | import Sofa
import SofaPython
import SofaPython.units
import SofaPython.sml
import SofaPython.SofaNumpy
import Compliant.sml
def createScene(node):
node.dt=0.01
node.gravity=[0, -9.81, 0]
node.createObject('RequiredPlugin', name = 'Compliant' )
node.createObject('CompliantAttachButtonSetting' )
node.createObject('CompliantImplicitSolver', name='odesolver',stabilization=1)
node.createObject('MinresSolver', name='numsolver', iterations='250', precision='1e-14')
model = SofaPython.sml.Model( SofaPython.Tools.localPath( __file__, "bielle_manivelle.sml") )
scene_bielle_manivelle = Compliant.sml.SceneArticulatedRigid(node, model)
scene_bielle_manivelle.material.load( SofaPython.Tools.localPath( __file__, "material.json") )
scene_bielle_manivelle.setMaterialByTag("part", "steel")
scene_bielle_manivelle.param.simuLengthUnit="dm"
scene_bielle_manivelle.param.showRigid=True
scene_bielle_manivelle.param.showOffset=True
scene_bielle_manivelle.createScene()
scene_bielle_manivelle.rigids["1"].node.createObject('FixedConstraint')
with SofaPython.SofaNumpy.edit_data( node, "gravity" ) as gravity:
gravity[0] = SofaPython.units.acceleration_from_SI(gravity[0])
return node
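# A minimal invocation sketch, not part of the original scene file; SofaPython
# scenes like this one are normally loaded through the SOFA launcher:
#   runSofa scene_sml.py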
| lgpl-2.1 | -4,048,201,276,032,951,300 | 33.837838 | 98 | 0.72692 | false |
emencia/emencia-django-slideshows | setup.py | 1 | 1293 | from setuptools import setup, find_packages
setup(
name='emencia-django-slideshows',
version=__import__('slideshows').__version__,
description=__import__('slideshows').__doc__,
long_description=open('README.rst').read(),
author='David Thenon',
author_email='[email protected]',
url='http://pypi.python.org/pypi/emencia-django-slideshows',
license='MIT',
packages=find_packages(),
classifiers=[
'Programming Language :: Python',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'Django>=1.8',
'django-filebrowser-no-grappelli>=3.5.6',
],
include_package_data=True,
zip_safe=False
)
| mit | 3,559,925,054,097,480,000 | 34.916667 | 71 | 0.600928 | false |
MaxTyutyunnikov/lino | obsolete/src/lino/apps/spz/spz.py | 1 | 1767 | #coding: iso-8859-1
## Copyright 2005 Luc Saffre
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify it
## under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful, but WITHOUT
## ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
## or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
## License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, write to the Free Software Foundation,
## Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from lino.forms import gui
from lino.adamo.ddl import Schema
from lino.apps.spz import tables
class SPZ(Schema):
name="Lino/SPZ"
version="0.0.1"
copyright="""\
Copyright (c) 2005 Luc Saffre.
This software comes with ABSOLUTELY NO WARRANTY and is
distributed under the terms of the GNU General Public License.
See file COPYING.txt for more information."""
def setupSchema(self):
for cl in tables.TABLES:
self.addTable(cl)
def showMainForm(self,sess):
frm = sess.form(
label="Main menu",
doc="""\
This is the SPZ main menu.
"""+("\n"*10))
m = frm.addMenu("s","&Stammdaten")
m.addItem("a",label="&Akten").setHandler(
sess.showViewGrid, tables.Akten)
self.addProgramMenu(sess,frm)
frm.addOnClose(sess.close)
frm.show()
if __name__ == '__main__':
app=SPZ()
app.quickStartup()
gui.run(app)
| gpl-3.0 | 3,422,176,852,637,496,000 | 27.967213 | 70 | 0.654216 | false |
eldarion/formly | formly/models.py | 1 | 16345 | from __future__ import unicode_literals
from django import forms
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models import Max
from django.template.defaultfilters import slugify
from django.urls import reverse
from django.utils import timezone
from jsonfield import JSONField
from six import python_2_unicode_compatible
from .fields import LimitedMultipleChoiceField, MultipleTextField
from .forms.widgets import LikertSelect, MultiTextWidget, RatingSelect
@python_2_unicode_compatible
class OrdinalScale(models.Model):
ORDINAL_KIND_LIKERT = "likert"
ORDINAL_KIND_RATING = "rating"
ORDINAL_KIND_CHOICES = [
(ORDINAL_KIND_LIKERT, "Likert Scale"),
(ORDINAL_KIND_RATING, "Rating Scale")
]
name = models.CharField(max_length=100)
kind = models.CharField(max_length=6, choices=ORDINAL_KIND_CHOICES)
def __str__(self):
return "{} [{}]".format(self.name, ", ".join([str(c) for c in self.choices.order_by("score")]))
@python_2_unicode_compatible
class OrdinalChoice(models.Model):
scale = models.ForeignKey(OrdinalScale, related_name="choices", on_delete=models.CASCADE)
label = models.CharField(max_length=100)
score = models.IntegerField()
def __str__(self):
return "{} ({})".format(self.label, self.score) # pragma: no cover
class Meta:
unique_together = [("scale", "score"), ("scale", "label")]
@python_2_unicode_compatible
class Survey(models.Model):
name = models.CharField(max_length=255)
creator = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="surveys", on_delete=models.CASCADE)
created = models.DateTimeField(default=timezone.now)
updated = models.DateTimeField(default=timezone.now)
published = models.DateTimeField(null=True, blank=True)
def save(self, *args, **kwargs):
if self.pk:
self.updated = timezone.now()
return super(Survey, self).save(*args, **kwargs)
def __str__(self):
return self.name # pragma: no cover
def get_absolute_url(self):
return reverse("formly:survey_detail", kwargs={"pk": self.pk})
def get_run_url(self):
return reverse("formly:take_survey", kwargs={"pk": self.pk})
def duplicate(self): # @@@ This could like use with some refactoring
survey = Survey.objects.get(pk=self.pk)
survey.pk = None
survey.save()
survey.pages.all().delete()
pages = {}
page_targets = []
choice_targets = []
for page in Survey.objects.get(pk=self.pk).pages.all():
orig_page_target = page.target
orig_page_pk = page.pk
page.pk = None
page.survey = survey
page.target = None
page.save()
pages[orig_page_pk] = page
if orig_page_target:
page_targets.append({
"page": page,
"orig_target_pk": orig_page_target.pk
})
for field in Page.objects.get(pk=orig_page_pk).fields.all():
orig_field_pk = field.pk
field.pk = None
field.survey = survey
field.page = page
field.save()
for choice in Field.objects.get(pk=orig_field_pk).choices.all():
orig_target = choice.target
choice.pk = None
choice.field = field
choice.target = None
choice.save()
if orig_target:
choice_targets.append({
"choice": choice,
"orig_target_pk": orig_target.pk
})
for page_target in page_targets:
page = page_target["page"]
page.target = pages[page_target["orig_target_pk"]]
page.save()
for choice_target in choice_targets:
choice = choice_target["choice"]
choice.target = pages[choice_target["orig_target_pk"]]
choice.save()
return survey
@property
def fields(self):
for page in self.pages.all():
for field in page.fields.all():
yield field
def next_page(self, user):
return self.first_page().next_page(user=user)
def first_page(self):
if self.pages.count() == 0:
self.pages.create()
return self.pages.all()[0]
def publish(self):
self.published = timezone.now()
self.save()
@python_2_unicode_compatible
class Page(models.Model):
survey = models.ForeignKey(Survey, related_name="pages", on_delete=models.CASCADE)
page_num = models.PositiveIntegerField(null=True, blank=True)
subtitle = models.CharField(max_length=255, blank=True)
    # Should be null when a FieldChoice on its last field has a target.
target = models.ForeignKey("self", null=True, blank=True, on_delete=models.SET_NULL)
class Meta:
unique_together = [
("survey", "page_num")
]
ordering = ["survey", "page_num"]
def save(self, *args, **kwargs):
if self.page_num is None:
max_page = self.survey.pages.aggregate(Max("page_num"))
self.page_num = (max_page.get("page_num__max") or 0) + 1
return super(Page, self).save(*args, **kwargs)
def __str__(self):
return self.label() # pragma: no cover
def label(self):
if self.subtitle:
return self.subtitle
else:
return "Page %d" % self.page_num
def get_absolute_url(self):
return reverse("formly:page_detail", kwargs={"pk": self.pk})
def next_page(self, user):
target = self
if self.completed(user=user):
try:
target = self.survey.pages.get(
page_num=self.page_num + 1
)
except Page.DoesNotExist:
target = None
if self.target:
target = self.target
if target and target.completed(user=user):
target = target.next_page(user=user)
return target
def completed(self, user):
return self.results.filter(result__user=user).count() > 0
    def is_last_page(self, user):
        # next_page() needs a user to evaluate completion, so one is required here too
        return self.next_page(user=user) is None
@python_2_unicode_compatible
class Field(models.Model):
TEXT_FIELD = 0
TEXT_AREA = 1
RADIO_CHOICES = 2
DATE_FIELD = 3
SELECT_FIELD = 4
CHECKBOX_FIELD = 5
MEDIA_FIELD = 6
BOOLEAN_FIELD = 7
MULTIPLE_TEXT = 8
LIKERT_FIELD = 9
RATING_FIELD = 10
FIELD_TYPE_CHOICES = [
(TEXT_FIELD, "Free Response - One Line"),
(TEXT_AREA, "Free Response - Box"),
(RADIO_CHOICES, "Multiple Choice - Pick One"),
(SELECT_FIELD, "Multiple Choice - Pick One (Dropdown)"),
(CHECKBOX_FIELD, "Multiple Choice - Can select multiple answers"),
(DATE_FIELD, "Date"),
(MEDIA_FIELD, "File Upload"),
(BOOLEAN_FIELD, "True/False"),
(MULTIPLE_TEXT, "Multiple Free Responses - Single Lines"),
(LIKERT_FIELD, "Likert Scale"),
(RATING_FIELD, "Rating Scale")
]
survey = models.ForeignKey(Survey, related_name="fields", on_delete=models.CASCADE) # Denorm
page = models.ForeignKey(Page, null=True, blank=True, related_name="fields", on_delete=models.SET_NULL)
label = models.TextField()
field_type = models.IntegerField(choices=FIELD_TYPE_CHOICES)
scale = models.ForeignKey(OrdinalScale, default=None, null=True, blank=True, related_name="fields", on_delete=models.SET_NULL)
help_text = models.TextField(blank=True)
ordinal = models.IntegerField()
maximum_choices = models.IntegerField(null=True, blank=True)
# Should this be moved to a separate Constraint model that can also
# represent cross field constraints
required = models.BooleanField(default=False)
expected_answers = models.PositiveSmallIntegerField(default=1)
    mapping = JSONField(blank=True, default=dict)  # pass the callable so instances do not share one mutable dict
# def clean(self):
# super(Field, self).clean()
# if self.page is None:
# if self.target_choices.count() == 0:
# raise ValidationError(
# "A question not on a page must be a target of a choice from another question"
# )
def save(self, *args, **kwargs):
if not self.ordinal:
# Set ordinal, since full_clean() will fail if not set
self.ordinal = 1
self.full_clean()
if not self.pk and self.page is not None:
self.ordinal = (self.page.fields.aggregate(
Max("ordinal")
)["ordinal__max"] or 0) + 1
return super(Field, self).save(*args, **kwargs)
def move_up(self):
try:
other_field = self.page.fields.order_by("-ordinal").filter(
ordinal__lt=self.ordinal
)[0]
existing = self.ordinal
other = other_field.ordinal
self.ordinal = other
other_field.ordinal = existing
other_field.save()
self.save()
except IndexError:
return
def move_down(self):
try:
other_field = self.page.fields.order_by("ordinal").filter(
ordinal__gt=self.ordinal
)[0]
existing = self.ordinal
other = other_field.ordinal
self.ordinal = other
other_field.ordinal = existing
other_field.save()
self.save()
except IndexError:
return
class Meta:
ordering = ["ordinal"]
def __str__(self):
return "%s of type %s on %s" % (
self.label, self.get_field_type_display(), self.survey
)
def get_absolute_url(self):
return reverse("formly:field_update", kwargs={"pk": self.pk})
@property
def needs_choices(self):
return self.field_type in [
Field.RADIO_CHOICES,
Field.SELECT_FIELD,
Field.CHECKBOX_FIELD
]
@property
def name(self):
return slugify(self.label)
@property
def is_multiple(self):
return self.field_type == Field.MULTIPLE_TEXT
def form_field(self):
if self.field_type in [Field.LIKERT_FIELD, Field.RATING_FIELD]:
if self.scale:
choices = [(x.pk, x.label) for x in self.scale.choices.all().order_by("score")]
else:
choices = []
else:
choices = [(x.pk, x.label) for x in self.choices.all()]
field_class, field_kwargs = self._get_field_class(choices)
field = field_class(**field_kwargs)
return field
def _get_field_class(self, choices):
"""
Set field_class and field kwargs based on field type
"""
field_class = forms.CharField
kwargs = dict(
label=self.label,
help_text=self.help_text,
required=self.required
)
field_type = FIELD_TYPES.get(self.field_type, {})
field_class = field_type.get("field_class", field_class)
kwargs.update(**field_type.get("kwargs", {}))
if self.field_type in [Field.CHECKBOX_FIELD, Field.SELECT_FIELD, Field.RADIO_CHOICES, Field.LIKERT_FIELD, Field.RATING_FIELD]:
kwargs.update({"choices": choices})
if self.field_type == Field.CHECKBOX_FIELD:
kwargs.update({"maximum_choices": self.maximum_choices})
elif self.field_type == Field.MULTIPLE_TEXT:
kwargs.update({
"fields_length": self.expected_answers,
"widget": MultiTextWidget(widgets_length=self.expected_answers),
})
return field_class, kwargs
FIELD_TYPES = {
Field.TEXT_AREA: dict(
field_class=forms.CharField,
kwargs=dict(
widget=forms.Textarea
)
),
Field.RADIO_CHOICES: dict(
field_class=forms.ChoiceField,
kwargs=dict(
widget=forms.RadioSelect
)
),
Field.LIKERT_FIELD: dict(
field_class=forms.ChoiceField,
kwargs=dict(
widget=LikertSelect
)
),
Field.RATING_FIELD: dict(
field_class=forms.ChoiceField,
kwargs=dict(
widget=RatingSelect
)
),
Field.DATE_FIELD: dict(
field_class=forms.DateField,
kwargs=dict()
),
Field.SELECT_FIELD: dict(
field_class=forms.ChoiceField,
kwargs=dict(
widget=forms.Select
)
),
Field.CHECKBOX_FIELD: dict(
field_class=LimitedMultipleChoiceField,
kwargs=dict(
widget=forms.CheckboxSelectMultiple
)
),
Field.BOOLEAN_FIELD: dict(
field_class=forms.BooleanField,
kwargs=dict()
),
Field.MEDIA_FIELD: dict(
field_class=forms.FileField,
kwargs=dict()
),
Field.MULTIPLE_TEXT: dict(
field_class=MultipleTextField,
kwargs=dict()
)
}
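# A minimal usage sketch, not part of the original module; it assumes a
# configured Django project with at least one saved Field instance:
#   field = Field.objects.first()
#   form_field = field.form_field()  # a django.forms field built per FIELD_TYPES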
@python_2_unicode_compatible
class FieldChoice(models.Model):
field = models.ForeignKey(Field, related_name="choices", on_delete=models.CASCADE)
label = models.CharField(max_length=100)
target = models.ForeignKey(Field, null=True, blank=True, related_name="target_choices", on_delete=models.SET_NULL)
def clean(self):
super(FieldChoice, self).clean()
if self.target is not None:
if self.target.page:
raise ValidationError(
"FieldChoice target's can only be questions not associated with a page."
)
def save(self, *args, **kwargs):
self.full_clean()
return super(FieldChoice, self).save(*args, **kwargs)
def __str__(self):
return self.label
@python_2_unicode_compatible
class SurveyResult(models.Model):
survey = models.ForeignKey(Survey, related_name="survey_results", on_delete=models.CASCADE)
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="survey_results", on_delete=models.CASCADE)
date_submitted = models.DateTimeField(default=timezone.now)
def get_absolute_url(self):
return reverse("survey_edit", kwargs={"pk": self.pk, "page": 1})
def __str__(self):
return self.survey.name
@python_2_unicode_compatible
class FieldResult(models.Model):
survey = models.ForeignKey(Survey, related_name="results", on_delete=models.CASCADE) # Denorm
page = models.ForeignKey(Page, related_name="results", on_delete=models.CASCADE) # Denorm
result = models.ForeignKey(SurveyResult, related_name="results", on_delete=models.CASCADE)
question = models.ForeignKey(Field, related_name="results", on_delete=models.CASCADE)
upload = models.FileField(upload_to="formly/", blank=True)
answer = JSONField(blank=True) # @@@ I think this should be something different than a string
def _update_mapping(self):
answer = self.answer["answer"]
mapping = dict()
for ans in answer:
ans = ans.strip().upper()
if ans in self.question.mapping:
mapping[ans] = self.question.mapping[ans]
self.answer["mapping"] = mapping
def save(self, *args, **kwargs):
if self.question.field_type == Field.MULTIPLE_TEXT:
self._update_mapping()
return super(FieldResult, self).save(*args, **kwargs)
def answer_value(self):
if self.answer:
return self.answer.get("answer")
def answer_display(self):
val = self.answer_value()
if val:
if self.question.needs_choices:
if self.question.field_type == Field.CHECKBOX_FIELD:
return ", ".join([str(FieldChoice.objects.get(pk=int(v))) for v in val])
return FieldChoice.objects.get(pk=int(val)).label
if self.question.field_type in [Field.LIKERT_FIELD, Field.RATING_FIELD]:
choice = OrdinalChoice.objects.get(pk=int(val))
return "{} ({})".format(choice.label, choice.score)
return val
def __str__(self):
return self.survey.name
class Meta:
ordering = ["result", "question"]
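# --- Hedged illustration (added; not part of the original formly source) ---
# Minimal sketch of how the FIELD_TYPES registry can drive form-field
# construction; `field` is assumed to be a saved Field instance whose
# field_type has an entry in FIELD_TYPES.
def build_form_field_example(field):
    spec = FIELD_TYPES[field.field_type]
    return spec["field_class"](label=field.label, **spec["kwargs"])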
| bsd-3-clause | 1,757,393,805,897,196,000 | 32.425358 | 134 | 0.591374 | false |
makuto/redditLikedSavedImageDownloader | submission.py | 1 | 4136 | # -*- coding: utf-8 -*-
import pickle
import os
# third-party imports
import jsonpickle
class Submission:
def __init__(self):
# Source is either Tumblr or Reddit
self.source = u''
self.title = u''
self.author = u''
self.subreddit = u''
self.subredditTitle = u''
self.body = u''
self.bodyUrl = u''
self.postUrl = u''
def getXML(self):
baseString = (u'\t<source>' + self.source + u'</source>\n'
+ u'\t<title>' + self.title + u'</title>\n'
+ u'\t<author>' + self.author + u'</author>\n'
+ u'\t<subreddit>' + self.subreddit + u'</subreddit>\n'
+ u'\t<subredditTitle>' + self.subredditTitle + u'</subredditTitle>\n'
+ u'\t<body>' + self.body + u'</body>\n'
+ u'\t<bodyUrl>' + self.bodyUrl + u'</bodyUrl>\n'
+ u'\t<postUrl>' + self.postUrl + u'</postUrl>\n')
        return baseString
def getHtml(self):
baseString = (u'\t<p>' + self.source + u'</p>\n'
+ u'\t<h2>' + self.title + u'</h2>\n'
+ u'\t<h3>' + self.author + u'</h3>\n'
+ u'\t<h4>' + self.subreddit + u'</h4>\n'
+ u'\t<h4>' + self.subredditTitle + u'</h4>\n'
+ u'\t<p>' + self.body + u'</p>\n'
# + u'\t<p>' + self.bodyUrl + u'</p>\n'
+ u'\t<a href=' + self.postUrl + u'/>Link</a><br /><br />\n')
return baseString
def getJson(self):
jsonpickle.set_preferred_backend('json')
jsonpickle.set_encoder_options('json', ensure_ascii=False, indent=4, separators=(',', ': '))
return jsonpickle.encode(self)
def getAsList(self):
return [self.source, self.title, self.author,
self.subreddit, self.subredditTitle,
self.body, self.bodyUrl, self.postUrl]
def initFromDict(self, dictEntry):
self.source = dictEntry['source']
self.title = dictEntry['title']
self.author = dictEntry['author']
self.subreddit = dictEntry['subreddit']
self.subredditTitle = dictEntry['subredditTitle']
self.body = dictEntry['body']
self.bodyUrl = dictEntry['bodyUrl']
self.postUrl = dictEntry['postUrl']
def getAsList_generator(submissions):
for submission in submissions:
yield submission.getAsList()
def writeOutSubmissionsAsJson(redditList, file):
    # Write the submissions as a single valid JSON array
    submissionsJson = u',\n'.join(submission.getJson() for submission in redditList)
    file.write((u'[\n' + submissionsJson + u'\n]').encode('utf8'))
def saveSubmissionsAsJson(submissions, fileName):
outputFile = open(fileName, 'wb')
writeOutSubmissionsAsJson(submissions, outputFile)
outputFile.close()
def writeOutSubmissionsAsHtml(redditList, file):
submissionsStr = ""
for submission in redditList:
submissionsStr += submission.getHtml() + u'\n'
htmlStructure = u"""<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Reddit Saved Comments</title>
</head>
<body>
{0}
</body>
</html>
""".format(submissionsStr)
file.write(htmlStructure.encode('utf8'))
def saveSubmissionsAsHtml(submissions, fileName):
outputFile = open(fileName, 'wb')
writeOutSubmissionsAsHtml(submissions, outputFile)
outputFile.close()
def writeOutSubmissionsAsXML(redditList, file):
for submission in redditList:
outputString = u'<submission>\n' + submission.getXML() + u'</submission>\n'
file.write(outputString.encode('utf8'))
def saveSubmissionsAsXML(submissions, fileName):
outputFile = open(fileName, 'wb')
writeOutSubmissionsAsXML(submissions, outputFile)
outputFile.close()
def writeCacheSubmissions(submissions, cacheFileName):
    with open(cacheFileName, 'wb') as cacheFile:
        pickle.dump(submissions, cacheFile)
def readCacheSubmissions(cacheFileName):
    if os.path.exists(cacheFileName):
        with open(cacheFileName, 'rb') as cacheFile:
            return pickle.load(cacheFile)
    else:
        return []
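if __name__ == '__main__':
    # Hedged usage sketch (added; not part of the original module): builds a
    # single Submission and writes it out in each supported format. The field
    # values and file names below are arbitrary examples.
    example = Submission()
    example.source = u'reddit'
    example.title = u'Example post'
    example.author = u'example_user'
    example.postUrl = u'https://www.reddit.com/r/example/comments/abc123'
    saveSubmissionsAsJson([example], 'submissions.json')
    saveSubmissionsAsHtml([example], 'submissions.html')
    saveSubmissionsAsXML([example], 'submissions.xml')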
| mit | -6,548,970,664,116,083,000 | 29.865672 | 100 | 0.602273 | false |
facelessuser/subclrschm | subclrschm/lib/gui/platform_window_focus.py | 1 | 1219 | """Platform window focus."""
from __future__ import unicode_literals
import ctypes
import ctypes.util
from .. import util
if util.platform() == "osx":
appkit = ctypes.cdll.LoadLibrary(ctypes.util.find_library('AppKit'))
objc = ctypes.cdll.LoadLibrary(ctypes.util.find_library('objc'))
objc.objc_getClass.restype = ctypes.c_void_p
objc.sel_registerName.restype = ctypes.c_void_p
objc.objc_msgSend.restype = ctypes.c_void_p
objc.objc_msgSend.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
def platform_window_focus(frame):
"""Set focus to the window frame."""
# General window raising
if frame.IsIconized():
frame.Iconize(False)
if not frame.IsShown():
frame.Show(True)
frame.Raise()
# OSX specific extra to ensure raise
if util.platform() == "osx":
try:
nsapplication = ctypes.c_void_p(objc.objc_getClass('NSApplication'))
nsapp = ctypes.c_void_p(objc.objc_msgSend(nsapplication, objc.sel_registerName('sharedApplication')))
objc.objc_msgSend(nsapp, objc.sel_registerName('activateIgnoringOtherApps:'), True)
except Exception:
# Failed to bring window to top in OSX
pass
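if __name__ == '__main__':
    # Hedged usage sketch (added; not part of the original module): assumes
    # wxPython is installed. platform_window_focus() accepts any wx.Frame.
    import wx
    app = wx.App(False)
    frame = wx.Frame(None, title='platform_window_focus demo')
    frame.Show(True)
    platform_window_focus(frame)
    app.MainLoop()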
| mit | -7,847,522,584,076,326,000 | 33.828571 | 113 | 0.66612 | false |
hirofumi0810/tensorflow_end2end_speech_recognition | models/recurrent/layers/bn_lstm.py | 1 | 9995 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Long-Short Term Memory with Batch Normalization."""
import tensorflow as tf
from tensorflow.contrib.rnn import RNNCell, LSTMStateTuple
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.platform import tf_logging as logging
from .batch_normalization import batch_norm
class BatchNormLSTMCell(RNNCell):
"""Batch Normalized Long short-term memory unit (LSTM) recurrent network cell.
The default non-peephole implementation is based on:
http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf
S. Hochreiter and J. Schmidhuber.
"Long Short-Term Memory". Neural Computation, 9(8):1735-1780, 1997.
The peephole implementation is based on:
https://research.google.com/pubs/archive/43905.pdf
Hasim Sak, Andrew Senior, and Francoise Beaufays.
"Long short-term memory recurrent neural network architectures for
large scale acoustic modeling." INTERSPEECH, 2014.
The class uses optional peep-hole connections, optional cell clipping, and
an optional projection layer.
"""
def __init__(self, num_units, is_training, input_size=None,
use_peepholes=False, cell_clip=None,
initializer=None, num_proj=None, proj_clip=None,
num_unit_shards=None, num_proj_shards=None,
forget_bias=1.0, state_is_tuple=True,
reuse=None):
"""Initialize the parameters for an LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell
is_training: bool, set True when training.
input_size: Deprecated and unused.
use_peepholes: bool, set True to enable diagonal/peephole connections.
cell_clip: (optional) A float value, if provided the cell state is clipped
by this value prior to the cell output activation.
initializer: (optional) The initializer to use for the weight and
projection matrices.
num_proj: (optional) int, The output dimensionality for the projection
matrices. If None, no projection is performed.
proj_clip: (optional) A float value. If `num_proj > 0` and `proj_clip` is
provided, then the projected values are clipped elementwise to within
`[-proj_clip, proj_clip]`.
num_unit_shards: Deprecated, will be removed by Jan. 2017.
Use a variable_scope partitioner instead.
num_proj_shards: Deprecated, will be removed by Jan. 2017.
Use a variable_scope partitioner instead.
forget_bias: Biases of the forget gate are initialized by default to 1
in order to reduce the scale of forgetting at the beginning of
the training.
state_is_tuple: If True, accepted and returned states are 2-tuples of
the `c_state` and `m_state`. If False, they are concatenated
along the column axis. This latter behavior will soon be deprecated.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
"""
if not state_is_tuple:
logging.warn("%s: Using a concatenated state is slower and will soon be "
"deprecated. Use state_is_tuple=True.", self)
if input_size is not None:
logging.warn("%s: The input_size parameter is deprecated.", self)
if num_unit_shards is not None or num_proj_shards is not None:
logging.warn("%s: The num_unit_shards and proj_unit_shards parameters are "
"deprecated and will be removed in Jan 2017. "
"Use a variable scope with a partitioner instead.", self)
self._num_units = num_units
self._use_peepholes = use_peepholes
self._cell_clip = cell_clip
self._initializer = initializer
self._num_proj = num_proj
self._proj_clip = proj_clip
self._num_unit_shards = num_unit_shards
self._num_proj_shards = num_proj_shards
self._forget_bias = forget_bias
self._state_is_tuple = state_is_tuple
self._reuse = reuse
self._is_training = is_training
if num_proj:
self._state_size = (LSTMStateTuple(num_units, num_proj)
if state_is_tuple else num_units + num_proj)
self._output_size = num_proj
else:
self._state_size = (LSTMStateTuple(num_units, num_units)
if state_is_tuple else 2 * num_units)
self._output_size = num_units
@property
def state_size(self):
return self._state_size
@property
def output_size(self):
return self._output_size
def __call__(self, inputs, state, scope=None):
"""Run one step of LSTM.
Args:
inputs: input Tensor, 2D, batch x num_units.
state: if `state_is_tuple` is False, this must be a state Tensor,
`2-D, batch x state_size`. If `state_is_tuple` is True, this must be a
tuple of state Tensors, both `2-D`, with column sizes `c_state` and
`m_state`.
scope: VariableScope for the created subgraph; defaults to "lstm_cell".
Returns:
A tuple containing:
- A `2-D, [batch x output_dim]`, Tensor representing the output of the
LSTM after reading `inputs` when previous state was `state`.
Here output_dim is:
num_proj if num_proj was set,
num_units otherwise.
- Tensor(s) representing the new state of LSTM after reading `inputs` when
the previous state was `state`. Same type and shape(s) as `state`.
Raises:
ValueError: If input size cannot be inferred from inputs via
static shape inference.
"""
num_proj = self._num_units if self._num_proj is None else self._num_proj
if self._state_is_tuple:
(c_prev, h_prev) = state
else:
c_prev = tf.slice(state, begin=[0, 0], size=[-1, self._num_units])
h_prev = tf.slice(
state, begin=[0, self._num_units], size=[-1, num_proj])
dtype = inputs.dtype
input_size = inputs.get_shape().with_rank(2)[1]
if input_size.value is None:
raise ValueError(
"Could not infer input size from inputs.get_shape()[-1]")
with tf.variable_scope(scope or "lstm_cell", initializer=self._initializer,
reuse=self._reuse) as unit_scope:
if self._num_unit_shards is not None:
unit_scope.set_partitioner(
partitioned_variables.fixed_size_partitioner(
self._num_unit_shards))
W_xh = tf.get_variable('W_xh', shape=[input_size, 4 * self._num_units],
initializer=self._initializer)
W_hh = tf.get_variable('W_hh', shape=[num_proj, 4 * self._num_units],
initializer=self._initializer)
bias = tf.get_variable('b', [4 * self._num_units])
xh = tf.matmul(inputs, W_xh)
hh = tf.matmul(h_prev, W_hh)
bn_xh = batch_norm(xh, 'xh', self._is_training)
bn_hh = batch_norm(hh, 'hh', self._is_training)
# i = input_gate, g = new_input, f = forget_gate, o = output_gate
# lstm_matrix = tf.contrib.rnn._linear([inputs, h_prev], 4 * self._num_units, bias=True)
lstm_matrix = tf.nn.bias_add(tf.add(bn_xh, bn_hh), bias)
i, g, f, o = tf.split(
value=lstm_matrix, num_or_size_splits=4, axis=1)
# Diagonal connections
if self._use_peepholes:
            # tf.variable_scope and tf.get_variable are used together as a pair
with tf.variable_scope(unit_scope) as projection_scope:
if self._num_unit_shards is not None:
projection_scope.set_partitioner(None)
p_f_diag = tf.get_variable(
"p_f_diag", shape=[self._num_units], dtype=dtype)
p_i_diag = tf.get_variable(
"p_i_diag", shape=[self._num_units], dtype=dtype)
p_o_diag = tf.get_variable(
"p_o_diag", shape=[self._num_units], dtype=dtype)
if self._use_peepholes:
c = (tf.sigmoid(f + self._forget_bias + p_f_diag * c_prev) * c_prev +
tf.sigmoid(i + p_i_diag * c_prev) * tf.tanh(g))
else:
c = (tf.sigmoid(f + self._forget_bias) * c_prev +
tf.sigmoid(i) * tf.tanh(g))
if self._cell_clip is not None:
c = tf.clip_by_value(c, -self._cell_clip, self._cell_clip)
bn_c = batch_norm(c, 'bn_c', self._is_training)
if self._use_peepholes:
            # batch normalization is not applied to the peephole side
h = tf.sigmoid(o + p_o_diag * c) * tf.tanh(bn_c)
else:
h = tf.sigmoid(o) * tf.tanh(bn_c)
if self._num_proj is not None:
with tf.variable_scope("projection") as proj_scope:
if self._num_proj_shards is not None:
proj_scope.set_partitioner(
partitioned_variables.fixed_size_partitioner(
self._num_proj_shards))
h = tf.contrib.rnn._linear(h, self._num_proj, bias=False)
if self._proj_clip is not None:
h = tf.clip_by_value(h, -self._proj_clip, self._proj_clip)
new_state = (LSTMStateTuple(c, h) if self._state_is_tuple else
tf.concat(values=[c, h], axis=1))
return h, new_state
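if __name__ == '__main__':
    # Hedged usage sketch (added; not part of the original module): wires the
    # cell into a dynamic RNN. The shapes below are arbitrary demo values.
    batch_size, max_time, input_dim, num_units = 4, 10, 8, 16
    demo_inputs = tf.placeholder(tf.float32, [batch_size, max_time, input_dim])
    cell = BatchNormLSTMCell(num_units, is_training=True)
    outputs, final_state = tf.nn.dynamic_rnn(cell, demo_inputs, dtype=tf.float32)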
| mit | -2,963,574,719,416,825,300 | 46.669856 | 100 | 0.574124 | false |
rocky/python-spark | test/test_misc.py | 1 | 2892 | import unittest
from spark_parser.spark import GenericParser
from spark_parser import PYTHON3
if PYTHON3:
from io import StringIO
else:
from StringIO import StringIO
class Rules(GenericParser):
"""Testing duplicate rules"""
def p_rules(self, args):
"""
x ::= TOKEN
x ::= TOKEN
stmts ::= stmt+
ratings ::= STARS*
"""
pass
def duplicate_rule(self, rule):
if not hasattr(self, 'dups'):
self.dups = []
self.dups.append(rule)
pass
class RulesPeriod(GenericParser):
"""Testing ? extended rule"""
def p_rules(self, args):
"""
opt_period ::= PERIOD?
"""
pass
pass
class InvalidRule(GenericParser):
"""Testing illegal recursive rule"""
def p_rules(self, args):
"""
foo ::= foo
"""
pass
pass
class TestMisc(unittest.TestCase):
def test_basic(self):
# Check duplicate rule detection
parser = Rules('x', debug={'dups': True})
self.assertTrue(hasattr(parser, 'dups'))
self.assertEqual(parser.dups, [('x', ('TOKEN',))])
# Check "+", and "*", expansion
rules = sorted(parser.rule2name.items())
self.assertEqual(rules,
[(('START', ('|-', 'x')), 'ambda>'),
(('ratings', ()), 'rules'),
(('ratings', ('ratings', 'STARS')), 'rules'),
(('stmts', ('stmt',)), 'rules'),
(('stmts', ('stmts', 'stmt')), 'rules'),
(('x', ('TOKEN',)), 'rules')])
f = StringIO()
expect = \
"""START ::= |- x
ratings ::=
ratings ::= ratings STARS
stmts ::= stmt
stmts ::= stmts stmt
x ::= TOKEN
"""
parser.dump_grammar(f)
self.assertEqual(f.getvalue(), expect)
# Check Invalid rule
try:
InvalidRule('foo', debug={'dups': True})
self.assertTrue(False)
except TypeError:
self.assertTrue(True)
self.assertEqual(set(['stmts', 'ratings']), parser.list_like_nt)
self.assertEqual(set(), parser.optional_nt)
# Check erroneous start symbol
try:
parser = Rules('bad-start')
self.assertTrue(False)
except TypeError:
self.assertTrue(True)
def test_period(self):
parser = RulesPeriod('opt_period', debug={'dups': True})
# Check "?" expansion
rules = sorted(parser.rule2name.items())
self.assertEqual(rules,
[(('START', ('|-', 'opt_period')), 'ambda>'),
(('opt_period', ()), 'rules'),
(('opt_period', ('PERIOD',)), 'rules'), ])
self.assertEqual(set(['opt_period']), parser.optional_nt)
if __name__ == '__main__':
unittest.main()
| mit | -6,832,816,445,743,411,000 | 26.542857 | 73 | 0.499654 | false |
vreon/figment | examples/theworldfoundry/tests/test_spatial_social.py | 1 | 3029 | import pytest
from theworldfoundry.modes import ActionMode
def test_move(player, courtyard):
player.perform("go north")
assert player.saw("Courtyard")
def test_move_invalid(player, courtyard):
player.perform("go fish")
assert player.saw("unable")
def test_move_sartre(player):
player.perform("go north")
assert player.saw("don't seem to be any exits")
def test_move_witness(player, statue, courtyard):
player.perform("go north")
assert statue.saw("Test Player travels north")
def test_say(player):
player.perform("say Hello!")
assert player.saw("Hello!")
def test_say_witness(player, statue, ghost):
ghost.hearing = True
statue.hearing = True
statue.mode = ActionMode()
player.perform("say Citizens! Hello!")
assert player.saw('You say: "Citizens! Hello!"')
assert statue.saw('Test Player says: "Citizens! Hello!"')
assert ghost.saw('Test Player says: "Citizens! Hello!"')
statue.perform("say Hello, orphan.")
assert statue.saw('You say: "Hello, orphan."')
assert player.saw('A statue says: "Hello, orphan."')
assert ghost.saw('A statue says: "Hello, orphan."')
def test_sing_witness(player, statue):
statue.hearing = True
player.perform("sing The road goes ever on and on...")
assert player.saw('You sing: "The road goes ever on and on..."')
assert statue.saw('Test Player sings: "The road goes ever on and on..."')
@pytest.mark.xfail(reason="not implemented yet")
def test_whisper_to(player, statue, ghost):
ghost.hearing = True
statue.hearing = True
player.perform("whisper to statue: Psst")
assert player.saw("Psst")
assert statue.saw("Psst")
assert ghost.did_not_see("Psst")
def test_emote(player, statue):
statue.hearing = True
player.perform("dance")
assert player.saw("You dance.")
assert statue.saw("Test Player dances.")
def test_emote_with(player, statue):
statue.hearing = True
player.perform("point at statue")
assert player.saw("You point at a statue.")
assert statue.saw("Test Player points at you.")
def test_emote_ambiguous(player, statue, ball, green_ball):
statue.hearing = True
player.perform("poke ball")
assert player.saw("Which")
player.forget()
player.perform("1")
assert player.saw("You poke a red ball")
assert statue.saw("Test Player pokes a red ball.")
def test_emote_ambiguous_with_join(player, statue, ball, green_ball):
statue.hearing = True
player.perform("point at ball")
assert player.saw("Which")
player.forget()
player.perform("1")
assert player.saw("You point at a red ball")
assert statue.saw("Test Player points at a red ball.")
@pytest.mark.xfail(reason="not implemented yet")
def test_emote_at_exit(player, statue, courtyard):
statue.hearing = True
player.perform("point north")
assert player.saw("You point north to The World Foundry - Courtyard.")
assert statue.saw("Test Player points north to The World Foundry - Courtyard.")
| mit | 5,833,138,179,500,380,000 | 28.125 | 83 | 0.683064 | false |
veeti/decent | decent/tests/test_schema.py | 1 | 8238 | import pytest
from pytest import mark
from decent.schema import *
from decent.error import *
## Helpers
def ok(x):
return x
## Empty schema
def test_empty_schema_valid_value():
schema = Schema({})
assert schema({}) == {}
@mark.parametrize('value', [None, object(), [], 123, True, False, "Hello"])
def test_empty_schema_invalid_value(value):
schema = Schema({})
with pytest.raises(Invalid):
schema(value)
## Invalid schema
@mark.parametrize('value', [None, object(), [{}], True, False, 123])
def test_invalid_schema(value):
with pytest.raises(SchemaError):
Schema(value)
@mark.parametrize('value', [None, object(), True, False, 123])
def test_invalid_schema_validators(value):
with pytest.raises(SchemaError):
Schema({
'a': value,
})
## Callables
def test_callable_transforms_value():
schema = Schema({
'test': lambda x: x + 1
})
assert schema({ 'test': 1 }) == { 'test': 2 }
def test_callable_error():
def raiser(x):
raise Error("Nope")
schema = Schema({
'test': raiser,
})
try:
schema({ 'test': "abc" })
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 1
assert e.path == ['test']
assert e.message == "Nope"
def test_callable_invalid():
def raiser(x):
first = Error("First")
second = Error("Second")
raise Invalid([first, second])
schema = Schema({
'test': raiser,
})
try:
schema({ 'test': "abc" })
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 2
assert "First" in e.messages
assert "Second" in e.messages
## Nested schemas
def test_nested_valid():
schema = Schema({
'test': Schema({
'inner': lambda x: x + 1,
})
})
assert schema({ 'test': { 'inner': 1 } }) == { 'test': { 'inner': 2 } }
def test_nested_error():
def raiser(x):
raise Error("Nope")
schema = Schema({
'test': Schema({
'inner': raiser,
})
})
try:
schema({ 'test': { 'inner': 123 } })
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 1
assert e.path == ['test', 'inner']
assert e.message == "Nope"
def test_nested_multiple_errors():
def raiser(x):
raise Error("Nope")
schema = Schema({
'will_fail': raiser,
'nested': Schema({
'inner': raiser,
'another': ok,
})
})
try:
schema({
'will_fail': "Hello",
'nested': {
'inner': 123
}
})
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 3
assert ['will_fail'] in e.paths
assert ['nested', 'inner'] in e.paths
assert ['nested', 'another'] in e.paths
## Missing keys
def test_fails_with_missing_key():
schema = Schema({ 'a': ok })
try:
schema({})
raise AssertionError("Expected error.")
except Invalid as e:
assert e.path == ['a']
assert "required" in e.message
def test_fails_with_multiple_missing_keys():
schema = Schema({ 'a': ok, 'b': ok, 'c': ok })
try:
schema({})
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 3
assert ['a'] in e.paths
assert ['b'] in e.paths
assert ['c'] in e.paths
for message in e.messages:
assert "required" in message
def test_fails_with_nested_missing_keys():
schema = Schema({
'nested': Schema({
'a': ok,
'b': ok,
'c': ok,
}),
})
try:
schema({ 'nested': {} })
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 3
assert ['nested', 'a'] in e.paths
assert ['nested', 'b'] in e.paths
assert ['nested', 'c'] in e.paths
for message in e.messages:
assert "required" in message
def test_fails_missing_nested_schema():
schema = Schema({
'nested': Schema({
'a': ok,
'b': ok,
'c': ok,
}),
})
try:
schema({})
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 1
assert e.path == ['nested']
def test_fails_missing_custom_message():
schema = Schema({ 'a': ok }, required_error="Bla")
try:
schema({})
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 1
assert e.path == ['a']
assert e.message == "Bla"
## Extra keys
def test_discards_unknown_keys():
schema = Schema({ 'a': ok }, extra_keys=Schema.IGNORE)
result = schema({ 'a': 123, 'b': 456 })
assert 'a' in result
assert 'b' not in result
def test_accepts_unknown_keys():
schema = Schema({
'a': ok,
}, extra_keys=Schema.ACCEPT)
result = schema({ 'a': 123, 'b': 456 })
assert 'a' in result
assert 'b' in result
assert result['b'] == 456
def test_fails_unknown_keys():
schema = Schema({
'a': ok,
}, extra_keys=Schema.REJECT)
try:
schema({ 'a': 123, 'b': 456 })
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 1
assert e.path == ['b']
## Optional keys
def test_optional_keys_missing():
schema = Schema({
'a': ok,
Optional('b'): ok,
})
result = schema({ 'a': 123 })
assert result['a'] == 123
assert 'b' not in result
def test_optional_keys_accepted():
schema = Schema({
Optional('b'): ok,
})
result = schema({ 'b': 456 })
assert result['b'] == 456
## Default values
def test_default_value():
schema = Schema({
Default('test', 123): ok,
})
assert schema({}) == { 'test': 123 }
def test_nested_default_value():
schema = Schema({
Default('test', {}): Schema({
Default('inner', 123): ok,
})
})
assert schema({}) == { 'test': { 'inner': 123 } }
## Validator on entire schema
def test_entire_validator_gets_all_data():
called = []
def entire(data):
assert data['a'] == 'a'
assert data['b'] == 'b'
called.append(True)
schema = Schema({ 'a': ok, 'b': ok }, entire=entire)
schema({ 'a': 'a', 'b': 'b' })
assert called
def test_entire_validator_mutates_data():
def entire(data):
data['a'], data['b'] = data['b'], data['a']
return data
schema = Schema({ 'a': ok, 'b': ok }, entire=entire)
assert schema({ 'a': 'b', 'b': 'a' }) == { 'a': 'a', 'b': 'b' }
def test_entire_validator_called_with_failures():
called = []
def raiser(x):
raise Error("Nope")
def entire(data):
called.append(1)
assert data['a'] == 'a'
# b failed before, so it shouldn't be included
assert 'b' not in data
schema = Schema({
'a': ok,
'b': raiser,
}, entire=entire)
try:
schema({
'a': 'a',
'b': 123,
})
raise AssertionError("Expected error.")
except Invalid as e:
assert called
def test_entire_validator_raises_invalid():
def entire(data):
raise Error("Nope")
schema = Schema({ 'a': ok }, entire=entire)
try:
schema({ 'a': 123 })
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 1
assert e.message == "Nope"
def test_entire_validator_raises_with_failures():
def entire(data):
raise Error("Entire")
def raiser(x):
raise Error("Nope")
schema = Schema({ 'a': raiser }, entire=entire)
try:
schema({ 'a': 123 })
raise AssertionError("Expected error.")
except Invalid as e:
assert len(e) == 2
assert "Entire" in e.messages
assert "Nope" in e.messages
## Markers
def test_marker_str():
marker = Marker("Hello, world!")
assert marker == "Hello, world!"
assert str(marker) == "Hello, world!"
| mit | 5,056,785,840,423,585,000 | 22.604585 | 75 | 0.525977 | false |
youzaka/ariblib | ariblib/aribgaiji.py | 1 | 9409 | # -*- coding: utf-8 -*-
"""
ARIB gaiji (external characters)
original: https://github.com/murakamiy/epgdump_py/blob/master/aribgaiji.py
Copyright (C) 2011 Yasumasa Murakami. All Rights Reserved.
"""
GAIJI_MAP_TITLE = {
0x7A50: "[HV]",
0x7A51: "[SD]",
0x7A52: "[P]",
0x7A53: "[W]",
0x7A54: "[MV]",
0x7A55: "[手]",
0x7A56: "[字]",
0x7A57: "[双]",
0x7A58: "[デ]",
0x7A59: "[S]",
0x7A5A: "[二]",
0x7A5B: "[多]",
0x7A5C: "[解]",
0x7A5D: "[SS]",
0x7A5E: "[B]",
0x7A5F: "[N]",
0x7A62: "[天]",
0x7A63: "[交]",
0x7A64: "[映]",
0x7A65: "[無]",
0x7A66: "[料]",
0x7A67: "[年齢制限]",
0x7A68: "[前]",
0x7A69: "[後]",
0x7A6A: "[再]",
0x7A6B: "[新]",
0x7A6C: "[初]",
0x7A6D: "[終]",
0x7A6E: "[生]",
0x7A6F: "[販]",
0x7A70: "[声]",
0x7A71: "[吹]",
0x7A72: "[PPV]",
}
GAIJI_MAP_OTHER = {
0x7A60: "■",
0x7A61: "●",
0x7A73: "㊙",
0x7A74: "\U0001F200", # ほか
0x7C21: "→",
0x7C22: "←",
0x7C23: "↑",
0x7C24: "↓",
0x7C25: "●",
0x7C26: "○",
0x7C27: "年",
0x7C28: "月",
0x7C29: "日",
0x7C2A: "円",
0x7C2B: "㎡",
0x7C2C: "㎥",
0x7C2D: "㎝",
0x7C2E: "㎠",
0x7C2F: "㎤",
0x7C30: "0.",
0x7C31: "1.",
0x7C32: "2.",
0x7C33: "3.",
0x7C34: "4.",
0x7C35: "5.",
0x7C36: "6.",
0x7C37: "7.",
0x7C38: "8.",
0x7C39: "9.",
0x7C3A: "氏",
0x7C3B: "副",
0x7C3C: "元",
0x7C3D: "故",
0x7C3E: "前",
0x7C3F: "新",
0x7C40: "0,",
0x7C41: "1,",
0x7C42: "2,",
0x7C43: "3,",
0x7C44: "4,",
0x7C45: "5,",
0x7C46: "6,",
0x7C47: "7,",
0x7C48: "8,",
0x7C49: "9,",
0x7C4A: "㈳",
0x7C4B: "㈶",
0x7C4C: "㈲",
0x7C4D: "㈱",
0x7C4E: "㈹",
0x7C4F: "㉄",
0x7C50: "▶",
0x7C51: "◀",
0x7C52: "〖",
0x7C53: "〗",
0x7C54: "⟐",
0x7C55: "^2",
0x7C56: "^3",
0x7C57: "(CD)",
0x7C58: "(vn)",
0x7C59: "(ob)",
0x7C5A: "(cb)",
0x7C5B: "(ce",
0x7C5C: "mb)",
0x7C5D: "(hp)",
0x7C5E: "(br)",
0x7C5F: "(p)",
0x7C60: "(s)",
0x7C61: "(ms)",
0x7C62: "(t)",
0x7C63: "(bs)",
0x7C64: "(b)",
0x7C65: "(tb)",
0x7C66: "(tp)",
0x7C67: "(ds)",
0x7C68: "(ag)",
0x7C69: "(eg)",
0x7C6A: "(vo)",
0x7C6B: "(fl)",
0x7C6C: "(ke",
0x7C6D: "y)",
0x7C6E: "(sa",
0x7C6F: "x)",
0x7C70: "(sy",
0x7C71: "n)",
0x7C72: "(or",
0x7C73: "g)",
0x7C74: "(pe",
0x7C75: "r)",
0x7C76: "(R)",
0x7C77: "(C)",
0x7C78: "(箏)",
0x7C79: "DJ",
0x7C7A: "\U0001F226", # [演]
0x7C7B: "Fax",
0x7D21: "㈪",
0x7D22: "㈫",
0x7D23: "㈬",
0x7D24: "㈭",
0x7D25: "㈮",
0x7D26: "㈯",
0x7D27: "㈰",
0x7D28: "㈷",
0x7D29: "㍾",
0x7D2A: "㍽",
0x7D2B: "㍼",
0x7D2C: "㍻",
0x7D2D: "№",
0x7D2E: "℡",
0x7D2F: "〶",
0x7D30: "○",
0x7D31: "\U0001F240", # 〔本〕
0x7D32: "\U0001F241", # 〔三〕
0x7D33: "\U0001F242", # 〔二〕
0x7D34: "\U0001F243", # 〔安〕
0x7D35: "\U0001F244", # 〔点〕
0x7D36: "\U0001F245", # 〔打〕
0x7D37: "\U0001F246", # 〔盗〕
0x7D38: "\U0001F247", # 〔勝〕
0x7D39: "\U0001F248", # 〔敗〕
0x7D3A: "\U0001F122", # 〔S〕
0x7D3B: "\U0001F227", # [投]
0x7D3C: "\U0001F228", # [捕]
0x7D3D: "\U0001F229", # [一]
0x7D3E: "[二]",
0x7D3F: "\U0001F22A", # [三]
0x7D40: "\U0001F22B", # [遊]
0x7D41: "\U0001F22C", # [左]
0x7D42: "\U0001F22D", # [中]
0x7D43: "\U0001F22E", # [右]
0x7D44: "\U0001F22F", # [指]
0x7D45: "\U0001F230", # [走]
0x7D46: "\U0001F231", # [打]
0x7D47: "㍑",
0x7D48: "㎏",
0x7D49: "㎐",
0x7D4A: "ha",
0x7D4B: "㎞",
0x7D4C: "㎢",
0x7D4D: "㍱",
0x7D4E: "・",
0x7D4F: "・",
0x7D50: "1/2",
0x7D51: "0/3",
0x7D52: "1/3",
0x7D53: "2/3",
0x7D54: "1/4",
0x7D55: "3/4",
0x7D56: "1/5",
0x7D57: "2/5",
0x7D58: "3/5",
0x7D59: "4/5",
0x7D5A: "1/6",
0x7D5B: "5/6",
0x7D5C: "1/7",
0x7D5D: "1/8",
0x7D5E: "1/9",
0x7D5F: "1/10",
0x7D60: "☀",
0x7D61: "☁",
0x7D62: "☂",
0x7D63: "☃",
0x7D64: "☖",
0x7D65: "☗",
0x7D66: "▽",
0x7D67: "▼",
0x7D68: "♦",
0x7D69: "♥",
0x7D6A: "♣",
0x7D6B: "♠",
0x7D6C: "⌺",
0x7D6D: "⦿",
0x7D6E: "‼",
0x7D6F: "⁉",
0x7D70: "(曇/晴)",
0x7D71: "☔",
0x7D72: "(雨)",
0x7D73: "(雪)",
0x7D74: "(大雪)",
0x7D75: "⚡",
0x7D76: "(雷雨)",
0x7D77: " ",
0x7D78: "・",
0x7D79: "・",
0x7D7A: "♬",
0x7D7B: "☎",
0x7E21: "Ⅰ",
0x7E22: "Ⅱ",
0x7E23: "Ⅲ",
0x7E24: "Ⅳ",
0x7E25: "Ⅴ",
0x7E26: "Ⅵ",
0x7E27: "Ⅶ",
0x7E28: "Ⅷ",
0x7E29: "Ⅸ",
0x7E2A: "Ⅹ",
0x7E2B: "Ⅺ",
0x7E2C: "Ⅻ",
0x7E2D: "⑰",
0x7E2E: "⑱",
0x7E2F: "⑲",
0x7E30: "⑳",
0x7E31: "⑴",
0x7E32: "⑵",
0x7E33: "⑶",
0x7E34: "⑷",
0x7E35: "⑸",
0x7E36: "⑹",
0x7E37: "⑺",
0x7E38: "⑻",
0x7E39: "⑼",
0x7E3A: "⑽",
0x7E3B: "⑾",
0x7E3C: "⑿",
0x7E3D: "㉑",
0x7E3E: "㉒",
0x7E3F: "㉓",
0x7E40: "㉔",
0x7E41: "(A)",
0x7E42: "(B)",
0x7E43: "(C)",
0x7E44: "(D)",
0x7E45: "(E)",
0x7E46: "(F)",
0x7E47: "(G)",
0x7E48: "(H)",
0x7E49: "(I)",
0x7E4A: "(J)",
0x7E4B: "(K)",
0x7E4C: "(L)",
0x7E4D: "(M)",
0x7E4E: "(N)",
0x7E4F: "(O)",
0x7E50: "(P)",
0x7E51: "(Q)",
0x7E52: "(R)",
0x7E53: "(S)",
0x7E54: "(T)",
0x7E55: "(U)",
0x7E56: "(V)",
0x7E57: "(W)",
0x7E58: "(X)",
0x7E59: "(Y)",
0x7E5A: "(Z)",
0x7E5B: "㉕",
0x7E5C: "㉖",
0x7E5D: "㉗",
0x7E5E: "㉘",
0x7E5F: "㉙",
0x7E60: "㉚",
0x7E61: "①",
0x7E62: "②",
0x7E63: "③",
0x7E64: "④",
0x7E65: "⑤",
0x7E66: "⑥",
0x7E67: "⑦",
0x7E68: "⑧",
0x7E69: "⑨",
0x7E6A: "⑩",
0x7E6B: "⑪",
0x7E6C: "⑫",
0x7E6D: "⑬",
0x7E6E: "⑭",
0x7E6F: "⑮",
0x7E70: "⑯",
0x7E71: "❶",
0x7E72: "❷",
0x7E73: "❸",
0x7E74: "❹",
0x7E75: "❺",
0x7E76: "❻",
0x7E77: "❼",
0x7E78: "❽",
0x7E79: "❾",
0x7E7A: "❿",
0x7E7B: "⓫",
0x7E7C: "⓬",
0x7E7D: "㉛",
0x7521: "㐂",
0x7522: "亭",
0x7523: "份",
0x7524: "仿",
0x7525: "侚",
0x7526: "俉",
0x7527: "傜",
0x7528: "儞",
0x7529: "冼",
0x752A: "㔟",
0x752B: "匇",
0x752C: "卡",
0x752D: "卬",
0x752E: "詹",
0x752F: "吉",
0x7530: "呍",
0x7531: "咖",
0x7532: "咜",
0x7533: "咩",
0x7534: "唎",
0x7535: "啊",
0x7536: "噲",
0x7537: "囤",
0x7538: "圳",
0x7539: "圴",
0x753A: "塚",
0x753B: "墀",
0x753C: "姤",
0x753D: "娣",
0x753E: "婕",
0x753F: "寬",
0x7540: "﨑",
0x7541: "㟢",
0x7542: "庬",
0x7543: "弴",
0x7544: "彅",
0x7545: "德",
0x7546: "怗",
0x7547: "恵",
0x7548: "愰",
0x7549: "昤",
0x754A: "曈",
0x754B: "曙",
0x754C: "曺",
0x754D: "曻",
0x754E: "桒",
0x754F: "・",
0x7550: "椑",
0x7551: "椻",
0x7552: "橅",
0x7553: "檑",
0x7554: "櫛",
0x7555: "・",
0x7556: "・",
0x7557: "・",
0x7558: "毱",
0x7559: "泠",
0x755A: "洮",
0x755B: "海",
0x755C: "涿",
0x755D: "淊",
0x755E: "淸",
0x755F: "渚",
0x7560: "潞",
0x7561: "濹",
0x7562: "灤",
0x7563: "・",
0x7564: "・",
0x7565: "煇",
0x7566: "燁",
0x7567: "爀",
0x7568: "玟",
0x7569: "・",
0x756A: "珉",
0x756B: "珖",
0x756C: "琛",
0x756D: "琡",
0x756E: "琢",
0x756F: "琦",
0x7570: "琪",
0x7571: "琬",
0x7572: "琹",
0x7573: "瑋",
0x7574: "㻚",
0x7575: "畵",
0x7576: "疁",
0x7577: "睲",
0x7578: "䂓",
0x7579: "磈",
0x757A: "磠",
0x757B: "祇",
0x757C: "禮",
0x757D: "・",
0x757E: "・",
0x7621: "・",
0x7622: "秚",
0x7623: "稞",
0x7624: "筿",
0x7625: "簱",
0x7626: "䉤",
0x7627: "綋",
0x7628: "羡",
0x7629: "脘",
0x762A: "脺",
0x762B: "・",
0x762C: "芮",
0x762D: "葛",
0x762E: "蓜",
0x762F: "蓬",
0x7630: "蕙",
0x7631: "藎",
0x7632: "蝕",
0x7633: "蟬",
0x7634: "蠋",
0x7635: "裵",
0x7636: "角",
0x7637: "諶",
0x7638: "跎",
0x7639: "辻",
0x763A: "迶",
0x763B: "郝",
0x763C: "鄧",
0x763D: "鄭",
0x763E: "醲",
0x763F: "鈳",
0x7640: "銈",
0x7641: "錡",
0x7642: "鍈",
0x7643: "閒",
0x7644: "雞",
0x7645: "餃",
0x7646: "饀",
0x7647: "髙",
0x7648: "鯖",
0x7649: "鷗",
0x764A: "麴",
0x764B: "麵",
}
GAIJI_MAP = {}
GAIJI_MAP.update(GAIJI_MAP_TITLE)
GAIJI_MAP.update(GAIJI_MAP_OTHER)
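def gaiji_to_text(code_point):
    """Hedged helper (added; not part of the original module): return the
    replacement string for an ARIB gaiji code point, or None when unmapped.
    e.g. gaiji_to_text(0x7A56) -> "[字]"
    """
    return GAIJI_MAP.get(code_point)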
| mit | -3,981,922,691,573,316,600 | 17.235669 | 74 | 0.411573 | false |
emonty/pyos | pyos/exceptions.py | 1 | 10473 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c)2012 Rackspace US, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Since we use the novaclient package, we need to expose its exception
# classes here.
from novaclient import exceptions as _nova_exceptions
ServerNotFound = _nova_exceptions.NotFound
ServerClientException = _nova_exceptions.ClientException
class PyraxException(Exception):
pass
class AccessListIDNotFound(PyraxException):
pass
class AuthenticationFailed(PyraxException):
pass
class AuthorizationFailure(PyraxException):
pass
class AuthSystemNotFound(PyraxException):
pass
class CDNFailed(PyraxException):
pass
class DBUpdateUnchanged(PyraxException):
pass
class DNSCallTimedOut(PyraxException):
pass
class DomainCreationFailed(PyraxException):
pass
class DomainDeletionFailed(PyraxException):
pass
class DomainRecordAdditionFailed(PyraxException):
pass
class DomainRecordDeletionFailed(PyraxException):
pass
class DomainRecordNotFound(PyraxException):
pass
class DomainRecordNotUnique(PyraxException):
pass
class DomainRecordUpdateFailed(PyraxException):
pass
class DomainUpdateFailed(PyraxException):
pass
class DuplicateQueue(PyraxException):
pass
class DuplicateUser(PyraxException):
pass
class EndpointNotDefined(PyraxException):
pass
class EndpointNotFound(PyraxException):
pass
class EnvironmentNotFound(PyraxException):
pass
class FlavorNotFound(PyraxException):
pass
class FileNotFound(PyraxException):
pass
class FolderNotFound(PyraxException):
pass
class KeyringModuleNotInstalled(PyraxException):
pass
class KeyringPasswordNotFound(PyraxException):
pass
class KeyringUsernameMissing(PyraxException):
pass
class IdentityClassNotDefined(PyraxException):
pass
class InternalServerError(PyraxException):
pass
class InvalidCDNMetadata(PyraxException):
pass
class InvalidConfigurationFile(PyraxException):
pass
class InvalidCredentialFile(PyraxException):
pass
class InvalidDateTimeString(PyraxException):
pass
class InvalidDeviceType(PyraxException):
pass
class InvalidEmail(PyraxException):
pass
class InvalidImageMember(PyraxException):
pass
class InvalidImageMemberStatus(PyraxException):
pass
class InvalidLoadBalancer(PyraxException):
pass
class InvalidLoadBalancerParameters(PyraxException):
pass
class InvalidMonitoringCheckDetails(PyraxException):
pass
class InvalidMonitoringCheckUpdate(PyraxException):
pass
class InvalidMonitoringMetricsRequest(PyraxException):
pass
class InvalidMonitoringMetricsResolution(PyraxException):
pass
class InvalidNodeCondition(PyraxException):
pass
class InvalidNodeParameters(PyraxException):
pass
class InvalidPTRRecord(PyraxException):
pass
class InvalidQueueName(PyraxException):
pass
class InvalidSessionPersistenceType(PyraxException):
pass
class InvalidSetting(PyraxException):
pass
class InvalidSize(PyraxException):
pass
class InvalidTemporaryURLMethod(PyraxException):
pass
class InvalidUploadID(PyraxException):
pass
class InvalidVirtualIPType(PyraxException):
pass
class InvalidVirtualIPVersion(PyraxException):
pass
class InvalidVolumeResize(PyraxException):
pass
class MissingAuthSettings(PyraxException):
pass
class MissingClaimParameters(PyraxException):
pass
class MissingDBUserParameters(PyraxException):
pass
class MissingDNSSettings(PyraxException):
pass
class MissingHealthMonitorSettings(PyraxException):
pass
class MissingLoadBalancerParameters(PyraxException):
pass
class MissingMonitoringCheckDetails(PyraxException):
pass
class MissingMonitoringCheckGranularity(PyraxException):
pass
class MissingName(PyraxException):
pass
class MissingTemporaryURLKey(PyraxException):
pass
class MonitoringCheckTargetNotSpecified(PyraxException):
pass
class MonitoringZonesPollMissing(PyraxException):
pass
class NetworkCIDRInvalid(PyraxException):
pass
class NetworkCIDRMalformed(PyraxException):
pass
class NetworkCountExceeded(PyraxException):
pass
class NetworkInUse(PyraxException):
pass
class NetworkNotFound(PyraxException):
pass
class NetworkLabelNotUnique(PyraxException):
pass
class NoClientForService(PyraxException):
pass
class NoEndpointForRegion(PyraxException):
pass
class NoEndpointForService(PyraxException):
pass
class NoContentSpecified(PyraxException):
pass
class NoMoreResults(PyraxException):
pass
class NoReloadError(PyraxException):
pass
class NoSSLTerminationConfiguration(PyraxException):
pass
class NoSuchClient(PyraxException):
pass
class NoSuchContainer(PyraxException):
pass
class NoSuchDatabase(PyraxException):
pass
class NoSuchDatabaseUser(PyraxException):
pass
class NoSuchObject(PyraxException):
pass
class NotAuthenticated(PyraxException):
pass
class NotCDNEnabled(PyraxException):
pass
class NoTokenLookupException(PyraxException):
pass
class PasswordChangeFailed(PyraxException):
pass
class ProtocolMismatch(PyraxException):
pass
class PTRRecordCreationFailed(PyraxException):
pass
class PTRRecordDeletionFailed(PyraxException):
pass
class PTRRecordUpdateFailed(PyraxException):
pass
class QueueClientIDNotDefined(PyraxException):
pass
class ServiceNotAvailable(PyraxException):
pass
class ServiceResponseFailure(PyraxException):
pass
class SnapshotNotAvailable(PyraxException):
pass
class TenantNotFound(PyraxException):
pass
class UnattachedNode(PyraxException):
pass
class UnattachedVirtualIP(PyraxException):
pass
class UnicodePathError(PyraxException):
pass
class UnsharableImage(PyraxException):
pass
class UploadFailed(PyraxException):
pass
class UserNotFound(PyraxException):
pass
class VolumeAttachmentFailed(PyraxException):
pass
class VolumeCloneTooSmall(PyraxException):
pass
class VolumeDetachmentFailed(PyraxException):
pass
class VolumeNotAvailable(PyraxException):
pass
class AmbiguousEndpoints(PyraxException):
"""Found more than one matching endpoint in Service Catalog."""
def __init__(self, endpoints=None):
self.endpoints = endpoints
def __str__(self):
return "AmbiguousEndpoints: %s" % repr(self.endpoints)
class ClientException(PyraxException):
"""
The base exception class for all exceptions this library raises.
"""
def __init__(self, code, message=None, details=None, request_id=None):
self.code = code
self.message = message or "-no error message returned-"
self.details = details
self.request_id = request_id
def __str__(self):
formatted_string = "%s (HTTP %s)" % (self.message, self.code)
if self.request_id:
formatted_string += " (Request-ID: %s)" % self.request_id
return formatted_string
class BadRequest(ClientException):
"""
HTTP 400 - Bad request: you sent some malformed data.
"""
http_status = 400
message = "Bad request"
class Unauthorized(ClientException):
"""
HTTP 401 - Unauthorized: bad credentials.
"""
http_status = 401
message = "Unauthorized"
class Forbidden(ClientException):
"""
HTTP 403 - Forbidden: your credentials don't give you access to this
resource.
"""
http_status = 403
message = "Forbidden"
class NotFound(ClientException):
"""
HTTP 404 - Not found
"""
http_status = 404
message = "Not found"
class NoUniqueMatch(ClientException):
"""
HTTP 400 - Bad Request
"""
http_status = 400
message = "Not Unique"
class OverLimit(ClientException):
"""
HTTP 413 - Over limit: you're over the API limits for this time period.
"""
http_status = 413
message = "Over limit"
# NotImplemented is a python keyword.
class HTTPNotImplemented(ClientException):
"""
HTTP 501 - Not Implemented: the server does not support this operation.
"""
http_status = 501
message = "Not Implemented"
# In Python 2.4 Exception is old-style and thus doesn't have a __subclasses__()
# so we can do this:
# _code_map = dict((c.http_status, c)
# for c in ClientException.__subclasses__())
#
# Instead, we have to hardcode it:
_code_map = dict((c.http_status, c) for c in [BadRequest, Unauthorized,
Forbidden, NotFound, OverLimit, HTTPNotImplemented])
def from_response(response, body):
"""
Return an instance of a ClientException or subclass
based on an httplib2 response.
Usage::
resp, body = http.request(...)
if resp.status_code != 200:
            raise from_response(resp, body)
"""
if isinstance(response, dict):
status = response.get("status_code")
else:
status = response.status_code
cls = _code_map.get(int(status), ClientException)
# import pyos
# pyos.utils.trace()
request_id = response.headers.get("x-compute-request-id")
if body:
message = "n/a"
details = "n/a"
if isinstance(body, dict):
message = body.get("message")
details = body.get("details")
if message is details is None:
error = body[body.keys()[0]]
if isinstance(error, dict):
message = error.get("message", None)
details = error.get("details", None)
else:
message = error
details = None
else:
message = body
return cls(code=status, message=message, details=details,
request_id=request_id)
else:
return cls(code=status, request_id=request_id)
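if __name__ == "__main__":
    # Hedged usage sketch (added; not part of the original module): maps a
    # fake 404 response onto the matching exception class.
    class _FakeResponse(object):
        status_code = 404
        headers = {"x-compute-request-id": "req-123"}
    exc = from_response(_FakeResponse(), {"message": "No such server",
                                          "details": "The server could not be found"})
    print(exc)  # -> No such server (HTTP 404) (Request-ID: req-123)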
| apache-2.0 | 5,714,773,941,973,787,000 | 20.417178 | 79 | 0.721283 | false |
RiceMunk/omnifit | setup.py | 1 | 3884 | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import glob
import os
import sys
import ah_bootstrap
from setuptools import setup
#A dirty hack to get around some early import/configurations ambiguities
if sys.version_info[0] >= 3:
import builtins
else:
import __builtin__ as builtins
builtins._ASTROPY_SETUP_ = True
from astropy_helpers.setup_helpers import (
register_commands, get_debug_option, get_package_info)
from astropy_helpers.git_helpers import get_git_devstr
from astropy_helpers.version_helpers import generate_version_py
# Get some values from the setup.cfg
try:
from ConfigParser import ConfigParser
except ImportError:
from configparser import ConfigParser
conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))
PACKAGENAME = metadata.get('package_name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'http://astropy.org')
# Get the long description from the package's docstring
__import__(PACKAGENAME)
package = sys.modules[PACKAGENAME]
LONG_DESCRIPTION = package.__doc__
# Store the package name in a built-in variable so it's easy
# to get from other parts of the setup infrastructure
builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME
# VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386)
VERSION = '0.2.1'
# Indicates if this version is a release version
RELEASE = 'dev' not in VERSION
if not RELEASE:
VERSION += get_git_devstr(False)
# Populate the dict of setup command overrides; this should be done before
# invoking any other functionality from distutils since it can potentially
# modify distutils' behavior.
cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE)
# Freeze build information in version.py
generate_version_py(PACKAGENAME, VERSION, RELEASE,
get_debug_option(PACKAGENAME))
# Treat everything in scripts except README.rst as a script to be installed
scripts = [fname for fname in glob.glob(os.path.join('scripts', '*'))
if os.path.basename(fname) != 'README.rst']
# Get configuration information from all of the various subpackages.
# See the docstring for setup_helpers.update_package_files for more
# details.
package_info = get_package_info()
# Add the project-global data
package_info['package_data'].setdefault(PACKAGENAME, [])
package_info['package_data'][PACKAGENAME].append('data/*')
# Define entry points for command-line scripts
entry_points = {}
entry_points['console_scripts'] = [
'astropy-package-template-example = packagename.example_mod:main',
]
# Include all .c files, recursively, including those generated by
# Cython, since we can not do this in MANIFEST.in with a "dynamic"
# directory name.
c_files = []
for root, dirs, files in os.walk(PACKAGENAME):
for filename in files:
if filename.endswith('.c'):
c_files.append(
os.path.join(
os.path.relpath(root, PACKAGENAME), filename))
package_info['package_data'][PACKAGENAME].extend(c_files)
# Note that requires and provides should not be included in the call to
# ``setup``, since these are now deprecated. See this link for more details:
# https://groups.google.com/forum/#!topic/astropy-dev/urYO8ckB2uM
setup(name=PACKAGENAME,
version=VERSION,
description=DESCRIPTION,
scripts=scripts,
install_requires=['astropy>=1.0.0','matplotlib>=1.3.0','lmfit>=0.9.2'],
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license=LICENSE,
url=URL,
long_description=LONG_DESCRIPTION,
cmdclass=cmdclassd,
zip_safe=False,
use_2to3=True,
entry_points=entry_points,
**package_info
)
| bsd-3-clause | -5,524,028,956,380,954,000 | 32.196581 | 79 | 0.723738 | false |
thoreg/satchmo | satchmo/apps/payment/views/contact.py | 1 | 7346 | ####################################################################
# First step in the order process - capture all the demographic info
#####################################################################
import logging
from django import http
from django.core import urlresolvers
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.generic import FormView
from signals_ahoy.signals import form_initialdata
from livesettings import config_get_group, config_value
from satchmo_store.contact import CUSTOMER_ID
from satchmo_store.contact.models import Contact
from satchmo_store.shop.models import Cart, Config, Order
from satchmo_utils.dynamic import lookup_url
from payment.decorators import cart_has_minimum_order
from payment.forms import PaymentContactInfoForm
log = logging.getLogger('satchmo_store.contact.contact')
def authentication_required(
request,
template='shop/checkout/authentication_required.html'):
return render_to_response(
template, {}, context_instance = RequestContext(request)
)
class CheckoutForm(FormView):
"""The checkout page form (collects contact info, and payment method).
"""
initial = {}
template_name = 'shop/checkout/form.html'
form_class = PaymentContactInfoForm
def __init__(self, **kwargs):
self._success_url = None
self._initial_data = {}
self._form_extrakwargs = {}
super(CheckoutForm, self).__init__(**kwargs)
def get_shop(self):
shop = Config.objects.get_current()
return shop
def get_contact(self):
try:
contact = Contact.objects.from_request(self.request, create=False)
except Contact.DoesNotExist:
contact = None
return contact
def get_order(self):
try:
order = Order.objects.from_request(self.request)
except Order.DoesNotExist:
order = None
return order
def get_cart(self):
try:
cart = Cart.objects.from_request(self.request)
except Cart.DoesNotExist:
cart = None
return cart
def get_form_kwargs(self):
kwargs = super(CheckoutForm, self).get_form_kwargs()
kwargs.update(self._form_extrakwargs)
return kwargs
def get_initial(self):
"""
Returns the initial data to use for forms on this view.
"""
init_data = {}
if self.request.user.is_authenticated():
if self.request.user.email:
init_data['email'] = self.request.user.email
if self.request.user.first_name:
init_data['first_name'] = self.request.user.first_name
if self.request.user.last_name:
init_data['last_name'] = self.request.user.last_name
else:
init_data = {}
order = self.get_order()
if order and order.discount_code:
init_data['discount'] = order.discount_code
init_data.update(self._initial_data)
return init_data
def get_context_data(self, **kwargs):
kwargs = super(CheckoutForm, self).get_context_data(**kwargs)
shop = self.get_shop()
if shop.in_country_only:
only_country = shop.sales_country
else:
only_country = None
payment_methods = kwargs['form'].fields['paymentmethod'].choices
kwargs.update({
'country': only_country,
'paymentmethod_ct': len(payment_methods)
})
return kwargs
def get_success_url(self):
if self._success_url is not None:
return self._success_url
return super(CheckoutForm, self).get_success_url()
def get(self, request, *args, **kwargs):
contact = self.get_contact()
init_data = self.get_initial()
if not self.request.user.is_authenticated() and \
config_value('SHOP', 'AUTHENTICATION_REQUIRED'):
url = urlresolvers.reverse('satchmo_checkout_auth_required')
thisurl = urlresolvers.reverse('satchmo_checkout-step1')
return http.HttpResponseRedirect(url + "?next=" + thisurl)
if contact:
# If a person has their contact info,
# make sure we populate it in the form
for item in contact.__dict__.keys():
init_data[item] = getattr(contact, item)
if contact.shipping_address:
for item in contact.shipping_address.__dict__.keys():
init_data["ship_"+item] = getattr(
contact.shipping_address,
item
)
if contact.billing_address:
for item in contact.billing_address.__dict__.keys():
init_data[item] = getattr(contact.billing_address, item)
if contact.primary_phone:
init_data['phone'] = contact.primary_phone.phone
else:
# Allow them to login from this page.
request.session.set_test_cookie()
tempCart = self.get_cart()
if (not tempCart) or (tempCart.numItems == 0):
return render_to_response('shop/checkout/empty_cart.html',
context_instance=RequestContext(self.request))
shop = self.get_shop()
form_initialdata.send(
sender=self.get_form_class(),
initial=init_data,
contact=contact,
cart=tempCart,
shop=shop
)
self._initial_data = init_data
self._form_extrakwargs['shop'] = shop
self._form_extrakwargs['contact'] = contact
self._form_extrakwargs['shippable'] = tempCart.is_shippable
self._form_extrakwargs['cart'] = tempCart
return super(CheckoutForm, self).get(request, *args, **kwargs)
def form_valid(self, form):
contact = self.get_contact()
new_data = self.request.POST.copy()
tempCart = self.get_cart()
if contact is None and self.request.user \
and self.request.user.is_authenticated():
contact = Contact(user=self.request.user)
custID = form.save(self.request, cart=tempCart, contact=contact)
self.request.session[CUSTOMER_ID] = custID
modulename = new_data['paymentmethod']
if not modulename.startswith('PAYMENT_'):
modulename = 'PAYMENT_' + modulename
paymentmodule = config_get_group(modulename)
url = lookup_url(paymentmodule, 'satchmo_checkout-step2')
self._success_url = url
return super(CheckoutForm, self).form_valid(form)
def post(self, request, *args, **kwargs):
tempCart = self.get_cart()
new_data = self.request.POST.copy()
if not tempCart.is_shippable:
new_data['copy_address'] = True
self._form_extrakwargs['data'] = new_data
self._form_extrakwargs['shop'] = self.get_shop()
self._form_extrakwargs['contact'] = self.get_contact()
self._form_extrakwargs['shippable'] = tempCart.is_shippable
self._form_extrakwargs['cart'] = tempCart
return super(CheckoutForm, self).post(request, *args, **kwargs)
contact_info = CheckoutForm.as_view()
contact_info_view = cart_has_minimum_order()(contact_info)
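# Hedged wiring sketch (added; not part of the original module). A project's
# urls.py entry for this view might look like the following; the pattern and
# import path here are assumptions:
#
#     from django.conf.urls import url
#     from payment.views import contact
#
#     urlpatterns = [
#         url(r'^checkout/$', contact.contact_info_view,
#             name='satchmo_checkout-step1'),
#     ]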
| bsd-3-clause | 848,478,398,620,606,100 | 35.366337 | 78 | 0.599102 | false |
marto-nieto-g16/WPA_Decrypt | wpa_console.py | 1 | 2733 | import conexion_nmcli
import wpa_decrypt
def wpa_console():
print("\n\x1b[1;37m"+"WPA_Decrypt_Console >>", end="")
comando_WPA_Decrypt = input()
buscar_comandos_WPA_Decrypt(comando_WPA_Decrypt)
def buscar_comandos_WPA_Decrypt(comando):
#Sentencia para escanear redes inalambricas
if comando == "use WPA_Decrypt.scan.wireless" :
conexion_nmcli.escanear_wifi()
wpa_console()
elif comando == "use WPA_Decrypt.connection.general":
conexion_nmcli.conexion_general()
wpa_console()
elif comando == "use WPA_Decrypt.device":
conexion_nmcli.ver_dispositivos()
wpa_console()
elif comando == "use WPA_Decrypt.methods":
		print("\n\x1b[1;32m"+"- num (Numeric)\n- alph (Alphabetic)\n- sym (Symbols)\n- alph_num\n- all")
wpa_console()
elif comando == "use WPA_Decrypt.delete":
conexion_nmcli.borrar_ssid()
wpa_console()
elif comando == "use WPA_Decrypt.methods.num":
wpa_decrypt.numerico(conexion_nmcli.getSSID())
wpa_console()
elif comando == "run alphMay_random":
try:
wpa_decrypt.runAlphMay_random(conexion_nmcli.getSSID())
wpa_console()
except:
			print ("\nMessage: No SSID has been loaded")
wpa_console()
elif comando == "run alphMin_random":
try:
wpa_decrypt.runAlphMin_random(conexion_nmcli.getSSID())
wpa_console()
except:
			print ("\nMessage: No SSID has been loaded")
wpa_console()
elif comando == "run alphNum_random":
try:
wpa_decrypt.runAlphNum_random(conexion_nmcli.getSSID())
wpa_console()
except:
			print ("\nMessage: No SSID has been loaded")
wpa_console()
elif comando == "run alphSym_random":
try:
wpa_decrypt.runAlphSym_random(conexion_nmcli.getSSID())
wpa_console()
except:
			print ("\nMessage: No SSID has been loaded")
wpa_console()
elif comando == "run methodsAll":
try:
wpa_decrypt.methodsAll(conexion_nmcli.getSSID())
wpa_console()
except:
			print ("\nMessage: No SSID has been loaded")
wpa_console()
elif comando == "set WPA_Decrypt.SSID":
		print("\n\x1b[1;32m"+"Wireless network name (SSID) >> ", end ="")
nombre_red = input()
if nombre_red != "":
			print("\x1b[1;33m"+"("+nombre_red+")"+"\x1b[1;32m"+" has been loaded as the SSID")
conexion_nmcli.setSSID(nombre_red)
wpa_console()
else:
			print ("\n\x1b[1;31m"+"Error: no SSID was entered")
wpa_console()
elif comando == "get WPA_Decrypt.SSID":
		print("\n\x1b[1;32m"+"Loaded SSID >> "+"\x1b[1;33m"+ conexion_nmcli.getSSID())
wpa_console()
elif comando == "":
wpa_console()
else:
		print("\x1b[1;31m"+"Error: command not found")
wpa_console()
wpa_console()
| lgpl-3.0 | -4,815,598,751,162,174,000 | 20.023077 | 101 | 0.638492 | false |
Neurosim-lab/netpyne | examples/NeuroMLImport/SimpleNet.py | 1 | 2134 |
import opencortex.core as oc
min_pop_size = 3
def scale_pop_size(baseline, scale):
return max(min_pop_size, int(baseline*scale))
def generate(reference = "SimpleNet",
scale=1,
format='xml'):
population_size = scale_pop_size(3,scale)
nml_doc, network = oc.generate_network(reference)
oc.include_opencortex_cell(nml_doc, 'izhikevich/RS.cell.nml')
pop = oc.add_population_in_rectangular_region(network,
'RS_pop',
'RS',
population_size,
0,0,0,
100,100,100,
color='0 .8 0')
syn = oc.add_exp_two_syn(nml_doc,
id="syn0",
gbase="2nS",
erev="0mV",
tau_rise="0.5ms",
tau_decay="10ms")
pfs = oc.add_poisson_firing_synapse(nml_doc,
id="poissonFiringSyn",
average_rate="50 Hz",
synapse_id=syn.id)
oc.add_inputs_to_population(network,
"Stim0",
pop,
pfs.id,
all_cells=True)
nml_file_name = '%s.net.nml'%network.id
oc.save_network(nml_doc,
nml_file_name,
validate=(format=='xml'),
format = format)
if format=='xml':
oc.generate_lems_simulation(nml_doc,
network,
nml_file_name,
duration = 500,
dt = 0.025)
if __name__ == '__main__':
import sys
if len(sys.argv)==2:
generate(scale=int(sys.argv[1]))
else:
generate()
| mit | -2,806,421,129,574,212,000 | 29.927536 | 66 | 0.365042 | false |
AtheonAnalytics/trext | trext/db/fill.py | 1 | 3905 | from tableausdk.Exceptions import TableauException
from tableausdk.Extract import Row
from tableausdk.Types import Type
from trext.db.utils import format_datetime, format_date, get_fake_date, get_fake_datetime
class ExtractFiller(object):
"""
Fills the extract skeleton with cleaned and formatted data.
"""
def __init__(self, table, table_definition, column_metadata):
"""
:param table: Tableau table to insert data into
:param table_definition: definition of the extract
:param column_metadata: the metadata about the columns - the name, position and type of
the columns in the view or table type mapped to Tableau SDK types. See
`tableausdk.Types.Type` and `trext.db.typemap` for more information.
"""
self._table = table
self._table_definition = table_definition
self._column_metadata = column_metadata
@staticmethod
def _replace_null(col_type, col_data):
"""
Replaces the null data with values based on type. Eg: 0 if integer and 0.0 if float.
If there is no null data then it returns the existing value, col_data.
Note:
1. This will need more suitable values as db NULLs are more useful than a replaced value.
:param col_type: type of the column to decide what value to replace
:param col_data: the value in the column that needs checking
:return: cleaned up column_data
"""
null_replacement_map = {
Type.INTEGER: 0,
Type.BOOLEAN: False,
Type.CHAR_STRING: '',
Type.UNICODE_STRING: u'',
Type.DATE: get_fake_date(),
Type.DATETIME: get_fake_datetime(),
Type.DOUBLE: 0.0
}
return null_replacement_map.get(col_type) if col_data is None else col_data
def insert_data_to_extract(self, db_data_row):
"""
Inserts the data row by row into the tableau extract skeleton
:param db_data_row: row from the database
"""
# Get the row of data to insert
insert_row = Row(self._table_definition)
# Map the column type to the TDE insert function
extract_type_map = {
Type.INTEGER: insert_row.setInteger,
Type.DOUBLE: insert_row.setDouble,
Type.BOOLEAN: insert_row.setBoolean,
Type.DATE: insert_row.setDate,
Type.DATETIME: insert_row.setDateTime,
Type.CHAR_STRING: insert_row.setCharString,
Type.UNICODE_STRING: insert_row.setString,
}
# Iterate through each column of the row to identify the type
for column_pos, column_type in self._column_metadata.iteritems():
extract_col_pos_ = column_pos - 1
insert_row.Insert = extract_type_map[column_type]
# If there is any NULL data replace with corresponding NULL type data for the field
column_data = db_data_row[extract_col_pos_]
column_data = self._replace_null(column_type, column_data)
# Identify the insert function for the data
try:
# Date time field
if column_type == 13:
year, month, day, hour, minute, sec, frac = format_datetime(column_data)
insert_row.Insert(extract_col_pos_, year, month, day, hour, minute, sec, frac)
# Date field
elif column_type == 12:
year, month, day = format_date(column_data)
insert_row.Insert(extract_col_pos_, year, month, day)
# Other fields
else:
insert_row.Insert(extract_col_pos_, column_data)
except TableauException as e:
raise e
# Insert the row
self._table.insert(insert_row)
insert_row.close()
| mit | 4,413,232,399,961,334,300 | 40.542553 | 98 | 0.599744 | false |
bdupharm/tf-mnist | mnist_basic_nn.py | 1 | 5743 | #!/usr/bin/env python
"""
An example of implementing multinomial logistic (softmax) regression with a single layer of
perceptrons using Tensorflow
Output: confidence predictions (as an array) of which class an observation belongs to
"""
import time
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data", one_hot=True)
"""
mnist is a DataSet object containing the following:
- 55000 images and labels for primary training
    - 5000 images and labels for iterative validation of training accuracy
- 10000 images and labels for final testing of trained accuracy
"""
print("Number of images/labels for model:")
print("Primary training: " + str(mnist.train.num_examples))
print("Iterative validation: " + str(mnist.test.num_examples))
print("Final testing: " + str(mnist.validation.num_examples))
print("")
"""
Images are stored as an n-dim array [n_observations x n_features]
Labels are stored as [n_observations x n_labels]
where each observation is a one-hot vector
"""
print("Dimensions of the Image and Label tensors: ")
print("Images: " + str(mnist.train.images.shape),"Labels: " + str(mnist.train.labels.shape))
with tf.Graph().as_default():
# Inputs
x = tf.placeholder(tf.float32, shape=[None, 784], name="image_inputs")
y_ = tf.placeholder(tf.float32, shape=[None, 10], name="actual_class")
"""
Placeholder creates a container for an input image using tensorflow's graph.
We allow the first dimension to be None, since this will eventually
    represent our mini-batches, or how many images we feed into a network
    at a time during training/validation/testing
    x : 28px by 28px images, each flattened into a 784-element row of a
        [Batch Size x 784] matrix
    y_ : [Batch Size x 10] matrix of one-hot encodings representing the actual class of the image
        (ie. [ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ] where the index of 1 is the class)
"""
with tf.name_scope("hidden1"):
W = tf.Variable(tf.zeros([784,10]), name="weights")
b = tf.Variable(tf.zeros([10]), name="biases")
# Sigmoid unit
y = tf.nn.softmax(tf.matmul(x,W) + b)
"""
function in the form of:
f(x_i, W, b) = Wx_i + b
which is a linear mapping of image pixels to class scores.
W and b are the parameters of this function which change after each iteration
1) W * x_i => [ 0.2, 0.5, 0.6, 0.3, 1.2, .5, .2, .9, .2, .6] # does not sum to 1
return a K element array representing the probabilities that an image belongs to each class K
2) + b => Adds biases to each of the classes
3) softmax() => [ 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1] # sums to 1
returns a K element array w/ normalized probabilities that an image belongs to each class K
Variables (Learning Parameters)
x_i : an image with all its pixels flattened out into a [D x 1] vector
b : "bias" vector of size [K x 1]
W : "weight" matrix of size [D * K] (transpose of x)
"""
cross_entropy = -tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1],
name="xentropy")
"""
Represents the cross-entropy between the true (p) distribution and the estimated (q) distribution
Defined as:
H(p,q) = - summation{p(x)*log(q(x))}
    As q converges to p, the summation term increases toward 0 (for a one-hot
    p), so H(p,q) decreases toward its minimum.
    For softmax regression this cross-entropy loss is convex in the weights,
    so gradient descent converges to a global minimum.
"""
# Loss function
loss = tf.reduce_mean(cross_entropy, name="xentropy_mean")
global_step = tf.Variable(0, name="global_step", trainable=False)
train_op = tf.train.GradientDescentOptimizer(0.5).minimize(loss)
"""
cross_entropy example
    assume inaccurate output:
        output   = [0.3, 0.2, 0.5]
        expected = [0, 1, 0]
        cross entropy would be -log(0.2) ~= 1.61
    assume accurate output:
        output   = [0.3, 0.5, 0.2]
        expected = [0, 1, 0]
        cross entropy would be -log(0.5) ~= 0.69
    Notice that the accurate output has the smaller cross entropy and is
    therefore favored, since the loss function aims to minimize the cross entropy
"""
# SUMMARIES
tf.scalar_summary(loss.op.name, loss)
summary_op = tf.merge_all_summaries()
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
summary_writer = tf.train.SummaryWriter("/tmp/tf-summaries/", sess.graph)
start = time.time()
for step in range(1000):
image_inputs, actual_classes = mnist.train.next_batch(50)
_, loss_value = sess.run([train_op, loss], feed_dict={x: image_inputs, y_: actual_classes})
summary_str = sess.run(summary_op, feed_dict={x: image_inputs, y_: actual_classes})
summary_writer.add_summary(summary_str, step)
if step % 100 == 0:
duration = time.time() - start
print "Step {}: loss = {:.2f} ({:.3f} sec)".format(step, loss_value,
duration)
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
"""
        Argument of the maxima (argmax) refers to the point(s) of the
domain of a function where the function is maximized
In this context, argmax returns the index of the greatest value in the array
"""
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# Calling `Tensor.eval()` == `tf.get_default_session().run(Tensor)`
print(accuracy.eval(feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
| mit | -1,765,346,255,395,409,200 | 38.606897 | 118 | 0.641477 | false |
forcaeluz/easy-fat | flocks/migrations/0005_animalseparation.py | 1 | 1056 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-07 20:07
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('flocks', '0004_auto_20170228_2038'),
]
operations = [
migrations.CreateModel(
name='AnimalSeparation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('reason', models.CharField(max_length=250)),
('death', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='flocks.AnimalDeath')),
('exit', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='flocks.AnimalExits')),
('flock', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flocks.Flock')),
],
),
]
| gpl-3.0 | -908,075,736,189,567,700 | 38.111111 | 138 | 0.614583 | false |
sassoftware/rpath-storage | storage_test/testbase.py | 1 | 1462 | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
from testrunner import testhelp
from conary.lib import util
class TestCase(testhelp.TestCase):
def setUp(self):
testhelp.TestCase.setUp(self)
self.workDir = tempfile.mkdtemp(prefix='catalog-service-test-')
def tearDown(self):
testhelp.TestCase.tearDown(self)
util.rmtree(self.workDir, ignore_errors = True)
@staticmethod
def normalizeXML(data):
"""lxml will produce the header with single quotes for its attributes,
while xmllint uses double quotes. This function normalizes the data"""
return data.replace(
"<?xml version='1.0' encoding='UTF-8'?>",
'<?xml version="1.0" encoding="UTF-8"?>').strip()
def assertXMLEquals(self, first, second):
self.failUnlessEqual(self.normalizeXML(first),
self.normalizeXML(second))
| apache-2.0 | 1,107,802,165,091,560,100 | 33 | 78 | 0.691518 | false |
hoburg/gpkit | docs/source/examples/simple_box.py | 1 | 1077 | "Maximizes box volume given area and aspect ratio constraints."
from gpkit import Variable, Model
# Parameters
alpha = Variable("alpha", 2, "-", "lower limit, wall aspect ratio")
beta = Variable("beta", 10, "-", "upper limit, wall aspect ratio")
gamma = Variable("gamma", 2, "-", "lower limit, floor aspect ratio")
delta = Variable("delta", 10, "-", "upper limit, floor aspect ratio")
A_wall = Variable("A_{wall}", 200, "m^2", "upper limit, wall area")
A_floor = Variable("A_{floor}", 50, "m^2", "upper limit, floor area")
# Decision variables
h = Variable("h", "m", "height")
w = Variable("w", "m", "width")
d = Variable("d", "m", "depth")
# Constraints
constraints = [A_wall >= 2*h*w + 2*h*d,
A_floor >= w*d,
h/w >= alpha,
h/w <= beta,
d/w >= gamma,
d/w <= delta]
# Objective function
V = h*w*d
objective = 1/V # To maximize V, we minimize its reciprocal
# Formulate the Model
m = Model(objective, constraints)
# Solve the Model and print the results table
print(m.solve(verbosity=0).table())
| mit | -3,243,799,704,349,836,000 | 31.636364 | 69 | 0.610028 | false |
AntonKueltz/makwa | makwa/test.py | 1 | 8492 | from binascii import hexlify, unhexlify
from hashlib import sha512
from random import sample
from re import findall
from six import b
import unittest
from .makwa import Makwa, hashpw, checkpw
n = int(
'C22C40BBD056BB213AAD7C830519101AB926AE18E3E9FC9699C806E0AE5C2594'
'14A01AC1D52E873EC08046A68E344C8D74A508952842EF0F03F71A6EDC077FAA'
'14899A79F83C3AE136F774FA6EB88F1D1AEA5EA02FC0CCAF96E2CE86F3490F49'
'93B4B566C0079641472DEFC14BECCF48984A7946F1441EA144EA4C802A457550'
'BA3DF0F14C090A75FE9E6A77CF0BE98B71D56251A86943E719D27865A489566C'
'1DC57FCDEFACA6AB043F8E13F6C0BE7B39C92DA86E1D87477A189E73CE8E311D'
'3D51361F8B00249FB3D8435607B14A1E70170F9AF36784110A3F2E67428FC18F'
'B013B30FE6782AECB4428D7C8E354A0FBD061B01917C727ABEE0FE3FD3CEF761',
16
)
class MakwaTest(unittest.TestCase):
def test_spec_vector(self):
pi = unhexlify(
'4765676F206265736877616A692761616B656E20617765206D616B77613B206F6'
'E7A61616D206E616E69697A61616E697A692E'
)
sigma = unhexlify('C72703C22A96D9992F3DEA876497E392')
makwa = Makwa(work_factor=4096, pre_hashing=False)
digest = makwa._digest(pi, n, salt=sigma)
self.assertEqual(hexlify(digest), b'c9cea0e6ef09393ab1710a08')
h = makwa.hash(pi, n, salt=sigma)
self.assertEqual(h, '+RK3n5jz7gs_s211_xycDwiqW2ZkvPeqHZJfjkg_yc6g5u8JOTqxcQoI')
h = hashpw(pi, n, salt=sigma, work_factor=4096, pre_hash=False)
self.assertEqual(h, '+RK3n5jz7gs_s211_xycDwiqW2ZkvPeqHZJfjkg_yc6g5u8JOTqxcQoI')
self.assertEqual(h, '+RK3n5jz7gs_s211_xycDwiqW2ZkvPeqHZJfjkg_yc6g5u8JOTqxcQoI')
self.assertTrue(makwa.check(pi, h, n))
self.assertTrue(checkpw(pi, h, n))
self.assertFalse(makwa.check(b'password', h, n))
self.assertFalse(checkpw(b'password', h, n))
self.assertFalse(makwa.check(pi, h, 0xbadc0de))
self.assertFalse(checkpw(pi, h, 0xbadc0de))
def test_kdf_sha256(self):
m = Makwa()
matches = []
with open('kat.txt', 'r') as f:
pattern = r'KDF/SHA-256\n' \
'input: ([a-f0-9]*)\n' \
'output: ([a-f0-9]*)'
matches = findall(pattern, f.read())
for (input, output) in sample(matches, 100):
result = hexlify(m._kdf(unhexlify(input), 100))
self.assertEqual(result, b(output))
def test_kdf_sha512(self):
m = Makwa(h=sha512)
matches = []
with open('kat.txt', 'r') as f:
pattern = r'KDF/SHA-512\n' \
'input: ([a-f0-9]*)\n' \
'output: ([a-f0-9]*)'
matches = findall(pattern, f.read())
for (input, output) in sample(matches, 100):
result = hexlify(m._kdf(unhexlify(input), 100))
self.assertEqual(result, b(output))
def test_digest_sha256(self):
matches = []
with open('kat.txt', 'r') as f:
pattern = r'2048-bit modulus, SHA-256\n' \
'input: ([a-f0-9]*)\n' \
'salt: ([a-f0-9]*)\n' \
'pre-hashing: (.*)\n' \
'post-hashing: (.*)\n' \
'bin384: ([a-f0-9]*)\n' \
'bin4096: ([a-f0-9]*)'
matches = findall(pattern, f.read())
for (input, salt, pre_hashing, post_hashing, bin384, bin4096) in sample(matches, 100):
pre_hashing = (pre_hashing == 'true')
post_hashing = (None if post_hashing == 'false' else int(post_hashing))
m = Makwa(
work_factor=384,
pre_hashing=pre_hashing,
post_hashing_length=post_hashing
)
digest = m._digest(unhexlify(input), n, unhexlify(salt))
self.assertEqual(hexlify(digest), b(bin384))
m = Makwa(
work_factor=4096,
pre_hashing=pre_hashing,
post_hashing_length=post_hashing
)
digest = m._digest(unhexlify(input), n, unhexlify(salt))
self.assertEqual(hexlify(digest), b(bin4096))
def test_digest_sha512(self):
matches = []
with open('kat.txt', 'r') as f:
pattern = r'2048-bit modulus, SHA-512\n' \
'input: ([a-f0-9]*)\n' \
'salt: ([a-f0-9]*)\n' \
'pre-hashing: (.*)\n' \
'post-hashing: (.*)\n' \
'bin384: ([a-f0-9]*)\n' \
'bin4096: ([a-f0-9]*)'
matches = findall(pattern, f.read())
for (input, salt, pre_hashing, post_hashing, bin384, bin4096) in sample(matches, 100):
pre_hashing = (pre_hashing == 'true')
post_hashing = (None if post_hashing == 'false' else int(post_hashing))
m = Makwa(
h=sha512,
work_factor=384,
pre_hashing=pre_hashing,
post_hashing_length=post_hashing
)
digest = m._digest(unhexlify(input), n, unhexlify(salt))
self.assertEqual(hexlify(digest), b(bin384))
m = Makwa(
h=sha512,
work_factor=4096,
pre_hashing=pre_hashing,
post_hashing_length=post_hashing
)
digest = m._digest(unhexlify(input), n, unhexlify(salt))
self.assertEqual(hexlify(digest), b(bin4096))
def test_hashpw_sha256(self):
matches = []
with open('kat.txt', 'r') as f:
pattern = r'2048-bit modulus, SHA-256\n' \
'input: ([a-f0-9]*)\n' \
'salt: ([a-f0-9]*)\n' \
'pre-hashing: (.*)\n' \
'post-hashing: (.*)\n' \
'bin384: [a-f0-9]*\n' \
'bin4096: [a-f0-9]*\n' \
'str384: ([A-Za-z0-9\+\/\_]*)\n' \
'str4096: ([A-Za-z0-9\+\/\_]*)'
matches = findall(pattern, f.read())
for (input, salt, pre_hashing, post_hashing, str384, str4096) in sample(matches, 100):
pre_hashing = (pre_hashing == 'true')
post_hashing = (None if post_hashing == 'false' else int(post_hashing))
hashed = hashpw(
unhexlify(input),
n,
salt=unhexlify(salt),
work_factor=384,
pre_hash=pre_hashing,
post_hash=post_hashing
)
self.assertEqual(hashed, str384)
self.assertTrue(checkpw(unhexlify(input), hashed, n))
hashed = hashpw(
unhexlify(input),
n,
salt=unhexlify(salt),
work_factor=4096,
pre_hash=pre_hashing,
post_hash=post_hashing
)
self.assertEqual(hashed, str4096)
self.assertTrue(checkpw(unhexlify(input), hashed, n))
def test_hashpw_sha512(self):
matches = []
with open('kat.txt', 'r') as f:
pattern = r'2048-bit modulus, SHA-512\n' \
'input: ([a-f0-9]*)\n' \
'salt: ([a-f0-9]*)\n' \
'pre-hashing: (.*)\n' \
'post-hashing: (.*)\n' \
'bin384: [a-f0-9]*\n' \
'bin4096: [a-f0-9]*\n' \
'str384: ([A-Za-z0-9\+\/\_]*)\n' \
'str4096: ([A-Za-z0-9\+\/\_]*)'
matches = findall(pattern, f.read())
for (input, salt, pre_hashing, post_hashing, str384, str4096) in sample(matches, 100):
pre_hashing = (pre_hashing == 'true')
post_hashing = (None if post_hashing == 'false' else int(post_hashing))
hashed = hashpw(
unhexlify(input),
n,
salt=unhexlify(salt),
h=sha512,
work_factor=384,
pre_hash=pre_hashing,
post_hash=post_hashing
)
self.assertEqual(hashed, str384)
self.assertTrue(checkpw(unhexlify(input), hashed, n, h=sha512))
hashed = hashpw(
unhexlify(input),
n,
salt=unhexlify(salt),
h=sha512,
work_factor=4096,
pre_hash=pre_hashing,
post_hash=post_hashing
)
self.assertEqual(hashed, str4096)
self.assertTrue(checkpw(unhexlify(input), hashed, n, h=sha512))
if __name__ == '__main__':
unittest.main()
| unlicense | -7,732,383,660,985,512,000 | 36.082969 | 94 | 0.526024 | false |
vistadataproject/nodeVISTA | setupDocker/pySetup/rasUtilities/SCActions.py | 1 | 53408 | #---------------------------------------------------------------------------
# Copyright 2013 PwC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
## @class SCActions
## Scheduling Actions
'''
Scheduler Actions class. Extends Actions.
Created on Jun 14, 2012
@author: pbradley, bcaine
@copyright PwC
@license http://www.apache.org/licenses/LICENSE-2.0
'''
from Actions import Actions
import TestHelper
import datetime
import time
class SCActions (Actions):
'''
This class extends the Actions class with methods specific to actions performed
through the Roll and Scroll interface for the Scheduling package.
'''
def __init__(self, VistAconn, scheduling=None, user=None, code=None):
Actions.__init__(self, VistAconn, scheduling, user, code)
def signon (self):
''' This provides a signon via ^XUP or ^ZU depending on the value of acode'''
if self.acode is None:
self.VistA.write('S DUZ=1,DUZ(0)="@" D ^XUP')
if self.sched is not None:
self.VistA.wait('OPTION NAME:')
self.VistA.write('SDAM APPT MGT')
else:
self.VistA.write('D ^ZU')
self.VistA.wait('ACCESS CODE:');
self.VistA.write(self.acode)
self.VistA.wait('VERIFY CODE:');
self.VistA.write(self.vcode)
self.VistA.wait('//');
self.VistA.write('')
self.VistA.wait('Core Applications')
self.VistA.write('Scheduling')
    def schtime(self, plushour=1):
        '''Returns the clock hour (e.g. 3PM) plushour hours from now'''
        ttime = datetime.datetime.now() + datetime.timedelta(hours=plushour)
        return ttime.strftime("%I%p").lstrip('0')
def getclinic(self):
'''Determines which clinic to use based on the time of day'''
now = datetime.datetime.now()
hour = now.hour
if (hour >= 23 and hour <= 24) or (hour >= 0 and hour <= 6):
clinic = 'Clinic1'
elif hour >= 7 and hour <= 14:
clinic = 'Clinic2'
elif hour >= 15 and hour <= 22:
clinic = 'CLINICX'
return clinic
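    # e.g. a run at 10:00 selects 'Clinic2'; runs between 23:00 and 06:59
    # fall back to 'Clinic1' (clinic names as defined above).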
    def dateformat(self, dayadd=0):
        '''Returns an M/D/YY date string dayadd days from today. Using
        timedelta handles day overflow, so adding days past the end of a
        month rolls into the next one. Intended for date verification.'''
        date = datetime.datetime.now() + datetime.timedelta(days=dayadd)
        return '%d/%d/%02d' % (date.month, date.day, date.year % 100)
def makeapp(self, clinic, patient, datetime, fresh=None, badtimeresp=None, apptype=None, subcat=None):
'''Makes Appointment for specified user at specified time via Clinic view'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('MA')
self.VistA.wait('PATIENT NAME:')
self.VistA.write('??')
self.VistA.multiwait(['TO STOP:','to exit'])
self.VistA.write('^')
self.VistA.wait('PATIENT NAME:')
self.VistA.write(patient)
if apptype is not None:
self.VistA.wait('TYPE:')
self.VistA.write(apptype)
self.VistA.wait('APPT TYPE:')
self.VistA.write(subcat[0])
self.VistA.wait('APPT TYPE:')
self.VistA.write(subcat[1])
else:
self.VistA.wait('TYPE:')
self.VistA.write('Regular')
if fresh is not None:
self.VistA.wait('APPOINTMENTS:')
self.VistA.write('Yes')
self.VistA.wait('ETHNICITY:')
self.VistA.write('')
self.VistA.wait('RACE:')
self.VistA.write('')
self.VistA.wait('COUNTRY:')
self.VistA.write('')
self.VistA.wait('STREET ADDRESS')
self.VistA.write('')
self.VistA.wait('ZIP')
self.VistA.write('')
for x in range(0, 2):
self.VistA.wait('PHONE NUMBER')
self.VistA.write('')
self.VistA.wait('BAD ADDRESS')
self.VistA.write('')
self.VistA.wait('above changes')
self.VistA.write('No')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('REQUEST')
self.VistA.write('Yes')
self.VistA.wait('DATE/TIME')
self.VistA.write('t+5')
self.VistA.wait('DATE/TIME')
self.VistA.write(datetime)
if badtimeresp is 'noslot':
self.VistA.wait('NO OPEN SLOTS THEN')
self.VistA.wait('DATE/TIME')
self.VistA.write('')
elif badtimeresp is 'overbook':
self.VistA.wait('OVERBOOK')
self.VistA.write('yes')
self.VistA.wait('CORRECT')
self.VistA.write('Yes')
self.VistA.wait('STOPS')
self.VistA.write('No')
self.VistA.wait('OTHER INFO:')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
else:
self.VistA.wait('CORRECT')
self.VistA.write('Yes')
self.VistA.wait('STOPS')
self.VistA.write('No')
self.VistA.wait('OTHER INFO:')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
index = self.VistA.multiwait(['Select Action:','APPOINTMENT LETTER'])
if index == 1:
self.VistA.write('No')
self.VistA.wait('Select Action')
self.VistA.write('Quit')
self.VistA.wait('')
def makeapp_bypat(self, clinic, patient, datetime, loopnum=1, fresh=None, CLfirst=None, prevCO=None):
'''Makes Appointment for specified user at specified time via Patient view'''
self.VistA.wait('Clinic name:')
self.VistA.write(patient) # <--- by patient
self.VistA.wait('OK')
self.VistA.write('Yes')
for _ in range(loopnum):
self.VistA.wait('Select Action:')
if CLfirst is not None:
self.VistA.write('CL')
self.VistA.wait('Select Clinic:')
self.VistA.write(clinic)
self.VistA.wait('Select Action:')
self.VistA.write('MA')
self.VistA.wait('PATIENT NAME:')
self.VistA.write(patient)
else:
self.VistA.write('MA')
self.VistA.wait('Select CLINIC:')
self.VistA.write(clinic)
self.VistA.wait('TYPE:')
self.VistA.write('Regular')
if fresh is not None:
self.VistA.wait('APPOINTMENTS:')
self.VistA.write('Yes')
elif _ >= 1:
self.VistA.wait('APPOINTMENTS:')
self.VistA.write('Yes')
self.VistA.wait('ETHNICITY:')
self.VistA.write('')
self.VistA.wait('RACE:')
self.VistA.write('')
self.VistA.wait('COUNTRY:')
self.VistA.write('')
self.VistA.wait('STREET ADDRESS')
self.VistA.write('')
self.VistA.wait('ZIP')
self.VistA.write('')
for x in range(0, 2):
self.VistA.wait('PHONE NUMBER')
self.VistA.write('')
self.VistA.wait('BAD ADDRESS')
self.VistA.write('')
self.VistA.wait('above changes')
self.VistA.write('No')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('REQUEST')
self.VistA.write('Yes')
self.VistA.wait('DATE/TIME')
self.VistA.write(datetime)
if _ >= 1:
self.VistA.wait('DO YOU WANT TO CANCEL IT')
self.VistA.write('Yes')
self.VistA.wait('Press RETURN to continue:')
self.VistA.write('')
if prevCO is not None:
self.VistA.wait('A check out date has been entered for this appointment!')
self.VistA.wait('DATE/TIME:')
self.VistA.write('')
else:
self.VistA.wait('CORRECT')
self.VistA.write('Yes')
self.VistA.wait('STOPS')
self.VistA.write('No')
self.VistA.wait('OTHER INFO:')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
while True:
index = self.VistA.multiwait(['Select Action:','Select CLINIC:','APPOINTMENT LETTER'])
if index == 0:
self.VistA.write('?\r')
break
elif index == 1:
self.VistA.write('')
elif index == 2:
self.VistA.write('No')
self.VistA.write('Quit')
self.VistA.wait('')
def makeapp_var(self, clinic, patient, datetime, fresh=None, nextaval=None):
'''Makes Appointment for clinic that supports variable length appts (CLInicA)'''
self.VistA.wait('Clinic name:')
self.VistA.write(patient) # <--- by patient
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Select Action:')
self.VistA.write('CL')
self.VistA.wait('Select Clinic:')
self.VistA.write(clinic)
self.VistA.wait('Select Action:')
self.VistA.write('MA')
self.VistA.wait('PATIENT NAME:')
self.VistA.write(patient)
self.VistA.wait('TYPE:')
self.VistA.write('Regular')
if fresh is not None:
self.VistA.wait('APPOINTMENTS:')
self.VistA.write('Yes')
self.VistA.wait('ETHNICITY:')
self.VistA.write('')
self.VistA.wait('RACE:')
self.VistA.write('')
self.VistA.wait('COUNTRY:')
self.VistA.write('')
self.VistA.wait('STREET ADDRESS')
self.VistA.write('')
self.VistA.wait('ZIP')
self.VistA.write('')
for x in range(0, 2):
self.VistA.wait('PHONE NUMBER')
self.VistA.write('')
self.VistA.wait('BAD ADDRESS')
self.VistA.write('')
self.VistA.wait('above changes')
self.VistA.write('No')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('REQUEST')
if nextaval is not None:
self.VistA.write('No')
self.VistA.wait('APPOINTMENT')
else:
self.VistA.write('Yes')
self.VistA.wait('DATE/TIME')
self.VistA.write(datetime)
if 't+122' in datetime:
self.VistA.wait('Add to EWL')
self.VistA.write('Yes')
else:
self.VistA.wait('LENGTH OF APPOINTMENT')
self.VistA.write('15')
self.VistA.wait('increment minutes per hour')
self.VistA.wait('LENGTH OF APPOINTMENT')
self.VistA.write('60')
self.VistA.wait('CORRECT')
self.VistA.write('Yes')
self.VistA.wait('STOPS')
self.VistA.write('No')
self.VistA.wait('OTHER INFO:')
self.VistA.write('')
self.VistA.wait('continue')
self.VistA.write('')
index = self.VistA.multiwait(['Select Action:','APPOINTMENT LETTER'])
if index == 1:
self.VistA.write('No')
self.VistA.wait('Select Action')
self.VistA.write('Quit')
self.VistA.wait('')
def set_mademographics(self, clinic, patient, datetime, dgrph, CLfirst=None):
''' This test sets demographics via MA action. Not used. Reference only. This test crashes on SAVE in gtm'''
self.VistA.wait('Clinic name:')
self.VistA.write(patient) # <--- by patient
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Select Action:')
if CLfirst is not None:
self.VistA.write('CL')
self.VistA.wait('Select Clinic:')
self.VistA.write(clinic)
self.VistA.wait('Select Action:')
self.VistA.write('MA')
self.VistA.wait('PATIENT NAME:')
self.VistA.write(patient)
else:
self.VistA.write('MA')
self.VistA.wait('Select CLINIC:')
self.VistA.write(clinic)
self.VistA.wait('TYPE:')
self.VistA.write('Regular')
for wwset in dgrph:
self.VistA.wait(wwset[0])
self.VistA.write(wwset[1])
self.VistA.wait('REQUEST?')
self.VistA.write('yes')
self.VistA.wait('DATE/TIME:')
self.VistA.write(datetime)
rval = self.VistA.multiwait(['LENGTH OF APPOINTMENT', 'CORRECT'])
if rval == 0:
self.VistA.write('')
self.VistA.wait('CORRECT')
self.VistA.write('Yes')
elif rval == 1:
self.VistA.write('Yes')
self.VistA.wait('STOPS')
self.VistA.write('No')
self.VistA.wait('OTHER INFO:')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
if CLfirst is not None:
self.VistA.wait('Select Action:')
else:
self.VistA.wait('Select CLINIC:')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('Quit')
self.VistA.wait('')
def fix_demographics(self, clinic, patient, dgrph,):
''' This test sets demographics via PD action. This is an alternate implementation of set_mademographics()'''
self.VistA.wait('Clinic name:')
self.VistA.write(patient) # <--- by patient
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Select Action:')
self.VistA.write('PD')
for wwset in dgrph:
self.VistA.wait(wwset[0])
self.VistA.write(wwset[1])
def set_demographics(self, clinic, patient, dgrph, emailAddress=None, CLfirst=None, patidx=None):
'''
This sets demographics via PD action and has an option to select the clinic
before setting demographics for a patient via a patient index (patidx) argument.
'''
self.VistA.wait('Clinic name:')
self.VistA.write(patient) # <--- by patient
self.VistA.wait('OK')
self.VistA.write('Yes')
if CLfirst is not None:
self.VistA.wait('Select Action:')
self.VistA.write('CL')
self.VistA.wait('Select Clinic:')
self.VistA.write(clinic)
self.VistA.wait('Select Action:')
self.VistA.write('PD')
self.VistA.wait('Select Appointments')
self.VistA.write(patidx)
else:
self.VistA.wait('Select Action:')
self.VistA.write('PD')
for wwset in dgrph:
self.VistA.wait(wwset[0])
self.VistA.write(wwset[1])
index = self.VistA.multiwait(['DOES THE PATIENT','EMAIL ADDRESS'])
if index == 0:
if emailAddress != None :
self.VistA.write('Y')
self.VistA.wait('EMAIL ADDRESS')
self.VistA.write(emailAddress)
else:
self.VistA.write('N')
else:
if emailAddress != None :
self.VistA.write(emailAddress)
else:
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('Quit')
self.VistA.wait('')
def get_demographics(self, patient, vlist, emailAddress=None):
'''This gets the patient demographics via the PD action.'''
self.VistA.wait('Clinic name:')
self.VistA.write(patient) # <--- by patient
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Select Action:')
self.VistA.write('PD')
for wwset in vlist:
self.VistA.wait(wwset[0])
self.VistA.write(wwset[1])
index = self.VistA.multiwait(['DOES THE PATIENT','EMAIL ADDRESS'])
if index == 0:
if emailAddress != None:
self.VistA.write('Y')
self.VistA.wait(emailAddress)
self.VistA.write('')
else:
self.VistA.write('N')
else:
self.VistA.write('')
self.VistA.wait('Select Action')
self.VistA.write('Quit')
self.VistA.wait('')
def verapp_bypat(self, patient, vlist, ALvlist=None, EPvlist=None, COnum=None, CInum=None):
'''Verify previous Appointment for specified user at specified time.'''
self.VistA.wait('Clinic name:')
self.VistA.write(patient) # <--- by patient
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('TA')
for vitem in vlist:
self.VistA.wait(vitem)
if ALvlist is not None:
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('TA')
for vitem in ALvlist:
self.VistA.wait(vitem)
if EPvlist is not None:
self.VistA.wait('Select Action:')
self.VistA.write('EP')
self.VistA.wait('Select Appointment(s):')
self.VistA.write('1')
for vitem in EPvlist:
self.VistA.wait(vitem)
self.VistA.wait('Select Action:')
self.VistA.write('^')
if COnum is not None:
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('FU')
self.VistA.wait('Select Action:')
self.VistA.write('CO')
if COnum[0] is not '1':
self.VistA.wait('Select Appointment(s):')
self.VistA.write(COnum[1])
self.VistA.wait('It is too soon to check out this appointment')
self.VistA.write('')
if CInum is not None:
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('FU')
self.VistA.wait('Select Action:')
self.VistA.write('CI')
if CInum[0] is not '1':
self.VistA.wait('Select Appointment(s):')
self.VistA.write(CInum[1])
self.VistA.wait('It is too soon to check in this appointment')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('Quit')
self.VistA.wait('')
def verapp(self, clinic, vlist, COnum=None, CInum=None):
'''Verify previous Appointments by clinic and with CI/CO check '''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('CD')
self.VistA.wait('Select Beginning Date:')
self.VistA.write('')
self.VistA.wait('Ending Date:')
self.VistA.write('t+100')
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('TA')
for vitem in vlist:
self.VistA.wait(vitem)
if COnum is not None:
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('FU')
self.VistA.wait('Select Action:')
self.VistA.write('CO')
if COnum[0] is not '1':
self.VistA.wait('Select Appointment(s):')
self.VistA.write(COnum[1])
rval = self.VistA.multiwait(['It is too soon to check out this appointment',
'You can not check out this appointment'])
if rval == 0:
self.VistA.write('')
elif rval == 1:
self.VistA.write('')
else:
self.VistA.wait('SPECIALERROR, rval: ' + str(rval)) # this should cause a timeout
if CInum is not None:
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('FU')
self.VistA.wait('Select Action:')
self.VistA.write('CI')
if CInum[0] is not '1':
self.VistA.wait('Select Appointment(s):')
self.VistA.write(CInum[1])
self.VistA.wait('It is too soon to check in this appointment')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('Quit')
self.VistA.wait('')
def ver_actions(self, clinic, patient, PRvlist, DXvlist, CPvlist):
''' verify action in menu, patient must be checked out'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
# EC
self.VistA.wait('Select Action:')
self.VistA.write('EC')
self.VistA.wait('Select Appointment(s)')
self.VistA.write('2')
self.VistA.wait('to continue')
self.VistA.write('')
self.VistA.wait('Select Action:')
# RT
self.VistA.write('RT')
for vitem in ['Chart Request', 'Fill Next Clinic Request', 'Profile of Charts', 'Recharge a Chart']:
self.VistA.wait(vitem)
self.VistA.wait('Select Record Tracking Option:')
self.VistA.write('^')
# PR
self.VistA.wait('Select Action:')
self.VistA.write('PR')
self.VistA.wait('CHOOSE 1-2:')
self.VistA.write('1')
self.VistA.wait('Select Appointment(s):')
self.VistA.write('1')
for vitem in PRvlist:
self.VistA.wait(vitem)
self.VistA.wait('Enter PROVIDER:')
self.VistA.write('')
self.VistA.wait('for this ENCOUNTER')
self.VistA.write('')
self.VistA.wait('Enter PROVIDER:')
self.VistA.write('')
# DX
self.VistA.wait('Select Action:')
self.VistA.write('DX')
self.VistA.wait('Select Appointment(s):')
self.VistA.write('1')
for vitem in DXvlist:
self.VistA.wait(vitem)
self.VistA.wait('Diagnosis :')
self.VistA.write('')
self.VistA.wait('Problem List')
self.VistA.write('no')
# CP
self.VistA.wait('Select Action:')
self.VistA.write('CP')
self.VistA.wait('Select Appointment(s):')
self.VistA.write('1')
for vitem in CPvlist:
self.VistA.wait(vitem)
self.VistA.wait('Enter PROCEDURE')
self.VistA.write('')
# PC
self.VistA.wait('Select Action:')
self.VistA.write('PC')
self.VistA.multiwait(['to continue','is locked'])
self.VistA.write('')
def use_sbar(self, clinic, patient, fresh=None):
'''Use the space bar to get previous clinic or patient '''
self.VistA.wait('Clinic name:')
self.VistA.write(' ') # spacebar to test recall
self.VistA.wait(patient) # check to make sure expected patient SSN is recalled
self.VistA.write('No')
self.VistA.wait(clinic) # check to make sure expected clinic is recalled
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('MA')
self.VistA.wait('Select PATIENT NAME:')
self.VistA.write(' ') # spacebar to test recall
self.VistA.wait(patient) # check to make sure expected patient SSN is recalled
self.VistA.wait('TYPE:')
self.VistA.write('Regular')
if fresh is not None:
self.VistA.wait('APPOINTMENTS:')
self.VistA.write('Yes')
self.VistA.wait('ETHNICITY:')
self.VistA.write('')
self.VistA.wait('RACE:')
self.VistA.write('')
self.VistA.wait('COUNTRY:')
self.VistA.write('')
self.VistA.wait('STREET ADDRESS')
self.VistA.write('')
self.VistA.wait('ZIP')
self.VistA.write('')
for x in range(0, 2):
self.VistA.wait('PHONE NUMBER')
self.VistA.write('')
self.VistA.wait('BAD ADDRESS')
self.VistA.write('')
self.VistA.wait('above changes')
self.VistA.write('No')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('REQUEST')
self.VistA.write('Yes')
self.VistA.wait('DATE/TIME')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('Quit')
self.VistA.wait('')
def canapp(self, clinic, mult=None, future=None, rebook=None):
        '''Cancel an appointment; if there are multiple appts on the schedule, pass the appointment number string via the parameter "mult"'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+100')
self.VistA.wait('Select Action:')
self.VistA.write('AL')
if future is None:
self.VistA.wait('Select List:')
self.VistA.write('TA')
else:
self.VistA.wait('Select List:')
self.VistA.write('FU')
self.VistA.wait('Select Action:')
self.VistA.write('CA')
if mult is not None:
# If there are more than 1 appointments
self.VistA.wait('Select Appointment')
self.VistA.write(mult)
self.VistA.wait('linic:')
self.VistA.write('Clinic')
self.VistA.wait('REASONS NAME')
self.VistA.write('Clinic Cancelled')
self.VistA.wait('REMARKS:')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
if rebook is None:
self.VistA.wait('CANCELLED')
self.VistA.write('no')
self.VistA.wait('CANCELLED')
self.VistA.write('')
else:
self.VistA.wait('CANCELLED')
self.VistA.write('yes')
self.VistA.wait('OUTPUT REBOOKED APPT')
self.VistA.write('')
self.VistA.wait('TO BE REBOOKED:')
self.VistA.write('1')
self.VistA.wait('FROM WHAT DATE:')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('CONTINUE')
self.VistA.write('')
self.VistA.wait('PRINT LETTERS FOR THE CANCELLED APPOINTMENT')
self.VistA.write('')
self.VistA.wait('exit:')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('')
def noshow(self, clinic, appnum):
'''Registers a patient as a no show'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('NS')
self.VistA.wait('Select Appointment')
self.VistA.write(appnum)
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('NOW')
self.VistA.write('')
self.VistA.wait('NOW')
self.VistA.write('')
self.VistA.wait('exit:')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('')
def checkin(self, clinic, vlist, mult=None):
'''Checks a patient in'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('TA')
self.VistA.wait('Select Action:')
self.VistA.write('CI')
if mult is not None:
self.VistA.wait('Appointment')
self.VistA.write(mult)
for vitem in vlist:
self.VistA.wait_re(vitem)
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('')
def checkout(self, clinic, vlist1, vlist2, icd, icd10, mult=None):
'''Checks a Patient out'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('TA')
self.VistA.wait('Select Action:')
self.VistA.write('CO')
if mult is not None:
self.VistA.wait('Appointment')
self.VistA.write(mult)
for vitem in vlist1:
self.VistA.wait(vitem)
self.VistA.wait('appointment')
self.VistA.write('No')
self.VistA.wait('date and time:')
self.VistA.write('Now')
self.VistA.wait('PROVIDER:')
self.VistA.write('Alexander')
self.VistA.wait('ENCOUNTER')
self.VistA.write('Yes')
self.VistA.wait('PROVIDER')
self.VistA.write('')
self.VistA.wait('Diagnosis')
self.VistA.write(icd)
index = self.VistA.multiwait(['No records','OK'])
if index == 0:
self.VistA.write(icd10)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('ENCOUNTER')
self.VistA.write('Yes')
self.VistA.wait('Resulting:')
self.VistA.write('R')
for vitem in vlist2:
self.VistA.wait(vitem)
self.VistA.wait('Diagnosis')
self.VistA.write('')
self.VistA.wait('Problem List')
self.VistA.write('No')
self.VistA.wait('PROCEDURE')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('screen')
self.VistA.write('No')
self.VistA.wait('Clinic:')
self.VistA.write('')
def unschvisit(self, clinic, patient, patientname):
'''Makes a walk-in appointment. Automatically checks in'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('UN')
self.VistA.wait('Select Patient:')
self.VistA.write(patient)
self.VistA.wait('TIME:')
self.VistA.write('')
self.VistA.wait('TYPE:')
self.VistA.write('Regular')
self.VistA.wait('continue:')
self.VistA.write('')
index = self.VistA.multiwait(['Check Out:','ROUTING SLIP'])
if index == 1:
self.VistA.write('N')
self.VistA.wait('Check Out')
self.VistA.write('CI')
self.VistA.wait_re('CHECKED')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('SLIP NOW')
self.VistA.write('No')
self.VistA.wait(patientname)
self.VistA.wait('Checked In')
self.VistA.wait('Select Action')
self.VistA.write('')
def chgpatient(self, clinic, patient1, patient2, patientname1, patientname2):
'''Changes the patient between patient 1 and patient 2'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('PT')
self.VistA.wait('Patient:')
self.VistA.write(patient1)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait(patientname1.upper())
self.VistA.wait('Select Action:')
self.VistA.write('PT')
self.VistA.wait('Patient:')
self.VistA.write(patient2)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait(patientname2.upper())
self.VistA.wait('Select Action:')
self.VistA.write('Quit')
def chgclinic(self):
'''Changes the clinic from clinic1 to clinic2'''
self.VistA.wait('Clinic name:')
self.VistA.write('Clinic1')
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Clinic1')
self.VistA.wait('Select Action:')
self.VistA.write('CL')
self.VistA.wait('Select Clinic:')
self.VistA.write('Clinic2')
self.VistA.wait('Clinic2')
self.VistA.wait('Select Action:')
self.VistA.write('Quit')
def chgdaterange(self, clinic):
'''Changes the date range of the clinic'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait(clinic)
self.VistA.wait('Select Action:')
self.VistA.write('CD')
self.VistA.wait('Date:')
self.VistA.write('t+7')
self.VistA.wait('Date:')
self.VistA.write('t+7')
self.VistA.wait('Select Action:')
self.VistA.write('CD')
self.VistA.wait('Date:')
self.VistA.write('t-4')
self.VistA.wait('Date:')
self.VistA.write('t+4')
self.VistA.wait('Select Action:')
self.VistA.write('')
def expandentry(self, clinic, vlist1, vlist2, vlist3, vlist4, vlist5, mult=None):
'''Expands an appointment entry for more detail'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait(clinic)
self.VistA.wait('Select Action:')
self.VistA.write('AL')
self.VistA.wait('Select List:')
self.VistA.write('TA')
self.VistA.wait('Select Action:')
self.VistA.write('EP')
if mult is not None:
self.VistA.wait('Appointment')
self.VistA.write(mult)
for vitem in vlist1:
self.VistA.wait(vitem)
self.VistA.wait('Select Action:')
self.VistA.write('')
for vitem in vlist2:
self.VistA.wait(vitem)
self.VistA.wait('Select Action:')
self.VistA.write('')
for vitem in vlist3:
self.VistA.wait(vitem)
self.VistA.wait('Select Action:')
self.VistA.write('')
for vitem in vlist4:
self.VistA.wait(vitem)
self.VistA.wait('Select Action:')
self.VistA.write('')
for vitem in vlist5:
self.VistA.wait(vitem)
self.VistA.wait('Select Action:')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('')
def addedit(self, clinic, name, icd, icd10):
'''
Functional but not complete. Exercises the Add/Edit menu but doesn't make any changes
Same problem as checkout with the CPT codes and the MPI
'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait(clinic)
self.VistA.wait('Select Action:')
self.VistA.write('AE')
self.VistA.wait('Name:')
self.VistA.write(name)
self.VistA.wait('exit:')
self.VistA.write('A')
self.VistA.wait('Clinic:')
self.VistA.write(clinic)
self.VistA.wait('Time:')
time = self.schtime()
self.VistA.write(time)
self.VistA.wait('APPOINTMENT TYPE:')
self.VistA.write('')
self.VistA.wait('PROVIDER:')
self.VistA.write('Alexander')
self.VistA.wait('ENCOUNTER')
self.VistA.write('Yes')
self.VistA.wait('Enter PROVIDER:')
self.VistA.write('')
self.VistA.wait('Diagnosis')
self.VistA.write(icd)
index = self.VistA.multiwait(['No records','Ok'])
if index == 0:
self.VistA.write(icd10)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('ENCOUNTER')
self.VistA.write('Yes')
self.VistA.wait('Resulting')
self.VistA.write('R')
self.VistA.wait('Diagnosis')
self.VistA.write('')
self.VistA.wait('Problem List')
self.VistA.write('')
self.VistA.wait('CPT CODE')
self.VistA.write('')
self.VistA.wait('encounter')
self.VistA.write('Yes')
self.VistA.wait('Select Action:')
self.VistA.write('')
def patdem(self, clinic, name, mult=None):
'''This edits the patients demographic information'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait(clinic)
self.VistA.wait('Select Action:')
self.VistA.write('PD')
if mult is not None:
self.VistA.wait('Appointment')
self.VistA.write(mult)
self.VistA.wait(name)
self.VistA.wait('COUNTRY:')
self.VistA.write('')
self.VistA.wait('ADDRESS')
self.VistA.write('')
self.VistA.wait(':')
self.VistA.write('')
self.VistA.wait('PHONE NUMBER')
self.VistA.write('')
self.VistA.wait('PHONE NUMBER')
self.VistA.write('')
self.VistA.wait('INDICATOR:')
self.VistA.write('')
self.VistA.wait('changes')
self.VistA.write('No')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('SEX:')
self.VistA.write('')
self.VistA.wait('INFORMATION')
self.VistA.write('N')
self.VistA.wait('INFORMATION:')
self.VistA.write('W')
self.VistA.wait('RACE INFORMATION')
self.VistA.write('Yes')
self.VistA.wait('INFORMATION:')
self.VistA.write('')
self.VistA.wait('STATUS:')
self.VistA.write('Married')
self.VistA.wait('PREFERENCE:')
self.VistA.write('')
self.VistA.wait('ACTIVE')
self.VistA.write('No')
self.VistA.wait('NUMBER')
self.VistA.write('')
self.VistA.wait('NUMBER')
self.VistA.write('')
index = self.VistA.multiwait(['DOES THE','ADDRESS'])
if index == 0:
self.VistA.write('Y')
self.VistA.wait('EMAIL ADDRESS')
self.VistA.write('[email protected]')
self.VistA.wait('Select Action')
self.VistA.write('')
def teaminfo(self, clinic, patient=None):
'''This checks the display team info feature'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('TI')
if patient is not None:
self.VistA.wait('Select Patient')
self.VistA.write(patient)
self.VistA.wait('Team Information')
self.VistA.wait('Select Action:')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('')
def enroll(self, clinic, patient):
'''This enrolls a patient as an inpatient in a clinic'''
self.VistA.wait('OPTION NAME')
self.VistA.write('Appointment Menu')
self.VistA.wait('Appointment Menu')
self.VistA.write('Edit Clinic Enrollment Data')
self.VistA.wait('PATIENT NAME')
self.VistA.write(patient)
self.VistA.wait('CLINIC:')
self.VistA.write(clinic)
self.VistA.wait('ENROLLMENT CLINIC')
self.VistA.write('Yes')
self.VistA.wait('ENROLLMENT:')
self.VistA.write('t')
self.VistA.wait('Are you adding')
self.VistA.write('Yes')
self.VistA.wait('AC:')
self.VistA.write('OPT')
self.VistA.wait('DATE:')
self.VistA.write('')
self.VistA.wait('DISCHARGE:')
self.VistA.write('')
self.VistA.wait('DISCHARGE')
self.VistA.write('')
self.VistA.wait('CLINIC:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('ENROLLMENT')
self.VistA.write('')
self.VistA.wait('ENROLLMENT')
self.VistA.write('')
self.VistA.wait('AC:')
self.VistA.write('')
self.VistA.wait('DATE:')
self.VistA.write('')
self.VistA.wait('DISCHARGE')
self.VistA.write('')
self.VistA.wait('DISCHARGE')
self.VistA.write('')
self.VistA.wait('CLINIC')
self.VistA.write('')
self.VistA.wait('NAME:')
self.VistA.write('')
self.VistA.wait('Appointment Menu')
self.VistA.write('')
self.VistA.wait('halt')
self.VistA.write('')
def discharge(self, clinic, patient, appnum=None):
'''Discharges a patient from the clinic'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('DC')
if appnum is not None:
self.VistA.wait('Select Appointment')
self.VistA.write(appnum)
self.VistA.wait('Discharging patient from')
self.VistA.wait('DATE OF DISCHARGE:')
self.VistA.write('t')
self.VistA.wait('REASON FOR DISCHARGE')
self.VistA.write('testing')
self.VistA.wait('Action:')
self.VistA.write('')
def deletecheckout(self, clinic, appnum=None):
'''
Deletes checkout from the menu
Must be signed in as fakedoc1 (1Doc!@#$)
Must have the SD SUPERVISOR Key assigned to Dr. Alexander
'''
self.VistA.wait('Scheduling Manager\'s Menu')
self.VistA.write('Appointment Menu')
self.VistA.wait('Appointment Menu')
self.VistA.write('Appointment Management')
self.VistA.wait('Clinic name')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Action:')
self.VistA.write('DE')
if appnum is not None:
self.VistA.wait('Select Appointment')
self.VistA.write(appnum)
self.VistA.wait('check out')
self.VistA.write('Yes')
self.VistA.wait('deleting')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('deleting check out')
self.VistA.wait('exit:')
self.VistA.write('')
self.VistA.wait('Action:')
self.VistA.write('')
def waitlistentry(self, clinic, patient):
'''
Enters a patient into the wait list
This assumes that SDWL PARAMETER and SDWL MENU
keys are given to fakedoc1
'''
self.VistA.wait('Scheduling Manager\'s Menu')
self.VistA.write('Appointment Menu')
self.VistA.wait('Appointment Menu')
self.VistA.write('Appointment Management')
self.VistA.wait('name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Action:')
self.VistA.write('WE')
self.VistA.wait('NAME:')
self.VistA.write(patient)
self.VistA.wait('Patient')
self.VistA.write('Yes')
self.VistA.wait('response:')
# TODO: Explore all three options (PCMM TEAM ASSIGNMENT, SERVICE/SPECIALTY, SPECIFIC CLINIC
self.VistA.write('1')
self.VistA.wait('Institution:')
self.VistA.write('1327')
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Team:')
self.VistA.write('1')
self.VistA.wait('OK')
self.VistA.write('yes')
self.VistA.wait('Comments:')
self.VistA.write('test')
self.VistA.wait('Action:')
self.VistA.write('')
def waitlistdisposition(self, clinic, patient):
'''This verifies that the wait list disposition option is working'''
self.VistA.wait('Option:')
self.VistA.write('Appointment Management')
self.VistA.wait('name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Action:')
self.VistA.write('WD')
self.VistA.wait('PATIENT:')
self.VistA.write(patient)
self.VistA.wait('Quit')
self.VistA.write('Yes')
# TODO: For deeper coverage, execute all 6 disposition reasons
self.VistA.wait('response:')
self.VistA.write('D')
self.VistA.wait('removed from Wait List')
self.VistA.wait('exit:')
self.VistA.write('')
self.VistA.wait('no Wait List')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('')
def gotoApptMgmtMenu(self):
'''
Get to Appointment Management Menu via ZU
'''
self.VistA.wait('Scheduling Manager\'s Menu')
self.VistA.write('Appointment Menu')
self.VistA.wait('Appointment Menu')
self.VistA.write('Appointment Management')
def multiclinicdisplay(self, cliniclist, patient, timelist, pending=None):
'''
Create multiple clinic appointments
'''
self.VistA.wait('Scheduling Manager\'s Menu')
self.VistA.write('Appointment Menu')
self.VistA.wait('Appointment Menu')
self.VistA.write('Multiple Clinic Display')
self.VistA.wait('PATIENT NAME:')
self.VistA.write(patient)
if pending:
self.VistA.wait('DISPLAY PENDING APPOINTMENTS')
self.VistA.write('')
self.VistA.wait('DISPLAY PENDING APPOINTMENTS')
self.VistA.write('')
self.VistA.wait('ETHNICITY:')
self.VistA.write('')
self.VistA.wait('RACE:')
self.VistA.write('')
self.VistA.wait('COUNTRY:')
self.VistA.write('')
self.VistA.wait('STREET ADDRESS')
self.VistA.write('')
self.VistA.wait('ZIP')
self.VistA.write('')
for x in range(0, 2):
self.VistA.wait('PHONE NUMBER')
self.VistA.write('')
self.VistA.wait('BAD ADDRESS')
self.VistA.write('')
self.VistA.wait('above changes')
self.VistA.write('No')
self.VistA.wait('continue:')
self.VistA.write('')
for clinic in cliniclist:
self.VistA.wait('Select CLINIC')
self.VistA.write(clinic)
self.VistA.wait('Select CLINIC:')
self.VistA.write('')
self.VistA.wait('OK to proceed')
self.VistA.write('Yes')
self.VistA.wait('LOOK FOR CLINIC AVAILABILITY STARTING WHEN:')
self.VistA.write('t+1')
self.VistA.wait('SELECT LATEST DATE TO CHECK FOR AVAILABLE SLOTS:')
self.VistA.write('t+10')
self.VistA.wait('REDISPLAY:')
self.VistA.write('B')
for ptime in timelist:
self.VistA.wait('SCHEDULE TIME:')
self.VistA.write(ptime)
rval = self.VistA.multiwait(['APPOINTMENT TYPE:', '...OK'])
if rval == 0:
self.VistA.write('Regular')
elif rval == 1:
self.VistA.write('Yes')
self.VistA.wait('APPOINTMENT TYPE:')
self.VistA.write('Regular')
self.VistA.wait('OR EKG STOPS')
self.VistA.write('No')
self.VistA.wait('OTHER INFO:')
self.VistA.write('')
self.VistA.wait('Press RETURN to continue:')
self.VistA.write('')
self.VistA.wait('Select PATIENT NAME:')
self.VistA.write('')
self.VistA.wait('Appointment Menu')
self.VistA.write('')
def ma_clinicchk(self, clinic, patient, exp_apptype, datetime, cslots, cxrays, fresh=None, cvar=None, elig=None):
'''Makes Appointment to check clinic settings'''
self.VistA.wait('Clinic name:')
self.VistA.write(clinic)
self.VistA.wait('OK')
self.VistA.write('Yes')
self.VistA.wait('Date:')
self.VistA.write('')
self.VistA.wait('Date:')
self.VistA.write('t+1')
self.VistA.wait('Select Action:')
self.VistA.write('MA')
self.VistA.wait('PATIENT NAME:')
self.VistA.write('??')
self.VistA.multiwait(['TO STOP','to exit'])
self.VistA.write('^')
self.VistA.wait('PATIENT NAME:')
self.VistA.write(patient)
self.VistA.wait('APPOINTMENT TYPE: ' + exp_apptype)
self.VistA.write('REGULAR')
if fresh is not None:
self.VistA.wait('APPOINTMENTS:')
self.VistA.write('Yes')
self.VistA.wait('ETHNICITY:')
self.VistA.write('')
self.VistA.wait('RACE:')
self.VistA.write('')
self.VistA.wait('COUNTRY:')
self.VistA.write('')
self.VistA.wait('STREET ADDRESS')
self.VistA.write('')
self.VistA.wait('ZIP')
self.VistA.write('')
for x in range(0, 2):
self.VistA.wait('PHONE NUMBER')
self.VistA.write('')
self.VistA.wait('BAD ADDRESS')
self.VistA.write('')
self.VistA.wait('above changes')
self.VistA.write('No')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('APPOINTMENT REQUEST')
self.VistA.write('Yes')
self.VistA.wait(cslots)
self.VistA.wait('DATE/TIME')
self.VistA.write('t+5')
self.VistA.wait('DATE/TIME')
self.VistA.write(datetime)
if cvar is not None:
self.VistA.wait('LENGTH OF APPOINTMENT')
self.VistA.write('')
self.VistA.wait('CORRECT')
self.VistA.write('Yes')
self.VistA.wait('STOPS')
self.VistA.write('No')
self.VistA.wait('OTHER INFO:')
self.VistA.write('')
if elig is not None and self.VistA.type == 'cache':
self.VistA.wait('ENTER THE ELIGIBILITY FOR THIS APPOINTMENT:')
self.VistA.write('')
self.VistA.wait('continue:')
self.VistA.write('')
self.VistA.wait('Select Action:')
self.VistA.write('Quit')
self.VistA.wait('')
| agpl-3.0 | -6,406,373,654,367,182,000 | 35.681319 | 118 | 0.550367 | false |
Leopardob/Kistie | kcode/kcore/kmaya/kattrs/KstAttrs.py | 1 | 9982 | '''
K.I.S.T.I.E (Keep, It, Simple, Take, It, Easy)
Created on 1 Jan 2013
@author: Leonardo Bruni, [email protected]
Kistie Attrs Class lib
This Kistie implementation is part of project 'Kistie_Autorig' by Leonardo Bruni, [email protected]
'''
import maya.cmds as cmds
# Import KstOut
import kcode.kcore.KstOut as _KstOut_
reload(_KstOut_)
KstOut = _KstOut_.KstOut()
class KstAttrs(object):
# Debug module name variable
_debug = 'KstAttrs'
def __init__(self):
KstOut.debug(KstAttrs._debug, 'Kistie Maya Attrs function module loaded...')
# Lock attr function
def lock_attr(self, obj_name, attr_name):
'''
Desc:
Lock maya attr
Parameter:
obj_name = object name that contains the attr
attr_name = attr name
Return value
'''
cmds.setAttr(obj_name+'.'+attr_name, l=True, k=False)
# Unlock attr function
def unlock_attr(self, obj_name, attr_name):
'''
Desc:
Unlock maya attr
Parameter:
obj_name = object name that contains the attr
attr_name = attr name
Return value
'''
cmds.setAttr(obj_name+'.'+attr_name, l=False, k=True)
# Set function for maya attributes
def set_attr(self, obj_name, attr_name, attr_value):
'''
Desc:
Set maya attribute
Parameter:
obj_name = object name that contains the attr
attr_name = attr name
attr_value = attr value to set
Return value
'''
cmds.setAttr(obj_name+'.'+attr_name, attr_value)
return attr_value
# Get function for maya attributes
def get_attr(self, obj_name, attr_name):
'''
Desc:
Get maya attribute
Parameter:
obj_name = object name that contains the attr
attr_name = attr name
Return value
'''
attr_value = cmds.getAttr(obj_name+'.'+attr_name)
return attr_value
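# Minimal usage sketch of the generic get/set helpers (node and attribute
# names are hypothetical and assume an open Maya scene):
#   kst_attrs = KstAttrs()
#   kst_attrs.set_attr('pCube1', 'translateX', 5.0)
#   tx = kst_attrs.get_attr('pCube1', 'translateX')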
@staticmethod
def create_float_attr(obj, attr_name, default_value=0, limit_min=False, limit_max=False, min=0, max=1):
'''
Desc:
Make float attr
Parameter:
obj = object to attach attr
attr_name = attr name
default_value = attr default value
limit_min = attr min value
limit_max = attr max value
min = min value
max = max value
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, dv=default_value, attributeType='float', min=min, max=max)
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
return obj+'.'+attr_name
@staticmethod
def create_double_attr(obj, attr_name, default_value=0, limit_min=False, limit_max=False, min=0, max=1):
'''
Desc:
Make double attr
Parameter:
obj = object to attach attr
attr_name = attr name
default_value = attr default value
limit_min = attr min value
limit_max = attr max value
min = min value
max = max value
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, dv=default_value, attributeType='double')
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
return obj+'.'+attr_name
@staticmethod
def create_vector_attr(obj, attr_name, default_value=[0,0,0]):
'''
Desc:
Make vector attr
Parameter:
obj = object to attach attr
attr_name = attr name
default_value = attr default vector
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, attributeType='double3')
cmds.addAttr(obj, shortName=attr_name+'X', longName=attr_name+'X', attributeType='double', p=attr_name)
cmds.addAttr(obj, shortName=attr_name+'Y', longName=attr_name+'Y', attributeType='double', p=attr_name)
cmds.addAttr(obj, shortName=attr_name+'Z', longName=attr_name+'Z', attributeType='double', p=attr_name)
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name+'X', e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name+'Y', e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name+'Z', e=True, keyable=True)
return obj+'.'+attr_name
@staticmethod
def create_bool_attr(obj, attr_name, value=False):
'''
Desc:
Make bool attr
Parameter:
obj = object to attach attr
attr_name = attr name
default_value = attr default bool
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, attributeType='bool')
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name, value)
return obj+'.'+attr_name
@staticmethod
def create_string_attr(obj, attr_name, str):
'''
Desc:
Make string attr
Parameter:
obj = object to attach attr
attr_name = attr name
str = string value
Return string
obj.attrname
'''
#print('current_obj: ', obj)
#print('attr_name: ', attr_name)
#print('str', str)
# Check if current attribute exists, if not, will add
if not cmds.attributeQuery(attr_name, node=obj, exists = True):
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, dt='string')
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name, str, type='string')
else:
KstOut.debug(KstAttrs._debug, 'Attribute %s already exists on node %s, skipped' % (attr_name, obj))
return obj+'.'+attr_name
@staticmethod
def create_enum_attr(obj, attr_name, enum_list):
'''
Desc:
Make enum attr
Parameter:
obj = object to attach attr
attr_name = attr name
enum_list = enum value list
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, attributeType='enum', en=enum_list)
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
return obj+'.'+attr_name
@staticmethod
def create_matrix_attr(obj, attr_name, matrix):
'''
Desc:
Make matrix attr
Parameter:
obj = object to attach attr
attr_name = attr name
matrix = matrix
Return matrix
obj.attrname
'''
KstOut.debug(KstAttrs._debug, 'Matrix attr, not implemented yet!')
pass
@staticmethod
def create_separator_attr(obj, attr_name, enum_list='_'*16+':'):
'''
Desc:
Make separator attr
Parameter:
obj = object to attach attr
attr_name = attr name
enum_list = enum value list
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, attributeType='enum', en=enum_list)
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True, lock=True)
return obj+'.'+attr_name
@staticmethod
def read_message_attr(obj_attr_name, *args):
'''
Desc:
Read a message attr
Parameter:
obj = object that contain message attr
attr_name = attr name
args = other inputs
Return string
obj.attrname
'''
# Object
obj = str(obj_attr_name).split('.')[0]
# Attr name
attr_name = str(obj_attr_name).split('.')[1]
# Connections
connections = cmds.listConnections(obj+'.'+attr_name, s=1)
return connections[0]
def create_tag_attr(self, obj, tag_name, tag_value):
'''
Desc:
Create a tag for selected object
Parameter:
obj = object that contain tag
tag = tag name
value = tag value
Return:
obj.tag_name
'''
# Check if obj is valid
if (obj):
if not cmds.attributeQuery(tag_name, node=obj, exists = True):
cmds.addAttr(obj, shortName=tag_name, longName=tag_name, dt='string')
cmds.setAttr(obj+'.'+tag_name, e=True, keyable=False)
cmds.setAttr(obj+'.'+tag_name, tag_value, type='string')
KstAttrs.lock_attr(self, obj, tag_name)
else:
pass
#print('Attribute %s already exists on node %s, skipped' % (tag_name, obj))
return obj+'.'+tag_name
def __get__(self, instance, owner):
'''
:param instance:
:param owner:
:return:
'''
return self.getValue(instance)
def __set__(self, instance, value):
'''
:param instance:
:param value:
:return:
'''
if not self.checkDataType(value):
return
self.setValue(instance, value)
def setValue(self, instance, value):
'''
:param instance:
:return:
'''
raise NotImplementedError()
def getValue(self, instance):
'''
:param value:
:return:
'''
raise NotImplementedError()
def checkDataType(self, value):
'''
:param value:
:return:
'''
if type(self.data_type).__name__ != 'list':
if type(value).__name__ != self.data_type:
raise ValueError("Attribute : expected {x} got {y}".format(x=self.data_type, y=type(value).__name__))
else:
return True
else:
if type(value).__name__ not in self.data_type:
raise ValueError("Attribute : expected {x} got {y}".format(
x=self.data_type, y=type(value).__name__))
else:
return True
pypyr/pypyr-cli | pypyr/steps/dsl/cmd.py | 1 | 5630 | """pypyr step yaml definition for commands - domain specific language."""
import shlex
import subprocess
import logging
from pypyr.errors import ContextError
from pypyr.utils import types
# logger means the log level will be set correctly
logger = logging.getLogger(__name__)
class CmdStep():
"""A pypyr step that represents a command runner step.
This models a step that takes config like this:
cmd: <<cmd string>>
OR, as a dict
cmd:
run: str. mandatory. command + args to execute.
save: bool. defaults False. save output to cmdOut.
If save is True, will save the output to context as follows:
cmdOut:
returncode: 0
stdout: 'stdout str here. None if empty.'
stderr: 'stderr str here. None if empty.'
cmdOut.returncode is the exit status of the called process. Typically 0
means OK. A negative value -N indicates that the child was terminated by
signal N (POSIX only).
The run_step method does the actual work. init loads the yaml.
"""
def __init__(self, name, context):
"""Initialize the CmdStep.
The step config in the context dict looks like this:
cmd: <<cmd string>>
OR, as a dict
cmd:
run: str. mandatory. command + args to execute.
save: bool. optional. defaults False. save output to cmdOut.
cwd: str/path. optional. if specified, change the working
directory just for the duration of the command.
Args:
name: Unique name for step. Likely __name__ of calling step.
context: pypyr.context.Context. Look for config in this context
instance.
"""
assert name, ("name parameter must exist for CmdStep.")
assert context, ("context param must exist for CmdStep.")
# this way, logs output as the calling step, which makes more sense
# to end-user than a mystery steps.dsl.blah logging output.
self.logger = logging.getLogger(name)
context.assert_key_has_value(key='cmd', caller=name)
self.context = context
self.is_save = False
cmd_config = context.get_formatted('cmd')
if isinstance(cmd_config, str):
self.cmd_text = cmd_config
self.cwd = None
self.logger.debug("Processing command string: %s", cmd_config)
elif isinstance(cmd_config, dict):
context.assert_child_key_has_value(parent='cmd',
child='run',
caller=name)
self.cmd_text = cmd_config['run']
self.is_save = types.cast_to_bool(cmd_config.get('save', False))
cwd_string = cmd_config.get('cwd', None)
if cwd_string:
self.cwd = cwd_string
self.logger.debug("Processing command string in dir "
"%s: %s", self.cwd, self.cmd_text)
else:
self.cwd = None
self.logger.debug("Processing command string: %s",
self.cmd_text)
else:
raise ContextError(f"{name} cmd config should be either a simple "
"string cmd='mycommandhere' or a dictionary "
"cmd={'run': 'mycommandhere', 'save': False}.")
def run_step(self, is_shell):
"""Run a command.
Runs a program or executable. If is_shell is True, executes the command
through the shell.
Args:
is_shell: bool. defaults False. Set to true to execute cmd through
the default shell.
"""
assert is_shell is not None, ("is_shell param must exist for CmdStep.")
# why? If shell is True, it is recommended to pass args as a string
# rather than as a sequence.
if is_shell:
args = self.cmd_text
else:
args = shlex.split(self.cmd_text)
if self.is_save:
completed_process = subprocess.run(args,
cwd=self.cwd,
shell=is_shell,
# capture_output=True,only>py3.7
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
# text=True, only>=py3.7,
universal_newlines=True)
self.context['cmdOut'] = {
'returncode': completed_process.returncode,
'stdout': (completed_process.stdout.rstrip()
if completed_process.stdout else None),
'stderr': (completed_process.stderr.rstrip()
if completed_process.stderr else None)
}
# when capture is true, output doesn't write to stdout
self.logger.info("stdout: %s", completed_process.stdout)
if completed_process.stderr:
self.logger.error("stderr: %s", completed_process.stderr)
# don't swallow the error, because it's the Step swallow decorator
# responsibility to decide to ignore or not.
completed_process.check_returncode()
else:
# check=True throws CalledProcessError if exit code != 0
subprocess.run(args, shell=is_shell, check=True, cwd=self.cwd)
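# Illustrative pipeline yaml that this step models (not taken from this file):
#   steps:
#     - name: pypyr.steps.cmd
#       in:
#         cmd:
#           run: echo hello
#           save: True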
| apache-2.0 | 3,077,131,209,848,973,000 | 38.929078 | 79 | 0.54103 | false |
NestarZ/rank-my-library | src/main.py | 1 | 5177 | #!/usr/bin/python
# -*- coding: "utf-8"-*-
import requests
import os
import json
import time
import re
import sys
import csv_maker
import pick_best_data
class Master(object):
def __init__(self):
time_id = str(time.strftime("%m%Y"))
api_dic = self.load_apis()
self.scanner = RankMyLibrary(time_id, api_dic)
def run(self):
args = sys.argv + [0]*5
if "--movie" in args or "--all" in args or not("--movie" in args or "--anime" in args):
movie_folder = os.listdir("/media/nestarz/Disque local1/Videos/Films")
self.scanner.scan(movie_folder, "movie")
# Mirror the movie branch: scan animes when requested, or by default when no flag is given.
if "--anime" in args or "--all" in args or not("--movie" in args or "--anime" in args):
animes_folder1 = os.listdir("/media/nestarz/Disque local1/Videos/Animes/Series d'animation")
animes_folder2 = os.listdir("/media/nestarz/Disque local/Videos/Animes/Series d'animation")
animes_folder3 = os.listdir("/media/nestarz/8AB2AF54B2AF4413/Videos/Animes/Series d'animation")
animes_folders = animes_folder1 + animes_folder2 + animes_folder3
self.scanner.scan(animes_folders, "anime")
def load_apis(self):
file_dir = os.path.dirname(os.path.realpath(__file__))
with open(file_dir+"/../api/list_api.json",'r') as f:
data = json.load(f)
apis = {}
for content_type in data["categories"].keys():
apis[content_type] = []
for name, api in data["api"].items():
if name in data["categories"][content_type]:
apis[content_type].append(API(name, content_type, api))
return apis
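# Sketch of the expected list_api.json layout (keys inferred from the accesses
# above and in API/Content; all values are placeholders):
# {
#   "categories": {"movie": ["some_api"], "anime": ["other_api"]},
#   "api": {
#     "some_api": {
#       "url": "http://example.org/", "endpoint": "search",
#       "parameters": {"search_bytitle": "q", "search_byyear": "year"},
#       "metadata": {"title": "title_key"}
#     }
#   }
# }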
class API(object):
def __init__(self, api_name, api_type, data):
self.name = api_name
self.api_type = api_type
self.data = data
def get_url(self, title, year=''):
url = "{}{}?{}={}&{}={}"
return url.format(self.data["url"],
self.data["endpoint"],
self.data["parameters"]["search_bytitle"],
title,
self.data["parameters"].get("search_byyear", ''),
year)
class Content(object):
def __init__(self, ctype, name, api, data={}):
self.content_type = ctype
self.name = name
self.api = api
self.data = data
def json_parse(self):
jp = {key:str(self.data[v]) if v else "" for key, v in self.api.data["metadata"].items()}
jp.update({"api_name":self.api.name, "dir":self.name, "type":self.content_type})
return jp
@classmethod
def fromJson(cls, data):
return Content(data["type"], data["dir"], data["api_name"], data)
class RankMyLibrary(object):
def __init__(self, time_id, api_dic):
self.time_id = time_id
self.api_dic = api_dic
def get(self, url):
return requests.get(url)
def find_in_api(self, api, content_type, name):
cname = re.sub(r'\([^)]*\)', '', name).strip()
print(api.get_url(cname))
response = self.get(api.get_url(cname))
data = {}
if response.status_code == 200:
data = json.loads(response.text)
if isinstance(data, dict) and data and data.get("Response","True") == "True":
return Content(content_type, name, api, data)
elif isinstance(data, list):
data = pick_best_data.pick_best(name, api, data)
return Content(content_type, name, api, data)
def find_in_json(self, api_name, data, name):
for content in data:
if name in content.values() and content["api_name"] == api_name:
print('| {} ({})'.format(name, api_name))
return Content.fromJson(content)
def get_json(self, fname):
if not os.path.isfile(fname):
with open(fname,'w') as f:
json.dump([], f)
with open(fname,'r') as f:
return json.load(f)
def update_json(self, fname, data, content):
with open(fname,'w') as f:
json.dump(data, f, indent=4, sort_keys=True)
print('+ {} ({})'.format(content.name, content.api.name))
def update_csv(self, fname, data):
csv_maker.doCSV(fname, data)
def scan(self, folders, content_type):
file_dir = os.path.dirname(os.path.realpath(__file__))
fname = file_dir+"/../data/{}_{}".format(content_type,self.time_id)
csv_ext, json_ext = ".csv", ".json"
data = self.get_json(fname+json_ext)
for api in self.api_dic[content_type]:
print(api.name)
if (api.name) == "myapifilms_imdb": continue
for folder in folders:
content = self.find_in_json(api.name, data, folder)
if not content:
content = self.find_in_api(api, content_type, folder)
if not content:
continue
data.append(content.json_parse())
self.update_json(fname+json_ext, data, content)
self.update_csv(fname+csv_ext, data)
if __name__=="__main__":
m = Master()
m.run()
| mit | 5,913,420,024,278,776,000 | 37.066176 | 107 | 0.554375 | false |
t794104/ansible | lib/ansible/plugins/netconf/__init__.py | 1 | 16876 | #
# (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from abc import abstractmethod
from functools import wraps
from ansible.errors import AnsibleError
from ansible.plugins import AnsiblePlugin
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import missing_required_lib
try:
from ncclient.operations import RPCError
from ncclient.xml_ import to_xml, to_ele
HAS_NCCLIENT = True
NCCLIENT_IMP_ERR = None
except (ImportError, AttributeError) as err: # paramiko and gssapi are incompatible and raise AttributeError not ImportError
HAS_NCCLIENT = False
NCCLIENT_IMP_ERR = err
try:
from lxml.etree import Element, SubElement, tostring, fromstring
except ImportError:
from xml.etree.ElementTree import Element, SubElement, tostring, fromstring
def ensure_connected(func):
@wraps(func)
def wrapped(self, *args, **kwargs):
if not self._connection._connected:
self._connection._connect()
return func(self, *args, **kwargs)
return wrapped
def ensure_ncclient(func):
@wraps(func)
def wrapped(self, *args, **kwargs):
if not HAS_NCCLIENT:
raise AnsibleError("%s: %s" % (missing_required_lib('ncclient'), to_native(NCCLIENT_IMP_ERR)))
return func(self, *args, **kwargs)
return wrapped
class NetconfBase(AnsiblePlugin):
"""
A base class for implementing Netconf connections
.. note:: Unlike most of Ansible, nearly all strings in
:class:`NetconfBase` plugins are byte strings. This is because of
how close to the underlying platform these plugins operate. Remember
to mark literal strings as byte string (``b"string"``) and to use
:func:`~ansible.module_utils._text.to_bytes` and
:func:`~ansible.module_utils._text.to_text` to avoid unexpected
problems.
List of supported rpc's:
:get: Retrieves running configuration and device state information
:get_config: Retrieves the specified configuration from the device
:edit_config: Loads the specified commands into the remote device
:commit: Load configuration from candidate to running
:discard_changes: Discard changes to candidate datastore
:validate: Validate the contents of the specified configuration.
:lock: Allows the client to lock the configuration system of a device.
:unlock: Release a configuration lock, previously obtained with the lock operation.
:copy_config: create or replace an entire configuration datastore with the contents of another complete
configuration datastore.
:get-schema: Retrieves the required schema from the device
:get_capabilities: Retrieves device information and supported rpc methods
For JUNOS:
:execute_rpc: RPC to be execute on remote device
:load_configuration: Loads given configuration on device
Note: rpc support depends on the capabilities of the remote device.
:returns: Returns output received from remote device as byte string
Note: the 'result' or 'error' from the response should be converted to an
ElementTree object using 'fromstring' to parse the output as an xml doc
'get_capabilities()' returns 'result' as a json string.
Usage:
from ansible.module_utils.connection import Connection
conn = Connection()
data = conn.execute_rpc(rpc)
reply = fromstring(reply)
data = conn.get_capabilities()
json.loads(data)
conn.load_configuration(config=['set system ntp server 1.1.1.1'], action='set', format='text')
"""
__rpc__ = ['get_config', 'edit_config', 'get_capabilities', 'get']
def __init__(self, connection):
self._connection = connection
@property
def m(self):
return self._connection._manager
@ensure_ncclient
@ensure_connected
def rpc(self, name):
"""
RPC to be execute on remote device
:param name: Name of rpc in string format
:return: Received rpc response from remote host
"""
try:
obj = to_ele(name)
resp = self.m.rpc(obj)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
except RPCError as exc:
msg = exc.xml
raise Exception(to_xml(msg))
@ensure_connected
def get_config(self, source=None, filter=None):
"""
Retrieve all or part of a specified configuration
(by default entire configuration is retrieved).
:param source: Name of the configuration datastore being queried, defaults to running datastore
:param filter: This argument specifies the portion of the configuration data to retrieve
:return: Returns xml string containing the RPC response received from remote host
"""
if isinstance(filter, list):
filter = tuple(filter)
if not source:
source = 'running'
resp = self.m.get_config(source=source, filter=filter)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def get(self, filter=None, with_defaults=None):
"""
Retrieve device configuration and state information.
:param filter: This argument specifies the portion of the state data to retrieve
(by default entire state data is retrieved)
:param with_defaults: defines an explicit method of retrieving default values
from the configuration
:return: Returns xml string containing the RPC response received from remote host
"""
if isinstance(filter, list):
filter = tuple(filter)
resp = self.m.get(filter=filter, with_defaults=with_defaults)
response = resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
return response
@ensure_connected
def edit_config(self, config=None, format='xml', target='candidate', default_operation=None, test_option=None, error_option=None):
"""
Loads all or part of the specified *config* to the *target* configuration datastore.
:param config: Is the configuration, which must be rooted in the `config` element.
It can be specified either as a string or an :class:`~xml.etree.ElementTree.Element`.
:param format: The format of configuration eg. xml, text
:param target: Is the name of the configuration datastore being edited
:param default_operation: If specified must be one of { `"merge"`, `"replace"`, or `"none"` }
:param test_option: If specified must be one of { `"test_then_set"`, `"set"` }
:param error_option: If specified must be one of { `"stop-on-error"`, `"continue-on-error"`, `"rollback-on-error"` }
The `"rollback-on-error"` *error_option* depends on the `:rollback-on-error` capability.
:return: Returns xml string containing the RPC response received from remote host
"""
if config is None:
raise ValueError('config value must be provided')
resp = self.m.edit_config(config, format=format, target=target, default_operation=default_operation, test_option=test_option,
error_option=error_option)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def validate(self, source='candidate'):
"""
Validate the contents of the specified configuration.
:param source: Is the name of the configuration datastore being validated or `config` element
containing the configuration subtree to be validated
:return: Returns xml string containing the RPC response received from remote host
"""
resp = self.m.validate(source=source)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def copy_config(self, source, target):
"""
Create or replace an entire configuration datastore with the contents of another complete configuration datastore.
:param source: Is the name of the configuration datastore to use as the source of the copy operation or `config`
element containing the configuration subtree to copy
:param target: Is the name of the configuration datastore to use as the destination of the copy operation
:return: Returns xml string containing the RPC response received from remote host
"""
resp = self.m.copy_config(source, target)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def dispatch(self, rpc_command=None, source=None, filter=None):
"""
Execute rpc on the remote device eg. dispatch('clear-arp-table')
:param rpc_command: specifies rpc command to be dispatched either in plain text or in xml element format (depending on command)
:param source: name of the configuration datastore being queried
:param filter: specifies the portion of the configuration to retrieve (by default entire configuration is retrieved)
:return: Returns xml string containing the RPC response received from remote host
"""
if rpc_command is None:
raise ValueError('rpc_command value must be provided')
resp = self.m.dispatch(fromstring(rpc_command), source=source, filter=filter)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def lock(self, target="candidate"):
"""
Allows the client to lock the configuration system of a device.
:param target: is the name of the configuration datastore to lock,
defaults to candidate datastore
:return: Returns xml string containing the RPC response received from remote host
"""
resp = self.m.lock(target=target)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def unlock(self, target="candidate"):
"""
Release a configuration lock, previously obtained with the lock operation.
:param target: is the name of the configuration datastore to unlock,
defaults to candidate datastore
:return: Returns xml string containing the RPC response received from remote host
"""
resp = self.m.unlock(target=target)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def discard_changes(self):
"""
Revert the candidate configuration to the currently running configuration.
Any uncommitted changes are discarded.
:return: Returns xml string containing the RPC response received from remote host
"""
resp = self.m.discard_changes()
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def commit(self, confirmed=False, timeout=None, persist=None):
"""
Commit the candidate configuration as the device's new current configuration.
Depends on the `:candidate` capability.
A confirmed commit (i.e. if *confirmed* is `True`) is reverted if there is no
followup commit within the *timeout* interval. If no timeout is specified the
confirm timeout defaults to 600 seconds (10 minutes).
A confirming commit may have the *confirmed* parameter but this is not required.
Depends on the `:confirmed-commit` capability.
:param confirmed: whether this is a confirmed commit
:param timeout: specifies the confirm timeout in seconds
:param persist: make the confirmed commit survive a session termination,
and set a token on the ongoing confirmed commit
:return: Returns xml string containing the RPC response received from remote host
"""
resp = self.m.commit(confirmed=confirmed, timeout=timeout, persist=persist)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def get_schema(self, identifier=None, version=None, format=None):
"""
Retrieve a named schema, with optional revision and type.
:param identifier: name of the schema to be retrieved
:param version: version of schema to get
:param format: format of the schema to be retrieved, yang is the default
:return: Returns xml string containing the RPC response received from remote host
"""
resp = self.m.get_schema(identifier, version=version, format=format)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def delete_config(self, target):
"""
delete a configuration datastore
:param target: specifies the name or URL of configuration datastore to delete
:return: Returns xml string containing the RPC response received from remote host
"""
resp = self.m.delete_config(target)
return resp.data_xml if hasattr(resp, 'data_xml') else resp.xml
@ensure_connected
def locked(self, target):
return self.m.locked(target)
@abstractmethod
def get_capabilities(self):
"""
Retrieves device information and supported
rpc methods by device platform and return result
as a string
:return: Netconf session capability
"""
pass
@staticmethod
def guess_network_os(obj):
"""
Identifies the operating system of network device.
:param obj: ncclient manager connection instance
:return: The name of network operating system.
"""
pass
def get_base_rpc(self):
"""
Returns list of base rpc method supported by remote device
:return: List of RPC supported
"""
return self.__rpc__
def put_file(self, source, destination):
"""
Copies file to remote host
:param source: Source location of file
:param destination: Destination file path
:return: Returns xml string containing the RPC response received from remote host
"""
pass
def fetch_file(self, source, destination):
"""
Fetch file from remote host
:param source: Source location of file
:param destination: Source location of file
:return: Returns xml string containing the RPC response received from remote host
"""
pass
def get_device_operations(self, server_capabilities):
"""
Retrieve remote host capability from Netconf server hello message.
:param server_capabilities: Server capabilities received during Netconf session initialization
:return: Remote host capabilities in dictionary format
"""
operations = {}
capabilities = '\n'.join(server_capabilities)
operations['supports_commit'] = ':candidate' in capabilities
operations['supports_defaults'] = ':with-defaults' in capabilities
operations['supports_confirm_commit'] = ':confirmed-commit' in capabilities
operations['supports_startup'] = ':startup' in capabilities
operations['supports_xpath'] = ':xpath' in capabilities
operations['supports_writable_running'] = ':writable-running' in capabilities
operations['supports_validate'] = ':validate' in capabilities
operations['lock_datastore'] = []
if operations['supports_writable_running']:
operations['lock_datastore'].append('running')
if operations['supports_commit']:
operations['lock_datastore'].append('candidate')
if operations['supports_startup']:
operations['lock_datastore'].append('startup')
operations['supports_lock'] = bool(operations['lock_datastore'])
return operations
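# Illustrative mapping (capability names are matched as substrings of the
# full NETCONF capability URNs):
#   get_device_operations([':candidate', ':startup']) ->
#   supports_commit=True, supports_startup=True,
#   lock_datastore=['candidate', 'startup'], supports_lock=True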
# TODO Restore .xml, when ncclient supports it for all platforms
| gpl-3.0 | -5,683,802,926,386,100,000 | 43.293963 | 135 | 0.662894 | false |
ajenta/dj-oydiv | dj_oydiv/views/wsauth.py | 1 | 7354 | import logging
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.debug import sensitive_variables
from django.core.exceptions import PermissionDenied
from django.utils.crypto import constant_time_compare
from django.utils.lru_cache import lru_cache
from django.utils.encoding import force_text
from django.contrib.auth import authenticate
from spyne.decorator import srpc
from spyne.service import ServiceBase
from spyne.model.primitive import Boolean, Unicode
from spyne.protocol.soap import Soap11
from spyne.server.django import DjangoView
from spyne.application import Application
from ..auth.basic import http_basic_credentials, HttpResponseBasicChallenge
from ..utils.request import get_client_ip
from ..config import config
logger = logging.getLogger(__name__)
__all__ = [
"portal_wsauth_as_view", "settings_portal_authenticator",
"deny_all"
]
# The class generation here is expensive, so cache its return value.
@lru_cache()
def portal_wsauth_as_view(user_authenticator, portal_authenticator):
"""
Create a view that validates a user login request from a portal.
``portal_authenticator`` and ```user_authenticator`` are both callbacks
to functions taking two positional arguments, (``username``, ``password``)
```user_authenticator`` authenticates a user against the Portal.
``portal_authenticator`` validates whether the VidyoPortal itself is allowed
to request validation for a particular user (essentially acting like an API key).
Authentication callbacks should avoid setting cookies, or actually authenticating
a user to the system, but simply check the user/password combination is valid
to avoid leaving uneccessary sessions lying around.
e.g.
>>> from django.contrib.auth.models import User
>>> def my_callback(username, password):
... try:
... user = User.objects.get(username=username)
... if not user.is_active or not user.is_staff:
... return False
... return user.check_password(password)
... except User.DoesNotExist:
... return False
"""
@sensitive_variables('username', 'password')
def authenticator_wrapper(callback):
"""
Close over the original callback with a simple exception handling wrapper
to protect against inadvertent information leakage in case the supplied
callback gets its knickers in a twist, and raises something like ``DoesNotExist``
which will subsequently be marshalled into a 500 rather than returning False.
That situation will allow for very easy object fingerprinting from remote.
We don't want it.
"""
def inner(username, password):
try:
return callback(username, password)
# Catching bare ``Exception`` here
except Exception as e: # NOQA
logger.exception("user callback failed with exception:%s", e)
return False
return inner
user_authenticator = authenticator_wrapper(user_authenticator)
portal_authenticator = authenticator_wrapper(portal_authenticator)
class VidyoPortalAuthSoapView(DjangoView):
"""
Checks the user/password header sent as part of the HTTP basic auth
request against the user's VidyoPortal validator, before dispatching
to the Spyne SOAP handler.
"""
@sensitive_variables('username', 'password')
def dispatch(self, request, *args, **kwargs):
# Vidyo sends the credentials in the HTTP_AUTHORIZATION header
if request.META['REQUEST_METHOD'] == 'POST':
try:
username, password = map(
force_text, http_basic_credentials(request)
)
except (AttributeError, ValueError):
return HttpResponseBasicChallenge()
if not portal_authenticator(username, password):
logger.info(
"failed authentication for '%s' from %s ",
username,
get_client_ip(request)
)
return HttpResponseBasicChallenge()
return super(VidyoPortalAuthSoapView, self).dispatch(request, *args, **kwargs)
return VidyoPortalAuthSoapView.as_view(
application=Application([
type(
'AuthenticationService',
(ServiceBase,),
{
'AuthenticationRequest': srpc(
Unicode,
Unicode,
_returns=Boolean,
_out_message_name='AuthenticationResponse',
_out_variable_name='passed'
)(user_authenticator)
}
)],
tns='http://ws.vidyo.com/authentication',
in_protocol=Soap11(validator='lxml'),
out_protocol=Soap11()
)
)
@sensitive_variables()
def settings_portal_authenticator(username, password):
"""
Check an incoming portal webservices authentication request against
a user and password from the project's settings.py
``settings.OYDIV_WSAUTH_CREDENTIALS``, should be a dictionary-like object of
password values indexed by username.
"""
try:
return constant_time_compare(config.WSAUTH_PORTAL_CREDENTIALS[username], password)
except KeyError:
logger.info("rejected portal auth request for %r", username)
return False
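# Illustrative settings entry (names and values are placeholders):
#   OYDIV_WSAUTH_CREDENTIALS = {'portal.example.com': 's3cret-api-key'}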
_deny = lambda u, p: False
deny_all = csrf_exempt(
portal_wsauth_as_view(_deny, settings_portal_authenticator)
)
deny_all.__doc__ = (
"""
A default deny policy for all incoming authentication requests.
The Portal itself is verified using ``settings_portal_authenticator``.
This is useful if you're handling all endpoint authentication yourself with
the webservices API, and will provide users an extra barrier to accessing
their account configuration on the portal.
"""
)
def _django_auth_validator(staff_only):
def inner(username, password):
try:
user = authenticate(username=username, password=password)
except PermissionDenied:
return False
if not user:
return False
if user.is_anonymous():
return False
if staff_only and not user.is_staff:
return False
if user.is_active:
return True
return False
return inner
django_auth_staffonly_view = csrf_exempt(
portal_wsauth_as_view(_django_auth_validator(staff_only=True), settings_portal_authenticator)
)
django_auth_staffonly_view.__doc__ = (
"""
Try and authenticate a user with ``django.contrib.auth``.
If the user is not a staff member, the portal authentication
will be denied.
Portal SOAP calls are validated with ``settings_portal_authenticator``
"""
)
django_auth_user_view = csrf_exempt(
portal_wsauth_as_view(_django_auth_validator(staff_only=False), settings_portal_authenticator)
)
django_auth_user_view.__doc__ = (
"""
Try and authenticate a user with ``django.contrib.auth``.
Portal SOAP calls are validated with ``settings_portal_authenticator``.
"""
)
| bsd-3-clause | 2,528,110,469,254,684,700 | 35.405941 | 98 | 0.649171 | false |
grplyler/todo | todo.py | 1 | 7413 | #!/usr/bin/python
__author__ = "Ryan Plyler"
__version__ = 0.2
import sys
import json
import os
########################################################################
# Config
########################################################################
TODO_FILENAME = os.path.join(os.getcwd(), '.todo.list')
########################################################################
# Global Classes: bcolors Status
########################################################################
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
RECV = '\033[33m' # yellow
FAIL = '\033[91m'
ENDC = '\033[0m'
INFO = '\033[37m'
WHITE = '\033[97m'
class Status:
PENDING = "PENDING"
DONE = " DONE"
########################################################################
# Helper Fuctions: usage() nextID()
########################################################################
def usage():
print "\nUsage:"
print "\ttodo | List the todos for the current directory"
print "\ttodo show|list | Same as 'todo'"
print "\ttodo new <new todo> | Create a new todo"
print "\ttodo complete|done <todo-id> | Mark a todo as complete"
print "\ttodo remove|rm|delete|del <todo-id> | Remove a todo"
print "\ttodo undo <todo-id> | Undo a 'DONE' todo. Make it pending again."
print "\ttodo purge | Delete all todos and todo savedata for the cwd"
print "\ttodo help | Show this help"
print
def getLineCount():
with open(TODO_FILENAME) as f:
lines = f.readlines()
return len(lines)
def readlines():
with open(TODO_FILENAME) as f:
lines = f.readlines()
linecount = len(lines)
return lines, linecount
def nextID():
"""Get the the number of what the next todo ID should be"""
return getLineCount() + 1
########################################################################
# Core functionality functions:
# newTodo() removeTodo(id) completeTodo(id) undoTodo(id)
# showTodos()
########################################################################
def newTodo(content):
formmated = bcolors.WHITE + "[" + "%id" + "] " + bcolors.ENDC + Status.PENDING + ": " + content + "\n"
with open(TODO_FILENAME, "a") as f:
f.write(formmated)
print "Added todo #%d" % getLineCount()
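# A stored line looks like "[%id] PENDING: some task text" (colour codes
# omitted); the literal "%id" placeholder is substituted with the current
# line number by showTodos(), so IDs always track the file order.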
def removeTodo(id):
id = int(id)
lineCounter = 1
lines, linecount = readlines()
todoRemoved = False
newFile = open(TODO_FILENAME, 'w')
for line in lines:
# Write all the lines back to the file except the line number of id
if lineCounter is not id:
newFile.write(line)
else:
todoRemoved = True
# increment the line counter
lineCounter += 1
newFile.close()
if todoRemoved:
print "Removed todo #%s" % id
else:
print "No todo #%s found" % id
def completeTodo(id):
id = int(id)
lines, linecount = readlines()
todoCompleted = False
newFile = open(TODO_FILENAME, 'w')
lineCounter = 1
for line in lines:
# Write all the lines back to the file except the line number of id
if lineCounter == id:
line = line.replace(Status.PENDING, Status.DONE)
newFile.write(line)
todoCompleted = True
else:
newFile.write(line)
# increment the line counter
lineCounter += 1
newFile.close()
if todoCompleted:
print "Completed todo #%s" % id
else:
print "No todo #%s found." % id
def undoTodo(id):
id = int(id)
lines, linecount = readlines()
todoCompleted = False
newFile = open(TODO_FILENAME, 'w')
lineCounter = 1
for line in lines:
# Write all the lines back to the file except the line number of id
if lineCounter == id:
line = line.replace(Status.DONE, Status.PENDING)
newFile.write(line)
todoCompleted = True
else:
newFile.write(line)
# increment the line counter
lineCounter += 1
newFile.close()
if todoCompleted:
print "Undid todo #%s" % id
else:
print "No todo #%s found." % id
def showTodos():
lineCounter = 1
try:
lines, linecount = readlines()
for line in lines:
# if Status.PENDING in line:
# line = line.replace(Status.PENDING, bcolors.FAIL + Status.PENDING + bcolors.ENDC)
# elif Status.DONE in line:
# line = line.replace(Status.DONE, bcolors.OKGREEN + Status.DONE + bcolors.ENDC)
# sys.stdout.write(line)
# Auto assign the todo ID based on the the line its on in the todo.list file
line = line.replace("%id", str(lineCounter))
if Status.PENDING in line:
line = line.replace(Status.PENDING, bcolors.FAIL + Status.PENDING + bcolors.ENDC)
elif Status.DONE in line:
line = line.replace(Status.DONE, bcolors.OKGREEN + Status.DONE + bcolors.ENDC)
sys.stdout.write(line)
lineCounter += 1
except IOError:
print "No todos created for this directory yet"
########################################################################
# Parse command line arguments
########################################################################
if len(sys.argv) == 1:
showTodos()
elif sys.argv[1] == "new":
content = " ".join(sys.argv[2:])
newTodo(content)
elif sys.argv[1] == "complete" or sys.argv[1] == "done":
completeTodo(sys.argv[2])
elif sys.argv[1] == "undo":
undoTodo(sys.argv[2])
elif sys.argv[1] == "remove" or sys.argv[1] == "delete" or sys.argv[1] == "del" or sys.argv[1] == "rm":
if len(sys.argv) < 3:
print "You must specify a todo ID to remove."
else:
removeTodo(sys.argv[2])
elif sys.argv[1] == "show" or sys.argv[1] == "list":
showTodos()
elif sys.argv[1] == "help":
usage()
elif sys.argv[1] == "purge":
ans = raw_input("Are you sure you want to delete and remove all traces of todos? (y/n): ")
if ans == 'y':
if os.path.isfile(TODO_FILENAME):
os.remove(str(TODO_FILENAME))
print "Removed todo file"
else:
print "Could not delete todo file"
else:
print "Aborting deletion"
else:
print "Unknown operation: " + sys.argv[1]
usage()
########################################################################
# Cleanup and exit
########################################################################
| apache-2.0 | -809,055,889,478,442,900 | 28.652 | 106 | 0.499798 | false |
GovReady/govready-q | controls/templatetags/system_tags.py | 1 | 1134 | from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def json(value):
# Encode value as JSON for inclusion within a <script></script> tag.
# Since we are not using |escapejs (which would only be valid within
# strings), we must instead ensure that the literal "</script>" doesn't
# occur within the JSON content since that would break out of the script
# tag. This could occur both in string values and in the keys of objects.
# Since < and > can only occur within strings (i.e. they're not valid
# characters otherwise), we can JSON-escape them after serialization.
import json
value = json.dumps(value, sort_keys=True)
value = value.replace("<", r'\u003c')
value = value.replace(">", r'\u003e') # not necessary but for good measure
value = value.replace("&", r'\u0026') # not necessary but for good measure
return mark_safe(value) # nosec
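# Illustrative template usage ('mydata' is a placeholder context variable):
#   <script>var data = {{ mydata|json }};</script>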
@register.filter(is_safe=True)
def get_item(dictionary, key):
return dictionary.get(key, None)
| gpl-3.0 | -7,210,701,244,562,800,000 | 42.615385 | 78 | 0.716049 | false |
zehpunktbarron/iOSMAnalyzer | scripts/c5_tag_completeness_transport.py | 1 | 7485 | # -*- coding: utf-8 -*-
#!/usr/bin/python2.7
#description :This file creates a plot: Calculates the development of the tag-completeness [%] of all "transport" POIs
#author :Christopher Barron @ http://giscience.uni-hd.de/
#date :19.01.2013
#version :0.1
#usage :python pyscript.py
#==============================================================================
import psycopg2
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
import pylab
# import db connection parameters
import db_conn_para as db
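# db_conn_para is expected to expose the connection parameters used below,
# e.g. (placeholder values):
#   g_my_dbname = 'osm_history'; g_my_username = 'postgres'
#   g_my_hostname = 'localhost'; g_my_dbpassword = 'secret'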
###
### Connect to database with psycopg2. Add arguments from parser to the connection-string
###
try:
conn_string="dbname= %s user= %s host= %s password= %s" %(db.g_my_dbname, db.g_my_username, db.g_my_hostname, db.g_my_dbpassword)
print "Connecting to database\n->%s" % (conn_string)
# Establish the connection to the database via psycopg2
conn = psycopg2.connect(conn_string)
print "Connection to database was established successfully"
except:
print "Connection to database failed"
###
### Execute SQL query
###
# SQL queries can be fired off through this cursor object
cur = conn.cursor()
# Execute SQL query. For more than one row use three '"'
try:
cur.execute("""
--
-- Transport and traffic
--
SELECT
generate_series,
-- START Key "name"
(CASE WHEN
cnt_total <> 0
THEN
ROUND((cnt_name * 100.00 / cnt_total), 2)
ELSE 0
END)::float AS perc_name,
-- END Key "name"
-- START Key "operator"
(CASE WHEN
cnt_total <> 0
THEN
ROUND((cnt_operator * 100.00 / cnt_total), 2)
ELSE 0
END)::float AS perc_operator
-- END Key "operator"
FROM
(SELECT generate_series,
(SELECT
count(distinct id)
FROM
(SELECT
id,
skeys(tags)
FROM
hist_plp h
WHERE
-- Transport and traffic
(
((tags->'amenity') = 'bicycle_parking') OR
((tags->'amenity') = 'bicycle_rental') OR
((tags->'amenity') = 'bus_station') OR
((tags->'amenity') = 'car_rental') OR
((tags->'amenity') = 'car_sharing') OR
((tags->'amenity') = 'car_wash') OR
((tags->'amenity') = 'ev_charging') OR
((tags->'amenity') = 'ferry_terminal') OR
((tags->'amenity') = 'fuel') OR
((tags->'amenity') = 'grit_bin') OR
((tags->'amenity') = 'parking') OR
((tags->'amenity') = 'parking_entrance') OR
((tags->'amenity') = 'parking_space') OR
((tags->'amenity') = 'taxi')
)
AND visible = 'true'
AND
(version = (SELECT max(version) FROM hist_plp WHERE typ = h.typ AND h.id = hist_plp.id) AND
( valid_from <= generate_series AND (valid_to >= generate_series OR valid_to is null))
AND minor = (SELECT max(minor) from hist_plp where typ = h.typ AND h.id = hist_plp.id AND h.version = hist_plp.version AND
( valid_from <= generate_series AND (valid_to >= generate_series OR valid_to is null))))
) AS foo
WHERE
skeys = 'name'
) AS cnt_name,
-- START operator
(SELECT
count(distinct id)
FROM
(SELECT
id,
skeys(tags)
FROM
hist_plp h
WHERE
-- Transport and traffic
(
((tags->'amenity') = 'bicycle_parking') OR
((tags->'amenity') = 'bicycle_rental') OR
((tags->'amenity') = 'bus_station') OR
((tags->'amenity') = 'car_rental') OR
((tags->'amenity') = 'car_sharing') OR
((tags->'amenity') = 'car_wash') OR
((tags->'amenity') = 'ev_charging') OR
((tags->'amenity') = 'ferry_terminal') OR
((tags->'amenity') = 'fuel') OR
((tags->'amenity') = 'grit_bin') OR
((tags->'amenity') = 'parking') OR
((tags->'amenity') = 'parking_entrance') OR
((tags->'amenity') = 'parking_space') OR
((tags->'amenity') = 'taxi')
)
AND visible = 'true'
AND
(version = (SELECT max(version) FROM hist_plp WHERE typ = h.typ AND h.id = hist_plp.id) AND
( valid_from <= generate_series AND (valid_to >= generate_series OR valid_to is null))
AND minor = (SELECT max(minor) from hist_plp where typ = h.typ AND h.id = hist_plp.id AND h.version = hist_plp.version AND
( valid_from <= generate_series AND (valid_to >= generate_series OR valid_to is null))))
) AS foo
WHERE
skeys = 'operator'
) AS cnt_operator,
-- START total
(SELECT
count(distinct id)
FROM
(SELECT
id,
skeys(tags)
FROM
hist_plp h
WHERE
-- Transport and traffic
(
((tags->'amenity') = 'bicycle_parking') OR
((tags->'amenity') = 'bicycle_rental') OR
((tags->'amenity') = 'bus_station') OR
((tags->'amenity') = 'car_rental') OR
((tags->'amenity') = 'car_sharing') OR
((tags->'amenity') = 'car_wash') OR
((tags->'amenity') = 'ev_charging') OR
((tags->'amenity') = 'ferry_terminal') OR
((tags->'amenity') = 'fuel') OR
((tags->'amenity') = 'grit_bin') OR
((tags->'amenity') = 'parking') OR
((tags->'amenity') = 'parking_entrance') OR
((tags->'amenity') = 'parking_space') OR
((tags->'amenity') = 'taxi')
)
AND visible = 'true'
AND
(version = (SELECT max(version) FROM hist_plp WHERE typ = h.typ AND h.id = hist_plp.id) AND
( valid_from <= generate_series AND (valid_to >= generate_series OR valid_to is null))
AND minor = (SELECT max(minor) from hist_plp where typ = h.typ AND h.id = hist_plp.id AND h.version = hist_plp.version AND
( valid_from <= generate_series AND (valid_to >= generate_series OR valid_to is null))))
) AS foo
) AS cnt_total
-- END total
FROM generate_series(
(SELECT date_trunc ('month',(
SELECT MIN(valid_from) FROM hist_plp)) as foo), -- Select minimum date (month)
(SELECT MAX(valid_from) FROM hist_plp)::date, -- Select maximum date
interval '1 month')
) AS foo2
;
""")
# Getting a list of tuples from the database-cursor (cur)
data_tuples = []
for row in cur:
data_tuples.append(row)
except:
print "Query could not be executed"
###
### Plot (Multiline-Chart)
###
# Datatypes of the returning data
datatypes = [('date', 'S20'),('col2', 'double'), ('col3', 'double')]
# Data-tuple and datatype
data = np.array(data_tuples, dtype=datatypes)
# Extract the percentage columns ('date' holds the timestamps)
col2 = data['col2']
col3 = data['col3']
# Converts date to a manageable date-format for matplotlib
dates = mdates.num2date(mdates.datestr2num(data['date']))
fig, ax = plt.subplots()
# set figure size
fig.set_size_inches(12,8)
# Create linechart
plt.plot(dates, col2, color = '#2dd700', linewidth=2, label='name = *')
plt.plot(dates, col3, color = '#ff6700', linewidth=2, linestyle='dashed', label='operator = *')
# Forces the plot to start from 0 and end at 100
pylab.ylim([0,100])
# Place a gray dashed grid behind the thicks (only for y-axis)
ax.yaxis.grid(color='gray', linestyle='dashed')
# Set this grid behind the thicks
ax.set_axisbelow(True)
# Rotate x-labels on the x-axis
fig.autofmt_xdate()
# Label x and y axis
plt.xlabel('Date')
plt.ylabel('Tag-Completeness [%]')
# Locate legend on the plot (http://matplotlib.org/users/legend_guide.html#legend-location)
# Shink current axis by 20%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.9, box.height * 0.9])
# Put a legend to the right of the current axis and reduce the font size
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop={'size':9})
# Plot-title
plt.title('Development of the Tag-Completeness of all Transport POIs')
# Save plot to *.jpeg-file
plt.savefig('pics/c5_tag_completeness_transport.jpeg')
plt.clf()
| gpl-3.0 | -5,952,892,135,643,442,000 | 27.568702 | 131 | 0.621777 | false |
jonashaag/gpyconf | docs/source/conf.py | 1 | 6613 | # -*- coding: utf-8 -*-
#
# gpyconf documentation build configuration file, created by
# sphinx-quickstart on Wed Aug 19 22:09:56 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
sys.path.append(os.path.abspath('_ext'))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest',
'sphinx.ext.intersphinx', 'sphinx.ext.todo',
'sphinx.ext.graphviz', 'emitrole']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'gpyconf'
copyright = u'2008-2010 Jonas Haag'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2'
# The full version, including alpha/beta/rc tags.
release = '0.2b'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'gpyconfdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'gpyconf.tex', u'gpyconf documentation',
u'Jonas Haag', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| lgpl-2.1 | 8,782,303,980,827,583,000 | 31.737624 | 80 | 0.710721 | false |
stmobo/Machine-Learning | data-scripts/danbooru-get-prep.py | 1 | 1760 | # Requires: pybooru library
# Performs 'stage 3' input retrieval: image fetching and packing
import argparse
import csv
from pybooru import Danbooru
import os
import sys
parser = argparse.ArgumentParser()
parser.add_argument('mapfile')
parser.add_argument('postfile')
parser.add_argument('outfile')
args = parser.parse_args()
tag_mapping = {}
with open(args.mapfile, newline='') as mapfile:
reader = csv.reader(mapfile)
tag_mapping = dict(reader)
n_posts_processed = 0
with open(args.postfile, newline='') as postfile, open(args.outfile, mode='w', newline='') as outfile:
reader = csv.reader(postfile)
writer = csv.writer(outfile)
client = Danbooru('danbooru')
for row in reader:
post_id = int(row[0])
# Get an image url:
normal_url = row[1]
large_url = row[2]
preview_url = row[3]
preferred_url = normal_url
if preferred_url.lstrip() == '':
preferred_url = large_url
if preferred_url.lstrip() == '':
post_details = client.post_show(post_id)
if 'source' in post_details:
print("Got source for post {}".format(post_id))
preferred_url = 'http://danbooru.donmai.us'+post_details['source']
else:
continue # skip this image
# Convert tags to tag indexes:
tag_idxs = []
tags = row[4:-1]
for tag in tags:
if tag in tag_mapping:
tag_idxs.append(tag_mapping[tag])
outrow = [post_id, preferred_url]
outrow.extend(tag_idxs)
writer.writerow(outrow)
if n_posts_processed % 20 == 0:
print("Processed {} posts...".format(n_posts_processed))
n_posts_processed += 1
| mit | -5,047,568,655,934,764,000 | 26.5 | 102 | 0.598295 | false |
pydotorg/pypi | admin.py | 1 | 12969 |
import sys, os, urllib, StringIO, traceback, cgi, binascii, getopt, shutil
import zipfile, gzip, tarfile
#sys.path.append('/usr/local/pypi/lib')
# Filesystem Handling
import fs.errors
import fs.multifs
import fs.osfs
import redis
import rq
prefix = os.path.dirname(__file__)
sys.path.insert(0, prefix)
CONFIG_FILE = os.environ.get("PYPI_CONFIG", os.path.join(prefix, 'config.ini'))
import store, config
def set_password(store, name, pw):
""" Reset the user's password and send an email to the address given.
"""
user = store.get_user(name.strip())
if user is None:
raise ValueError, 'user name unknown to me'
store.store_user(user['name'], pw.strip(), user['email'], None)
print 'done'
def remove_spam(store, namepat, confirm=False):
'''Remove packages that match namepat (SQL wildcards).
The packages will be removed. Additionally the user that created them will
have their password set to 'spammer'.
Pass the additional command-line argument "confirm" to perform the
deletions and modifications.
This will additionally display the IP address(es) of the spam submissions.
'''
assert confirm in (False, 'confirm')
cursor = st.get_cursor()
cursor.execute("""
select packages.name, submitted_date, submitted_by, submitted_from
from packages, journals
where packages.name LIKE %s
and packages.name = journals.name
and action = 'create'
""", (namepat,))
if not confirm:
print 'NOT taking any action; add "confirm" to the command line to act'
users = set()
ips = set()
for name, date, by, ip in cursor.fetchall():
ips.add(ip)
users.add(by)
print 'delete', name, 'submitted on', date
if confirm:
store.remove_package(name)
print 'IP addresses of spammers to possibly block:'
for ip in ips:
print ' ', ip
for user in users:
print 'disable user', user
if confirm:
cursor.execute("update accounts_user set password='spammer' where name=%s",
(user,))
def remove_spammer(store, name, confirm=False):
user = store.get_user(name)
if not user:
sys.exit('user %r does not exist' % name)
cursor = st.get_cursor()
cursor.execute("""
select distinct(submitted_from)
from journals
where submitted_by = %s
""", (name,))
print 'IP addresses of spammers to possibly block:'
for (ip,) in cursor.fetchall():
print ' ', ip
for p in store.get_user_packages(name):
print p['package_name']
if confirm:
store.remove_package(p['package_name'])
if confirm:
cursor.execute("update accounts_user set password='spammer' where name=%s",
(name,))
def remove_package(store, name):
''' Remove a package from the database
'''
store.remove_package(name)
print 'done'
def add_owner(store, package, owner):
user = store.get_user(owner)
if user is None:
raise ValueError, 'user name unknown to me'
if not store.has_package(package):
raise ValueError, 'no such package'
store.add_role(owner, 'Owner', package)
def delete_owner(store, package, owner):
user = store.get_user(owner)
if user is None:
raise ValueError, 'user name unknown to me'
if not store.has_package(package):
raise ValueError, 'no such package'
for role in store.get_package_roles(package):
if role['role_name']=='Owner' and role['user_name']==owner:
break
else:
raise ValueError, "user is not currently owner"
store.delete_role(owner, 'Owner', package)
def add_classifier(st, classifier):
''' Add a classifier to the trove_classifiers list
'''
cursor = st.get_cursor()
cursor.execute("select max(id) from trove_classifiers")
id = cursor.fetchone()[0]
if id:
id = int(id) + 1
else:
id = 1
fields = [f.strip() for f in classifier.split('::')]
for f in fields:
assert ':' not in f
levels = []
for l in range(2, len(fields)):
c2 = ' :: '.join(fields[:l])
store.safe_execute(cursor, 'select id from trove_classifiers where classifier=%s', (c2,))
l = cursor.fetchone()
if not l:
raise ValueError, c2 + " is not a known classifier"
levels.append(l[0])
levels += [id] + [0]*(3-len(levels))
store.safe_execute(cursor, 'insert into trove_classifiers (id, classifier, l2, l3, l4, l5) '
'values (%s,%s,%s,%s,%s,%s)', [id, classifier]+levels)
def rename_package(store, old, new):
''' Rename a package. '''
if not store.has_package(old):
raise ValueError, 'no such package'
if store.has_package(new):
raise ValueError, new+' exists'
store.rename_package(old, new)
print "Please give www-data permissions to all files of", new
def add_mirror(store, root, user):
''' Add a mirror to the mirrors list
'''
store.add_mirror(root, user)
print 'done'
def delete_mirror(store, root):
''' Delete a mirror
'''
store.delete_mirror(root)
print 'done'
def delete_old_docs(config, store):
'''Delete documentation directories for packages that have been deleted'''
for i in os.listdir(config.database_docs_dir):
if not store.has_package(i):
path = os.path.join(config.database_docs_dir, i)
print "Deleting", path
shutil.rmtree(path)
def keyrotate(config, store):
'''Rotate server key'''
key_dir = config.key_dir
prefixes = (os.path.join(key_dir, 'privkey'), os.path.join(key_dir,'pubkey'))
def rename_if_exists(oldsuffix, newsuffix):
for p in prefixes:
if os.path.exists(p+oldsuffix):
os.rename(p+oldsuffix, p+newsuffix)
# 1. generate new new key
os.system('openssl dsaparam -out /tmp/param 2048')
os.system('openssl gendsa -out %s/privkey.newnew /tmp/param' % key_dir)
os.system('openssl dsa -in %s/privkey.newnew -pubout -out %s/pubkey.newnew' % (key_dir, key_dir))
os.unlink('/tmp/param')
# 2. delete old old key
for p in prefixes:
if os.path.exists(p+'.old'):
os.unlink(p+'.old')
# 3. rotate current key -> old key
rename_if_exists('', '.old')
# 4. rotate new key -> current key
rename_if_exists('.new', '')
# 5. rotate new new key -> new key
rename_if_exists('.newnew', '.new')
# 6. restart web server
os.system('/usr/sbin/apache2ctl graceful')
# 7. log rotation
store.log_keyrotate()
def merge_user(store, old, new):
c = store.get_cursor()
if not store.get_user(old):
print "Old does not exist"
raise SystemExit
if not store.get_user(new):
print "New does not exist"
raise SystemExit
c.execute('update openids set name=%s where name=%s', (new, old))
c.execute('update sshkeys set name=%s where name=%s', (new, old))
c.execute('update roles set user_name=%s where user_name=%s', (new, old))
c.execute('delete from rego_otk where name=%s', (old,))
c.execute('update journals set submitted_by=%s where submitted_by=%s', (new, old))
c.execute('update mirrors set user_name=%s where user_name=%s', (new, old))
c.execute('update comments set user_name=%s where user_name=%s', (new, old))
c.execute('update ratings set user_name=%s where user_name=%s', (new, old))
c.execute('update comments_journal set submitted_by=%s where submitted_by=%s', (new, old))
c.execute('delete from accounts_email where user_id=(select id from accounts_user where username=%s)', (old,))
c.execute('delete from accounts_user where username=%s', (old,))
def rename_user(store, old, new):
c = store.get_cursor()
old_user = store.get_user(old)
if not old_user:
raise SystemExit("Old does not exist")
if store.get_user(new):
raise SystemExit("New user already exists!")
c.execute(
'UPDATE accounts_user SET username = %s WHERE username = %s',
(new, old),
)
c.execute('update openids set name=%s where name=%s', (new, old))
c.execute('update sshkeys set name=%s where name=%s', (new, old))
c.execute('update roles set user_name=%s where user_name=%s', (new, old))
c.execute('delete from rego_otk where name=%s', (old,))
c.execute('update journals set submitted_by=%s where submitted_by=%s', (new, old))
c.execute('update mirrors set user_name=%s where user_name=%s', (new, old))
c.execute('update comments set user_name=%s where user_name=%s', (new, old))
c.execute('update ratings set user_name=%s where user_name=%s', (new, old))
c.execute('update comments_journal set submitted_by=%s where submitted_by=%s', (new, old))
def show_user(store, name):
user = store.get_user(name)
if not user:
sys.exit('user %r does not exist' % name)
for key in user.keys():
print '%s: %s' % (key, user[key])
for p in store.get_user_packages(name):
print '%s: %s' % (p['package_name'], p['role_name'])
def nuke_nested_lists(store, confirm=False):
c = store.get_cursor()
c.execute("""select name, version, summary from releases
where lower(name) like '%nester%' or
lower(summary) like '%nested lists%' or
lower(summary) like '%geschachtelter listen%'""")
hits = {}
for name, version, summary in c.fetchall():
if "printer of nested lists" in summary:
hits[name] = summary
continue
if "Einfache Ausgabe geschachtelter Listen" in summary:
hits[name] = summary
continue
for f in store.list_files(name, version):
path = store.gen_file_path(f['python_version'], name, f['filename'])
if not store.package_fs.exists(path):
print "PACKAGE %s FILE %s MISSING" % (name, path)
continue
contents = StringIO.StringIO(store.package_fs.getcontents(path))
if path.endswith('.zip'):
z = zipfile.ZipFile(contents)
for i in z.infolist():
if not i.filename.endswith('.py'):
continue
src = z.read(i.filename)
if 'def print_lol' in src or 'def print_lvl' in src:
hits[name] = summary
elif path.endswith('.tar.gz'):
z = gzip.GzipFile(path, fileobj=contents)
t = tarfile.TarFile(fileobj=z)
for i in t.getmembers():
if not i.name.endswith('.py'): continue
f = t.extractfile(i.name)
src = f.read()
if 'def print_lol' in src or 'def print_lvl' in src:
hits[name] = summary
for name in hits:
if confirm:
store.remove_package(name)
print '%s: %s' % (name, hits[name])
if confirm:
print 'removed %d packages' % len(hits)
else:
print 'WOULD HAVE removed %d packages' % len(hits)
if __name__ == '__main__':
config = config.Config(CONFIG_FILE)
if config.queue_redis_url:
queue_redis = redis.Redis.from_url(config.queue_redis_url)
queue = rq.Queue(connection=queue_redis)
else:
queue = None
package_fs = fs.multifs.MultiFS()
package_fs.addfs(
"local", fs.osfs.OSFS(config.database_files_dir),
write=True,
)
st = store.Store(config, queue=queue, package_fs=package_fs)
st.open()
command = sys.argv[1]
args = (st, ) + tuple(sys.argv[2:])
try:
if command == 'password':
set_password(*args)
elif command == 'rmpackage':
remove_package(*args)
elif command == 'rmspam':
remove_spam(*args)
elif command == 'rmspammer':
remove_spammer(*args)
elif command == 'addclass':
add_classifier(*args)
print 'done'
elif command == 'addowner':
add_owner(*args)
elif command == 'delowner':
delete_owner(*args)
elif command == 'rename':
rename_package(*args)
elif command == 'addmirror':
add_mirror(*args)
elif command == 'delmirror':
delete_mirror(*args)
elif command == 'delolddocs':
delete_old_docs(config, *args)
elif command == 'send_comments':
send_comments(*args)
elif command == 'mergeuser':
merge_user(*args)
elif command == 'renameuser':
rename_user(*args)
elif command == 'nuke_nested_lists':
nuke_nested_lists(*args)
elif command == 'keyrotate':
keyrotate(config, *args)
elif command == 'user':
show_user(*args)
else:
print "unknown command '%s'!"%command
st.changed()
finally:
st.close()
| bsd-3-clause | 6,737,148,901,736,497,000 | 34.531507 | 114 | 0.595882 | false |
mfinzi/Numerical-Quantum | fullApp.py | 1 | 20379 | # Author: Marc Finzi
# Last updated: 1/14/2016
# Contactable at [email protected]
import globalV
import os,sys,time
import FileDialog
import Tkinter as tk
import ttk
import numpy as np
from matplotlib import use; use("TkAgg")
from matplotlib.pyplot import xkcd
from matplotlib.figure import Figure
#from matplotlib import patheffects
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
from scipy.ndimage import zoom
from scipy.signal import tukey
# Home made imports
from solvers import *
from animator import *
import creationFunctions
class topApp(tk.Tk):
def __init__(self):
tk.Tk.__init__(self)
#self.geometry("650x500+300+300")
tk.Tk.wm_title(self,"Numerical SE Sim")
#self.resizable(0,0)
#tk.Tk.wm_iconbitmap(self,'ih.ico') #Why does this kill the render?
container = tk.Frame(self, relief="sunken")
container.pack(side="top", padx=5,pady=5,fill="both",expand=True)
#for i in np.arange(5):
# container.grid_rowconfigure(i,pad=3, weight=1)
# container.grid_columnconfigure(i,pad=3, weight=1)
self.mainPageFrame = MainPage(container,self)
#self.mainPageFrame.grid(row=0,column=0,sticky="nsew")
self.mainPageFrame.tkraise()
def quit(self):
self.destroy()
sys.exit()
class MainPage(tk.Frame):
def __init__(self, parent, controller):
tk.Frame.__init__(self, parent)
self.controller = controller
# The main page frame owns the two figures, the Animator and the Graph communicate
# through the figures
self.psiFig = Figure(figsize=(8,6.5), dpi=100)#10# 8,8
# Initializing the particle into a default config, editable later
#self.myPart = Particle1d((lambda x: x*x*0.5)(np.linspace(-10,10,2000)),gaussian(0,1,10)(np.linspace(-10,10,2000)),
# N=2000,Lx=20.0,dt=.01,SOLVER=EigenSolver1d)
X = np.linspace(-10,10,1024)
V = creationFunctions.squareBarr(x0=0.1,width=.2,height=100)(X)
V[np.abs(X)>9.9] = 10000
self.myPart = Particle1d(V,creationFunctions.gaussian(-5,1,10)(X),
N=1024,Lx=20.0,dt=.01,SOLVER=EigenSolver1d)
# This is the object that will push updates to the figure
self.anim = Animator(self.myPart,self.psiFig)
#self.toggleXKCD()
self.initGrid()
figContainer = tk.Frame(self, relief="sunken"); figContainer.pack(side=tk.LEFT)
self.drawSpace = DrawableGraph(self.psiFig,figContainer)
self.drawSpace.get_tk_widget().pack(side=tk.TOP)#grid(row=1,column=0)#,
#rowspan=4,padx=5)#,sticky='EN')#pack(side=tk.BOTTOM)
nonFigContainer = tk.Frame(self, relief="sunken")
nonFigContainer.pack(side=tk.RIGHT)
#constantContainer = tk.Frame(nonFigContainer, relief="sunken")
nuclearContainer = tk.Frame(nonFigContainer, relief = "sunken"); nuclearContainer.pack(side = tk.TOP)
label = tk.Label(nuclearContainer,text="Control Panel", font=("Comic",18)); label.pack()
quitButton = ttk.Button(nuclearContainer,text="Quit",command=self.controller.quit); quitButton.pack(side=tk.RIGHT)
resetButton = ttk.Button(nuclearContainer,text='Reset',command = ()); #TODO hook up reset
resetButton.pack(side = tk.TOP)
# Display |psi| or real(psi),im(psi)
animOptionsContainer = tk.Frame(nonFigContainer, relief = "sunken"); animOptionsContainer.pack()
subAnimCont = tk.Frame(animOptionsContainer, relief = "sunken"); subAnimCont.pack()
updateButton = ttk.Button(subAnimCont,text="Redraw",command=self.changeGraph); updateButton.pack(side = tk.LEFT)
playPauseButton = ttk.Button(subAnimCont,text='Play/Pause',command = globalV.pauseUnpause); playPauseButton.pack(side =tk.RIGHT)
drawControlsContainer = tk.Frame(animOptionsContainer, relief = "sunken"); drawControlsContainer.pack()
presetContainer = tk.Frame(animOptionsContainer, relief = "sunken"); presetContainer.pack()
presetLabel = tk.Label(presetContainer,text="Presets", font=("Comic Sans",12)); presetLabel.pack()
self.presetDictionary = {
'Barrier Partial Reflection':("gaussian(-5,1,10)(x)","squareBarr(x0=0.1,width=.2,height=100)(x)"),
'Airy functions':("gaussian(-5,1,13)(x)","15*x"),
'Harmonic Oscillator':("gaussian(0,2,5)(x)",".5*x**2"),
'Coherent State w = 2':("gaussian(6/sqrt(2),1/sqrt(2),0)(x)",".5*4*x**2"),
'Abs(x) with Bumps':("(x>-4)*(x<0)*(np.exp(1j*8*x)/np.sqrt(2))*(np.sin(np.pi*x/2))","8*abs(x)+(x<7)*(x>5)*50*(5-x)*(x-7)-(x<-5)*(x>-7)*50*(-5-x)*(-x-7)"),
'Coulomb Like':("gaussian(3,1,-12)(x)","-80/(.25*(x-3)**2+.5)-120/(.25*(x+3)**2+.5)")
}
self.preset = tk.StringVar(); self.preset.set('Barrier Partial Reflection')
presetsBox = ttk.Combobox(presetContainer,textvariable=self.preset)
presetsBox['values'] = [key for key in self.presetDictionary]
presetsBox.pack(side=tk.BOTTOM)
self.preset.trace('w',self.presetCallback)
#Todo: Connect up dontInterpBox
self.dontInterp = tk.BooleanVar()
dontInterpBox = ttk.Checkbutton(drawControlsContainer,text="Don't Interpo",variable=self.dontInterp);# dontInterpBox.pack(side = tk.RIGHT)
self.startingK = tk.DoubleVar() # Initial K for psi
kSlider = ttk.Scale(drawControlsContainer,orient="h",from_=-30,to=30,variable=self.startingK)
kSlider.pack()
displayOptionsContainer = tk.Frame(animOptionsContainer, relief = "sunken"); displayOptionsContainer.pack(expand=True,fill=tk.X)
dispTypeButton = ttk.Button(displayOptionsContainer,text='Display Re{'+globalV.uniPsi+'}', command = self.anim.switchDisplayType)
dispTypeButton.pack(side=tk.LEFT)
#XKCDButton = ttk.Button(displayOptionsContainer, text="XKCD", command=self.toggleXKCD); XKCDButton.pack(side=tk.RIGHT)
EnergyButton = ttk.Button(displayOptionsContainer, text="Energies", command=self.anim.energyDisplaySwitch); EnergyButton.pack(side = tk.LEFT)
interpButton = ttk.Checkbutton(displayOptionsContainer,variable=None, command=self.anim.switchInterp); interpButton.pack(side=tk.RIGHT)
# Text inputs for psi and V
inputContainer = tk.Frame(nonFigContainer, relief="sunken"); inputContainer.pack()
inputLabel = ttk.Label(inputContainer,text="Direct input",font=("Comic",16)); inputLabel.pack(side=tk.TOP)
psiContainer = tk.Frame(inputContainer, relief="sunken"); psiContainer.pack()
psiDLabel = ttk.Label(psiContainer,text=globalV.uniPsi+"(x,0): example exp(-x**2 + 5*1j)"); psiDLabel.pack(side=tk.TOP)
self.textPsi = tk.StringVar()
psiInputBox = tk.Entry(psiContainer, textvariable=self.textPsi)
psiInputBox.pack(side = tk.LEFT)
self.usePsi = tk.BooleanVar()
psiCheckBox = ttk.Checkbutton(psiContainer,text="Enable",variable=self.usePsi); psiCheckBox.pack(side = tk.RIGHT)
vContainer = tk.Frame(inputContainer, relief="sunken"); vContainer.pack()
vDLabel = ttk.Label(vContainer,text="V(x): example ((x<-5)|(x>5))*100"); vDLabel.pack(side = tk.TOP)
self.textV = tk.StringVar()
vxInputBox = tk.Entry(vContainer, textvariable=self.textV)
vxInputBox.pack(side = tk.LEFT)
self.useV = tk.BooleanVar()
vCheckBox = ttk.Checkbutton(vContainer,text="Enable",variable=self.useV); vCheckBox.pack(side = tk.RIGHT)
#todo add other button functions
solverContainer = tk.Frame(nonFigContainer, relief = "sunken")
solverContainer.pack(side=tk.BOTTOM)
solverTypesContainer = tk.Frame(solverContainer, relief = "sunken")
solverTypesContainer.pack(side = tk.TOP,expand=True)
FinDiffButton = ttk.Button(solverTypesContainer, text="Finite Difference", command=(lambda: self.myPart.reInit(SOLVER=EigenSolver1d)))
FinDiffButton.pack(side = tk.LEFT,fill=tk.BOTH)
SplitStepButton = ttk.Button(solverTypesContainer, text="Split Step Fourier", command=(lambda: self.myPart.reInit(SOLVER=SplitStepper1d)))
SplitStepButton.pack(side = tk.RIGHT,fill=tk.BOTH)
# Solver settings
solverSettingsContainer = tk.Frame(solverContainer, relief = "sunken")
solverSettingsContainer.pack(side=tk.BOTTOM)
stencilContainer = tk.Frame(solverSettingsContainer); stencilContainer.pack(side = tk.LEFT)
stencilDescription = ttk.Label(stencilContainer,text="Hamiltonian Stencil"); stencilDescription.pack()
self.numSTerms = tk.IntVar(); self.numSTerms.set(2)
stencil3Button = ttk.Radiobutton(stencilContainer,text = "3 Term",variable = self.numSTerms, value = 1); stencil3Button.pack()
stencil5Button = ttk.Radiobutton(stencilContainer,text = "5 Term",variable = self.numSTerms, value = 2); stencil5Button.pack()
stencil7Button = ttk.Radiobutton(stencilContainer,text = "7 Term",variable = self.numSTerms, value = 3); stencil7Button.pack()
stencil9Button = ttk.Radiobutton(stencilContainer,text = "9 Term",variable = self.numSTerms, value = 4); stencil9Button.pack()
#Todo: fix placement of Nscale
nContainer = tk.Frame(solverSettingsContainer, relief = "sunken"); nContainer.pack(side = tk.RIGHT)
Ndescription = ttk.Label(nContainer,text="Samples: 1024",); Ndescription.pack(side=tk.TOP)
self.vectorLength = tk.IntVar()
self.vectorLength.set(10)
NScale = ttk.Scale(nContainer,orient='v',from_=3,to=13,variable = self.vectorLength,
command = lambda x: self.nPointsSliderCallback(Ndescription)); NScale.pack()
# eigenSlider and coefficients
altFigSettingsContainer = tk.Frame(figContainer,relief=tk.RAISED,borderwidth=1)
altFigSettingsContainer.pack(side=tk.BOTTOM,expand=True,fill=tk.BOTH)
self.eigenNum = tk.IntVar()
fnum = tk.Label(altFigSettingsContainer)
fnum.pack(side = tk.LEFT)
cNum = tk.Label(altFigSettingsContainer)
cNum.pack(side = tk.RIGHT)
self.eigenFunctionSlider = ttk.Scale(altFigSettingsContainer,orient="h",from_=0,to=90,variable = self.eigenNum, command = lambda x: self.altGraphSliderCallback((fnum,cNum)) )
self.eigenFunctionSlider.pack(expand=True,fill=tk.BOTH)
# Button for controlling altgraph output
self.altGraphType = tk.IntVar()
energyBasis = ttk.Radiobutton(altFigSettingsContainer,text = "psi in H basis",variable=self.altGraphType,value=0); energyBasis.pack(side = tk.RIGHT)
def presetCallback(self,*args,**kwargs):
self.textPsi.set(self.presetDictionary[self.preset.get()][0])
self.textV.set(self.presetDictionary[self.preset.get()][1])
self.usePsi.set(True)
self.useV.set(True)
def nPointsSliderCallback(self,label):
pointExponent = self.vectorLength.get()
label.config(text = "Samples: %i"%2**pointExponent)
#def threadedChangeGraph(self):
# thread.start_new_thread(self.changeGraph, ())
def altGraphSliderCallback(self,(fnum,cNum)):
Ni = self.eigenNum.get()
fnum.config(text="N_%i"%Ni)
if self.myPart.solverClass == EigenSolver1d:
a,b = np.real(self.myPart.solver.basisCoeffs[Ni]),np.imag(self.myPart.solver.basisCoeffs[Ni])
cNum.config(text="C*C = %.2E"%(a**2 +b**2))
self.anim.eigenDisplayNum = Ni
#print np.vdot(self.myPart.solver.basisCoeffs,self.myPart.solver.basisCoeffs)
def changeGraph(self):
# Get the drawn curves from the canvas
dasCurves,updatePsi,updateV = self.drawSpace.extractCurves()
# downSampling to 50 points with spline interpolation, then rescaling #### NWAS HERE, may have to mess with interpolation settings
numSamples = 620.
downSampleRatio = numSamples/float(np.shape(dasCurves)[1])
rescaleRatio = self.anim.particle.N/float(numSamples)
pYvY = zoom(dasCurves,(1,downSampleRatio),order = 1)
pYvY = zoom(pYvY,(1,rescaleRatio),order=1).astype('complex128')
if self.anim.displayType == 0: # For displaytype and thus drawtype is |psi|^2
pYvY[0] = np.sqrt(np.abs(pYvY[0]))*np.exp(1j*self.startingK.get()*self.anim.particle.X)
else:
pYvY[0] = pYvY[0]*np.exp(1j*self.startingK.get()*self.anim.particle.X)
oldPsi = self.anim.particle.getPsi()
oldV = self.anim.particle.getV()
# We may also need to rescale N if it has changed
newN = 2**self.vectorLength.get()
if self.anim.particle.N!= newN:
scaleRatio = newN/float(self.anim.particle.N)
oldPsi = zoom(np.real(oldPsi),scaleRatio,order = 1)+1j*zoom(np.imag(oldPsi),scaleRatio,order = 1)
oldV = zoom(oldV,scaleRatio,order = 1)
pYvY = zoom(np.real(pYvY),(1,scaleRatio),order=1) +1j*zoom(np.imag(pYvY),(1,scaleRatio),order=1)
newPsi = pYvY[0] if (updatePsi) else oldPsi
newV = np.real(pYvY[1]) if (updateV) else oldV
# If the direct input boxes are checked, input is taken directly
X = np.linspace(-self.anim.particle.Lx/2,self.anim.particle.Lx/2,newN) # note that this will inhibit future change of Lx
if (self.usePsi.get()):
lambdaPsi = creationFunctions.getFunctionFromText(self.textPsi.get())
if lambdaPsi!=None: newPsi = lambdaPsi(X)
if (self.useV.get()):
lambdaV = creationFunctions.getFunctionFromText(self.textV.get())
if lambdaV!=None: newV = lambdaV(X)
# set the new particle settings
self.anim.particle.reInit(psi = newPsi,Vx = newV,stencilNum = self.numSTerms.get(),N=newN)
# We need to change the bounds on the altgraph eigenslider
if self.anim.particle.solverClass == EigenSolver1d:
newMaxNBasis = self.anim.particle.solver.eigenBasis.shape[1]-1
self.eigenFunctionSlider.configure(to = newMaxNBasis)
# For out of bounds safety
self.anim.eigenDisplayNum = min(self.anim.eigenDisplayNum,newMaxNBasis)
def initGrid(self):
#ttk.Style().theme_use("xpnative")
self.pack(fill=tk.BOTH,expand=1)
self.columnconfigure(1, weight=1)
self.columnconfigure(3, pad=7)
self.rowconfigure(3, weight=1)
self.rowconfigure(5, pad=7)
def toggleXKCD(self):
xkcd()# function is still broken, since funcanimation does not terminate
#self.psiFig = mpl.figure.Figure(figsize=(8,6), dpi=100)
#self.anim = Animator(self.myPart,self.psiFig)
# Remember to cut window off
#
class DrawableGraph(FigureCanvasTkAgg):
def __init__(self,figure,master):
FigureCanvasTkAgg.__init__(self,figure,master)
self.master = master
self.cWidth,self.cHeight = figure.get_figwidth()*figure.dpi,\
figure.get_figheight()*figure.dpi
self.b1 = "up"
self.b2 = "up"
self.x1old,self.y1old = None,None
self.x2old,self.y2old = None,None
self.yCenter = 203 # center of the psiGraph in pixels
self.yScale = -80. # Number of pixels from 0 to 1, takes into account reversed directions
self.psiXlist,self.psiYlist = -1*np.ones(self.cWidth),self.yCenter*np.ones(self.cWidth)
self.VXlist,self.VYlist = -1*np.ones(self.cWidth),self.yCenter*np.ones(self.cWidth)
self.oldPsi = None # stores psYlist and
self.oldV = None # VYlist for modification
self.get_tk_widget().bind("<Motion>", self.motion)
self.get_tk_widget().bind("<ButtonPress-1>", self.b1down) # left click
self.get_tk_widget().bind("<ButtonRelease-1>", self.b1up) # for psi
self.get_tk_widget().bind("<ButtonPress-3>", self.b2down) # right click
self.get_tk_widget().bind("<ButtonRelease-3>", self.b2up) # for V(x)
self.get_tk_widget().bind("<ButtonPress-2>",self.loadOldCurves) # So you can extend a curve
self.get_tk_widget().bind("<space>",lambda event: globalV.pauseUnpause())
def loadOldCurves(self,event):
if self.oldPsi != None: self.psiYlist = self.oldPsi
if self.oldV != None: self.psiYlist = self.oldV
def extractCurves(self):
pX,pY,vX,vY = self.psiXlist,self.psiYlist, self.VXlist,self.VYlist
self.b1 = "up"
self.b2 = "up"
self.x1old,self.y1old = None,None
self.x2old,self.y2old = None,None
self.psiXlist,self.psiYlist = -1*np.ones(self.cWidth),self.yCenter*np.ones(self.cWidth)
self.VXlist,self.VYlist = -1*np.ones(self.cWidth),self.yCenter*np.ones(self.cWidth)
self.get_tk_widget().delete("line")
# Apply tukey window function to psi
pwY = tukey(620,alpha=.1)*(pY[100:720]-self.yCenter)/self.yScale
vwY = vY[100:720]-self.yCenter
dasCurves = np.array([pwY,vwY])
thresholdPsi = np.sum(pX[100:720])>-610
thresholdV = np.sum(vX[100:720])>-610
if thresholdPsi: self.oldPsi = pY
if thresholdV: self.oldV = vY
return dasCurves, thresholdPsi, thresholdV# Thresholds on activation
def b1down(self,event):
if self.inBounds(event.x,event.y):
self.b1 = "down" #
self.get_tk_widget().config(cursor="target") #
#globalV.pause()
def b2down(self,event):
if self.inBounds(event.x,event.y):
self.b2 = "down" #
self.get_tk_widget().config(cursor="tcross")
#globalV.pause() #
print event.x,event.y
print self.get_tk_widget().winfo_height()
def b1up(self, event):
self.b1 = "up"
self.x1old = None # reset the line when you let go of the button
self.y1old = None
self.get_tk_widget().config(cursor="arrow")
def b2up(self, event):
self.b2 = "up"
self.x2old = None # reset the line when you let go of the button
self.y2old = None
self.get_tk_widget().config(cursor="arrow")
def inBounds(self,x,y):
xGood = (x<self.cWidth) and (x>=0)
yGood = (y<self.cHeight) and (y>=0)
return (xGood and yGood)
def linearInterpRem(self,coords):
xold,yold,xnew,ynew=coords
slope = (ynew-yold)/float(xnew-xold)
return lambda x: ((x-xold)*slope + yold) #Switch up down
def motion(self,event):
if (self.b1 == "down") and self.inBounds(event.x,event.y):
if self.psiXlist[event.x]==-1:
color = "black"
else: color = "red";
if self.x1old is not None:
self.get_tk_widget().create_line(self.x1old,self.y1old,event.x,event.y,smooth=True,width=2,fill=color,tag="line")
if color == "black" and self.x1old!=event.x:
coords = self.x1old,self.y1old,event.x,event.y
xinRange = np.arange(self.x1old,event.x+1)
self.psiXlist[self.x1old:event.x+1] = 1
self.psiYlist[self.x1old:event.x+1] = self.linearInterpRem(coords)(xinRange)
self.x1old = event.x
self.y1old = event.y
if (self.b2 == "down") and self.inBounds(event.x,event.y):
if self.VXlist[event.x]==-1:
color = "blue"
else: color = "purple";
if self.x2old is not None:
self.get_tk_widget().create_line(self.x2old,self.y2old,event.x,event.y,smooth=True,width=2,fill=color, tag="line")
if color == "blue" and self.x2old != event.x:
coords = self.x2old,self.y2old,event.x,event.y
xinRange = np.arange(self.x2old,event.x+1)
self.VXlist[self.x2old:event.x+1] = 1
self.VYlist[self.x2old:event.x+1] = self.linearInterpRem(coords)(xinRange)
self.x2old = event.x
self.y2old = event.y
if __name__ == "__main__":
app = topApp()
app.mainPageFrame.anim.animate()
app.mainloop()
| mit | -1,090,661,446,433,497,500 | 45.527397 | 186 | 0.636979 | false |
rbalda/neural_ocr | env/lib/python2.7/site-packages/pybrain/rl/environments/twoplayergames/tasks/gomokutask.py | 1 | 3950 | __author__ = 'Tom Schaul, [email protected]'
from inspect import isclass
from pybrain.utilities import Named
from pybrain.rl.environments.twoplayergames import GomokuGame
from pybrain.rl.environments.twoplayergames.gomokuplayers import RandomGomokuPlayer, ModuleDecidingPlayer
from pybrain.rl.environments.twoplayergames.gomokuplayers.gomokuplayer import GomokuPlayer
from pybrain.structure.modules.module import Module
from pybrain.rl.environments.episodic import EpisodicTask
class GomokuTask(EpisodicTask, Named):
""" The task of winning the maximal number of Gomoku games against a fixed opponent. """
# first game, opponent is black
opponentStart = True
# on subsequent games, starting players are alternating
alternateStarting = False
# numerical reward value attributed to winning
winnerReward = 1.
# coefficient determining the importance of long vs. short games w.r. to winning/losing
numMovesCoeff = 0.
# average over some games for evaluations
averageOverGames = 10
noisy = True
def __init__(self, size, opponent = None, **args):
EpisodicTask.__init__(self, GomokuGame((size, size)))
self.setArgs(**args)
if opponent == None:
opponent = RandomGomokuPlayer(self.env)
elif isclass(opponent):
# assume the agent can be initialized without arguments then.
opponent = opponent(self.env)
if not self.opponentStart:
opponent.color = GomokuGame.WHITE
self.opponent = opponent
self.minmoves = 9
self.maxmoves = self.env.size[0] * self.env.size[1]
self.reset()
def reset(self):
self.switched = False
EpisodicTask.reset(self)
if self.opponent.color == GomokuGame.BLACK:
# first move by opponent
EpisodicTask.performAction(self, self.opponent.getAction())
def isFinished(self):
res = self.env.gameOver()
if res and self.alternateStarting and not self.switched:
# alternate starting player
self.opponent.color *= -1
self.switched = True
return res
def getReward(self):
""" Final positive reward for winner, negative for loser. """
if self.isFinished():
if self.env.winner == self.env.DRAW:
return 0
win = (self.env.winner != self.opponent.color)
moves = self.env.movesDone
res = self.winnerReward - self.numMovesCoeff * (moves -self.minmoves)/(self.maxmoves-self.minmoves)
if not win:
res *= -1
if self.alternateStarting and self.switched:
# opponent color has been inverted after the game!
res *= -1
return res
else:
return 0
def performAction(self, action):
EpisodicTask.performAction(self, action)
if not self.isFinished():
EpisodicTask.performAction(self, self.opponent.getAction())
def __call__(self, x):
""" If a module is given, wrap it into a ModuleDecidingAgent before evaluating it.
Also, if applicable, average the result over multiple games. """
if isinstance(x, Module):
agent = ModuleDecidingPlayer(x, self.env, greedySelection = True)
elif isinstance(x, GomokuPlayer):
agent = x
else:
raise NotImplementedError('Missing implementation for '+x.__class__.__name__+' evaluation')
res = 0
agent.game = self.env
self.opponent.game = self.env
for dummy in range(self.averageOverGames):
agent.color = -self.opponent.color
res += EpisodicTask.__call__(self, agent)
return res / float(self.averageOverGames)
| mit | -3,902,563,207,745,900,500 | 37.349515 | 111 | 0.611899 | false |
jabesq/home-assistant | homeassistant/components/hue/light.py | 1 | 14714 | """Support for the Philips Hue lights."""
import asyncio
from datetime import timedelta
import logging
from time import monotonic
import random
import aiohue
import async_timeout
from homeassistant.components import hue
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH,
ATTR_TRANSITION, ATTR_HS_COLOR, EFFECT_COLORLOOP, EFFECT_RANDOM,
FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_COLOR, SUPPORT_TRANSITION,
Light)
from homeassistant.util import color
SCAN_INTERVAL = timedelta(seconds=5)
_LOGGER = logging.getLogger(__name__)
SUPPORT_HUE_ON_OFF = (SUPPORT_FLASH | SUPPORT_TRANSITION)
SUPPORT_HUE_DIMMABLE = (SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS)
SUPPORT_HUE_COLOR_TEMP = (SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP)
SUPPORT_HUE_COLOR = (SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR)
SUPPORT_HUE_EXTENDED = (SUPPORT_HUE_COLOR_TEMP | SUPPORT_HUE_COLOR)
SUPPORT_HUE = {
'Extended color light': SUPPORT_HUE_EXTENDED,
'Color light': SUPPORT_HUE_COLOR,
'Dimmable light': SUPPORT_HUE_DIMMABLE,
'On/Off plug-in unit': SUPPORT_HUE_ON_OFF,
'Color temperature light': SUPPORT_HUE_COLOR_TEMP,
}
ATTR_IS_HUE_GROUP = 'is_hue_group'
GAMUT_TYPE_UNAVAILABLE = 'None'
# Minimum Hue Bridge API version to support groups
# 1.4.0 introduced extended group info
# 1.12 introduced the state object for groups
# 1.13 introduced "any_on" to group state objects
GROUP_MIN_API_VERSION = (1, 13, 0)
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Old way of setting up Hue lights.
Can only be called when a user accidentally mentions hue platform in their
config. But even in that case it would have been ignored.
"""
pass
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Hue lights from a config entry."""
bridge = hass.data[hue.DOMAIN][config_entry.data['host']]
cur_lights = {}
cur_groups = {}
api_version = tuple(
int(v) for v in bridge.api.config.apiversion.split('.'))
allow_groups = bridge.allow_groups
if allow_groups and api_version < GROUP_MIN_API_VERSION:
_LOGGER.warning('Please update your Hue bridge to support groups')
allow_groups = False
# Hue updates all lights via a single API call.
#
# If we call a service to update 2 lights, we only want the API to be
# called once.
#
# The throttle decorator will return right away if a call is currently
# in progress. This means that if we are updating 2 lights, the first one
# is in the update method, the second one will skip it and assume the
# update went through and updates it's data, not good!
#
# The current mechanism will make sure that all lights will wait till
# the update call is done before writing their data to the state machine.
#
# An alternative approach would be to disable automatic polling by Home
# Assistant and take control ourselves. This works great for polling as now
# we trigger from 1 time update an update to all entities. However it gets
# tricky from inside async_turn_on and async_turn_off.
#
# If automatic polling is enabled, Home Assistant will call the entity
# update method after it is done calling all the services. This means that
# when we update, we know all commands have been processed. If we trigger
# the update from inside async_turn_on, the update will not capture the
# changes to the second entity until the next polling update because the
# throttle decorator will prevent the call.
progress = None
light_progress = set()
group_progress = set()
async def request_update(is_group, object_id):
"""Request an update.
We will only make 1 request to the server for updating at a time. If a
request is in progress, we will join the request that is in progress.
This approach is possible because should_poll=True. That means that
Home Assistant will ask lights for updates during a polling cycle or
after it has called a service.
We keep track of the lights that are waiting for the request to finish.
When new data comes in, we'll trigger an update for all non-waiting
lights. This covers the case where a service is called to enable 2
lights but in the meanwhile some other light has changed too.
"""
nonlocal progress
progress_set = group_progress if is_group else light_progress
progress_set.add(object_id)
if progress is not None:
return await progress
progress = asyncio.ensure_future(update_bridge())
result = await progress
progress = None
light_progress.clear()
group_progress.clear()
return result
async def update_bridge():
"""Update the values of the bridge.
Will update lights and, if enabled, groups from the bridge.
"""
tasks = []
tasks.append(async_update_items(
hass, bridge, async_add_entities, request_update,
False, cur_lights, light_progress
))
if allow_groups:
tasks.append(async_update_items(
hass, bridge, async_add_entities, request_update,
True, cur_groups, group_progress
))
await asyncio.wait(tasks)
await update_bridge()
async def async_update_items(hass, bridge, async_add_entities,
request_bridge_update, is_group, current,
progress_waiting):
"""Update either groups or lights from the bridge."""
if is_group:
api_type = 'group'
api = bridge.api.groups
else:
api_type = 'light'
api = bridge.api.lights
try:
start = monotonic()
with async_timeout.timeout(4):
await api.update()
except (asyncio.TimeoutError, aiohue.AiohueException) as err:
_LOGGER.debug('Failed to fetch %s: %s', api_type, err)
if not bridge.available:
return
_LOGGER.error('Unable to reach bridge %s (%s)', bridge.host, err)
bridge.available = False
for light_id, light in current.items():
if light_id not in progress_waiting:
light.async_schedule_update_ha_state()
return
finally:
_LOGGER.debug('Finished %s request in %.3f seconds',
api_type, monotonic() - start)
if not bridge.available:
_LOGGER.info('Reconnected to bridge %s', bridge.host)
bridge.available = True
new_lights = []
for item_id in api:
if item_id not in current:
current[item_id] = HueLight(
api[item_id], request_bridge_update, bridge, is_group)
new_lights.append(current[item_id])
elif item_id not in progress_waiting:
current[item_id].async_schedule_update_ha_state()
if new_lights:
async_add_entities(new_lights)
class HueLight(Light):
"""Representation of a Hue light."""
def __init__(self, light, request_bridge_update, bridge, is_group=False):
"""Initialize the light."""
self.light = light
self.async_request_bridge_update = request_bridge_update
self.bridge = bridge
self.is_group = is_group
if is_group:
self.is_osram = False
self.is_philips = False
self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
else:
self.is_osram = light.manufacturername == 'OSRAM'
self.is_philips = light.manufacturername == 'Philips'
self.gamut_typ = self.light.colorgamuttype
self.gamut = self.light.colorgamut
_LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))
if self.light.swupdatestate == "readytoinstall":
err = (
"Please check for software updates of the %s "
"bulb in the Philips Hue App."
)
_LOGGER.warning(err, self.name)
if self.gamut:
if not color.check_valid_gamut(self.gamut):
err = (
"Color gamut of %s: %s, not valid, "
"setting gamut to None."
)
_LOGGER.warning(err, self.name, str(self.gamut))
self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
self.gamut = None
@property
def unique_id(self):
"""Return the ID of this Hue light."""
return self.light.uniqueid
@property
def name(self):
"""Return the name of the Hue light."""
return self.light.name
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
if self.is_group:
return self.light.action.get('bri')
return self.light.state.get('bri')
@property
def _color_mode(self):
"""Return the hue color mode."""
if self.is_group:
return self.light.action.get('colormode')
return self.light.state.get('colormode')
@property
def hs_color(self):
"""Return the hs color value."""
mode = self._color_mode
source = self.light.action if self.is_group else self.light.state
if mode in ('xy', 'hs') and 'xy' in source:
return color.color_xy_to_hs(*source['xy'], self.gamut)
return None
@property
def color_temp(self):
"""Return the CT color value."""
# Don't return color temperature unless in color temperature mode
if self._color_mode != "ct":
return None
if self.is_group:
return self.light.action.get('ct')
return self.light.state.get('ct')
@property
def is_on(self):
"""Return true if device is on."""
if self.is_group:
return self.light.state['any_on']
return self.light.state['on']
@property
def available(self):
"""Return if light is available."""
return self.bridge.available and (self.is_group or
self.bridge.allow_unreachable or
self.light.state['reachable'])
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_HUE.get(self.light.type, SUPPORT_HUE_EXTENDED)
@property
def effect(self):
"""Return the current effect."""
return self.light.state.get('effect', None)
@property
def effect_list(self):
"""Return the list of supported effects."""
if self.is_osram:
return [EFFECT_RANDOM]
return [EFFECT_COLORLOOP, EFFECT_RANDOM]
@property
def device_info(self):
"""Return the device info."""
if self.light.type in ('LightGroup', 'Room',
'Luminaire', 'LightSource'):
return None
return {
'identifiers': {
(hue.DOMAIN, self.unique_id)
},
'name': self.name,
'manufacturer': self.light.manufacturername,
# productname added in Hue Bridge API 1.24
# (published 03/05/2018)
'model': self.light.productname or self.light.modelid,
# Not yet exposed as properties in aiohue
'sw_version': self.light.raw['swversion'],
'via_device': (hue.DOMAIN, self.bridge.api.config.bridgeid),
}
async def async_turn_on(self, **kwargs):
"""Turn the specified or all lights on."""
command = {'on': True}
if ATTR_TRANSITION in kwargs:
command['transitiontime'] = int(kwargs[ATTR_TRANSITION] * 10)
if ATTR_HS_COLOR in kwargs:
if self.is_osram:
command['hue'] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535)
command['sat'] = int(kwargs[ATTR_HS_COLOR][1] / 100 * 255)
else:
# Philips hue bulb models respond differently to hue/sat
# requests, so we convert to XY first to ensure a consistent
# color.
xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR],
self.gamut)
command['xy'] = xy_color
elif ATTR_COLOR_TEMP in kwargs:
temp = kwargs[ATTR_COLOR_TEMP]
command['ct'] = max(self.min_mireds, min(temp, self.max_mireds))
if ATTR_BRIGHTNESS in kwargs:
command['bri'] = kwargs[ATTR_BRIGHTNESS]
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command['alert'] = 'lselect'
del command['on']
elif flash == FLASH_SHORT:
command['alert'] = 'select'
del command['on']
else:
command['alert'] = 'none'
if ATTR_EFFECT in kwargs:
effect = kwargs[ATTR_EFFECT]
if effect == EFFECT_COLORLOOP:
command['effect'] = 'colorloop'
elif effect == EFFECT_RANDOM:
command['hue'] = random.randrange(0, 65535)
command['sat'] = random.randrange(150, 254)
else:
command['effect'] = 'none'
if self.is_group:
await self.light.set_action(**command)
else:
await self.light.set_state(**command)
async def async_turn_off(self, **kwargs):
"""Turn the specified or all lights off."""
command = {'on': False}
if ATTR_TRANSITION in kwargs:
command['transitiontime'] = int(kwargs[ATTR_TRANSITION] * 10)
flash = kwargs.get(ATTR_FLASH)
if flash == FLASH_LONG:
command['alert'] = 'lselect'
del command['on']
elif flash == FLASH_SHORT:
command['alert'] = 'select'
del command['on']
else:
command['alert'] = 'none'
if self.is_group:
await self.light.set_action(**command)
else:
await self.light.set_state(**command)
async def async_update(self):
"""Synchronize state with bridge."""
await self.async_request_bridge_update(self.is_group, self.light.id)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
if self.is_group:
attributes[ATTR_IS_HUE_GROUP] = self.is_group
return attributes
| apache-2.0 | -3,169,966,392,359,769,000 | 33.70283 | 79 | 0.596371 | false |
jimmycallin/master-thesis | architectures/conll16st-hd-sdp/conll16_datautilities.py | 1 | 2894 | import sys
from pandas import json
import codecs
from Common_Utilities import CommonUtilities
class Conll2016DataUtilities(object):
@staticmethod
# Read relations data from pdtb file - relations.json
# http://nbviewer.jupyter.org/github/attapol/conll16st/blob/master/tutorial/tutorial.ipynb#relations.json-:-Gold-standard-discourse-relation-annotation
def read_relations_from_pdtb_file(file_name):
relations = []
with codecs.open(file_name, mode='r', encoding='utf-8') as pdtb_file:
relations = [json.loads(x) for x in pdtb_file]
return relations
@staticmethod
# Read data from input file parse.json
# http://nbviewer.jupyter.org/github/attapol/conll16st/blob/master/tutorial/tutorial.ipynb#parses.json-:-Input-for-the-main-task-and-the-supplementary-task
def read_input_data_from_parse_file(file_name):
with codecs.open(file_name, mode='r', encoding='utf-8') as parse_file:
json_str = parse_file.read().strip()
print json_str
en_parse_dict = json.loads(json_str)
return en_parse_dict
# SAMPLE USAGE
# python conll16_datautilities.py -rel_file:tutorial\conll16st-en-01-12-16-trial\relations.json -parse_file:tutorial\conll16st-en-01-12-16-trial\parses.json -sup_task_rel_file:tutorial\conll16st-en-01-12-16-trial\relations-no-senses.json
if __name__ == '__main__':
relations_file = CommonUtilities.get_param_value('rel_file', sys.argv, "")
if relations_file == "":
raise "please, specify -rel_file:tutorial\conll16st-en-01-12-16-trial\relations.json"
parse_file = CommonUtilities.get_param_value('parse_file', sys.argv, "")
if parse_file == "":
raise "please, specify -parse_file:tutorial\conll16st-en-01-12-16-trial\parses.json"
sup_task_rel_file = CommonUtilities.get_param_value('sup_task_rel_file', sys.argv, "")
if sup_task_rel_file == "":
raise "please, specify -sup_task_rel_file:tutorial\conll16st-en-01-12-16-trial\relations-no-senses.json"
relations = Conll2016DataUtilities.read_relations_from_pdtb_file(relations_file)
print "%s relations found!" % len(relations)
print "example relation [0]:"
# print relations[0]
en_parse_dict = Conll2016DataUtilities.read_input_data_from_parse_file(parse_file)
# example relation
en_example_relation = relations[10]
en_doc_id = en_example_relation["DocID"]
# en parse tree
en_parse_tree = en_parse_dict[en_doc_id]["sentences"][15]["parsetree"]
print "en parse tree:"
print en_parse_tree
# en dependencies
en_dependencies = en_parse_dict[en_doc_id]['sentences'][15]['dependencies']
print "en dependencies:"
print en_dependencies
# en single word info
en_single_word_info = en_parse_dict[en_doc_id]['sentences'][15]['words'][0]
print "en single word info:"
print en_single_word_info
| mit | -1,207,214,612,986,828,300 | 40.342857 | 237 | 0.692122 | false |
tempbottle/ghmm | ghmmwrapper/ghmm_gato/GraphUtil.py | 1 | 13735 | ################################################################################
#
# This file is part of Gato (Graph Animation Toolbox)
# version _VERSION_ from _BUILDDATE_. You can find more information at
# http://www.zpr.uni-koeln.de/~gato
#
# file: Graph.py
# author: Alexander Schliep ([email protected])
#
# Copyright (C) 1998-2002, Alexander Schliep, Winfried Hochstaettler and
# ZAIK/ZPR, Universitaet zu Koeln
#
# Contact: [email protected], [email protected]
#
# Information: http://gato.sf.net
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
#
# This file is version $Revision: 1876 $
# from $Date: 2007-08-01 15:13:25 +0200 (Wed, 01 Aug 2007) $
# last change by $Author: grunau $.
#
################################################################################
import types
import StringIO
from string import split
from GatoGlobals import *
from Graph import Graph
from DataStructures import Point2D, VertexLabeling, EdgeLabeling, EdgeWeight, VertexWeight, Queue
import logging
log = logging.getLogger("GraphUtil.py")
################################################################################
#
# Syntactic Sugar
#
################################################################################
def Vertices(G):
""" Returns the vertices of G. Hide method call """
return G.vertices
def Edges(G):
""" Returns the edges of G. Hide method call """
return G.Edges()
################################################################################
#
# Basic algorithms
#
################################################################################
def BFS(G,root,direction='forward'):
""" Calculate BFS distances and predecessor without showing animations.
If G is directed, direction does matter:
- 'forward' BFS will use outgoing edges
- 'backward' BFS will use incoming edges
It uses gInfinity (from GatoGlobals.py) as infinite distance.
returns (dist,pred) """
Q = Queue()
d = {}
pred = {}
for v in G.vertices:
d[v] = gInfinity
d[root] = 0
pred[root] = None
Q.Append(root)
while Q.IsNotEmpty():
v = Q.Top()
if G.QDirected() == 1 and direction == 'backward':
nbh = G.InNeighbors(v)
else:
nbh = G.Neighborhood(v)
for w in nbh:
            if d[w] == gInfinity:
                d[w] = d[v] + 1
                pred[w] = v  # remember the predecessor on the BFS tree
                Q.Append(w)
return (d,pred)
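# A minimal usage sketch (illustrative, not part of the original module),
# assuming a Graph built with AddVertex()/AddEdge() as elsewhere in Gato:
#
#   G = Graph()
#   v1 = G.AddVertex(); v2 = G.AddVertex(); v3 = G.AddVertex()
#   G.AddEdge(v1, v2)
#   G.AddEdge(v2, v3)
#   dist, pred = BFS(G, v1)
#   # dist[v3] == 2 and pred[v3] == v2; vertices not reachable from v1
#   # keep dist == gInfinity and never appear in pred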
def ConnectedComponents(G):
""" Compute the connected components of the undirected graph G.
Returns a list of lists of vertices. """
result = []
visited = {}
for v in G.vertices:
visited[v] = None
for root in G.vertices:
if visited[root] is not None:
continue
else: # Found a new component
component = [root]
visited[root] = 1
Q = Queue()
Q.Append(root)
while Q.IsNotEmpty():
v = Q.Top()
nbh = G.Neighborhood(v)
for w in nbh:
if visited[w] == None:
visited[w] = 1
Q.Append(w)
component.append(w)
result.append(component)
return result
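# Illustrative sketch: two disjoint edges yield two components. The order
# of the returned lists follows the iteration order of G.vertices.
#
#   G = Graph()
#   a = G.AddVertex(); b = G.AddVertex()
#   c = G.AddVertex(); d = G.AddVertex()
#   G.AddEdge(a, b)
#   G.AddEdge(c, d)
#   ConnectedComponents(G)   # -> [[a, b], [c, d]]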
################################################################################
#
# GraphInformer
#
################################################################################
class GraphInformer:
""" Provides information about edges and vertices of a graph.
Used as argument for GraphDisplay.RegisterGraphInformer() """
def __init__(self,G):
self.G = G
self.info = ""
def DefaultInfo(self):
""" Provide an default text which is shown when no edge/vertex
info is displayed """
return self.info
def VertexInfo(self,v):
""" Provide an info text for vertex v """
return "Vertex %d at position (%d,%d)" % (v,
self.G.embedding[v].x,
self.G.embedding[v].y)
def EdgeInfo(self,tail,head):
""" Provide an info text for edge (tail,head) """
return "Edge (%d,%d)" % (tail, head)
def SetDefaultInfo(self, info=""):
self.info = info
class WeightedGraphInformer(GraphInformer):
""" Provides information about weighted edges and vertices of a graph.
Used as argument for GraphDisplay.RegisterGraphInformer() """
def __init__(self,G,weightDesc="weight"):
""" G is the graph we want to supply information about and weightDesc
a textual interpretation of the weight """
GraphInformer.__init__(self,G)
self.weightDesc = weightDesc
def EdgeInfo(self,tail,head):
""" Provide an info text for weighted edge (tail,head) """
        # For undirected graphs the weight may be stored under either
        # orientation of the edge, so try both
if self.G.QDirected() == 0:
try:
w = self.G.edgeWeights[0][(tail, head)]
except KeyError:
w = self.G.edgeWeights[0][(head, tail)]
else:
w = self.G.edgeWeights[0][(tail, head)]
if self.G.edgeWeights[0].QInteger():
return "Edge (%d,%d) %s: %d" % (tail, head, self.weightDesc, w)
else:
return "Edge (%d,%d) %s: %f" % (tail, head, self.weightDesc, w)
class MSTGraphInformer(WeightedGraphInformer):
def __init__(self,G,T):
WeightedGraphInformer.__init__(self,G)
self.T = T
def DefaultInfo(self):
""" Provide an default text which is shown when no edge/vertex
info is displayed """
return "Tree has %d vertices and weight %5.2f" % (self.T.Order(),self.T.Weight())
class FlowGraphInformer(GraphInformer):
def __init__(self,G,flow):
GraphInformer.__init__(self,G)
self.flow = flow
self.cap = flow.cap
self.res = flow.res
self.excess = flow.excess
def EdgeInfo(self,v,w):
return "Edge (%d,%d) - flow: %d of %d" % (v,w, self.flow[(v,w)], self.cap[(v,w)])
def VertexInfo(self,v):
tmp = self.excess[v]
if tmp == gInfinity:
str1 = "Infinity"
elif tmp == -gInfinity:
str1 = "-Infinity"
else:
str1 = "%d"%tmp
return "Vertex %d - excess: %s" % (v, str1)
class ResidualGraphInformer(FlowGraphInformer):
def EdgeInfo(self,v,w):
return "Edge (%d,%d) - residual capacity: %d" % (v, w, self.res[(v,w)])
################################################################################
#
# FILE I/O
#
################################################################################
def OpenCATBoxGraph(_file):
""" Reads in a graph from file fileName. File-format is supposed
to be from old CATBOX++ (*.cat) """
G = Graph()
    E = VertexLabeling()  # reserved for the (currently unused) embedding
# get file from name or file object
    graphFile = None
    if type(_file) in types.StringTypes:
        graphFile = open(_file, 'r')
    elif type(_file) == types.FileType or issubclass(_file.__class__, StringIO.StringIO):
        graphFile = _file
    else:
        raise Exception("expected a file name or a file-like object")
lineNr = 1
firstVertexLineNr = -1
lastVertexLineNr = -1
firstEdgeLineNr = -1
lastEdgeLineNr = -1
intWeights = 0
while 1:
line = graphFile.readline()
if not line:
break
if lineNr == 2: # Read directed and euclidian
splitLine = split(line[:-1],';')
G.directed = eval(split(splitLine[0],':')[1])
G.simple = eval(split(splitLine[1],':')[1])
G.euclidian = eval(split(splitLine[2],':')[1])
intWeights = eval(split(splitLine[3],':')[1])
nrOfEdgeWeights = eval(split(splitLine[4],':')[1])
nrOfVertexWeights = eval(split(splitLine[5],':')[1])
for i in xrange(nrOfEdgeWeights):
G.edgeWeights[i] = EdgeWeight(G)
for i in xrange(nrOfVertexWeights):
G.vertexWeights[i] = VertexWeight(G)
if lineNr == 5: # Read nr of vertices
nrOfVertices = eval(split(line[:-2],':')[1]) # Strip of "\n" and ;
firstVertexLineNr = lineNr + 1
lastVertexLineNr = lineNr + nrOfVertices
if firstVertexLineNr <= lineNr and lineNr <= lastVertexLineNr:
splitLine = split(line[:-1],';')
v = G.AddVertex()
label =split(splitLine[1],':')[1]
G.labeling[v] = split(splitLine[1],':')[1]
#y = eval(split(splitLine[2],':')[1])
for i in xrange(nrOfVertexWeights):
w = eval(split(splitLine[2+i],':')[1])
G.vertexWeights[i][v] = w
#E[v] = Point2D(x,y)
if lineNr == lastVertexLineNr + 1: # Read Nr of edges
nrOfEdges = eval(split(line[:-2],':')[1]) # Strip of "\n" and ;
firstEdgeLineNr = lineNr + 1
lastEdgeLineNr = lineNr + nrOfEdges
if firstEdgeLineNr <= lineNr and lineNr <= lastEdgeLineNr:
splitLine = split(line[:-1],';')
h = eval(split(splitLine[0],':')[1])
t = eval(split(splitLine[1],':')[1])
G.AddEdge(t,h)
for i in xrange(nrOfEdgeWeights):
G.edgeWeights[i][(t,h)] = eval(split(splitLine[3+i],':')[1])
lineNr = lineNr + 1
graphFile.close()
if intWeights:
G.Integerize('all')
for i in xrange(nrOfVertexWeights):
G.vertexWeights[i].Integerize()
return G
def SaveCATBoxGraph(G, _file):
""" Save graph to file fileName in file-format from old CATBOX++ (*.cat) """
# get file from name or file object
file=None
if type(_file) in types.StringTypes:
file = open(_file, 'w')
elif type(_file)==types.FileType or issubclass(_file.__class__,StringIO.StringIO):
file=_file
else:
raise Exception("got wrong argument")
nrOfVertexWeights = len(G.vertexWeights.keys())
nrOfEdgeWeights = len(G.edgeWeights.keys())
integerEdgeWeights = G.edgeWeights[0].QInteger()
file.write("graph:\n")
file.write("dir:%d; simp:%d; eucl:%d; int:%d; ew:%d; vw:%d;\n" %
(G.QDirected(), G.simple, G.QEuclidian(), integerEdgeWeights,
nrOfEdgeWeights, nrOfVertexWeights))
file.write("scroller:\n")
file.write("vdim:1000; hdim:1000; vlinc:10; hlinc:10; vpinc:50; hpinc:50;\n")
file.write("vertices:" + `G.Order()` + ";\n")
# Force continous numbering of vertices
count = 1
save = {}
for v in G.vertices:
save[v] = count
count = count + 1
file.write("n:%d; l:%s; " % (save[v], G.labeling[v]))
for i in xrange(nrOfVertexWeights):
if integerEdgeWeights: # XXX
file.write(" w:%d;" % int(round(G.vertexWeights[i][v])))
else:
file.write(" w:%d;" % G.vertexWeights[i][v])
file.write("\n")
file.write("edges:" + `G.Size()` + ";\n")
for tail in G.vertices:
for head in G.OutNeighbors(tail):
file.write("h:%d; t:%d; e:2;" % (save[head], save[tail]))
for i in xrange(nrOfEdgeWeights):
if integerEdgeWeights:
file.write(" w:%d;" % int(round(G.edgeWeights[i][(tail,head)])))
else:
file.write(" w:%f;" % G.edgeWeights[i][(tail,head)])
file.write("\n")
#### GML
def ParseGML(file):
retval = []
while 1:
line = file.readline()
if not line:
return retval
token = filter(lambda x: x != '', split(line[:-1],"[\t ]*"))
if len(token) == 1 and token[0] == ']':
return retval
elif len(token) == 2:
if token[1] == '[':
retval.append((token[0], ParseGML(file)))
else:
retval.append((token[0], token[1]))
else:
log.error("Serious format error line %s:" % line)
def PairListToDictionary(l):
d = {}
for i in xrange(len(l)):
d[l[i][0]] = l[i][1]
return d
def OpenGMLGraph(fileName):
""" Reads in a graph from file fileName. File-format is supposed
to be GML (*.gml) """
G = Graph()
G.directed = 0
E = VertexLabeling()
W = EdgeWeight(G)
L = VertexLabeling()
VLabel = VertexLabeling()
ELabel = EdgeLabeling()
file = open(fileName, 'r')
g = ParseGML(file)
file.close()
if g[0][0] != 'graph':
log.error("Serious format error in %s. first key is not graph" % fileName)
return
else:
l = g[0][1]
for i in xrange(len(l)):
key = l[i][0]
value = l[i][1]
if key == 'node':
d = PairListToDictionary(value)
v = G.AddVertex()
try:
VLabel[v] = eval(d['label'])
P = PairListToDictionary(d['graphics'])
E[v] = Point2D(eval(P['x']), eval(P['y']))
except:
d = None
P = None
elif key == 'edge':
d = PairListToDictionary(value)
try:
s = eval(d['source'])
t = eval(d['target'])
G.AddEdge(s,t)
ELabel[(s,t)] = eval(d['label'])
W[(s,t)] = 0
except:
d = None
elif key == 'directed':
G.directed = 1
for v in G.vertices:
L[v] = v
G.embedding = E
G.labeling = L
G.nrEdgeWeights = 1
G.edgeWeights[0] = W
G.vertexAnnotation = VLabel
G.edgeAnnotation = ELabel
return G
| gpl-3.0 | -8,783,906,233,094,782,000 | 27.319588 | 97 | 0.54707 | false |
shanest/quantifier-rnn-learning | quant_verify.py | 1 | 15085 | """
Copyright (C) 2017 Shane Steinert-Threlkeld
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
from __future__ import print_function
from builtins import range
from collections import defaultdict
import argparse
import tensorflow as tf
import numpy as np
import data_gen
import quantifiers
import util
INPUT_FEATURE = 'x'
# for variable length sequences,
# see http://danijar.com/variable-sequence-lengths-in-tensorflow/
def length(data):
"""Gets real length of sequences from a padded tensor.
Args:
data: a Tensor, containing sequences
Returns:
a Tensor, of shape [data.shape[0]], containing the length
of each sequence
"""
data = tf.slice(data,
[0, 0, 0],
[-1, -1, quantifiers.Quantifier.num_chars])
used = tf.sign(tf.reduce_max(tf.abs(data), reduction_indices=2))
lengths = tf.reduce_sum(used, reduction_indices=1)
lengths = tf.cast(lengths, tf.int32)
return lengths
# TODO: some docs here, noting TF estimator stuff
def lstm_model_fn(features, labels, mode, params):
# BUILD GRAPH
# how big each input will be
num_quants = len(params['quantifiers'])
item_size = quantifiers.Quantifier.num_chars + num_quants
# -- input_models: [batch_size, max_len, item_size]
input_models = features[INPUT_FEATURE]
# -- input_labels: [batch_size, num_classes]
input_labels = labels
# -- lengths: [batch_size], how long each input really is
lengths = length(input_models)
cells = []
for _ in range(params['num_layers']):
# TODO: consider other RNN cells?
cell = tf.nn.rnn_cell.LSTMCell(params['hidden_size'])
# dropout
cell = tf.nn.rnn_cell.DropoutWrapper(
cell, state_keep_prob=params['dropout'])
cells.append(cell)
multi_cell = tf.nn.rnn_cell.MultiRNNCell(cells)
# run on input
# -- output: [batch_size, max_len, out_size]
output, _ = tf.nn.dynamic_rnn(
multi_cell, input_models,
dtype=tf.float64, sequence_length=lengths)
# TODO: modify to allow prediction at every time step
# extract output at end of reading sequence
# -- flat_output: [batch_size * max_len, out_size]
flat_output = tf.reshape(output, [-1, params['hidden_size']])
# -- indices: [batch_size]
output_length = tf.shape(output)[0]
indices = (tf.range(0, output_length) * params['max_len']
+ (lengths - 1))
# -- final_output: [batch_size, out_size]
final_output = tf.gather(flat_output, indices)
tf.summary.histogram('final_output', final_output)
# make prediction
# TODO: play with arguments here
# -- logits: [batch_size, num_classes]
logits = tf.contrib.layers.fully_connected(
inputs=final_output,
num_outputs=params['num_classes'],
activation_fn=None)
# -- probs: [batch_size, num_classes]
probs = tf.nn.softmax(logits)
# dictionary of outputs
outputs = {'probs': probs}
# exit before labels are used when in predict mode
if mode == tf.estimator.ModeKeys.PREDICT:
return tf.estimator.EstimatorSpec(mode=mode,
predictions=outputs)
# -- loss: [batch_size]
loss = tf.nn.softmax_cross_entropy_with_logits(
labels=input_labels,
logits=logits)
# -- total_loss: scalar
total_loss = tf.reduce_mean(loss)
# training op
# TODO: try different optimizers, parameters for it, etc
optimizer = tf.train.AdamOptimizer(learning_rate=1e-4)
train_op = optimizer.minimize(total_loss,
global_step=tf.train.get_global_step())
# total accuracy
# -- prediction: [batch_size]
prediction = tf.argmax(probs, 1)
# -- target: [batch_size]
target = tf.argmax(input_labels, 1)
# list of metrics for evaluation
eval_metrics = {'total_accuracy': tf.metrics.accuracy(target, prediction)}
# metrics by quantifier
# -- flat_inputs: [batch_size * max_len, item_size]
flat_input = tf.reshape(input_models, [-1, item_size])
# -- final_inputs: [batch_size, item_size]
final_inputs = tf.gather(flat_input, indices)
# extract the portion of the input corresponding to the quantifier
# -- quants_by_seq: [batch_size, num_quants]
quants_by_seq = tf.slice(final_inputs,
[0, quantifiers.Quantifier.num_chars],
[-1, -1])
# index, in the quantifier list, of the quantifier for each data point
# -- quant_indices: [batch_size]
quant_indices = tf.to_int32(tf.argmax(quants_by_seq, 1))
# -- prediction_by_quant: a list num_quants long
# -- prediction_by_quant[i]: Tensor of predictions for quantifier i
prediction_by_quant = tf.dynamic_partition(
prediction, quant_indices, num_quants)
# -- target_by_quant: a list num_quants long
# -- target_by_quant[i]: Tensor containing true for quantifier i
target_by_quant = tf.dynamic_partition(
target, quant_indices, num_quants)
for idx in range(num_quants):
key = '{}_accuracy'.format(params['quantifiers'][idx]._name)
eval_metrics[key] = tf.metrics.accuracy(
target_by_quant[idx], prediction_by_quant[idx])
return tf.estimator.EstimatorSpec(
mode=mode,
loss=total_loss,
train_op=train_op,
eval_metric_ops=eval_metrics)
class EvalEarlyStopHook(tf.train.SessionRunHook):
"""Evaluates estimator during training and implements early stopping.
Writes output of a trial as CSV file.
See https://stackoverflow.com/questions/47137061/. """
def __init__(self, estimator, eval_input, filename,
num_steps=50, stop_loss=0.02):
self._estimator = estimator
self._input_fn = eval_input
self._num_steps = num_steps
self._stop_loss = stop_loss
# store results of evaluations
self._results = defaultdict(list)
self._filename = filename
def begin(self):
self._global_step_tensor = tf.train.get_or_create_global_step()
if self._global_step_tensor is None:
raise ValueError("global_step needed for EvalEarlyStop")
def before_run(self, run_context):
requests = {'global_step': self._global_step_tensor}
return tf.train.SessionRunArgs(requests)
def after_run(self, run_context, run_values):
global_step = run_values.results['global_step']
if (global_step-1) % self._num_steps == 0:
ev_results = self._estimator.evaluate(input_fn=self._input_fn)
print('')
for key, value in list(ev_results.items()):
self._results[key].append(value)
print('{}: {}'.format(key, value))
# TODO: add running total accuracy or other complex stop condition?
if ev_results['loss'] < self._stop_loss:
run_context.request_stop()
def end(self, session):
# write results to csv
util.dict_to_csv(self._results, self._filename)
def run_trial(eparams, hparams, trial_num,
write_path='/tmp/tensorflow/quantexp'):
tf.reset_default_graph()
write_dir = '{}/trial_{}'.format(write_path, trial_num)
csv_file = '{}/trial_{}.csv'.format(write_path, trial_num)
# BUILD MODEL
run_config = tf.estimator.RunConfig(
save_checkpoints_steps=eparams['eval_steps'],
save_checkpoints_secs=None,
save_summary_steps=eparams['eval_steps'])
model = tf.estimator.Estimator(
model_fn=lstm_model_fn,
params=hparams,
model_dir=write_dir,
config=run_config)
# GENERATE DATA
generator = data_gen.DataGenerator(
hparams['max_len'], hparams['quantifiers'],
mode=eparams['generator_mode'],
num_data_points=eparams['num_data'])
training_data = generator.get_training_data()
test_data = generator.get_test_data()
def get_np_data(data):
x_data = np.array([datum[0] for datum in data])
y_data = np.array([datum[1] for datum in data])
return x_data, y_data
# input fn for training
train_x, train_y = get_np_data(training_data)
train_input_fn = tf.estimator.inputs.numpy_input_fn(
x={INPUT_FEATURE: train_x},
y=train_y,
batch_size=eparams['batch_size'],
num_epochs=eparams['num_epochs'],
shuffle=True)
# input fn for evaluation
test_x, test_y = get_np_data(test_data)
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
x={INPUT_FEATURE: test_x},
y=test_y,
batch_size=len(test_x),
shuffle=False)
print('\n------ TRIAL {} -----'.format(trial_num))
# train and evaluate model together, using the Hook
model.train(input_fn=train_input_fn,
hooks=[EvalEarlyStopHook(model, eval_input_fn, csv_file,
eparams['eval_steps'],
eparams['stop_loss'])])
# DEFINE AN EXPERIMENT
def experiment_one_a(write_dir='data/exp1a'):
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 100000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.at_least_n(4),
quantifiers.at_least_n_or_at_most_m(6, 2)]}
num_trials = 30
for idx in range(num_trials):
run_trial(eparams, hparams, idx, write_dir)
def experiment_one_b(write_dir='data/exp1b'):
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 100000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.at_most_n(3),
quantifiers.at_least_n_or_at_most_m(6, 2)]}
num_trials = 30
for idx in range(num_trials):
run_trial(eparams, hparams, idx, write_dir)
def experiment_one_c(write_dir='data/exp1c'):
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 100000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.at_least_n(4),
quantifiers.between_m_and_n(6, 10)]}
num_trials = 30
for idx in range(num_trials):
run_trial(eparams, hparams, idx, write_dir)
def experiment_one_d(write_dir='data/exp1d'):
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 100000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.at_most_n(4),
quantifiers.between_m_and_n(6, 10)]}
num_trials = 30
for idx in range(num_trials):
run_trial(eparams, hparams, idx, write_dir)
def experiment_two_a(write_dir='data/exp2a'):
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 200000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.first_n(3),
quantifiers.at_least_n(3)]}
num_trials = 30
for idx in range(num_trials):
run_trial(eparams, hparams, idx, write_dir)
def experiment_two_b(write_dir='data/exp2b'):
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 200000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.last_n(3),
quantifiers.at_least_n(3)]}
num_trials = 30
for idx in range(num_trials):
run_trial(eparams, hparams, idx, write_dir)
def experiment_three_a(write_dir='data/exp3a'):
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 300000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.nall, quantifiers.notonly]}
num_trials = 30
for idx in range(num_trials):
run_trial(eparams, hparams, idx, write_dir)
def experiment_three_b(write_dir='data/exp3b'):
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 100000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.most, quantifiers.M]}
num_trials = 30
for idx in range(num_trials):
run_trial(eparams, hparams, idx, write_dir)
# TEST
def test():
eparams = {'num_epochs': 4, 'batch_size': 8,
'generator_mode': 'g', 'num_data': 10000,
'eval_steps': 50, 'stop_loss': 0.02}
hparams = {'hidden_size': 12, 'num_layers': 2, 'max_len': 20,
'num_classes': 2, 'dropout': 1.0,
'quantifiers': [quantifiers.at_least_n(4),
quantifiers.most]}
for idx in range(2):
run_trial(eparams, hparams, idx)
if __name__ == '__main__':
# RUN AN EXPERIMENT, with command-line arguments
parser = argparse.ArgumentParser()
parser.add_argument('--exp', help='which experiment to run', type=str)
parser.add_argument('--out_path', help='path to output', type=str)
args = parser.parse_args()
func_map = {
'one_a': experiment_one_a,
'one_b': experiment_one_b,
'two_a': experiment_two_a,
'two_b': experiment_two_b,
'three_a': experiment_three_a,
'three_b': experiment_three_b,
'test': test
}
func = func_map[args.exp]
if args.out_path:
func(args.out_path)
else:
func()
| gpl-3.0 | 507,784,469,863,717,760 | 33.598624 | 79 | 0.593636 | false |
rsteed11/GAT | gat/scraping/ArabicTextExtractor/Text_Extractor.py | 1 | 4227 | import os
import random
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D, GlobalMaxPooling2D
from spp.SpatialPyramidPooling import SpatialPyramidPooling
from keras import backend as K
import scipy
import tensorflow as tf
import keras.backend.tensorflow_backend as KTF
#Might need to figure out what to do because 5875 is not divisible by 3
#First approach is just 47*125 output of softmax layer
#Potentially need to make the image reading better
def clean_line(line):
cleaned = line[line.find('.tif')+4:]
cleaned = cleaned.strip()
return cleaned
def get_labels(path):
training_labels = open(path)
line_dict = {}
for i ,line in enumerate(training_labels):
temp_array = line.split()
image_name = temp_array[0]
image_name = image_name[0:image_name.find(".")]
line_dict[image_name] = temp_array[1:]
return line_dict
def create_one_hot(labels):
done_labels = {}
for label in labels:
if label not in done_labels:
done_labels[label] = len(done_labels)
return done_labels
def get_batch(image_dir, label_dict, batch_size, width, height, channels):
image_batch = []
label_batch = []
for i in range(batch_size):
image_name = random.choice(os.listdir(image_dir))
img = load_img(image_dir+'/'+image_name, grayscale=False)
x = img_to_array(img)
image_batch.append(x)
label_batch.append(label_dict[image_name[0:image_name.find(".")]])
return image_batch, label_batch
def to_one_hot(label_list):
for labels in label_list:
for i, label in enumerate(labels):
temp = np.zeros(47, dtype=int)
temp[label] = 1
temp = temp.tolist()
labels[i] = temp
ans = []
for sublist in label_list:
for item in sublist:
for i in item:
ans.append(i)
while len(ans) < 5875:
ans.append(0)
return ans
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config = config)
KTF.set_session(sess)
model = Sequential()
model.add(Conv2D(32, (3,3), activation='relu', input_shape=(None, None, 3)))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(Dropout(.5))
model.add(SpatialPyramidPooling([1, 2, 4]))
model.add(Dense(5875, activation='sigmoid'))
model.compile(loss=keras.losses.categorical_crossentropy,
optimizer=keras.optimizers.adam(),
metrics=['accuracy'])
image_dir = "/home/cheesecake/Desktop/KHATT_v1.0/LineImages_v1.0/FixedTextLineImages/Train"
label_path = '/home/cheesecake/GAT/gat/scraping/ArabicTextExtractor/FixedTextLinesLatinTransliteration/TrainLabels_Translated.txt'
height,width,channels = 256,256,3
batch_size = 1
label_list = get_labels(label_path).values()
all_labels = []
for i in label_list:
for j in i:
all_labels.append(j)
one_hot_dict = create_one_hot(all_labels)
one_hot_dict[''] = len(one_hot_dict)
for i in range(1000):
image_batch, label_batch = get_batch(image_dir, get_labels(label_path), batch_size, width, height, channels)
for i in range(0):
image = image_batch[i]
plt.imshow(image, cmap='gray')
plt.show()
for i, image in enumerate(image_batch):
image_batch[i] = image_batch[i].astype('float32')
image_batch[i] = image_batch[i] / 255
for labels in label_batch:
for i,label in enumerate(labels):
labels[i] = one_hot_dict[labels[i]]
label_batch = to_one_hot(label_batch)
image = image_batch[0]
new_batch = np.zeros((1,image.shape[0],image.shape[1],image.shape[2]))
new_batch[0,:,:,:] = image
best_batch = []
best_batch.append(label_batch)
best_batch = np.array(best_batch)
print(best_batch)
model.fit(new_batch, best_batch, epochs=1)
print('Nice!') | mit | 4,479,877,642,176,214,000 | 32.824 | 130 | 0.668796 | false |
rivasd/djPsych | djsend/models/Instructions.py | 1 | 3239 | '''
Created on Feb 24, 2016
@author: Daniel Rivas
'''
from django.db import models
from django.utils.translation import ugettext_lazy as l_
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django_markdown.models import MarkdownField
from markdown import markdown
# TODO: maybe reuse this to have intro and outro 'instructions' attached to a global setting?
class Instruction(models.Model):
"""
This is the sole model for instructions so far, I thought there is not much need for multiple models as I don't see much diversity
in the kind of fields people may want in an instruction. It supports translation and has a flag for HTML.
Tell me if you think this is missing something, but I will not be subclassing it, this website is already complicated enough
This does not set the jsPsych cont_key param, until I find a solution to find some kind of ListField that doesnt need the hassle of a ManyToMany rel to keycodes
"""
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
task = GenericForeignKey('content_type', 'object_id') # See: https://docs.djangoproject.com/en/1.9/ref/contrib/contenttypes/#generic-relations
text = MarkdownField(help_text=l_('Write your instruction page here using Markdown syntax! see: https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet'))
order = models.PositiveIntegerField(help_text=l_("if a setting has multiple instruction pages, we use this number to sort the order in which you want them presented."))
after = models.BooleanField(help_text=l_("check if this instruction page is meant to be shown AFTER the task it is attached to."))
css_class = models.CharField(blank=True, null=True, default='', max_length=64, help_text=l_("All instructions are rendered inside an HTML 'p' element with a class attribute 'instructions'. You can add to the class attribute here."))
# TODO: switched to Markdown for better editing, but still make sure to disallow HTML sine markdown is a superset of HTML
show_clickable_nav = models.BooleanField(default=False, help_text=l_("If true, then a Previous and Next button will be displayed beneath the instructions. Subjects can click the buttons to navigate."))
key_forward = models.CharField(max_length=1, blank=True, null=True, help_text=l_("This is the key that the subject can press in order to advance to the next page"))
def toDict(self):
"""
Kinda serializes this object so that it is ready to be JSON'ed and sent. You could override, still call the parent method and set custom params like cont_key on the returned dict even though
a direct-from-database solution may be better (sorry!)
In the default only the 'text' and 'is_html' attributes are returned.
"""
html_wrap = "<p class=\"instructions {0.css_class}\"> {0.text} </p>".format(self)
dictionary = {
'type': 'instructions',
'text': markdown(self.text),
'show_clickable_nav': self.show_clickable_nav,
'key_forward': self.key_forward
}
return dictionary | gpl-3.0 | 7,438,366,763,565,597,000 | 61.307692 | 236 | 0.722136 | false |
sserrot/champion_relationships | venv/Lib/site-packages/networkx/algorithms/traversal/beamsearch.py | 1 | 3543 | # beamsearch.py - breadth-first search with limited queueing
#
# Copyright 2016-2019 NetworkX developers.
#
# This file is part of NetworkX.
#
# NetworkX is distributed under a BSD license; see LICENSE.txt for more
# information.
"""Basic algorithms for breadth-first searching the nodes of a graph."""
import networkx as nx
from .breadth_first_search import generic_bfs_edges
__all__ = ['bfs_beam_edges']
def bfs_beam_edges(G, source, value, width=None):
"""Iterates over edges in a beam search.
The beam search is a generalized breadth-first search in which only
the "best" *w* neighbors of the current node are enqueued, where *w*
is the beam width and "best" is an application-specific
heuristic. In general, a beam search with a small beam width might
not visit each node in the graph.
Parameters
----------
G : NetworkX graph
source : node
Starting node for the breadth-first search; this function
iterates over only those edges in the component reachable from
this node.
value : function
A function that takes a node of the graph as input and returns a
real number indicating how "good" it is. A higher value means it
is more likely to be visited sooner during the search. When
visiting a new node, only the `width` neighbors with the highest
`value` are enqueued (in decreasing order of `value`).
width : int (default = None)
The beam width for the search. This is the number of neighbors
(ordered by `value`) to enqueue when visiting each new node.
Yields
------
edge
Edges in the beam search starting from `source`, given as a pair
of nodes.
Examples
--------
To give nodes with, for example, a higher centrality precedence
during the search, set the `value` function to return the centrality
value of the node::
>>> G = nx.karate_club_graph()
>>> centrality = nx.eigenvector_centrality(G)
>>> source = 0
>>> width = 5
>>> for u, v in nx.bfs_beam_edges(G, source, centrality.get, width):
... print((u, v)) # doctest: +SKIP
"""
if width is None:
width = len(G)
def successors(v):
"""Returns a list of the best neighbors of a node.
`v` is a node in the graph `G`.
The "best" neighbors are chosen according to the `value`
function (higher is better). Only the `width` best neighbors of
`v` are returned.
The list returned by this function is in decreasing value as
measured by the `value` function.
"""
# TODO The Python documentation states that for small values, it
# is better to use `heapq.nlargest`. We should determine the
# threshold at which its better to use `heapq.nlargest()`
# instead of `sorted()[:]` and apply that optimization here.
#
# If `width` is greater than the number of neighbors of `v`, all
# neighbors are returned by the semantics of slicing in
# Python. This occurs in the special case that the user did not
# specify a `width`: in this case all neighbors are always
# returned, so this is just a (slower) implementation of
# `bfs_edges(G, source)` but with a sorted enqueue step.
return iter(sorted(G.neighbors(v), key=value, reverse=True)[:width])
# TODO In Python 3.3+, this should be `yield from ...`
for e in generic_bfs_edges(G, source, successors):
yield e
| mit | -1,393,016,021,845,931,300 | 35.153061 | 76 | 0.646909 | false |
ecervera/ODT_Tools | web/Personalizados.py | 1 | 4863 | import tornado
import tornado.ioloop
import tornado.web
import os, uuid
__UPLOADS__ = "uploads/"
import sys
sys.path.append('..')
from odt_parse import OdtData
from odt_diff import find_style_by_name
ref_name = '../libro_personalizados.odt'
par_prop = ['backgroundcolor', 'textalign', 'marginleft', 'marginright', 'margintop', 'marginbottom', 'breakbefore']
text_prop = ['fontsize', 'fontstyle', 'language']
ref = OdtData(ref_name, par_prop, text_prop)
sp_dict = {'Heading':'Título', 'Heading_1':'Título_1', 'breakbefore':'salto de página antes',
'backgroundcolor':'color de fondo', 'textalign':'alineación',
'Quotations':'Cita', 'fontsize':'tamaño de letra', 'fontstyle':'efecto tipográfico',
'marginleft':'sangría izquierda', 'marginright':'sangría derecha',
'margintop':'espacio superior', 'marginbottom':'espacio inferior',
'justify':'justificada', 'end':'derecha', 'start':'izquierda'}
def sp_trans(s):
try:
t = sp_dict[s]
except KeyError:
t = s
return t
errors = []
import io
def compare_style_attr(ref, doc, family, style_name, attr_list):
stref = find_style_by_name(ref.style[family], style_name)
stdoc = find_style_by_name(doc.style[family], style_name)
f = io.StringIO()
if stdoc:
for attr in attr_list:
try:
val_ref = stref[attr]
try:
val_doc = stdoc[attr]
if val_ref != val_doc:
f.write('Estilo %s tiene %s <br> %s en lugar de %s.<br><br>' % (sp_trans(style_name), sp_trans(attr),
sp_trans(val_doc), sp_trans(val_ref)))
except KeyError:
f.write('Estilo %s no tiene %s definido.<br><br>' % (sp_trans(style_name), sp_trans(attr)))
#except TypeError:
# f.write('Estilo %s no está definido.\n\n' % (sp_trans(style_name)))
except KeyError:
err = style_name + "_" + attr
if not err in errors:
errors.append(err)
print('Estilo %s no tiene %s definido en el fichero de referencia.' % (sp_trans(style_name), sp_trans(attr)))
else:
f.write('Estilo %s no está definido.<br><br>' % (sp_trans(style_name)))
return f.getvalue()
def compare_style_attrs(ref, doc):
s = '<h4>libro_personalizados</h4><br>'
errors = 0
err = compare_style_attr(ref, doc, 'paragraph', 'Párrafo',
['fontsize', 'textalign'])
if err:
s += err
errors += 1
err = compare_style_attr(ref, doc, 'paragraph', 'Heading_1',
['margintop', 'breakbefore'])
if err:
s += err
errors += 1
err = compare_style_attr(ref, doc, 'paragraph', 'Nota',
['fontsize', 'backgroundcolor', 'marginleft'])
if err:
s += err
errors += 1
err = compare_style_attr(ref, doc, 'paragraph', 'Quotations',
['marginleft','marginright','textalign'])
if err:
s += err
errors += 1
err = compare_style_attr(ref, doc, 'paragraph', 'Cita_en_inglés',
['language'])
if err:
s += err
errors += 1
err = compare_style_attr(ref, doc, 'paragraph', 'Autor_de_cita',
['textalign'])
if err:
s += err
errors += 1
if not errors:
s += "No s'han trobat errors."
return s
class Userform(tornado.web.RequestHandler):
def get(self):
self.render("Personalizados.html")
class UploadAndCheck(tornado.web.RequestHandler):
def post(self):
try:
fileinfo = self.request.files['filearg'][0]
fname = fileinfo['filename']
extn = os.path.splitext(fname)[1]
cname = str(uuid.uuid4()) + extn
fname = __UPLOADS__ + cname
fh = open(fname, 'wb')
fh.write(fileinfo['body'])
#self.finish(cname + " is uploaded!! Check %s folder" %__UPLOADS__)
doc = OdtData( fname, par_prop, text_prop )
if doc.err:
s = 'Error de lectura del fitxer\n'
else:
s = compare_style_attrs(ref, doc)
except KeyError:
s = "No s'ha triat cap fitxer."
s += '<br><hr><button type="button" onclick="javascript:history.back()">Back</button>'
self.finish(s)
application = tornado.web.Application([
(r"/", Userform),
(r"/checkPersonalizados", UploadAndCheck),
], debug=True)
if __name__ == "__main__":
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
| mit | 7,891,240,583,249,468,000 | 34.669118 | 129 | 0.532055 | false |
jsheffie/django-auth-experiments | djauth/quickstart/views.py | 1 | 2083 | from django.shortcuts import render
from django.contrib.auth.models import User, Group
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from django.contrib.auth.decorators import login_required
from quickstart.serializers import UserSerializer, GroupSerializer
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import logout
class UserViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows users to be viewed or edited.
Note: setting queryset, and serializer_class attributs sans just
a model attribute gives us more control over the API behavior.
This is the recommended style for most applications.
"""
# http://django-rest-framework.org/api-guide/permissions#api-reference
permission_classes = ( IsAuthenticated, )
queryset = User.objects.all()
serializer_class = UserSerializer
class GroupViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows groups to be viewed or edited.
"""
queryset = Group.objects.all()
serializer_class = GroupSerializer
@login_required
def testing_users( request ):
ret_str = "Hello Authenticated user required. "
ret_str += "<br>User: %s" % ( request.user.username )
ret_str +="<br><a href='/logout/'>Logout</a>"
return HttpResponse( ret_str );
def no_auth_view( request ):
ret_str = "No Authenticated user required"
ret_str += "<br>User: %s" % ( request.user.username )
ret_str += "<br><a href='/auth/view/'>Auth Required</a>"
ret_str += "<br><a href='/no/auth/view/'>No Auth Required</a>"
ret_str +="<br><a href='/logout/'>Logout</a>"
return HttpResponse( ret_str );
@login_required
def auth_view( request ):
ret_str = "Authenticated user required"
ret_str += "<br>User: %s" % ( request.user.username )
ret_str += "<br><a href='/auth/view/'>Auth Required</a>"
ret_str += "<br><a href='/no/auth/view/'>No Auth Required</a>"
ret_str +="<br><a href='/logout/'>Logout</a>"
return HttpResponse( ret_str );
def logout_view(request):
logout( request )
return HttpResponseRedirect(redirect_to="/no/auth/view/");
| mit | -253,710,595,698,105,470 | 33.716667 | 74 | 0.723476 | false |
Johnzero/OE7 | openerp/addons-fg/openerp-magento-master/tools/magento_update_images.py | 1 | 3107 | """
Update product images from Magento -> OpenERP
"""
import xmlrpclib
import optparse
from config import *
from magento import *
from ooop import OOOP
def main():
url = 'http' + (secure and 's' or '') + '://' + server
o = OOOP(user=username,pwd=password,dbname=dbname,uri=url,port=port)
url = 'http' + (secure and 's' or '') + '://' + server + ':' + port
common = xmlrpclib.ServerProxy(url + '/xmlrpc/common')
uid = common.login(dbname, username, password)
object = xmlrpclib.ServerProxy(url + '/xmlrpc/object')
print dbname
context = {}
context['magento_app'] = MAGENTO_APP
with ProductImages(MGN_URL, MGN_APIUSER, MGN_APIPASSWORD) as product_image_api:
args = [('magento_exportable','=',True)]
product_ids = object.execute(dbname,uid,password,'product.product','search',args)
print len(product_ids)
for product_id in product_ids:
args = [('oerp_id','=',product_id),('model_id','=',109)]
product = object.execute(dbname,uid,password,'magento.external.referential','search',args)
if len(product) > 0:
prod = object.execute(dbname,uid,password,'magento.external.referential','read',product,['mgn_id','oerp_id'])
try:
product_images = product_image_api.list(prod[0]['mgn_id'])
for product_image in product_images:
if 'url' in product_image: #magento == 1.3
url = product_image['url']
else: #magento < 1.5
url = product_image['filename']
splited_url = url.split('/')
filename = splited_url[len(splited_url)-1]
imgs = o.ProductImages.filter(filename=filename)
for i in imgs:
if product_image['exclude'] == '1':
i.magento_exclude = True
else:
i.magento_exclude = False
if 'image' in product_image['types']:
i.magento_base_image = True
else:
i.magento_base_image = False
if 'small_image' in product_image['types']:
i.magento_small_image = True
else:
i.magento_small_image = False
if 'thumbnail' in product_image['types']:
i.magento_thumbnail = True
else:
i.magento_thumbnail = False
i.save()
print "[UPDATE] %s Mgn - Image filename %s" % (prod[0]['mgn_id'],filename)
except:
print "[ALERT] Not update images %s" % (prod[0]['mgn_id'])
continue
return True
if __name__ == "__main__":
main()
| agpl-3.0 | 5,040,203,752,295,153,000 | 38.329114 | 125 | 0.471516 | false |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtCore/QStateMachine.py | 1 | 4846 | # encoding: utf-8
# module PyQt4.QtCore
# from /usr/lib/python3/dist-packages/PyQt4/QtCore.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import sip as __sip
from .QState import QState
class QStateMachine(QState):
""" QStateMachine(QObject parent=None) """
def addDefaultAnimation(self, QAbstractAnimation): # real signature unknown; restored from __doc__
""" QStateMachine.addDefaultAnimation(QAbstractAnimation) """
pass
def addState(self, QAbstractState): # real signature unknown; restored from __doc__
""" QStateMachine.addState(QAbstractState) """
pass
def cancelDelayedEvent(self, p_int): # real signature unknown; restored from __doc__
""" QStateMachine.cancelDelayedEvent(int) -> bool """
return False
def clearError(self): # real signature unknown; restored from __doc__
""" QStateMachine.clearError() """
pass
def configuration(self): # real signature unknown; restored from __doc__
""" QStateMachine.configuration() -> set-of-QAbstractState """
pass
def defaultAnimations(self): # real signature unknown; restored from __doc__
""" QStateMachine.defaultAnimations() -> list-of-QAbstractAnimation """
pass
def error(self): # real signature unknown; restored from __doc__
""" QStateMachine.error() -> QStateMachine.Error """
pass
def errorString(self): # real signature unknown; restored from __doc__
""" QStateMachine.errorString() -> str """
return ""
def event(self, QEvent): # real signature unknown; restored from __doc__
""" QStateMachine.event(QEvent) -> bool """
return False
def eventFilter(self, QObject, QEvent): # real signature unknown; restored from __doc__
""" QStateMachine.eventFilter(QObject, QEvent) -> bool """
return False
def globalRestorePolicy(self): # real signature unknown; restored from __doc__
""" QStateMachine.globalRestorePolicy() -> QStateMachine.RestorePolicy """
pass
def isAnimated(self): # real signature unknown; restored from __doc__
""" QStateMachine.isAnimated() -> bool """
return False
def isRunning(self): # real signature unknown; restored from __doc__
""" QStateMachine.isRunning() -> bool """
return False
def onEntry(self, QEvent): # real signature unknown; restored from __doc__
""" QStateMachine.onEntry(QEvent) """
pass
def onExit(self, QEvent): # real signature unknown; restored from __doc__
""" QStateMachine.onExit(QEvent) """
pass
def postDelayedEvent(self, QEvent, p_int): # real signature unknown; restored from __doc__
""" QStateMachine.postDelayedEvent(QEvent, int) -> int """
return 0
def postEvent(self, QEvent, QStateMachine_EventPriority_priority=None): # real signature unknown; restored from __doc__
""" QStateMachine.postEvent(QEvent, QStateMachine.EventPriority priority=QStateMachine.NormalPriority) """
pass
def removeDefaultAnimation(self, QAbstractAnimation): # real signature unknown; restored from __doc__
""" QStateMachine.removeDefaultAnimation(QAbstractAnimation) """
pass
def removeState(self, QAbstractState): # real signature unknown; restored from __doc__
""" QStateMachine.removeState(QAbstractState) """
pass
def setAnimated(self, bool): # real signature unknown; restored from __doc__
""" QStateMachine.setAnimated(bool) """
pass
def setGlobalRestorePolicy(self, QStateMachine_RestorePolicy): # real signature unknown; restored from __doc__
""" QStateMachine.setGlobalRestorePolicy(QStateMachine.RestorePolicy) """
pass
def start(self): # real signature unknown; restored from __doc__
""" QStateMachine.start() """
pass
def started(self, *args, **kwargs): # real signature unknown
""" QStateMachine.started [signal] """
pass
def stop(self): # real signature unknown; restored from __doc__
""" QStateMachine.stop() """
pass
def stopped(self, *args, **kwargs): # real signature unknown
""" QStateMachine.stopped [signal] """
pass
def __init__(self, QObject_parent=None): # real signature unknown; restored from __doc__
pass
DontRestoreProperties = 0
Error = None # (!) real value is ''
EventPriority = None # (!) real value is ''
HighPriority = 1
NoCommonAncestorForTransitionError = 3
NoDefaultStateInHistoryStateError = 2
NoError = 0
NoInitialStateError = 1
NormalPriority = 0
RestorePolicy = None # (!) real value is ''
RestoreProperties = 1
SignalEvent = None # (!) real value is ''
WrappedEvent = None # (!) real value is ''
| gpl-2.0 | -69,707,933,853,761,150 | 35.712121 | 123 | 0.65064 | false |
fabisel/sem-metadata-ui | mapapp/views.py | 1 | 3147 | from django.shortcuts import render
# from django.views.decorators.csrf import csrf_exempt
from tempfile import NamedTemporaryFile
import os, sys
from django.http import HttpResponse
# import http.client
# python2 import httplib
# this for python3 maybe:
# import urllib.request
# import urllib.parse
# this for python2 I guess:
# import urllib
import urllib2
from osgeo import ogr, osr, gdal
import json
from collections import OrderedDict
# todo
# csrf with superagent and django (exempt only for development)
# @csrf_exempt
def upload(request):
# import ipdb; ipdb.set_trace()
if request.POST.get('url', False):
# get file
# conn = http.client.HTTPSConnection(request.POST['url'])
# conn.request("GET", "/")
# import ipdb; ipdb.set_trace()
# r1 = conn.getresponse()
# print(r1.status, r1.reason)
# data1 = r1.read()
# conn.close()
# python3:
# f = urllib.request.urlopen(request.POST['url'])
# python2:
# import ipdb; ipdb.set_trace()
if request.POST['suffix'].encode('utf-8') == 'isJson':
f = urllib2.urlopen(request.POST['url'])
externalJson = f.read()
# import ipdb; ipdb.set_trace()
return HttpResponse(externalJson)
else:
f = urllib2.urlopen(request.POST['url'])
contents = f.read().decode('utf-8')
# import ipdb; ipdb.set_trace()
# print(f.read())
elif request.FILES.get('inputFile', False):
# STEP: show uploaded file from memory and the file contents
upload_file = request.FILES['inputFile']
contents = upload_file.read().decode('utf-8')
# import ipdb; ipdb.set_trace()
# STEP: from memory to temprory file to have a path to file what gdal needs
# with NamedTemporaryFile(suffix=".kml") as t:
# issue (fixed): suffix needs to be variable and passed from request
with NamedTemporaryFile(suffix=request.POST['suffix']) as t:
contentsStr = contents.encode('utf-8')
# import ipdb; ipdb.set_trace()
t.write(contentsStr)
# next line is what was before (python3) dont see the reason for that
# t.write(bytes(contents, 'UTF-8'))
t.seek(0)
# # driver = ogr.GetDriverByName('KML')
# # sourceData = driver.Open(t.name)
sourceData = ogr.Open(t.name, 0)
inLayer = sourceData.GetLayer()
# STEP: Create new geojson from input
# feature_collection = {"type": "FeatureCollection", "features": []}
valuesSorted = OrderedDict([('type','FeatureCollection'),('features',[])])
# valuesSorted['features'] = []
for inFeature in inLayer:
# feature_collection["features"].append(json.loads(inFeature.ExportToJson()))
valuesSorted['features'].append(json.loads(inFeature.ExportToJson()))
# geojsonFile = json.dumps(feature_collection, sort_keys=False)
geojsonFile = json.dumps(valuesSorted, sort_keys=False)
t.close()
# import ipdb; ipdb.set_trace()
# pass
return HttpResponse(geojsonFile)
# import ipdb; ipdb.set_trace()
# pass
| apache-2.0 | 5,562,341,868,102,526,000 | 34.761364 | 89 | 0.634573 | false |
rodo/pyrede | pyrede/provider/management/commands/import_latest.py | 1 | 1970 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Rodolphe Quiédeville <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
import last package
"""
import logging
import feedparser
from datetime import datetime
from django.core.management.base import BaseCommand
from pyrede.drp.models import Package
from pyrede.drp.models import PackageVersion
from pyrede.provider.utils.main import create_update_pack
from pyrede.provider.utils.main import import_package
from pyrede.provider.utils.main import split_title
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Read the RSS about the 40 last package, and import all of them'
def handle(self, *args, **options):
url = 'http://pypi.python.org/pypi?%3Aaction=rss'
if len(args):
url = args[0]
logger.debug('parse %s' % url)
nbp = self.parse(url)
logger.debug('found %s package' % nbp)
def parse(self, url):
datas = feedparser.parse(url)
for item in datas['items']:
name = None
version = None
try:
name, version = split_title(item['title'])
except:
logger.error("ERROR cant split {}".format(item['title']))
import_package(name)
return len(datas)
| gpl-3.0 | 5,167,084,117,490,336,000 | 33.54386 | 75 | 0.667852 | false |
dash1291/major | webserver/conceptual/conceptual/settings.py | 1 | 2764 | """
Django settings for conceptual project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'et^hrtu$a)=3*hxd!b68e%@hmv2xlvkirko%giuzpq!vrt1+as'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'api',
'webapp',
'south'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'conceptual.urls'
WSGI_APPLICATION = 'conceptual.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
# Use hyperlinked styles by default.
# Only used if the `serializer_class` attribute is not set on a view.
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.ModelSerializer',
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
),
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
)
}
TEMPLATE_DIRS = (
'conceptual/templates',
)
STATICFILES_DIRS = (
"conceptual/static",
)
LOGIN_URL = '/signin'
EXTRACTOR_SERVICE = 'tcp://127.0.0.1:8080'
EXTRACTIONS_PATH = os.path.join(BASE_DIR, 'extractions')
| gpl-3.0 | -395,477,153,099,388,900 | 23.034783 | 73 | 0.702605 | false |
ibm-security-intelligence/api-samples | introduction/04_BodyParameters.py | 1 | 2433 | #!/usr/bin/env python3
# This sample demonstrates how to send a parameter in the body of a request.
# For a list of the endpoints that you can use along with the parameters that
# they accept you can view the REST API interactive help page on your
# deployment at https://<hostname>/api_doc
# You can also retrieve a list of available endpoints through the API itself
# at the /api/help/endpoints endpoint.
import sys
import os
import Cleanup
import importlib
sys.path.append(os.path.realpath('../modules'))
client_module = importlib.import_module('RestApiClient')
SampleUtilities = importlib.import_module('SampleUtilities')
def main():
# Create our client and set up some sample data.
client = client_module.RestApiClient(version='6.0')
setup_data(client)
# Some endpoints accept body parameters. An example of this is the
# /reference_data/sets/bulk_load endpoint.
# Body parameters may appear with path parameters, as in this case, but
# will never appear with query parameters.
# You must make sure that you set the content type correctly to a type
# accepted by the endpoint.
headers = client.get_headers().copy()
headers['Content-type'] = 'application/json'
body = b'["abc", "def", "123"]'
# Send the request.
SampleUtilities.pretty_print_request(
client, 'reference_data/sets/bulk_load/rest_api_samples_testset',
'POST', headers=headers)
response = client.call_api(
'reference_data/sets/bulk_load/rest_api_samples_testset', 'POST',
headers=headers, data=body)
SampleUtilities.pretty_print_response(response)
# The response from the previous command only shows information about the
# set, not the contents of the set. We can view the contents of the set
# with this command:
response = client.call_api('reference_data/sets/rest_api_samples_testset',
'GET')
SampleUtilities.pretty_print_response(response)
# You can uncomment this line to have this script remove the data it
# creates after it is done, or you can invoke the Cleanup script directly
# Cleanup.cleanup_introduction_data(client)
# This helper function sets up data used in this sample.
def setup_data(client):
SampleUtilities.data_setup(
client, 'reference_data/sets?name=rest_api_samples_testset' +
'&element_type=ALN', 'POST')
if __name__ == "__main__":
main()
| apache-2.0 | -325,346,525,511,229,600 | 35.313433 | 78 | 0.70448 | false |
richardbuckle/EDRefCard | www/scripts/bindings.py | 1 | 57002 | #!/usr/bin/env python3
__version__ = '1.3'
from lxml import etree
from collections import OrderedDict
from wand.drawing import Drawing
from wand.image import Image
from wand.font import Font
from wand.color import Color
import cgi
import cgitb
import html
import sys
import string
import random
import datetime
import codecs
import os
import pickle
import re
from enum import Enum
from pathlib import Path
from urllib.parse import urljoin
try:
from .bindingsData import *
except: # pragma: no cover
from bindingsData import *
class Config:
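    # Note: several helpers below (dirRoot, webRoot, newRandom, randomName,
    # configsPath, unpickle, allConfigs) take no self and are meant to be
    # called on the class itself, e.g. Config.dirRoot(); under Python 3 they
    # behave as implicit static methods.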
def dirRoot():
return Path(os.environ.get('CONTEXT_DOCUMENT_ROOT', '..')).resolve()
def webRoot():
return urljoin(os.environ.get('SCRIPT_URI', 'https://edrefcard.info/'), '/')
def newRandom():
config = Config(Config.randomName())
        while config.exists():
config = Config(Config.randomName())
return config
def __init__(self, name):
if not name:
raise ValueError('Config must have a name')
self.name = name
def __repr__(self):
return "Config('%s')" % self.name
def randomName():
name = ''.join(random.choice(string.ascii_lowercase) for x in range(6))
return name
def configsPath():
return Config.dirRoot() / 'configs'
def path(self):
path = Config.configsPath() / self.name[:2] / self.name
return path
def pathWithNameAndSuffix(self, name, suffix):
newName = '-'.join([self.name, name])
p = self.path().with_name(newName)
return p.with_suffix(suffix)
def pathWithSuffix(self, suffix):
return self.path().with_suffix(suffix)
def exists(self):
return self.path().exists()
def makeDir(self):
fullPath = self.path()
dirPath = fullPath.parent
dirPath.mkdir(parents=True, exist_ok=True)
def refcardURL(self):
url = urljoin(Config.webRoot(), "binds/%s" % self.name)
return url
def bindsURL(self):
url = urljoin(Config.webRoot(), "configs/%s.binds" % self.name)
return url
    def unpickle(path):
        with path.open('rb') as file:
            obj = pickle.load(file)  # renamed from 'object' to avoid shadowing the builtin
            obj['runID'] = path.stem
        return obj
def allConfigs(sortKey=None):
configsPath = Config.configsPath()
picklePaths = list(configsPath.glob('**/*.replay'))
objs = [Config.unpickle(path) for path in picklePaths]
if sortKey is not None:
objs.sort(key=sortKey)
return objs
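# A minimal usage sketch for Config (illustrative only; the CGI handlers
# elsewhere in this file drive it in production, and the flow below is an
# assumption):
#
#   config = Config.newRandom()          # unused six-letter name, e.g. 'abcdef'
#   config.makeDir()                     # ensures configs/ab/ exists
#   bindsPath = config.pathWithSuffix('.binds')    # configs/ab/abcdef.binds
#   cardURL = config.refcardURL()        # e.g. https://edrefcard.info/binds/abcdef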
class Mode(Enum):
invalid = 0
blocks = 1
list = 2
replay = 3
generate = 4
listDevices = 5
class Errors:
def __init__(
self,
unhandledDevicesWarnings = '',
deviceWarnings = '',
misconfigurationWarnings = '',
errors = ''
):
self.unhandledDevicesWarnings = unhandledDevicesWarnings
self.deviceWarnings = deviceWarnings
self.misconfigurationWarnings = misconfigurationWarnings
self.errors = errors
def __repr__(self):
return ("Errors(unhandledDevicesWarnings='%s', deviceWarnings='%s', misconfigurationWarnings='%s', errors='%s')"
% (self.unhandledDevicesWarnings, self.deviceWarnings, self.misconfigurationWarnings, self.errors))
# Utility section
# Helper to obtain a font path
def getFontPath(weight, style):
if style == 'Normal':
style = ''
if weight == 'Regular' and style != '':
weight = ''
return '../fonts/Exo2.0-%s%s.otf' % (weight, style)
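# Examples of the mapping above, given the Exo 2.0 family in ../fonts:
#   getFontPath('Regular', 'Normal') -> '../fonts/Exo2.0-Regular.otf'
#   getFontPath('Regular', 'Italic') -> '../fonts/Exo2.0-Italic.otf'
#   getFontPath('Bold', 'Italic')    -> '../fonts/Exo2.0-BoldItalic.otf'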
# Command group styling
groupStyles = {
'General': {'Color': Color('Black'), 'Font': getFontPath('Regular', 'Normal')},
'Misc': {'Color': Color('Black'), 'Font': getFontPath('Regular', 'Normal')},
'Modifier': {'Color': Color('Black'), 'Font': getFontPath('Bold', 'Normal')},
'Galaxy map': {'Color': Color('ForestGreen'), 'Font': getFontPath('Regular', 'Normal')},
'Holo-Me': {'Color': Color('Sienna'), 'Font': getFontPath('Regular', 'Normal')},
'Multicrew': {'Color': Color('SteelBlue'), 'Font': getFontPath('Bold', 'Normal')},
'Fighter': {'Color': Color('DarkSlateBlue'), 'Font': getFontPath('Regular', 'Normal')},
'Camera': {'Color': Color('OliveDrab'), 'Font': getFontPath('Regular', 'Normal')},
'Head look': {'Color': Color('IndianRed'), 'Font': getFontPath('Regular', 'Normal')},
'Ship': {'Color': Color('Crimson'), 'Font': getFontPath('Regular', 'Normal')},
'SRV': {'Color': Color('MediumPurple'), 'Font': getFontPath('Regular', 'Normal')},
'Scanners': {'Color': Color('DarkOrchid'), 'Font': getFontPath('Regular', 'Normal')},
'UI': {'Color': Color('DarkOrange'), 'Font': getFontPath('Regular', 'Normal')},
'OnFoot': {'Color': Color('CornflowerBlue'), 'Font': getFontPath('Regular', 'Normal')},
}
# Command category styling
categoryStyles = {
'General': {'Color': Color('DarkSlateBlue'), 'Font': getFontPath('Regular', 'Normal')},
'Combat': {'Color': Color('Crimson'), 'Font': getFontPath('Regular', 'Normal')},
'Social': {'Color': Color('ForestGreen'), 'Font': getFontPath('Regular', 'Normal')},
'Navigation': {'Color': Color('Black'), 'Font': getFontPath('Regular', 'Normal')},
'UI': {'Color': Color('DarkOrange'), 'Font': getFontPath('Regular', 'Normal')},
}
# Modifier styling - note a list not a dictionary as modifiers are numeric
class ModifierStyles:
styles = [
{'Color': Color('Black'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('Crimson'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('ForestGreen'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('DarkSlateBlue'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('DarkOrange'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('DarkOrchid'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('SteelBlue'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('Sienna'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('IndianRed'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('CornflowerBlue'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('OliveDrab'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('MediumPurple'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('DarkSalmon'), 'Font': getFontPath('Regular', 'Normal')},
{'Color': Color('LightSlateGray'), 'Font': getFontPath('Regular', 'Normal')},
]
def index(num):
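        # Wrap around so any modifier number maps onto one of the styles above.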
        i = num % len(ModifierStyles.styles)
return ModifierStyles.styles[i]
def transKey(key):
if key is None:
return None
trans = keymap.get(key)
if trans is None:
trans = key.replace('Key_', '')
return trans
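# transKey falls back to stripping the 'Key_' prefix when bindingsData.keymap
# has no friendlier name, e.g. transKey('Key_F1') -> 'F1' (assuming 'Key_F1'
# is not in keymap).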
# Output section
def writeUrlToDrawing(config, drawing, public):
url = config.refcardURL() if public else Config.webRoot()
drawing.push()
drawing.font = getFontPath('SemiBold', 'Normal')
drawing.font_size = 72
drawing.text(x=23, y=252, body=url)
drawing.pop()
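# The font size and the x/y origin above are tuned to the title area of the
# template JPGs in ../res (an assumption; adjust them if the templates change).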
# Create a keyboard image from the template plus bindings
def createKeyboardImage(physicalKeys, modifiers, source, imageDevices, biggestFontSize, displayGroups, runId, public):
config = Config(runId)
filePath = config.pathWithNameAndSuffix(source, '.jpg')
# See if it already exists or if we need to recreate it
if filePath.exists():
return True
with Image(filename='../res/' + source + '.jpg') as sourceImg:
with Drawing() as context:
# Defaults for the font
context.font = getFontPath('Regular', 'Normal')
context.text_antialias = True
context.font_style = 'normal'
context.stroke_width = 0
context.fill_color = Color('Black')
context.fill_opacity = 1
# Add the ID to the title
writeUrlToDrawing(config, context, public)
outputs = {}
for group in displayGroups:
outputs[group] = {}
# Find the correct bindings and order them appropriately
for physicalKeySpec, physicalKey in physicalKeys.items():
itemDevice = physicalKey.get('Device')
itemKey = physicalKey.get('Key')
# Only show it if we are handling the appropriate image at this time
if itemDevice not in imageDevices:
continue
for modifier, bind in physicalKey.get('Binds').items():
for controlKey, control in bind.get('Controls').items():
                        entry = {}
                        entry['Control'] = control
                        entry['Key'] = itemKey
                        entry['Modifiers'] = []
                        if modifier != 'Unmodified':
                            for modifierKey, modifierControls in modifiers.items():
                                for modifierControl in modifierControls:
                                    if modifierControl.get('ModifierKey') == modifier and modifierControl.get('Key') is not None:
                                        entry['Modifiers'].append(modifierControl.get('Key'))
                        outputs[control['Group']][control['Name']] = entry
# Set up a screen state to handle output
screenState = {}
screenState['baseX'] = 60
screenState['baseY'] = 320
screenState['maxWidth'] = 0
screenState['thisWidth'] = 0
screenState['currentX'] = screenState['baseX']
screenState['currentY'] = screenState['baseY']
font = Font(getFontPath('Regular', 'Normal'), antialias=True, size=biggestFontSize)
groupTitleFont = Font(getFontPath('Regular', 'Normal'), antialias=True, size=biggestFontSize*2)
context.stroke_width=2
context.stroke_color=Color('Black')
context.fill_opacity=0
# Go through once for each display group
for displayGroup in displayGroups:
if outputs[displayGroup] == {}:
continue
writeText(context, sourceImg, displayGroup, screenState, groupTitleFont, False, True)
orderedOutputs = OrderedDict(sorted(outputs[displayGroup].items(), key=lambda x: x[1].get('Control').get('Order')))
for bindKey, bind in orderedOutputs.items():
for modifier in bind.get('Modifiers', []):
writeText(context, sourceImg, transKey(modifier), screenState, font, True, False)
writeText(context, sourceImg, transKey(bind.get('Key')), screenState, font, True, False)
writeText(context, sourceImg, bind.get('Control').get('Name'), screenState, font, False, True)
context.draw(sourceImg)
sourceImg.save(filename=str(filePath))
return True
def appendKeyboardImage(createdImages, physicalKeys, modifiers, displayGroups, runId, public):
def countKeyboardItems(physicalKeys):
keyboardItems = 0
for physicalKey in physicalKeys.values():
if physicalKey.get('Device') == 'Keyboard':
for bind in physicalKey.get('Binds').values():
keyboardItems = keyboardItems + len(bind.get('Controls'))
return keyboardItems
def fontSizeForKeyBoardItems(physicalKeys):
keyboardItems = countKeyboardItems(physicalKeys)
if keyboardItems > 48:
fontSize = 40 - int(((keyboardItems - 48) / 20) * 4)
if fontSize < 24:
fontSize = 24
else:
fontSize = 40
return fontSize
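# Worked example: 78 keyboard items gives 40 - int(((78 - 48) / 20) * 4) = 34,
# and very dense layouts bottom out at the minimum font size of 24.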
fontSize = fontSizeForKeyBoardItems(physicalKeys)
createKeyboardImage(physicalKeys, modifiers, 'keyboard', ['Keyboard'], fontSize, displayGroups, runId, public)
createdImages.append('Keyboard')
# Write text, possible wrapping
def writeText(context, img, text, screenState, font, surround, newLine):
border = 4
# Work out the size of the text
context.font = font.path
context.font_style = 'normal'
context.font_size = font.size
context.push()
context.stroke_width=0
context.fill_color=Color('Black')
context.fill_opacity=1
if text is None or text == '':
text = 'invalid'
context.fill_color=Color('Red')
metrics = context.get_font_metrics(img, text, multiline=False)
if screenState['currentY'] + int(metrics.text_height + 32) > 2160:
# Gone off the bottom of the page; go to next column
screenState['currentY'] = screenState['baseY']
screenState['baseX'] = screenState['baseX'] + screenState['maxWidth'] + 49
screenState['currentX'] = screenState['baseX']
screenState['maxWidth'] = 0
screenState['thisWidth'] = 0
# Center the text
x = screenState['currentX']
y = screenState['currentY'] + int(metrics.ascender)
context.text(x=x, y=y, body=text)
context.pop()
if surround is True:
# text y is baseline, rectangle y is top
y = screenState['currentY'] - border
context.rectangle(left=x - (border * 4), top=y - (border * 2), width=int(metrics.text_width) + (border*8), height=int(metrics.text_height) + (border*4), radius=30)
width = int(metrics.text_width + 48)
else:
width = int((metrics.text_width + 72)/48)*48
screenState['thisWidth'] = screenState['thisWidth'] + width
if newLine is True:
if screenState['thisWidth'] > screenState['maxWidth']:
screenState['maxWidth'] = screenState['thisWidth']
screenState['currentY'] = screenState['currentY'] + int(metrics.text_height + 32)
screenState['currentX'] = screenState['baseX']
screenState['thisWidth'] = 0
else:
screenState['currentX'] = screenState['currentX'] + width
def createBlockImage(supportedDeviceKey, strokeColor='Red', fillColor='LightGreen', dryRun=False):
supportedDevice = supportedDevices[supportedDeviceKey]
# Set up the path for our file
templateName = supportedDevice['Template']
config = Config(templateName)
config.makeDir()
filePath = config.pathWithSuffix('.jpg')
with Image(filename='../res/' + supportedDevice['Template'] + '.jpg') as sourceImg:
with Drawing() as context:
if not dryRun:
context.font = getFontPath('Regular', 'Normal')
context.text_antialias = True
context.font_style = 'normal'
maxFontSize = 40
for keyDevice in supportedDevice.get('KeyDevices', supportedDevice.get('HandledDevices')):
for (keycode, box) in hotasDetails[keyDevice].items():
if keycode == 'displayName':
continue
if not dryRun:
context.stroke_width = 1
context.stroke_color = Color(strokeColor)
context.fill_color = Color(fillColor)
context.rectangle(top=box['y'], left=box['x'], width=box['width'], height=box.get('height', 54))
context.stroke_width = 0
context.fill_color = Color('Black')
sourceTexts = [{'Text': keycode, 'Group': 'General', 'Style': groupStyles['General']}]
texts = layoutText(sourceImg, context, sourceTexts, box, maxFontSize)
for text in texts:
context.font_size = text['Size']
# TODO dry this up
context.font = text['Style']['Font']
context.text(x=text['X'], y=text['Y'], body=text['Text'])
if not dryRun:
context.draw(sourceImg)
sourceImg.save(filename=str(filePath))
# Return whether a binding is a redundant specialisation and thus can be hidden
def isRedundantSpecialisation(control, bind):
moreGeneralControls = control.get('HideIfSameAs')
if len(moreGeneralControls) == 0:
return False
for moreGeneralMatch in bind.get('Controls').keys():
if moreGeneralMatch in moreGeneralControls:
return True
return False
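# Hypothetical example: a bind whose Controls include both 'A' and 'B', where
# B's HideIfSameAs list contains 'A', reports B as redundant so only the more
# general control A is drawn on the card.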
# Create a HOTAS image from the template plus bindings
def createHOTASImage(physicalKeys, modifiers, source, imageDevices, biggestFontSize, config, public, styling, deviceIndex, misconfigurationWarnings):
# Set up the path for our file
runId = config.name
if deviceIndex == 0:
name = source
else:
name = '%s-%s' % (source, deviceIndex)
filePath = config.pathWithNameAndSuffix(name, '.jpg')
# See if it already exists or if we need to recreate it
if filePath.exists():
return True
with Image(filename='../res/' + source + '.jpg') as sourceImg:
with Drawing() as context:
# Defaults for the font
context.font = getFontPath('Regular', 'Normal')
context.text_antialias = True
context.font_style = 'normal'
context.stroke_width = 0
context.fill_color = Color('Black')
context.fill_opacity = 1
# Add the ID to the title
writeUrlToDrawing(config, context, public)
for physicalKeySpec, physicalKey in physicalKeys.items():
itemDevice = physicalKey.get('Device')
itemDeviceIndex = int(physicalKey.get('DeviceIndex'))
itemKey = physicalKey.get('Key')
# Only show it if we are handling the appropriate image at this time
if itemDevice not in imageDevices:
continue
# Only show it if we are handling the appropriate index at this time
if itemDeviceIndex != deviceIndex:
continue
# Find the details for the control
texts = []
hotasDetail = None
try:
hotasDetail = hotasDetails.get(itemDevice).get(itemKey)
except AttributeError:
hotasDetail = None
if hotasDetail is None:
logError('%s: No drawing box found for %s\n' % (runId, physicalKeySpec))
continue
# First obtain the modifiers if there are any
for keyModifier in modifiers.get(physicalKeySpec, []):
if styling == 'Modifier':
style = ModifierStyles.index(keyModifier.get('Number'))
else:
style = groupStyles.get('Modifier')
texts.append({'Text': 'Modifier %s' % (keyModifier.get('Number')), 'Group': 'Modifier', 'Style': style})
if '::Joy' in physicalKeySpec:
# Same again but for positive modifier
for keyModifier in modifiers.get(physicalKeySpec.replace('::Joy', '::Pos_Joy'), []):
if styling == 'Modifier':
style = ModifierStyles.index(keyModifier.get('Number'))
else:
style = groupStyles.get('Modifier')
texts.append({'Text': 'Modifier %s' % (keyModifier.get('Number')), 'Group': 'Modifier', 'Style': style})
# Same again but for negative modifier
for keyModifier in modifiers.get(physicalKeySpec.replace('::Joy', '::Neg_Joy'), []):
if styling == 'Modifier':
style = ModifierStyles.index(keyModifier.get('Number'))
else:
style = groupStyles.get('Modifier')
texts.append({'Text': 'Modifier %s' % (keyModifier.get('Number')), 'Group': 'Modifier', 'Style': style})
# Next obtain unmodified bindings
for modifier, bind in physicalKey.get('Binds').items():
if modifier == 'Unmodified':
for controlKey, control in bind.get('Controls').items():
if isRedundantSpecialisation(control, bind):
continue
# Check if this is a digital control on an analogue stick with an analogue equivalent
if control.get('Type') == 'Digital' and control.get('HasAnalogue') is True and hotasDetail.get('Type') == 'Analogue':
if misconfigurationWarnings == '':
misconfigurationWarnings = '<h1>Misconfiguration detected</h1>You have one or more analogue controls configured incorrectly. Please see <a href="https://forums.frontier.co.uk/showthread.php?t=209792">this thread</a> for details of the problem and how to correct it.<br/> <b>Your misconfigured controls:</b> <b>%s</b> ' % control['Name']
else:
misconfigurationWarnings = '%s, <b>%s</b>' % (misconfigurationWarnings, control['Name'])
#logError('%s: Digital command %s found on hotas control %s::%s\n' % (runId, control['Name'], itemDevice, itemKey))
if styling == 'Modifier':
texts.append({'Text': '%s' % (control.get('Name')), 'Group': control.get('Group'), 'Style': ModifierStyles.index(0)})
elif styling == 'Category':
texts.append({'Text': '%s' % (control.get('Name')), 'Group': control.get('Group'), 'Style': categoryStyles.get(control.get('Category', 'General'))})
else:
texts.append({'Text': '%s' % (control.get('Name')), 'Group': control.get('Group'), 'Style': groupStyles.get(control.get('Group'))})
# Next obtain bindings with modifiers
# Lazy approach to do this but covers us for now
for curModifierNum in range(1, 200):
for modifier, bind in physicalKey.get('Binds').items():
if modifier != 'Unmodified':
keyModifiers = modifiers.get(modifier)
modifierNum = 0
for keyModifier in keyModifiers:
if keyModifier['ModifierKey'] == modifier:
modifierNum = keyModifier['Number']
break
if modifierNum != curModifierNum:
continue
for controlKey, control in bind.get('Controls').items():
if isRedundantSpecialisation(control, bind):
continue
if styling == 'Modifier':
                                    texts.append({'Text': '%s' % (control.get('Name')), 'Group': control.get('Group'), 'Style': ModifierStyles.index(curModifierNum)})
elif styling == 'Category':
texts.append({'Text': '%s[%s]' % (control.get('Name'), curModifierNum), 'Group': control.get('Group'), 'Style': categoryStyles.get(control.get('Category', 'General'))})
else:
texts.append({'Text': '%s[%s]' % (control.get('Name'), curModifierNum), 'Group': control.get('Group'), 'Style': groupStyles.get(control.get('Group'))})
# Obtain the layout of the texts and write them
texts = layoutText(sourceImg, context, texts, hotasDetail, biggestFontSize)
for text in texts:
context.font_size = text['Size']
context.font = text['Style']['Font']
if styling != 'None':
context.fill_color = text['Style']['Color']
context.text(x=text['X'], y=text['Y'], body=text['Text'])
# Also need to add standalone modifiers (those without other binds)
for modifierSpec, keyModifiers in modifiers.items():
modifierTexts = []
for keyModifier in keyModifiers:
if keyModifier.get('Device') not in imageDevices:
# We don't have an image for this device
continue
if int(keyModifier.get('DeviceIndex')) != deviceIndex:
                    # This is not for our current device
continue
if '/' in modifierSpec:
# This is a logical modifier so ignore it
continue
if physicalKeys.get(modifierSpec) is not None or physicalKeys.get(modifierSpec.replace('::Pos_Joy', '::Joy')) is not None or physicalKeys.get(modifierSpec.replace('::Neg_Joy', '::Joy')) is not None:
# This has already been handled because it has other binds
continue
modifierKey = keyModifier.get('Key')
hotasDetail = hotasDetails.get(keyModifier.get('Device')).get(modifierKey)
if hotasDetail is None:
logError('%s: No location for %s\n' % (runId, modifierSpec))
continue
if styling == 'Modifier':
style = ModifierStyles.index(keyModifier.get('Number'))
else:
style = groupStyles.get('Modifier')
modifierTexts.append({'Text': 'Modifier %s' % (keyModifier.get('Number')), 'Group': 'Modifier', 'Style': style})
if modifierTexts != []:
# Obtain the layout of the modifier text and write it
modifierTexts = layoutText(sourceImg, context, modifierTexts, hotasDetail, biggestFontSize)
for text in modifierTexts:
context.font_size = text['Size']
context.font = text['Style']['Font']
if styling != 'None':
context.fill_color = text['Style']['Color']
context.text(x=text['X'], y=text['Y'], body=text['Text'])
context.draw(sourceImg)
sourceImg.save(filename=str(filePath))
return True
def layoutText(img, context, texts, hotasDetail, biggestFontSize):
width = hotasDetail.get('width')
height = hotasDetail.get('height', 54)
# Work out the best font size
fontSize = calculateBestFitFontSize(context, width, height, texts, biggestFontSize)
# Work out location of individual texts
currentX = hotasDetail.get('x')
currentY = hotasDetail.get('y')
maxX = hotasDetail.get('x') + hotasDetail.get('width')
metrics = None
for text in texts:
text['Size'] = fontSize
context.font = text['Style']['Font']
context.font_size = fontSize
metrics = context.get_font_metrics(img, text['Text'], multiline=False)
if currentX + int(metrics.text_width) > maxX:
# Newline
currentX = hotasDetail.get('x')
currentY = currentY + fontSize
text['X'] = currentX
text['Y'] = currentY + int(metrics.ascender)
currentX = currentX + int(metrics.text_width + metrics.character_width)
# We want to centre the texts vertically, which we can now do as we know how much space the texts take up
textHeight = currentY + fontSize - hotasDetail.get('y')
yOffset = int((height - textHeight) / 2) - int(fontSize / 6)
for text in texts:
text['Y'] = text['Y'] + yOffset
return texts
# Calculate the best fit font size for our text given the dimensions of the box
def calculateBestFitFontSize(context, width, height, texts, biggestFontSize):
fontSize = biggestFontSize
context.push()
with Image(width=width, height=height) as img:
# Step through the font size until we find one that fits
fits = False
while fits == False:
currentX = 0
currentY = 0
tooLong = False
for text in texts:
context.font = text['Style']['Font']
context.font_size = fontSize
metrics = context.get_font_metrics(img, text['Text'], multiline=False)
if currentX + int(metrics.text_width) > width:
if currentX == 0:
# This single entry is too long for the box; shrink it
tooLong = True
break
else:
# Newline
currentX = 0
currentY = currentY + fontSize
text['X'] = currentX
text['Y'] = currentY + int(metrics.ascender)
currentX = currentX + int(metrics.text_width + metrics.character_width)
if tooLong is False and currentY + metrics.text_height < height:
fits = True
else:
fontSize = fontSize -1
context.pop()
return fontSize
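# Note: this is a simple linear descent - start at biggestFontSize and shrink
# by one point until the wrapped texts fit both the width and the height of
# the scratch image.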
def calculateBestFontSize(context, text, hotasDetail, biggestFontSize):
width = hotasDetail.get('width')
height = hotasDetail.get('height', 54)
with Image(width=width, height=height) as img:
# Step through the font size until we find one that fits
fontSize = biggestFontSize
fits = False
while fits == False:
fitText = text
context.font_size = fontSize
# See if it fits on a single line
metrics = context.get_font_metrics(img, fitText, multiline=False)
if metrics.text_width <= hotasDetail.get('width'):
fits = True
else:
# See if we can break out the text on to multiple lines
lines = max(int(height / metrics.text_height), 1)
if lines == 1:
# Not enough room for more lines
fontSize = fontSize - 1
else:
fitText = ''
minLineLength = int(len(text) / lines)
regex = r'.{%s}[^,]*, |.+' % minLineLength
matches = re.findall(regex, text)
for match in matches:
if fitText == '':
fitText = match
else:
fitText = '%s\n%s' % (fitText, match)
metrics = context.get_font_metrics(img, fitText, multiline=True)
if metrics.text_width <= hotasDetail.get('width'):
fits = True
else:
fontSize = fontSize - 1
return (fitText, fontSize, metrics)
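# Worked example of the wrapping regex: with minLineLength == 10 the pattern
# r'.{10}[^,]*, |.+' applied to 'Galaxy camera pitch, yaw, roll' yields
# ['Galaxy camera pitch, ', 'yaw, roll'], i.e. a line breaks after a comma once
# at least minLineLength characters have been consumed.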
# Returns a set of controller names used by the binding
def controllerNames(configObj):
rawKeys = configObj['devices'].keys()
controllers = [fullKey.split('::')[0] for fullKey in rawKeys]
silencedControllers = ['Mouse', 'Keyboard']
def displayName(controller):
try:
return hotasDetails[controller]['displayName']
except:
return controller
controllers = {displayName(controller) for controller in controllers if not controller in silencedControllers}
return controllers
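# Note that Mouse and Keyboard are silenced, so a binding that only uses those
# devices yields an empty set here.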
def printListItem(configObj, searchOpts):
config = Config(configObj['runID'])
refcardURL = str(config.refcardURL())
dateStr = str(configObj['timestamp'].ctime())
name = str(configObj['description'])
controllers = controllerNames(configObj)
# Apply search filter if provided
searchControllers = searchOpts.get('controllers', set())
if searchControllers:
# Resolve device name from select list (from 'supportedDevices') into their 'handledDevices' (which are
# referenced in the bindings files)
requestedDevices = [supportedDevices.get(controller,{}).get('HandledDevices',{}) for controller in searchControllers]
requestedDevices = set([item for sublist in requestedDevices for item in sublist]) # Flatten into a set
# Compare against the list of devices supported in this binding config
devices = [fullKey.split('::')[0] for fullKey in configObj['devices'].keys()]
#print('<!-- Checking if any requested devices %s are in config\'s devices %s -->' % (requestedDevices, devices))
if not any(requestedDevice in devices for requestedDevice in requestedDevices):
return
controllersStr = ', '.join(sorted(controllers))
if name == '':
# if the uploader didn't bother to name their config, skip it
return
print('''
<tr>
<td class="description">
<a href=%s>%s</a>
</td>
<td class="controllers">
%s
</td>
<td class="date">
%s
</td>
</tr>
''' % (refcardURL, html.escape(name, quote=True), controllersStr, dateStr))
def modeTitle(mode):
if mode == Mode.list:
return 'EDRefCard: public configurations'
elif mode == Mode.listDevices:
return 'EDRefCard: supported devices'
else:
return 'EDRefCard'
def printDeviceList(mode):
print('<div id="list"><h1>%s</h1></div>' % modeTitle(mode))
print('<ul>')
devices = sorted(supportedDevices.keys())
for device in devices:
print('<li><a href=device/%s>%s</a> <a href="list?deviceFilter=%s" title="search">🔍</a></li>' % (device, device, device))
print('</ul>')
def printSearchForm(searchOptions):
print('<div>')
print('<form action="" id="searchForm">')
print('<table>')
print('<tr>')
print('<td><label for="deviceFilter">Select Controller(s)</label></td>')
print('<td><select name="deviceFilter" id="deviceFilter" multiple size=10>')
controllers = sorted(supportedDevices.keys())
for controller in controllers:
selected = "selected" if controller in searchOptions.get("controllers",[]) else ""
print('<option value="%s" %s>%s</option>' % (controller, selected, controller))
print('</select></td>')
print('</tr>')
print('<tr>')
print('<td colspan=2><input type="submit" value="Search"></input></td>')
print('</tr>')
print('</table>')
print('</form>')
print('</div>')
def printList(mode, searchOpts):
print('<div id="list"><h1>%s</h1></div>' % modeTitle(mode))
printSearchForm(searchOpts)
objs = Config.allConfigs(sortKey=lambda obj: str(obj['description']).casefold())
print('<table>')
print('''
<tr>
<th align="left" class="description">Description</th>
<th align="left" class="controllers">Controllers</th>
<th align="left" class="date">Date</th>
</tr>
''')
print("<!--\nSearch options: \n%s\n-->\n" % str(searchOpts))
for obj in objs:
try:
printListItem(obj, searchOpts)
except Exception as e:
            print('<tr><td>ERROR in item %s</td><td>%s</td></tr>' % (obj['runID'], str(e)))
#cgitb.handler() # only for use when needed
continue
print ('</table>')
def printRefCard(config, public, createdImages, deviceForBlockImage, errors):
runId = config.name
if errors.unhandledDevicesWarnings != '':
print('%s<br/>' % errors.unhandledDevicesWarnings)
if errors.misconfigurationWarnings != '':
print('%s<br/>' % errors.misconfigurationWarnings)
if errors.deviceWarnings != '':
print('%s<br/>' % errors.deviceWarnings)
if errors.errors != '':
print('%s<br/>' % errors.errors)
else:
for createdImage in createdImages:
if '::' in createdImage:
# Split the created image in to device and device index
m = re.search(r'(.*)\:\:([01])', createdImage)
device = m.group(1)
deviceIndex = int(m.group(2))
else:
device = createdImage
deviceIndex = 0
if deviceIndex == 0:
print('<img width="100%%" src="../configs/%s/%s-%s.jpg"/><br/>' % (runId[:2], runId, supportedDevices[device]['Template']))
else:
print('<img width="100%%" src="../configs/%s/%s-%s-%s.jpg"/><br/>' % (runId[:2], runId, supportedDevices[device]['Template'], deviceIndex))
if deviceForBlockImage is not None:
print('<img width="100%%" src="../configs/%s/%s.jpg"/><br/>' % (supportedDevices[deviceForBlockImage]['Template'][:2], supportedDevices[deviceForBlockImage]['Template']))
if deviceForBlockImage is None and public is True:
linkURL = config.refcardURL()
bindsURL = config.bindsURL()
print('<p/>Link directly to this page with the URL <a href="%s">%s</a>' % (linkURL, linkURL))
print('<p/>You can download the custom binds file for the configuration shown above at <a href="%s">%s</a>. Replace your existing custom binds file with this file to use these controls.' % (bindsURL, bindsURL))
print('<p/>')
def printBodyMain(mode, options, config, public, createdImages, deviceForBlockImage, errors):
if mode == Mode.list:
printList(mode, options)
elif mode == Mode.listDevices:
printDeviceList(mode)
else:
printRefCard(config, public, createdImages, deviceForBlockImage, errors)
def printBody(mode, options, config, public, createdImages, deviceForBlockImage, errors):
# guard against bad server configs
encoding = sys.stdout.encoding
if encoding != 'utf-8':
print(f'''
<p>It seems that your server is configured to use encoding "{encoding}" rather than "utf-8".<br>
For Apache, this can be fixed by adding <code>SetEnv PYTHONIOENCODING utf-8</code> at the end of <code>/etc/apache2/apache2.conf</code>.</p>
''')
return
printBodyMain(mode, options, config, public, createdImages, deviceForBlockImage, errors)
printSupportPara()
print('<p><a href="/">Home</a>.</p>')
def printSupportPara():
supportPara = '<p>Version %s<br>Please direct questions, suggestions and support requests to <a href="https://forums.frontier.co.uk/threads/edrefcard-makes-a-printable-reference-card-of-your-controller-bindings.464400/">the thread on the official Elite: Dangerous forums</a>.</p>' % __version__
print(supportPara)
def printHTML(mode, options, config, public, createdImages, deviceForBlockImage, errors):
    print('''Content-Type: text/html

<html>
<head>
<meta charset="utf-8">
<meta name="robots" content="all">
<title>%s</title>
<link href='https://fonts.googleapis.com/css?family=Domine:400,700' rel='stylesheet' type='text/css'>
<style type="text/css" media="all">@import"ed.css";</style>
</head>
<body>''' % modeTitle(mode))
printBody(mode, options, config, public, createdImages, deviceForBlockImage, errors)
print('''
</body>
</html>''')
# Parser section
def parseBindings(runId, xml, displayGroups, errors):
parser = etree.XMLParser(encoding='utf-8', resolve_entities=False)
try:
tree = etree.fromstring(bytes(xml, 'utf-8'), parser=parser)
except SyntaxError as e:
errors.errors = '''<h3>There was a problem parsing the file you supplied.</h3>
<p>%s.</p>
<p>Possibly you submitted the wrong file, or hand-edited it and made a mistake.</p>''' % html.escape(str(e), quote=True)
xml = '<root></root>'
tree = etree.fromstring(bytes(xml, 'utf-8'), parser=parser)
physicalKeys = {}
modifiers = {}
hotasModifierNum = 1
keyboardModifierNum = 101
devices = {}
if len(tree.findall(".//*[@Device='T16000MTHROTTLE']")) > 0:
hasT16000MThrottle = True
else:
hasT16000MThrottle = False
xmlBindings = tree.findall(".//Binding") + tree.findall(".//Primary") + tree.findall(".//Secondary")
for xmlBinding in xmlBindings:
controlName = xmlBinding.getparent().tag
device = xmlBinding.get('Device')
if device == '{NoDevice}':
continue
# Rewrite the device if this is a T16000M stick and we have a T16000M throttle
if device == 'T16000M' and hasT16000MThrottle == True:
device = 'T16000MFCS'
deviceIndex = xmlBinding.get('DeviceIndex', 0)
key = xmlBinding.get('Key')
# Remove the Neg_ and Pos_ headers to put digital buttons on analogue devices
if key is not None:
if key.startswith('Neg_'):
key = key.replace('Neg_', '', 1)
if key.startswith('Pos_'):
key = key.replace('Pos_', '', 1)
def modifierSortKey(modifierInfo):
modifierDevice = modifierInfo.get('Device')
# Rewrite the device if this is a T16000M stick and we have a T16000M throttle
if modifierDevice == 'T16000M' and hasT16000MThrottle == True:
modifierDevice = 'T16000MFCS'
modifierKey = '%s::%s::%s' % (modifierDevice, modifierInfo.get('DeviceIndex', 0), modifierInfo.get('Key'))
return modifierKey
modifiersInfo = xmlBinding.findall('Modifier')
modifiersInfo = sorted(modifiersInfo, key=modifierSortKey)
modifiersKey = 'Unmodified'
if modifiersInfo:
modifiersKey = ''
for modifierInfo in modifiersInfo:
modifierKey = modifierSortKey(modifierInfo)
if modifiersKey == '':
modifiersKey = modifierKey
else:
modifiersKey = '%s/%s' % (modifiersKey, modifierKey)
# See if we already have the modifier
foundKeyModifier = False
keyModifiers = modifiers.get(modifiersKey, [])
# Store it in case it didn't exist prior to the above call
modifiers[modifiersKey] = keyModifiers
for keyModifier in keyModifiers:
if keyModifier.get('ModifierKey') == modifiersKey:
foundKeyModifier = True
break
if not foundKeyModifier:
# Create individual modifiers
for modifierInfo in modifiersInfo:
modifier = {}
modifier['ModifierKey'] = modifiersKey
modifierDevice = modifierInfo.get('Device')
# Rewrite the device if this is a T16000M stick and we have a T16000M throttle
if modifierDevice == 'T16000M' and hasT16000MThrottle == True:
modifierDevice = 'T16000MFCS'
if modifierDevice == 'Keyboard':
modifier['Number'] = keyboardModifierNum
else:
modifier['Number'] = hotasModifierNum
modifier['Device'] = modifierDevice
modifier['DeviceIndex'] = modifierInfo.get('DeviceIndex', 0)
modifier['Key'] = modifierInfo.get('Key')
modifierKey = '%s::%s::%s' % (modifierDevice, modifierInfo.get('DeviceIndex', 0), modifierInfo.get('Key'))
updatedModifiers = modifiers.get(modifierKey, [])
updatedModifiers.append(modifier)
modifiers[modifierKey] = updatedModifiers
if '/' in modifiersKey:
# Also need to add composite modifier
modifier = {}
modifier['ModifierKey'] = modifiersKey
modifierDevice = modifierInfo.get('Device')
# Rewrite the device if this is a T16000M stick and we have a T16000M throttle
if modifierDevice == 'T16000M' and hasT16000MThrottle == True:
modifierDevice = 'T16000MFCS'
if modifierDevice == 'Keyboard':
modifier['Number'] = keyboardModifierNum
else:
modifier['Number'] = hotasModifierNum
keyModifiers.append(modifier)
if modifierInfo.get('Device') == 'Keyboard':
keyboardModifierNum = keyboardModifierNum + 1
else:
hotasModifierNum = hotasModifierNum + 1
control = controls.get(controlName)
if control is None:
logError('%s: No control for %s\n' % (runId, controlName))
control = {}
control['Group'] = 'General'
control['Name'] = controlName
control['Order'] = 999
control['HideIfSameAs'] = []
control['Type'] = 'Digital'
if control['Group'] not in displayGroups:
# The user isn't interested in this control group so drop it
continue
itemKey = '%s::%s::%s' % (device, deviceIndex, key)
deviceKey = '%s::%s' % (device, deviceIndex)
# Obtain the relevant supported device
thisDevice = None
for supportedDevice in supportedDevices.values():
if device in supportedDevice['HandledDevices']:
thisDevice = supportedDevice
break
devices[deviceKey] = thisDevice
physicalKey = physicalKeys.get(itemKey)
if physicalKey is None:
physicalKey = {}
physicalKey['Device'] = device
physicalKey['DeviceIndex'] = deviceIndex
# Get the unaltered key (might be prefixed with Neg_ or Pos_) and the mapped key
physicalKey['BaseKey'] = xmlBinding.get('Key')
physicalKey['Key'] = key
physicalKey['Binds'] = {}
physicalKeys[itemKey] = physicalKey
bind = physicalKey['Binds'].get(modifiersKey)
if bind is None:
bind = {}
bind['Controls'] = OrderedDict()
physicalKey['Binds'][modifiersKey] = bind
bind['Controls'][controlName] = control
return (physicalKeys, modifiers, devices)
def parseForm(form):
displayGroups = []
if form.getvalue('showgalaxymap'):
displayGroups.append('Galaxy map')
if form.getvalue('showheadlook'):
displayGroups.append('Head look')
if form.getvalue('showsrv'):
displayGroups.append('SRV')
if form.getvalue('showscanners'):
displayGroups.append('Scanners')
if form.getvalue('showship'):
displayGroups.append('Ship')
if form.getvalue('showui'):
displayGroups.append('UI')
if form.getvalue('showfighter'):
displayGroups.append('Fighter')
if form.getvalue('showonfoot'):
displayGroups.append('OnFoot')
if form.getvalue('showmulticrew'):
displayGroups.append('Multicrew')
if form.getvalue('showcamera'):
displayGroups.append('Camera')
if form.getvalue('showcommandercreator'):
displayGroups.append('Holo-Me')
if form.getvalue('showmisc'):
displayGroups.append('Misc')
styling = 'None' # Yes we do mean a string 'None'
if form.getvalue('styling') == 'group':
styling = 'Group'
if form.getvalue('styling') == 'category':
styling = 'Category'
if form.getvalue('styling') == 'modifier':
styling = 'Modifier'
description = form.getvalue('description')
if description is None:
description = ''
return (displayGroups, styling, description)
def determineMode(form):
deviceForBlockImage = form.getvalue('blocks')
wantList = form.getvalue('list')
wantDeviceList = form.getvalue('devicelist')
runIdToReplay = form.getvalue('replay')
description = form.getvalue('description')
if description is None:
description = ''
if len(description) > 0 and not description[0].isalnum():
mode = Mode.invalid
elif deviceForBlockImage is not None:
mode = Mode.blocks
elif wantList is not None:
mode = Mode.list
elif wantDeviceList is not None:
mode = Mode.listDevices
elif runIdToReplay is not None:
mode = Mode.replay
else:
mode = Mode.generate
return mode
def saveReplayInfo(config, description, styling, displayGroups, devices, errors):
replayInfo = {}
replayInfo['displayGroups'] = displayGroups
replayInfo['misconfigurationWarnings'] = errors.misconfigurationWarnings
replayInfo['unhandledDevicesWarnings'] = errors.unhandledDevicesWarnings
replayInfo['deviceWarnings'] = errors.deviceWarnings
replayInfo['styling'] = styling
replayInfo['description'] = description
replayInfo['timestamp'] = datetime.datetime.now(datetime.timezone.utc)
replayInfo['devices'] = devices
replayPath = config.pathWithSuffix('.replay')
with replayPath.open('wb') as pickleFile:
pickle.dump(replayInfo, pickleFile)
def parseLocalFile(filePath):
displayGroups = groupStyles.keys()
styling = 'None' # Yes we do mean a string 'None'
config = Config('000000')
errors = Errors()
with filePath.open() as f:
xml = f.read()
(physicalKeys, modifiers, devices) = parseBindings(config.name, xml, displayGroups, errors)
return ((physicalKeys, modifiers, devices), errors)
# API section
def processForm(form):
config = Config.newRandom()
styling = 'None'
description = ''
options = {}
public = False
createdImages = []
errors = Errors()
deviceForBlockImage = form.getvalue('blocks')
mode = determineMode(form)
if mode is Mode.invalid:
        errors.errors = '<h1>That is not a valid description. Leading punctuation is not allowed.</h1>'
xml = '<root></root>'
elif mode is Mode.blocks:
try:
deviceForBlockImage = form.getvalue('blocks')
createBlockImage(deviceForBlockImage)
except KeyError:
errors.errors = '<h1>%s is not a supported controller.</h1>' % deviceForBlockImage
xml = '<root></root>'
createdImages = []
elif mode is Mode.replay:
fileitem = {}
runId = form.getvalue('replay')
public = True
try:
config = Config(runId)
bindsPath = config.pathWithSuffix('.binds')
replayPath = config.pathWithSuffix('.replay')
            if not (bindsPath.exists() and replayPath.exists()):
raise FileNotFoundError
with codecs.open(str(bindsPath), 'r', 'utf-8') as fileInput:
xml = fileInput.read()
try:
with replayPath.open("rb") as pickleFile:
replayInfo = pickle.load(pickleFile)
displayGroups = replayInfo.get('displayGroups', ['Galaxy map', 'General', 'Head look', 'SRV', 'Ship', 'UI'])
errors.misconfigurationWarnings = replayInfo.get('misconfigurationWarnings', replayInfo.get('warnings', ''))
errors.deviceWarnings = replayInfo.get('deviceWarnings', '')
errors.unhandledDevicesWarnings = ''
styling = replayInfo.get('styling', 'None')
description = replayInfo.get('description', '')
timestamp = replayInfo.get('timestamp')
# devices = replayInfo['devices']
except FileNotFoundError:
displayGroups = ['Galaxy map', 'General', 'Head look', 'SRV', 'Ship', 'UI']
except (ValueError, FileNotFoundError):
errors.errors = '<h1>Configuration "%s" not found</h1>' % runId
displayGroups = ['Galaxy map', 'General', 'Head look', 'SRV', 'Ship', 'UI']
xml = '<root></root>'
elif mode is Mode.generate:
config = Config.newRandom()
config.makeDir()
runId = config.name
displayGroups = []
(displayGroups, styling, description) = parseForm(form)
xml = form.getvalue('bindings')
if xml is None or xml == b'':
errors.errors = '<h1>No bindings file supplied; please go back and select your binds file as per the instructions.</h1>'
xml = '<root></root>'
else:
xml = xml.decode(encoding='utf-8')
bindsPath = config.pathWithSuffix('.binds')
with codecs.open(str(bindsPath), 'w', 'utf-8') as xmlOutput:
xmlOutput.write(xml)
public = len(description) > 0
elif mode is Mode.list:
deviceFilters = form.getvalue("deviceFilter", [])
if deviceFilters:
if type(deviceFilters) is not type([]):
deviceFilters = [ deviceFilters ]
options['controllers'] = set(deviceFilters)
if mode is Mode.replay or mode is Mode.generate:
(physicalKeys, modifiers, devices) = parseBindings(runId, xml, displayGroups, errors)
alreadyHandledDevices = []
createdImages = []
for supportedDeviceKey, supportedDevice in supportedDevices.items():
if supportedDeviceKey == 'Keyboard':
# We handle the keyboard separately below
continue
for deviceIndex in [0, 1]:
# See if we handle this device
handled = False
for handledDevice in supportedDevice.get('KeyDevices', supportedDevice.get('HandledDevices')):
if devices.get('%s::%s' % (handledDevice, deviceIndex)) is not None:
handled = True
break
if handled is True:
# See if we have any new bindings for this device
hasNewBindings = False
for device in supportedDevice.get('KeyDevices', supportedDevice.get('HandledDevices')):
deviceKey = '%s::%s' % (device, deviceIndex)
if deviceKey not in alreadyHandledDevices:
hasNewBindings = True
break
if hasNewBindings is True:
createHOTASImage(physicalKeys, modifiers, supportedDevice['Template'], supportedDevice['HandledDevices'], 40, config, public, styling, deviceIndex, errors.misconfigurationWarnings)
createdImages.append('%s::%s' % (supportedDeviceKey, deviceIndex))
for handledDevice in supportedDevice['HandledDevices']:
alreadyHandledDevices.append('%s::%s' % (handledDevice, deviceIndex))
if devices.get('Keyboard::0') is not None:
appendKeyboardImage(createdImages, physicalKeys, modifiers, displayGroups, runId, public)
for deviceKey, device in devices.items():
# Arduino Leonardo is used for head tracking so ignore it, along with vJoy (Tobii Eyex) and 16D00AEA (EDTracker)
            if device is None and deviceKey not in ('Mouse::0', 'ArduinoLeonardo::0', 'vJoy::0', 'vJoy::1', '16D00AEA::0'):
logError('%s: found unsupported device %s\n' % (runId, deviceKey))
if errors.unhandledDevicesWarnings == '':
errors.unhandledDevicesWarnings = '<h1>Unknown controller detected</h1>You have a device that is not supported at this time. Please report details of your device by following the link at the bottom of this page supplying the reference "%s" and we will attempt to add support for it.' % runId
if device is not None and 'ThrustMasterWarthogCombined' in device['HandledDevices'] and errors.deviceWarnings == '':
errors.deviceWarnings = '<h2>Mapping Software Detected</h2>You are using the ThrustMaster TARGET software. As a result it is possible that not all of the controls will show up. If you have missing controls then you should remove the mapping from TARGET and map them using Elite\'s own configuration UI.'
if len(createdImages) == 0 and errors.misconfigurationWarnings == '' and errors.unhandledDevicesWarnings == '' and errors.errors == '':
errors.errors = '<h1>The file supplied does not have any bindings for a supported controller or keyboard.</h1>'
# Save variables for later replays
if (mode is Mode.generate and public):
saveReplayInfo(config, description, styling, displayGroups, devices, errors)
printHTML(mode, options, config, public, createdImages, deviceForBlockImage, errors)
def logError(message):
    sys.stderr.write("EDRefCard: %s" % message)
def main():
cgitb.enable()
form = cgi.FieldStorage()
processForm(form)
if __name__ == '__main__':
main()
| mit | 4,428,225,583,223,794,000 | 44.275616 | 372 | 0.579839 | false |
google/deluca | deluca/envs/classic/_cartpole.py | 1 | 1907 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jax.numpy as jnp
from deluca.core import Env
from deluca.core import field
from deluca.core import Obj
def AB(m, M, l, g, dt):
return ()
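# NOTE: AB() above is an unused stub; the (A, B) matrices are built inline in
# Cartpole.__call__ below.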
class CartpoleState(Obj):
arr: jnp.ndarray = field(trainable=True)
h: int = field(0, trainable=False)
offset: float = field(0.0, trainable=False)
class Cartpole(Env):
m: float = field(0.1, trainable=False)
M: float = field(1.0, trainable=False)
l: float = field(1.0, trainable=False)
g: float = field(9.81, trainable=False)
dt: float = field(0.02, trainable=False)
H: int = field(10, trainable=False)
goal_state: jnp.ndarray = field(jnp.array([0.0, 0.0, 0.0, 0.0]), trainable=False)
dynamics: bool = field(False, trainable=False)
def init(self):
return CartpoleState(arr=jnp.array([0.0, 0.0, 0.0, 0.0]))
def __call__(self, state, action):
A = jnp.array(
[
[1.0, 0.0, self.dt, 0.0],
[0.0, 1.0, 0.0, self.dt],
[0.0, self.dt * self.m * self.g / self.M, 1.0, 0.0],
[0.0, self.dt * (self.m + self.M) * self.g / (self.M * self.l), 0.0, 1.0],
]
)
        B = jnp.array([[0.0], [0.0], [self.dt / self.M], [self.dt / (self.M * self.l)]])
return state.replace(arr=A @ state.arr + B @ (action + state.offset))
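# The update above implements the discretised, linearised cartpole dynamics
# x_{t+1} = A x_t + B (u_t + offset), with the state ordered as
# [position, angle, velocity, angular velocity].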
| apache-2.0 | 921,190,591,304,049,000 | 33.672727 | 91 | 0.618249 | false |
JulienCote/advent_of_code_2015 | day22/wizard_battle.py | 1 | 3890 | import re
import time
lowest_cost = 9999999999999
best_spells = []
# MUCH faster than deepcopy
def spell_copy(spells):
return {'recharge' : [x for x in spells['recharge']],
'poison' : [x for x in spells['poison']],
'shield' : [x for x in spells['shield']],
'magic missile' : [x for x in spells['magic missile']],
'drain' : [x for x in spells['drain']]}
def cast(player, boss, spells, spell):
if spells[spell][2] > 0 or spells[spell][0] > player[1]:
return -1
if spell == 'magic missile':
boss[0] -= 4
player[1] -= spells[spell][0]
return spells[spell][0]
elif spell == 'drain':
boss[0] -= 2
player[0] += 2
player[1] -= spells[spell][0]
return spells[spell][0]
elif spell == 'shield':
player[1] -= spells[spell][0]
spells[spell][2] = spells[spell][1]
return spells[spell][0]
elif spell == 'poison':
player[1] -= spells[spell][0]
spells[spell][2] = spells[spell][1]
return spells[spell][0]
elif spell == 'recharge':
player[1] -= spells[spell][0]
spells[spell][2] = spells[spell][1]
return spells[spell][0]
def apply_effects(player, boss, spells):
if spells['shield'][2] > 0:
player[2] = 7
spells['shield'][2] -= 1
if spells['poison'][2] > 0:
boss[0] -= 3
spells['poison'][2] -= 1
if spells['recharge'][2] > 0:
player[1] += 101
spells['recharge'][2] -= 1
def remove_effects(player, spells):
if spells['shield'][2] == 0:
player[2] = 0
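# State layout reminder: player = [hp, mana, armor], boss = [hp, damage] and
# spells[name] = [mana cost, duration, remaining timer].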
def play(player, boss, spells, hard_game=False, mana_used=0, is_player_turn=True, used_spells=[]):
global lowest_cost
global best_spells
if mana_used >= lowest_cost or player[0] <= 0: #check for win/lose or if the current game is worse than one done in the past
return
elif boss[0] <= 0:
lowest_cost = mana_used
best_spells = used_spells
return
if hard_game and is_player_turn: #health penalty for playing on hard
player[0] -= 1
apply_effects(player, boss, spells) #apply passive effects if applicable
if player[0] <= 0: #check for win/lose again
return
elif boss[0] <= 0:
lowest_cost = mana_used
best_spells = used_spells
return
if is_player_turn:
for spell in ['poison', 'recharge', 'shield', 'drain', 'magic missile']: # try every spell
new_player = [x for x in player]
new_boss = [x for x in boss]
new_spells = spell_copy(spells)
cost = cast(new_player, new_boss, new_spells, spell)
if cost == -1:
continue
remove_effects(new_player, spells) #remove the effect of shield, quick and dirty implementation
play(new_player, new_boss, new_spells, hard_game, cost + mana_used, False, used_spells + [spell]) #next turn -> boss
else: #boss turn
new_player = [x for x in player]
new_player[0] -= max(boss[1] - player[2], 1)
remove_effects(new_player, spells)
play(new_player, boss, spells, hard_game, mana_used, True, used_spells) #next turn -> player
with file('input.txt') as input_file:
spells = {'recharge' : [229, 5, 0], 'poison' : [173, 6, 0], 'shield' : [113, 6, 0], 'magic missile' : [53, 0, 0], 'drain' : [73, 0, 0]}
boss_stats = [] #hitpoint, damage
player_stats = [50, 500, 0] #hitpoint, mana, armor
for line in input_file:
boss_stats.append(int(re.search('(\d+)', line).group(1)))
start_time = time.time()
play([x for x in player_stats], [x for x in boss_stats], spell_copy(spells), False)
print 'To beat the boss on normal, it took this much mana:', lowest_cost
print 'These are the spells used, in order:', best_spells
print 'It took this many seconds to figure all of this out:', time.time() - start_time
start_time = time.time()
lowest_cost = 99999999
best_spells = []
play([x for x in player_stats], [x for x in boss_stats], spell_copy(spells), True)
print 'To beat the boss on hard, it took this much mana:', lowest_cost
print 'These are the spells used, in order:', best_spells
print 'It took this many seconds to figure all of this out:', time.time() - start_time | mit | 5,469,189,831,190,145,000 | 30.128 | 136 | 0.651414 | false |
brapastor/djdiscuss | discuss/discuss/apps/discuss/migrations/0001_initial.py | 1 | 2497 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-22 05:10
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.TextField()),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.TextField()),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('title', models.CharField(max_length=200)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=10)),
],
),
migrations.AddField(
model_name='question',
name='tag',
field=models.ManyToManyField(to='discuss.Tag'),
),
migrations.AddField(
model_name='question',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='answer',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='discuss.Question'),
),
migrations.AddField(
model_name='answer',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| mit | -6,811,147,327,068,727,000 | 34.169014 | 114 | 0.548258 | false |
choderalab/perses | perses/utils/openeye.py | 1 | 17484 | """
Utility functions for simulations using openeye toolkits
"""
__author__ = 'John D. Chodera'
from simtk import unit
from simtk.openmm import app
import simtk.unit as unit
import numpy as np
import logging
logging.basicConfig(level=logging.NOTSET)
_logger = logging.getLogger("utils.openeye")
_logger.setLevel(logging.INFO)
def system_generator_wrapper(oemols,
barostat = None,
forcefield_files = ['amber14/protein.ff14SB.xml', 'amber14/tip3p.xml'],
forcefield_kwargs = {'removeCMMotion': False, 'ewaldErrorTolerance': 1e-4, 'constraints' : app.HBonds, 'hydrogenMass' : 4 * unit.amus},
nonperiodic_forcefield_kwargs = {'nonbondedMethod': app.NoCutoff},
small_molecule_forcefield = 'gaff-2.11',
**kwargs
):
"""
make a system generator (vacuum) for a small molecule
Parameters
----------
oemols : list of openeye.oechem.OEMol
oemols
barostat : openmm.MonteCarloBarostat, default None
barostat
forcefield_files : list of str
pointers to protein forcefields and solvent
forcefield_kwargs : dict
dict of forcefield_kwargs
nonperiodic_forcefield_kwargs : dict
dict of args for non-periodic system
small_molecule_forcefield : str
pointer to small molecule forcefield to use
Returns
-------
system_generator : openmmforcefields.generators.SystemGenerator
"""
from openff.toolkit.topology import Molecule
from openmmforcefields.generators import SystemGenerator
from openeye import oechem
system_generator = SystemGenerator(forcefields = forcefield_files, barostat=barostat, forcefield_kwargs=forcefield_kwargs,nonperiodic_forcefield_kwargs=nonperiodic_forcefield_kwargs,
small_molecule_forcefield = small_molecule_forcefield, molecules=[Molecule.from_openeye(oemol) for oemol in oemols], cache=None)
return system_generator
def smiles_to_oemol(smiles, title='MOL', max_confs=1):
"""
Generate an oemol from a SMILES string
Parameters
----------
smiles : str
SMILES string of molecule
title : str, default 'MOL'
title of OEMol molecule
max_confs : int, default 1
maximum number of conformers to generate
Returns
-------
molecule : openeye.oechem.OEMol
OEMol object of the molecule
"""
from openeye import oeomega, oechem
# Create molecule
molecule = oechem.OEMol()
oechem.OESmilesToMol(molecule, smiles)
# create unique atom names
    if len([atom.GetName() for atom in molecule.GetAtoms()]) > len(set([atom.GetName() for atom in molecule.GetAtoms()])):
        # the atom names are not unique
        molecule = generate_unique_atom_names(molecule)
# Set title.
molecule.SetTitle(title)
# Assign aromaticity and hydrogens.
oechem.OEAssignAromaticFlags(molecule, oechem.OEAroModelOpenEye)
oechem.OEAssignHybridization(molecule)
oechem.OEAddExplicitHydrogens(molecule)
oechem.OEPerceiveChiral(molecule)
# Create atom names.
oechem.OETriposAtomNames(molecule)
oechem.OETriposBondTypeNames(molecule)
# perceive chirality before attempting omega geometry proposal
assert oechem.OEPerceiveChiral(molecule), f"chirality perception failed"
# Assign geometry
omega = oeomega.OEOmega()
omega.SetMaxConfs(max_confs)
omega.SetIncludeInput(False)
omega.SetStrictStereo(True)
omega(molecule)
return molecule
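# A minimal usage sketch (requires an OpenEye licence and the toolkits):
#   mol = smiles_to_oemol('CCO', title='ethanol')
#   positions = extractPositionsFromOEMol(mol)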
def iupac_to_oemol(iupac, title='MOL', max_confs=1):
"""
Generate an oemol from an IUPAC name
Parameters
----------
iupac : str
iupac name of molecule
title : str, default 'MOL'
title of OEMol molecule
max_confs : int, default 1
maximum number of conformers to generate
Returns
-------
molecule : openeye.oechem.OEMol
OEMol object of the molecule
"""
from openeye import oeiupac, oeomega, oechem
# Create molecule
molecule = oechem.OEMol()
oeiupac.OEParseIUPACName(molecule, iupac)
# Set title.
molecule.SetTitle(title)
# Assign aromaticity and hydrogens.
oechem.OEAssignAromaticFlags(molecule, oechem.OEAroModelOpenEye)
oechem.OEAssignHybridization(molecule)
oechem.OEAddExplicitHydrogens(molecule)
oechem.OEPerceiveChiral(molecule)
# Create atom names.
oechem.OETriposAtomNames(molecule)
oechem.OETriposBondTypeNames(molecule)
# Assign geometry
omega = oeomega.OEOmega()
omega.SetMaxConfs(max_confs)
omega.SetIncludeInput(False)
omega.SetStrictStereo(True)
omega(molecule)
return molecule
def extractPositionsFromOEMol(molecule,units=unit.angstrom):
"""
Get a molecules coordinates from an openeye.oemol
Parameters
----------
molecule : openeye.oechem.OEMol object
units : simtk.unit, default angstrom
Returns
-------
positions : np.array
"""
positions = unit.Quantity(np.zeros([molecule.NumAtoms(), 3], np.float32), units)
coords = molecule.GetCoords()
for index in range(molecule.NumAtoms()):
positions[index,:] = unit.Quantity(coords[index], units)
return positions
def giveOpenmmPositionsToOEMol(positions, molecule):
"""
Replace OEMol positions with openmm format positions
Parameters
----------
positions : openmm.topology.positions
molecule : openeye.oechem.OEMol object
Returns
-------
molecule : openeye.oechem.OEMol
molecule with updated positions
"""
assert molecule.NumAtoms() == len(positions), "Number of openmm positions does not match number of atoms in OEMol object"
coords = molecule.GetCoords()
for key in coords.keys(): # openmm in nm, openeye in A
coords[key] = (positions[key][0]/unit.angstrom,positions[key][1]/unit.angstrom,positions[key][2]/unit.angstrom)
molecule.SetCoords(coords)
return molecule
def OEMol_to_omm_ff(molecule, system_generator):
"""
Convert an openeye.oechem.OEMol to a openmm system, positions and topology
Parameters
----------
oemol : openeye.oechem.OEMol object
input molecule to convert
system_generator : openmmforcefields.generators.SystemGenerator
Returns
-------
system : openmm.system
positions : openmm.positions
topology : openmm.topology
"""
from openmoltools.forcefield_generators import generateTopologyFromOEMol
topology = generateTopologyFromOEMol(molecule)
system = system_generator.create_system(topology)
positions = extractPositionsFromOEMol(molecule)
return system, positions, topology
def createSystemFromIUPAC(iupac_name, title="MOL", **system_generator_kwargs):
"""
Create an openmm system out of an oemol
Parameters
----------
iupac_name : str
IUPAC name
Returns
-------
molecule : openeye.oechem.OEMol
OEMol molecule
system : openmm.System object
OpenMM system
positions : [n,3] np.array of floats
Positions
topology : openmm.app.Topology object
Topology
"""
from openeye import oechem
from openmoltools.openeye import generate_conformers
# Create OEMol
# TODO write our own of this function so we can be
# sure of the oe flags that are being used
molecule = iupac_to_oemol(iupac_name, title=title)
molecule = generate_conformers(molecule, max_confs=1)
system_generator = system_generator_wrapper([molecule], **system_generator_kwargs)
# generate openmm system, positions and topology
system, positions, topology = OEMol_to_omm_ff(molecule, system_generator)
return (molecule, system, positions, topology)
def createSystemFromSMILES(smiles,title='MOL', **system_generator_kwargs):
"""
Create an openmm system from a smiles string
Parameters
----------
smiles : str
smiles string of molecule
Returns
-------
molecule : openeye.oechem.OEMol
OEMol molecule
system : openmm.System object
OpenMM system
positions : [n,3] np.array of floats
Positions
topology : openmm.app.Topology object
Topology
"""
# clean up smiles string
from perses.utils.smallmolecules import sanitizeSMILES
smiles = sanitizeSMILES([smiles])
smiles = smiles[0]
# Create OEMol
molecule = smiles_to_oemol(smiles, title=title)
system_generator = system_generator_wrapper([molecule], **system_generator_kwargs)
# generate openmm system, positions and topology
system, positions, topology = OEMol_to_omm_ff(molecule, system_generator)
return (molecule, system, positions, topology)
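# Usage sketch (the SMILES below is illustrative):
#   molecule, system, positions, topology = createSystemFromSMILES('c1ccccc1')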
def describe_oemol(mol):
"""
Render the contents of an OEMol to a string.
Parameters
----------
mol : OEMol
Molecule to describe
Returns
-------
description : str
The description
"""
#TODO this needs a test
description = ""
description += "ATOMS:\n"
for atom in mol.GetAtoms():
description += "%8d %5s %5d\n" % (atom.GetIdx(), atom.GetName(), atom.GetAtomicNum())
description += "BONDS:\n"
for bond in mol.GetBonds():
description += "%8d %8d\n" % (bond.GetBgnIdx(), bond.GetEndIdx())
return description
def createOEMolFromSDF(sdf_filename, index=0, add_hydrogens=True, allow_undefined_stereo=False):
"""
# TODO change this to return a list of all the mols if required
Load an SDF file into an OEMol. Since SDF files can contain multiple
molecules, an index can be provided as well.
Parameters
----------
sdf_filename : str
The name of the SDF file
index : int, default 0
The index of the molecule in the SDF file
allow_undefined_stereo : bool, default=False
        whether to skip stereo perception
Returns
-------
mol : openeye.oechem.OEMol object
The loaded oemol object
"""
from openeye import oechem
# TODO this needs a test
ifs = oechem.oemolistream()
ifs.open(sdf_filename)
# get the list of molecules
mol_list = [oechem.OEMol(mol) for mol in ifs.GetOEMols()]
# we'll always take the first for now
# pick out molecule of interest
molecule = mol_list[index]
# Generate unique atom names
if len([atom.GetName() for atom in molecule.GetAtoms()]) > len(set([atom.GetName() for atom in molecule.GetAtoms()])):
molecule = generate_unique_atom_names(molecule)
# Assign aromaticity and hydrogens.
oechem.OEAssignAromaticFlags(molecule, oechem.OEAroModelOpenEye)
oechem.OEAssignHybridization(molecule)
if add_hydrogens:
oechem.OEAddExplicitHydrogens(molecule)
oechem.OEPerceiveChiral(molecule)
# perceive chirality
if not allow_undefined_stereo:
assert oechem.OE3DToInternalStereo(molecule), f"the stereochemistry perception from 3D coordinates failed"
assert not has_undefined_stereocenters(molecule), f"there is an atom with an undefined stereochemistry"
return molecule
def calculate_mol_similarity(molA, molB):
"""
Function to calculate the similarity between two oemol objects
    should be moved to utils/openeye.py or openmoltools
:param molA: oemol object of molecule A
:param molB: oemol object of molecule B
:return: float, tanimoto score of the two molecules, between 0 and 1
"""
from openeye import oegraphsim
fpA = oegraphsim.OEFingerPrint()
fpB = oegraphsim.OEFingerPrint()
oegraphsim.OEMakeFP(fpA, molA, oegraphsim.OEFPType_MACCS166)
oegraphsim.OEMakeFP(fpB, molB, oegraphsim.OEFPType_MACCS166)
return oegraphsim.OETanimoto(fpA, fpB)
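# e.g. calculate_mol_similarity(mol, mol) returns 1.0, since the Tanimoto
# score of identical MACCS fingerprints is 1.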
def createSMILESfromOEMol(molecule):
from openeye import oechem
smiles = oechem.OECreateSmiString(molecule,
oechem.OESMILESFlag_DEFAULT |
oechem.OESMILESFlag_Hydrogens)
return smiles
def generate_unique_atom_names(molecule):
"""
Check if an oemol has unique atom names, and if not, then assigns them
Parameters
----------
molecule : openeye.oechem.OEMol object
oemol object to check
Returns
-------
molecule : openeye.oechem.OEMol object
        oemol, either unchanged if atom names were
        already unique, or with newly generated atom names
"""
from openeye import oechem
atom_names = []
atom_count = 0
for atom in molecule.GetAtoms():
atom_names.append(atom.GetName())
atom_count += 1
if len(set(atom_names)) == atom_count:
# one name per atom therefore unique
_logger.info(f'molecule {molecule.GetTitle()} \
has unique atom names already')
return molecule
else:
# generating new atom names
from collections import defaultdict
try:
from openmm.app.element import Element
except ModuleNotFoundError: # <=7.5.0
from simtk.openmm.app import Element
_logger.info(f'molecule {molecule.GetTitle()} \
does not have unique atom names. Generating now...')
element_counts = defaultdict(int)
for atom in molecule.GetAtoms():
element = Element.getByAtomicNumber(atom.GetAtomicNum())
element_counts[element._symbol] += 1
name = element._symbol + str(element_counts[element._symbol])
atom.SetName(name)
return molecule
def has_undefined_stereocenters(mol):
"""
Check that _if_ a molecule has a stereocenter,
the stereochemistry is defined
if no stereocenter then will return False too
Parameters
----------
molecule : openeye.oechem.OEMol object
oemol object to check
Returns
-------
bool : True if undefined Stereochemistry
False if no stereochemistry or all stereocenter's are labelled
"""
from openeye import oechem
assert oechem.OEPerceiveChiral(mol), f"chirality perception failed"
for atom in mol.GetAtoms():
if atom.IsChiral():
if not atom.HasStereoSpecified():
return True # we have a stereocenter with no stereochemistry!
for bond in mol.GetBonds():
if bond.IsChiral():
if not bond.HasStereoSpecified():
return True # we have a geometric isomer that isn't specified!
return False # nothing bad found
def generate_expression(strings):
"""Turns a list of strings into an oechem atom or bond expression
This allows us to pass in matching expressions in the input .yaml
Note: strings are case sensitive
    >>> atom_expr = generate_expression(["Hybridization", "IntType"])
    Parameters
    ----------
    strings : list of str
        List of OEExprOpts option names, e.g. ["Hybridization", "IntType"]
Returns
-------
integer
Integer that openeye magically understands for matching expressions
"""
from openeye import oechem
total_expr = 0
    for string in strings:
try:
expr = getattr(oechem, f'OEExprOpts_{string}')
except AttributeError:
raise Exception(f'{string} not recognised, no expression of oechem.OEExprOpts_{string}.\
This is case sensitive, so please check carefully and see , \
https://docs.eyesopen.com/toolkits/python/oechemtk/OEChemConstants/OEExprOpts.html\
for options')
# doing bitwise OR check
total_expr = total_expr | expr
return total_expr
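# Hedged usage sketch: OR two OEExprOpts flags together by name. The option
# strings below come from the OpenEye docs linked above; they are examples,
# not values used elsewhere in this module.
# atom_expr = generate_expression(["Hybridization", "IntType"])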
def get_scaffold(molecule, adjustHcount=False):
"""
Takes an openeye.oechem.oemol and returns
an openeye.oechem.oemol of the scaffold
The scaffold is a molecule where all the atoms that are not in rings, and are not linkers between rings.
double bonded atoms exo to a ring are included as ring atoms
This function has been completely taken from openeye's extractscaffold.py script
https://docs.eyesopen.com/toolkits/python/oechemtk/oechem_examples/oechem_example_extractscaffold.html#section-example-oechem-extractscaffold
Parameters
----------
mol : openeye.oechem.oemol
entire molecule to get the scaffold of
adjustHcount : bool, default=False
add/remove hydrogens to satisfy valence of scaffold
Returns
-------
openeye.oechem.oemol
scaffold oemol of the input mol. New oemol.
"""
from openeye import oechem
def TraverseForRing(visited, atom):
visited.add(atom.GetIdx())
for nbor in atom.GetAtoms():
if nbor.GetIdx() not in visited:
if nbor.IsInRing():
return True
if TraverseForRing(visited, nbor):
return True
return False
def DepthFirstSearchForRing(root, nbor):
visited = set()
visited.add(root.GetIdx())
return TraverseForRing(visited, nbor)
class IsInScaffold(oechem.OEUnaryAtomPred):
def __call__(self, atom):
if atom.IsInRing():
return True
count = 0
for nbor in atom.GetAtoms():
if DepthFirstSearchForRing(atom, nbor):
count += 1
return count > 1
dst = oechem.OEMol()
pred = IsInScaffold()
oechem.OESubsetMol(dst, molecule, pred, adjustHcount)
return dst
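# Hedged example (names are assumptions): extract the scaffold of an OEMol
# loaded with one of the helpers above and render it back to SMILES.
# scaffold = get_scaffold(mol, adjustHcount=True)
# print(createSMILESfromOEMol(scaffold))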
| mit | 4,699,503,931,967,505,000 | 29.406957 | 186 | 0.658259 | false |
gerruz/DeepConvSep | examples/hiphopss/trainCNN.py | 1 | 19338 | """
This file is part of DeepConvSep.
Copyright (c) 2014-2017 Marius Miron <miron.marius at gmail.com>
DeepConvSep is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
DeepConvSep is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with DeepConvSep. If not, see <http://www.gnu.org/licenses/>.
"""
import os,sys
import transform
from transform import transformFFT
import dataset
from dataset import LargeDataset
import util
import numpy as np
import re
from scipy.signal import blackmanharris as blackmanharris
import shutil
import time
import cPickle
import re
import climate
import ConfigParser
import theano
import theano.tensor as T
import theano.sandbox.rng_mrg
import lasagne
from lasagne.layers import ReshapeLayer,Layer
from lasagne.init import Normal
from lasagne.regularization import regularize_layer_params_weighted, l2, l1
from lasagne.regularization import regularize_layer_params
logging = climate.get_logger('trainer')
climate.enable_default_logging()
def load_model(filename):
f=file(filename,'rb')
params=cPickle.load(f)
f.close()
return params
def save_model(filename, model):
params=lasagne.layers.get_all_param_values(model)
f = file(filename, 'wb')
cPickle.dump(params,f,protocol=cPickle.HIGHEST_PROTOCOL)
f.close()
return None
def build_ca(input_var=None, batch_size=32,time_context=30,feat_size=513):
"""
Builds a network with lasagne
Parameters
----------
input_var : Theano tensor
The input for the network
batch_size : int, optional
The number of examples in a batch
time_context : int, optional
The time context modeled by the network.
feat_size : int, optional
The feature size modeled by the network (last dimension of the feature vector)
Yields
------
l_out : Theano tensor
The output of the network
"""
input_shape=(batch_size,1,time_context,feat_size)
#input layer
l_in_1 = lasagne.layers.InputLayer(shape=input_shape, input_var=input_var)
#vertical convolution layer
l_conv1 = lasagne.layers.Conv2DLayer(l_in_1, num_filters=50, filter_size=(1,feat_size),stride=(1,1), pad='valid', nonlinearity=None)
l_conv1b= lasagne.layers.BiasLayer(l_conv1)
#horizontal convolution layer
l_conv2 = lasagne.layers.Conv2DLayer(l_conv1b, num_filters=50, filter_size=(int(time_context/2),1),stride=(1,1), pad='valid', nonlinearity=None)
l_conv2b= lasagne.layers.BiasLayer(l_conv2)
#bottlneck layer
l_fc=lasagne.layers.DenseLayer(l_conv2b,128)
#build output for source1
l_fc11=lasagne.layers.DenseLayer(l_fc,l_conv2.output_shape[1]*l_conv2.output_shape[2]*l_conv2.output_shape[3])
l_reshape1 = lasagne.layers.ReshapeLayer(l_fc11,(batch_size,l_conv2.output_shape[1],l_conv2.output_shape[2], l_conv2.output_shape[3]))
l_inverse11=lasagne.layers.InverseLayer(l_reshape1, l_conv2)
l_inverse41=lasagne.layers.InverseLayer(l_inverse11, l_conv1)
#build output for source2
l_fc12=lasagne.layers.DenseLayer(l_fc,l_conv2.output_shape[1]*l_conv2.output_shape[2]*l_conv2.output_shape[3])
l_reshape2 = lasagne.layers.ReshapeLayer(l_fc12,(batch_size,l_conv2.output_shape[1],l_conv2.output_shape[2], l_conv2.output_shape[3]))
l_inverse12=lasagne.layers.InverseLayer(l_reshape2, l_conv2)
l_inverse42=lasagne.layers.InverseLayer(l_inverse12, l_conv1)
#build output for source3
l_fc13=lasagne.layers.DenseLayer(l_fc,l_conv2.output_shape[1]*l_conv2.output_shape[2]*l_conv2.output_shape[3])
l_reshape3 = lasagne.layers.ReshapeLayer(l_fc13,(batch_size,l_conv2.output_shape[1],l_conv2.output_shape[2], l_conv2.output_shape[3]))
l_inverse13=lasagne.layers.InverseLayer(l_reshape3, l_conv2)
l_inverse43=lasagne.layers.InverseLayer(l_inverse13, l_conv1)
#build output for source4
l_fc14=lasagne.layers.DenseLayer(l_fc,l_conv2.output_shape[1]*l_conv2.output_shape[2]*l_conv2.output_shape[3])
    l_reshape4 = lasagne.layers.ReshapeLayer(l_fc14,(batch_size,l_conv2.output_shape[1],l_conv2.output_shape[2], l_conv2.output_shape[3]))
l_inverse14=lasagne.layers.InverseLayer(l_reshape4, l_conv2)
l_inverse44=lasagne.layers.InverseLayer(l_inverse14, l_conv1)
#build final output
l_merge=lasagne.layers.ConcatLayer([l_inverse41,l_inverse42,l_inverse43,l_inverse44],axis=1)
l_out = lasagne.layers.NonlinearityLayer(lasagne.layers.BiasLayer(l_merge), nonlinearity=lasagne.nonlinearities.rectify)
return l_out
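# Hedged usage sketch: instantiate the network symbolically. The shapes match
# the defaults above; they are illustrative, not taken from a real run.
# x = T.tensor4('inputs')
# net = build_ca(input_var=x, batch_size=32, time_context=30, feat_size=513)
# print(lasagne.layers.get_output_shape(net))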
def train_auto(train,fun,transform,testdir,outdir,num_epochs=30,model="1.pkl",scale_factor=0.3,load=False,skip_train=False,skip_sep=False):
"""
Trains a network built with \"fun\" with the data generated with \"train\"
and then separates the files in \"testdir\",writing the result in \"outdir\"
Parameters
----------
train : Callable, e.g. LargeDataset object
The callable which generates training data for the network: inputs, target = train()
fun : lasagne network object, Theano tensor
The network to be trained
transform : transformFFT object
The Transform object which was used to compute the features (see compute_features.py)
testdir : string, optional
The directory where the files to be separated are located
outdir : string, optional
The directory where to write the separated files
num_epochs : int, optional
        The number of epochs to train for (one epoch is when all examples in the dataset are seen by the network)
model : string, optional
The path where to save the trained model (theano tensor containing the network)
scale_factor : float, optional
Scale the magnitude of the files to be separated with this factor
Yields
------
losser : list
The losses for each epoch, stored in a list
"""
logging.info("Building Autoencoder")
input_var2 = T.tensor4('inputs')
target_var2 = T.tensor4('targets')
rand_num = T.tensor4('rand_num')
eps=1e-8
alpha=0.001
beta=0.01
beta_voc=0.03
network2 = fun(input_var=input_var2,batch_size=train.batch_size,time_context=train.time_context,feat_size=train.input_size)
if load:
params=load_model(model)
lasagne.layers.set_all_param_values(network2,params)
prediction2 = lasagne.layers.get_output(network2, deterministic=True)
rand_num = np.random.uniform(size=(train.batch_size,1,train.time_context,train.input_size))
voc=prediction2[:,0:1,:,:]+eps*rand_num
bas=prediction2[:,1:2,:,:]+eps*rand_num
dru=prediction2[:,2:3,:,:]+eps*rand_num
oth=prediction2[:,3:4,:,:]+eps*rand_num
mask1=voc/(voc+bas+dru+oth)
mask2=bas/(voc+bas+dru+oth)
mask3=dru/(voc+bas+dru+oth)
mask4=oth/(voc+bas+dru+oth)
vocals=mask1*input_var2
bass=mask2*input_var2
drums=mask3*input_var2
others=mask4*input_var2
train_loss_recon_vocals = lasagne.objectives.squared_error(vocals,target_var2[:,0:1,:,:])
alpha_component = alpha*lasagne.objectives.squared_error(vocals,target_var2[:,1:2,:,:])
alpha_component += alpha*lasagne.objectives.squared_error(vocals,target_var2[:,2:3,:,:])
train_loss_recon_neg_voc = beta_voc*lasagne.objectives.squared_error(vocals,target_var2[:,3:4,:,:])
train_loss_recon_bass = lasagne.objectives.squared_error(bass,target_var2[:,1:2,:,:])
alpha_component += alpha*lasagne.objectives.squared_error(bass,target_var2[:,0:1,:,:])
alpha_component += alpha*lasagne.objectives.squared_error(bass,target_var2[:,2:3,:,:])
train_loss_recon_neg = beta*lasagne.objectives.squared_error(bass,target_var2[:,3:4,:,:])
train_loss_recon_drums = lasagne.objectives.squared_error(drums,target_var2[:,2:3,:,:])
alpha_component += alpha*lasagne.objectives.squared_error(drums,target_var2[:,0:1,:,:])
alpha_component += alpha*lasagne.objectives.squared_error(drums,target_var2[:,1:2,:,:])
train_loss_recon_neg += beta*lasagne.objectives.squared_error(drums,target_var2[:,3:4,:,:])
vocals_error=train_loss_recon_vocals.sum()
drums_error=train_loss_recon_drums.sum()
bass_error=train_loss_recon_bass.sum()
negative_error=train_loss_recon_neg.sum()
negative_error_voc=train_loss_recon_neg_voc.sum()
alpha_component=alpha_component.sum()
loss=abs(vocals_error+drums_error+bass_error-negative_error-alpha_component-negative_error_voc)
params1 = lasagne.layers.get_all_params(network2, trainable=True)
updates = lasagne.updates.adadelta(loss, params1)
# val_updates=lasagne.updates.nesterov_momentum(loss1, params1, learning_rate=0.00001, momentum=0.7)
train_fn = theano.function([input_var2,target_var2], loss, updates=updates,allow_input_downcast=True)
train_fn1 = theano.function([input_var2,target_var2], [vocals_error,bass_error,drums_error,negative_error,alpha_component,negative_error_voc], allow_input_downcast=True)
predict_function2=theano.function([input_var2],[vocals,bass,drums,others],allow_input_downcast=True)
losser=[]
loss2=[]
if not skip_train:
logging.info("Training...")
for epoch in range(num_epochs):
train_err = 0
train_batches = 0
vocals_err=0
drums_err=0
bass_err=0
negative_err=0
alpha_component=0
            beta_voc_err=0 # accumulator; distinct from the beta_voc weight above
start_time = time.time()
for batch in range(train.iteration_size):
inputs, target = train()
jump = inputs.shape[2]
inputs=np.reshape(inputs,(inputs.shape[0],1,inputs.shape[1],inputs.shape[2]))
targets=np.ndarray(shape=(inputs.shape[0],4,inputs.shape[2],inputs.shape[3]))
#import pdb;pdb.set_trace()
targets[:,0,:,:]=target[:,:,:jump]
targets[:,1,:,:]=target[:,:,jump:jump*2]
targets[:,2,:,:]=target[:,:,jump*2:jump*3]
targets[:,3,:,:]=target[:,:,jump*3:jump*4]
target = None
train_err+=train_fn(inputs,targets)
[vocals_erre,bass_erre,drums_erre,negative_erre,alpha,betae_voc]=train_fn1(inputs,targets)
vocals_err +=vocals_erre
bass_err +=bass_erre
drums_err +=drums_erre
negative_err +=negative_erre
                beta_voc_err+=betae_voc
alpha_component+=alpha
train_batches += 1
print("Epoch {} of {} took {:.3f}s".format(
epoch + 1, num_epochs, time.time() - start_time))
print(" training loss:\t\t{:.6f}".format(train_err/train_batches))
print(" training loss for vocals:\t\t{:.6f}".format(vocals_err/train_batches))
print(" training loss for bass:\t\t{:.6f}".format(bass_err/train_batches))
print(" training loss for drums:\t\t{:.6f}".format(drums_err/train_batches))
print(" Beta component:\t\t{:.6f}".format(negative_err/train_batches))
print(" Beta component for voice:\t\t{:.6f}".format(beta_voc/train_batches))
print(" alpha component:\t\t{:.6f}".format(alpha_component/train_batches))
losser.append(train_err / train_batches)
save_model(model,network2)
if not skip_sep:
logging.info("Separating")
source = ['vocals','bass','drums','other']
dev_directory = os.listdir(os.path.join(testdir,"Dev"))
        test_directory = os.listdir(os.path.join(testdir,"Test")) # the Test directory is included as well
dirlist = []
dirlist.extend(dev_directory)
dirlist.extend(test_directory)
for f in sorted(dirlist):
if not f.startswith('.'):
if f in dev_directory:
song=os.path.join(testdir,"Dev",f,"mixture.wav")
else:
song=os.path.join(testdir,"Test",f,"mixture.wav")
audioObj, sampleRate, bitrate = util.readAudioScipy(song)
assert sampleRate == 44100,"Sample rate needs to be 44100"
try:
audio = (audioObj[:,0] + audioObj[:,1])/2
                except IndexError: # mono input: no second channel to average
audio = audioObj
audioObj = None
mag,ph=transform.compute_file(audio,phase=True)
mag=scale_factor*mag.astype(np.float32)
batches,nchunks = util.generate_overlapadd(mag,input_size=mag.shape[-1],time_context=train.time_context,overlap=train.overlap,batch_size=train.batch_size,sampleRate=sampleRate)
output=[]
batch_no=1
for batch in batches:
batch_no+=1
start_time=time.time()
output.append(predict_function2(batch))
output=np.array(output)
mm=util.overlapadd_multi(output,batches,nchunks,overlap=train.overlap)
#write audio files
if f in dev_directory:
dirout=os.path.join(outdir,"Dev",f)
else:
dirout=os.path.join(outdir,"Test",f)
if not os.path.exists(dirout):
os.makedirs(dirout)
for i in range(mm.shape[0]):
audio_out=transform.compute_inverse(mm[i,:len(ph)]/scale_factor,ph)
if len(audio_out)>len(audio):
audio_out=audio_out[:len(audio)]
util.writeAudioScipy(os.path.join(dirout,source[i]+'.wav'),audio_out,sampleRate,bitrate)
audio_out=None
audio = None
return losser
if __name__ == "__main__":
"""
Separating Professionally Produced Music
https://sisec.inria.fr/home/2016-professionally-produced-music-recordings/
More details in the following article:
P. Chandna, M. Miron, J. Janer, and E. Gomez
    Given the features computed previously with compute_features, train a network and perform the separation.
Parameters
----------
db : string
The path to the HHDS dataset
nepochs : int, optional
        The number of epochs to train for (one epoch is when all examples in the dataset are seen by the network)
model : string, optional
The name of the trained model
scale_factor : float, optional
Scale the magnitude of the files to be separated with this factor
batch_size : int, optional
The number of examples in a batch (see LargeDataset in dataset.py)
batch_memory : int, optional
The number of batches to load in memory at once (see LargeDataset in dataset.py)
time_context : int, optional
The time context modeled by the network
overlap : int, optional
The number of overlapping frames between adjacent segments (see LargeDataset in dataset.py)
nprocs : int, optional
The number of CPU to use when loading the data in parallel: the more, the faster (see LargeDataset in dataset.py)
"""
if len(sys.argv)>-1:
climate.add_arg('--db', help="the HHDS dataset path")
climate.add_arg('--model', help="the name of the model to test/save")
climate.add_arg('--nepochs', help="number of epochs to train the net")
climate.add_arg('--time_context', help="number of frames for the recurrent/lstm/conv net")
climate.add_arg('--batch_size', help="batch size for training")
climate.add_arg('--batch_memory', help="number of big batches to load into memory")
climate.add_arg('--overlap', help="overlap time context for training")
climate.add_arg('--nprocs', help="number of processor to parallelize file reading")
climate.add_arg('--scale_factor', help="scale factor for the data")
climate.add_arg('--feature_path', help="the path where to load the features from")
db=None
kwargs = climate.parse_args()
if kwargs.__getattribute__('db'):
db = kwargs.__getattribute__('db')
else:
db='../dataset'
if kwargs.__getattribute__('feature_path'):
feature_path = kwargs.__getattribute__('feature_path')
else:
feature_path=os.path.join(db,'transforms','t1')
assert os.path.isdir(db), "Please input the directory for the HHDS dataset with --db path_to_HHDS"
if kwargs.__getattribute__('model'):
model = kwargs.__getattribute__('model')
else:
model="hh_fft_1024"
if kwargs.__getattribute__('batch_size'):
batch_size = int(kwargs.__getattribute__('batch_size'))
else:
batch_size = 32
if kwargs.__getattribute__('batch_memory'):
batch_memory = int(kwargs.__getattribute__('batch_memory'))
else:
batch_memory = 200
if kwargs.__getattribute__('time_context'):
time_context = int(kwargs.__getattribute__('time_context'))
else:
time_context = 30
if kwargs.__getattribute__('overlap'):
overlap = int(kwargs.__getattribute__('overlap'))
else:
overlap = 25
if kwargs.__getattribute__('nprocs'):
nprocs = int(kwargs.__getattribute__('nprocs'))
else:
nprocs = 7
if kwargs.__getattribute__('nepochs'):
nepochs = int(kwargs.__getattribute__('nepochs'))
else:
nepochs = 40
if kwargs.__getattribute__('scale_factor'):
            scale_factor = float(kwargs.__getattribute__('scale_factor'))
else:
scale_factor = 0.3
#tt object needs to be the same as the one in compute_features
tt = transformFFT(frameSize=1024, hopSize=512, sampleRate=44100, window=blackmanharris)
ld1 = LargeDataset(path_transform_in=feature_path, nsources=4, batch_size=batch_size, batch_memory=batch_memory, time_context=time_context, overlap=overlap, nprocs=nprocs,mult_factor_in=scale_factor,mult_factor_out=scale_factor)
logging.info(" Maximum:\t\t{:.6f}".format(ld1.getMax()))
logging.info(" Mean:\t\t{:.6f}".format(ld1.getMean()))
logging.info(" Standard dev:\t\t{:.6f}".format(ld1.getStd()))
if not os.path.exists(os.path.join(db,'output',model)):
os.makedirs(os.path.join(db,'output',model))
if not os.path.exists(os.path.join(db,'models')):
os.makedirs(os.path.join(db,'models'))
train_errs=train_auto(train=ld1,fun=build_ca,transform=tt,outdir=os.path.join(db,'output',model),testdir=os.path.join(db,'Mixtures'),model=os.path.join(db,'models',"model_"+model+".pkl"),num_epochs=nepochs,scale_factor=scale_factor)
f = file(db+"models/"+"loss_"+model+".data", 'wb')
cPickle.dump(train_errs,f,protocol=cPickle.HIGHEST_PROTOCOL)
f.close()
| agpl-3.0 | 5,829,622,917,967,645,000 | 42.85034 | 242 | 0.644327 | false |
vauxoo-dev/server-tools | auth_password_settings/models/res_users.py | 1 | 4119 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Dhaval Patel
# Copyright (C) 2011 - TODAY Denero Team. (<http://www.deneroteam.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, _
from openerp.exceptions import Warning as UserError
from openerp.tools.safe_eval import safe_eval
import string
class ResUsers(models.Model):
_inherit = "res.users"
def _validate_password(self, password):
password_rules = []
config_data = {
'auth_password_min_character': safe_eval(
self.env['ir.config_parameter'].get_param(
'auth_password_settings.auth_password_min_character',
'6'
)),
'auth_password_has_capital_letter': safe_eval(
self.env['ir.config_parameter'].get_param(
'auth_password_settings.auth_password_has_capital_letter',
'False'
)),
'auth_password_has_digit': safe_eval(
self.env['ir.config_parameter'].get_param(
'auth_password_settings.auth_password_has_digit',
'False'
)),
'auth_password_has_special_letter': safe_eval(
self.env['ir.config_parameter'].get_param(
'auth_password_settings.auth_password_has_special_letter',
'False'
)),
}
password_rules.append(
lambda s:
len(s) >= config_data.get('auth_password_min_character', 6) or
_('Has %s or more characters') % (config_data.get(
'auth_password_min_character', 6)
)
)
if (config_data.get('auth_password_has_capital_letter', False)):
password_rules.append(
lambda s: any(x.isupper() for x in s) or
_('Has at least One Capital letter')
)
if (config_data.get('auth_password_has_digit', False)):
password_rules.append(
lambda s: any(x.isdigit() for x in s) or
_('Has one Number')
)
if (config_data.get('auth_password_has_special_letter', False)):
password_rules.append(
lambda s: any(x in string.punctuation for x in s) or
_('Has one Special letter')
)
problems = [
p for p in [
r(password) for r in password_rules
] if p and p is not True]
return problems
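    # Hedged illustration (not part of the original module): with the default
    # parameters above, a too-short password reports the length rule, e.g.
    # self._validate_password('abc') -> ['Has 6 or more characters']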
@api.multi
def write(self, values):
if('password' in values):
problems = self._validate_password(values['password'])
if(problems):
raise UserError(
_("Password must match following rules\n %s ")
% ("\n-- ".join(problems))
)
return super(ResUsers, self).write(values)
@api.multi
def _set_password(self, password):
if(password):
problems = self._validate_password(password)
if(problems):
raise UserError(
_("Password must match following rules\n %s ")
% ("\n-- ".join(problems))
)
return super(ResUsers, self)._set_password(password)
| agpl-3.0 | -7,792,218,272,381,485,000 | 38.228571 | 78 | 0.526827 | false |
dmlc/tvm | tests/python/contrib/test_arm_compute_lib/test_network.py | 1 | 4907 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Arm Compute Library network tests."""
import numpy as np
import pytest
from tvm import testing
from tvm import relay
from test_arm_compute_lib.infrastructure import skip_runtime_test, build_and_run, verify
from test_arm_compute_lib.infrastructure import Device
def _build_and_run_network(mod, params, inputs, device, tvm_ops, acl_partitions, atol, rtol):
"""Helper function to build and run a network."""
data = {}
np.random.seed(0)
for name, (shape, dtype) in inputs.items():
if dtype == "uint8":
low, high = 0, 255
else:
low, high = -127, 128
data[name] = np.random.uniform(low, high, shape).astype(dtype)
outputs = []
for acl in [False, True]:
outputs.append(
build_and_run(
mod,
data,
1,
params,
device,
enable_acl=acl,
tvm_ops=tvm_ops,
acl_partitions=acl_partitions,
)[0]
)
verify(outputs, atol=atol, rtol=rtol, verify_saturation=False)
def _get_tflite_model(tflite_model_path, inputs_dict):
"""Convert TFlite graph to relay."""
import tflite.Model
with open(tflite_model_path, "rb") as f:
tflite_model_buffer = f.read()
try:
tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model_buffer, 0)
except AttributeError:
tflite_model = tflite.Model.GetRootAsModel(tflite_model_buffer, 0)
shape_dict = {}
dtype_dict = {}
for input in inputs_dict:
input_shape, input_dtype = inputs_dict[input]
shape_dict[input] = input_shape
dtype_dict[input] = input_dtype
return relay.frontend.from_tflite(tflite_model, shape_dict=shape_dict, dtype_dict=dtype_dict)
def _get_keras_model(keras_model, inputs_dict):
"""Convert Keras graph to relay."""
inputs = {}
for name, (shape, _) in inputs_dict.items():
inputs[keras_model.input_names[0]] = shape
return relay.frontend.from_keras(keras_model, inputs, layout="NHWC")
def test_vgg16():
Device.load("test_config.json")
if skip_runtime_test():
return
device = Device()
def get_model():
from keras.applications import VGG16
vgg16 = VGG16(include_top=True, weights="imagenet", input_shape=(224, 224, 3), classes=1000)
inputs = {vgg16.input_names[0]: ((1, 224, 224, 3), "float32")}
mod, params = _get_keras_model(vgg16, inputs)
return mod, params, inputs
_build_and_run_network(
*get_model(), device=device, tvm_ops=4, acl_partitions=21, atol=0.002, rtol=0.01
)
def test_mobilenet():
Device.load("test_config.json")
if skip_runtime_test():
return
device = Device()
def get_model():
from keras.applications import MobileNet
mobilenet = MobileNet(
include_top=True, weights="imagenet", input_shape=(224, 224, 3), classes=1000
)
inputs = {mobilenet.input_names[0]: ((1, 224, 224, 3), "float32")}
mod, params = _get_keras_model(mobilenet, inputs)
return mod, params, inputs
_build_and_run_network(
*get_model(), device=device, tvm_ops=73, acl_partitions=18, atol=0.002, rtol=0.01
)
def test_quantized_mobilenet():
Device.load("test_config.json")
if skip_runtime_test():
return
import tvm.relay.testing.tf as tf_testing
device = Device()
def get_model():
model_path = tf_testing.get_workload_official(
"https://storage.googleapis.com/download.tensorflow.org/"
"models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_224_quant.tgz",
"mobilenet_v1_1.0_224_quant.tflite",
)
inputs = {"input": ((1, 224, 224, 3), "uint8")}
mod, params = _get_tflite_model(model_path, inputs_dict=inputs)
return mod, params, inputs
_build_and_run_network(
*get_model(), device=device, tvm_ops=42, acl_partitions=17, atol=8, rtol=0
)
if __name__ == "__main__":
test_vgg16()
test_mobilenet()
test_quantized_mobilenet()
| apache-2.0 | 655,426,237,094,965,100 | 30.056962 | 100 | 0.631751 | false |
vhosouza/invesalius3 | invesalius/gui/bitmap_preview_panel.py | 1 | 20494 | import wx
import vtk
import time
import numpy
from vtk.util import numpy_support
from vtk.wx.wxVTKRenderWindowInteractor import wxVTKRenderWindowInteractor
from pubsub import pub as Publisher
import invesalius.constants as const
import invesalius.data.vtk_utils as vtku
import invesalius.data.converters as converters
import invesalius.reader.bitmap_reader as bitmap_reader
import invesalius.utils as utils
NROWS = 3
NCOLS = 6
NUM_PREVIEWS = NCOLS*NROWS
PREVIEW_WIDTH = 70
PREVIEW_HEIGTH = 70
PREVIEW_BACKGROUND = (255, 255, 255) # White
STR_SIZE = _("Image size: %d x %d")
STR_SPC = _("Spacing: %.2f")
STR_LOCAL = _("Location: %.2f")
STR_PATIENT = "%s\n%s"
STR_ACQ = _("%s %s\nMade in InVesalius")
myEVT_PREVIEW_CLICK = wx.NewEventType()
EVT_PREVIEW_CLICK = wx.PyEventBinder(myEVT_PREVIEW_CLICK, 1)
myEVT_PREVIEW_DBLCLICK = wx.NewEventType()
EVT_PREVIEW_DBLCLICK = wx.PyEventBinder(myEVT_PREVIEW_DBLCLICK, 1)
myEVT_CLICK_SLICE = wx.NewEventType()
# This event occurs when the user select a preview
EVT_CLICK_SLICE = wx.PyEventBinder(myEVT_CLICK_SLICE, 1)
myEVT_CLICK_SERIE = wx.NewEventType()
# This event occurs when the user select a preview
EVT_CLICK_SERIE = wx.PyEventBinder(myEVT_CLICK_SERIE, 1)
myEVT_CLICK = wx.NewEventType()
EVT_CLICK = wx.PyEventBinder(myEVT_CLICK, 1)
class SelectionEvent(wx.PyCommandEvent):
pass
class PreviewEvent(wx.PyCommandEvent):
def __init__(self , evtType, id):
super(PreviewEvent, self).__init__(evtType, id)
def GetSelectID(self):
return self.SelectedID
def SetSelectedID(self, id):
self.SelectedID = id
def GetItemData(self):
return self.data
def GetPressedShift(self):
return self.pressed_shift
def SetItemData(self, data):
self.data = data
def SetShiftStatus(self, status):
self.pressed_shift = status
class SerieEvent(PreviewEvent):
def __init__(self , evtType, id):
super(SerieEvent, self).__init__(evtType, id)
class BitmapInfo(object):
"""
Keep the informations and the image used by preview.
"""
def __init__(self, data):
#self.id = id
self.id = data[7]
self.title = data[6]
self.data = data
self.pos = data[8]
self._preview = None
self.selected = False
self.thumbnail_path = data[1]
@property
def preview(self):
if not self._preview:
bmp = wx.Bitmap(self.thumbnail_path, wx.BITMAP_TYPE_PNG)
self._preview = bmp.ConvertToImage()
return self._preview
def release_thumbnail(self):
self._preview = None
class DicomPaintPanel(wx.Panel):
def __init__(self, parent):
super(DicomPaintPanel, self).__init__(parent)
self._bind_events()
self.image = None
self.last_size = (10,10)
def _bind_events(self):
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_SIZE, self.OnSize)
def _build_bitmap(self, image):
bmp = wx.Bitmap(image)
return bmp
def _image_resize(self, image):
self.Update()
self.Layout()
new_size = self.GetSize()
        # This is necessary due to a Darwin (macOS) sizing problem
if new_size != (0,0):
self.last_size = new_size
return image.Scale(*new_size)
else:
return image.Scale(*self.last_size)
def SetImage(self, image):
self.image = image
r_img = self._image_resize(image)
self.bmp = self._build_bitmap(r_img)
self.Refresh()
def OnPaint(self, evt):
if self.image:
dc = wx.PaintDC(self)
dc.Clear()
dc.DrawBitmap(self.bmp, 0, 0)
def OnSize(self, evt):
if self.image:
self.bmp = self._build_bitmap(self._image_resize(self.image))
self.Refresh()
evt.Skip()
class Preview(wx.Panel):
"""
The little previews.
"""
def __init__(self, parent):
super(Preview, self).__init__(parent)
# Will it be white?
self.select_on = False
self.bitmap_info = None
self._init_ui()
self._bind_events()
def _init_ui(self):
self.SetBackgroundColour(PREVIEW_BACKGROUND)
self.title = wx.StaticText(self, -1, _("Image"))
self.subtitle = wx.StaticText(self, -1, _("Image"))
self.image_viewer = DicomPaintPanel(self)
self.sizer = wx.BoxSizer(wx.VERTICAL)
self.sizer.Add(self.title, 0,
wx.ALIGN_CENTER_HORIZONTAL)
self.sizer.Add(self.subtitle, 0,
wx.ALIGN_CENTER_HORIZONTAL)
self.sizer.Add(self.image_viewer, 1, wx.ALIGN_CENTRE_HORIZONTAL \
| wx.SHAPED | wx.ALL, 5)
self.sizer.Fit(self)
self.SetSizer(self.sizer)
self.Layout()
self.Update()
self.Fit()
self.SetAutoLayout(1)
def _bind_events(self):
self.Bind( wx.EVT_LEFT_DCLICK, self.OnDClick)
self.Bind(wx.EVT_ENTER_WINDOW, self.OnEnter)
self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)
self.Bind(wx.EVT_LEFT_DOWN, self.OnSelect)
self.title.Bind(wx.EVT_LEFT_DOWN, self.OnSelect)
self.subtitle.Bind(wx.EVT_LEFT_DOWN, self.OnSelect)
self.image_viewer.Bind(wx.EVT_LEFT_DOWN, self.OnSelect)
def SetBitmapToPreview(self, bitmap_info):
if self.bitmap_info:
self.bitmap_info.release_thumbnail()
self.bitmap_info = bitmap_info
self.SetTitle(self.bitmap_info.title[-10:])
self.SetSubtitle('')
image = self.bitmap_info.preview
self.image_viewer.SetImage(image)
self.select_on = bitmap_info.selected
self.Select()
self.Update()
def SetTitle(self, title):
self.title.SetLabel(title)
def SetSubtitle(self, subtitle):
self.subtitle.SetLabel(subtitle)
def OnEnter(self, evt):
if not self.select_on:
try:
c = wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE)
except AttributeError:
c = wx.SystemSettings_GetColour(wx.SYS_COLOUR_BTNFACE)
self.SetBackgroundColour(c)
def OnLeave(self, evt):
if not self.select_on:
c = (PREVIEW_BACKGROUND)
self.SetBackgroundColour(c)
def OnSelect(self, evt):
shift_pressed = False
if evt.shiftDown:
shift_pressed = True
self.select_on = True
self.bitmap_info.selected = True
self.Select()
# Generating a EVT_PREVIEW_CLICK event
my_evt = SerieEvent(myEVT_PREVIEW_CLICK, self.GetId())
my_evt.SetSelectedID(self.bitmap_info.id)
my_evt.SetItemData(self.bitmap_info.data)
my_evt.SetShiftStatus(shift_pressed)
my_evt.SetEventObject(self)
self.GetEventHandler().ProcessEvent(my_evt)
Publisher.sendMessage('Set bitmap in preview panel', pos=self.bitmap_info.pos)
evt.Skip()
def OnSize(self, evt):
if self.bitmap_info:
self.SetBitmapToPreview(self.bitmap_info)
evt.Skip()
def Select(self, on=True):
if self.select_on:
try:
c = wx.SystemSettings.GetColour(wx.SYS_COLOUR_HIGHLIGHT)
except AttributeError:
c = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHT)
else:
c = (PREVIEW_BACKGROUND)
self.SetBackgroundColour(c)
self.Refresh()
def OnDClick(self, evt):
my_evt = SerieEvent(myEVT_PREVIEW_DBLCLICK, self.GetId())
my_evt.SetSelectedID(self.bitmap_info.id)
my_evt.SetItemData(self.bitmap_info.data)
my_evt.SetEventObject(self)
self.GetEventHandler().ProcessEvent(my_evt)
evt.Skip()
class BitmapPreviewSeries(wx.Panel):
"""A dicom series preview panel"""
def __init__(self, parent):
super(BitmapPreviewSeries, self).__init__(parent)
# TODO: 3 pixels between the previews is a good idea?
# I have to test.
self.displayed_position = 0
self.nhidden_last_display = 0
self.selected_dicom = None
self.selected_panel = None
self._init_ui()
def _init_ui(self):
scroll = wx.ScrollBar(self, -1, style=wx.SB_VERTICAL)
self.scroll = scroll
self.grid = wx.GridSizer(rows=NROWS, cols=NCOLS, vgap=3, hgap=3)
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.Add(self.grid, 1, wx.EXPAND|wx.GROW|wx.ALL, 2)
background_sizer = wx.BoxSizer(wx.HORIZONTAL)
background_sizer.Add(sizer, 1, wx.EXPAND|wx.GROW|wx.ALL, 2)
background_sizer.Add(scroll, 0, wx.EXPAND|wx.GROW)
self.SetSizer(background_sizer)
background_sizer.Fit(self)
self.Layout()
self.Update()
self.SetAutoLayout(1)
self.sizer = background_sizer
self._Add_Panels_Preview()
self._bind_events()
self._bind_pub_sub_events()
def _Add_Panels_Preview(self):
self.previews = []
for i in range(NROWS):
for j in range(NCOLS):
p = Preview(self)
p.Bind(EVT_PREVIEW_CLICK, self.OnSelect)
self.previews.append(p)
self.grid.Add(p, 1, flag=wx.EXPAND)
def _bind_events(self):
# When the user scrolls the window
self.Bind(wx.EVT_SCROLL, self.OnScroll)
self.Bind(wx.EVT_MOUSEWHEEL, self.OnWheel)
def _bind_pub_sub_events(self):
Publisher.subscribe(self.RemovePanel, 'Remove preview panel')
def OnSelect(self, evt):
my_evt = SerieEvent(myEVT_CLICK_SERIE, self.GetId())
my_evt.SetSelectedID(evt.GetSelectID())
my_evt.SetItemData(evt.GetItemData())
if self.selected_dicom:
self.selected_dicom.selected = self.selected_dicom is \
evt.GetEventObject().bitmap_info
self.selected_panel.select_on = self.selected_panel is evt.GetEventObject()
self.selected_panel.Select()
self.selected_panel = evt.GetEventObject()
self.selected_dicom = self.selected_panel.bitmap_info
self.GetEventHandler().ProcessEvent(my_evt)
evt.Skip()
def SetBitmapFiles(self, data):
self.files = []
bitmap = bitmap_reader.BitmapData()
bitmap.SetData(data)
pos = 0
for d in data:
d.append(pos)
info = BitmapInfo(d)
self.files.append(info)
pos += 1
        scroll_range = len(self.files) // NCOLS
if scroll_range * NCOLS < len(self.files):
scroll_range +=1
self.scroll.SetScrollbar(0, NROWS, scroll_range, NCOLS)
self._display_previews()
def RemovePanel(self, data):
for p, f in zip(self.previews, self.files):
            if p.bitmap_info is not None:
if data.encode('utf-8') in p.bitmap_info.data:
self.files.remove(f)
p.Hide()
self._display_previews()
Publisher.sendMessage('Update max of slidebar in single preview image', max_value=len(self.files))
self.Update()
self.Layout()
for n, p in enumerate(self.previews):
            if p.bitmap_info is not None:
if p.IsShown():
p.bitmap_info.pos = n
def _display_previews(self):
initial = self.displayed_position * NCOLS
final = initial + NUM_PREVIEWS
if len(self.files) < final:
for i in range(final-len(self.files)):
try:
self.previews[-i-1].Hide()
except IndexError:
utils.debug("doesn't exist!")
pass
self.nhidden_last_display = final-len(self.files)
else:
if self.nhidden_last_display:
for i in range(self.nhidden_last_display):
try:
self.previews[-i-1].Show()
except IndexError:
utils.debug("doesn't exist!")
pass
self.nhidden_last_display = 0
for f, p in zip(self.files[initial:final], self.previews):
p.SetBitmapToPreview(f)
if f.selected:
self.selected_panel = p
for f, p in zip(self.files[initial:final], self.previews):
p.Show()
def OnScroll(self, evt=None):
if evt:
if self.displayed_position != evt.GetPosition():
self.displayed_position = evt.GetPosition()
else:
if self.displayed_position != self.scroll.GetThumbPosition():
self.displayed_position = self.scroll.GetThumbPosition()
self._display_previews()
def OnWheel(self, evt):
        d = evt.GetWheelDelta() // evt.GetWheelRotation() # integer notches; floor division keeps the Py2 semantics
self.scroll.SetThumbPosition(self.scroll.GetThumbPosition() - d)
self.OnScroll()
class SingleImagePreview(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent, -1)
self.actor = None
self.__init_gui()
self.__init_vtk()
self.__bind_evt_gui()
self.__bind_pubsub()
self.dicom_list = []
self.nimages = 1
self.current_index = 0
self.window_width = const.WINDOW_LEVEL[_("Bone")][0]
self.window_level = const.WINDOW_LEVEL[_("Bone")][1]
def __init_vtk(self):
text_image_size = vtku.Text()
text_image_size.SetPosition(const.TEXT_POS_LEFT_UP)
text_image_size.SetValue("")
text_image_size.SetSize(const.TEXT_SIZE_SMALL)
self.text_image_size = text_image_size
text_image_location = vtku.Text()
text_image_location.SetVerticalJustificationToBottom()
text_image_location.SetPosition(const.TEXT_POS_LEFT_DOWN)
text_image_location.SetValue("")
text_image_location.SetSize(const.TEXT_SIZE_SMALL)
self.text_image_location = text_image_location
text_patient = vtku.Text()
text_patient.SetJustificationToRight()
text_patient.SetPosition(const.TEXT_POS_RIGHT_UP)
text_patient.SetValue("")
text_patient.SetSize(const.TEXT_SIZE_SMALL)
self.text_patient = text_patient
text_acquisition = vtku.Text()
text_acquisition.SetJustificationToRight()
text_acquisition.SetVerticalJustificationToBottom()
text_acquisition.SetPosition(const.TEXT_POS_RIGHT_DOWN)
text_acquisition.SetValue("")
text_acquisition.SetSize(const.TEXT_SIZE_SMALL)
self.text_acquisition = text_acquisition
renderer = vtk.vtkRenderer()
renderer.AddActor(text_image_size.actor)
renderer.AddActor(text_image_location.actor)
renderer.AddActor(text_patient.actor)
renderer.AddActor(text_acquisition.actor)
self.renderer = renderer
style = vtk.vtkInteractorStyleImage()
interactor = wxVTKRenderWindowInteractor(self.panel, -1,
size=wx.Size(340,340))
interactor.SetRenderWhenDisabled(True)
interactor.GetRenderWindow().AddRenderer(renderer)
interactor.SetInteractorStyle(style)
interactor.Render()
self.interactor = interactor
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(interactor, 1, wx.GROW|wx.EXPAND)
sizer.Fit(self.panel)
self.panel.SetSizer(sizer)
self.Layout()
self.Update()
def __init_gui(self):
self.panel = wx.Panel(self, -1)
slider = wx.Slider(self,
id=-1,
value=0,
minValue=0,
maxValue=99,
style=wx.SL_HORIZONTAL|wx.SL_AUTOTICKS)
slider.SetWindowVariant(wx.WINDOW_VARIANT_SMALL)
slider.SetTickFreq(1)
self.slider = slider
checkbox = wx.CheckBox(self, -1, _("Auto-play"))
self.checkbox = checkbox
in_sizer = wx.BoxSizer(wx.HORIZONTAL)
in_sizer.Add(slider, 1, wx.GROW|wx.EXPAND)
in_sizer.Add(checkbox, 0)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.panel, 20, wx.GROW|wx.EXPAND)
sizer.Add(in_sizer, 1, wx.GROW|wx.EXPAND)
sizer.Fit(self)
self.SetSizer(sizer)
self.Layout()
self.Update()
self.SetAutoLayout(1)
def __bind_evt_gui(self):
self.slider.Bind(wx.EVT_SLIDER, self.OnSlider)
self.checkbox.Bind(wx.EVT_CHECKBOX, self.OnCheckBox)
def __bind_pubsub(self):
Publisher.subscribe(self.ShowBitmapByPosition, 'Set bitmap in preview panel')
Publisher.subscribe(self.UpdateMaxValueSliderBar, 'Update max of slidebar in single preview image')
Publisher.subscribe(self.ShowBlackSlice, 'Show black slice in single preview image')
def ShowBitmapByPosition(self, pos):
        if pos is not None:
self.ShowSlice(int(pos))
def OnSlider(self, evt):
pos = evt.GetInt()
self.ShowSlice(pos)
evt.Skip()
def OnCheckBox(self, evt):
self.ischecked = evt.IsChecked()
if evt.IsChecked():
wx.CallAfter(self.OnRun)
evt.Skip()
def OnRun(self):
pos = self.slider.GetValue()
pos += 1
if not (self.nimages- pos):
pos = 0
self.slider.SetValue(pos)
self.ShowSlice(pos)
time.sleep(0.2)
if self.ischecked:
try:
wx.Yield()
#TODO: temporary fix necessary in the Windows XP 64 Bits
#BUG in wxWidgets http://trac.wxwidgets.org/ticket/10896
except(wx._core.PyAssertionError):
utils.debug("wx._core.PyAssertionError")
finally:
wx.CallAfter(self.OnRun)
def SetBitmapFiles(self, data):
#self.dicom_list = group.GetHandSortedList()
self.bitmap_list = data
self.current_index = 0
self.nimages = len(data)
# GUI
self.slider.SetMax(self.nimages-1)
self.slider.SetValue(0)
self.ShowSlice()
def UpdateMaxValueSliderBar(self, max_value):
self.slider.SetMax(max_value - 1)
self.slider.Refresh()
self.slider.Update()
def ShowBlackSlice(self, pub_sub):
n_array = numpy.zeros((100,100))
self.text_image_size.SetValue('')
image = converters.to_vtk(n_array, spacing=(1,1,1),\
slice_number=1, orientation="AXIAL")
colorer = vtk.vtkImageMapToWindowLevelColors()
colorer.SetInputData(image)
colorer.Update()
if self.actor is None:
self.actor = vtk.vtkImageActor()
self.renderer.AddActor(self.actor)
# PLOT IMAGE INTO VIEWER
self.actor.SetInputData(colorer.GetOutput())
self.renderer.ResetCamera()
self.interactor.Render()
# Setting slider position
self.slider.SetValue(0)
def ShowSlice(self, index = 0):
bitmap = self.bitmap_list[index]
# UPDATE GUI
## Text related to size
value = STR_SIZE %(bitmap[3], bitmap[4])
self.text_image_size.SetValue(value)
value1 = ''
value2 = ''
value = "%s\n%s" %(value1, value2)
self.text_image_location.SetValue(value)
#self.text_patient.SetValue(value)
self.text_patient.SetValue('')
#self.text_acquisition.SetValue(value)
self.text_acquisition.SetValue('')
n_array = bitmap_reader.ReadBitmap(bitmap[0])
image = converters.to_vtk(n_array, spacing=(1,1,1),\
slice_number=1, orientation="AXIAL")
# ADJUST CONTRAST
window_level = n_array.max()/2
window_width = n_array.max()
colorer = vtk.vtkImageMapToWindowLevelColors()
colorer.SetInputData(image)
colorer.SetWindow(float(window_width))
colorer.SetLevel(float(window_level))
colorer.Update()
if self.actor is None:
self.actor = vtk.vtkImageActor()
self.renderer.AddActor(self.actor)
# PLOT IMAGE INTO VIEWER
self.actor.SetInputData(colorer.GetOutput())
self.renderer.ResetCamera()
self.interactor.Render()
# Setting slider position
self.slider.SetValue(index)
| gpl-2.0 | 4,217,784,846,437,319,000 | 29.864458 | 118 | 0.592368 | false |
yehnan/python_book_yehnan | ch13/ch13_tkinter.py | 1 | 1091 |
from tkinter import *
def fib(n):
a, b = 0, 1
for i in range(n):
a, b = b, a+b
return a
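# Hedged check, added for illustration: the first Fibonacci numbers.
# assert [fib(n) for n in range(7)] == [0, 1, 1, 2, 3, 5, 8]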
class App(Frame):
def __init__(self, master=None):
Frame.__init__(self, master)
self.pack()
self.entry_n = Entry(self, width=10)
self.entry_n.pack()
self.fn = StringVar()
self.label_fn = Label(self, textvariable=self.fn,
width=50)
self.fn.set('result')
self.label_fn.pack()
self.btn_cal = Button(self, text="Calculate",
command=self.cal_fib)
self.btn_cal.pack()
self.btn_quit = Button(self, text="Quit", fg="red",
                            command=self.master.destroy)
self.btn_quit.pack(side=BOTTOM)
def cal_fib(self):
try:
n = int(self.entry_n.get())
self.fn.set(str(fib(n)))
except Exception:
self.fn.set('Invalid input')
root = Tk()
app = App(root)
app.mainloop()
| gpl-2.0 | -7,059,458,919,310,843,000 | 19.82 | 59 | 0.454629 | false |
kdaily/altanalyze | sampleIndexSelection.py | 1 | 3739 | import sys,string
import os
def makeTestFile():
all_data = [['name','harold','bob','frank','sally','kim','jim'],
['a','0','0','1','2','0','5'],['b','0','0','1','2','0','5'],
['c','0','0','1','2','0','5'],['d','0','0','1','2','0','5']]
input_file = 'test.txt'
export_object = open(input_file,'w')
for i in all_data:
export_object.write(string.join(i,'\t')+'\n')
export_object.close()
return input_file
def filterFile(input_file,output_file,filter_names):
export_object = open(output_file,'w')
firstLine = True
for line in open(input_file,'rU').xreadlines():
data = line.rstrip()
values = string.split(data,'\t')
if firstLine:
if data[0]!='#':
sample_index_list = map(lambda x: values.index(x), filter_names)
firstLine = False
header = values
try: filtered_values = map(lambda x: values[x], sample_index_list) ### simple and fast way to reorganize the samples
except Exception:
### For PSI files with missing values at the end of each line, often
if len(header) != len(values):
diff = len(header)-len(values)
values+=diff*['']
filtered_values = map(lambda x: values[x], sample_index_list) ### simple and fast way to reorganize the samples
#print values[0]; print sample_index_list; print values; print len(values); print len(prior_values);kill
prior_values=values
export_object.write(string.join([values[0]]+filtered_values,'\t')+'\n')
export_object.close()
print 'Filtered columns printed to:',output_file
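# Hedged example (mirrors the test data from makeTestFile above):
# filterFile('test.txt', 'test-output.txt', ['bob', 'sally', 'jim'])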
def filterRows(input_file,output_file,filterDB=None):
export_object = open(output_file,'w')
firstLine = True
for line in open(input_file,'rU').xreadlines():
data = line.rstrip()
values = string.split(data,'\t')
if firstLine:
firstLine = False
export_object.write(line)
else:
if filterDB!=None:
if values[0] in filterDB:
export_object.write(line)
else:
max_val = max(map(float,values[1:]))
#min_val = min(map(float,values[1:]))
#if max_val>5:
if max_val < 0.1:
export_object.write(line)
export_object.close()
print 'Filtered rows printed to:',output_file
def getFilters(filter_file):
filter_list=[]
for line in open(filter_file,'rU').xreadlines():
data = line.rstrip()
sample = string.split(data,'\t')[0]
filter_list.append(sample)
return filter_list
if __name__ == '__main__':
################ Comand-line arguments ################
import getopt
filter_rows=False
if len(sys.argv[1:])<=1: ### Indicates that there are insufficient number of command-line arguments
filter_names = ['bob','sally','jim']
input_file = makeTestFile()
#Filtering samples in a datasets
#python SampleSelect.py --i /Users/saljh8/Desktop/C4-hESC/ExpressionInput/exp.C4.txt --f /Users/saljh8/Desktop/C4-hESC/ExpressionInput/groups.C4.txt
else:
options, remainder = getopt.getopt(sys.argv[1:],'', ['i=','f=','r='])
#print sys.argv[1:]
for opt, arg in options:
if opt == '--i': input_file=arg
elif opt == '--f': filter_file=arg
elif opt == '--r': filter_rows=True
output_file = input_file[:-4]+'-output.txt'
if filter_rows:
filterRows(input_file,output_file)
else:
filter_names = getFilters(filter_file)
filterFile(input_file,output_file,filter_names)
| apache-2.0 | 1,056,679,316,745,209,900 | 38.776596 | 156 | 0.562182 | false |
beeftornado/sentry | src/sentry/migrations/0126_make_platformexternalissue_group_id_flexfk.py | 1 | 2332 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-11-09 21:35
from __future__ import unicode_literals
from django.db import migrations
import django.db.models.deletion
import sentry.db.models.fields.foreignkey
class Migration(migrations.Migration):
# This flag is used to mark that a migration shouldn't be automatically run in
# production. We set this to True for operations that we think are risky and want
# someone from ops to run manually and monitor.
# General advice is that if in doubt, mark your migration as `is_dangerous`.
# Some things you should always mark as dangerous:
# - Large data migrations. Typically we want these to be run manually by ops so that
# they can be monitored. Since data migrations will now hold a transaction open
# this is even more important.
# - Adding columns to highly active tables, even ones that are NULL.
is_dangerous = False
# This flag is used to decide whether to run this migration in a transaction or not.
# By default we prefer to run in a transaction, but for migrations where you want
# to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
# want to create an index concurrently when adding one to an existing table.
atomic = True
dependencies = [
('sentry', '0125_add_platformexternalissue_project_id'),
]
operations = [
migrations.SeparateDatabaseAndState(
state_operations=[
migrations.AddField(
model_name='platformexternalissue',
name='group',
field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
db_constraint=False,
db_index=False,
null=False,
on_delete=django.db.models.deletion.CASCADE,
to='sentry.Group'
),
),
migrations.RemoveField(
model_name='platformexternalissue',
name='group_id',
),
migrations.AlterUniqueTogether(
name='platformexternalissue',
unique_together=set([('group', 'service_type')]),
),
]
)
]
| bsd-3-clause | 2,365,008,032,185,962,500 | 39.912281 | 88 | 0.603345 | false |
HeavenDuke/DeepIC | models/HybridResSPPNet.py | 1 | 2266 | from keras.models import Model
from keras.layers import Dense, ZeroPadding2D, BatchNormalization, Conv2D, MaxPooling2D, Activation, Input, Concatenate
from keras.optimizers import RMSprop
from layers.SpatialPyramidPooling import SpatialPyramidPooling
from keras.applications.resnet50 import identity_block, conv_block
def EnhancedHybridResSppNet(class_num, enhanced_class_num):
_input = Input(shape = (None, None, 3))
model = _input
model = ZeroPadding2D((3, 3))(model)
model = Conv2D(64, (7, 7), strides = (2, 2))(model)
model = BatchNormalization(axis = 3)(model)
model = Activation('relu')(model)
model = MaxPooling2D((3, 3), strides = (2, 2))(model)
model = conv_block(model, 3, [64, 64, 256], stage = 2, block = 'a', strides = (1, 1))
model = identity_block(model, 3, [64, 64, 256], stage = 2, block = 'b')
model = identity_block(model, 3, [64, 64, 256], stage = 2, block = 'c')
model = conv_block(model, 3, [128, 128, 512], stage = 3, block = 'a')
model = identity_block(model, 3, [128, 128, 512], stage = 3, block = 'b')
model = identity_block(model, 3, [128, 128, 512], stage = 3, block = 'c')
model = identity_block(model, 3, [128, 128, 512], stage = 3, block = 'd')
model = MaxPooling2D((2, 2))(model)
model = SpatialPyramidPooling([1, 2, 4])(model)
model1 = Dense(units = class_num)(model)
model1 = Activation(activation = "softmax")(model1)
model1 = Model(_input, model1)
model1.compile(loss = "categorical_crossentropy", optimizer = RMSprop(lr = 1e-4, decay = 1e-6), metrics = ['accuracy'])
model2 = Dense(units = enhanced_class_num)(model)
model2 = Activation(activation = "softmax")(model2)
model2 = Model(_input, model2)
model2.compile(loss = "categorical_crossentropy", optimizer = RMSprop(lr = 1e-4, decay = 1e-6), metrics = ['accuracy'])
input2 = Input(shape = (100, ))
    model3 = Concatenate()([input2, model])
model3 = Dense(units = class_num)(model3)
model3 = Activation(activation = "softmax")(model3)
model3 = Model(inputs = [_input, input2], outputs = model3)
model3.compile(loss = "categorical_crossentropy", optimizer = RMSprop(lr = 1e-4, decay = 1e-6), metrics = ['accuracy'])
return model1, model2, model3
| mit | -6,292,805,518,916,216,000 | 46.208333 | 123 | 0.657988 | false |
USGSDenverPychron/pychron | pychron/remote_hardware/protocols/laser_protocol.py | 1 | 1676 | '''
Copyright 2011 Jake Ross
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
# =============enthought library imports=======================
# ============= standard library imports ========================
# ============= local library imports ==========================
class LaserProtocol:
commands = {'ReadLaserPower': None,
'GetLaserStatus': None,
'Enable': None,
'Disable': None,
'SetXY': '1,1',
'SetX': '1,1',
'SetY': '1',
'SetZ': '1',
'GetPosition': None,
'GetDriveMoving': None,
'GetXMoving': None,
'GetYMoving': None,
'GetZMoving': None,
'StopDrive': None,
'SetDriveHome': None,
'SetHomeX': None,
'SetHomeY': None,
'SetHomeZ': None,
'GoToHole': '1',
'GetJogProcedures': None,
'DoJog': ('addc', 10),
'AbortJog': None,
'SetBeamDiameter': '1',
'GetBeamDiameter': None,
'SetZoom': '1',
'GetZoom': None,
'SetSampleHolder': 'Ba',
'GetSampleHolder': None,
'SetLaserPower': '1'
}
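# Hedged illustration: each entry above maps a remote command name to an
# example argument string (None means the command takes no arguments), e.g.
# LaserProtocol.commands['SetXY'] -> '1,1'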
# ============= EOF =====================================
| apache-2.0 | 2,419,733,984,905,574,400 | 30.037037 | 77 | 0.547733 | false |
cherrygirl/micronaet-mx8 | mx_order_report_aeroo/purchase.py | 1 | 3153 | # -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class PurchaseOrder(orm.Model):
""" Model name: PurchaseOrder
"""
_inherit = 'purchase.order'
# ------------------
# Override function:
# ------------------
def print_quotation(self, cr, uid, ids, context=None):
        ''' This function prints the purchase order and marks it as sent
so that we can see more easily the next step of the workflow
'''
assert len(ids) == 1, \
'This option should only be used for a single id at a time'
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(
uid, 'purchase.order', ids[0], 'quotation_sent', cr)
datas = {
'model': 'purchase.order',
'ids': ids,
'form': self.read(cr, uid, ids[0], context=context),
}
return {
'type': 'ir.actions.report.xml',
'report_name': 'custom_mx_order_report',
'datas': datas,
'nodestroy': True,
}
_columns = {
'printed_time': fields.boolean('Printed time'),
'quotation_mode': fields.selection([
('with', 'With code'),
('without', 'Without code'),
('type1', 'TODO Type 1'), # TODO
('type2', 'TODO Type 2'), # TODO
('type3', 'TODO Type 3'), # TODO
], 'Order print mode', required=True)
}
_defaults = {
'printed_time': lambda *x: True,
'quotation_mode': lambda *x: 'with',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 4,859,380,557,574,244,000 | 35.241379 | 79 | 0.587377 | false |
wazo-pbx/xivo-auth | wazo_auth/plugins/external_auth/microsoft/websocket_oauth2.py | 1 | 2605 | # Copyright 2019-2020 The Wazo Authors (see the AUTHORS file)
# SPDX-License-Identifier: GPL-3.0-or-later
import json
import logging
from threading import Thread
import websocket
from wazo_auth.exceptions import ExternalAuthAlreadyExists
from wazo_auth.database.helpers import commit_or_rollback
from .helpers import get_timestamp_expiration
logger = logging.getLogger(__name__)
class WebSocketOAuth2(Thread):
def __init__(self, host, auth, external_auth, client_secret, token_url, auth_type):
super().__init__()
self.host = host
self.oauth2 = auth
self.external_auth_service = external_auth
self.client_secret = client_secret
self.token_url = token_url
self.user_uuid = None
self.auth_type = auth_type
self.ws = None
def run(self, state, user_uuid):
self.user_uuid = user_uuid
self.ws = websocket.WebSocketApp(
'{}/ws/{}'.format(self.host, state),
on_message=self._on_message,
on_error=self._on_error,
on_close=self._on_close,
)
logger.debug('WebSocketOAuth2 opened.')
try:
self.ws.run_forever()
finally:
if self.ws:
self.ws.close()
self.ws = None
def _on_message(self, message):
        logger.debug(
            "Confirmation received on WebSocketOAuth2, message: {}".format(
                message
            )
        )
msg = json.loads(message)
if self.ws:
self.ws.close()
self.ws = None
self.create_first_token(self.user_uuid, msg.get('code'))
commit_or_rollback()
def _on_error(self, error):
logger.error(error)
def _on_close(self):
logger.debug("WebsocketOAuth closed.")
def create_first_token(self, user_uuid, code):
logger.debug('Trying to fetch token on {}'.format(self.token_url))
token_data = self.oauth2.fetch_token(
self.token_url, client_secret=self.client_secret, code=code
)
data = {
'access_token': token_data['access_token'],
'refresh_token': token_data['refresh_token'],
'token_expiration': get_timestamp_expiration(token_data['expires_in']),
'scope': token_data['scope'],
}
logger.debug('Microsoft token created.')
try:
self.external_auth_service.create(user_uuid, self.auth_type, data)
except ExternalAuthAlreadyExists:
self.external_auth_service.update(user_uuid, self.auth_type, data)
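# Minimal wiring sketch (hypothetical names; assumes `oauth` is an OAuth2
# session object exposing fetch_token(), e.g. requests_oauthlib.OAuth2Session,
# and `external_auth_service` implements create()/update()):
#
#     ws_oauth = WebSocketOAuth2(
#         host='wss://oauth.example.com',
#         auth=oauth,
#         external_auth=external_auth_service,
#         client_secret=client_secret,
#         token_url='https://login.microsoftonline.com/'
#                   'common/oauth2/v2.0/token',
#         auth_type='microsoft',
#     )
#     ws_oauth.run(state, user_uuid)  # blocks until a code message arrives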
| gpl-3.0 | -8,497,690,443,662,269,000 | 31.160494 | 87 | 0.596545 | false |
vponomaryov/rally | samples/plugins/context/context_plugin.py | 1 | 3099 | # Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.benchmark import context
from rally.common import log as logging
from rally import consts
from rally import osclients
LOG = logging.getLogger(__name__)
@context.context(name="create_flavor", order=1000)
class CreateFlavorContext(context.Context):
"""Create sample flavor
This sample create flavor with specified options before task starts and
delete it after task completion.
To create your own context plugin, inherit it from
rally.benchmark.context.Context
"""
CONFIG_SCHEMA = {
"type": "object",
"$schema": consts.JSON_SCHEMA,
"additionalProperties": False,
"properties": {
"flavor_name": {
"type": "string",
},
"ram": {
"type": "integer",
"minimum": 1
},
"vcpus": {
"type": "integer",
"minimum": 1
},
"disk": {
"type": "integer",
"minimum": 1
}
}
}
def setup(self):
"""This method is called before the task start."""
try:
# use rally.osclients to get necessary client instance
nova = osclients.Clients(self.context["admin"]["endpoint"]).nova()
            # and then do what you need with this client
self.context["flavor"] = nova.flavors.create(
# context settings are stored in self.config
name=self.config.get("flavor_name", "rally_test_flavor"),
ram=self.config.get("ram", 1),
vcpus=self.config.get("vcpus", 1),
disk=self.config.get("disk", 1)).to_dict()
LOG.debug("Flavor with id '%s'" % self.context["flavor"]["id"])
except Exception as e:
msg = "Can't create flavor: %s" % e.message
if logging.is_debug():
LOG.exception(msg)
else:
LOG.warning(msg)
def cleanup(self):
"""This method is called after the task finish."""
try:
nova = osclients.Clients(self.context["admin"]["endpoint"]).nova()
nova.flavors.delete(self.context["flavor"]["id"])
LOG.debug("Flavor '%s' deleted" % self.context["flavor"]["id"])
except Exception as e:
msg = "Can't delete flavor: %s" % e.message
if logging.is_debug():
LOG.exception(msg)
else:
LOG.warning(msg)
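# Example task snippet (illustrative; the context name and option keys come
# from the decorator and CONFIG_SCHEMA above, the scenario name and values are
# hypothetical):
#
#     {
#         "NovaServers.boot_and_delete_server": [{
#             "context": {
#                 "create_flavor": {
#                     "flavor_name": "rally_test_flavor",
#                     "ram": 512,
#                     "vcpus": 1,
#                     "disk": 1
#                 }
#             }
#         }]
#     }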
| apache-2.0 | 1,991,532,405,387,455,000 | 34.215909 | 78 | 0.573411 | false |
spirali/aislinn | src/aislinn/vgtool/resource.py | 1 | 2249 | #
# Copyright (C) 2014 Stanislav Bohm
#
# This file is part of Aislinn.
#
# Aislinn is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2 of the License, or
# (at your option) any later version.
#
# Aislinn is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Aislinn. If not, see <http://www.gnu.org/licenses/>.
#
class Resource:
def __init__(self, manager, id):
self.ref_count = 1
self.manager = manager
self.id = id
def inc_ref(self):
self.ref_count += 1
assert self.ref_count > 1
def inc_ref_revive(self):
self.ref_count += 1
if self.ref_count == 1:
self.manager.revive(self)
def dec_ref(self):
self.ref_count -= 1
if self.ref_count < 1:
assert self.ref_count == 0
self.manager.add_not_used_resource(self)
def __repr__(self):
return "<{0} {1:x} {2} ref={3}>".format(
self.__class__, id(self), self.id, self.ref_count)
class ResourceManager:
def __init__(self, resource_class=Resource):
self.not_used_resources = None
self.resource_class = resource_class
self.resource_count = 0
def new(self, id):
self.resource_count += 1
return self.resource_class(self, id)
def revive(self, resource):
assert resource.ref_count == 1
self.resource_count += 1
self.not_used_resources.remove(resource)
def pickup_resources_to_clean(self):
r = self.not_used_resources
self.not_used_resources = None
return r
def add_not_used_resource(self, resource):
self.resource_count -= 1
assert self.resource_count >= 0
if self.not_used_resources is None:
self.not_used_resources = [resource]
else:
self.not_used_resources.append(resource)
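# Illustrative life cycle of the reference counting above (no Aislinn
# internals involved; the id string is made up):
#
#     manager = ResourceManager()
#     res = manager.new("buffer-1")      # ref_count == 1
#     res.inc_ref()                      # a second owner appears
#     res.dec_ref()
#     res.dec_ref()                      # hits 0 -> queued as not used
#     for r in manager.pickup_resources_to_clean() or []:
#         pass                           # free the underlying resource here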
| gpl-2.0 | 7,020,461,418,649,479,000 | 28.592105 | 73 | 0.617608 | false |
dpinney/omf | omf/scratch/amiAnomalyDetection/meterDataEnergyUnitedConversion.py | 1 | 3750 | ''' Convert meter data from EU format to our anomly detection input format. '''
'''A script to read data from inCSV, and write it to outCSV'''
import csv, os, datetime, operator
from os.path import join as pJoin
#import json, pprint, random
#import matplotlib.pyplot as plt
#import numpy as np
# Path variables
workDir = os.getcwd()
# Reading the AMI data from .csv file
#inCSV = pJoin(workDir, 'Xample Input - real AMI measurements.csv')
inCSV = pJoin(workDir, 'Xample SMALLER Input - real AMI measurements.csv')
# Other Inputs
MinDetRunTime = 2 # Minimum time (in hours) to detect anomalies
MinDevFromAve = 5 # The minimum deviation, as a percentage (%), from the average power
# The following function puts the datetime into the standard ISO format
def dateFormatter(dateStr):
    # Try to parse a date string and return it as an ISO-formatted string.
toTry = ["%m/%d/%Y %H:%M:%S %p", "%m/%d/%y %H:%M", "%m/%d/%y", "%m/%d/%Y"]
for dateFormat in toTry:
try:
readDateTime = datetime.datetime.strptime(dateStr, dateFormat).isoformat()
return readDateTime
except:
continue
error = "We don't have a test case for our date: "+dateStr+" :("
print(error)
return error
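# Example: dateFormatter('4/1/15 13:30') returns '2015-04-01T13:30:00'
# (matched by the "%m/%d/%y %H:%M" pattern above).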
def readToArr(inCSV):
# Read data into dict.
subStationData = []
with open(inCSV, newline='') as amiFile:
amiReader = csv.DictReader(amiFile, delimiter=',')
for row in amiReader:
subStation = row['SUBSTATION']
meterName = row['METER_ID']
readDateTime = dateFormatter(row['READ_DTM'])
kWh = row['READ_VALUE']
subStationData.append([subStation, meterName, readDateTime, kWh])
return subStationData
# This sorts the data by Sub--Meter--Time
def sortData(inCSV):
# Sort data.
subData = readToArr(inCSV)
# print "Read in:\n"
# pprint.pprint(subData[:25])
# random.shuffle(subData)
# print "Randomized:\n"
# pprint.pprint(subData[:25])
subData = sorted(subData, key=operator.itemgetter(0,1,2), reverse=False)
# print "Sorted:\n"
#pprint.pprint(subData[295:310])
return subData
# Run operations here:
outArr = sortData(inCSV)
outData = {}
for row in outArr:
meterName = row[1]
energyCons = row[3]
date = row[2]
    if True: # this is where you'll check whether the meter is in the list
if outData.get(meterName,'') == '':
outData[meterName] = {'energyCons': [energyCons], 'dates' : [date]}
else:
outData[meterName]['energyCons'].append(energyCons)
outData[meterName]['dates'].append(date)
i = 0
for key in outData.keys():
print(outData[key])
i = i+1
if i == 10:
break
# output = []
# power = []
# meanPower = []
# for meterName in outData.keys():
# energyCons = [int(x) for x in outData[meterName]['energyCons']]
# power = np.diff(energyCons)
# pMean = [np.mean(power)]* (len(outData[meterName]['energyCons'])-1)
# outData[meterName] ['power'] = power
# outData[meterName] ['meanPower'] = pMean
# outData[meterName] ['dates'] = date
# index = [i for i, j in enumerate(outData[meterName]['power']) if j <= MinDevFromAve*0.01*pMean[0]]
# Test= np.split(index, np.where(np.diff(index) !=1)[0]+1)
# print Test
# # for i in range(Test):
# # if Test[i] >= 5:
# # print meterName
# # if count >= (MinDetRunTime-1):
# # print meterName
# # print index
# # #plt.plot (outData[113560340] ['meanPower'])
# # #plt.plot(outData[113560340] ['power'])
# # #plt.show()
# # eDiv = [0 if y==0 else x/y for x, y in zip(power, pMean)]
# output.append([meterName, date, power])
# # # Write the output.
# # with open(pJoin(workDir,"allOutputData.json"),"w") as outFile:
# # json.dump(output, outFile, indent=4) | gpl-2.0 | 6,837,612,547,506,741,000 | 32.740741 | 101 | 0.635467 | false |
AKSW/LODStats_WWW | rdfstats/controllers/vocabularies.py | 1 | 5389 | """
Copyright 2012 Jan Demter <[email protected]>
This file is part of LODStatsWWW.
LODStats is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
LODStats is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with LODStats. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from rdfstats.lib.base import BaseController, render, Session
from rdfstats import model
from sqlalchemy import and_, func, desc
from webhelpers.paginate import Page, PageURL_WebOb
log = logging.getLogger(__name__)
class VocabulariesController(BaseController):
"""REST Controller styled on the Atom Publishing Protocol"""
# To properly map this controller, ensure your config/routing.py
# file has a resource setup:
# map.resource('vocabulary', 'vocabularies')
def index(self, format='html'):
"""GET /vocabularies: All items in the collection"""
# url('vocabularies')
vocabs = Session.query(model.Vocab.uri, model.Vocab.id, func.sum(model.RDFVocabStat.count),
func.count(model.StatResult.id))\
.join(model.RDFVocabStat).join(model.StatResult)\
.filter(model.StatResult.current_of!=None)\
.group_by(model.Vocab.uri, model.Vocab.id)
c.query_string = '?'
# optional search
c.search = ''
if request.GET.has_key('search'):
vocabs = vocabs.filter(model.Vocab.uri.ilike("%%%s%%" % request.GET['search']))
c.query_string += 'search=%s&' % request.GET['search']
c.search = request.GET['search']
# sort results
c.sort_order = request.GET.get('sort')
if request.GET.has_key('sort'):
if request.GET['sort'] == 'uri':
c.vocabs = vocabs.order_by(model.Vocab.uri)
elif request.GET['sort'] == 'overall':
c.vocabs = vocabs.order_by(desc(func.sum(model.RDFVocabStat.count)),
desc(func.count(model.StatResult.id)), model.Vocab.uri)
elif request.GET['sort'] == 'datasets':
c.vocabs = vocabs.order_by(desc(func.count(model.StatResult.id)),
desc(func.sum(model.RDFVocabStat.count)), model.Vocab.uri)
else:
c.vocabs = vocabs.order_by(desc(func.count(model.StatResult.id)),
desc(func.sum(model.RDFVocabStat.count)), model.Vocab.uri)
else:
c.vocabs = vocabs.order_by(desc(func.count(model.StatResult.id)),
desc(func.sum(model.RDFVocabStat.count)), model.Vocab.uri)
if request.GET.has_key('page'):
page = request.GET['page']
else:
page = 1
page_url = PageURL_WebOb(request)
c.vocabs_page = Page(c.vocabs, page=page, items_per_page=50, url=page_url)
c.count = c.vocabs_page.item_count
return render('/vocabularies/index.html')
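    # Example requests handled by index() (hypothetical URLs):
    #   GET /vocabularies?search=foaf&sort=datasets&page=2
    #   -> filters Vocab.uri ILIKE '%foaf%' and orders by dataset count desc.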
def create(self):
"""POST /vocabularies: Create a new item"""
# url('vocabularies')
def new(self, format='html'):
"""GET /vocabularies/new: Form to create a new item"""
# url('new_vocabulary')
def update(self, id):
"""PUT /vocabularies/id: Update an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="PUT" />
# Or using helpers:
# h.form(url('vocabulary', id=ID),
# method='put')
# url('vocabulary', id=ID)
def delete(self, id):
"""DELETE /vocabularies/id: Delete an existing item"""
# Forms posted to this method should contain a hidden field:
# <input type="hidden" name="_method" value="DELETE" />
# Or using helpers:
# h.form(url('vocabulary', id=ID),
# method='delete')
# url('vocabulary', id=ID)
def show(self, id, format='html'):
"""GET /vocabularies/id: Show info and current_of-usage about Vocabulary"""
# url('vocabulary', id=ID)
if id is None:
abort(404)
try:
c.vocab = Session.query(model.Vocab).get(int(id))
except ValueError, e:
abort(404)
if c.vocab is None:
abort(404)
vs=Session.query(model.RDFVocabStat).join(model.StatResult).join(model.StatResult.current_of).filter(
and_(
model.RDFVocabStat.vocab==c.vocab,
model.StatResult.current_of!=None)).order_by(model.RDFDoc.name).all()
c.vs = vs
c.count = len(vs)
return render('/vocabularies/view.html')
def edit(self, id, format='html'):
"""GET /vocabularies/id/edit: Form to edit an existing item"""
# url('edit_vocabulary', id=ID)
| gpl-3.0 | 8,171,228,687,355,812,000 | 41.433071 | 109 | 0.601781 | false |
googleads/google-ads-python | google/ads/googleads/v6/services/types/remarketing_action_service.py | 1 | 5168 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v6.resources.types import remarketing_action
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
from google.rpc import status_pb2 as status # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v6.services",
marshal="google.ads.googleads.v6",
manifest={
"GetRemarketingActionRequest",
"MutateRemarketingActionsRequest",
"RemarketingActionOperation",
"MutateRemarketingActionsResponse",
"MutateRemarketingActionResult",
},
)
class GetRemarketingActionRequest(proto.Message):
r"""Request message for
[RemarketingActionService.GetRemarketingAction][google.ads.googleads.v6.services.RemarketingActionService.GetRemarketingAction].
Attributes:
resource_name (str):
Required. The resource name of the
remarketing action to fetch.
"""
resource_name = proto.Field(proto.STRING, number=1)
class MutateRemarketingActionsRequest(proto.Message):
r"""Request message for
[RemarketingActionService.MutateRemarketingActions][google.ads.googleads.v6.services.RemarketingActionService.MutateRemarketingActions].
Attributes:
customer_id (str):
Required. The ID of the customer whose
remarketing actions are being modified.
operations (Sequence[google.ads.googleads.v6.services.types.RemarketingActionOperation]):
Required. The list of operations to perform
on individual remarketing actions.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
"""
customer_id = proto.Field(proto.STRING, number=1)
operations = proto.RepeatedField(
proto.MESSAGE, number=2, message="RemarketingActionOperation",
)
partial_failure = proto.Field(proto.BOOL, number=3)
validate_only = proto.Field(proto.BOOL, number=4)
class RemarketingActionOperation(proto.Message):
r"""A single operation (create, update) on a remarketing action.
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
create (google.ads.googleads.v6.resources.types.RemarketingAction):
Create operation: No resource name is
expected for the new remarketing action.
update (google.ads.googleads.v6.resources.types.RemarketingAction):
Update operation: The remarketing action is
expected to have a valid resource name.
"""
update_mask = proto.Field(
proto.MESSAGE, number=4, message=field_mask.FieldMask,
)
create = proto.Field(
proto.MESSAGE,
number=1,
oneof="operation",
message=remarketing_action.RemarketingAction,
)
update = proto.Field(
proto.MESSAGE,
number=2,
oneof="operation",
message=remarketing_action.RemarketingAction,
)
class MutateRemarketingActionsResponse(proto.Message):
r"""Response message for remarketing action mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v6.services.types.MutateRemarketingActionResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE, number=3, message=status.Status,
)
results = proto.RepeatedField(
proto.MESSAGE, number=2, message="MutateRemarketingActionResult",
)
class MutateRemarketingActionResult(proto.Message):
r"""The result for the remarketing action mutate.
Attributes:
resource_name (str):
Returned for successful operations.
"""
resource_name = proto.Field(proto.STRING, number=1)
__all__ = tuple(sorted(__protobuf__.manifest))
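# Illustrative composition of a create request with the generated types above
# (customer id and action name are made up; sending the request through a
# GoogleAdsClient service transport is out of scope here):
#
#     operation = RemarketingActionOperation()
#     operation.create.name = "Example remarketing action"
#     request = MutateRemarketingActionsRequest(
#         customer_id="1234567890", operations=[operation])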
| apache-2.0 | 5,328,040,285,230,881,000 | 34.156463 | 140 | 0.688467 | false |
memsharded/conan | conans/client/generators/b2.py | 1 | 17176 |
"""
B2 Conan Generator
This is a generator for conanbuildinfo.jam files declaring all conan dependencies
as B2 project and library targets. This generates multiple tagged build info
source files, each containing a single variant as specified by the user's
conan install profile in addition to the central generic one that includes
all possible variants.
"""
from hashlib import md5
from conans.model import Generator
class B2Generator(Generator):
_b2_variation_key = None
_b2_variation_id = None
@property
def filename(self):
pass # in this case, filename defined in return value of content method
@property
def content(self):
"""
        Generates two content files: conanbuildinfo.jam and conanbuildinfo-ID.jam.
        The former defines sub-projects for each package and loads the other
        files; the latter defines variables and targets for the packages.
"""
result = {
'conanbuildinfo.jam': None,
self.conanbuildinfo_variation_jam: None
}
# Generate the common conanbuildinfo.jam which does four things:
# 1. Defines some common utility functions to make the rest of the code short.
# 2. Includes the conanbuildinfo-*.jam sub-files for constant declarations.
# 3. Defines all the package sub-projects.
# 4. Includes the conanbuildinfo-*.jam sub-files again, this time for declaring targets.
cbi = [self.conanbuildinfo_header_text]
# The prefix that does 1 & 2.
cbi += [self.conanbuildinfo_prefix_text]
# The sub-project definitions, i.e. 3.
for dep_name, dep_cpp_info in self.deps_build_info.dependencies:
cbi += ["", "# %s" % (dep_name.lower())]
cbi += self.b2_project_for_dep(dep_name, dep_cpp_info)
# The postfix which does 4.
cbi += [self.conanbuildinfo_postfix_text]
# The combined text.
result['conanbuildinfo.jam'] = "\n".join(cbi)
# Generate the current build variation conanbuildinfo-/variation/.jam which does two things:
# 1. Defines project constants for the corresponding conan buildinfo variables.
# 2. Declares targets, with b2 requirements to select the variation, for each
# library in a package and one "libs" target for the collection of all the libraries
# in the package.
cbiv = [self.conanbuildinfo_header_text]
# The first, 1, set of variables are collective in that they have the info for all
# of the packages combined, 1a.
cbiv += ["# global"]
cbiv += self.b2_constants_for_dep('conan', self.deps_build_info)
# Now the constants for individual packages, 1b.
for dep_name, dep_cpp_info in self.deps_build_info.dependencies:
cbiv += ["# %s" % (dep_name.lower())]
cbiv += self.b2_constants_for_dep(
dep_name, dep_cpp_info, self.deps_user_info[dep_name])
# Second, 2, part are the targets for the packages.
for dep_name, dep_cpp_info in self.deps_build_info.dependencies:
cbiv += ["# %s" % (dep_name.lower())]
cbiv += self.b2_targets_for_dep(dep_name, dep_cpp_info)
result[self.conanbuildinfo_variation_jam] = "\n".join(cbiv)
return result
def b2_project_for_dep(self, name, info):
"""
        Generates a sub-project definition to match the package, which is used
        later to define targets for the package's libs.
"""
if not info:
return []
name = name.lower()
# Create a b2 project for the package dependency.
return [self.conanbuildinfo_project_template.format(name=name)]
def b2_constants_for_dep(self, name, info, user=None):
"""
Generates a list of constant variable definitions for the information in the
CppInfo conan data given for the package. If user variables map is also given
those are also generated following the package variables.
"""
if not info:
return []
name = name.lower()
# Define the info specific variables. Note that the 'usage-requirements' one
# needs to be last as it references the others.
# TODO: Should be cppflags -> cxxflags
result = \
self.b2_constant(name, 'rootpath', [info.rootpath], True) + \
self.b2_constant(name, 'includedirs', info.include_paths, True) + \
self.b2_constant(name, 'libdirs', info.lib_paths, True) + \
self.b2_constant(name, 'defines', info.defines) + \
self.b2_constant(name, 'cppflags', info.cxxflags) + \
self.b2_constant(name, 'cflags', info.cflags) + \
self.b2_constant(name, 'sharedlinkflags', info.sharedlinkflags) + \
self.b2_constant(name, 'exelinkflags', info.exelinkflags) + \
self.b2_constant(name, 'requirements', self.b2_features(self.b2_variation)) + \
self.b2_constant(name, 'usage-requirements', [
'<include>$(includedirs({name},{variation}))'.format(name=name, variation=self.b2_variation_id),
'<define>$(defines({name},{variation}))'.format(name=name, variation=self.b2_variation_id),
'<cflags>$(cflags({name},{variation}))'.format(name=name, variation=self.b2_variation_id),
'<cxxflags>$(cppflags({name},{variation}))'.format(name=name, variation=self.b2_variation_id),
'<link>shared:<linkflags>$(sharedlinkflags({name},{variation}))'.format(name=name, variation=self.b2_variation_id)
])
if user:
for uk, uv in user.vars.items():
result += self.b2_constant(uk.lower() + ',' + name, 'user', [uv])
return result
def b2_targets_for_dep(self, name, info):
"""
Generates individual targets for the libraries in a package and a single "libs"
collective alias target that refers to them.
"""
if not info:
return []
name = name.lower()
result = []
if info.libs:
for lib in info.libs:
result += [self.conanbuildinfo_variation_lib_template.format(
name=name, lib=lib, variation=self.b2_variation_id)]
result += [self.conanbuildinfo_variation_alias_template.format(
name=name, libs=" ".join(info.libs), variation=self.b2_variation_id)]
else:
result += [self.conanbuildinfo_variation_alias_template.format(
name=name, libs="", variation=self.b2_variation_id)]
return result
def b2_constant(self, name, var, val, is_paths=False):
"""
        Generates a constant definition for the given variable and value(s). If
        is_paths is True the value(s) are reformatted to be acceptable to b2.
"""
if not val:
return []
if is_paths:
val = list(self.b2_path(p) for p in val)
value = []
for v in val:
if v.startswith('<'):
value += [' {val}'.format(val=v)]
else:
value += [' "{val}"'.format(val=v)]
return [self.conanbuildinfo_variation_constant_template.format(
name=name, var=var, variation=self.b2_variation_id, value="\n".join(value)
)]
@staticmethod
def b2_path(path):
"""
Adjust a regular path to the form b2 can use in source code.
"""
return path.replace('\\', '/')
@staticmethod
def b2_features(variations):
"""
        Generates a b2 requirements list, i.e. a <name>value list, from the given 'variations' dict.
"""
result = []
for k, v in sorted(variations.items()):
if v:
result += ['<%s>%s' % (k, v)]
return result
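    # Example: b2_features({'toolset': 'gcc-7', 'variant': 'release'})
    # yields ['<toolset>gcc-7', '<variant>release'] (keys sorted, empty
    # values skipped).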
@property
def conanbuildinfo_variation_jam(self):
return 'conanbuildinfo-%s.jam' % self.b2_variation_key
@property
def b2_variation_key(self):
"""
        A hashed key of the variation, used as a UID for the variation.
"""
if not self._b2_variation_key:
self._b2_variation_key = md5(self.b2_variation_id.encode('utf-8')).hexdigest()
return self._b2_variation_key
@property
def b2_variation_id(self):
"""
        A compact, comma-separated id of the variation: only the values of the
        b2 variation are included, sorted by feature name.
"""
if not self._b2_variation_id:
vid = []
for k in sorted(self.b2_variation.keys()):
if self.b2_variation[k]:
vid += [self.b2_variation[k]]
self._b2_variation_id = ",".join(vid)
return self._b2_variation_id
@property
def b2_variation(self):
"""
Returns a map of b2 features & values as translated from conan settings that
can affect the link compatibility of libraries.
"""
if not getattr(self, "_b2_variation_key", None):
self._b2_variation = {}
self._b2_variation['toolset'] = self.b2_toolset_name + '-' + self.b2_toolset_version
self._b2_variation['architecture'] = {
'x86': 'x86', 'x86_64': 'x86',
'ppc64le': 'power', 'ppc64': 'power', 'ppc32': 'power',
'armv5el': 'arm', 'armv5hf': 'arm',
'armv6': 'arm', 'armv7': 'arm', 'armv7hf': 'arm', 'armv7s': 'arm', 'armv7k': 'arm',
'armv8': 'arm', 'armv8_32': 'arm', 'armv8.3': 'arm',
'sparc': 'sparc', 'sparcv9': 'sparc',
'mips': 'mips1', 'mips64': 'mips64',
}.get(self.conanfile.settings.get_safe('arch'))
self._b2_variation['instruction-set'] = {
'armv5el': None, 'armv5hf': None,
'armv6': 'armv6', 'armv7': 'armv7', 'armv7hf': None, 'armv7k': None,
'armv7s': 'armv7s', 'armv8': None, 'armv8_32': None, 'armv8.3': None, 'avr': None,
'mips': None, 'mips64': None,
'ppc64le': None, 'ppc64': 'powerpc64', 'ppc32': None,
'sparc': None, 'sparcv9': 'v9',
'x86': None, 'x86_64': None,
}.get(self.conanfile.settings.get_safe('arch'))
self._b2_variation['address-model'] = {
'x86': '32', 'x86_64': '64',
'ppc64le': '64', 'ppc64': '64', 'ppc32': '32',
'armv5el': '32', 'armv5hf': '32',
'armv6': '32', 'armv7': '32', 'armv7s': '32', 'armv7k': '32', 'armv7hf': '32',
'armv8': '64', 'armv8_32': '32', 'armv8.3': "64",
'sparc': '32', 'sparcv9': '64',
'mips': '32', 'mips64': '64',
}.get(self.conanfile.settings.get_safe('arch'))
self._b2_variation['target-os'] = {
'Windows': 'windows', 'WindowsStore': 'windows', 'WindowsCE': 'windows',
'Linux': 'linux',
'Macos': 'darwin',
'Android': 'android',
'iOS': 'darwin', 'watchOS': 'darwin', 'tvOS': 'darwin',
'FreeBSD': 'freebsd',
'SunOS': 'solaris',
'Arduino': 'linux',
}.get(self.conanfile.settings.get_safe('os'))
self._b2_variation['variant'] = {
'Debug': 'debug',
'Release': 'release',
'RelWithDebInfo': 'relwithdebinfo',
'MinSizeRel': 'minsizerel',
}.get(self.conanfile.settings.get_safe('build_type'))
self._b2_variation['cxxstd'] = {
'98': '98', 'gnu98': '98',
'11': '11', 'gnu11': '11',
'14': '14', 'gnu14': '14',
'17': '17', 'gnu17': '17',
'2a': '2a', 'gnu2a': '2a',
'2b': '2b', 'gnu2b': '2b',
'2c': '2c', 'gnu2c': '2c',
}.get(self.conanfile.settings.get_safe('cppstd'))
self._b2_variation['cxxstd:dialect'] = {
'98': None, 'gnu98': 'gnu',
'11': None, 'gnu11': 'gnu',
'14': None, 'gnu14': 'gnu',
'17': None, 'gnu17': 'gnu',
'2a': None, 'gnu2a': 'gnu',
'2b': None, 'gnu2b': 'gnu',
'2c': None, 'gnu2c': 'gnu',
}.get(self.conanfile.settings.get_safe('cppstd'))
return self._b2_variation
@property
def b2_toolset_name(self):
compiler = {
'sun-cc': 'sun',
'gcc': 'gcc',
'Visual Studio': 'msvc',
'clang': 'clang',
'apple-clang': 'clang'
}.get(self.conanfile.settings.get_safe('compiler'))
return str(compiler)
@property
def b2_toolset_version(self):
if self.conanfile.settings.get_safe('compiler') == 'Visual Studio':
if self.conanfile.settings.compiler.version == '15':
return '14.1'
else:
return str(self.conanfile.settings.compiler.version)+'.0'
return str(self.conanfile.settings.get_safe('compiler.version'))
conanbuildinfo_header_text = """\
#|
B2 definitions for Conan packages. This is a generated file.
Edit the corresponding conanfile.txt instead.
|#
"""
conanbuildinfo_prefix_text = """\
import path ;
import project ;
import modules ;
import feature ;
local base-project = [ project.current ] ;
local base-project-mod = [ $(base-project).project-module ] ;
local base-project-location = [ project.attribute $(base-project-mod) location ] ;
rule project-define ( id )
{
id = $(id:L) ;
local saved-project = [ modules.peek project : .base-project ] ;
local id-location = [ path.join $(base-project-location) $(id) ] ;
local id-mod = [ project.load $(id-location) : synthesize ] ;
project.initialize $(id-mod) : $(id-location) ;
project.inherit-attributes $(id-mod) : $(base-project-mod) ;
local attributes = [ project.attributes $(id-mod) ] ;
$(attributes).set parent-module : $(base-project-mod) : exact ;
modules.poke $(base-project-mod) : $(id)-mod : $(id-mod) ;
modules.poke [ CALLER_MODULE ] : $(id)-mod : $(id-mod) ;
modules.poke project : .base-project : $(saved-project) ;
IMPORT $(__name__)
: constant-if call-in-project
: $(id-mod)
: constant-if call-in-project ;
if [ project.is-jamroot-module $(base-project-mod) ]
{
use-project /$(id) : $(id) ;
}
return $(id-mod) ;
}
rule constant-if ( name : value * )
{
if $(__define_constants__) && $(value)
{
call-in-project : constant $(name) : $(value) ;
modules.poke $(__name__) : $(name) : [ modules.peek $(base-project-mod) : $(name) ] ;
}
}
rule call-in-project ( project-mod ? : rule-name args * : * )
{
project-mod ?= $(base-project-mod) ;
project.push-current [ project.target $(project-mod) ] ;
local result = [ modules.call-in $(project-mod) :
$(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) : $(10) :
$(11) : $(12) : $(13) : $(14) : $(15) : $(16) : $(17) : $(18) :
$(19) ] ;
project.pop-current ;
return $(result) ;
}
rule include-conanbuildinfo ( cbi )
{
include $(cbi) ;
}
IMPORT $(__name__)
: project-define constant-if call-in-project include-conanbuildinfo
: $(base-project-mod)
: project-define constant-if call-in-project include-conanbuildinfo ;
if ! ( relwithdebinfo in [ feature.values variant ] )
{
variant relwithdebinfo : : <optimization>speed <debug-symbols>on <inlining>full <runtime-debugging>off ;
}
if ! ( minsizerel in [ feature.values variant ] )
{
variant minsizerel : : <optimization>space <debug-symbols>off <inlining>full <runtime-debugging>off ;
}
local __conanbuildinfo__ = [ GLOB $(__file__:D) : conanbuildinfo-*.jam : downcase ] ;
{
local __define_constants__ = yes ;
for local __cbi__ in $(__conanbuildinfo__)
{
call-in-project : include-conanbuildinfo $(__cbi__) ;
}
}
"""
conanbuildinfo_project_template = """\
# {name}
project-define {name} ;
"""
conanbuildinfo_postfix_text = """\
{
local __define_targets__ = yes ;
for local __cbi__ in $(__conanbuildinfo__)
{
call-in-project : include-conanbuildinfo $(__cbi__) ;
}
}
"""
conanbuildinfo_variation_constant_template = """\
constant-if {var}({name},{variation}) :
{value}
;
"""
conanbuildinfo_variation_lib_template = """\
if $(__define_targets__) {{
call-in-project $({name}-mod) : lib {lib}
:
: <name>{lib} <search>$(libdirs({name},{variation})) $(requirements({name},{variation}))
:
: $(usage-requirements({name},{variation})) ;
call-in-project $({name}-mod) : explicit {lib} ; }}
"""
conanbuildinfo_variation_alias_template = """\
if $(__define_targets__) {{
call-in-project $({name}-mod) : alias libs
: {libs}
: $(requirements({name},{variation}))
:
: $(usage-requirements({name},{variation})) ;
call-in-project $({name}-mod) : explicit libs ; }}
"""
| mit | 7,826,880,889,923,561,000 | 38.851508 | 130 | 0.561074 | false |
owtf/owtf | owtf/models/test_group.py | 1 | 1161 | """
owtf.models.test_group
~~~~~~~~~~~~~~~~~~~~~~
"""
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from owtf.db.model_base import Model
class TestGroup(Model):
__tablename__ = "test_groups"
code = Column(String, primary_key=True)
group = Column(String) # web, network
descrip = Column(String)
hint = Column(String, nullable=True)
url = Column(String)
priority = Column(Integer)
plugins = relationship("Plugin")
@classmethod
def get_by_code(cls, session, code):
"""Get the test group based on plugin code
:param code: Plugin code
:type code: `str`
:return: Test group dict
:rtype: `dict`
"""
group = session.query(TestGroup).get(code)
return group.to_dict()
@classmethod
def get_all(cls, session):
"""Get all test groups from th DB
:return:
:rtype:
"""
test_groups = session.query(TestGroup).order_by(TestGroup.priority.desc()).all()
dict_list = []
for obj in test_groups:
dict_list.append(obj.to_dict())
return dict_list
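# Illustrative usage (assumes a configured SQLAlchemy session; the plugin
# code shown below is hypothetical):
#
#     groups = TestGroup.get_all(session)          # ordered by priority desc
#     one = TestGroup.get_by_code(session, 'OWTF-IG-004')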
| bsd-3-clause | -226,405,339,908,348,930 | 24.23913 | 88 | 0.596899 | false |
CCI-MOC/nova | nova/tests/unit/image/test_glance.py | 1 | 52178 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from six.moves import StringIO
import glanceclient.exc
import mock
from oslo_config import cfg
from oslo_utils import netutils
import testtools
from nova import context
from nova import exception
from nova.image import glance
from nova import test
CONF = cfg.CONF
NOW_GLANCE_FORMAT = "2010-10-11T10:30:22.000000"
class tzinfo(datetime.tzinfo):
@staticmethod
def utcoffset(*args, **kwargs):
return datetime.timedelta()
NOW_DATETIME = datetime.datetime(2010, 10, 11, 10, 30, 22, tzinfo=tzinfo())
class TestConversions(test.NoDBTestCase):
def test_convert_timestamps_to_datetimes(self):
fixture = {'name': None,
'properties': {},
'status': None,
'is_public': None,
'created_at': NOW_GLANCE_FORMAT,
'updated_at': NOW_GLANCE_FORMAT,
'deleted_at': NOW_GLANCE_FORMAT}
result = glance._convert_timestamps_to_datetimes(fixture)
self.assertEqual(result['created_at'], NOW_DATETIME)
self.assertEqual(result['updated_at'], NOW_DATETIME)
self.assertEqual(result['deleted_at'], NOW_DATETIME)
def _test_extracting_missing_attributes(self, include_locations):
# Verify behavior from glance objects that are missing attributes
# TODO(jaypipes): Find a better way of testing this crappy
# glanceclient magic object stuff.
class MyFakeGlanceImage(object):
def __init__(self, metadata):
IMAGE_ATTRIBUTES = ['size', 'owner', 'id', 'created_at',
'updated_at', 'status', 'min_disk',
'min_ram', 'is_public']
raw = dict.fromkeys(IMAGE_ATTRIBUTES)
raw.update(metadata)
self.__dict__['raw'] = raw
def __getattr__(self, key):
try:
return self.__dict__['raw'][key]
except KeyError:
raise AttributeError(key)
def __setattr__(self, key, value):
try:
self.__dict__['raw'][key] = value
except KeyError:
raise AttributeError(key)
metadata = {
'id': 1,
'created_at': NOW_DATETIME,
'updated_at': NOW_DATETIME,
}
image = MyFakeGlanceImage(metadata)
observed = glance._extract_attributes(
image, include_locations=include_locations)
expected = {
'id': 1,
'name': None,
'is_public': None,
'size': 0,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': NOW_DATETIME,
'updated_at': NOW_DATETIME,
'deleted_at': None,
'deleted': None,
'status': None,
'properties': {},
'owner': None
}
if include_locations:
expected['locations'] = None
expected['direct_url'] = None
self.assertEqual(expected, observed)
def test_extracting_missing_attributes_include_locations(self):
self._test_extracting_missing_attributes(include_locations=True)
def test_extracting_missing_attributes_exclude_locations(self):
self._test_extracting_missing_attributes(include_locations=False)
class TestExceptionTranslations(test.NoDBTestCase):
def test_client_forbidden_to_imagenotauthed(self):
in_exc = glanceclient.exc.Forbidden('123')
out_exc = glance._translate_image_exception('123', in_exc)
self.assertIsInstance(out_exc, exception.ImageNotAuthorized)
def test_client_httpforbidden_converts_to_imagenotauthed(self):
in_exc = glanceclient.exc.HTTPForbidden('123')
out_exc = glance._translate_image_exception('123', in_exc)
self.assertIsInstance(out_exc, exception.ImageNotAuthorized)
def test_client_notfound_converts_to_imagenotfound(self):
in_exc = glanceclient.exc.NotFound('123')
out_exc = glance._translate_image_exception('123', in_exc)
self.assertIsInstance(out_exc, exception.ImageNotFound)
def test_client_httpnotfound_converts_to_imagenotfound(self):
in_exc = glanceclient.exc.HTTPNotFound('123')
out_exc = glance._translate_image_exception('123', in_exc)
self.assertIsInstance(out_exc, exception.ImageNotFound)
class TestGlanceSerializer(test.NoDBTestCase):
def test_serialize(self):
metadata = {'name': 'image1',
'is_public': True,
'foo': 'bar',
'properties': {
'prop1': 'propvalue1',
'mappings': [
{'virtual': 'aaa',
'device': 'bbb'},
{'virtual': 'xxx',
'device': 'yyy'}],
'block_device_mapping': [
{'virtual_device': 'fake',
'device_name': '/dev/fake'},
{'virtual_device': 'ephemeral0',
'device_name': '/dev/fake0'}]}}
        # NOTE(tdurakov): Asserting on the serialized string won't work
        # when a random PYTHONHASHSEED is used. Asserting that the
        # serialized/deserialized object equals the initial one is enough.
converted = glance._convert_to_string(metadata)
self.assertEqual(glance._convert_from_string(converted), metadata)
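        # Illustrative note (based on the fixture above): _convert_to_string
        # JSON-encodes the nested 'mappings' and 'block_device_mapping' lists
        # so Glance can store them as flat string properties, and
        # _convert_from_string reverses that, which is why the round-trip
        # equals the original dict.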
class TestGetImageService(test.NoDBTestCase):
@mock.patch.object(glance.GlanceClientWrapper, '__init__',
return_value=None)
def test_get_remote_service_from_id(self, gcwi_mocked):
id_or_uri = '123'
_ignored, image_id = glance.get_remote_image_service(
mock.sentinel.ctx, id_or_uri)
self.assertEqual(id_or_uri, image_id)
gcwi_mocked.assert_called_once_with()
@mock.patch.object(glance.GlanceClientWrapper, '__init__',
return_value=None)
def test_get_remote_service_from_href(self, gcwi_mocked):
id_or_uri = 'http://127.0.0.1/123'
_ignored, image_id = glance.get_remote_image_service(
mock.sentinel.ctx, id_or_uri)
self.assertEqual('123', image_id)
gcwi_mocked.assert_called_once_with(context=mock.sentinel.ctx,
host='127.0.0.1',
port=80,
use_ssl=False)
class TestCreateGlanceClient(test.NoDBTestCase):
@mock.patch('oslo_utils.netutils.is_valid_ipv6')
@mock.patch('glanceclient.Client')
def test_headers_passed_glanceclient(self, init_mock, ipv6_mock):
self.flags(auth_strategy='keystone')
ipv6_mock.return_value = False
auth_token = 'token'
ctx = context.RequestContext('fake', 'fake', auth_token=auth_token)
host = 'host4'
port = 9295
use_ssl = False
expected_endpoint = 'http://host4:9295'
expected_params = {
'identity_headers': {
'X-Auth-Token': 'token',
'X-User-Id': 'fake',
'X-Roles': '',
'X-Tenant-Id': 'fake',
'X-Identity-Status': 'Confirmed'
},
'token': 'token'
}
glance._create_glance_client(ctx, host, port, use_ssl)
init_mock.assert_called_once_with('1', expected_endpoint,
**expected_params)
# Test the version is properly passed to glanceclient.
ipv6_mock.reset_mock()
init_mock.reset_mock()
expected_endpoint = 'http://host4:9295'
expected_params = {
'identity_headers': {
'X-Auth-Token': 'token',
'X-User-Id': 'fake',
'X-Roles': '',
'X-Tenant-Id': 'fake',
'X-Identity-Status': 'Confirmed'
},
'token': 'token'
}
glance._create_glance_client(ctx, host, port, use_ssl, version=2)
init_mock.assert_called_once_with('2', expected_endpoint,
**expected_params)
# Test that non-keystone auth strategy doesn't bother to pass
# glanceclient all the Keystone-related headers.
ipv6_mock.reset_mock()
init_mock.reset_mock()
self.flags(auth_strategy='non-keystone')
expected_endpoint = 'http://host4:9295'
expected_params = {
}
glance._create_glance_client(ctx, host, port, use_ssl)
init_mock.assert_called_once_with('1', expected_endpoint,
**expected_params)
# Test that the IPv6 bracketization adapts the endpoint properly.
ipv6_mock.reset_mock()
init_mock.reset_mock()
ipv6_mock.return_value = True
expected_endpoint = 'http://[host4]:9295'
expected_params = {
}
glance._create_glance_client(ctx, host, port, use_ssl)
init_mock.assert_called_once_with('1', expected_endpoint,
**expected_params)
class TestGlanceClientWrapper(test.NoDBTestCase):
@mock.patch('time.sleep')
@mock.patch('nova.image.glance._create_glance_client')
def test_static_client_without_retries(self, create_client_mock,
sleep_mock):
client_mock = mock.MagicMock()
images_mock = mock.MagicMock()
images_mock.get.side_effect = glanceclient.exc.ServiceUnavailable
type(client_mock).images = mock.PropertyMock(return_value=images_mock)
create_client_mock.return_value = client_mock
self.flags(num_retries=0, group='glance')
ctx = context.RequestContext('fake', 'fake')
host = 'host4'
port = 9295
use_ssl = False
client = glance.GlanceClientWrapper(context=ctx, host=host, port=port,
use_ssl=use_ssl)
create_client_mock.assert_called_once_with(ctx, host, port, use_ssl, 1)
self.assertRaises(exception.GlanceConnectionFailed,
client.call, ctx, 1, 'get', 'meow')
self.assertFalse(sleep_mock.called)
@mock.patch('nova.image.glance.LOG')
@mock.patch('time.sleep')
@mock.patch('nova.image.glance._create_glance_client')
def test_static_client_with_retries_negative(self, create_client_mock,
sleep_mock, mock_log):
client_mock = mock.Mock(spec=glanceclient.Client)
images_mock = mock.Mock()
images_mock.get.side_effect = glanceclient.exc.ServiceUnavailable
client_mock.images = images_mock
create_client_mock.return_value = client_mock
self.flags(num_retries=-1, group='glance')
ctx = context.RequestContext('fake', 'fake')
host = 'host4'
port = 9295
use_ssl = False
client = glance.GlanceClientWrapper(context=ctx, host=host, port=port,
use_ssl=use_ssl)
create_client_mock.assert_called_once_with(ctx, host, port, use_ssl, 1)
self.assertRaises(exception.GlanceConnectionFailed,
client.call, ctx, 1, 'get', 'meow')
self.assertTrue(mock_log.warning.called)
msg = mock_log.warning.call_args_list[0]
self.assertIn('Treating negative config value', msg[0][0])
self.assertFalse(sleep_mock.called)
@mock.patch('time.sleep')
@mock.patch('nova.image.glance._create_glance_client')
def test_static_client_with_retries(self, create_client_mock,
sleep_mock):
self.flags(num_retries=1, group='glance')
client_mock = mock.MagicMock()
images_mock = mock.MagicMock()
images_mock.get.side_effect = [
glanceclient.exc.ServiceUnavailable,
None
]
type(client_mock).images = mock.PropertyMock(return_value=images_mock)
create_client_mock.return_value = client_mock
ctx = context.RequestContext('fake', 'fake')
host = 'host4'
port = 9295
use_ssl = False
client = glance.GlanceClientWrapper(context=ctx,
host=host, port=port, use_ssl=use_ssl)
client.call(ctx, 1, 'get', 'meow')
sleep_mock.assert_called_once_with(1)
@mock.patch('random.shuffle')
@mock.patch('time.sleep')
@mock.patch('nova.image.glance._create_glance_client')
def test_default_client_without_retries(self, create_client_mock,
sleep_mock, shuffle_mock):
api_servers = [
'host1:9292',
'https://host2:9293',
'http://host3:9294'
]
client_mock = mock.MagicMock()
images_mock = mock.MagicMock()
images_mock.get.side_effect = glanceclient.exc.ServiceUnavailable
type(client_mock).images = mock.PropertyMock(return_value=images_mock)
create_client_mock.return_value = client_mock
shuffle_mock.return_value = api_servers
self.flags(num_retries=0, group='glance')
self.flags(api_servers=api_servers, group='glance')
# Here we are testing the behaviour that calling client.call() twice
# when there are no retries will cycle through the api_servers and not
# sleep (which would be an indication of a retry)
ctx = context.RequestContext('fake', 'fake')
client = glance.GlanceClientWrapper()
self.assertRaises(exception.GlanceConnectionFailed,
client.call, ctx, 1, 'get', 'meow')
self.assertFalse(sleep_mock.called)
self.assertRaises(exception.GlanceConnectionFailed,
client.call, ctx, 1, 'get', 'meow')
self.assertFalse(sleep_mock.called)
create_client_mock.assert_has_calls(
[
mock.call(ctx, 'host1', 9292, False, 1),
mock.call(ctx, 'host2', 9293, True, 1),
]
)
@mock.patch('random.shuffle')
@mock.patch('time.sleep')
@mock.patch('nova.image.glance._create_glance_client')
def test_default_client_with_retries(self, create_client_mock,
sleep_mock, shuffle_mock):
api_servers = [
'host1:9292',
'https://host2:9293',
'http://host3:9294'
]
client_mock = mock.MagicMock()
images_mock = mock.MagicMock()
images_mock.get.side_effect = [
glanceclient.exc.ServiceUnavailable,
None
]
type(client_mock).images = mock.PropertyMock(return_value=images_mock)
create_client_mock.return_value = client_mock
self.flags(num_retries=1, group='glance')
self.flags(api_servers=api_servers, group='glance')
ctx = context.RequestContext('fake', 'fake')
# And here we're testing that if num_retries is not 0, then we attempt
# to retry the same connection action against the next client.
client = glance.GlanceClientWrapper()
client.call(ctx, 1, 'get', 'meow')
create_client_mock.assert_has_calls(
[
mock.call(ctx, 'host1', 9292, False, 1),
mock.call(ctx, 'host2', 9293, True, 1),
]
)
sleep_mock.assert_called_once_with(1)
@mock.patch('oslo_service.sslutils.is_enabled')
@mock.patch('glanceclient.Client')
def test_create_glance_client_with_ssl(self, client_mock,
ssl_enable_mock):
self.flags(ca_file='foo.cert', cert_file='bar.cert',
key_file='wut.key', group='ssl')
ctxt = mock.sentinel.ctx
glance._create_glance_client(ctxt, 'host4', 9295, use_ssl=True)
client_mock.assert_called_once_with(
'1', 'https://host4:9295', insecure=False, ssl_compression=False,
cert_file='bar.cert', key_file='wut.key', cacert='foo.cert')
class TestDownloadNoDirectUri(test.NoDBTestCase):
"""Tests the download method of the GlanceImageService when the
default of not allowing direct URI transfers is set.
"""
@mock.patch('__builtin__.open')
@mock.patch('nova.image.glance.GlanceImageService.show')
def test_download_no_data_no_dest_path(self, show_mock, open_mock):
client = mock.MagicMock()
client.call.return_value = mock.sentinel.image_chunks
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
res = service.download(ctx, mock.sentinel.image_id)
self.assertFalse(show_mock.called)
self.assertFalse(open_mock.called)
client.call.assert_called_once_with(ctx, 1, 'data',
mock.sentinel.image_id)
self.assertEqual(mock.sentinel.image_chunks, res)
@mock.patch('__builtin__.open')
@mock.patch('nova.image.glance.GlanceImageService.show')
def test_download_data_no_dest_path(self, show_mock, open_mock):
client = mock.MagicMock()
client.call.return_value = [1, 2, 3]
ctx = mock.sentinel.ctx
data = mock.MagicMock()
service = glance.GlanceImageService(client)
res = service.download(ctx, mock.sentinel.image_id, data=data)
self.assertFalse(show_mock.called)
self.assertFalse(open_mock.called)
client.call.assert_called_once_with(ctx, 1, 'data',
mock.sentinel.image_id)
self.assertIsNone(res)
data.write.assert_has_calls(
[
mock.call(1),
mock.call(2),
mock.call(3)
]
)
self.assertFalse(data.close.called)
@mock.patch('__builtin__.open')
@mock.patch('nova.image.glance.GlanceImageService.show')
def test_download_no_data_dest_path(self, show_mock, open_mock):
client = mock.MagicMock()
client.call.return_value = [1, 2, 3]
ctx = mock.sentinel.ctx
writer = mock.MagicMock()
open_mock.return_value = writer
service = glance.GlanceImageService(client)
res = service.download(ctx, mock.sentinel.image_id,
dst_path=mock.sentinel.dst_path)
self.assertFalse(show_mock.called)
client.call.assert_called_once_with(ctx, 1, 'data',
mock.sentinel.image_id)
open_mock.assert_called_once_with(mock.sentinel.dst_path, 'wb')
self.assertIsNone(res)
writer.write.assert_has_calls(
[
mock.call(1),
mock.call(2),
mock.call(3)
]
)
writer.close.assert_called_once_with()
@mock.patch('__builtin__.open')
@mock.patch('nova.image.glance.GlanceImageService.show')
def test_download_data_dest_path(self, show_mock, open_mock):
# NOTE(jaypipes): This really shouldn't be allowed, but because of the
# horrible design of the download() method in GlanceImageService, no
# error is raised, and the dst_path is ignored...
# #TODO(jaypipes): Fix the aforementioned horrible design of
# the download() method.
client = mock.MagicMock()
client.call.return_value = [1, 2, 3]
ctx = mock.sentinel.ctx
data = mock.MagicMock()
service = glance.GlanceImageService(client)
res = service.download(ctx, mock.sentinel.image_id, data=data)
self.assertFalse(show_mock.called)
self.assertFalse(open_mock.called)
client.call.assert_called_once_with(ctx, 1, 'data',
mock.sentinel.image_id)
self.assertIsNone(res)
data.write.assert_has_calls(
[
mock.call(1),
mock.call(2),
mock.call(3)
]
)
self.assertFalse(data.close.called)
@mock.patch('__builtin__.open')
@mock.patch('nova.image.glance.GlanceImageService.show')
def test_download_data_dest_path_write_fails(self, show_mock, open_mock):
client = mock.MagicMock()
client.call.return_value = [1, 2, 3]
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
# NOTE(mikal): data is a file like object, which in our case always
# raises an exception when we attempt to write to the file.
class FakeDiskException(Exception):
pass
class Exceptionator(StringIO):
def write(self, _):
raise FakeDiskException('Disk full!')
self.assertRaises(FakeDiskException, service.download, ctx,
mock.sentinel.image_id, data=Exceptionator())
@mock.patch('nova.image.glance.GlanceImageService._get_transfer_module')
@mock.patch('nova.image.glance.GlanceImageService.show')
def test_download_direct_file_uri(self, show_mock, get_tran_mock):
self.flags(allowed_direct_url_schemes=['file'], group='glance')
show_mock.return_value = {
'locations': [
{
'url': 'file:///files/image',
'metadata': mock.sentinel.loc_meta
}
]
}
tran_mod = mock.MagicMock()
get_tran_mock.return_value = tran_mod
client = mock.MagicMock()
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
res = service.download(ctx, mock.sentinel.image_id,
dst_path=mock.sentinel.dst_path)
self.assertIsNone(res)
self.assertFalse(client.call.called)
show_mock.assert_called_once_with(ctx,
mock.sentinel.image_id,
include_locations=True)
get_tran_mock.assert_called_once_with('file')
tran_mod.download.assert_called_once_with(ctx, mock.ANY,
mock.sentinel.dst_path,
mock.sentinel.loc_meta)
@mock.patch('__builtin__.open')
@mock.patch('nova.image.glance.GlanceImageService._get_transfer_module')
@mock.patch('nova.image.glance.GlanceImageService.show')
def test_download_direct_exception_fallback(self, show_mock,
get_tran_mock,
open_mock):
# Test that we fall back to downloading to the dst_path
# if the download method of the transfer module raised
# an exception.
self.flags(allowed_direct_url_schemes=['file'], group='glance')
show_mock.return_value = {
'locations': [
{
'url': 'file:///files/image',
'metadata': mock.sentinel.loc_meta
}
]
}
tran_mod = mock.MagicMock()
tran_mod.download.side_effect = Exception
get_tran_mock.return_value = tran_mod
client = mock.MagicMock()
client.call.return_value = [1, 2, 3]
ctx = mock.sentinel.ctx
writer = mock.MagicMock()
open_mock.return_value = writer
service = glance.GlanceImageService(client)
res = service.download(ctx, mock.sentinel.image_id,
dst_path=mock.sentinel.dst_path)
self.assertIsNone(res)
show_mock.assert_called_once_with(ctx,
mock.sentinel.image_id,
include_locations=True)
get_tran_mock.assert_called_once_with('file')
tran_mod.download.assert_called_once_with(ctx, mock.ANY,
mock.sentinel.dst_path,
mock.sentinel.loc_meta)
client.call.assert_called_once_with(ctx, 1, 'data',
mock.sentinel.image_id)
# NOTE(jaypipes): log messages call open() in part of the
# download path, so here, we just check that the last open()
# call was done for the dst_path file descriptor.
open_mock.assert_called_with(mock.sentinel.dst_path, 'wb')
self.assertIsNone(res)
writer.write.assert_has_calls(
[
mock.call(1),
mock.call(2),
mock.call(3)
]
)
@mock.patch('__builtin__.open')
@mock.patch('nova.image.glance.GlanceImageService._get_transfer_module')
@mock.patch('nova.image.glance.GlanceImageService.show')
def test_download_direct_no_mod_fallback(self, show_mock,
get_tran_mock,
open_mock):
        # Test that we fall back to downloading to the dst_path
        # if no appropriate transfer module is found for the
        # URL scheme.
self.flags(allowed_direct_url_schemes=['funky'], group='glance')
show_mock.return_value = {
'locations': [
{
'url': 'file:///files/image',
'metadata': mock.sentinel.loc_meta
}
]
}
get_tran_mock.return_value = None
client = mock.MagicMock()
client.call.return_value = [1, 2, 3]
ctx = mock.sentinel.ctx
writer = mock.MagicMock()
open_mock.return_value = writer
service = glance.GlanceImageService(client)
res = service.download(ctx, mock.sentinel.image_id,
dst_path=mock.sentinel.dst_path)
self.assertIsNone(res)
show_mock.assert_called_once_with(ctx,
mock.sentinel.image_id,
include_locations=True)
get_tran_mock.assert_called_once_with('file')
client.call.assert_called_once_with(ctx, 1, 'data',
mock.sentinel.image_id)
# NOTE(jaypipes): log messages call open() in part of the
# download path, so here, we just check that the last open()
# call was done for the dst_path file descriptor.
open_mock.assert_called_with(mock.sentinel.dst_path, 'wb')
self.assertIsNone(res)
writer.write.assert_has_calls(
[
mock.call(1),
mock.call(2),
mock.call(3)
]
)
writer.close.assert_called_once_with()
class TestIsImageAvailable(test.NoDBTestCase):
"""Tests the internal _is_image_available function."""
class ImageSpecV2(object):
visibility = None
properties = None
class ImageSpecV1(object):
is_public = None
properties = None
def test_auth_token_override(self):
ctx = mock.MagicMock(auth_token=True)
img = mock.MagicMock()
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
self.assertFalse(img.called)
def test_admin_override(self):
ctx = mock.MagicMock(auth_token=False, is_admin=True)
img = mock.MagicMock()
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
self.assertFalse(img.called)
def test_v2_visibility(self):
ctx = mock.MagicMock(auth_token=False, is_admin=False)
# We emulate warlock validation that throws an AttributeError
# if you try to call is_public on an image model returned by
# a call to V2 image.get(). Here, the ImageSpecV2 does not have
# an is_public attribute and MagicMock will throw an AttributeError.
img = mock.MagicMock(visibility='PUBLIC',
spec=TestIsImageAvailable.ImageSpecV2)
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
def test_v1_is_public(self):
ctx = mock.MagicMock(auth_token=False, is_admin=False)
img = mock.MagicMock(is_public=True,
spec=TestIsImageAvailable.ImageSpecV1)
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
def test_project_is_owner(self):
ctx = mock.MagicMock(auth_token=False, is_admin=False,
project_id='123')
props = {
'owner_id': '123'
}
img = mock.MagicMock(visibility='private', properties=props,
spec=TestIsImageAvailable.ImageSpecV2)
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
ctx.reset_mock()
img = mock.MagicMock(is_public=False, properties=props,
spec=TestIsImageAvailable.ImageSpecV1)
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
def test_project_context_matches_project_prop(self):
ctx = mock.MagicMock(auth_token=False, is_admin=False,
project_id='123')
props = {
'project_id': '123'
}
img = mock.MagicMock(visibility='private', properties=props,
spec=TestIsImageAvailable.ImageSpecV2)
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
ctx.reset_mock()
img = mock.MagicMock(is_public=False, properties=props,
spec=TestIsImageAvailable.ImageSpecV1)
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
def test_no_user_in_props(self):
ctx = mock.MagicMock(auth_token=False, is_admin=False,
project_id='123')
props = {
}
img = mock.MagicMock(visibility='private', properties=props,
spec=TestIsImageAvailable.ImageSpecV2)
res = glance._is_image_available(ctx, img)
self.assertFalse(res)
ctx.reset_mock()
img = mock.MagicMock(is_public=False, properties=props,
spec=TestIsImageAvailable.ImageSpecV1)
res = glance._is_image_available(ctx, img)
self.assertFalse(res)
def test_user_matches_context(self):
ctx = mock.MagicMock(auth_token=False, is_admin=False,
user_id='123')
props = {
'user_id': '123'
}
img = mock.MagicMock(visibility='private', properties=props,
spec=TestIsImageAvailable.ImageSpecV2)
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
ctx.reset_mock()
img = mock.MagicMock(is_public=False, properties=props,
spec=TestIsImageAvailable.ImageSpecV1)
res = glance._is_image_available(ctx, img)
self.assertTrue(res)
class TestShow(test.NoDBTestCase):
"""Tests the show method of the GlanceImageService."""
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_show_success(self, is_avail_mock, trans_from_mock):
is_avail_mock.return_value = True
trans_from_mock.return_value = {'mock': mock.sentinel.trans_from}
client = mock.MagicMock()
client.call.return_value = {}
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
info = service.show(ctx, mock.sentinel.image_id)
client.call.assert_called_once_with(ctx, 1, 'get',
mock.sentinel.image_id)
is_avail_mock.assert_called_once_with(ctx, {})
trans_from_mock.assert_called_once_with({}, include_locations=False)
self.assertIn('mock', info)
self.assertEqual(mock.sentinel.trans_from, info['mock'])
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_show_not_available(self, is_avail_mock, trans_from_mock):
is_avail_mock.return_value = False
client = mock.MagicMock()
client.call.return_value = mock.sentinel.images_0
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
with testtools.ExpectedException(exception.ImageNotFound):
service.show(ctx, mock.sentinel.image_id)
client.call.assert_called_once_with(ctx, 1, 'get',
mock.sentinel.image_id)
is_avail_mock.assert_called_once_with(ctx, mock.sentinel.images_0)
self.assertFalse(trans_from_mock.called)
@mock.patch('nova.image.glance._reraise_translated_image_exception')
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_show_client_failure(self, is_avail_mock, trans_from_mock,
reraise_mock):
raised = exception.ImageNotAuthorized(image_id=123)
client = mock.MagicMock()
client.call.side_effect = glanceclient.exc.Forbidden
ctx = mock.sentinel.ctx
reraise_mock.side_effect = raised
service = glance.GlanceImageService(client)
with testtools.ExpectedException(exception.ImageNotAuthorized):
service.show(ctx, mock.sentinel.image_id)
client.call.assert_called_once_with(ctx, 1, 'get',
mock.sentinel.image_id)
self.assertFalse(is_avail_mock.called)
self.assertFalse(trans_from_mock.called)
reraise_mock.assert_called_once_with(mock.sentinel.image_id)
@mock.patch('nova.image.glance._is_image_available')
def test_show_queued_image_without_some_attrs(self, is_avail_mock):
is_avail_mock.return_value = True
client = mock.MagicMock()
# fake image cls without disk_format, container_format, name attributes
class fake_image_cls(dict):
id = 'b31aa5dd-f07a-4748-8f15-398346887584'
deleted = False
protected = False
min_disk = 0
created_at = '2014-05-20T08:16:48'
size = 0
status = 'queued'
is_public = False
min_ram = 0
owner = '980ec4870033453ead65c0470a78b8a8'
updated_at = '2014-05-20T08:16:48'
glance_image = fake_image_cls()
client.call.return_value = glance_image
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
image_info = service.show(ctx, glance_image.id)
client.call.assert_called_once_with(ctx, 1, 'get',
glance_image.id)
NOVA_IMAGE_ATTRIBUTES = set(['size', 'disk_format', 'owner',
'container_format', 'status', 'id',
'name', 'created_at', 'updated_at',
'deleted', 'deleted_at', 'checksum',
'min_disk', 'min_ram', 'is_public',
'properties'])
self.assertEqual(NOVA_IMAGE_ATTRIBUTES, set(image_info.keys()))
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_include_locations_success(self, avail_mock, trans_from_mock):
locations = [mock.sentinel.loc1]
avail_mock.return_value = True
trans_from_mock.return_value = {'locations': locations}
client = mock.Mock()
client.call.return_value = mock.sentinel.image
service = glance.GlanceImageService(client)
ctx = mock.sentinel.ctx
image_id = mock.sentinel.image_id
info = service.show(ctx, image_id, include_locations=True)
client.call.assert_called_once_with(ctx, 2, 'get', image_id)
avail_mock.assert_called_once_with(ctx, mock.sentinel.image)
trans_from_mock.assert_called_once_with(mock.sentinel.image,
include_locations=True)
self.assertIn('locations', info)
self.assertEqual(locations, info['locations'])
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_include_direct_uri_success(self, avail_mock, trans_from_mock):
locations = [mock.sentinel.loc1]
avail_mock.return_value = True
trans_from_mock.return_value = {'locations': locations,
'direct_uri': mock.sentinel.duri}
client = mock.Mock()
client.call.return_value = mock.sentinel.image
service = glance.GlanceImageService(client)
ctx = mock.sentinel.ctx
image_id = mock.sentinel.image_id
info = service.show(ctx, image_id, include_locations=True)
client.call.assert_called_once_with(ctx, 2, 'get', image_id)
expected = locations
expected.append({'url': mock.sentinel.duri, 'metadata': {}})
self.assertIn('locations', info)
self.assertEqual(expected, info['locations'])
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_do_not_show_deleted_images(self, is_avail_mock, trans_from_mock):
class fake_image_cls(dict):
id = 'b31aa5dd-f07a-4748-8f15-398346887584'
deleted = True
glance_image = fake_image_cls()
client = mock.MagicMock()
client.call.return_value = glance_image
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
with testtools.ExpectedException(exception.ImageNotFound):
service.show(ctx, glance_image.id, show_deleted=False)
client.call.assert_called_once_with(ctx, 1, 'get',
glance_image.id)
self.assertFalse(is_avail_mock.called)
self.assertFalse(trans_from_mock.called)
class TestDetail(test.NoDBTestCase):
"""Tests the detail method of the GlanceImageService."""
@mock.patch('nova.image.glance._extract_query_params')
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_detail_success_available(self, is_avail_mock, trans_from_mock,
ext_query_mock):
params = {}
is_avail_mock.return_value = True
ext_query_mock.return_value = params
trans_from_mock.return_value = mock.sentinel.trans_from
client = mock.MagicMock()
client.call.return_value = [mock.sentinel.images_0]
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
images = service.detail(ctx, **params)
client.call.assert_called_once_with(ctx, 1, 'list')
is_avail_mock.assert_called_once_with(ctx, mock.sentinel.images_0)
trans_from_mock.assert_called_once_with(mock.sentinel.images_0)
self.assertEqual([mock.sentinel.trans_from], images)
@mock.patch('nova.image.glance._extract_query_params')
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_detail_success_unavailable(self, is_avail_mock, trans_from_mock,
ext_query_mock):
params = {}
is_avail_mock.return_value = False
ext_query_mock.return_value = params
trans_from_mock.return_value = mock.sentinel.trans_from
client = mock.MagicMock()
client.call.return_value = [mock.sentinel.images_0]
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
images = service.detail(ctx, **params)
client.call.assert_called_once_with(ctx, 1, 'list')
is_avail_mock.assert_called_once_with(ctx, mock.sentinel.images_0)
self.assertFalse(trans_from_mock.called)
self.assertEqual([], images)
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_detail_params_passed(self, is_avail_mock, _trans_from_mock):
client = mock.MagicMock()
client.call.return_value = [mock.sentinel.images_0]
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
service.detail(ctx, page_size=5, limit=10)
expected_filters = {
'is_public': 'none'
}
client.call.assert_called_once_with(ctx, 1, 'list',
filters=expected_filters,
page_size=5,
limit=10)
@mock.patch('nova.image.glance._reraise_translated_exception')
@mock.patch('nova.image.glance._extract_query_params')
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._is_image_available')
def test_detail_client_failure(self, is_avail_mock, trans_from_mock,
ext_query_mock, reraise_mock):
params = {}
ext_query_mock.return_value = params
raised = exception.Forbidden()
client = mock.MagicMock()
client.call.side_effect = glanceclient.exc.Forbidden
ctx = mock.sentinel.ctx
reraise_mock.side_effect = raised
service = glance.GlanceImageService(client)
with testtools.ExpectedException(exception.Forbidden):
service.detail(ctx, **params)
client.call.assert_called_once_with(ctx, 1, 'list')
self.assertFalse(is_avail_mock.called)
self.assertFalse(trans_from_mock.called)
reraise_mock.assert_called_once_with()
class TestCreate(test.NoDBTestCase):
"""Tests the create method of the GlanceImageService."""
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._translate_to_glance')
def test_create_success(self, trans_to_mock, trans_from_mock):
translated = {
'image_id': mock.sentinel.image_id
}
trans_to_mock.return_value = translated
trans_from_mock.return_value = mock.sentinel.trans_from
image_mock = mock.MagicMock(spec=dict)
client = mock.MagicMock()
client.call.return_value = mock.sentinel.image_meta
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
image_meta = service.create(ctx, image_mock)
trans_to_mock.assert_called_once_with(image_mock)
client.call.assert_called_once_with(ctx, 1, 'create',
image_id=mock.sentinel.image_id)
trans_from_mock.assert_called_once_with(mock.sentinel.image_meta)
self.assertEqual(mock.sentinel.trans_from, image_meta)
# Now verify that if we supply image data to the call,
# that the client is also called with the data kwarg
client.reset_mock()
service.create(ctx, image_mock, data=mock.sentinel.data)
client.call.assert_called_once_with(ctx, 1, 'create',
image_id=mock.sentinel.image_id,
data=mock.sentinel.data)
@mock.patch('nova.image.glance._reraise_translated_exception')
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._translate_to_glance')
def test_create_client_failure(self, trans_to_mock, trans_from_mock,
reraise_mock):
translated = {}
trans_to_mock.return_value = translated
image_mock = mock.MagicMock(spec=dict)
raised = exception.Invalid()
client = mock.MagicMock()
client.call.side_effect = glanceclient.exc.BadRequest
ctx = mock.sentinel.ctx
reraise_mock.side_effect = raised
service = glance.GlanceImageService(client)
self.assertRaises(exception.Invalid, service.create, ctx, image_mock)
trans_to_mock.assert_called_once_with(image_mock)
self.assertFalse(trans_from_mock.called)
class TestUpdate(test.NoDBTestCase):
"""Tests the update method of the GlanceImageService."""
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._translate_to_glance')
def test_update_success(self, trans_to_mock, trans_from_mock):
translated = {
'id': mock.sentinel.image_id,
'name': mock.sentinel.name
}
trans_to_mock.return_value = translated
trans_from_mock.return_value = mock.sentinel.trans_from
image_mock = mock.MagicMock(spec=dict)
client = mock.MagicMock()
client.call.return_value = mock.sentinel.image_meta
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
image_meta = service.update(ctx, mock.sentinel.image_id, image_mock)
trans_to_mock.assert_called_once_with(image_mock)
# Verify that the 'id' element has been removed as a kwarg to
# the call to glanceclient's update (since the image ID is
# supplied as a positional arg), and that the
# purge_props default is True.
client.call.assert_called_once_with(ctx, 1, 'update',
mock.sentinel.image_id,
name=mock.sentinel.name,
purge_props=True)
trans_from_mock.assert_called_once_with(mock.sentinel.image_meta)
self.assertEqual(mock.sentinel.trans_from, image_meta)
# Now verify that if we supply image data to the call,
# that the client is also called with the data kwarg
client.reset_mock()
service.update(ctx, mock.sentinel.image_id,
image_mock, data=mock.sentinel.data)
client.call.assert_called_once_with(ctx, 1, 'update',
mock.sentinel.image_id,
name=mock.sentinel.name,
purge_props=True,
data=mock.sentinel.data)
@mock.patch('nova.image.glance._reraise_translated_image_exception')
@mock.patch('nova.image.glance._translate_from_glance')
@mock.patch('nova.image.glance._translate_to_glance')
def test_update_client_failure(self, trans_to_mock, trans_from_mock,
reraise_mock):
translated = {
'name': mock.sentinel.name
}
trans_to_mock.return_value = translated
trans_from_mock.return_value = mock.sentinel.trans_from
image_mock = mock.MagicMock(spec=dict)
raised = exception.ImageNotAuthorized(image_id=123)
client = mock.MagicMock()
client.call.side_effect = glanceclient.exc.Forbidden
ctx = mock.sentinel.ctx
reraise_mock.side_effect = raised
service = glance.GlanceImageService(client)
self.assertRaises(exception.ImageNotAuthorized,
service.update, ctx, mock.sentinel.image_id,
image_mock)
client.call.assert_called_once_with(ctx, 1, 'update',
mock.sentinel.image_id,
purge_props=True,
name=mock.sentinel.name)
self.assertFalse(trans_from_mock.called)
reraise_mock.assert_called_once_with(mock.sentinel.image_id)
class TestDelete(test.NoDBTestCase):
"""Tests the delete method of the GlanceImageService."""
def test_delete_success(self):
client = mock.MagicMock()
client.call.return_value = True
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
service.delete(ctx, mock.sentinel.image_id)
client.call.assert_called_once_with(ctx, 1, 'delete',
mock.sentinel.image_id)
def test_delete_client_failure(self):
client = mock.MagicMock()
client.call.side_effect = glanceclient.exc.NotFound
ctx = mock.sentinel.ctx
service = glance.GlanceImageService(client)
self.assertRaises(exception.ImageNotFound, service.delete, ctx,
mock.sentinel.image_id)
class TestGlanceUrl(test.NoDBTestCase):
def test_generate_glance_http_url(self):
generated_url = glance.generate_glance_url()
glance_host = CONF.glance.host
# ipv6 address, need to wrap it with '[]'
if netutils.is_valid_ipv6(glance_host):
glance_host = '[%s]' % glance_host
http_url = "http://%s:%d" % (glance_host, CONF.glance.port)
self.assertEqual(generated_url, http_url)
def test_generate_glance_https_url(self):
self.flags(protocol="https", group='glance')
generated_url = glance.generate_glance_url()
glance_host = CONF.glance.host
# ipv6 address, need to wrap it with '[]'
if netutils.is_valid_ipv6(glance_host):
glance_host = '[%s]' % glance_host
https_url = "https://%s:%d" % (glance_host, CONF.glance.port)
self.assertEqual(generated_url, https_url)
class TestGlanceApiServers(test.NoDBTestCase):
def test_get_ipv4_api_servers(self):
self.flags(api_servers=['10.0.1.1:9292',
'https://10.0.0.1:9293',
'http://10.0.2.2:9294'], group='glance')
glance_host = ['10.0.1.1', '10.0.0.1',
'10.0.2.2']
api_servers = glance.get_api_servers()
i = 0
for server in api_servers:
i += 1
self.assertIn(server[0], glance_host)
if i > 2:
break
def test_get_ipv6_api_servers(self):
self.flags(api_servers=['[2001:2012:1:f101::1]:9292',
'https://[2010:2013:1:f122::1]:9293',
'http://[2001:2011:1:f111::1]:9294'],
group='glance')
glance_host = ['2001:2012:1:f101::1', '2010:2013:1:f122::1',
'2001:2011:1:f111::1']
api_servers = glance.get_api_servers()
i = 0
for server in api_servers:
i += 1
self.assertIn(server[0], glance_host)
if i > 2:
break
class TestUpdateGlanceImage(test.NoDBTestCase):
@mock.patch('nova.image.glance.GlanceImageService')
def test_start(self, mock_glance_image_service):
consumer = glance.UpdateGlanceImage(
'context', 'id', 'metadata', 'stream')
with mock.patch.object(glance, 'get_remote_image_service') as a_mock:
a_mock.return_value = (mock_glance_image_service, 'image_id')
consumer.start()
mock_glance_image_service.update.assert_called_with(
'context', 'image_id', 'metadata', 'stream', purge_props=False)
| apache-2.0 | -628,681,970,858,753,000 | 40.280063 | 79 | 0.577753 | false |
ndparker/tdi3 | tdi/_util.py | 1 | 1257 | # -*- coding: ascii -*-
u"""
:Copyright:
Copyright 2006 - 2017
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================
Misc Utilities
================
Misc utilities.
"""
__author__ = u"Andr\xe9 Malo"
__docformat__ = "restructuredtext en"
def find_public(space):
"""
Determine all public names in space
:Parameters:
`space` : ``dict``
Name space to inspect
:Return: List of public names
:Rtype: ``list``
"""
if '__all__' in space:
return list(space['__all__'])
return [key for key in space.keys() if not key.startswith('_')]
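# Illustrative usage (not part of the original module):
#
#   >>> find_public({'__all__': ('spam', 'eggs')})
#   ['spam', 'eggs']
#   >>> sorted(find_public({'public': 1, '_private': 2}))
#   ['public']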
# pylint: disable = invalid-name
if str is bytes:
ur = lambda s: s.decode('ascii')
else:
ur = lambda s: s
| apache-2.0 | -3,375,396,118,363,708,000 | 23.173077 | 73 | 0.661098 | false |
eisen-dev/eisen_engine | resources/HostsList.py | 1 | 6165 | # (c) 2015, Alice Ferrazzi <[email protected]>
#
# This file is part of Eisen
#
# Eisen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eisen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Eisen. If not, see <http://www.gnu.org/licenses/>.
from flask import Flask, jsonify, abort, make_response
from flask_restful import Api, Resource, reqparse, fields, marshal
from flask_httpauth import HTTPBasicAuth
from core import dispatcher
import core.AnsibleV1Inv as ans_inv
auth = HTTPBasicAuth()
#TODO make password auth the same for all resources
@auth.get_password
def get_password(username):
if username == 'ansible':
return 'default'
return None
host_fields = {
'host': fields.String,
'port': fields.String,
'groups': fields.String,
'uri': fields.Url('host')
}
module = dispatcher.use_module()
hosts = dispatcher.HostsList(module)
class HostsAPI(Resource):
"""
"""
decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
        self.reqparse.add_argument('host', type=str, required=True,
                                   help='No host provided',
                                   location='json')
        self.reqparse.add_argument('os', type=str,
                                   help='No OS provided',
                                   location='json')
self.reqparse.add_argument('groups', type=str, default="",
location='json')
super(HostsAPI, self).__init__()
def get(self):
return {'host': [marshal(host, host_fields) for host in hosts]}
def post(self):
"""
:return:
"""
args = self.reqparse.parse_args()
inv_host = ans_inv.set_host(args['host'], '22')
inv_group = ans_inv.set_group(args['groups'], inv_host)
inv_group = ans_inv.set_group_host(inv_group,inv_host)
inv = ans_inv.set_inv(inv_group)
host = {
'id': hosts[-1]['id'] + 1,
'host': args['host'],
'groups': args['groups'],
}
hosts.append(host)
inv = ans_inv.get_inv()
print (inv.groups_list())
return {'host': marshal(host, host_fields)}, 201
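# Illustrative request against this resource (the route is registered
# elsewhere in the application; the path below is only an assumption):
#
#   curl -u ansible:default -H "Content-Type: application/json" \
#        -d '{"host": "10.0.0.5", "groups": "webservers"}' \
#        http://localhost:5000/eisen/api/v1.0/hosts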
class HostAPI(Resource):
"""
"""
decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('host', type=str, location='json')
super(HostAPI, self).__init__()
def get(self, id):
host = [host for host in hosts if host['id'] == id]
if len(host) == 0:
abort(404)
return {'host': marshal(host[0], host_fields)}
def put(self, id):
host = [host for host in hosts if host['id'] == id]
if len(host) == 0:
abort(404)
host = host[0]
args = self.reqparse.parse_args()
for k, v in args.items():
if v is not None:
host[k] = v
return {'host': marshal(host, host_fields)}
def delete(self, id):
host = [host for host in hosts if host['id'] == id]
if len(host) == 0:
abort(404)
hosts.remove(host[0])
return {'result': True}
var_fields = {
'host': fields.String,
'variable_key': fields.String,
'variable_value': fields.String,
'uri': fields.Url('host')
}
class HostVarsAPI(Resource):
"""
"""
decorators = [auth.login_required]
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('host', type=str, location='json')
self.reqparse.add_argument('variable_key', type=str, location='json')
self.reqparse.add_argument('variable_value', type=str, location='json')
super(HostVarsAPI, self).__init__()
def get(self, id):
"""
        retrieve variable information per host
:param id:
:return:
"""
        # correct the id to get the right information
        # because /host/<id>/ starts from 1, not 0
id -= 1
if id < 0:
return make_response(jsonify({'message': 'Id '+str(id+1)+' not exist'}), 403)
# try to get host variable
try:
vars = dispatcher.HostVarsList(module, id)
except:
return make_response(jsonify({'message': 'Id '+str(id+1)+' not found'}), 403)
return {'var': [marshal(var, var_fields) for var in vars]}
def post(self, id):
"""
:param id:
:return:
"""
args = self.reqparse.parse_args()
inv_host = ans_inv.dynamic_inventory.get_host(args['host'])
ans_inv.set_host_variable(args['variable_key'],
args['variable_value'],
inv_host)
host = {
'id': hosts[-1]['id'] + 1,
'host': args['host'],
            'variable_key': args['variable_key'],
            'variable_value': args['variable_value'],
}
inv = ans_inv.get_inv()
print (inv.groups_list())
return {'host': marshal(host, var_fields)}, 201
def put(self, id):
host = [host for host in hosts if host['id'] == id]
if len(host) == 0:
abort(404)
host = host[0]
args = self.reqparse.parse_args()
for k, v in args.items():
if v is not None:
host[k] = v
return {'host': marshal(host, var_fields)}
def delete(self, id):
host = [host for host in hosts if host['id'] == id]
if len(host) == 0:
abort(404)
hosts.remove(host[0])
return {'result': True} | gpl-3.0 | -6,245,616,614,568,332,000 | 29.83 | 89 | 0.554745 | false |
ogun/starmap | starcharts/svg.py | 1 | 2937 | XML_HEADER = '<?xml version="1.0" encoding="UTF-8" standalone="no"?>'
SVG_HEADER = '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="600" height="600" viewBox="0 0 600 600">'
SVG_FOOTER = "</svg>"
class Svg:
def __init__(self):
self.elements = []
def line(self, x1, y1, x2, y2, width, colour):
self.elements.append(
f'<line x1="{x1}" y1="{y1}" x2="{x2}" y2="{y2}" stroke-width="{width}" stroke="{colour}"/>'
)
def text(self, x, y, l, colour, size, align="left", decoration="None"):
self.elements.append(
f'<text x="{x}" y="{y}" text-anchor="{align}" text-decoration="{decoration}" style="fill: {colour}; font-size: {size}px; font-family: monospace">{l}</text>'
)
def circle(self, x, y, d, colour):
self.elements.append(f'<circle cx="{x}" cy="{y}" r="{d}" fill="{colour}" />')
def circle2(self, x, y, d, width, colour):
self.elements.append(
f'<circle cx="{x}" cy="{y}" r="{d}" stroke="{colour}" stroke-width="{width}" fill="none" />'
)
def curve(self, _points, width, colour):
points = sum(_points, ())
# http://schepers.cc/getting-to-the-point
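        # the flattened points are treated as a Catmull-Rom spline and
        # converted to the equivalent cubic Bezier control points; the
        # first and last points are duplicated so the curve passes
        # through the endpoints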
d = f"M {points[0]} {points[1]} "
i = 0
points_length = len(points)
while points_length - 2 > i:
p = []
if i == 0:
p.append((points[i], points[i + 1]))
p.append((points[i], points[i + 1]))
p.append((points[i + 2], points[i + 3]))
p.append((points[i + 4], points[i + 5]))
elif points_length - 4 == i:
p.append((points[i - 2], points[i - 1]))
p.append((points[i], points[i + 1]))
p.append((points[i + 2], points[i + 3]))
p.append((points[i + 2], points[i + 3]))
else:
p.append((points[i - 2], points[i - 1]))
p.append((points[i], points[i + 1]))
p.append((points[i + 2], points[i + 3]))
p.append((points[i + 4], points[i + 5]))
i += 2
bp = []
bp.append((p[1][0], p[1][1]))
bp.append(
(
((-(p[0][0]) + 6 * p[1][0] + p[2][0]) / 6),
(-(p[0][1]) + 6 * p[1][1] + p[2][1]) / 6,
)
)
bp.append(
(
((p[1][0] + 6 * p[2][0] - p[3][0]) / 6),
(p[1][1] + 6 * p[2][1] - p[3][1]) / 6,
)
)
bp.append((p[2][0], p[2][1]))
d += f"C {bp[1][0]} {bp[1][1]},{bp[2][0]} {bp[2][1]},{bp[3][0]} {bp[3][1]} "
self.elements.append(
f'<path d="{d}" stroke="{colour}" stroke-width="{width}" fill="transparent"/>'
)
def to_list(self):
return [XML_HEADER, SVG_HEADER] + self.elements + [SVG_FOOTER]
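# A short usage sketch (illustrative, not part of the original module):
#
#   svg = Svg()
#   svg.circle(300, 300, 5, "#ffffff")
#   svg.curve([(10, 10), (50, 80), (90, 30), (130, 60)], 1, "#888888")
#   print("\n".join(svg.to_list()))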
| mit | 6,502,522,732,376,947,000 | 36.653846 | 168 | 0.431733 | false |
google-research/federated | reconstruction/stackoverflow/federated_stackoverflow.py | 1 | 13247 | # Copyright 2020, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Federated Stack Overflow next word prediction library using TFF."""
import functools
from typing import Callable
from absl import logging
import tensorflow as tf
import tensorflow_federated as tff
from reconstruction.shared import federated_trainer_utils
from reconstruction.stackoverflow import models
from reconstruction.stackoverflow import stackoverflow_dataset
from utils import keras_metrics
from utils.datasets import stackoverflow_word_prediction
def run_federated(
iterative_process_builder: Callable[..., tff.templates.IterativeProcess],
evaluation_computation_builder: Callable[..., tff.Computation],
client_batch_size: int,
clients_per_round: int,
global_variables_only: bool,
vocab_size: int = 10000,
num_oov_buckets: int = 1,
sequence_length: int = 20,
max_elements_per_user: int = 1000,
embedding_size: int = 96,
latent_size: int = 670,
num_layers: int = 1,
total_rounds: int = 1500,
experiment_name: str = 'federated_so_nwp',
root_output_dir: str = '/tmp/fed_recon',
rounds_per_eval: int = 1,
rounds_per_checkpoint: int = 50,
split_dataset_strategy: str = federated_trainer_utils
.SPLIT_STRATEGY_AGGREGATED,
split_dataset_proportion: int = 2,
compose_dataset_computation: bool = False):
"""Runs an iterative process on the Stack Overflow next word prediction task.
  This method will load and pre-process the dataset and construct a model used for
the task. It then uses `iterative_process_builder` to create an iterative
process that it applies to the task, using
`federated_research.utils.training_loop`.
This model only sends updates for its embeddings corresponding to the most
common words. Embeddings for out of vocabulary buckets are reconstructed on
device at the beginning of each round, and destroyed at the end of these
rounds.
We assume that the iterative process has the following functional type
signatures:
* `initialize`: `( -> S@SERVER)` where `S` represents the server state.
* `next`: `<S@SERVER, {B*}@CLIENTS> -> <S@SERVER, T@SERVER>` where `S`
represents the server state, `{B*}` represents the client datasets,
and `T` represents a python `Mapping` object.
The iterative process must also have a callable attribute `get_model_weights`
that takes as input the state of the iterative process, and returns a
`tff.learning.ModelWeights` object.
Args:
iterative_process_builder: A function that accepts a no-arg `model_fn`, a
`loss_fn`, a `metrics_fn`, and a `client_weight_fn`, and returns a
`tff.templates.IterativeProcess`. The `model_fn` must return a
`reconstruction_model.ReconstructionModel`. See `federated_trainer.py` for
an example.
evaluation_computation_builder: A function that accepts a no-arg `model_fn`,
a loss_fn`, and a `metrics_fn`, and returns a `tff.Computation` for
federated reconstruction evaluation. The `model_fn` must return a
`reconstruction_model.ReconstructionModel`. See `federated_trainer.py` for
an example.
client_batch_size: An integer representing the batch size used on clients.
clients_per_round: An integer representing the number of clients
participating in each round.
global_variables_only: If True, the `ReconstructionModel` contains all model
variables as global variables. This can be useful for baselines involving
aggregating all variables.
vocab_size: Integer dictating the number of most frequent words to use in
the vocabulary.
num_oov_buckets: The number of out-of-vocabulary buckets to use.
sequence_length: The maximum number of words to take for each sequence.
max_elements_per_user: The maximum number of elements processed for each
client's dataset.
embedding_size: The dimension of the word embedding layer.
latent_size: The dimension of the latent units in the recurrent layers.
num_layers: The number of stacked recurrent layers to use.
total_rounds: The number of federated training rounds.
experiment_name: The name of the experiment being run. This will be appended
to the `root_output_dir` for purposes of writing outputs.
root_output_dir: The name of the root output directory for writing
experiment outputs.
rounds_per_eval: How often to compute validation metrics.
rounds_per_checkpoint: How often to checkpoint the iterative process state.
If you expect the job to restart frequently, this should be small. If no
interruptions are expected, this can be made larger.
split_dataset_strategy: The method to use to split the data. Must be one of
`skip`, in which case every `split_dataset_proportion` example is used for
reconstruction, or `aggregated`, when the first
1/`split_dataset_proportion` proportion of the examples is used for
reconstruction.
split_dataset_proportion: Parameter controlling how much of the data is used
for reconstruction. If `split_dataset_proportion` is n, then 1 / n of the
data is used for reconstruction.
compose_dataset_computation: Whether to compose dataset computation with
training and evaluation computations. If True, may speed up experiments by
parallelizing dataset computations in multimachine setups. Not currently
supported in OSS.
"""
loss_fn = functools.partial(
tf.keras.losses.SparseCategoricalCrossentropy, from_logits=True)
special_tokens = stackoverflow_word_prediction.get_special_tokens(
vocab_size, num_oov_buckets)
pad_token = special_tokens.pad
oov_tokens = special_tokens.oov
eos_token = special_tokens.eos
def metrics_fn():
return [
keras_metrics.MaskedCategoricalAccuracy(
name='accuracy_with_oov', masked_tokens=[pad_token]),
keras_metrics.MaskedCategoricalAccuracy(
name='accuracy_no_oov', masked_tokens=[pad_token] + oov_tokens),
# Notice BOS never appears in ground truth.
keras_metrics.MaskedCategoricalAccuracy(
name='accuracy_no_oov_or_eos',
masked_tokens=[pad_token, eos_token] + oov_tokens),
keras_metrics.NumBatchesCounter(),
keras_metrics.NumTokensCounter(masked_tokens=[pad_token])
]
train_clientdata, validation_clientdata, test_clientdata = (
tff.simulation.datasets.stackoverflow.load_data())
vocab = stackoverflow_word_prediction.create_vocab(vocab_size)
dataset_preprocess_fn = stackoverflow_dataset.create_preprocess_fn(
vocab=vocab,
num_oov_buckets=num_oov_buckets,
client_batch_size=client_batch_size,
max_sequence_length=sequence_length,
max_elements_per_client=max_elements_per_user,
sort_by_date=True)
feature_dtypes = train_clientdata.element_type_structure
@tff.tf_computation(tff.SequenceType(feature_dtypes))
def dataset_preprocess_comp(dataset):
return dataset_preprocess_fn(dataset)
preprocess_train = train_clientdata.preprocess(dataset_preprocess_fn)
input_spec = preprocess_train.element_type_structure
model_fn = functools.partial(
models.create_recurrent_reconstruction_model,
vocab_size=vocab_size,
num_oov_buckets=num_oov_buckets,
embedding_size=embedding_size,
latent_size=latent_size,
num_layers=num_layers,
input_spec=input_spec,
global_variables_only=global_variables_only)
def client_weight_fn(local_outputs):
# Num_tokens is a tensor with type int64[1], to use as a weight need
# a float32 scalar.
return tf.cast(tf.squeeze(local_outputs['num_tokens']), tf.float32)
iterative_process = iterative_process_builder(
model_fn,
loss_fn=loss_fn,
metrics_fn=metrics_fn,
client_weight_fn=client_weight_fn,
dataset_split_fn_builder=functools.partial(
federated_trainer_utils.build_dataset_split_fn,
split_dataset_strategy=split_dataset_strategy,
split_dataset_proportion=split_dataset_proportion))
base_eval_computation = evaluation_computation_builder(
model_fn,
loss_fn=loss_fn,
metrics_fn=metrics_fn,
dataset_split_fn_builder=functools.partial(
federated_trainer_utils.build_dataset_split_fn,
split_dataset_strategy=split_dataset_strategy,
split_dataset_proportion=split_dataset_proportion))
if compose_dataset_computation:
# Compose dataset computations with client training and evaluation to avoid
# linear cost of computing centrally. This changes the expected input of
# the `IterativeProcess` and `tff.Computation` to be a list of client IDs
# instead of datasets.
training_process = (
tff.simulation.compose_dataset_computation_with_iterative_process(
dataset_preprocess_comp, iterative_process))
training_process = (
tff.simulation.compose_dataset_computation_with_iterative_process(
train_clientdata.dataset_computation, training_process))
training_process.get_model_weights = iterative_process.get_model_weights
base_eval_computation = (
tff.simulation.compose_dataset_computation_with_computation(
dataset_preprocess_comp, base_eval_computation))
val_computation = (
tff.simulation.compose_dataset_computation_with_computation(
validation_clientdata.dataset_computation, base_eval_computation))
test_computation = (
tff.simulation.compose_dataset_computation_with_computation(
test_clientdata.dataset_computation, base_eval_computation))
# Create client sampling functions for each of train/val/test.
# We need to sample client IDs, not datasets, and we do not need to apply
# `dataset_preprocess_comp` since this is applied as part of the training
# process and evaluation computation.
train_client_datasets_fn = federated_trainer_utils.build_list_sample_fn(
train_clientdata.client_ids, size=clients_per_round, replace=False)
val_client_datasets_fn = federated_trainer_utils.build_list_sample_fn(
validation_clientdata.client_ids, size=clients_per_round, replace=False)
test_client_datasets_fn = federated_trainer_utils.build_list_sample_fn(
test_clientdata.client_ids, size=clients_per_round, replace=False)
else:
training_process = iterative_process
val_computation = base_eval_computation
test_computation = base_eval_computation
# Apply dataset computations.
train_clientdata = train_clientdata.preprocess(dataset_preprocess_comp)
validation_clientdata = validation_clientdata.preprocess(
dataset_preprocess_comp)
test_clientdata = test_clientdata.preprocess(dataset_preprocess_comp)
# Create client sampling functions for each of train/val/test.
train_client_datasets_fn = functools.partial(
tff.simulation.build_uniform_sampling_fn(train_clientdata.client_ids),
size=clients_per_round)
val_client_datasets_fn = functools.partial(
tff.simulation.build_uniform_sampling_fn(
validation_clientdata.client_ids),
size=clients_per_round)
test_client_datasets_fn = functools.partial(
tff.simulation.build_uniform_sampling_fn(test_clientdata.client_ids),
size=clients_per_round)
# Create final evaluation functions to pass to `training_loop`.
val_fn = federated_trainer_utils.build_eval_fn(
evaluation_computation=val_computation,
client_datasets_fn=val_client_datasets_fn,
get_model=training_process.get_model_weights)
test_fn = federated_trainer_utils.build_eval_fn(
evaluation_computation=test_computation,
client_datasets_fn=test_client_datasets_fn,
get_model=training_process.get_model_weights)
test_fn = functools.partial(test_fn, round_num=0)
def round_end_evaluation_fn(state, round_num):
if round_num % rounds_per_eval == 0:
validation_metrics = val_fn(state, round_num)
else:
validation_metrics = {}
return validation_metrics
checkpoint_manager, metrics_managers = federated_trainer_utils.configure_managers(
root_output_dir, experiment_name, rounds_per_checkpoint)
logging.info('Starting training loop.')
state = tff.simulation.run_simulation(
process=training_process,
client_selection_fn=train_client_datasets_fn,
total_rounds=total_rounds,
validation_fn=round_end_evaluation_fn,
file_checkpoint_manager=checkpoint_manager,
metrics_managers=metrics_managers)
test_metrics = test_fn(state)
logging.info('Test metrics:\n %s', test_metrics)
for metrics_manager in metrics_managers:
metrics_manager.save_metrics(test_metrics, total_rounds + 1)
| apache-2.0 | 5,246,149,194,456,275,000 | 44.522337 | 84 | 0.726051 | false |
ferrisvienna/Disco_Defense | data/discodefense.py | 1 | 22583 | #004BB1#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Disco Defense
Open source game by Ferris(FerrisofVienna) Bartak
and Paolo "Broccolimaniac" Perfahl
using python3 and pygame
"""
#the next line is only needed for python2.x and not necessary for python3.x
from __future__ import print_function, division
import random
import pygame
import time as t
class Game(object):
LIVES=20
FORCE_OF_GRAVITY=3
ACTORSPEEDMAX=20
ACTORSPEEDMIN=10
DISCTHROWERRANGE=150
DISCMAXSPEED=100
SPAWNRATE = 0.02
def __init__(self):
Monster.images.append(pygame.image.load("data/discodudel.png")) # 0
Monster.images[0].set_colorkey((255,0,182))
Monster.images.append(pygame.image.load("data/discodudel4.png")) # 1
Monster.images[1].set_colorkey((255,0,182))
Monster.images.append(pygame.image.load("data/discodudel.png")) # 2
Monster.images[2].set_colorkey((255,0,182))
Monster.images.append(pygame.image.load("data/discodudel2.png")) # 3
Monster.images[3].set_colorkey((255,0,182))
Monster.images.append(pygame.image.load("data/discodudel3.png")) # 4
Monster.images[4].set_colorkey((255,0,182))
Monster.images.append(pygame.image.load("data/discodudel2.png")) # 5
Monster.images[5].set_colorkey((255,0,182))
Monster.images[0].convert_alpha()
Monster.images[1].convert_alpha()
Monster.images[2].convert_alpha()
Monster.images[3].convert_alpha()
Monster.images[4].convert_alpha()
Monster.images[5].convert_alpha()
self.h= [pygame.image.load("data/h0.png"),
pygame.image.load("data/h1.png"),
pygame.image.load("data/h2.png"),
pygame.image.load("data/h3.png"),
pygame.image.load("data/h4.png"),
pygame.image.load("data/h5.png")]
self.h[0].set_colorkey((255,0,182))
self.h[1].set_colorkey((255,0,182))
self.h[2].set_colorkey((255,0,182))
self.h[3].set_colorkey((255,0,182))
self.h[4].set_colorkey((255,0,182))
self.h[5].set_colorkey((255,0,182))
self.p= pygame.image.load("data/p.png")
self.p.set_colorkey((255,0,182))
self.e= pygame.image.load("data/protect.png")
        self.e.set_colorkey((255,0,182))
self.i= [pygame.image.load("data/i0.png"),
pygame.image.load("data/i1.png"),
pygame.image.load("data/i2.png"),
pygame.image.load("data/i3.png"),
pygame.image.load("data/i4.png"),
pygame.image.load("data/i5.png")]
self.i[1].set_colorkey((255,0,182))
self.i[2].set_colorkey((255,0,182))
self.i[3].set_colorkey((255,0,182))
self.i[4].set_colorkey((255,0,182))
self.i[5].set_colorkey((255,0,182))
self.i[0].set_colorkey((255,0,182))
self.d= [pygame.image.load("data/d0.png"),
pygame.image.load("data/d1.png"),
pygame.image.load("data/d2.png"),
pygame.image.load("data/d3.png"),
pygame.image.load("data/d4.png"),
pygame.image.load("data/d5.png")]
self.g= [pygame.image.load("data/g0.png"),
pygame.image.load("data/g1.png"),
pygame.image.load("data/g2.png"),
pygame.image.load("data/g3.png"),
pygame.image.load("data/g4.png"),
pygame.image.load("data/g5.png")]
self.v= [pygame.image.load("data/discodiscgunf.png"),
pygame.image.load("data/discodiscgunl.png"),
pygame.image.load("data/discodiscgunb.png"),
pygame.image.load("data/discodiscgunr.png"),
pygame.image.load("data/discodiscgunr.png"),
pygame.image.load("data/discodiscgunr.png")]
self.k= [pygame.image.load("data/konfettif.png"),
pygame.image.load("data/konfettir.png"),
pygame.image.load("data/konfettib.png"),
pygame.image.load("data/konfettil.png"),
pygame.image.load("data/konfettil.png"),
pygame.image.load("data/konfettil.png")]
self.w= [pygame.image.load("data/discogunf.png"),
pygame.image.load("data/discogunr.png"),
pygame.image.load("data/discogunb.png"),
pygame.image.load("data/discogunl.png"),
pygame.image.load("data/discogunl.png"),
pygame.image.load("data/discogunl.png")]
self.w[1].set_colorkey((255,0,182))
self.w[2].set_colorkey((255,0,182))
self.w[3].set_colorkey((255,0,182))
self.w[4].set_colorkey((255,0,182))
self.w[5].set_colorkey((255,0,182))
self.w[0].set_colorkey((255,0,182))
self.anim=0
self.level=["hppppppppppppwppppppe",
"ihpppppppppihippppppe",
"idddvddddddhidvddddde",
"dddddddddddddddddddde",
"vddddddgdvddddkddddve",
"dddddddddddddggddddde",
"ddddvdddddddddvddddde",
"gggggggdgggdggdggggge"]
anim = 0
self.legende={"h":self.h[anim],#towertop
"p":self.p,#nothing
"i":self.i[anim],#dirt
"g":self.g[anim],#lava
"d":self.d[anim], #grass
"v":self.v[anim], #discodiscgun
"w":self.w[anim], #discogun
"k":self.k[anim], #konfettigun
"e":self.e #end of world
}
class Fragment(pygame.sprite.Sprite):
"""a fragment of an exploding Bird"""
gravity = True # fragments fall down ?
def __init__(self, pos):
pygame.sprite.Sprite.__init__(self, self.groups)
self.pos = [0.0,0.0]
self.pos[0] = pos[0]
self.pos[1] = pos[1]
self.image = pygame.Surface((10,10))
self.image.set_colorkey((0,0,0)) # black transparent
pygame.draw.circle(self.image, (random.randint(20,230),random.randint(20,230),random.randint(20,230)), (5,5),
random.randint(3,10))
self.image = self.image.convert_alpha()
self.rect = self.image.get_rect()
self.rect.center = self.pos #if you forget this line the sprite sit in the topleft corner
self.lifetime = 1 + random.random()*5 # max 6 seconds
self.time = 0.0
self.fragmentmaxspeed = 200 # try out other factors !
self.dx = random.randint(-self.fragmentmaxspeed,self.fragmentmaxspeed)
self.dy = random.randint(-self.fragmentmaxspeed,self.fragmentmaxspeed)
def update(self, seconds):
self.time += seconds
if self.time > self.lifetime:
self.kill()
self.pos[0] += self.dx * seconds
self.pos[1] += self.dy * seconds
if Fragment.gravity:
            self.dy += Game.FORCE_OF_GRAVITY # gravity sucks fragments down
self.rect.centerx = round(self.pos[0],0)
self.rect.centery = round(self.pos[1],0)
class DiscProjectile(pygame.sprite.Sprite):
"""a projectile of a Disc gun"""
gravity = False # fragments fall down ?
image=pygame.image.load("data/disc.png")
    def __init__(self, pos=None, dx=None, dy=None):
        pygame.sprite.Sprite.__init__(self, self.groups)
        # default arguments are evaluated only once at definition time,
        # so draw the random values here instead of in the signature
        if pos is None:
            pos = (random.randint(640, 1024), random.randint(100, 300))
        if dx is None:
            dx = random.randint(-Game.DISCMAXSPEED, Game.DISCMAXSPEED)
        if dy is None:
            dy = random.randint(-Game.DISCMAXSPEED, Game.DISCMAXSPEED)
        self.pos = [0.0, 0.0]
        self.pos[0] = pos[0]
        self.pos[1] = pos[1]
self.image = DiscProjectile.image
        self.image.set_colorkey((255,0,182)) # magenta is transparent
#pygame.draw.circle(self.image, (random.randint(1,255),0,0), (5,5),
#random.randint(2,5))
self.image = self.image.convert_alpha()
self.rect = self.image.get_rect()
self.rect.center = self.pos #if you forget this line the sprite sit in the topleft corner
self.lifetime = 1 + random.random()*5 # max 6 seconds
self.time = 0.0
#self.fragmentmaxspeed = 200 # try out other factors !
self.dx = dx
self.dy = dy
def update(self, seconds):
self.time += seconds
if self.time > self.lifetime:
self.kill()
self.pos[0] += self.dx * seconds
self.pos[1] += self.dy * seconds
#if Fragment.gravity:
# self.dy += FORCE_OF_GRAVITY # gravity suck fragments down
self.rect.centerx = round(self.pos[0],0)
self.rect.centery = round(self.pos[1],0)
class Healthbar(pygame.sprite.Sprite):
"""shows a bar with the hitpoints of a Bird sprite"""
def __init__(self, boss):
pygame.sprite.Sprite.__init__(self,self.groups)
self.boss = boss
self.image = pygame.Surface((self.boss.rect.width,7))
self.image.set_colorkey((3,3,3)) # black transparent
pygame.draw.rect(self.image, (1,1,1), (0,0,self.boss.rect.width,7),1)
self.rect = self.image.get_rect()
self.oldpercent = 0
self.bossnumber = self.boss.number # the unique number (name)
def update(self, time):
self.percent = self.boss.hitpoints / self.boss.hitpointsfull * 1.0
if self.percent != self.oldpercent:
            pygame.draw.rect(self.image, (77,77,77), (1,1,self.boss.rect.width-2,5)) # fill grey
            pygame.draw.rect(self.image, (222,22,2), (1,1,
                int(self.boss.rect.width * self.percent),5),0) # fill red
self.oldpercent = self.percent
self.rect.centerx = self.boss.rect.centerx
self.rect.centery = self.boss.rect.centery - self.boss.rect.height /2 - 10
        # check if boss is still alive; if not, kill the hitbar
        if self.boss.hitpoints < 1:
            self.kill()
class Monster(pygame.sprite.Sprite):
"""Generic Monster"""
images=[] # list of all images
# not necessary:
monsters = {} # a dictionary of all monsters
number = 0
def __init__(self, level, startpos=(0,200), hitpointsfull=600):
pygame.sprite.Sprite.__init__(self, self.groups ) #call parent class. NEVER FORGET !
        self.z = 0 # animation frame number
self.duration = 0.0 # how long was the current animation visible in seconds
self.level=level
self.nomove = False
#startpos=(0,screen.get_rect().center[1])
startpos=(0,random.randint(100,350))
self.pos = [float(startpos[0]),float (startpos[1])] # dummy values to create a list
#self.pos[0] = float(startpos[0]) # float for more precise calculation
#self.pos[1] = float(startpos[1])
# self.area = screen.get_rect()
self.area = pygame.Rect(0,100,1024,300)
self.image = Monster.images[self.z]
self.hitpointsfull = float(hitpointsfull) # maximal hitpoints , float makes decimal
self.hitpoints = float(hitpointsfull) # actual hitpoints
self.rect = self.image.get_rect()
self.radius = max(self.rect.width, self.rect.height) / 2.0
self.dx= random.random()*10+20
self.dy= random.randint(-70,70)
self.rect.centerx = round(self.pos[0],0)
        self.rect.centery = round(self.pos[1],0) # poo brown
#self.newspeed()
#self.cleanstatus()
#self.catched = False
#self.crashing = False
#--- not necessary:
self.number = Monster.number # get my personal Birdnumber
Monster.number+= 1 # increase the number for next Bird
Monster.monsters[self.number] = self #
Healthbar(self)
#def newspeed(self):
# new birdspeed, but not 0
#speedrandom = random.choice([-1,1]) # flip a coin
#self.dx = random.random() * ACTORSPEEDMAX * speedrandom + speedrandom
#self.dy = random.random() * ACTORSPEEDMAX * speedrandom + speedrandom
def getChar(self):
#Tile = 50*50
x=int(self.pos[0]/50)
y=int(self.pos[1]/50)+0 # correction value to get the tile under the feet doesn't actually work :\
try:
char=self.level[y][x]
except:
char="?"
return char
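    # illustrative mapping: a monster at pos (120, 260) stands on tile
    # column int(120/50) == 2, row int(260/50) == 5, i.e. self.level[5][2]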
def kill(self):
"""because i want to do some special effects (sound, dictionary etc.)
before killing the Bird sprite i have to write my own kill(self)
function and finally call pygame.sprite.Sprite.kill(self)
to do the 'real' killing"""
#cry.play()
#print Bird.birds, "..."
for _ in range(random.randint(7,20)):
Fragment(self.pos)
Monster.monsters[self.number] = None # kill Bird in sprite dictionary
pygame.sprite.Sprite.kill(self) # kill the actual Bird
def update(self, seconds):
        # friction makes birds slower
        #if abs(self.dx) > ACTORSPEEDMIN and abs(self.dy) > BIRDSPEEDMIN:
# self.dx *= FRICTION
# self.dy *= FRICTION
# spped limit
#if abs(self.dx) > BIRDSPEEDMAX:
# self.dx = BIRDSPEEDMAX * self.dx / self.dx
#if abs(self.dy) > BIRDSPEEDMAX:
# self.dy = BIRDSPEEDMAX * self.dy / self.dy
# new position
#------ check if lava
#Animation#
        # 6 images are in Monster.images[]
self.duration += seconds
if self.duration > 0.5:
self.duration= 0
self.z +=1
if self.z >= len(Monster.images):
self.z = 0
self.image=Monster.images[self.z]
#-------
if self.getChar()=="g":
self.hitpoints-=1
if self.getChar()=="?":
self.hitpoints=0
if self.getChar()=="e":
self.hitpoints=0
            Game.LIVES -= 1
if self.getChar()=="h":
self.nomove = True
self.dy=random.randint(-10, 10)
self.dx= 20#random.randint(10,10)
if self.nomove:
self.dx = 0
self.pos[0] += self.dx * seconds
self.pos[1] += self.dy * seconds
# -- check if Bird out of screen
if not self.area.contains(self.rect):
#self.crashing = True # change colour later
# --- compare self.rect and area.rect
if self.pos[0] + self.rect.width/2 > self.area.right:
self.pos[0] = self.area.right - self.rect.width/2
if self.pos[0] - self.rect.width/2 < self.area.left:
self.pos[0] = self.area.left + self.rect.width/2
if self.pos[1] + self.rect.height/2 > self.area.bottom:
self.pos[1] = self.area.bottom - self.rect.height/2
if self.pos[1] - self.rect.height/2 < self.area.top:
self.pos[1] = self.area.top + self.rect.height/2
#self.newspeed() # calculate a new direction
#--- calculate actual image: crasing, catched, both, nothing ?
#self.image = Bird.image[self.crashing + self.catched*2]
#--- calculate new position on screen -----
self.rect.centerx = round(self.pos[0],0)
self.rect.centery = round(self.pos[1],0)
#--- loose hitpoins
#if self.crashing:
#self.hitpoints -=1
#--- check if still alive if not, then let a juicy fart off
if self.hitpoints <= 0:
self.kill()
class Viewer(object):
def __init__(self, width=1024, height=400, fps=30):
"""Initialize pygame, window, background, font,...
default arguments
"""
pygame.mixer.pre_init(44100, -16, 2, 2048) # setup mixer to avoid sound lag
pygame.init()
pygame.display.set_caption("Press ESC to quit")
self.width = width
self.height = height
self.screen = pygame.display.set_mode((self.width, self.height), pygame.DOUBLEBUF)
self.background = pygame.Surface(self.screen.get_size()).convert()
#self.background.fill((255,255,255)) # fill background white
        self.background.fill((1,75,176)) # fill the background blue (red,green,blue)
self.clock = pygame.time.Clock()
self.fps = fps
self.playtime = 0.0
self.font = pygame.font.SysFont('mono', 24, bold=True)
# sprite groups
        self.playergroup = pygame.sprite.LayeredUpdates()
        self.bargroup = pygame.sprite.Group()
        self.stuffgroup = pygame.sprite.Group()
        self.fragmentgroup = pygame.sprite.Group()
        self.projectilegroup = pygame.sprite.Group()
        self.monstergroup = pygame.sprite.Group()
        self.allgroup = pygame.sprite.LayeredUpdates()
DiscProjectile.groups = self.allgroup, self.projectilegroup
Monster.groups = self.allgroup, self.monstergroup
Fragment.groups = self.allgroup, self.fragmentgroup
Healthbar.groups = self.allgroup, self.bargroup
self.game = Game()
def paint(self):
# paint the level of self.game
x=0
y=0
self.game.fleckanim=[]
for zeile in self.game.level:
for fleck in zeile:
self.game.fleckanim.append(0)
self.background.blit(self.game.legende[fleck],(x,y))
x+=50
y+=50
x=0
def run(self):
"""The mainloop
"""
self.paint()
running = True
millis = 0
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
running = False
# ------CHEAT KEY----------
if event.key==pygame.K_F1:
for px in range (0,240):
DiscProjectile(pos=(random.randint(540,1024),random.randint(100,400)))
milliseconds = self.clock.tick(self.fps)
millis += milliseconds
seconds=milliseconds /1000.0
            self.playtime += milliseconds / 1000.0
self.draw_text("FPS: {:6.3}{}PLAYTIME: {:6.3} SECONDS".format(
self.clock.get_fps(), " "*5, self.playtime))
pygame.display.flip()
            self.screen.blit(self.background, (0, 0)) # clear everything
            # build the level
# monster spawn
if random.random()<self.game.SPAWNRATE:
Monster(self.game.level)
# spritecollide
            if millis > 500: # new animation every half second
millis=0
z=0
x=0
y=0
for zeile in self.game.level:
for fleck in zeile:
if fleck == "d" and self.game.fleckanim[z] == 0:
if random.random() < 0.005:
self.game.fleckanim[z] += 1
elif fleck == "g" and self.game.fleckanim[z] == 0:
if random.random() < 0.5:
self.game.fleckanim[z] += 1
else:
                            self.game.fleckanim[z] += 1 # normal tile
if fleck == "v":
targetlist=[]
for target in self.monstergroup:
                                # compute the distance using Pythagoras
                                # a target is searched for and its range tested;
                                # a random target in range gets shot at
distx=abs(target.pos[0]-x)
disty=abs(target.pos[1]-y)
dist=(distx**2+disty**2)**0.5
if dist<self.game.DISCTHROWERRANGE:
targetlist.append(target)
if len(targetlist)>0:
target=random.choice(targetlist)
print("taget gefunden{}".format(target.pos) )
#schuss
                                    DiscProjectile((x,y),
                                        self.game.DISCMAXSPEED*(target.pos[0]-x)/dist,
                                        self.game.DISCMAXSPEED*(target.pos[1]-y)/dist)
else:
print("No target found")
if self.game.fleckanim[z] > 5:
self.game.fleckanim[z] = 0
z+=1
x+=50
y+=50
x=0
# laser
            # ferris: code the laser cannon beam in here
pygame.draw.line(self.screen,(random.randint(0,255),random.randint(0,255),
random.randint(0,255)),(675,25),(random.randint(0,200),
random.randint(0,400)),random.randint(5,15))
#allgroup.clear(screen, background)
self.allgroup.update(seconds)
self.allgroup.draw(self.screen)
pygame.quit()
def draw_text(self, text):
"""Center text in window
"""
fw, fh = self.font.size(text)
surface = self.font.render(text, True, (0, 0, 0))
self.screen.blit(surface, (50,150))
## code on module level
if __name__ == '__main__':
# call with width of window and fps
Viewer().run()
| gpl-3.0 | 40,689,081,233,943,590 | 40.816667 | 121 | 0.526239 | false |
thortex/rpi3-webiopi | webiopi_0.7.1/python/setup.py | 1 | 1891 | from setuptools import setup, Extension
classifiers = ['Development Status :: 3 - Alpha',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development',
'Topic :: Home Automation',
'Topic :: System :: Hardware']
setup(name = 'WebIOPi',
version = '0.7.1',
author = 'Eric PTAK',
author_email = '[email protected]',
description = 'A package to control Raspberry Pi GPIO from the web',
long_description = open('../doc/README').read(),
license = 'Apache',
keywords = 'RaspberryPi GPIO Python REST',
url = 'http://webiopi.trouch.com/',
classifiers = classifiers,
packages = ['_webiopi',
"webiopi",
"webiopi.utils",
"webiopi.clients",
"webiopi.protocols",
"webiopi.server",
"webiopi.decorators",
"webiopi.devices",
"webiopi.devices.digital",
"webiopi.devices.analog",
"webiopi.devices.sensor",
"webiopi.devices.clock",
"webiopi.devices.memory",
"webiopi.devices.shield"
],
ext_modules = [Extension(name='_webiopi.GPIO', sources=['native/bridge.c', 'native/gpio.c', 'native/cpuinfo.c', 'native/pwm.c'], include_dirs=['native/'])],
headers = ['native/cpuinfo.h', 'native/gpio.h', 'native/pwm.h'],
)
| apache-2.0 | -3,684,771,143,085,385,000 | 46.275 | 167 | 0.471179 | false |
iagcl/data_pipeline | data_pipeline/utils/dbuser.py | 1 | 6117 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
###############################################################################
# Module: dbuser
# Purpose: Split database user strings into database connection properties.
# Refer to notes below. Database properties include:
#
# - dbuserid
# - dbpassword
# - host (database name)
# - dbport
#
# OR
#
# - file data directory
#
# Notes:
# (1) database user string format:
#
# <dbuserid>/<dbpassword>@<host>[:<dbport>][/<dbsid>]
#
# (2) for file-base loads, the user string format is a valid directory path:
#
# /path/to/data/files
#
# (3) For MacOS users, if you're seeing an error like the following after
# being prompted for a password:
#
# "keyring.errors.PasswordSetError: Can't store password on keychain"
#
# Please try resigning the python executable as outlined here:
# https://github.com/jaraco/keyring/issues/219
#
# E.g. codesign -f -s - /path/to/my/virtualenv/bin/python
#
###############################################################################
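# Example (illustrative, hypothetical credentials): the user string
#
#     scott/tiger@dbhost:5432/mydb
#
# is parsed by _get_db_connection_properties() below into userid="scott",
# password="tiger", host="dbhost", port="5432" and dbsid="mydb", while a
# plain directory path such as /data/feeds yields data_dir="/data/feeds".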
import getpass
import keyring
import logging
import os
import re
from data_pipeline.db.connection_details import ConnectionDetails
logger = logging.getLogger(__name__)
def get_dbuser_properties(dbuser):
if not dbuser:
return None
conn_details = _get_db_connection_properties(dbuser)
if conn_details.data_dir:
return conn_details
if not conn_details.password:
if not conn_details.host:
raise ValueError("Invalid connection string: Host missing.")
if not conn_details.userid:
raise ValueError("Invalid connection string: User ID missing.")
logger.info("Trying to fetch password from keyring conn_details: {}"
.format(conn_details))
# Try to fetch the password from keyring
system_key = ("{host}:{port}/{dbsid}"
.format(host=conn_details.host,
port=conn_details.port,
dbsid=conn_details.dbsid))
password = keyring.get_password(system_key,
conn_details.userid)
password_confirm = password
while password is None or password != password_confirm:
password = getpass.getpass(
"Password for {dbuser}: ".format(dbuser=dbuser))
password_confirm = getpass.getpass(
"Confirm password for {dbuser}: ".format(dbuser=dbuser))
if password != password_confirm:
logger.warn("Passwords do not match. Please try again...")
# Store the password on the machine's keychain for later retrieval
keyring.set_password(system_key,
conn_details.userid,
password)
conn_details.password = password
_validate(conn_details, dbuser)
return conn_details
def _validate(conn_details, conn_string):
if conn_details is None:
raise ValueError("Connection details could not be defined with "
"connection string: {}".format(conn_string))
if not conn_details.userid:
raise ValueError("User ID was not defined in connection string: {}"
.format(conn_string))
if not conn_details.password:
raise ValueError("Password is not defined for connection string: {}"
.format(conn_string))
if not conn_details.host:
raise ValueError("Host is not defined in connection string: {}"
.format(conn_string))
def _get_dbuser(buf):
fwd_slash_index = buf.find('/')
if fwd_slash_index >= 0:
return buf[:fwd_slash_index]
return buf
def _get_dbpassword(buf):
fwd_slash_index = buf.find('/')
if fwd_slash_index >= 0:
return buf[fwd_slash_index+1:]
return None
def _get_host(buf):
colon_index = buf.rfind(':')
if colon_index >= 0:
return buf[:colon_index]
fwd_slash_index = buf.rfind('/')
if fwd_slash_index >= 0:
return buf[:fwd_slash_index]
return buf
def _get_dbport(buf):
colon_index = buf.rfind(':')
if colon_index >= 0:
fwd_slash_index = buf.rfind('/')
if fwd_slash_index < 0:
return buf[colon_index+1:]
return buf[colon_index+1:fwd_slash_index]
return None
def _get_dbsid(buf):
fwd_slash_index = buf.rfind('/')
if fwd_slash_index >= 0:
return buf[fwd_slash_index+1:]
return None
def _get_db_connection_properties(buf):
last_at_index = buf.rfind('@')
# Treat this as a data directory path
if last_at_index < 0:
if not os.path.isdir(buf):
raise ValueError("No such directory: {d}".format(d=buf))
if not os.listdir(buf):
raise ValueError("Directory is empty: {d}".format(d=buf))
return ConnectionDetails(data_dir=buf)
user_password_part = buf[:last_at_index]
userid = _get_dbuser(user_password_part)
password = _get_dbpassword(user_password_part)
rest = buf[last_at_index+1:]
host = _get_host(rest)
port = _get_dbport(rest)
dbsid = _get_dbsid(rest)
return ConnectionDetails(userid=userid, password=password,
host=host, port=port, dbsid=dbsid)
| apache-2.0 | 7,837,035,938,694,188,000 | 29.893939 | 79 | 0.607487 | false |
SCLT1975/python_training | generator/contact.py | 1 | 2052 | from model.new_user_data import N_u_d
import random
import string
import os.path
import json
import getopt
import sys
import jsonpickle
try:
opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of contacts", "file"])
except getopt.GetoptError as err:
    # getopt has no usage() helper; print the parse error instead
    print(err)
sys.exit(2)
n = 5
f = "data/contacts.json"
for o, a in opts:
if o == "-n":
n = int(a)
elif o == "-f":
f = a
def random_string(prefix, maxlen):
symbols = string.ascii_letters + string.digits + string.punctuation + " "*20
    return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])
testdata = [
N_u_d(namef=random_string("namef", 10), namem=random_string("namem", 10), namel=random_string("namel", 10),
nick=random_string("nick", 6), title=random_string("title", 9), firm=random_string("firm", 12),
addr=random_string("address", 20), phone_h=random_string("phoneh", 7),
phone_m=random_string("phone_m", 7), phone_work=random_string("phone_w", 7),
phone_fax=random_string("phone_fax", 7), email_1=random_string("email1", 7),
email_2=random_string("email_2", 10), email_3=random_string("email_3", 10),
homep=random_string("home_page", 12), day_1 = "//div[@id='content']/form/select[1]//option[3]",
month_1 = "//div[@id='content']/form/select[2]//option[2]", year_1 = random_string("year", 6),
day_2 = "//div[@id='content']/form/select[3]//option[3]",
month_2 = "//div[@id='content']/form/select[4]//option[2]",
year_2 = random_string("year", 6), address_2=random_string("address", 15),
phone_h2=random_string("phone_h2", 6), notes=random_string("notes", 20))
    for i in range(n)
]
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
jsonpickle.set_encoder_options('json', indent=2)
out.write(jsonpickle.encode(testdata))
| apache-2.0 | 2,522,733,888,872,232,400 | 38.461538 | 119 | 0.586257 | false |
pwaller/pgi | pgi/codegen/fields.py | 1 | 5203 | # Copyright 2013 Christoph Reiter
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
from pgi.clib.gir import GIInfoType, GITypeTag
from pgi.gtype import PGType
from pgi.util import import_attribute
class Field(object):
TAG = None
py_type = None
def __init__(self, info, type, backend):
self.backend = backend
self.info = info
self.type = type
@classmethod
def get_class(cls, type_):
return cls
def setup(self):
pass
def get(self, name):
raise NotImplementedError("no getter implemented")
def set(self, name, value_name):
raise NotImplementedError("no setter implemented")
def get_param_type(self, index):
"""Returns a ReturnValue instance for param type 'index'"""
assert index in (0, 1)
type_info = self.type.get_param_type(index)
type_cls = get_field_class(type_info)
instance = type_cls(self.backend, type_info, None)
instance.setup()
return instance
class InterfaceField(Field):
TAG = GITypeTag.INTERFACE
py_type = object
def setup(self):
iface = self.type.get_interface()
try:
self.py_type = import_attribute(iface.namespace, iface.name)
except ImportError:
# fall back to object
pass
def get(self, name):
var = self.backend.get_type(self.type)
iface = self.type.get_interface()
iface_type = iface.type.value
        # enums, structs, objects and flags are all unpacked the same way
        if iface_type in (GIInfoType.ENUM, GIInfoType.STRUCT,
                          GIInfoType.OBJECT, GIInfoType.FLAGS):
            out = var.unpack_out(name)
            return var.block, out
raise NotImplementedError(
"interface type not supported: %r" % iface.type)
class TypeField(Field):
TAG = GITypeTag.GTYPE
py_type = PGType
def get(self, name):
var = self.backend.get_type(self.type)
out = var.unpack_out(name)
return var.block, out
class GHashField(Field):
TAG = GITypeTag.GHASH
py_type = dict
def setup(self):
self.py_type = {
self.get_param_type(0).py_type: self.get_param_type(1).py_type}
class BasicField(Field):
def get(self, name):
var = self.backend.get_type(self.type)
out = var.unpack_out(name)
return var.block, out
def set(self, name, value_name):
var = self.backend.get_type(self.type)
out = var.pack_out(value_name)
return var.block, out
class DoubleField(BasicField):
TAG = GITypeTag.DOUBLE
py_type = float
class UInt32Field(BasicField):
TAG = GITypeTag.UINT32
py_type = int
class UInt8Field(BasicField):
TAG = GITypeTag.UINT8
py_type = int
class Int32Field(BasicField):
TAG = GITypeTag.INT32
py_type = int
class Int64Field(BasicField):
TAG = GITypeTag.INT64
py_type = int
class UInt64Field(BasicField):
TAG = GITypeTag.UINT64
py_type = int
class UInt16Field(BasicField):
TAG = GITypeTag.UINT16
py_type = int
class Int8Field(BasicField):
TAG = GITypeTag.INT8
py_type = int
class Int16Field(BasicField):
TAG = GITypeTag.INT16
py_type = int
class FloatField(BasicField):
TAG = GITypeTag.FLOAT
py_type = float
class BooleanField(BasicField):
TAG = GITypeTag.BOOLEAN
py_type = bool
class ArrayField(Field):
TAG = GITypeTag.ARRAY
py_type = list
def setup(self):
elm_type = self.get_param_type(0)
if isinstance(elm_type, UInt8Field):
self.py_type = "bytes"
else:
self.py_type = [elm_type.py_type]
def get(self, name):
return None, "None"
def set(self, name, value_name):
return None, ""
class Utf8Field(BasicField):
TAG = GITypeTag.UTF8
py_type = str
class VoidField(BasicField):
TAG = GITypeTag.VOID
py_type = object
class GSListField(Field):
TAG = GITypeTag.GSLIST
py_type = list
def setup(self):
self.py_type = [self.get_param_type(0).py_type]
class GListField(Field):
TAG = GITypeTag.GLIST
py_type = list
def setup(self):
self.py_type = [self.get_param_type(0).py_type]
_classes = {}
def _find_fields():
global _classes
cls = [a for a in globals().values() if isinstance(a, type)]
args = [a for a in cls if issubclass(a, Field) and a is not Field]
_classes = dict(((a.TAG, a) for a in args))
_find_fields()
def get_field_class(arg_type):
global _classes
tag_value = arg_type.tag.value
try:
cls = _classes[tag_value]
except KeyError:
raise NotImplementedError("%r not supported" % arg_type.tag)
else:
return cls.get_class(arg_type)
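# Illustrative usage (type_info stands for a hypothetical GITypeInfo whose
# tag is GITypeTag.UTF8):
#   get_field_class(type_info)  ->  Utf8Field
# An unsupported tag raises NotImplementedError instead.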
| lgpl-2.1 | 650,259,675,449,957,000 | 21.52381 | 75 | 0.620796 | false |
stianrh/askbot-nordic | askbot/deps/livesettings/templatetags/config_tags.py | 1 | 2229 | from django import template
from django.contrib.sites.models import Site
from django.core import urlresolvers
from askbot.deps.livesettings import config_value
from askbot.deps.livesettings.utils import url_join
import logging
log = logging.getLogger('configuration.config_tags')
register = template.Library()
def force_space(value, chars=40):
"""Forces spaces every `chars` in value"""
chars = int(chars)
if len(value) < chars:
return value
else:
out = []
start = 0
end = 0
looping = True
while looping:
start = end
end += chars
out.append(value[start:end])
looping = end < len(value)
return ', '.join(out)
def break_at(value, chars=40):
"""Force spaces into long lines which don't have spaces"""
    # TODO: EF - lazy patch: return the value unchanged for now;
    # the chunking logic below is currently unreachable
return value
chars = int(chars)
value = unicode(value)
if len(value) < chars:
return value
else:
out = []
line = value.split(', ')
for word in line:
if len(word) > chars:
out.append(force_space(word, chars))
else:
out.append(word)
return ", ".join(out)
register.filter('break_at', break_at)
def config_boolean(option):
"""Looks up the configuration option, returning true or false."""
args = option.split('.')
try:
val = config_value(*args)
except:
log.warn('config_boolean tag: Tried to look up config setting "%s", got SettingNotSet, returning False', option)
val = False
if val:
return "true"
else:
return ""
register.filter('config_boolean', config_boolean)
def admin_site_views(view):
"""Returns a formatted list of sites, rendering for view, if any"""
if view:
path = urlresolvers.reverse(view)
else:
path = None
links = []
for site in Site.objects.all():
paths = ["http://", site.domain]
if path:
paths.append(path)
links.append((site.name, url_join(paths)))
ret = {
'links' : links,
}
return ret
register.inclusion_tag('askbot.deps.livesettings/_admin_site_views.html')(admin_site_views)
| gpl-3.0 | 3,153,345,601,331,635,000 | 23.494505 | 120 | 0.593988 | false |
jonathaneunice/quoter | quoter/markdown.py | 1 | 3316 |
import re
import six
from options import Options, OptionsClass, Prohibited, Transient
from .util import *
from .quoter import Quoter
from .joiner import joinlines
from .styleset import StyleSet
# MD_ATTRS = set(['a', 'p', 'doc', 'h'])
# MD_ATTRS.update(QUOTER_ATTRS)
class MDQuoter(Quoter):
"""
A more sophisticated quoter for Markdown elements.
"""
options = Quoter.options.add(
misc = Prohibited,
)
def __init__(self, *args, **kwargs):
"""
Create an MDQuoter
"""
# Restating basic init to avoid errors of self.__getattribute__
# that can flummox superclass instantiation
super(Quoter, self).__init__()
opts = self.options = self.__class__.options.push(kwargs)
def a(self, text, href, **kwargs):
opts = self.options.push(kwargs)
parts = ["[", text, "](", href, ")"]
return self._output(parts, opts)
def p(self, *args, **kwargs):
opts = self.options.push(kwargs)
return self._output(args, opts)
def doc(self, seq, **kwargs):
opts = self.options.push(kwargs)
return joinlines(seq, sep="\n\n")
# FIXME: kwargs not really used
def h(self, text, level=1, close=False, setext=False, **kwargs):
"""
        Headers at various levels. Either atx style (hashmark prefix)
by default, or Setext (underlining) style optionally.
"""
opts = self.options.push(kwargs)
if setext:
char = '=' if level == 1 else '-'
parts = [text, '\n', char * len(text), '\n']
else:
prefix = "#" * level
parts = [prefix, ' ', text]
if close:
parts.extend([' ', prefix])
return self._output(parts, opts)
def h1(self, text, **kwargs):
kwargs['level'] = 1
return self.h(text, **kwargs)
def h2(self, text, **kwargs):
kwargs['level'] = 2
return self.h(text, **kwargs)
def h3(self, text, **kwargs):
kwargs['level'] = 3
return self.h(text, **kwargs)
def h4(self, text, **kwargs):
kwargs['level'] = 4
return self.h(text, **kwargs)
def h5(self, text, **kwargs):
kwargs['level'] = 5
return self.h(text, **kwargs)
def h6(self, text, **kwargs):
kwargs['level'] = 6
return self.h(text, **kwargs)
def hr(self, **kwargs):
opts = self.options.push(kwargs)
return self._output(['-' * 5], opts)
# see http://daringfireball.net/projects/markdown/syntax
# for basic syntax
# TODO: blockquote
# TODO: code
# TODO: list (ordered)
# TODO: list (unordered)
# TODO: image
# TODO: automatic link
# TODO: footnote
# TODO: table
# TODO: literal asterisks
# TODO: get vsep working
# need this because basic joiners don't do varargs yet
md = StyleSet(
factory = MDQuoter,
immediate = MDQuoter(),
instant = False,
promote = 'but clone p a doc h')
md.i = MDQuoter(prefix="*", suffix="*")
md.b = MDQuoter(prefix="**", suffix="**")
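# Illustrative usage (expected output inferred from the definitions above,
# not taken from the library's documentation):
#   md.h("Title", level=2)               -> '## Title'
#   md.a("quoter", "http://example.org") -> '[quoter](http://example.org)'
#   md.i("emphasis")                     -> '*emphasis*'
#   md.b("strong")                       -> '**strong**'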
# _md_doc = joinlines.but(sep="\n\n")
# MDQuoter.styles['doc'] = _md_doc
# object.__setattr__(MDQuoter, 'doc') == _md_doc
# some obvious glitches and complexities in __getargument__ setup still,
# given complexity of defining doc method - look into
| apache-2.0 | -5,787,263,924,212,046,000 | 25.741935 | 72 | 0.578709 | false |
mice-software/maus | src/common_py/calibration/get_kl_calib.py | 1 | 3304 | # This file is part of MAUS: http://micewww.pp.rl.ac.uk/projects/maus
#
# MAUS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MAUS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MAUS. If not, see <http://www.gnu.org/licenses/>.
#
"""
Get KL calibrations from DB
"""
import cdb
import json
from Configuration import Configuration
from cdb._exceptions import CdbPermanentError
class GetCalib:
"""
Evaluator class to evaluate mathematical expressions
Able to see many simple math expressions and some common units; the
standard geant4 system of units is enabled.
"""
def __init__(self):
"""
        Initialise the connection to the CDB calibration server
"""
self._current_cali = {}
self.reset()
cfg = Configuration()
cfgdoc = cfg.getConfigJSON()
cfgdoc_json = json.loads(cfgdoc)
cdb_url = cfgdoc_json['cdb_download_url'] + 'calibration?wsdl'
self.cdb_server = cdb.Calibration()
self.cdb_server.set_url(cdb_url)
#print 'Server: ', self.cdb_server.get_name(), \
# self.cdb_server.get_version()
try:
cdb.Calibration().get_status()
except CdbPermanentError:
raise CdbPermanentError("CDB error")
def get_calib(self, devname, ctype, fromdate):
"""
        Fetch the calibration for the given device and calibration type,
        optionally for a specific date
"""
if devname != "" and ctype != "":
if devname != "KL" or ctype != "gain":
raise Exception('get_kl_calib failed. \
Invalid detector/calib type.')
# check whether we are asked for the current calibration
# or calibration for an older date
if fromdate == "" or fromdate == "current":
#print 'getting current calib', devname, ctype
try:
self._current_cali = \
self.cdb_server.get_current_calibration(devname, ctype)
except CdbPermanentError:
self._current_cali = "cdb_permanent_error"
else:
#print 'getting calib for date', fromdate
try:
self._current_cali = \
self.cdb_server.get_calibration_for_date(devname,
fromdate,
ctype)
except CdbPermanentError:
self._current_cali = "cdb_permanent_error"
#print self._current_cali
else:
raise Exception('get_kl_calib failed. No device/calibration type.')
return self._current_cali
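    # Illustrative usage (the date string format is assumed to follow the
    # CDB API):
    #   GetCalib().get_calib("KL", "gain", "current")
    #   GetCalib().get_calib("KL", "gain", "2015-01-01 00:00:00")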
def reset(self):
"""
Reinitialize calibration
"""
self._current_cali = {}
| gpl-3.0 | 456,927,340,829,286,300 | 36.545455 | 80 | 0.572942 | false |
simone-campagna/rubik | rubik/application/help_functions/help_configuration.py | 1 | 1593 | #!/usr/bin/env python3
#
# Copyright 2014 Simone Campagna
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__author__ = "Simone Campagna"
__all__ = [
'help_configuration',
'HelpConfiguration',
]
from .functor_help_text import HelpText
from ..config import Config
from ...table import Table
def help_configuration(test=None, interactive=None, writer=None):
HelpConfiguration(test=test, interactive=interactive, writer=writer)()
class HelpConfiguration(HelpText):
TEXT = None
def get_text(self):
text = """\
# Configuration
Rubik can be configured. A configuration file ($RUBIK_CONFIG) contained in a
configuration dir ($RUBIK_DIR) is used to configure rubik
(see --help-environment-variables/-hE option).
The configuration file can be used to set default values for some options:
"""
table = Table(headers=['KEY', '', 'DEFAULT_VALUE'])
for key, default_value in Config.CONFIG_DEFAULTS.items():
table.add_row((key, "=", repr(default_value)))
text += table.render() + '\n'
return text
| apache-2.0 | -1,421,581,509,010,283,000 | 30.86 | 76 | 0.696171 | false |
GiulianoFranchetto/zephyr | scripts/gen_relocate_app.py | 1 | 15643 | #!/usr/bin/env python3
#
# Copyright (c) 2018 Intel Corporation.
#
# SPDX-License-Identifier: Apache-2.0
#
# This script will relocate the .text, .rodata, .data and .bss sections of
# the required files and place them in the required memory region. The
# memory region and file are given to this python script in the form of a
# string. Example of such a string would be:
# SRAM2:/home/xyz/zephyr/samples/hello_world/src/main.c;\
# SRAM1:/home/xyz/zephyr/samples/hello_world/src/main2.c
# To invoke this script:
# python3 gen_relocate_app.py -i input_string -o generated_linker -c generated_code
# Configuration that needs to be sent to the python script.
# if the memory is like SRAM1/SRAM2/CCD/AON then place full object in
# the sections
# if the memory type is appended with _DATA / _TEXT/ _RODATA/ _BSS only the
# selected memory is placed in the required memory region. Others are
# ignored.
# NOTE: multiple regions can be appended together like SRAM2_DATA_BSS
# this will place data and bss inside SRAM2
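# Example (illustrative, hypothetical paths): the input string
# SRAM2_DATA_BSS:/home/xyz/src/main.c;SRAM1:/home/xyz/src/main2.c
# relocates only the data and bss sections of main.c into SRAM2, and all
# four sections of main2.c into SRAM1.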
import sys
import argparse
import os
import glob
import warnings
from elftools.elf.elffile import ELFFile
# This script will create linker commands for text, rodata, data and bss section relocation
PRINT_TEMPLATE = """
KEEP(*({0}))
"""
SECTION_LOAD_MEMORY_SEQ = """
__{0}_{1}_rom_start = LOADADDR(_{2}_{3}_SECTION_NAME);
"""
LOAD_ADDRESS_LOCATION_FLASH = """
#ifdef CONFIG_XIP
GROUP_DATA_LINK_IN({0}, FLASH)
#else
GROUP_DATA_LINK_IN({0}, {0})
#endif
"""
LOAD_ADDRESS_LOCATION_BSS = "GROUP_LINK_IN({0})"
MPU_RO_REGION_START = """
_{0}_mpu_ro_region_start = {1}_ADDR;
"""
MPU_RO_REGION_END = """
MPU_ALIGN(_{0}_mpu_ro_region_end - _{0}_mpu_ro_region_start);
_{0}_mpu_ro_region_end = .;
"""
# generic section creation format
LINKER_SECTION_SEQ = """
/* Linker section for memory region {2} for {3} section */
SECTION_PROLOGUE(_{2}_{3}_SECTION_NAME,,)
{{
. = ALIGN(4);
{4}
. = ALIGN(4);
}} {5}
__{0}_{1}_end = .;
__{0}_{1}_start = ADDR(_{2}_{3}_SECTION_NAME);
__{0}_{1}_size = SIZEOF(_{2}_{3}_SECTION_NAME);
"""
SOURCE_CODE_INCLUDES = """
/* Auto generated code. Do not modify.*/
#include <zephyr.h>
#include <linker/linker-defs.h>
#include <kernel_structs.h>
"""
EXTERN_LINKER_VAR_DECLARATION = """
extern char __{0}_{1}_start[];
extern char __{0}_{1}_rom_start[];
extern char __{0}_{1}_size[];
"""
DATA_COPY_FUNCTION = """
void data_copy_xip_relocation(void)
{{
{0}
}}
"""
BSS_ZEROING_FUNCTION = """
void bss_zeroing_relocation(void)
{{
{0}
}}
"""
MEMCPY_TEMPLATE = """
(void)memcpy(&__{0}_{1}_start, &__{0}_{1}_rom_start,
(u32_t) &__{0}_{1}_size);
"""
MEMSET_TEMPLATE = """
(void)memset(&__{0}_bss_start, 0,
(u32_t) &__{0}_bss_size);
"""
def find_sections(filename, full_list_of_sections):
with open(filename, 'rb') as obj_file_desc:
full_lib = ELFFile(obj_file_desc)
if not full_lib:
print("Error parsing file: ", filename)
sys.exit(1)
sections = [x for x in full_lib.iter_sections()]
for section in sections:
if ".text." in section.name:
full_list_of_sections["text"].append(section.name)
if ".rodata." in section.name:
full_list_of_sections["rodata"].append(section.name)
if ".data." in section.name:
full_list_of_sections["data"].append(section.name)
if ".bss." in section.name:
full_list_of_sections["bss"].append(section.name)
            # Common variables are only placed in the .bss section
            # after linking the final executable. This "if" finds
            # common symbols and warns the user of the problem.
            # The solution is simply to initialize the variable to
            # 0/NULL so that it ends up in the required place.
if ".symtab" in section.name:
symbols = [x for x in section.iter_symbols()]
for symbol in symbols:
if symbol.entry["st_shndx"] == 'SHN_COMMON':
warnings.warn("Common variable found. Move "+
symbol.name + " to bss by assigning it to 0/NULL")
return full_list_of_sections
def assign_to_correct_mem_region(memory_type,
full_list_of_sections, complete_list_of_sections):
all_regions = False
iteration_sections = {"text":False, "rodata":False, "data":False, "bss":False}
if "_TEXT" in memory_type:
iteration_sections["text"] = True
memory_type = memory_type.replace("_TEXT", "")
if "_RODATA" in memory_type:
iteration_sections["rodata"] = True
memory_type = memory_type.replace("_RODATA", "")
if "_DATA" in memory_type:
iteration_sections["data"] = True
memory_type = memory_type.replace("_DATA", "")
if "_BSS" in memory_type:
iteration_sections["bss"] = True
memory_type = memory_type.replace("_BSS", "")
if not (iteration_sections["data"] or iteration_sections["bss"] or
iteration_sections["text"] or iteration_sections["rodata"]):
all_regions = True
if memory_type in complete_list_of_sections:
for iter_sec in ["text", "rodata", "data", "bss"]:
if ((iteration_sections[iter_sec] or all_regions) and
full_list_of_sections[iter_sec] != []):
complete_list_of_sections[memory_type][iter_sec] += (
full_list_of_sections[iter_sec])
else:
        # a new memory type was found; in that case just assign the
        # full_list_of_sections to the memory-type dict
tmp_list = {"text":[], "rodata":[], "data":[], "bss":[]}
for iter_sec in ["text", "rodata", "data", "bss"]:
if ((iteration_sections[iter_sec] or all_regions) and
full_list_of_sections[iter_sec] != []):
tmp_list[iter_sec] = full_list_of_sections[iter_sec]
complete_list_of_sections[memory_type] = tmp_list
return complete_list_of_sections
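# Example (illustrative): memory_type "SRAM2_DATA_BSS" is stripped down to
# "SRAM2" and only the "data" and "bss" section lists are merged into
# complete_list_of_sections["SRAM2"]; a bare "SRAM2" merges all four regions.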
def print_linker_sections(list_sections):
print_string = ''
for section in list_sections:
print_string += PRINT_TEMPLATE.format(section)
return print_string
def string_create_helper(region, memory_type,
full_list_of_sections, load_address_in_flash):
linker_string = ''
if load_address_in_flash:
load_address_string = LOAD_ADDRESS_LOCATION_FLASH.format(memory_type)
else:
load_address_string = LOAD_ADDRESS_LOCATION_BSS.format(memory_type)
if full_list_of_sections[region] != []:
# Create a complete list of funcs/ variables that goes in for this
# memory type
tmp = print_linker_sections(full_list_of_sections[region])
if memory_type == 'SRAM' and (region == 'data' or region == 'bss'):
linker_string += tmp
else:
linker_string += LINKER_SECTION_SEQ.format(memory_type.lower(), region,
memory_type.upper(), region.upper(),
tmp, load_address_string)
if load_address_in_flash:
linker_string += SECTION_LOAD_MEMORY_SEQ.format(memory_type.lower(),
region,
memory_type.upper(),
region.upper())
return linker_string
def generate_linker_script(linker_file, sram_data_linker_file,
sram_bss_linker_file, complete_list_of_sections):
gen_string = ''
gen_string_sram_data = ''
gen_string_sram_bss = ''
for memory_type, full_list_of_sections in complete_list_of_sections.items():
if memory_type != "SRAM":
gen_string += MPU_RO_REGION_START.format(memory_type.lower(),
memory_type.upper())
gen_string += string_create_helper("text",
memory_type, full_list_of_sections, 1)
gen_string += string_create_helper("rodata",
memory_type, full_list_of_sections, 1)
if memory_type != "SRAM":
gen_string += MPU_RO_REGION_END.format(memory_type.lower())
if memory_type == 'SRAM':
gen_string_sram_data += string_create_helper("data",
memory_type, full_list_of_sections, 1)
gen_string_sram_bss += string_create_helper("bss",
memory_type, full_list_of_sections, 0)
else:
gen_string += string_create_helper("data",
memory_type, full_list_of_sections, 1)
gen_string += string_create_helper("bss",
memory_type, full_list_of_sections, 0)
#finally writing to the linker file
with open(linker_file, "a+") as file_desc:
file_desc.write(gen_string)
with open(sram_data_linker_file, "a+") as file_desc:
file_desc.write(gen_string_sram_data)
with open(sram_bss_linker_file, "a+") as file_desc:
file_desc.write(gen_string_sram_bss)
def generate_memcpy_code(memory_type, full_list_of_sections, code_generation):
all_sections = True
generate_section = {"text":False, "rodata":False, "data":False, "bss":False}
for section_name in ["_TEXT", "_RODATA", "_DATA", "_BSS"]:
if section_name in memory_type:
generate_section[section_name.lower()[1:]] = True
memory_type = memory_type.replace(section_name, "")
all_sections = False
if all_sections:
generate_section["text"] = True
generate_section["rodata"] = True
generate_section["data"] = True
generate_section["bss"] = True
    # add all the regions that need to be copied on boot-up
for mtype in ["text", "rodata", "data"]:
if memory_type == "SRAM" and mtype == "data":
continue
if full_list_of_sections[mtype] and generate_section[mtype]:
code_generation["copy_code"] += MEMCPY_TEMPLATE.format(memory_type.lower(), mtype)
code_generation["extern"] += EXTERN_LINKER_VAR_DECLARATION.format(
memory_type.lower(), mtype)
    # add all the bss data that needs to be zeroed on boot-up
if full_list_of_sections["bss"] and generate_section["bss"] and memory_type != "SRAM":
code_generation["zero_code"] += MEMSET_TEMPLATE.format(memory_type.lower())
code_generation["extern"] += EXTERN_LINKER_VAR_DECLARATION.format(
memory_type.lower(), "bss")
return code_generation
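# Example (illustrative) of the C code generated for memory type SRAM2 when
# data and bss sections are present (built from the templates above):
#
#   (void)memcpy(&__sram2_data_start, &__sram2_data_rom_start,
#                (u32_t) &__sram2_data_size);
#   (void)memset(&__sram2_bss_start, 0,
#                (u32_t) &__sram2_bss_size);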
def dump_header_file(header_file, code_generation):
code_string = ''
# create a dummy void function if there is no code to generate for
# bss/data/text regions
code_string += code_generation["extern"]
if code_generation["copy_code"]:
code_string += DATA_COPY_FUNCTION.format(code_generation["copy_code"])
else:
code_string += DATA_COPY_FUNCTION.format("void;")
if code_generation["zero_code"]:
code_string += BSS_ZEROING_FUNCTION.format(code_generation["zero_code"])
else:
code_string += BSS_ZEROING_FUNCTION.format("return;")
with open(header_file, "w") as header_file_desc:
header_file_desc.write(SOURCE_CODE_INCLUDES)
header_file_desc.write(code_string)
def parse_args():
global args
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-d", "--directory", required=True,
help="obj file's directory")
parser.add_argument("-i", "--input_rel_dict", required=True,
help="input src:memory type(sram2 or ccm or aon etc) string")
parser.add_argument("-o", "--output", required=False, help="Output ld file")
parser.add_argument("-s", "--output_sram_data", required=False,
help="Output sram data ld file")
parser.add_argument("-b", "--output_sram_bss", required=False,
help="Output sram bss ld file")
parser.add_argument("-c", "--output_code", required=False,
help="Output relocation code header file")
parser.add_argument("-v", "--verbose", action="count", default=0,
help="Verbose Output")
args = parser.parse_args()
#return the absolute path for the object file.
def get_obj_filename(searchpath, filename):
    # get the object file name, which is almost always the source name with ".obj" appended
obj_filename = filename.split("/")[-1] + ".obj"
for dirpath, _, files in os.walk(searchpath):
for filename1 in files:
if filename1 == obj_filename:
if filename.split("/")[-2] in dirpath.split("/")[-1]:
fullname = os.path.join(dirpath, filename1)
return fullname
# Create a dict with memory type as key and a list of files as value.
def create_dict_wrt_mem():
    # wild cards (*) in file names are expanded via glob below
rel_dict = dict()
if args.input_rel_dict == '':
print("Disable CONFIG_CODE_DATA_RELOCATION if no file needs relocation")
sys.exit(1)
for line in args.input_rel_dict.split(';'):
mem_region, file_name = line.split(':')
file_name_list = glob.glob(file_name)
if not file_name_list:
warnings.warn("File: "+file_name+" Not found")
continue
if mem_region == '':
continue
if args.verbose:
print("Memory region ", mem_region, " Selected for file:", file_name_list)
if mem_region in rel_dict:
rel_dict[mem_region].extend(file_name_list)
else:
rel_dict[mem_region] = file_name_list
return rel_dict
def main():
parse_args()
searchpath = args.directory
linker_file = args.output
sram_data_linker_file = args.output_sram_data
sram_bss_linker_file = args.output_sram_bss
rel_dict = create_dict_wrt_mem()
complete_list_of_sections = {}
    # Create, or truncate, file contents if it already exists
# raw = open(linker_file, "w")
code_generation = {"copy_code": '', "zero_code":'', "extern":''}
#for each memory_type, create text/rodata/data/bss sections for all obj files
for memory_type, files in rel_dict.items():
full_list_of_sections = {"text":[], "rodata":[], "data":[], "bss":[]}
for filename in files:
obj_filename = get_obj_filename(searchpath, filename)
# the obj file wasn't found. Probably not compiled.
if not obj_filename:
continue
full_list_of_sections = find_sections(obj_filename, full_list_of_sections)
        # clean up and attach the sections to the memory type
complete_list_of_sections = assign_to_correct_mem_region(memory_type,
full_list_of_sections,
complete_list_of_sections)
generate_linker_script(linker_file, sram_data_linker_file,
sram_bss_linker_file, complete_list_of_sections)
for mem_type, list_of_sections in complete_list_of_sections.items():
code_generation = generate_memcpy_code(mem_type,
list_of_sections, code_generation)
dump_header_file(args.output_code, code_generation)
if __name__ == '__main__':
main()
| apache-2.0 | -298,017,829,541,320,450 | 35.981087 | 94 | 0.58825 | false |
resync/resync | tests/test_resync-build_script.py | 1 | 2112 | import sys
import unittest
import io
from resync.resource import Resource
from resync.resource_list import ResourceList
from resync.capability_list import CapabilityList
from resync.sitemap import Sitemap, SitemapIndexError, SitemapParseError
import subprocess
def run_resync(args):
args.insert(0, './resync-build')
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
(out, err) = proc.communicate()
    return out
class TestClientLinkOptions(unittest.TestCase):
def test01_no_links(self):
xml = run_resync(['--write-resourcelist',
'http://example.org/t', 'tests/testdata/dir1'])
rl = ResourceList()
rl.parse(fh=io.BytesIO(xml))
self.assertEqual(len(rl), 2)
self.assertEqual(rl.link('describedby'), None)
def test02_resource_list_links(self):
xml = run_resync(['--write-resourcelist',
'--describedby-link=a',
'--sourcedescription-link=b', # will be ignored
'--capabilitylist-link=c',
'http://example.org/t', 'tests/testdata/dir1'])
rl = ResourceList()
rl.parse(fh=io.BytesIO(xml))
self.assertEqual(len(rl), 2)
self.assertNotEqual(rl.link('describedby'), None)
self.assertEqual(rl.link('describedby')['href'], 'a')
self.assertNotEqual(rl.link('up'), None)
self.assertEqual(rl.link('up')['href'], 'c')
def test03_capability_list_links(self):
xml = run_resync(['--write-capabilitylist=resourcelist=rl,changedump=cd',
'--describedby-link=a',
'--sourcedescription-link=b',
'--capabilitylist-link=c']) # will be ignored
capl = CapabilityList()
capl.parse(fh=io.BytesIO(xml))
self.assertEqual(len(capl), 2)
self.assertNotEqual(capl.link('describedby'), None)
self.assertEqual(capl.link('describedby')['href'], 'a')
self.assertNotEqual(capl.link('up'), None)
self.assertEqual(capl.link('up')['href'], 'b')
| apache-2.0 | 5,473,029,633,957,304,000 | 37.4 | 81 | 0.600852 | false |
mafik/pargen | ngram_model.py | 1 | 4470 | import model
from utils import *
import data
with status("Loading n-gram model..."):
n = 5
ngram_db_path = "ngram_db_{}.npz".format(n)
train_test_divider = int(len(data.lengths) * 0.80)
try:
npzfile = np.load(ngram_db_path)
ngram_dataset = npzfile["ngram_dataset"]
ngram_probability_table = npzfile["ngram_probability_table"]
except IOError as e:
from collections import Counter
from nltk.util import ngrams as ngram_generator
log("Building n-gram model from scratch...")
ngrams = tuple(Counter() for i in range(n+1))
ngrams[n].update(ngram for array in arrays[:train_test_divider] for ngram in ngram_generator(chain((data.GO,)*n, array, (data.EOS,)), n))
for i in range(n - 1, 0, -1):
for ngram, count in ngrams[i+1].items():
ngrams[i][ngram[1:]] += count
log("Precomputing unique prefixes/suffixes")
    # unique_prefixes[i][ngram] where len(ngram) == i contains the number of different symbols that precede the given ngram
    unique_prefixes = tuple(Counter() for i in range(n))
    # unique_suffixes[i][ngram] where len(ngram) == i contains the number of different symbols that follow the given ngram
    unique_suffixes = tuple(Counter() for i in range(n))
for i in range(n, 0, -1):
unique_prefixes[i-1].update(ngram[1:] for ngram in ngrams[i].keys())
unique_suffixes[i-1].update(ngram[:-1] for ngram in ngrams[i].keys())
log("Indexing ngrams")
all_ngrams = tuple(set() for i in range(n+1))
for array in arrays:
for ngram in ngram_generator(chain((data.GO,)*n, array, (data.EOS,)), n):
all_ngrams[n].add(ngram)
for i in range(n - 1, 0, -1):
for ngram in all_ngrams[i+1]:
all_ngrams[i].add(ngram[1:])
# maps ngram tuple to ngram number
ngram_idx = tuple(dict() for i in range(n))
for i in range(n, 0, -1):
for num, ngram in enumerate(all_ngrams[i]):
ngram_idx[i-1][ngram] = num
discount = (1.0, 0.5, 0.75, 0.75, 0.75, 0.75)
def prob(full_ngram):
current_p = 0.0
p_multiplier = 1.0
estimation_base = ngrams
for i in range(n, 0, -1):
ngram = full_ngram[-i:]
prefix = ngram[:-1]
if estimation_base[i-1][prefix]:
#print("i", i)
#print("full ngram", full_ngram)
#print("ngram", ngram)
#print("prefix", prefix)
#print("estamition_base", estimation_base[i][ngram])
p = max(0, estimation_base[i][ngram] - discount[i]) / estimation_base[i-1][prefix]
current_p += p * p_multiplier
p_multiplier *= discount[i] / estimation_base[i-1][prefix] * unique_suffixes[i-1][prefix]
estimation_base = unique_prefixes
current_p += p_multiplier / symbol_count # probability of an unseen symbol
#print(u"Prob of {}: {}".format(''.join(symbol_map[c] for c in ngram), prob_cache[ngram]))
return current_p
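    # prob() above implements absolute-discounting backoff (Kneser-Ney
    # style): for each order i it adds
    #     max(count(ngram) - D_i, 0) / count(prefix)
    # to the running probability and multiplies the backoff weight by
    #     D_i * N1+(prefix,*) / count(prefix),
    # where N1+(prefix,*) is the number of distinct successors of the
    # prefix (unique_suffixes); lower orders use unique_prefixes counts.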
precomputing_started_clock = log("Precomputing successor probabilities")
# probabilities for the next symbol based on the last (n-1)-gram
ngram_probability_table = np.zeros((len(ngram_idx[-2]), symbol_count), dtype=np.float32)
progress = 0
for ngram, idx in ngram_idx[-2].items():
if progress % 500 == 0:
if progress:
time_left = (clock() - precomputing_started_clock) / progress * (len(ngram_idx[-2]) - progress)
else:
time_left = float("inf")
log("Precomputing successor probabilities: {:.1f}% ({:.0f} seconds left)".format(100.0 * progress / len(ngram_idx[-2]), time_left))
probs = np.array([prob(ngram + (i,)) for i in range(symbol_count)])
probs = probs / max(np.sum(probs), 0.0001)
#pred_symbol = np.argmax(ngram_probability_table[ngram_idx[n1][prefix]])
#print(u"Prefix '{}', prediction {:3d} '{}'".format(''.join(symbol_map[c] for c in prefix), pred_symbol, symbol_map[pred_symbol]))
ngram_probability_table[idx, :] = probs
progress += 1
log("Building ngram sequence")
ngram_dataset = np.zeros(dataset_np.shape + (n,), dtype=np.int32)
offset = 0
for array_index, arr in enumerate(arrays):
for i in range(n):
ngram_length = i + 1
for pos, ngram in enumerate(ngram_generator(chain((data.GO,) * ngram_length, arr), ngram_length)):
ngram_dataset[offset + pos, i] = ngram_idx[i][ngram]
offset += len(arr) + 2
np.savez(ngram_db_path, ngram_dataset=ngram_dataset, ngram_probability_table=ngram_probability_table)
| agpl-3.0 | -1,377,047,284,201,694,000 | 43.7 | 141 | 0.624385 | false |